id
int64
0
458k
file_name
stringlengths
4
119
file_path
stringlengths
14
227
content
stringlengths
24
9.96M
size
int64
24
9.96M
language
stringclasses
1 value
extension
stringclasses
14 values
total_lines
int64
1
219k
avg_line_length
float64
2.52
4.63M
max_line_length
int64
5
9.91M
alphanum_fraction
float64
0
1
repo_name
stringlengths
7
101
repo_stars
int64
100
139k
repo_forks
int64
0
26.4k
repo_open_issues
int64
0
2.27k
repo_license
stringclasses
12 values
repo_extraction_date
stringclasses
433 values
2,288,800
pix_finder_ui.py
AmirMahdaviAM_SCSHub/scshub/view/ui/pix_finder_ui.py
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file 'c:\Users\Amir\Desktop\SCSHub\scshub\view\ui\pix_finder_ui.ui'
#
# Created by: PyQt5 UI code generator 5.15.9
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.

from PyQt5 import QtCore, QtGui, QtWidgets


class PixFinderUi(object):
    """Generated UI definition for the PixFinder view.

    Layout, top to bottom: a fixed-height toolbar card (Input/Reset on the
    left, Find on the right of a separator), a breadcrumb navigation row
    (home/back buttons, breadcrumb bar, refresh button), and two list cards
    ("Model" and "Anim") side by side.
    """

    def setupUi(self, PixFinder):
        """Build the widget tree and layouts onto the *PixFinder* widget."""
        # Root widget and outer vertical layout.
        PixFinder.setObjectName("PixFinder")
        PixFinder.resize(1179, 802)
        self.main_lyt = QtWidgets.QVBoxLayout(PixFinder)
        self.main_lyt.setContentsMargins(0, 0, 0, 0)
        self.main_lyt.setObjectName("main_lyt")

        # --- Top toolbar card, pinned to exactly 60 px tall ---
        self.top_card = SimpleCardWidget(PixFinder)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.top_card.sizePolicy().hasHeightForWidth())
        self.top_card.setSizePolicy(sizePolicy)
        self.top_card.setMinimumSize(QtCore.QSize(0, 60))
        self.top_card.setMaximumSize(QtCore.QSize(16777215, 60))
        self.top_card.setObjectName("top_card")
        self.top_card_lyt = QtWidgets.QHBoxLayout(self.top_card)
        self.top_card_lyt.setObjectName("top_card_lyt")

        # "Input" button (starts disabled).
        self.input_btn = PushButton(self.top_card)
        self.input_btn.setEnabled(False)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.input_btn.sizePolicy().hasHeightForWidth())
        self.input_btn.setSizePolicy(sizePolicy)
        self.input_btn.setMinimumSize(QtCore.QSize(110, 0))
        self.input_btn.setObjectName("input_btn")
        self.top_card_lyt.addWidget(self.input_btn)

        # "Reset" tool button (starts disabled).
        self.reset_btn = ToolButton(self.top_card)
        self.reset_btn.setEnabled(False)
        self.reset_btn.setObjectName("reset_btn")
        self.top_card_lyt.addWidget(self.reset_btn)

        # Spacer | vertical separator | spacer between the button groups.
        spacerItem = QtWidgets.QSpacerItem(4, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
        self.top_card_lyt.addItem(spacerItem)
        self.v_separator = VerticalSeparator(self.top_card)
        self.v_separator.setObjectName("v_separator")
        self.top_card_lyt.addWidget(self.v_separator)
        spacerItem1 = QtWidgets.QSpacerItem(4, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
        self.top_card_lyt.addItem(spacerItem1)

        # Primary "Find" action button (starts disabled).
        self.find_btn = PrimaryPushButton(self.top_card)
        self.find_btn.setEnabled(False)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.find_btn.sizePolicy().hasHeightForWidth())
        self.find_btn.setSizePolicy(sizePolicy)
        self.find_btn.setMinimumSize(QtCore.QSize(110, 0))
        self.find_btn.setObjectName("find_btn")
        self.top_card_lyt.addWidget(self.find_btn)
        self.main_lyt.addWidget(self.top_card)

        # --- Navigation row: home, back, breadcrumb card, refresh ---
        self.navbar_lyt = QtWidgets.QHBoxLayout()
        self.navbar_lyt.setObjectName("navbar_lyt")
        self.home_btn = ToolButton(PixFinder)
        self.home_btn.setEnabled(False)
        self.home_btn.setMinimumSize(QtCore.QSize(0, 35))
        self.home_btn.setText("")
        self.home_btn.setObjectName("home_btn")
        self.navbar_lyt.addWidget(self.home_btn)
        self.back_btn = ToolButton(PixFinder)
        self.back_btn.setEnabled(False)
        self.back_btn.setMinimumSize(QtCore.QSize(0, 35))
        self.back_btn.setText("")
        self.back_btn.setObjectName("back_btn")
        self.navbar_lyt.addWidget(self.back_btn)
        # Breadcrumb bar sits inside its own small card.
        self.navbar_card = SimpleCardWidget(PixFinder)
        self.navbar_card.setMinimumSize(QtCore.QSize(0, 37))
        self.navbar_card.setObjectName("navbar_card")
        self.verticalLayout_9 = QtWidgets.QVBoxLayout(self.navbar_card)
        self.verticalLayout_9.setObjectName("verticalLayout_9")
        self.navbar = BreadcrumbBar(self.navbar_card)
        self.navbar.setMinimumSize(QtCore.QSize(100, 19))
        self.navbar.setObjectName("navbar")
        self.verticalLayout_9.addWidget(self.navbar)
        self.navbar_lyt.addWidget(self.navbar_card)
        self.refresh_btn = ToolButton(PixFinder)
        self.refresh_btn.setEnabled(False)
        self.refresh_btn.setMinimumSize(QtCore.QSize(0, 35))
        self.refresh_btn.setText("")
        self.refresh_btn.setObjectName("refresh_btn")
        self.navbar_lyt.addWidget(self.refresh_btn)
        self.main_lyt.addLayout(self.navbar_lyt)

        # --- List row: "Model" card and "Anim" card ---
        self.list_lyt = QtWidgets.QHBoxLayout()
        self.list_lyt.setObjectName("list_lyt")

        # Model card: header (label + count badge) above a list widget.
        self.model_card = ElevatedCardWidget(PixFinder)
        self.model_card.setObjectName("model_card")
        self.model_card_lyt = QtWidgets.QVBoxLayout(self.model_card)
        self.model_card_lyt.setObjectName("model_card_lyt")
        self.model_header_card = SimpleCardWidget(self.model_card)
        self.model_header_card.setObjectName("model_header_card")
        self.model_header_card_lyt = QtWidgets.QHBoxLayout(self.model_header_card)
        self.model_header_card_lyt.setObjectName("model_header_card_lyt")
        self.model_lbl = StrongBodyLabel(self.model_header_card)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.model_lbl.sizePolicy().hasHeightForWidth())
        self.model_lbl.setSizePolicy(sizePolicy)
        self.model_lbl.setObjectName("model_lbl")
        self.model_header_card_lyt.addWidget(self.model_lbl)
        self.model_badge = InfoBadge(self.model_header_card)
        self.model_badge.setObjectName("model_badge")
        self.model_header_card_lyt.addWidget(self.model_badge)
        self.model_card_lyt.addWidget(self.model_header_card)
        self.model_list = ListWidget(self.model_card)
        self.model_list.setObjectName("model_list")
        self.model_card_lyt.addWidget(self.model_list)
        self.list_lyt.addWidget(self.model_card)

        # Anim card: same structure as the model card.
        # NOTE(review): the label attribute is named "animLnl" (likely a
        # typo of "animLbl" in the .ui file) — kept as-is; callers may rely on it.
        self.anim_card = ElevatedCardWidget(PixFinder)
        self.anim_card.setObjectName("anim_card")
        self.anim_card_lyt = QtWidgets.QVBoxLayout(self.anim_card)
        self.anim_card_lyt.setObjectName("anim_card_lyt")
        self.anim_header_card = SimpleCardWidget(self.anim_card)
        self.anim_header_card.setObjectName("anim_header_card")
        self.anim_header_card_lyt = QtWidgets.QHBoxLayout(self.anim_header_card)
        self.anim_header_card_lyt.setObjectName("anim_header_card_lyt")
        self.animLnl = StrongBodyLabel(self.anim_header_card)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.animLnl.sizePolicy().hasHeightForWidth())
        self.animLnl.setSizePolicy(sizePolicy)
        self.animLnl.setObjectName("animLnl")
        self.anim_header_card_lyt.addWidget(self.animLnl)
        self.anim_badge = InfoBadge(self.anim_header_card)
        self.anim_badge.setObjectName("anim_badge")
        self.anim_header_card_lyt.addWidget(self.anim_badge)
        self.anim_card_lyt.addWidget(self.anim_header_card)
        self.anim_list = ListWidget(self.anim_card)
        self.anim_list.setObjectName("anim_list")
        self.anim_card_lyt.addWidget(self.anim_list)
        self.list_lyt.addWidget(self.anim_card)

        # Stretch 1:2 — the model card gets one third of the row's width.
        self.list_lyt.setStretch(0, 1)
        self.list_lyt.setStretch(1, 2)
        self.main_lyt.addLayout(self.list_lyt)

        self.retranslateUi(PixFinder)
        QtCore.QMetaObject.connectSlotsByName(PixFinder)

    def retranslateUi(self, PixFinder):
        """Apply translatable window title, button texts and tooltips."""
        _translate = QtCore.QCoreApplication.translate
        PixFinder.setWindowTitle(_translate("PixFinder", "PixFinder"))
        self.input_btn.setText(_translate("PixFinder", "Input"))
        self.reset_btn.setToolTip(_translate("PixFinder", "Reset input archives"))
        self.find_btn.setToolTip(_translate("PixFinder", "Save to tobj file with selected save mode"))
        self.find_btn.setText(_translate("PixFinder", "Find"))
        self.home_btn.setToolTip(_translate("PixFinder", "Go to home directory"))
        self.refresh_btn.setToolTip(_translate("PixFinder", "Refresh current directory"))
        self.model_lbl.setText(_translate("PixFinder", "Model"))
        self.model_badge.setText(_translate("PixFinder", "0"))
        self.animLnl.setText(_translate("PixFinder", "Anim"))
        self.anim_badge.setText(_translate("PixFinder", "0"))


# pyuic5 emits the promoted-widget import after the class definition.
from qfluentwidgets import BreadcrumbBar, ElevatedCardWidget, InfoBadge, ListWidget, PrimaryPushButton, PushButton, SimpleCardWidget, StrongBodyLabel, ToolButton, VerticalSeparator
9,110
Python
.py
158
49.170886
180
0.71526
AmirMahdaviAM/SCSHub
8
0
0
GPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,801
pix_hasher_str_ui.py
AmirMahdaviAM_SCSHub/scshub/view/ui/pix_hasher_str_ui.py
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file 'c:\Users\Amir\Desktop\SCSHub\scshub\view\ui\pix_hasher_str_ui.ui'
#
# Created by: PyQt5 UI code generator 5.15.9
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.

from PyQt5 import QtCore, QtGui, QtWidgets


class PixStringHasherUi(object):
    """Generated UI for the string-hasher view.

    One card containing a centered "String Hasher" title, an input line, and
    two read-only output lines ("Hash" and "Hex") side by side.
    """

    def setupUi(self, PixStringHasher):
        """Build the widget tree and layouts onto the *PixStringHasher* widget."""
        # Root widget and outer vertical layout.
        PixStringHasher.setObjectName("PixStringHasher")
        PixStringHasher.resize(987, 759)
        self.main_lyt = QtWidgets.QVBoxLayout(PixStringHasher)
        self.main_lyt.setContentsMargins(0, 0, 0, 0)
        self.main_lyt.setSpacing(30)
        self.main_lyt.setObjectName("main_lyt")

        # Main card that holds the whole hasher UI; height shrinks to content.
        self.mainCrd = SimpleCardWidget(PixStringHasher)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.mainCrd.sizePolicy().hasHeightForWidth())
        self.mainCrd.setSizePolicy(sizePolicy)
        self.mainCrd.setObjectName("mainCrd")
        self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.mainCrd)
        self.verticalLayout_3.setObjectName("verticalLayout_3")

        # Centered title label.
        self.string_hasher_lbl = StrongBodyLabel(self.mainCrd)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.string_hasher_lbl.sizePolicy().hasHeightForWidth())
        self.string_hasher_lbl.setSizePolicy(sizePolicy)
        self.string_hasher_lbl.setAlignment(QtCore.Qt.AlignCenter)
        self.string_hasher_lbl.setObjectName("string_hasher_lbl")
        self.verticalLayout_3.addWidget(self.string_hasher_lbl)

        # Inner card with the input line above the two output lines.
        self.top_card = SimpleCardWidget(self.mainCrd)
        self.top_card.setObjectName("top_card")
        self.top_card_lyt = QtWidgets.QVBoxLayout(self.top_card)
        self.top_card_lyt.setContentsMargins(-1, 14, -1, 14)
        self.top_card_lyt.setObjectName("top_card_lyt")

        # Input line (starts disabled and empty).
        self.input_line = LineEdit(self.top_card)
        self.input_line.setEnabled(False)
        self.input_line.setText("")
        self.input_line.setMaxLength(1000)
        self.input_line.setClearButtonEnabled(True)
        self.input_line.setObjectName("input_line")
        self.top_card_lyt.addWidget(self.input_line)

        # Read-only hash / hex result lines, side by side.
        self.hash_lyt = QtWidgets.QHBoxLayout()
        self.hash_lyt.setObjectName("hash_lyt")
        self.hash_line = LineEdit(self.top_card)
        self.hash_line.setMaxLength(1000)
        self.hash_line.setReadOnly(True)
        self.hash_line.setObjectName("hash_line")
        self.hash_lyt.addWidget(self.hash_line)
        self.hex_line = LineEdit(self.top_card)
        self.hex_line.setMaxLength(1000)
        self.hex_line.setReadOnly(True)
        self.hex_line.setObjectName("hex_line")
        self.hash_lyt.addWidget(self.hex_line)
        self.top_card_lyt.addLayout(self.hash_lyt)
        self.verticalLayout_3.addWidget(self.top_card)
        self.main_lyt.addWidget(self.mainCrd)

        self.retranslateUi(PixStringHasher)
        QtCore.QMetaObject.connectSlotsByName(PixStringHasher)

    def retranslateUi(self, PixStringHasher):
        """Apply translatable window title, label text and placeholders."""
        _translate = QtCore.QCoreApplication.translate
        PixStringHasher.setWindowTitle(_translate("PixStringHasher", "PixStringHasher"))
        self.string_hasher_lbl.setText(_translate("PixStringHasher", "String Hasher"))
        self.input_line.setPlaceholderText(_translate("PixStringHasher", "Input string"))
        self.hash_line.setPlaceholderText(_translate("PixStringHasher", "Hash"))
        self.hex_line.setPlaceholderText(_translate("PixStringHasher", "Hex"))


# pyuic5 emits the promoted-widget import after the class definition.
from qfluentwidgets import LineEdit, SimpleCardWidget, StrongBodyLabel
3,942
Python
.py
71
47.661972
119
0.726967
AmirMahdaviAM/SCSHub
8
0
0
GPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,802
tobj_editor_ui.py
AmirMahdaviAM_SCSHub/scshub/view/ui/tobj_editor_ui.py
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'c:\Users\Amir\Desktop\SCSHub\scshub\view\ui\tobj_editor_ui.ui' # # Created by: PyQt5 UI code generator 5.15.9 # # WARNING: Any manual changes made to this file will be lost when pyuic5 is # run again. Do not edit this file unless you know what you are doing. from PyQt5 import QtCore, QtGui, QtWidgets class TobjEditorUi(object): def setupUi(self, TobjEditor): TobjEditor.setObjectName("TobjEditor") TobjEditor.resize(1116, 825) self.main_lyt = QtWidgets.QVBoxLayout(TobjEditor) self.main_lyt.setContentsMargins(0, 0, 0, 0) self.main_lyt.setObjectName("main_lyt") self.top_card = SimpleCardWidget(TobjEditor) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.top_card.sizePolicy().hasHeightForWidth()) self.top_card.setSizePolicy(sizePolicy) self.top_card.setMinimumSize(QtCore.QSize(0, 60)) self.top_card.setMaximumSize(QtCore.QSize(16777215, 60)) self.top_card.setObjectName("top_card") self.top_card_lyt = QtWidgets.QHBoxLayout(self.top_card) self.top_card_lyt.setObjectName("top_card_lyt") self.input_btn = PushButton(self.top_card) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.input_btn.sizePolicy().hasHeightForWidth()) self.input_btn.setSizePolicy(sizePolicy) self.input_btn.setMinimumSize(QtCore.QSize(110, 0)) self.input_btn.setObjectName("input_btn") self.top_card_lyt.addWidget(self.input_btn) self.output_btn = PushButton(self.top_card) self.output_btn.setEnabled(True) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) 
sizePolicy.setHeightForWidth(self.output_btn.sizePolicy().hasHeightForWidth()) self.output_btn.setSizePolicy(sizePolicy) self.output_btn.setMinimumSize(QtCore.QSize(110, 0)) self.output_btn.setObjectName("output_btn") self.top_card_lyt.addWidget(self.output_btn) self.revert_btn = PushButton(self.top_card) self.revert_btn.setEnabled(False) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.revert_btn.sizePolicy().hasHeightForWidth()) self.revert_btn.setSizePolicy(sizePolicy) self.revert_btn.setMinimumSize(QtCore.QSize(110, 0)) self.revert_btn.setObjectName("revert_btn") self.top_card_lyt.addWidget(self.revert_btn) spacerItem = QtWidgets.QSpacerItem(4, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum) self.top_card_lyt.addItem(spacerItem) self.v_separator_1 = VerticalSeparator(self.top_card) self.v_separator_1.setObjectName("v_separator_1") self.top_card_lyt.addWidget(self.v_separator_1) spacerItem1 = QtWidgets.QSpacerItem(4, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum) self.top_card_lyt.addItem(spacerItem1) self.save_btn = PrimaryPushButton(self.top_card) self.save_btn.setEnabled(True) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.save_btn.sizePolicy().hasHeightForWidth()) self.save_btn.setSizePolicy(sizePolicy) self.save_btn.setMinimumSize(QtCore.QSize(110, 0)) self.save_btn.setObjectName("save_btn") self.top_card_lyt.addWidget(self.save_btn) self.save_sgmnt = SegmentedWidget(self.top_card) self.save_sgmnt.setEnabled(True) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Maximum) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) 
sizePolicy.setHeightForWidth(self.save_sgmnt.sizePolicy().hasHeightForWidth()) self.save_sgmnt.setSizePolicy(sizePolicy) self.save_sgmnt.setMaximumSize(QtCore.QSize(200, 16777215)) self.save_sgmnt.setObjectName("save_sgmnt") self.top_card_lyt.addWidget(self.save_sgmnt) spacerItem2 = QtWidgets.QSpacerItem(4, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum) self.top_card_lyt.addItem(spacerItem2) self.v_separator_2 = VerticalSeparator(self.top_card) self.v_separator_2.setObjectName("v_separator_2") self.top_card_lyt.addWidget(self.v_separator_2) spacerItem3 = QtWidgets.QSpacerItem(4, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum) self.top_card_lyt.addItem(spacerItem3) self.filename_lyt = QtWidgets.QFormLayout() self.filename_lyt.setFormAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter) self.filename_lyt.setObjectName("filename_lyt") self.filename_lbl = BodyLabel(self.top_card) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.filename_lbl.sizePolicy().hasHeightForWidth()) self.filename_lbl.setSizePolicy(sizePolicy) self.filename_lbl.setObjectName("filename_lbl") self.filename_lyt.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.filename_lbl) self.filename_line = LineEdit(self.top_card) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.filename_line.sizePolicy().hasHeightForWidth()) self.filename_line.setSizePolicy(sizePolicy) self.filename_line.setMaximumSize(QtCore.QSize(180, 33)) self.filename_line.setFocusPolicy(QtCore.Qt.ClickFocus) self.filename_line.setPlaceholderText("dashboard") self.filename_line.setClearButtonEnabled(True) self.filename_line.setObjectName("filename_line") self.filename_lyt.setWidget(0, 
QtWidgets.QFormLayout.FieldRole, self.filename_line) self.top_card_lyt.addLayout(self.filename_lyt) self.main_lyt.addWidget(self.top_card) self.texture_path_card = SimpleCardWidget(TobjEditor) self.texture_path_card.setMinimumSize(QtCore.QSize(0, 55)) self.texture_path_card.setMaximumSize(QtCore.QSize(16777215, 55)) self.texture_path_card.setObjectName("texture_path_card") self.texture_path_card_lyt = QtWidgets.QVBoxLayout(self.texture_path_card) self.texture_path_card_lyt.setObjectName("texture_path_card_lyt") self.texture_path_line = LineEdit(self.texture_path_card) self.texture_path_line.setFocusPolicy(QtCore.Qt.ClickFocus) self.texture_path_line.setPlaceholderText("/vehicle/truck/share/dashboard.dds") self.texture_path_line.setClearButtonEnabled(True) self.texture_path_line.setObjectName("texture_path_line") self.texture_path_card_lyt.addWidget(self.texture_path_line) self.main_lyt.addWidget(self.texture_path_card) self.cube_path_card = SimpleCardWidget(TobjEditor) self.cube_path_card.setMinimumSize(QtCore.QSize(0, 140)) self.cube_path_card.setMaximumSize(QtCore.QSize(16777215, 140)) self.cube_path_card.setObjectName("cube_path_card") self.cube_path_card_lyt = QtWidgets.QVBoxLayout(self.cube_path_card) self.cube_path_card_lyt.setObjectName("cube_path_card_lyt") self.front_rear_lyt = QtWidgets.QHBoxLayout() self.front_rear_lyt.setObjectName("front_rear_lyt") self.front_path_line = LineEdit(self.cube_path_card) self.front_path_line.setFocusPolicy(QtCore.Qt.ClickFocus) self.front_path_line.setPlaceholderText("/vehicle/truck/share/cube_front.dds") self.front_path_line.setClearButtonEnabled(True) self.front_path_line.setObjectName("front_path_line") self.front_rear_lyt.addWidget(self.front_path_line) self.rear_path_line = LineEdit(self.cube_path_card) self.rear_path_line.setFocusPolicy(QtCore.Qt.ClickFocus) self.rear_path_line.setPlaceholderText("/vehicle/truck/share/cube_rear.dds") self.rear_path_line.setClearButtonEnabled(True) 
self.rear_path_line.setObjectName("rear_path_line") self.front_rear_lyt.addWidget(self.rear_path_line) self.cube_path_card_lyt.addLayout(self.front_rear_lyt) self.top_bottom_lyt = QtWidgets.QHBoxLayout() self.top_bottom_lyt.setObjectName("top_bottom_lyt") self.top_path_line = LineEdit(self.cube_path_card) self.top_path_line.setFocusPolicy(QtCore.Qt.ClickFocus) self.top_path_line.setPlaceholderText("/vehicle/truck/share/cube_top.dds") self.top_path_line.setClearButtonEnabled(True) self.top_path_line.setObjectName("top_path_line") self.top_bottom_lyt.addWidget(self.top_path_line) self.bottom_path_line = LineEdit(self.cube_path_card) self.bottom_path_line.setFocusPolicy(QtCore.Qt.ClickFocus) self.bottom_path_line.setPlaceholderText("/vehicle/truck/share/cube_bottom.dds") self.bottom_path_line.setClearButtonEnabled(True) self.bottom_path_line.setObjectName("bottom_path_line") self.top_bottom_lyt.addWidget(self.bottom_path_line) self.cube_path_card_lyt.addLayout(self.top_bottom_lyt) self.left_right_lyt = QtWidgets.QHBoxLayout() self.left_right_lyt.setObjectName("left_right_lyt") self.left_path_line = LineEdit(self.cube_path_card) self.left_path_line.setFocusPolicy(QtCore.Qt.ClickFocus) self.left_path_line.setPlaceholderText("/vehicle/truck/share/cube_left.dds") self.left_path_line.setClearButtonEnabled(True) self.left_path_line.setObjectName("left_path_line") self.left_right_lyt.addWidget(self.left_path_line) self.right_path_line = LineEdit(self.cube_path_card) self.right_path_line.setFocusPolicy(QtCore.Qt.ClickFocus) self.right_path_line.setPlaceholderText("/vehicle/truck/share/cube_right.dds") self.right_path_line.setClearButtonEnabled(True) self.right_path_line.setObjectName("right_path_line") self.left_right_lyt.addWidget(self.right_path_line) self.cube_path_card_lyt.addLayout(self.left_right_lyt) self.main_lyt.addWidget(self.cube_path_card) self.middle_lyt = QtWidgets.QHBoxLayout() self.middle_lyt.setSpacing(6) self.middle_lyt.setObjectName("middle_lyt") 
self.texture_card = SimpleCardWidget(TobjEditor) self.texture_card.setMinimumSize(QtCore.QSize(300, 300)) self.texture_card.setMaximumSize(QtCore.QSize(300, 300)) self.texture_card.setObjectName("texture_card") self.texture_card_lyt = QtWidgets.QHBoxLayout(self.texture_card) self.texture_card_lyt.setObjectName("texture_card_lyt") self.texture_img = ImageLabel(self.texture_card) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Maximum) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.texture_img.sizePolicy().hasHeightForWidth()) self.texture_img.setSizePolicy(sizePolicy) self.texture_img.setMinimumSize(QtCore.QSize(278, 278)) self.texture_img.setMaximumSize(QtCore.QSize(278, 278)) self.texture_img.setObjectName("texture_img") self.texture_card_lyt.addWidget(self.texture_img) self.middle_lyt.addWidget(self.texture_card) self.option_card = SimpleCardWidget(TobjEditor) self.option_card.setMinimumSize(QtCore.QSize(0, 300)) self.option_card.setMaximumSize(QtCore.QSize(16777215, 300)) self.option_card.setObjectName("option_card") self.option_card_lyt = QtWidgets.QFormLayout(self.option_card) self.option_card_lyt.setHorizontalSpacing(50) self.option_card_lyt.setVerticalSpacing(11) self.option_card_lyt.setObjectName("option_card_lyt") self.type_lbl = BodyLabel(self.option_card) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.type_lbl.sizePolicy().hasHeightForWidth()) self.type_lbl.setSizePolicy(sizePolicy) self.type_lbl.setMaximumSize(QtCore.QSize(200, 16777215)) self.type_lbl.setObjectName("type_lbl") self.option_card_lyt.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.type_lbl) self.addr_lbl = BodyLabel(self.option_card) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred) 
sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.addr_lbl.sizePolicy().hasHeightForWidth()) self.addr_lbl.setSizePolicy(sizePolicy) self.addr_lbl.setMaximumSize(QtCore.QSize(200, 16777215)) self.addr_lbl.setObjectName("addr_lbl") self.option_card_lyt.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.addr_lbl) self.mipMap_lbl = BodyLabel(self.option_card) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.mipMap_lbl.sizePolicy().hasHeightForWidth()) self.mipMap_lbl.setSizePolicy(sizePolicy) self.mipMap_lbl.setMaximumSize(QtCore.QSize(200, 16777215)) self.mipMap_lbl.setObjectName("mipMap_lbl") self.option_card_lyt.setWidget(4, QtWidgets.QFormLayout.LabelRole, self.mipMap_lbl) self.colorSpace_lbl = BodyLabel(self.option_card) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.colorSpace_lbl.sizePolicy().hasHeightForWidth()) self.colorSpace_lbl.setSizePolicy(sizePolicy) self.colorSpace_lbl.setMaximumSize(QtCore.QSize(200, 16777215)) self.colorSpace_lbl.setObjectName("colorSpace_lbl") self.option_card_lyt.setWidget(6, QtWidgets.QFormLayout.LabelRole, self.colorSpace_lbl) self.type_cmb = ComboBox(self.option_card) self.type_cmb.setObjectName("type_cmb") self.option_card_lyt.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.type_cmb) self.addr_lyt = QtWidgets.QHBoxLayout() self.addr_lyt.setSpacing(6) self.addr_lyt.setObjectName("addr_lyt") self.addr_u_cmb = ComboBox(self.option_card) self.addr_u_cmb.setMinimumSize(QtCore.QSize(140, 0)) self.addr_u_cmb.setObjectName("addr_u_cmb") self.addr_lyt.addWidget(self.addr_u_cmb) self.addr_v_cmb = ComboBox(self.option_card) self.addr_v_cmb.setMinimumSize(QtCore.QSize(140, 0)) 
self.addr_v_cmb.setObjectName("addr_v_cmb") self.addr_lyt.addWidget(self.addr_v_cmb) self.addr_w_cmb = ComboBox(self.option_card) self.addr_w_cmb.setEnabled(False) self.addr_w_cmb.setMinimumSize(QtCore.QSize(140, 0)) self.addr_w_cmb.setObjectName("addr_w_cmb") self.addr_lyt.addWidget(self.addr_w_cmb) self.option_card_lyt.setLayout(1, QtWidgets.QFormLayout.FieldRole, self.addr_lyt) self.filter_lyt = QtWidgets.QHBoxLayout() self.filter_lyt.setSpacing(6) self.filter_lyt.setObjectName("filter_lyt") self.mag_filter_cmb = ComboBox(self.option_card) self.mag_filter_cmb.setObjectName("mag_filter_cmb") self.filter_lyt.addWidget(self.mag_filter_cmb) self.min_filter_cmb = ComboBox(self.option_card) self.min_filter_cmb.setEnabled(False) self.min_filter_cmb.setObjectName("min_filter_cmb") self.filter_lyt.addWidget(self.min_filter_cmb) self.option_card_lyt.setLayout(2, QtWidgets.QFormLayout.FieldRole, self.filter_lyt) self.filter_lbl = BodyLabel(self.option_card) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.filter_lbl.sizePolicy().hasHeightForWidth()) self.filter_lbl.setSizePolicy(sizePolicy) self.filter_lbl.setMaximumSize(QtCore.QSize(200, 16777215)) self.filter_lbl.setObjectName("filter_lbl") self.option_card_lyt.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.filter_lbl) self.switch_lyt = QtWidgets.QHBoxLayout() self.switch_lyt.setObjectName("switch_lyt") self.no_compress_lyt = QtWidgets.QHBoxLayout() self.no_compress_lyt.setObjectName("no_compress_lyt") self.no_compress_lbl = BodyLabel(self.option_card) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.no_compress_lbl.sizePolicy().hasHeightForWidth()) self.no_compress_lbl.setSizePolicy(sizePolicy) 
self.no_compress_lbl.setObjectName("no_compress_lbl") self.no_compress_lyt.addWidget(self.no_compress_lbl) self.no_compress_swch = SwitchButton(self.option_card) self.no_compress_swch.setObjectName("no_compress_swch") self.no_compress_lyt.addWidget(self.no_compress_swch) self.switch_lyt.addLayout(self.no_compress_lyt) self.no_nisotropic_lyt = QtWidgets.QHBoxLayout() self.no_nisotropic_lyt.setObjectName("no_nisotropic_lyt") self.no_nisotropic_lbl = BodyLabel(self.option_card) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.no_nisotropic_lbl.sizePolicy().hasHeightForWidth()) self.no_nisotropic_lbl.setSizePolicy(sizePolicy) self.no_nisotropic_lbl.setObjectName("no_nisotropic_lbl") self.no_nisotropic_lyt.addWidget(self.no_nisotropic_lbl) self.no_nisotropic_swch = SwitchButton(self.option_card) self.no_nisotropic_swch.setObjectName("no_nisotropic_swch") self.no_nisotropic_lyt.addWidget(self.no_nisotropic_swch) self.switch_lyt.addLayout(self.no_nisotropic_lyt) self.option_card_lyt.setLayout(9, QtWidgets.QFormLayout.FieldRole, self.switch_lyt) self.mipmap_filter_cmb = ComboBox(self.option_card) self.mipmap_filter_cmb.setObjectName("mipmap_filter_cmb") self.option_card_lyt.setWidget(4, QtWidgets.QFormLayout.FieldRole, self.mipmap_filter_cmb) self.usage_cmb = ComboBox(self.option_card) self.usage_cmb.setObjectName("usage_cmb") self.option_card_lyt.setWidget(8, QtWidgets.QFormLayout.FieldRole, self.usage_cmb) self.color_space_cmb = ComboBox(self.option_card) self.color_space_cmb.setObjectName("color_space_cmb") self.option_card_lyt.setWidget(6, QtWidgets.QFormLayout.FieldRole, self.color_space_cmb) self.usage_lbl = BodyLabel(self.option_card) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) 
sizePolicy.setHeightForWidth(self.usage_lbl.sizePolicy().hasHeightForWidth()) self.usage_lbl.setSizePolicy(sizePolicy) self.usage_lbl.setMaximumSize(QtCore.QSize(200, 16777215)) self.usage_lbl.setObjectName("usage_lbl") self.option_card_lyt.setWidget(8, QtWidgets.QFormLayout.LabelRole, self.usage_lbl) self.switch_lbl = BodyLabel(self.option_card) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.switch_lbl.sizePolicy().hasHeightForWidth()) self.switch_lbl.setSizePolicy(sizePolicy) self.switch_lbl.setMaximumSize(QtCore.QSize(200, 16777215)) self.switch_lbl.setObjectName("switch_lbl") self.option_card_lyt.setWidget(9, QtWidgets.QFormLayout.LabelRole, self.switch_lbl) self.middle_lyt.addWidget(self.option_card) self.main_lyt.addLayout(self.middle_lyt) self.preview_card = SimpleCardWidget(TobjEditor) self.preview_card.setMinimumSize(QtCore.QSize(0, 210)) self.preview_card.setMaximumSize(QtCore.QSize(16777215, 210)) self.preview_card.setObjectName("preview_card") self.preview_card_lyt = QtWidgets.QVBoxLayout(self.preview_card) self.preview_card_lyt.setObjectName("preview_card_lyt") self.preview_header_card_lyt = QtWidgets.QHBoxLayout() self.preview_header_card_lyt.setObjectName("preview_header_card_lyt") self.preview_header_text_card_2 = SimpleCardWidget(self.preview_card) self.preview_header_text_card_2.setObjectName("preview_header_text_card_2") self.preview_header_text_card_lyt = QtWidgets.QVBoxLayout(self.preview_header_text_card_2) self.preview_header_text_card_lyt.setObjectName("preview_header_text_card_lyt") self.preview_text_lbl = StrongBodyLabel(self.preview_header_text_card_2) self.preview_text_lbl.setObjectName("preview_text_lbl") self.preview_header_text_card_lyt.addWidget(self.preview_text_lbl) self.preview_header_card_lyt.addWidget(self.preview_header_text_card_2) self.preview_header_binary_card = 
SimpleCardWidget(self.preview_card) self.preview_header_binary_card.setObjectName("preview_header_binary_card") self.preview_header_binary_card_lyt = QtWidgets.QVBoxLayout(self.preview_header_binary_card) self.preview_header_binary_card_lyt.setObjectName("preview_header_binary_card_lyt") self.preview_binary_lbl = StrongBodyLabel(self.preview_header_binary_card) self.preview_binary_lbl.setObjectName("preview_binary_lbl") self.preview_header_binary_card_lyt.addWidget(self.preview_binary_lbl) self.preview_header_card_lyt.addWidget(self.preview_header_binary_card) self.preview_card_lyt.addLayout(self.preview_header_card_lyt) self.preview_txt_card_lyt = QtWidgets.QHBoxLayout() self.preview_txt_card_lyt.setSpacing(9) self.preview_txt_card_lyt.setObjectName("preview_txt_card_lyt") self.preview_text_txt = PlainTextEdit(self.preview_card) self.preview_text_txt.setEnabled(False) self.preview_text_txt.setUndoRedoEnabled(False) self.preview_text_txt.setReadOnly(True) self.preview_text_txt.setObjectName("preview_text_txt") self.preview_txt_card_lyt.addWidget(self.preview_text_txt) self.preview_binary_txt = PlainTextEdit(self.preview_card) self.preview_binary_txt.setEnabled(False) self.preview_binary_txt.setUndoRedoEnabled(False) self.preview_binary_txt.setReadOnly(True) self.preview_binary_txt.setObjectName("preview_binary_txt") self.preview_txt_card_lyt.addWidget(self.preview_binary_txt) self.preview_card_lyt.addLayout(self.preview_txt_card_lyt) self.main_lyt.addWidget(self.preview_card) self.retranslateUi(TobjEditor) QtCore.QMetaObject.connectSlotsByName(TobjEditor) def retranslateUi(self, TobjEditor): _translate = QtCore.QCoreApplication.translate TobjEditor.setWindowTitle(_translate("TobjEditor", "TobjEditor")) self.input_btn.setText(_translate("TobjEditor", "Input")) self.output_btn.setText(_translate("TobjEditor", "Output")) self.revert_btn.setToolTip(_translate("TobjEditor", "Revert all changes to original imported file settings")) 
self.revert_btn.setText(_translate("TobjEditor", "Revert")) self.save_btn.setToolTip(_translate("TobjEditor", "Save to tobj file with selected save mode")) self.save_btn.setText(_translate("TobjEditor", "Save")) self.save_sgmnt.setToolTip(_translate("TobjEditor", "Save mode")) self.filename_lbl.setText(_translate("TobjEditor", "File Name:")) self.filename_line.setToolTip(_translate("TobjEditor", "Filename to be saved (without suffix)")) self.texture_path_line.setToolTip(_translate("TobjEditor", "Full path to texture")) self.type_lbl.setText(_translate("TobjEditor", "Type")) self.addr_lbl.setText(_translate("TobjEditor", "Addr")) self.mipMap_lbl.setText(_translate("TobjEditor", "Mip Map")) self.colorSpace_lbl.setText(_translate("TobjEditor", "Color Space")) self.type_cmb.setToolTip(_translate("TobjEditor", "Generic: 2D mode with 1 texture\n" "Cube: 3D mode with 6 texture")) self.addr_u_cmb.setToolTip(_translate("TobjEditor", "Texture mode in U direction")) self.addr_v_cmb.setToolTip(_translate("TobjEditor", "Texture mode in V direction")) self.addr_w_cmb.setToolTip(_translate("TobjEditor", "Texture mode in W direction")) self.mag_filter_cmb.setToolTip(_translate("TobjEditor", "Magnification filter, How the texture is sampled\n" "when a texel covers more than one pixel")) self.min_filter_cmb.setToolTip(_translate("TobjEditor", "Minification filter, How the texture is sampled\n" "when a texel covers less than one pixel")) self.filter_lbl.setText(_translate("TobjEditor", "Filter")) self.no_compress_lbl.setText(_translate("TobjEditor", "No Compress")) self.no_nisotropic_lbl.setText(_translate("TobjEditor", "No Anisotropic")) self.usage_cmb.setToolTip(_translate("TobjEditor", "TSNormal: Tangent normal space\n" "Usage Ui: nomips + nocompress\n" "(Only used in text mode)")) self.color_space_cmb.setToolTip(_translate("TobjEditor", "SRGB: Default color space\n" "Linear: Mostly used in oclu shader")) self.usage_lbl.setText(_translate("TobjEditor", "Usage")) 
self.switch_lbl.setText(_translate("TobjEditor", "Switch")) self.preview_text_lbl.setText(_translate("TobjEditor", "Preview Text")) self.preview_binary_lbl.setText(_translate("TobjEditor", "Preview Binary")) from qfluentwidgets import BodyLabel, ComboBox, ImageLabel, LineEdit, PlainTextEdit, PrimaryPushButton, PushButton, SegmentedWidget, SimpleCardWidget, StrongBodyLabel, SwitchButton, VerticalSeparator
27,368
Python
.py
426
55.546948
199
0.728643
AmirMahdaviAM/SCSHub
8
0
0
GPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,803
pix_converter_widget.py
AmirMahdaviAM_SCSHub/scshub/view/widget/pix_converter_widget.py
import os import logging from random import randint from PyQt5.QtCore import Qt, QProcess, QSize, QEasingCurve, QPropertyAnimation from PyQt5.QtWidgets import QWidget, QFileDialog, QListWidgetItem from qfluentwidgets import ( IndeterminateProgressRing, InfoBarPosition, FluentIconBase, ToolTipFilter, InfoBarIcon, FluentIcon, InfoLevel, InfoBar, ) from ..ui.pix_converter_ui import PixConverterUi from ...common.tool import ( ScsHubIcon, Downloader, signal_bus, scshub_log, scshub_badge, scshub_infobar, scshub_file_remover, ) from ...common.info import PIX_CONVERTER_PATH, PIX_CONVERTER_URL, PIX_CONVERTER_LOG NAME = "PIXConverter" logger = logging.getLogger(NAME) class PixConverterWidget(QWidget, PixConverterUi): def __init__(self, infobar_pos): super().__init__() self.INFOBAR_POS = infobar_pos self.INPUTS = [] self.OUTPUT = "" self.MODE = "-extract_f" self.PIX_MODE = "" self.PATH = "/" self.ANIM_PATH = "/" self.SUFFIX = "" self.LAST_FILE = "" self.ANIM_FILES = [] self.TEMP_OUT = [] self.TEMP_OUT_ANIM = [] self.TEMP_LOG = [] self.main_process = None self.other_process = None self.setupUi(self) self.init_ui() self.option_ui() signal_bus.pix_exist.connect(self.tools_exist) scshub_file_remover(PIX_CONVERTER_LOG) def init_ui(self): self.top_card_lyt.setAlignment(Qt.AlignmentFlag.AlignLeft) self.download_btn.setIcon(FluentIcon.DOWNLOAD) self.download_btn.clicked.connect(lambda: self.downloader()) self.download_btn.hide() self.reset_btn.setIcon(FluentIcon.DELETE) self.reset_btn.clicked.connect(lambda: self.reset_inputs()) self.reset_btn.installEventFilter(ToolTipFilter(self.reset_btn)) self.input_btn.setIcon(FluentIcon.DOWN) self.input_btn.clicked.connect(lambda: self.get_file()) self.input_btn.installEventFilter(ToolTipFilter(self.input_btn)) self.output_btn.setIcon(FluentIcon.UP) self.output_btn.clicked.connect(lambda: self.get_folder()) self.output_btn.installEventFilter(ToolTipFilter(self.output_btn)) self.extract_btn.setIcon(FluentIcon.LINK) 
self.extract_btn.clicked.connect(lambda: self.extract_mode()) self.extract_sgmnt.addItem("file", self.tr("File"), lambda: self.change_mode("file")) self.extract_sgmnt.addItem("model", self.tr("Model"), lambda: self.change_mode("model")) self.extract_sgmnt.addItem("tobj", self.tr("TOBJ"), lambda: self.change_mode("tobj")) self.extract_sgmnt.addItem("folder", self.tr("Folder"), lambda: self.change_mode("folder")) self.extract_sgmnt.installEventFilter(ToolTipFilter(self.extract_sgmnt)) self.anim_tgl.setIcon(ScsHubIcon.ANIM) self.anim_tgl.clicked.connect(lambda: self.toggle_card()) self.anim_tgl.installEventFilter(ToolTipFilter(self.anim_tgl)) self.material_tgl.setIcon(ScsHubIcon.TEXT) self.material_tgl.installEventFilter(ToolTipFilter(self.material_tgl)) self.texture_tgl.setIcon(ScsHubIcon.TEXTURE) self.texture_tgl.installEventFilter(ToolTipFilter(self.texture_tgl)) def option_ui(self): self.navbar.setSpacing(10) self.navbar.setFixedHeight(19) self.navbar.currentItemChanged.connect(lambda objectname: self.change_path(objectname)) self.back_btn.setIcon(FluentIcon.LEFT_ARROW) self.back_btn.installEventFilter(ToolTipFilter(self.back_btn)) self.back_btn.clicked.connect(lambda: self.go_back()) self.home_btn.setIcon(FluentIcon.HOME) self.home_btn.installEventFilter(ToolTipFilter(self.home_btn)) self.home_btn.clicked.connect(lambda: self.go_home()) self.refresh_btn.setIcon(FluentIcon.UPDATE) self.refresh_btn.installEventFilter(ToolTipFilter(self.refresh_btn)) self.refresh_btn.clicked.connect(lambda: self.refresh_path()) self.anim_card.hide() self.anim_list.currentTextChanged.connect(self.goto_folder_anim) self.anim_badge.setLevel(InfoLevel.INFOAMTION) self.folder_list.currentTextChanged.connect(self.goto_folder) self.folder_badge.setLevel(InfoLevel.INFOAMTION) self.file_list.currentTextChanged.connect(self.select_file) self.file_badge.setLevel(InfoLevel.INFOAMTION) def toggle_card(self): # anim card if ( self.extract_sgmnt._currentRouteKey == "model" and 
self.anim_tgl.isEnabled() and self.anim_tgl.isChecked() ): self.anim_card.show() self.anim_card_anim = QPropertyAnimation(self.anim_card, b"maximumSize") self.anim_card_anim.setEndValue(QSize(500, 16777215)) self.anim_card_anim.setDuration(500) self.anim_card_anim.setEasingCurve(QEasingCurve.OutQuad) self.anim_card_anim.start() self.anim_card_anim.finished.connect(lambda: self.anim_card.setMaximumWidth(16777215)) else: self.anim_card_anim = QPropertyAnimation(self.anim_card, b"maximumSize") self.anim_card_anim.setStartValue(QSize(500, 16777215)) self.anim_card_anim.setEndValue(QSize(0, 16777215)) self.anim_card_anim.setDuration(500) self.anim_card_anim.setEasingCurve(QEasingCurve.OutQuad) self.anim_card_anim.start() self.anim_card_anim.finished.connect(lambda: self.anim_card.hide()) # file card if self.extract_sgmnt._currentRouteKey == "folder": self.file_card_anim = QPropertyAnimation(self.file_card, b"maximumSize") self.file_card_anim.setStartValue(QSize(750, 16777215)) self.file_card_anim.setEndValue(QSize(0, 16777215)) self.file_card_anim.setDuration(500) self.file_card_anim.setEasingCurve(QEasingCurve.OutQuad) self.file_card_anim.start() self.file_card_anim.finished.connect(lambda: self.file_card.hide()) else: self.file_card.show() self.file_card_anim = QPropertyAnimation(self.file_card, b"maximumSize") self.file_card_anim.setEndValue(QSize(750, 16777215)) self.file_card_anim.setDuration(500) self.file_card_anim.setEasingCurve(QEasingCurve.OutQuad) self.file_card_anim.start() self.file_card_anim.finished.connect(lambda: self.file_card.setMaximumWidth(16777215)) def change_mode(self, mode: str): match mode: case "file": self.MODE = "-extract_f" self.SUFFIX = "" self.anim_tgl.setDisabled(True) self.material_tgl.setDisabled(True) self.texture_tgl.setDisabled(True) self.file_lbl.setText(self.tr("File")) case "model": self.MODE = "-m" self.SUFFIX = ".pmd" self.anim_tgl.setEnabled(True) self.material_tgl.setEnabled(True) self.texture_tgl.setEnabled(True) 
self.file_lbl.setText(self.tr("Model")) case "tobj": self.MODE = "-t" self.SUFFIX = ".tobj" self.anim_tgl.setDisabled(True) self.material_tgl.setDisabled(True) self.texture_tgl.setDisabled(True) self.file_lbl.setText(self.tr("TOBJ")) case "folder": self.MODE = "-extract_d" self.anim_tgl.setDisabled(True) self.material_tgl.setDisabled(True) self.texture_tgl.setDisabled(True) self.toggle_card() self.refresh_path() def list_mode(self): self.PIX_MODE = "list" argument = "" for file_path in self.INPUTS: argument += f'-b "{file_path}" ' argument += f"-listdir {self.PATH}" self.converter_process(argument) def extract_mode(self): self.PIX_MODE = "extract" argument = "" for file_path in self.INPUTS: argument += f'-b "{file_path}" ' argument += f"{self.MODE}" if self.MODE != "-extract_d": # check if file selected if "." in os.path.basename(self.PATH): # single model mode if self.MODE == "-m": argument += f" {self.PATH[:-4]}" if self.material_tgl.isChecked(): argument += f" -matFormat147" if self.texture_tgl.isChecked(): argument += f" -ddsDxt10" if self.anim_tgl.isChecked(): self.select_file_anim() if self.ANIM_FILES != []: for file in self.ANIM_FILES: anim = os.path.join( self.PATH.replace(self.LAST_FILE, ""), file[:-4] ).replace("\\", "/") argument += f" {anim}" # single tobj or exprot file mode elif self.MODE == "-t" or self.MODE == "-extract_f": argument += f" {self.PATH}" argument += f' -e "{self.OUTPUT}"' self.converter_process(argument) else: scshub_infobar(self.INFOBAR_POS, "info", "No file selected") # extract folder mode elif self.MODE == "-extract_d": argument += f" {self.PATH}" argument += f' -e "{self.OUTPUT}"' self.converter_process(argument) def anim_mode(self): argument = "" for file_path in self.INPUTS: argument += f'-b "{file_path}" ' argument += f"-listdir {self.ANIM_PATH}" self.anim_process(argument) def converter_process(self, argument: str): command = f'"{PIX_CONVERTER_PATH}" {argument}' logger.info(command) if self.main_process == None: 
self.main_process = QProcess() self.main_process.setProcessChannelMode(QProcess.MergedChannels) self.main_process.readyRead.connect(self.converter_output) self.main_process.stateChanged.connect(self.converte_state) self.main_process.finished.connect(self.converter_finish) self.main_process.waitForFinished(100) self.main_process.start(command) def converter_output(self): # get output data from process and decode it output = self.main_process.readAllStandardOutput() decoded_output = bytes(output).decode("utf-8") splitted_output = decoded_output.splitlines() for line in splitted_output: striped_line = line.strip() if striped_line != "": self.TEMP_OUT.append(striped_line) if ( not striped_line.startswith("-- done") and not striped_line.startswith("*") and not striped_line.startswith("[D]") and not striped_line.startswith("[F]") ): self.TEMP_LOG.append(striped_line) def converte_state(self, state): states = { QProcess.NotRunning: "NotRunning", QProcess.Starting: "Starting", QProcess.Running: "Running", } state_name = states[state] if self.PIX_MODE == "extract": if state_name == "Running": self.working_infobar = InfoBar.new( InfoBarIcon.INFORMATION, self.tr("Working"), "", Qt.Horizontal, False, -1, InfoBarPosition.TOP, self.INFOBAR_POS, ) ring_wgt = IndeterminateProgressRing(self) ring_wgt.setFixedSize(22, 22) ring_wgt.setStrokeWidth(4) self.working_infobar.addWidget(ring_wgt) self.reset_btn.setDisabled(True) self.input_btn.setDisabled(True) self.output_btn.setDisabled(True) self.extract_btn.setDisabled(True) self.extract_sgmnt.setDisabled(True) self.anim_tgl.setDisabled(True) self.material_tgl.setDisabled(True) self.texture_tgl.setDisabled(True) self.home_btn.setDisabled(True) self.back_btn.setDisabled(True) self.navbar.setDisabled(True) self.folder_list.setDisabled(True) self.file_list.setDisabled(True) self.anim_list.setDisabled(True) self.refresh_btn.setDisabled(True) logger.info(f"{NAME} Running") elif state_name == "NotRunning": self.working_infobar.close() 
self.reset_btn.setEnabled(True) self.input_btn.setEnabled(True) self.output_btn.setEnabled(True) self.extract_btn.setEnabled(True) self.extract_sgmnt.setEnabled(True) self.anim_tgl.setEnabled(True) self.material_tgl.setEnabled(True) self.texture_tgl.setEnabled(True) self.home_btn.setEnabled(True) self.back_btn.setEnabled(True) self.navbar.setDisabled(True) self.folder_list.setEnabled(True) self.file_list.setEnabled(True) self.anim_list.setEnabled(True) self.refresh_btn.setEnabled(True) logger.info(f"{NAME} Finished") def converter_finish(self): self.TEMP_OUT.sort() match self.PIX_MODE: case "list": self.folder_list.clearSelection() self.folder_list.clear() self.file_list.clearSelection() self.file_list.clear() folders = [] files = [] for line in self.TEMP_OUT: # create directory list if line.startswith("[D] "): folders.append(os.path.relpath(line[4:], self.PATH)) # create file list elif line.startswith("[F] "): # cheack suffix and only include specified in list26 if line.endswith(self.SUFFIX) and not line.endswith( (".pmg", ".pmc", ".pma") ): files.append(os.path.relpath(line[4:], self.PATH)) # set list count to badges self.folder_badge.setText(str(len(folders))) self.file_badge.setText(str(len(files))) # add items in list to list view for folder in folders: self.folder_list.addItem( QListWidgetItem(FluentIconBase.qicon(ScsHubIcon.FOLDER), folder) ) # add items in list to list view and ignore it if in (-extract_d) mode if self.MODE != "-extract_d": for file in files: if file.endswith(".pmd"): self.file_list.addItem( QListWidgetItem(FluentIconBase.qicon(ScsHubIcon.MODEL), file) ) elif file.endswith(".ppd"): self.file_list.addItem( QListWidgetItem(FluentIconBase.qicon(ScsHubIcon.PREFAB), file) ) elif file.endswith(".tobj"): self.file_list.addItem( QListWidgetItem(FluentIconBase.qicon(ScsHubIcon.TOBJ), file) ) elif file.endswith((".dds", ".png", ".jpg", ".mask")): self.file_list.addItem( QListWidgetItem(FluentIconBase.qicon(ScsHubIcon.TEXTURE), file) ) elif 
file.endswith( (".mat", ".sii", ".sui", ".txt", ".cfg", ".dat", ".soundref") ): self.file_list.addItem( QListWidgetItem(FluentIconBase.qicon(ScsHubIcon.TEXT), file) ) else: self.file_list.addItem( QListWidgetItem(FluentIconBase.qicon(ScsHubIcon.FILE), file) ) case "extract": temp_out = "" for line in self.TEMP_OUT: temp_out += f"{line.lower()}\n" if ( "error" in temp_out or "no" in temp_out or "not" in temp_out or "unable" in temp_out or "cannot" in temp_out or "failed" in temp_out or "readDir" in temp_out or "invalid" in temp_out or "unknown" in temp_out or "unexpected" in temp_out or "unsupported" in temp_out ): scshub_infobar( self.INFOBAR_POS, "error_btn", self.tr("Error occurred during process"), PIX_CONVERTER_LOG, ) logger.error(f"Error occurred during process, check {PIX_CONVERTER_LOG}") elif "warning" in temp_out: scshub_infobar( self.INFOBAR_POS, "warn_btn", self.tr("Process finished with warnings"), PIX_CONVERTER_LOG, ) logger.warning(f"Process finished with warnings, check {PIX_CONVERTER_LOG}") else: path = os.path.join(self.OUTPUT, self.PATH[1:]).replace("/", "\\") finalPath = os.path.split(path)[0] scshub_infobar( self.INFOBAR_POS, "success_btn", self.tr("Process finished"), finalPath ) logger.info("Process completed successfully") if self.TEMP_LOG != []: scshub_log(PIX_CONVERTER_LOG, self.TEMP_LOG) self.main_process = None self.TEMP_OUT = [] self.TEMP_LOG = [] def anim_process(self, argument: str): command = f'"{PIX_CONVERTER_PATH}" {argument}' if self.other_process == None: self.other_process = QProcess() self.other_process.readyReadStandardOutput.connect(self.anim_output) self.other_process.finished.connect(self.anim_finish) self.other_process.waitForFinished(100) self.other_process.start(command) def anim_output(self): # get output data from process and decode it output = self.other_process.readAllStandardOutput() decoded_output = bytes(output).decode("utf-8") splitted_output = decoded_output.splitlines() for line in splitted_output: if 
line.strip() != "": self.TEMP_OUT_ANIM.append(line.strip()) def anim_finish(self): self.anim_list.clearSelection() self.anim_list.clear() self.ANIM_FILES = [] self.anim_list.addItem(QListWidgetItem(FluentIconBase.qicon(FluentIcon.UP), "..")) folders = [] files = [] for line in self.TEMP_OUT_ANIM: # create directory list if line.startswith("[D] "): folders.append(os.path.relpath(line[4:], self.ANIM_PATH)) # create file list elif line.startswith("[F] "): # cheack suffix and only include pma file if line.endswith(".pma"): files.append(os.path.relpath(line[4:], self.ANIM_PATH)) # set list count to badges self.anim_badge.setText(str(len(files))) # add items in list to list view for folder in folders: self.anim_list.addItem(QListWidgetItem(FluentIconBase.qicon(ScsHubIcon.FOLDER), folder)) for file in files: item = QListWidgetItem(FluentIconBase.qicon(ScsHubIcon.FOLDER), file) item.setCheckState(0) self.anim_list.addItem(item) self.other_process = None self.TEMP_OUT_ANIM = [] def reset_inputs(self): self.INPUTS = [] self.OUTPUT = "" self.input_btn.setToolTip("") self.input_badge.close() self.output_btn.setToolTip("") self.output_badge.close() self.navbar.clear() self.refresh_path() def add_root_item(self): self.navbar.clear() if len(self.INPUTS) < 2: self.navbar.addItem("root", f"{os.path.split(self.INPUTS[0])[1]}") else: self.navbar.addItem("root", "multi-archive") def remove_last_selcted(self): if self.PATH.endswith(self.LAST_FILE): self.PATH = self.PATH.replace(self.LAST_FILE, "") def go_back(self): self.remove_last_selcted() if not self.navbar.currentIndex() == 0: self.navbar.popItem() self.list_mode() def refresh_path(self): self.remove_last_selcted() self.list_mode() self.anim_mode() def go_home(self): self.PATH = "/" self.ANIM_PATH = "/" self.add_root_item() self.list_mode() self.anim_mode() def change_path(self, selected: str): # back to root of scs file if first item selected in navbar_itemgation bar if selected == "root": self.PATH = "/" 
self.back_btn.setDisabled(True) self.home_btn.setDisabled(True) # find selected item index in saved path string and # delete all after item name itself and update new path else: self.PATH = self.PATH[0 : self.PATH.find(selected[:-2]) + len(selected[:-2])].replace( "\\", "/" ) self.back_btn.setEnabled(True) self.home_btn.setEnabled(True) self.list_mode() def goto_folder(self, selected: str): # check if not empty and add last item to navbar_itemgation bar if selected != "": self.remove_last_selcted() # update path and add seelcted folder to it self.PATH = os.path.join(self.PATH, selected).replace("\\", "/") navbar_item = os.path.split(self.PATH) self.navbar.addItem(f"{selected}{randint(11, 99)}", navbar_item[1]) self.list_mode() def select_file(self, selected: str): self.remove_last_selcted() # update path and add seelcted file to it self.PATH = os.path.join(self.PATH, selected).replace("\\", "/") self.LAST_FILE = selected def goto_folder_anim(self, selected: str): if selected != "": if not selected.endswith(".pma"): if selected == ".." 
and self.ANIM_PATH != "/": go_back = os.path.split(self.ANIM_PATH) self.ANIM_PATH = go_back[0].replace("\\", "/") else: self.ANIM_PATH = os.path.join(self.ANIM_PATH, selected).replace("\\", "/") self.ANIM_FILES = [] self.anim_mode() def select_file_anim(self): self.ANIM_FILES = [] for index in range(self.anim_list.count()): item = self.anim_list.item(index) if item.checkState() == 2: anim_path = f"{self.ANIM_PATH}/{item.text()[-4]}" self.ANIM_FILES.append(anim_path) def get_file(self): file_dialog = QFileDialog().getOpenFileNames( self, "Select file", filter="SCS archives (*.zip *.scs)" ) if file_dialog[0]: file_path = file_dialog[0] # enable buttons after file selected for first time if self.INPUTS == []: self.reset_btn.setEnabled(True) self.output_btn.setEnabled(True) self.extract_btn.setEnabled(True) self.refresh_btn.setEnabled(True) self.extract_sgmnt.setEnabled(True) self.extract_sgmnt.setCurrentItem("file") self.input_badge = scshub_badge(self.top_card, self.input_btn) for file in file_path: self.INPUTS.append(file) # set export path to (<firstSelectedFile>_exp) if self.OUTPUT == "": output_path = f"{self.INPUTS[0][:-4]}_exp" self.OUTPUT = output_path self.output_btn.setToolTip(output_path) self.output_badge = scshub_badge(self.top_card, self.output_btn) # set buttons tooltip tooltip = "" for file in self.INPUTS: tooltip += f"{file}\n" self.input_btn.setToolTip(tooltip[:-1]) scshub_infobar(self.INFOBAR_POS, "success", self.tr("File imported")) logger.info(f'Set input file to "{self.INPUTS}"') self.add_root_item() self.go_home() def get_folder(self): folder_dialog = QFileDialog().getExistingDirectory(self, "Select folder") # only if folder selected if folder_dialog: folder_path = folder_dialog self.OUTPUT = folder_path self.output_btn.setToolTip(folder_path) scshub_infobar(self.INFOBAR_POS, "success", self.tr("Folder selected")) logger.info(f'Set output folder to "{folder_path}"') def downloader(self): self.downloader_process = Downloader(logger, 
PIX_CONVERTER_URL, PIX_CONVERTER_PATH) self.downloader_process.started.connect(self.downloader_start) self.downloader_process.result.connect(self.downloader_finish) self.downloader_process.start() def downloader_start(self): self.download_infobar = InfoBar.new( InfoBarIcon.INFORMATION, self.tr("Downloading"), "", Qt.Horizontal, False, -1, InfoBarPosition.TOP, self.INFOBAR_POS, ) ring_wgt = IndeterminateProgressRing(self) ring_wgt.setFixedSize(22, 22) ring_wgt.setStrokeWidth(4) self.download_infobar.addWidget(ring_wgt) self.download_btn.setDisabled(True) def downloader_finish(self, result: int): self.download_infobar.close() match result: case 0: signal_bus.pix_exist.emit(True) scshub_infobar(self.INFOBAR_POS, "success", self.tr("Downloaded")) case 1: signal_bus.pix_exist.emit(False) scshub_infobar(self.INFOBAR_POS, "error", self.tr("Error during download")) def tools_exist(self, exist): if exist: self.input_btn.setEnabled(True) self.download_btn.hide() self.download_btn.setDisabled(True) logger.info(f"{NAME} exist") else: self.input_btn.setDisabled(True) self.download_btn.show() self.download_btn.setEnabled(True) logger.info(f"{NAME} not exist")
27,924
Python
.py
607
32.056013
100
0.558982
AmirMahdaviAM/SCSHub
8
0
0
GPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,804
sxc_packer_widget.py
AmirMahdaviAM_SCSHub/scshub/view/widget/sxc_packer_widget.py
import os import logging from sys import platform from PyQt5.QtCore import Qt, QProcess from PyQt5.QtWidgets import QWidget, QFileDialog from qfluentwidgets import ( IndeterminateProgressRing, InfoBarPosition, ToolTipFilter, InfoBarIcon, FluentIcon, InfoBar, ) from ..ui.sxc_packer_ui import SxcPackerUi from ...common.tool import signal_bus, scshub_file_remover, scshub_infobar, scshub_badge, scshub_log from ...common.info import SXC_PACKER_PATH, SXC_PACKER_LOG NAME = "SXCPacker" logger = logging.getLogger(NAME) class SxcPackerWidget(QWidget, SxcPackerUi): def __init__(self, infobar_pos): super().__init__() self.INFOBAR_POS = infobar_pos self.INPUT = "" self.OUTPUT = "" self.ENCRYPT_FILE = "" self.HIDDEN_FILE = "" self.STORED_FILE = "" self.EXCLUDE_FILE = "" self.TEMP_OUT = [] self.TEMP_LOG = [] self.main_process = None self.setupUi(self) self.init_ui() self.option_ui() signal_bus.sxc_exist.connect( lambda exist: ( self.input_btn.setEnabled(True) if exist else self.input_btn.setDisabled(True) ) ) scshub_file_remover(SXC_PACKER_LOG) def init_ui(self): self.main_lyt.setAlignment(Qt.AlignmentFlag.AlignTop) self.top_card_lyt.setAlignment(Qt.AlignmentFlag.AlignLeft) self.input_btn.setIcon(FluentIcon.DOWN) self.input_btn.clicked.connect(lambda: self.get_folder()) self.input_btn.installEventFilter(ToolTipFilter(self.input_btn)) self.output_btn.setIcon(FluentIcon.UP) self.output_btn.clicked.connect(lambda: self.get_save_file()) self.output_btn.installEventFilter(ToolTipFilter(self.output_btn)) self.pack_btn.setIcon(FluentIcon.ZIP_FOLDER) self.pack_btn.clicked.connect(lambda: self.packer_process()) def option_ui(self): # encrypt list | -e self.encrypt_chk.installEventFilter(ToolTipFilter(self.encrypt_chk)) self.encrypt_btn.installEventFilter(ToolTipFilter(self.encrypt_btn)) self.checkbox_state(self.encrypt_chk, self.encrypt_btn) self.encrypt_btn.clicked.connect(lambda: self.get_file("encrypt")) # hidden list | -h 
self.hidden_chk.installEventFilter(ToolTipFilter(self.hidden_chk)) self.hidden_btn.installEventFilter(ToolTipFilter(self.hidden_btn)) self.checkbox_state(self.hidden_chk, self.hidden_btn) self.hidden_btn.clicked.connect(lambda: self.get_file("hidden")) # stored list | -s self.stored_chk.installEventFilter(ToolTipFilter(self.stored_chk)) self.stored_btn.installEventFilter(ToolTipFilter(self.stored_btn)) self.checkbox_state(self.stored_chk, self.stored_btn) self.stored_btn.clicked.connect(lambda: self.get_file("stored")) # exclude list | -x self.exclude_chk.installEventFilter(ToolTipFilter(self.exclude_chk)) self.exclude_btn.installEventFilter(ToolTipFilter(self.exclude_btn)) self.checkbox_state(self.exclude_chk, self.exclude_btn) self.exclude_btn.clicked.connect(lambda: self.get_file("exclude")) def checkbox_state(self, checkbox: QWidget, widget: QWidget): checkbox.stateChanged.connect( lambda state: widget.setEnabled(True) if state else widget.setDisabled(True) ) def packer_process(self): command = f'"{SXC_PACKER_PATH}" -i "{self.INPUT}" -o "{self.OUTPUT}"' if self.encrypt_chk.isChecked() and self.ENCRYPT_FILE != "": command += f" -e {self.ENCRYPT_FILE}" if self.hidden_chk.isChecked() and self.HIDDEN_FILE != "": command += f" -h {self.HIDDEN_FILE}" if self.stored_chk.isChecked() and self.STORED_FILE != "": command += f" -s {self.STORED_FILE}" if self.exclude_chk.isChecked() and self.EXCLUDE_FILE != "": command += f" -x {self.EXCLUDE_FILE}" logger.info(command) if self.main_process == None: self.main_process = QProcess() self.main_process.setProcessChannelMode(QProcess.MergedChannels) self.main_process.readyRead.connect(self.packer_output) self.main_process.stateChanged.connect(self.packer_state) self.main_process.finished.connect(self.packer_finish) self.main_process.waitForFinished(100) self.main_process.start(command) def packer_output(self): # get output data from process and decode it output = self.main_process.readAllStandardOutput() decoded_output = 
bytes(output).decode("utf-8") splitted_output = decoded_output.splitlines() for line in splitted_output: striped_line = line.strip() if striped_line != "": self.TEMP_OUT.append(striped_line) if ( "total" in striped_line.lower() or "processed" in striped_line.lower() or "time" in striped_line.lower() or "error" in striped_line.lower() or "found" in striped_line.lower() ): self.TEMP_LOG.append(striped_line) def packer_state(self, state): states = { QProcess.NotRunning: "NotRunning", QProcess.Starting: "Starting", QProcess.Running: "Running", } state_name = states[state] if state_name == "Running": self.working_infobar = InfoBar.new( InfoBarIcon.INFORMATION, self.tr("Working"), "", Qt.Horizontal, False, -1, InfoBarPosition.TOP, self.INFOBAR_POS, ) ring_wgt = IndeterminateProgressRing(self) ring_wgt.setFixedSize(22, 22) ring_wgt.setStrokeWidth(4) self.working_infobar.addWidget(ring_wgt) self.input_btn.setDisabled(True) self.output_btn.setDisabled(True) self.pack_btn.setDisabled(True) logger.info(f"{NAME} Running") elif state_name == "NotRunning": self.working_infobar.close() self.input_btn.setEnabled(True) self.output_btn.setEnabled(True) self.pack_btn.setEnabled(True) logger.info(f"{NAME} Finished") def packer_finish(self): temp_out = "" for line in self.TEMP_OUT: temp_out += f"{line.lower()}\n" if "error" in temp_out or "unable" in temp_out or "unhandled" in temp_out: scshub_infobar( self.INFOBAR_POS, "error_btn", self.tr("Error occurred during process"), SXC_PACKER_LOG, ) logger.error(f"Error occurred during process, check {SXC_PACKER_LOG}") elif "warning" in temp_out: scshub_infobar( self.INFOBAR_POS, "warn_btn", self.tr("Process finished with warnings"), SXC_PACKER_LOG, ) logger.warning(f"Process finished with warnings, check {SXC_PACKER_LOG}") else: scshub_infobar( self.INFOBAR_POS, "success_btn", self.tr("Process finished"), os.path.split(self.OUTPUT)[0], ) logger.info("Process completed successfully") if self.TEMP_LOG != []: scshub_log(SXC_PACKER_LOG, 
self.TEMP_LOG) for line in self.TEMP_LOG: self.log_txt.insertPlainText(line + "\n") self.log_txt.moveCursor(-1) self.log_txt.insertPlainText("\n") self.main_process = None self.TEMP_OUT = [] self.TEMP_LOG = [] def get_file(self, mode: str): file_dialog = QFileDialog().getOpenFileName(self, "Select file", filter="Text file (*.txt)") # only if file selected if file_dialog[0]: file_path = file_dialog[0].replace("/", "\\") match mode: case "encrypt": self.ENCRYPT_FILE = file_path self.encrypt_btn.setToolTip(file_path) scshub_badge(self.top_card, self.encrypt_btn) logger.info(f'Set encrypt file to "{file_path}"') case "hidden": self.HIDDEN_FILE = file_path self.hidden_btn.setToolTip(file_path) scshub_badge(self.top_card, self.hidden_btn) logger.info(f'Set hidden file to "{file_path}"') case "stored": self.STORED_FILE = file_path self.stored_btn.setToolTip(file_path) scshub_badge(self.top_card, self.stored_btn) logger.info(f'Set stored file to "{file_path}"') case "exclude": self.EXCLUDE_FILE = file_path self.exclude_btn.setToolTip(file_path) scshub_badge(self.top_card, self.exclude_btn) logger.info(f'Set exclude file to "{file_path}"') scshub_infobar(self.INFOBAR_POS, "success", self.tr("File imported")) def get_folder(self): if platform == "win32": folder_dialog = QFileDialog().getExistingDirectory(self, "Select folder") # only if folder selected if folder_dialog: folder_path = folder_dialog.replace("/", "\\") # enable buttons after file selected for first time if self.INPUT == "": self.output_btn.setEnabled(True) self.pack_btn.setEnabled(True) self.encrypt_chk.setEnabled(True) self.hidden_chk.setEnabled(True) self.stored_chk.setEnabled(True) self.exclude_chk.setEnabled(True) self.INPUT = folder_path output_path = f"{folder_path}.scs" self.OUTPUT = output_path self.input_btn.setToolTip(folder_path) self.output_btn.setToolTip(output_path) scshub_badge(self.top_card, self.input_btn) scshub_badge(self.top_card, self.output_btn) scshub_infobar(self.INFOBAR_POS, "success", 
self.tr("Folder imported")) logger.info(f'Set input folder to "{folder_path}"') else: scshub_infobar(self.INFOBAR_POS, "error", self.tr("Only work in windows")) logger.error(f"{NAME} Not work in {platform}, {NAME} only work in windows") def get_save_file(self): file_dialog = QFileDialog().getSaveFileName(self, "Save file", filter="SCS archive (*.scs)") # only if file selected if file_dialog[0]: file_path = file_dialog[0].replace("/", "\\") self.OUTPUT = file_path self.output_btn.setToolTip(file_path) scshub_infobar(self.INFOBAR_POS, "success", self.tr("File imported")) logger.info(f'Set output file to "{file_path}"')
11,303
Python
.py
243
34.144033
100
0.590333
AmirMahdaviAM/SCSHub
8
0
0
GPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,805
scs_packer_widget.py
AmirMahdaviAM_SCSHub/scshub/view/widget/scs_packer_widget.py
import os import logging from sys import platform from PyQt5.QtCore import Qt, QProcess from PyQt5.QtWidgets import QWidget, QFileDialog from qfluentwidgets import ( IndeterminateProgressRing, InfoBarPosition, ToolTipFilter, InfoBarIcon, FluentIcon, InfoBar, ) from ..ui.scs_packer_ui import ScsPackerUi from ...common.tool import signal_bus, scshub_file_remover, scshub_infobar, scshub_badge, scshub_log from ...common.info import SCS_TOOL_PATH, SCS_PACKER_LOG NAME = "SCSPacker" logger = logging.getLogger(NAME) class ScsPackerWidget(QWidget, ScsPackerUi): def __init__(self, infobar_pos): super().__init__() self.INFOBAR_POS = infobar_pos self.INPUT = "" self.OUTPUT = "" self.TEMP_OUT = [] self.TEMP_LOG = [] self.main_process = None self.setupUi(self) self.init_ui() signal_bus.scs_exist.connect( lambda exist: ( self.input_btn.setEnabled(True) if exist else self.input_btn.setDisabled(True) ) ) scshub_file_remover(SCS_PACKER_LOG) def init_ui(self): self.top_card_lyt.setAlignment(Qt.AlignmentFlag.AlignLeft) self.input_btn.setIcon(FluentIcon.DOWN) self.input_btn.clicked.connect(lambda: self.get_folder()) self.input_btn.installEventFilter(ToolTipFilter(self.input_btn)) self.output_btn.setIcon(FluentIcon.UP) self.output_btn.clicked.connect(lambda: self.get_save_file()) self.output_btn.installEventFilter(ToolTipFilter(self.output_btn)) self.pack_btn.setIcon(FluentIcon.ZIP_FOLDER) self.pack_btn.clicked.connect(lambda: self.packer_process()) self.nocompress_chk.installEventFilter(ToolTipFilter(self.nocompress_chk)) def packer_process(self): command = f'"{SCS_TOOL_PATH}" create "{self.OUTPUT}" -root "{self.INPUT}"' if self.nocompress_chk.isChecked(): command += " -nocompress" logger.info(command) if self.main_process == None: self.main_process = QProcess() self.main_process.setProcessChannelMode(QProcess.MergedChannels) self.main_process.readyRead.connect(self.packer_output) self.main_process.stateChanged.connect(self.packer_state) 
self.main_process.finished.connect(self.packer_finish) self.main_process.waitForFinished(100) self.main_process.start(command) def packer_output(self): # get output data from process and decode it output = self.main_process.readAllStandardOutput() decoded_output = bytes(output).decode("utf-8") splitted_output = decoded_output.splitlines() for line in splitted_output: striped_line = line.strip() if striped_line != "": self.TEMP_OUT.append(striped_line) if "error" in line.lower() or "hashfs" in line.lower(): self.TEMP_LOG.append(striped_line) def packer_state(self, state): states = { QProcess.NotRunning: "NotRunning", QProcess.Starting: "Starting", QProcess.Running: "Running", } state_name = states[state] if state_name == "Running": self.working_infobar = InfoBar.new( InfoBarIcon.INFORMATION, self.tr("Working"), "", Qt.Horizontal, False, -1, InfoBarPosition.TOP, self.INFOBAR_POS, ) ring_wgt = IndeterminateProgressRing(self) ring_wgt.setFixedSize(22, 22) ring_wgt.setStrokeWidth(4) self.working_infobar.addWidget(ring_wgt) self.input_btn.setDisabled(True) self.output_btn.setDisabled(True) self.pack_btn.setDisabled(True) logger.info(f"{NAME} Running") elif state_name == "NotRunning": self.working_infobar.close() self.input_btn.setEnabled(True) self.output_btn.setEnabled(True) self.pack_btn.setEnabled(True) logger.info(f"{NAME} Finished") def packer_finish(self): temp_out = "" for line in self.TEMP_OUT: temp_out += f"{line.lower()}\n" if "error" in temp_out: scshub_infobar( self.INFOBAR_POS, "error_btn", self.tr("Error occurred during process"), SCS_PACKER_LOG, ) logger.error(f"Error occurred during process, check {SCS_PACKER_LOG}") else: scshub_infobar( self.INFOBAR_POS, "success_btn", self.tr("Process finished"), os.path.split(self.OUTPUT)[0], ) logger.info("Process completed successfully") if self.TEMP_LOG != []: scshub_log(SCS_PACKER_LOG, self.TEMP_LOG) self.main_process = None self.TEMP_OUT = [] self.TEMP_LOG = [] def get_folder(self): if platform == "win32": 
folder_dialog = QFileDialog().getExistingDirectory(self, "Select folder") # only if folder selected if folder_dialog: folder_path = folder_dialog.replace("/", "\\") # enable buttons after file selected for first time if self.INPUT == "": self.output_btn.setEnabled(True) self.pack_btn.setEnabled(True) self.nocompress_chk.setEnabled(True) self.INPUT = folder_path output_path = f"{folder_path}.scs" self.OUTPUT = output_path self.input_btn.setToolTip(folder_path) self.output_btn.setToolTip(output_path) scshub_badge(self.top_card, self.input_btn) scshub_badge(self.top_card, self.output_btn) scshub_infobar(self.INFOBAR_POS, "success", self.tr("Folder imported")) logger.info(f'Set input folder to "{folder_path}"') else: scshub_infobar(self.INFOBAR_POS, "error", self.tr("Only work in windows")) logger.error(f"{NAME} Not work in {platform}, {NAME} only work in windows") def get_save_file(self): file_dialog = QFileDialog().getSaveFileName(self, "Save file", filter="SCS archive (*.scs)") # only if file selected if file_dialog[0]: file_path = file_dialog[0].replace("/", "\\") self.OUTPUTT = file_path self.output_btn.setToolTip(file_path) scshub_infobar(self.INFOBAR_POS, "success", self.tr("File imported")) logger.info(f'Set output file to "{file_path}"')
6,867
Python
.py
159
31.742138
100
0.596058
AmirMahdaviAM/SCSHub
8
0
0
GPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,806
sxc_finder_widget.py
AmirMahdaviAM_SCSHub/scshub/view/widget/sxc_finder_widget.py
import os import re import logging from sys import platform from pathlib import Path from PyQt5.QtCore import Qt, QProcess from PyQt5.QtWidgets import QWidget, QFileDialog from qfluentwidgets import ( IndeterminateProgressRing, InfoBarPosition, ToolTipFilter, InfoBarIcon, FluentIcon, InfoBar, Flyout, ) from ..ui.sxc_finder_ui import SxcFinderUi from ...common.tool import ( Downloader, signal_bus, scshub_log, scshub_badge, scshub_infobar, scshub_file_remover, scshub_file_remover, ) from ...common.info import ( SXC_EXTRACTOR_PATH, SXC_FINDER_PATH, SXC_FINDER_LOG, SXC_HDB_PATH, SXC_HDB_URL, SXC_UNZIP, SXC_ZIP, SXC_URL, ) NAME = "SXCFinder" logger = logging.getLogger(NAME) class SxcFinderWidget(QWidget, SxcFinderUi): def __init__(self, infobar_pos): super().__init__() self.INFOBAR_POS = infobar_pos self.INPUT = "" self.OUTPUT = "" self.INPUT_HIDDEN = "" self.EXPORT_HIDDEN = "" self.TEMP_OUT = [] self.TEMP_LOG = [] self.main_process = None self.tutorial = True self.finder_first_run = True self.run_count = 0 self.setupUi(self) self.init_ui() signal_bus.sxc_exist.connect(self.tools_exist) scshub_file_remover(SXC_FINDER_LOG) def init_ui(self): self.main_lyt.setAlignment(Qt.AlignmentFlag.AlignTop) self.top_card_lyt.setAlignment(Qt.AlignmentFlag.AlignLeft) self.download_btn.setIcon(FluentIcon.DOWNLOAD) self.download_btn.clicked.connect(lambda: self.downloader()) self.download_btn.hide() self.input_btn.setIcon(FluentIcon.DOWN) self.input_btn.clicked.connect(lambda: self.get_file()) self.input_btn.installEventFilter(ToolTipFilter(self.input_btn)) self.output_btn.setIcon(FluentIcon.UP) self.output_btn.clicked.connect(lambda: self.get_folder()) self.output_btn.installEventFilter(ToolTipFilter(self.output_btn)) self.run_btn.setIcon(FluentIcon.PLAY) self.run_btn.clicked.connect(lambda: self.finder_process()) def finder_process(self): if self.tutorial: Flyout.create( title="", content="Click run button several times until\nmissing item became unchanged.", 
icon=InfoBarIcon.INFORMATION, target=self.run_btn, parent=self.top_card, ) self.tutorial = False self.run_count += 1 if self.finder_first_run == True: if not os.path.isfile(self.EXPORT_HIDDEN): with open(self.EXPORT_HIDDEN, "at", encoding="utf-8") as f: f.write("manifest.sii\n") command = f'"{SXC_FINDER_PATH}" "{self.INPUT}" "{self.EXPORT_HIDDEN}"' logger.info(command) if self.main_process == None: self.main_process = QProcess() self.main_process.setProcessChannelMode(QProcess.MergedChannels) self.main_process.readyRead.connect(self.extractor_output) self.main_process.stateChanged.connect(self.extractor_state) self.main_process.finished.connect(self.finder_finish) self.main_process.waitForFinished(100) self.main_process.start(command) else: self.content_search() def finder_finish(self): temp_out = "" for line in self.TEMP_OUT: temp_out += f"{line.lower()}\n" self.log_txt.insertPlainText(f"{self.run_count:0>2}: SXCFinder:\n") if "error" in temp_out: scshub_infobar( self.INFOBAR_POS, "error_btn", self.tr("Error occurred during process"), SXC_FINDER_LOG, ) logger.error(f"Error occurred during process, check {SXC_FINDER_LOG}") else: scshub_infobar( self.INFOBAR_POS, "success_btn", self.tr("Process finished"), self.OUTPUT ) logger.info("Process completed successfully") if self.TEMP_LOG != []: scshub_log(SXC_FINDER_LOG, self.TEMP_LOG) for line in self.TEMP_LOG: self.log_txt.insertPlainText(line + "\n") self.log_txt.moveCursor(-1) self.log_txt.insertPlainText("\n") self.main_process = None self.TEMP_OUT = [] self.TEMP_LOG = [] self.finder_first_run = False self.extractor_process() def content_search(self): suffix = ["*.pmd", "*.mat", "*.tobj", "*.sii", "*.sui", "*.dat", "*.soundref", "*.font"] files = [] length = 0 for sfx in suffix: glob = list(self.INPUT_HIDDEN.rglob(sfx)) if glob != []: for item in glob: files.append(item) if not os.path.isdir(self.OUTPUT): os.mkdir(self.OUTPUT) try: if os.path.isdir(self.INPUT_HIDDEN): self.working_infobar = InfoBar.new( 
InfoBarIcon.INFORMATION, self.tr("Working"), "", Qt.Horizontal, False, -1, InfoBarPosition.TOP, self, ) ring_wgt = IndeterminateProgressRing(self) ring_wgt.setFixedSize(22, 22) ring_wgt.setStrokeWidth(4) self.working_infobar.addWidget(ring_wgt) for file in files: extracted_path = [] read_data = None # binary encode if file.name.endswith((".pmd", ".tobj")): with open(file, "rb") as f: read_data = f.read().decode("latin-1") # main regex search binery_rgx_find = re.findall(r"\/[a-zA-Z0-9_\.\/]*\.[a-zA-Z]{3}", read_data) # append finded items to list for item in binery_rgx_find: if item not in extracted_path: extracted_path.append(item) # normal encode else: with open(file, "r", encoding="utf-8") as f: read_data = f.read() # main regex search normal_rgx_find = re.findall( r"\/[a-zA-Z0-9_\.\/]*\.[a-zA-Z]{3,8}", read_data ) icon_rgx_find = re.findall( r"(icon[\s\t]*:{1}[\s\t]*)\"([a-zA-Z0-9_\.\/]*)\"", read_data ) # append finded items to list for item in normal_rgx_find: if item not in extracted_path: extracted_path.append(item) if item.endswith(".pmd"): extracted_path.append(f"{item[:-3]}pmg") elif item.endswith(".bank"): extracted_path.append(f"{item}.guids") # append finded items to list for item in icon_rgx_find: if item[1].endswith(".jpg"): extracted_path.append(item[1]) else: extracted_path.append(f"/material/ui/accessory/{item[1]}.mat") extracted_path.append(f"/material/ui/accessory/{item[1]}.tobj") extracted_path.append(f"/material/ui/accessory/{item[1]}.dds") length += 1 # write to file with open(self.EXPORT_HIDDEN, "at", encoding="utf-8") as f: f.writelines(paths + "\n" for paths in extracted_path) self.log_txt.insertPlainText(f"{self.run_count:0>2}: File Content Search:\n") self.log_txt.insertPlainText(f"Total {length} items found.") self.log_txt.insertPlainText("\n") self.log_txt.moveCursor(-1) self.log_txt.insertPlainText("\n") self.working_infobar.close() self.extractor_process() except Exception as msg: scshub_log(SXC_FINDER_LOG, msg) scshub_infobar( 
self.INFOBAR_POS, "error_btn", self.tr("Error occurred during process"), SXC_FINDER_LOG, ) def extractor_process(self): if not os.path.isdir(self.OUTPUT): os.mkdir(self.OUTPUT) if not os.path.isfile(self.EXPORT_HIDDEN): with open(self.EXPORT_HIDDEN, "at", encoding="utf-8") as f: f.write("manifest.sii\n") command = f'"{SXC_EXTRACTOR_PATH}" "{self.INPUT}" -o "{self.OUTPUT}" -lq -af -bl "{self.EXPORT_HIDDEN}"' logger.info(command) if self.main_process == None: self.main_process = QProcess() self.main_process.setProcessChannelMode(QProcess.MergedChannels) self.main_process.readyRead.connect(self.extractor_output) self.main_process.stateChanged.connect(self.extractor_state) self.main_process.finished.connect(self.extractor_finish) self.main_process.waitForFinished(100) self.main_process.start(command) def extractor_output(self): # get output data from process and decode it output = self.main_process.readAllStandardOutput() decoded_output = bytes(output).decode("utf-8") splitted_output = decoded_output.splitlines() print(decoded_output) for line in splitted_output: striped_line = line.strip() if striped_line != "": self.TEMP_OUT.append(striped_line) if ( "total" in striped_line.lower() or "processed" in striped_line.lower() or "elapsed" in striped_line.lower() or "error" in striped_line.lower() or "invalid" in striped_line.lower() or "found" in striped_line.lower() or "unable to open" in striped_line.lower() ): self.TEMP_LOG.append(striped_line) def extractor_state(self, state): states = { QProcess.NotRunning: "NotRunning", QProcess.Starting: "Starting", QProcess.Running: "Running", } state_name = states[state] if state_name == "Running": self.working_infobar = InfoBar.new( InfoBarIcon.INFORMATION, self.tr("Working"), "", Qt.Horizontal, False, -1, InfoBarPosition.TOP, self.INFOBAR_POS, ) ring_wgt = IndeterminateProgressRing(self) ring_wgt.setFixedSize(22, 22) ring_wgt.setStrokeWidth(4) self.working_infobar.addWidget(ring_wgt) self.input_btn.setDisabled(True) 
self.output_btn.setDisabled(True) self.run_btn.setDisabled(True) logger.info(f"{NAME} Running") elif state_name == "NotRunning": self.working_infobar.close() self.input_btn.setEnabled(True) self.output_btn.setEnabled(True) self.run_btn.setEnabled(True) logger.info(f"{NAME} Finished") def extractor_finish(self): temp_out = "" for line in self.TEMP_OUT: temp_out += f"{line.lower()}\n" self.log_txt.insertPlainText(f"{self.run_count:0>2}: SXCExtractor:\n") if ( "error" in temp_out or "invalid" in temp_out or "missing" in temp_out or "not found" in temp_out ): scshub_infobar( self.INFOBAR_POS, "error_btn", self.tr("Error occurred during process"), SXC_FINDER_LOG, ) logger.error(f"Error occurred during process, check {SXC_FINDER_LOG}") elif "warning" in temp_out: scshub_infobar( self.INFOBAR_POS, "warn_btn", self.tr("Process finished with warnings"), SXC_FINDER_LOG, ) logger.warning(f"Process finished with warnings, check {SXC_FINDER_LOG}") else: scshub_infobar( self.INFOBAR_POS, "success_btn", self.tr("Process finished"), self.OUTPUT ) logger.info("Process completed successfully") if self.TEMP_LOG != []: scshub_log(SXC_FINDER_LOG, self.TEMP_LOG) for line in self.TEMP_LOG: self.log_txt.insertPlainText(line + "\n") self.log_txt.moveCursor(-1) self.log_txt.insertPlainText("\n") self.main_process = None self.TEMP_OUT = [] self.TEMP_LOG = [] def get_file(self): if platform == "win32": file_dialog = QFileDialog().getOpenFileName( self, "Select file", filter="SCS archive (*.scs *.zip)" ) # only if file selected if file_dialog[0]: file_path = file_dialog[0].replace("/", "\\") # enable buttons after file selected for first time if self.INPUT == "": self.run_btn.setEnabled(True) self.INPUT = file_path output_path = f"{file_path[:-4]}_exp" self.OUTPUT = output_path self.INPUT_HIDDEN = Path(self.OUTPUT) self.EXPORT_HIDDEN = f"{file_path[:-3]}txt" self.input_btn.setToolTip(file_path) self.output_btn.setToolTip(output_path) self.run_count = 0 self.finder_first_run = True 
self.log_txt.setPlainText("") scshub_badge(self.top_card, self.input_btn) scshub_badge(self.top_card, self.output_btn) scshub_infobar(self.INFOBAR_POS, "success", self.tr("File imported")) logger.info(f'Set input file to "{file_path}"') else: scshub_infobar(self.INFOBAR_POS, "error", self.tr("Only work in windows")) logger.error(f"{NAME} Not work in {platform}, {NAME} only work in windows") def get_folder(self): folder_dialog = QFileDialog().getExistingDirectory(self, "Select folder") # only if folder selected if folder_dialog: folder_path = folder_dialog.replace("/", "\\") self.OUTPUT = folder_path self.output_btn.setToolTip(folder_path) logger.info(f'Set output folder to "{folder_path}"') scshub_infobar(self.INFOBAR_POS, "success", self.tr("Folder selected")) def downloader(self): if platform == "win32": self.downloader_process = Downloader(logger, SXC_URL, SXC_ZIP, SXC_UNZIP) self.downloader_process.started.connect(self.downloader_start) self.downloader_process.result.connect(self.downloader_finish) self.downloader_process.finished.connect(self.downloader_hdb) self.downloader_process.start() else: scshub_infobar(self.INFOBAR_POS, "error", self.tr("Only work in windows")) logger.error(f"{NAME} Not work in {platform}, {NAME} only work in windows") def downloader_hdb(self): self.downloadHdbProcess = Downloader(logger, SXC_HDB_URL, SXC_HDB_PATH) self.downloadHdbProcess.started.connect(self.downloader_start) self.downloadHdbProcess.result.connect(self.downloader_finish) self.downloadHdbProcess.start() def downloader_start(self): self.download_infobar = InfoBar.new( InfoBarIcon.INFORMATION, self.tr("Downloading"), "", Qt.Horizontal, False, -1, InfoBarPosition.TOP, self, ) ring_wgt = IndeterminateProgressRing(self) ring_wgt.setFixedSize(22, 22) ring_wgt.setStrokeWidth(4) self.download_infobar.addWidget(ring_wgt) self.download_btn.setDisabled(True) def downloader_finish(self, result: int): self.download_infobar.close() match result: case 0: 
signal_bus.sxc_exist.emit(True) scshub_infobar(self.INFOBAR_POS, "success", self.tr("Downloaded")) # self.installer() case 1: signal_bus.sxc_exist.emit(False) scshub_infobar(self.INFOBAR_POS, "error", self.tr("Error during download")) def installer(self): folder_dialog = QFileDialog().getExistingDirectory(self, "Select game root folder") # only if folder selected if folder_dialog: folder_path = folder_dialog.replace("/", "\\") bat_file = f"{SXC_UNZIP}\\installer.bat" db_file = f"{SXC_UNZIP}\\dblist.txt" hashdb_file = f"{SXC_UNZIP}\\sxc.hdb" scshub_file_remover(bat_file) scshub_file_remover(db_file) scshub_file_remover(hashdb_file) # files = ["base", "base_map", "base_share", "base_vehicle", "core", "def", "effect", "locale"] files = ["base", "core", "def", "effect", "locale"] for file in files: if file == "core" or file == "locale": command = f'"{SXC_EXTRACTOR_PATH}" "{folder_path}\\{file}.scs" -137243 -lq\n' else: command = f'"{SXC_EXTRACTOR_PATH}" "{folder_path}\\{file}.scs" -lq\n' index_path = ( f"{folder_path}\\{file.capitalize()}.idx\n".replace(":\\", "-") .replace("\\", "_") .replace(" ", "-") ) index = f"{SXC_UNZIP}\\{index_path}" with open(bat_file, "a", encoding="utf-8") as f: f.write(command) with open(db_file, "a", encoding="utf-8") as f: f.write(index) with open(bat_file, "a", encoding="utf-8") as f: f.write(f'"{SXC_EXTRACTOR_PATH}" -b "{db_file}"') self.installer_process = QProcess() self.installer_process.stateChanged.connect(self.installer_state) self.installer_process.start(f"{SXC_UNZIP}\installer.bat") def installer_state(self, state): states = { QProcess.NotRunning: "NotRunning", QProcess.Starting: "Starting", QProcess.Running: "Running", } state_name = states[state] if state_name == "Running": self.installer_infobar = InfoBar.new( InfoBarIcon.INFORMATION, self.tr("Indexing game files"), "", Qt.Horizontal, False, -1, InfoBarPosition.TOP, self.INFOBAR_POS, ) ringWgt = IndeterminateProgressRing(self) ringWgt.setFixedSize(22, 22) 
ringWgt.setStrokeWidth(4) self.installer_infobar.addWidget(ringWgt) elif state_name == "NotRunning": self.installer_infobar.close() def tools_exist(self, exist: bool): if exist: self.input_btn.setEnabled(True) self.download_btn.hide() self.download_btn.setDisabled(True) logger.info(f"{NAME} exist") else: self.input_btn.setDisabled(True) self.download_btn.show() self.download_btn.setEnabled(True) logger.info(f"{NAME} not exist")
20,270
Python
.py
466
29.706009
112
0.538692
AmirMahdaviAM/SCSHub
8
0
0
GPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,807
pix_hasher_widget.py
AmirMahdaviAM_SCSHub/scshub/view/widget/pix_hasher_widget.py
import re
import logging

from PyQt5.QtCore import Qt, QProcess, QRegExp
from PyQt5.QtGui import QRegExpValidator
from PyQt5.QtWidgets import QWidget, QFileDialog, QVBoxLayout

from qfluentwidgets import ToolTipFilter, FluentIcon

from ..ui.pix_hasher_fil_ui import PixFileHasherUi
from ..ui.pix_hasher_str_ui import PixStringHasherUi
from ...common.tool import signal_bus, scshub_infobar, scshub_badge
from ...common.info import PIX_CONVERTER_PATH

NAME = "PIXHasher"
logger = logging.getLogger(NAME)

# The converter prints a line such as:
#   CityHash64("...") = 1234567890 (abcdef12)
# Capture group 1 is the decimal hash, group 2 the hex form.  Compiled once at
# module level because both hasher widgets reuse it on every process output.
_CITYHASH_RE = re.compile(r"CityHash64[\S]*\s\S\s([0-9]*)\s\(([a-zA-Z0-9]*)\)")


def _parse_cityhash_output(process):
    """Decode a converter process' merged stdout and extract the CityHash64
    result.

    Returns a ``(decimal_str, hex_str)`` tuple, or ``None`` when no matching
    line is present (previously an unguarded ``rgx_find[0]`` raised
    ``IndexError`` on unexpected output).
    """
    output = process.readAllStandardOutput()
    decoded_output = bytes(output).decode("utf-8")

    for line in decoded_output.splitlines():
        if line.startswith("CityHash64"):
            match = _CITYHASH_RE.search(line)
            if match:
                return match.group(1), match.group(2)
    return None


class PixFileHasher(QWidget, PixFileHasherUi):
    """Widget that hashes the *content* of a user-selected file via the PIX
    converter's ``--calc-cityhash64-file`` mode."""

    def __init__(self, infobar_pos):
        super().__init__()

        self.INFOBAR_POS = infobar_pos  # parent widget used to anchor infobars
        self.INPUT = ""  # currently selected input file path

        self.setupUi(self)
        self.init_ui()

        # Enable the import button only while the converter binary exists.
        # (Renamed lambda arg from `bool`, which shadowed the builtin.)
        signal_bus.pix_exist.connect(lambda exist: self.input_btn.setEnabled(exist))

    def init_ui(self):
        """Wire up static UI properties and signals."""
        self.top_card_lyt.setAlignment(Qt.AlignmentFlag.AlignLeft)

        self.input_btn.setIcon(FluentIcon.DOWN)
        self.input_btn.clicked.connect(self.get_file)
        self.input_btn.installEventFilter(ToolTipFilter(self.input_btn))

    def calculate_process(self):
        """Start the converter asynchronously on the selected file."""
        command = f'"{PIX_CONVERTER_PATH}" --calc-cityhash64-file "{self.INPUT}"'
        logger.info(command)

        self.main_process = QProcess()
        self.main_process.setProcessChannelMode(QProcess.MergedChannels)
        self.main_process.readyRead.connect(self.calculate_output)
        # NOTE: the original called waitForFinished(100) *before* start(),
        # which is a no-op on a not-yet-running process; dropped.
        self.main_process.start(command)

    def calculate_output(self):
        """Publish the parsed hash pair into the result line edits."""
        result = _parse_cityhash_output(self.main_process)
        if result:
            self.hash_line.setText(result[0])
            self.hex_line.setText(result[1])

    def get_file(self):
        """Ask for an input file, remember it, and kick off hashing."""
        file_dialog = QFileDialog().getOpenFileName(self, "Select file")

        # only if file selected
        if file_dialog[0]:
            file_path = file_dialog[0].replace("/", "\\")
            self.INPUT = file_path
            self.input_btn.setToolTip(file_path)

            scshub_badge(self.top_card, self.input_btn)
            scshub_infobar(self.INFOBAR_POS, "success", self.tr("File imported"))
            logger.info(f'Set input file to "{file_path}"')

            self.calculate_process()


class PixStringHasher(QWidget, PixStringHasherUi):
    """Widget that hashes the typed string via ``--calc-cityhash64``."""

    def __init__(self, infobar_pos):
        super().__init__()

        self.INFOBAR_POS = infobar_pos

        self.setupUi(self)
        self.init_ui()

        signal_bus.pix_exist.connect(lambda exist: self.input_line.setEnabled(exist))

    def init_ui(self):
        """Wire up input-line signals; recompute on every edit."""
        self.input_line.textChanged.connect(self.calculate_process)
        self.input_line.returnPressed.connect(self.calculate_process)
        # disallow whitespace inside the hashed string
        self.input_line.setValidator(QRegExpValidator(QRegExp(r"[\S]*")))

    def calculate_process(self):
        """Hash the current text, or clear the results when it is empty."""
        if self.input_line.text() != "":
            command = f'"{PIX_CONVERTER_PATH}" --calc-cityhash64 {self.input_line.text()}'
            logger.info(command)

            self.main_process = QProcess()
            self.main_process.setProcessChannelMode(QProcess.MergedChannels)
            self.main_process.readyRead.connect(self.calculate_output)
            self.main_process.start(command)
        else:
            # clear stale results when the field is emptied
            self.hash_line.setText("")
            self.hex_line.setText("")

    def calculate_output(self):
        """Publish the parsed hash pair into the result line edits."""
        result = _parse_cityhash_output(self.main_process)
        if result:
            self.hash_line.setText(result[0])
            self.hex_line.setText(result[1])


class PixHasherWidget(QWidget):
    """Container stacking the file hasher above the string hasher."""

    def __init__(self, infobar_pos):
        super().__init__()

        self.INFOBAR_POS = infobar_pos

        self.main_lyt = QVBoxLayout(self)
        self.main_lyt.setContentsMargins(0, 0, 0, 0)
        self.main_lyt.setSpacing(20)
        self.main_lyt.setAlignment(Qt.AlignmentFlag.AlignTop)

        self.hash_file = PixFileHasher(infobar_pos)
        self.hash_string = PixStringHasher(infobar_pos)

        self.main_lyt.addWidget(self.hash_file)
        self.main_lyt.addWidget(self.hash_string)
5,015
Python
.py
107
37.448598
97
0.646756
AmirMahdaviAM/SCSHub
8
0
0
GPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,808
def_creator_widget.py
AmirMahdaviAM_SCSHub/scshub/view/widget/def_creator_widget.py
import os
import logging

from PyQt5.QtCore import Qt, QRegExp
from PyQt5.QtGui import QRegExpValidator
from PyQt5.QtWidgets import QWidget, QFileDialog

from qfluentwidgets import ToolTipFilter, FluentIcon, Flyout, InfoBarIcon, InfoLevel, setFont

from ..ui.def_creator_ui import DefCreatorUi
from ...common.tool import scshub_infobar, scshub_badge

NAME = "DefCreator"
logger = logging.getLogger(NAME)


class DefCreatorWidget(QWidget, DefCreatorUi):
    """Widget that batch-generates SCS ``.sii`` accessory definition files for
    every ``.pim``/``.pmd`` model found in a selected accessory folder tree.
    """

    def __init__(self, infobar_pos):
        super().__init__()
        # Widget that infobars are anchored to.
        self.INFOBAR_POS = infobar_pos
        # Selected input (accessory models) and output (mod root) folders.
        self.INPUT = ""
        self.OUTPUT = ""
        self.setupUi(self)
        self.init_ui()
        self.option_ui()
        # Start with the custom-template row hidden (combo index 0).
        self.toggle_template(0)
        self.preview()
        # NOTE(review): `temp` and `done` appear unused in this module.
        self.temp = 0
        self.done = False

    def init_ui(self):
        """Wire up the top action bar: folder pickers, run button, preview label."""
        self.main_lyt.setAlignment(Qt.AlignmentFlag.AlignTop)
        self.top_card_lyt.setAlignment(Qt.AlignmentFlag.AlignLeft)
        self.input_btn.setIcon(FluentIcon.DOWN)
        self.input_btn.clicked.connect(lambda: self.get_folder("input"))
        self.input_btn.installEventFilter(ToolTipFilter(self.input_btn))
        self.output_btn.setIcon(FluentIcon.UP)
        self.output_btn.clicked.connect(lambda: self.get_folder("output"))
        self.output_btn.installEventFilter(ToolTipFilter(self.output_btn))
        self.run_btn.setIcon(FluentIcon.PLAY)
        self.run_btn.clicked.connect(lambda: self.run())
        self.run_btn.installEventFilter(ToolTipFilter(self.run_btn))
        # Skip/overwrite segment is currently disabled.
        self.run_sgmnt.hide()
        # self.run_sgmnt.addItem("skip", self.tr("Skip exist"))
        # self.run_sgmnt.addItem("overwrite", self.tr("Overwrite"))
        # self.run_sgmnt.installEventFilter(ToolTipFilter(self.run_sgmnt))
        self.preview_name_lbl.setLevel(InfoLevel.INFOAMTION)
        setFont(self.preview_name_lbl, 12)

    def option_ui(self):
        """Wire up the option inputs; each edit refreshes the live preview."""
        self.option_lbl_lyt.setAlignment(Qt.AlignmentFlag.AlignTop)
        self.option_wgt_lyt.setAlignment(Qt.AlignmentFlag.AlignTop)
        # Truck unit name, e.g. "scania.s" (two lowercase tokens joined by a dot).
        self.truck_line.setValidator(QRegExpValidator(QRegExp("[a-z_0-9]{1,12}\.[a-z_0-9]{1,12}")))
        self.truck_line.installEventFilter(ToolTipFilter(self.truck_line))
        self.truck_line.textChanged.connect(lambda: self.preview())
        self.filename_cmb.addItems([self.tr("PMD filenames"), self.tr("Custom template")])
        self.filename_cmb.installEventFilter(ToolTipFilter(self.filename_cmb))
        self.filename_cmb.currentTextChanged.connect(lambda: self.preview())
        self.filename_cmb.currentIndexChanged.connect(self.toggle_template)
        self.template_line.setValidator(QRegExpValidator(QRegExp("[a-z_0-9]{1,10}")))
        self.template_line.installEventFilter(ToolTipFilter(self.template_line))
        self.template_line.textChanged.connect(lambda: self.preview())
        self.icon_line.setValidator(QRegExpValidator(QRegExp("[a-z_0-9/]*")))
        self.icon_line.installEventFilter(ToolTipFilter(self.icon_line))
        self.icon_line.textChanged.connect(lambda: self.preview())

    def toggle_template(self, index: int):
        """Show the custom-name template row only for combo index 1."""
        if index == 0:
            self.template_lbl.hide()
            self.template_line.hide()
        else:
            self.template_lbl.show()
            self.template_line.show()

    def preview(self):
        """Render a sample .sii unit and the folder summary into the preview panes.

        Placeholder tokens FILE/NAME/FOLDER/TRUCK stand in for values only known
        at generation time.
        """
        # sii
        if self.filename_cmb.currentIndex() == 1:
            acc_file = f"{self.template_line.text()}01"
            acc_name = f'{self.template_line.text().capitalize().replace("_", " ")} 01'
        else:
            acc_file = "FILE"
            acc_name = "NAME"
        preview = f"accessory_addon_data : {acc_file}.{self.truck_line.text()}.FOLDER\n"
        preview += f'name: "{acc_name}"\n'
        preview += f"price: 100\n"
        preview += f"unlock: 0\n"
        if self.icon_line.text() == "":
            preview += f'icon: "truck/{self.truck_line.text().replace(".", "_")}/accessory/FOLDER/{acc_file}"\n'
        else:
            preview += f'icon: "{self.icon_line.text()}/{acc_file}"\n'
        preview += f'part_type: "factory"\n'
        preview += "\n"
        preview += f'exterior_model: "/vehicle/truck/TRUCK/accessory/FOLDER/FILE.pmd"'
        self.preview_txt.setPlainText(preview)
        self.preview_name_lbl.setText(f"{acc_file}.sii")
        # folder
        folder = f"Input accessories models folder (must be .../../vehicle/truck/xxxx_xxxx/accessory/):\n{self.INPUT}\n\n"
        folder += f"Output folder (must be .../../):\n{self.OUTPUT}\n\n"
        folder += f"Path to be make:\n.../../def/vehicle/{self.truck_line.text()}/accessory/..."
        self.folder_txt.setPlainText(folder)

    def sii_template(
        self,
        part: str,
        folder: str,
        name: str,
        price: int,
        ext_model: str,
    ) -> str:
        """Build one complete SiiNunit accessory_addon_data unit as a string.

        ``part`` is the accessory file stem, ``folder`` the accessory subfolder,
        ``ext_model`` the in-mod path of the .pmd model.
        """
        # header
        template = "SiiNunit\n{\n"
        template += f"accessory_addon_data : {part}.{self.truck_line.text()}.{folder}\n"
        # middle
        template += "{\n"
        template += f' name: "{name}"\n'
        template += f" price: {price}\n"
        template += f" unlock: 0\n"
        if self.icon_line.text() == "":
            template += f' icon: "truck/{self.truck_line.text().replace(".", "_")}/accessory/{folder}/{part}"\n'
        else:
            template += f' icon: "{self.icon_line.text()}/{part}"\n'
        template += f' part_type: "factory"\n'
        template += "\n"
        template += f' exterior_model: "{ext_model}"\n'
        template += "}\n}\n"
        return template

    def execute(self):
        """Walk INPUT for models and write one .sii per model under OUTPUT.

        Numbering/price restart at 1/100 for each accessory subfolder and
        increment for every further model found in the same subfolder.
        """
        number = 1
        price = 100
        last_dir = ""
        for path, dirs, files in os.walk(self.INPUT):
            if files != []:
                for file in files:
                    if file.endswith((".pim", ".pmd")):
                        # check if current folder in accessory folder or not
                        # to prevent from other sub-folder involve
                        acc_root = os.path.split(path)
                        if acc_root[0] == self.INPUT:
                            # check if still in last folder or not
                            if acc_root[1] == last_dir:
                                number += 1
                                price += 100
                            else:
                                number = 1
                                price = 100
                                last_dir = acc_root[1]
                            # use pmd name
                            if self.filename_cmb.currentIndex() == 0:
                                acc_file = file[:-4]
                                acc_name = file[:-4].capitalize().replace("_", " ")
                                sii_filename = f"{file[:-3]}sii"
                            # use custom name
                            else:
                                acc_file = f"{self.template_line.text()}{number:0>2}"
                                acc_name = f'{self.template_line.text().capitalize().replace("_", " ")} {number:0>2}'
                                sii_filename = f"{acc_file}.sii"
                            # get (mod base folder)
                            base_path = os.path.basename(self.OUTPUT)
                            # get (model full path)
                            file_full_path = os.path.join(path, file)
                            # find (mod base folder) name in (model full path)
                            find_root = file_full_path.find(base_path) + len(base_path)
                            # remove unused address after (mod base folder) name from (model full path)
                            model_path = file_full_path[find_root:-3].replace("\\", "/")
                            # make accessories folders
                            def_dir = f"def/vehicle/truck/{self.truck_line.text()}/accessory/{acc_root[1]}"
                            sii_dir = os.path.join(self.OUTPUT, def_dir)
                            os.makedirs(sii_dir, exist_ok=True)
                            # sii template
                            acc = self.sii_template(
                                part=acc_file,
                                folder=acc_root[1],
                                name=acc_name,
                                price=price,
                                ext_model=f"{model_path}pmd",
                            )
                            # write to sii files
                            sii_file = os.path.join(sii_dir, sii_filename)
                            with open(sii_file, "w", encoding="utf-8") as f:
                                f.write(acc)

    def run(self):
        """Validate required fields, then generate files and notify the user."""
        if self.truck_line.text() == "":
            Flyout.create(
                title="",
                content="Truck folder field is empty",
                icon=InfoBarIcon.ERROR,
                target=self.run_btn,
                parent=self.top_card,
            )
        elif self.filename_cmb.currentIndex() == 1 and self.template_line.text() == "":
            Flyout.create(
                title="",
                content="Name template field is empty",
                icon=InfoBarIcon.ERROR,
                target=self.run_btn,
                parent=self.top_card,
            )
        else:
            self.execute()
            scshub_infobar(
                self.INFOBAR_POS, "success_btn", self.tr("Process finished"), self.OUTPUT
            )
            logger.info("Process completed successfully")

    def get_folder(self, mode: str):
        """Pick the input or output folder (``mode`` is "input" or "output")
        and unlock the dependent buttons on first selection."""
        folder_dialog = QFileDialog().getExistingDirectory(self, "Select folder")
        # only if folder selected
        if folder_dialog:
            folder_path = folder_dialog.replace("/", "\\")
            match mode:
                case "input":
                    # enable buttons after folder selected for first time
                    if self.INPUT == "":
                        self.output_btn.setEnabled(True)
                    self.INPUT = folder_path
                    self.input_btn.setToolTip(self.INPUT)
                    scshub_badge(self.top_card, self.input_btn)
                    logger.info(f'Set input folder to "{self.INPUT}"')
                case "output":
                    # enable buttons after folder selected for first time
                    if self.OUTPUT == "":
                        self.run_btn.setEnabled(True)
                        # self.run_sgmnt.setEnabled(True)
                        # self.run_sgmnt.setCurrentItem("skip")
                    self.OUTPUT = folder_path
                    self.output_btn.setToolTip(self.OUTPUT)
                    scshub_badge(self.top_card, self.output_btn)
                    logger.info(f'Set output folder to "{self.OUTPUT}"')
            self.preview()
            scshub_infobar(self.INFOBAR_POS, "success", self.tr("Folder selected"))
10,864
Python
.py
218
34.665138
122
0.534285
AmirMahdaviAM/SCSHub
8
0
0
GPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,809
pix_finder_widget.py
AmirMahdaviAM_SCSHub/scshub/view/widget/pix_finder_widget.py
import os
import logging
from random import randint

from PyQt5.QtCore import Qt, QProcess
from PyQt5.QtWidgets import QWidget, QFileDialog, QListWidgetItem, QAbstractItemView

from qfluentwidgets import (
    FluentIconBase,
    ToolTipFilter,
    FluentIcon,
    InfoLevel,
)

from ..ui.pix_finder_ui import PixFinderUi
from ...common.tool import ScsHubIcon, signal_bus, scshub_infobar, scshub_badge, scshub_log
from ...common.info import PIX_CONVERTER_PATH, PIX_FINDER_LOG

NAME = "PIXFinder"
logger = logging.getLogger(NAME)


class PixFinderWidget(QWidget, PixFinderUi):
    """Browser for SCS archives that lists .pmg models per folder and, on
    selection, asks the PIX converter which animations a model uses."""

    def __init__(self, infobar_pos):
        super().__init__()
        # Widget used to anchor infobars.
        self.INFOBAR_POS = infobar_pos
        # Selected archive paths (multiple archives are merged by the converter).
        self.INPUTS = []
        self.OUTPUT = ""
        # Current virtual path inside the archive(s); "/" is the root.
        self.PATH = "/"
        # Last selected .pmg file name, so it can be stripped from PATH again.
        self.LAST_FILE = ""
        # Accumulated process output / log lines between start and finish.
        self.TEMP_OUT = []
        self.TEMP_LOG = []
        # None acts as a "no process running" guard for both QProcess slots.
        self.main_process = None
        self.anim_process = None
        self.setupUi(self)
        self.init_ui()
        signal_bus.pix_exist.connect(
            lambda exist: (
                self.input_btn.setEnabled(True) if exist else self.input_btn.setDisabled(True)
            )
        )

    def init_ui(self):
        """Wire up toolbar buttons, breadcrumb navbar and the two list views."""
        self.top_card_lyt.setAlignment(Qt.AlignmentFlag.AlignLeft)
        self.reset_btn.setIcon(FluentIcon.DELETE)
        self.reset_btn.clicked.connect(lambda: self.reset_inputs())
        self.reset_btn.installEventFilter(ToolTipFilter(self.reset_btn))
        self.input_btn.setIcon(FluentIcon.DOWN)
        self.input_btn.clicked.connect(lambda: self.get_file())
        self.input_btn.installEventFilter(ToolTipFilter(self.input_btn))
        self.find_btn.setIcon(FluentIcon.SEARCH)
        self.find_btn.clicked.connect(lambda: self.finder_process())
        self.navbar.setSpacing(10)
        self.navbar.setFixedHeight(19)
        self.navbar.currentItemChanged.connect(lambda objectname: self.change_path(objectname))
        self.back_btn.setIcon(FluentIcon.LEFT_ARROW)
        self.back_btn.installEventFilter(ToolTipFilter(self.back_btn))
        self.back_btn.clicked.connect(lambda: self.go_back())
        self.home_btn.setIcon(FluentIcon.HOME)
        self.home_btn.installEventFilter(ToolTipFilter(self.home_btn))
        self.home_btn.clicked.connect(lambda: self.go_home())
        self.refresh_btn.setIcon(FluentIcon.UPDATE)
        self.refresh_btn.installEventFilter(ToolTipFilter(self.refresh_btn))
        self.refresh_btn.clicked.connect(lambda: self.refresh_path())
        self.model_list.doubleClicked.connect(self.finder_process)
        self.model_list.currentTextChanged.connect(self.goto_folder)
        self.model_badge.setLevel(InfoLevel.INFOAMTION)
        self.anim_list.setSelectionMode(QAbstractItemView.NoSelection)
        self.anim_badge.setLevel(InfoLevel.INFOAMTION)

    def list_process(self):
        """Run the converter's ``-listdir`` over all archives for PATH.

        Guarded by ``main_process is None`` so only one listing runs at a time.
        """
        argument = ""
        for file_path in self.INPUTS:
            argument += f'-b "{file_path}" '
        argument += f"-listdir {self.PATH}"
        command = f'"{PIX_CONVERTER_PATH}" {argument}'
        logger.info(command)
        if self.main_process == None:
            self.main_process = QProcess()
            self.main_process.setProcessChannelMode(QProcess.MergedChannels)
            self.main_process.readyRead.connect(self.list_output)
            self.main_process.finished.connect(self.list_finish)
            self.main_process.waitForFinished(100)
            self.main_process.start(command)

    def list_output(self):
        """Collect non-empty listing lines into TEMP_OUT."""
        # get output data from process and decode it
        output = self.main_process.readAllStandardOutput()
        decoded_output = bytes(output).decode("utf-8")
        splitted_output = decoded_output.splitlines()
        for line in splitted_output:
            if line.strip() != "":
                self.TEMP_OUT.append(line.strip())

    def list_finish(self):
        """Split listing output into folders/.pmg files and repopulate the view.

        Lines are prefixed "[D] " for directories and "[F] " for files.
        """
        self.TEMP_OUT.sort()
        self.model_list.clearSelection()
        self.model_list.clear()
        self.anim_list.clearSelection()
        self.anim_list.clear()
        folders = []
        files = []
        for line in self.TEMP_OUT:
            # create directory list
            if line.startswith("[D] "):
                folders.append(os.path.relpath(line[4:], self.PATH))
            # create file list
            elif line.startswith("[F] "):
                # cheack suffix and only include specified in list
                if line.endswith(".pmg"):
                    files.append(os.path.relpath(line[4:], self.PATH))
        # set list count to badges
        self.model_badge.setText(str(len(files)))
        # add items in list to list view
        for folder in folders:
            self.model_list.addItem(
                QListWidgetItem(FluentIconBase.qicon(ScsHubIcon.FOLDER), folder)
            )
        for file in files:
            self.model_list.addItem(QListWidgetItem(FluentIconBase.qicon(ScsHubIcon.MODEL), file))
        # release the single-process guard for the next listing
        self.main_process = None
        self.TEMP_OUT = []

    def finder_process(self):
        """Run ``--find-model-animations`` for the selected .pmg.

        PATH must end in a file name (contain a dot); PATH[:-4] strips ".pmg".
        """
        if "." in os.path.basename(self.PATH):
            argument = ""
            for file_path in self.INPUTS:
                argument += f'-b "{file_path}" '
            argument += f"--find-model-animations {self.PATH[:-4]}"
            command = f'"{PIX_CONVERTER_PATH}" {argument}'
            logger.info(command)
            if self.anim_process == None:
                self.anim_process = QProcess()
                self.anim_process.readyReadStandardOutput.connect(self.finder_output)
                self.anim_process.finished.connect(self.finder_finish)
                self.anim_process.waitForFinished(100)
                self.anim_process.start(command)
        else:
            scshub_infobar(self.INFOBAR_POS, "info", "No file selected")

    def finder_output(self):
        """Collect finder output; error-looking lines and hits also go to TEMP_LOG."""
        # get output data from process and decode it
        output = self.anim_process.readAllStandardOutput()
        decoded_output = bytes(output).decode("utf-8")
        splitted_output = decoded_output.splitlines()
        for line in splitted_output:
            striped_line = line.strip()
            if striped_line != "":
                self.TEMP_OUT.append(striped_line)
                if (
                    "unable" in striped_line.lower()
                    or "unknown" in striped_line.lower()
                    or "invalid" in striped_line.lower()
                    or "skeleton" in striped_line.lower()
                    or "unexpected" in striped_line.lower()
                    or striped_line.startswith("/")
                ):
                    self.TEMP_LOG.append(striped_line)

    def finder_finish(self):
        """Show found animation paths (lines starting with "/"), or report errors."""
        self.anim_list.clearSelection()
        self.anim_list.clear()
        self.TEMP_OUT.sort()
        temp_out = ""
        for line in self.TEMP_OUT:
            temp_out += f"{line.lower()}\n"
        if (
            "unable" in temp_out
            or "invalid" in temp_out
            or "skeleton" in temp_out
            or "unexpected" in temp_out
        ):
            scshub_infobar(
                self.INFOBAR_POS,
                "error_btn",
                self.tr("Error occurred during process"),
                PIX_FINDER_LOG,
            )
            logger.error(f"Error occurred during process, check {PIX_FINDER_LOG}")
        else:
            files = []
            for line in self.TEMP_OUT:
                if line.startswith("/"):
                    files.append(line)
            # set list count to badges
            self.anim_badge.setText(str(len(files)))
            for file in files:
                self.anim_list.addItem(QListWidgetItem(FluentIconBase.qicon(ScsHubIcon.ANIM), file))
            scshub_infobar(self.INFOBAR_POS, "success", self.tr("Finished succesfully"))
            logger.info("Process completed successfully")
        if self.TEMP_LOG != []:
            scshub_log(PIX_FINDER_LOG, self.TEMP_LOG)
        self.anim_process = None
        self.TEMP_OUT = []
        self.TEMP_LOG = []

    def reset_inputs(self):
        """Forget all selected archives and clear the navigation state."""
        self.INPUTS = []
        self.input_btn.setToolTip("")
        self.input_badge.close()
        self.navbar.clear()
        self.refresh_path()

    def add_root_item(self):
        """(Re)create the breadcrumb root: archive name, or "multi-archive"."""
        self.navbar.clear()
        if len(self.INPUTS) < 2:
            self.navbar.addItem("root", f"{os.path.split(self.INPUTS[0])[1]}")
        else:
            self.navbar.addItem("root", "multi-archive")

    def remove_last_selcted(self):
        """Strip the last selected file name from PATH so it is a folder again."""
        if self.PATH.endswith(self.LAST_FILE):
            self.PATH = self.PATH.replace(self.LAST_FILE, "")

    def refresh_path(self):
        """Re-list the current folder."""
        self.remove_last_selcted()
        self.list_process()

    def change_path(self, selected: str):
        """Jump to a breadcrumb item; item object names carry a 2-digit random
        suffix, hence the ``selected[:-2]`` when locating it inside PATH."""
        # back to root of scs file if first item selected in navbar_itemgation bar
        if selected == "root":
            self.PATH = "/"
            self.back_btn.setDisabled(True)
            self.home_btn.setDisabled(True)
        # find selected item index in saved path string and
        # delete all after item name itself and update new path
        else:
            self.PATH = self.PATH[0 : self.PATH.find(selected[:-2]) + len(selected[:-2])].replace(
                "\\", "/"
            )
            self.back_btn.setEnabled(True)
            self.home_btn.setEnabled(True)
        self.list_process()

    def go_back(self):
        """Pop one breadcrumb level and re-list."""
        self.remove_last_selcted()
        if not self.navbar.currentIndex() == 0:
            self.navbar.popItem()
        self.list_process()

    def go_home(self):
        """Return to the archive root."""
        self.PATH = "/"
        self.ANIM_PATH = "/"
        self.add_root_item()
        self.list_process()

    def goto_folder(self, selected: str):
        """Descend into a folder (re-list) or mark a .pmg file as selected."""
        # check if not empty and add last item to navbar_itemgation bar
        if selected != "":
            self.remove_last_selcted()
            if not selected.endswith(".pmg"):
                # update path and add seelcted folder to it
                self.PATH = os.path.join(self.PATH, selected).replace("\\", "/")
                navbar_item = os.path.split(self.PATH)
                self.navbar.addItem(f"{selected}{randint(11, 99)}", navbar_item[1])
                self.list_process()
            else:
                self.PATH = os.path.join(self.PATH, selected).replace("\\", "/")
                self.LAST_FILE = selected

    def get_file(self):
        """Pick one or more archives, enable the toolbar and list the root."""
        file_dialog = QFileDialog().getOpenFileNames(
            self, "Select file", filter="SCS archives (*.zip *.scs)"
        )
        if file_dialog[0]:
            file_path = file_dialog[0]
            # enable buttons after file selected for first time
            if self.INPUTS == []:
                self.reset_btn.setEnabled(True)
                self.find_btn.setEnabled(True)
                self.refresh_btn.setEnabled(True)
                self.input_badge = scshub_badge(self.top_card, self.input_btn)
            for file in file_path:
                self.INPUTS.append(file)
            # set buttons tooltip
            tooltip = ""
            for file in self.INPUTS:
                tooltip += f"{file}\n"
            self.input_btn.setToolTip(tooltip[:-1])
            scshub_infobar(self.INFOBAR_POS, "success", self.tr("File imported"))
            logger.info(f'Set input file to "{self.INPUTS}"')
            self.add_root_item()
            self.list_process()
11,379
Python
.py
261
32.344828
100
0.59622
AmirMahdaviAM/SCSHub
8
0
0
GPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,810
scs_extractor_wdiget.py
AmirMahdaviAM_SCSHub/scshub/view/widget/scs_extractor_wdiget.py
import os
import logging
from sys import platform

from PyQt5.QtCore import Qt, QProcess
from PyQt5.QtWidgets import QWidget, QFileDialog

from qfluentwidgets import (
    IndeterminateProgressRing,
    InfoBarPosition,
    ToolTipFilter,
    InfoBarIcon,
    FluentIcon,
    InfoBar,
)

from ..ui.scs_extractor_ui import ScsExtractorUi
from ...common.tool import (
    Downloader,
    signal_bus,
    scshub_log,
    scshub_badge,
    scshub_infobar,
    scshub_file_remover,
)
from ...common.info import (
    SCS_EXTRACTOR_LOG,
    SCS_TOOL_UNZIP,
    SCS_TOOL_PATH,
    SCS_TOOL_URL,
    SCS_TOOL_ZIP,
)

NAME = "SCSExtractor"
logger = logging.getLogger(NAME)


class ScsExtractorWidget(QWidget, ScsExtractorUi):
    """Front-end for the official (Windows-only) SCS extractor tool: downloads
    the tool, runs ``extract`` on a .scs archive, and reports progress/errors."""

    def __init__(self, infobar_pos):
        super().__init__()
        # Widget used to anchor infobars.
        self.INFOBAR_POS = infobar_pos
        # Selected archive and destination folder.
        self.INPUT = ""
        self.OUTPUT = ""
        # Accumulated process output / error lines between start and finish.
        self.TEMP_OUT = []
        self.TEMP_LOG = []
        # None acts as a "no extraction running" guard.
        self.main_process = None
        self.setupUi(self)
        self.init_ui()
        signal_bus.scs_exist.connect(self.tools_exist)
        # start each session with a fresh log file
        scshub_file_remover(SCS_EXTRACTOR_LOG)

    def init_ui(self):
        """Wire up the action buttons and the io-buffer option."""
        self.top_card_lyt.setAlignment(Qt.AlignmentFlag.AlignLeft)
        self.download_btn.setIcon(FluentIcon.DOWNLOAD)
        self.download_btn.clicked.connect(lambda: self.downloader())
        self.download_btn.hide()
        self.input_btn.setIcon(FluentIcon.DOWN)
        self.input_btn.clicked.connect(lambda: self.get_file())
        self.input_btn.installEventFilter(ToolTipFilter(self.input_btn))
        self.output_btn.setIcon(FluentIcon.UP)
        self.output_btn.clicked.connect(lambda: self.get_folder())
        self.output_btn.installEventFilter(ToolTipFilter(self.output_btn))
        self.extract_btn.setIcon(FluentIcon.LINK)
        self.extract_btn.clicked.connect(lambda: self.extractor_process())
        # the spinbox is only meaningful while its checkbox is ticked
        self.iobuffer_chk.stateChanged.connect(
            lambda state: (
                self.iobuffer_spn.setEnabled(True) if state else self.iobuffer_spn.setDisabled(True)
            )
        )
        self.iobuffer_chk.installEventFilter(ToolTipFilter(self.iobuffer_chk))

    def extractor_process(self):
        """Launch the extractor on INPUT into OUTPUT (single instance only).

        The spinbox text ends in a unit suffix; ``[:-3]`` strips it before
        passing the numeric value to ``-io-buffers-size``.
        """
        command = f'"{SCS_TOOL_PATH}" extract "{self.INPUT}" -root "{self.OUTPUT}"'
        if self.iobuffer_chk.isChecked():
            command += f" -io-buffers-size {self.iobuffer_spn.text()[:-3]}"
        logger.info(command)
        if self.main_process == None:
            self.main_process = QProcess()
            self.main_process.setProcessChannelMode(QProcess.MergedChannels)
            self.main_process.readyRead.connect(self.extractor_output)
            self.main_process.stateChanged.connect(self.extractor_state)
            self.main_process.finished.connect(self.extractor_finish)
            self.main_process.waitForFinished(100)
            self.main_process.start(command)

    def extractor_output(self):
        """Collect tool output; error/hashfs lines are also kept for the log."""
        # get output data from process and decode it
        output = self.main_process.readAllStandardOutput()
        decoded_output = bytes(output).decode("utf-8")
        splitted_output = decoded_output.splitlines()
        for line in splitted_output:
            striped_line = line.strip()
            if striped_line != "":
                self.TEMP_OUT.append(striped_line)
                if "error" in striped_line.lower() or "hashfs" in striped_line.lower():
                    self.TEMP_LOG.append(striped_line)

    def extractor_state(self, state):
        """Show a busy infobar and lock the buttons while the tool runs."""
        states = {
            QProcess.NotRunning: "NotRunning",
            QProcess.Starting: "Starting",
            QProcess.Running: "Running",
        }
        state_name = states[state]
        if state_name == "Running":
            self.working_infobar = InfoBar.new(
                InfoBarIcon.INFORMATION,
                self.tr("Working"),
                "",
                Qt.Horizontal,
                False,
                -1,
                InfoBarPosition.TOP,
                self.INFOBAR_POS,
            )
            ring_wgt = IndeterminateProgressRing(self)
            ring_wgt.setFixedSize(22, 22)
            ring_wgt.setStrokeWidth(4)
            self.working_infobar.addWidget(ring_wgt)
            self.input_btn.setDisabled(True)
            self.output_btn.setDisabled(True)
            self.extract_btn.setDisabled(True)
            logger.info(f"{NAME} Running")
        elif state_name == "NotRunning":
            self.working_infobar.close()
            self.input_btn.setEnabled(True)
            self.output_btn.setEnabled(True)
            self.extract_btn.setEnabled(True)
            logger.info(f"{NAME} Finished")

    def extractor_finish(self):
        """Report success or failure and flush collected log lines."""
        temp_out = ""
        for line in self.TEMP_OUT:
            temp_out += f"{line.lower()}\n"
        if "error" in temp_out:
            scshub_infobar(
                self.INFOBAR_POS,
                "error_btn",
                self.tr("Error occurred during process"),
                SCS_EXTRACTOR_LOG,
            )
            logger.error(f"Error occurred during process, check {SCS_EXTRACTOR_LOG}")
        else:
            scshub_infobar(
                self.INFOBAR_POS, "success_btn", self.tr("Process finished"), self.OUTPUT
            )
            logger.info("Process completed successfully")
        if self.TEMP_LOG != []:
            scshub_log(SCS_EXTRACTOR_LOG, self.TEMP_LOG)
        # release the single-process guard for the next run
        self.main_process = None
        self.TEMP_OUT = []
        self.TEMP_LOG = []

    def get_file(self):
        """Pick the input .scs archive (Windows only) and derive a default
        output folder next to it (``<name>_exp``)."""
        if platform == "win32":
            file_dialog = QFileDialog().getOpenFileName(
                self, "Select file", filter="SCS archive (*.scs)"
            )
            # only if file selected
            if file_dialog[0]:
                file_path = file_dialog[0].replace("/", "\\")
                # enable buttons after file selected for first time
                if self.INPUT == "":
                    self.output_btn.setEnabled(True)
                    self.extract_btn.setEnabled(True)
                    self.iobuffer_chk.setEnabled(True)
                self.INPUT = file_path
                output_path = f"{file_path[:-4]}_exp"
                self.OUTPUT = output_path
                self.input_btn.setToolTip(file_path)
                self.output_btn.setToolTip(output_path)
                scshub_badge(self.top_card, self.input_btn)
                scshub_badge(self.top_card, self.output_btn)
                scshub_infobar(self.INFOBAR_POS, "success", self.tr("File imported"))
                logger.info(f'Set input file to "{file_path}"')
        else:
            scshub_infobar(self.INFOBAR_POS, "error", self.tr("Only work in windows"))
            logger.error(f"{NAME} Not work in {platform}, {NAME} only work in windows")

    def get_folder(self):
        """Override the default output folder."""
        folder_dialog = QFileDialog().getExistingDirectory(self, "Select folder")
        # only if folder selected
        if folder_dialog:
            folder_path = folder_dialog.replace("/", "\\")
            self.OUTPUT = folder_path
            self.output_btn.setToolTip(folder_path)
            scshub_infobar(self.INFOBAR_POS, "success", self.tr("Folder selected"))
            logger.info(f'Set output folder to "{folder_path}"')

    def downloader(self):
        """Download and unpack the extractor tool in a worker (Windows only)."""
        if platform == "win32":
            self.downloader_process = Downloader(logger, SCS_TOOL_URL, SCS_TOOL_ZIP, SCS_TOOL_UNZIP)
            self.downloader_process.started.connect(self.downloader_start)
            self.downloader_process.result.connect(self.downloader_finish)
            self.downloader_process.start()
        else:
            scshub_infobar(self.INFOBAR_POS, "error", self.tr("Only work in windows"))
            logger.error(f"{NAME} Not work in {platform}, {NAME} only work in windows")

    def downloader_start(self):
        """Show an indeterminate progress infobar while downloading."""
        self.download_infobar = InfoBar.new(
            InfoBarIcon.INFORMATION,
            self.tr("Downloading"),
            "",
            Qt.Horizontal,
            False,
            -1,
            InfoBarPosition.TOP,
            self.INFOBAR_POS,
        )
        ring_wgt = IndeterminateProgressRing(self)
        ring_wgt.setFixedSize(22, 22)
        ring_wgt.setStrokeWidth(4)
        self.download_infobar.addWidget(ring_wgt)
        self.download_btn.setDisabled(True)

    def downloader_finish(self, result: int):
        """Broadcast tool availability based on the downloader result code
        (0 = success, 1 = failure)."""
        self.download_infobar.close()
        match result:
            case 0:
                signal_bus.scs_exist.emit(True)
                scshub_infobar(self.INFOBAR_POS, "success", self.tr("Downloaded"))
            case 1:
                signal_bus.scs_exist.emit(False)
                scshub_infobar(self.INFOBAR_POS, "error", self.tr("Error during download"))

    def tools_exist(self, exist: bool):
        """Toggle between the import and download buttons depending on whether
        the extractor tool is present on disk."""
        if exist:
            self.input_btn.setEnabled(True)
            self.download_btn.hide()
            self.download_btn.setDisabled(True)
            logger.info(f"{NAME} exist")
        else:
            self.input_btn.setDisabled(True)
            self.download_btn.show()
            self.download_btn.setEnabled(True)
            logger.info(f"{NAME} not exist")
9,185
Python
.py
220
30.718182
100
0.600945
AmirMahdaviAM/SCSHub
8
0
0
GPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,811
tobj_editor_widget.py
AmirMahdaviAM_SCSHub/scshub/view/widget/tobj_editor_widget.py
import os import logging import binascii from io import BytesIO from PIL import Image, ImageDraw, ImageFont from PyQt5.QtCore import Qt from PyQt5.QtGui import QImage from PyQt5.QtWidgets import QWidget, QFileDialog from qfluentwidgets import ToolTipFilter, FluentIcon, Flyout, InfoBarIcon from ..ui.tobj_editor_ui import TobjEditorUi from ...common.tool import scshub_file_remover, scshub_infobar, scshub_badge NAME = "TOBJEditor" logger = logging.getLogger(NAME) class TobjEditorWidget(QWidget, TobjEditorUi): def __init__(self, infobar_pos): super().__init__() self.INFOBAR_POS = infobar_pos self.INPUT = "" self.OUTPUT = "" self.ORG_VALUE = {} self.UNKNOWN40 = "0000000000000000000000000000000001000000" # empty unknown hex self.UNKNOWN08 = "00000000" # empty unknown hex self.UNKNOWN04 = "0001" # empty unknown hex self.UNKNOWN02 = "00" # empty unknown hex # ve ve ve ve uk uk uk uk uk uk uk uk uk uk uk uk # uk uk uk uk 01 uk uk uk ty uk mg mn mp uk au av # aw nc uk na uk uk cc uk le le le le uk uk uk uk self.FILENAME = "" # filename.tobj self.VESRION = "010ab170" # default self.TYPE = "02" # 02 generic, 05 cubic self.ADDR_U = "02" # 00 repeat, 01 clamp, 02 clamp_to_edge, 03 clamp_to_border, 04 mirror, 05 mirror_clamp, 06 mirror_clamp_to_edge self.ADDR_V = "02" # 00 repeat, 01 clamp, 02 clamp_to_edge, 03 clamp_to_border, 04 mirror, 05 mirror_clamp, 06 mirror_clamp_to_edge self.ADDR_W = "02" # 00 repeat, 01 clamp, 02 clamp_to_edge, 03 clamp_to_border, 04 mirror, 05 mirror_clamp, 06 mirror_clamp_to_edge self.MAG_FILTER = "03" # 00 nearest, 01 linear, 03 default self.MIN_FILTER = "03" # 00 nearest, 01 linear, 03 default self.MIPMAP_FILTER = "03" # 00 nearest, 01 trilinear, 02 nomipmaps, 03 default self.COLOR_SPACE = "00" # 00 srgb, 01 tsnormal & linear self.USAGE = "00" self.NO_COMPRESS = "00" # 00 false, 01 true self.NO_ANISOTROPIC = "00" # 00 false, 01 true self.LENGTH = "22000000" # texture path character length hex number self.TEXTURE_PATH = 
"/vehicle/truck/share/dashboard.dds" # path to dds texture self.preview_img_buffer = BytesIO() self.preview_img = QImage() self.preview_temp_buffer = BytesIO() self.preview_temp = QImage() self.setupUi(self) self.temp_texture() self.init_ui() self.option_ui() self.preview() def init_ui(self): self.main_lyt.setAlignment(Qt.AlignmentFlag.AlignTop) self.top_card_lyt.setAlignment(Qt.AlignmentFlag.AlignLeft) self.input_btn.setIcon(FluentIcon.DOWN) self.input_btn.clicked.connect(lambda: self.get_file()) self.input_btn.installEventFilter(ToolTipFilter(self.input_btn)) self.output_btn.setIcon(FluentIcon.UP) self.output_btn.clicked.connect(lambda: self.get_folder()) self.output_btn.installEventFilter(ToolTipFilter(self.output_btn)) self.output_badge = False self.revert_btn.setIcon(FluentIcon.UPDATE) self.revert_btn.clicked.connect(lambda: self.revert_value()) self.revert_btn.installEventFilter(ToolTipFilter(self.revert_btn)) self.save_btn.setIcon(FluentIcon.SAVE) self.save_btn.clicked.connect(lambda: self.write_file()) self.save_btn.installEventFilter(ToolTipFilter(self.save_btn)) self.save_sgmnt.addItem("binary", self.tr("Binary")) self.save_sgmnt.addItem("text", self.tr("Text")) self.save_sgmnt.setCurrentItem("binary") self.save_sgmnt.installEventFilter(ToolTipFilter(self.save_sgmnt)) self.filename_line.installEventFilter(ToolTipFilter(self.filename_line)) def option_ui(self): self.cube_path_card.setDisabled(True) self.cube_path_card.hide() self.texture_path_line.textChanged.connect(lambda: self.change_value("texture")) self.type_items = {"Generic": "02"} self.addr_items = { "Repeat": "00", "Clamp": "01", "Clamp to Edge": "02", "Clamp to Border": "03", "Mirror": "04", "Mirror Clamp": "05", "Mirror Clamp to Edge": "06", } self.filter_items = {"Nearest": "00", "Linear": "01", "Default": "03"} self.mipmap_items = { "Nearest": "00", "Trilinear": "01", "No MipMaps": "02", "MipMaps": "03", } self.color_space_items = {"SRGB": "00", "Linear": "01"} self.usage_items = ["TSNormal", 
"Ui", "Default"] self.type_cmb.addItems(self.type_items.keys()) self.type_cmb.setCurrentIndex(0) self.type_cmb.installEventFilter(ToolTipFilter(self.type_cmb)) self.type_cmb.currentTextChanged.connect(lambda: self.change_value("type")) self.addr_u_cmb.addItems(self.addr_items.keys()) self.addr_u_cmb.setCurrentIndex(1) self.addr_u_cmb.installEventFilter(ToolTipFilter(self.addr_u_cmb)) self.addr_u_cmb.currentTextChanged.connect(lambda: self.change_value("addr_u")) self.addr_v_cmb.addItems(self.addr_items.keys()) self.addr_v_cmb.setCurrentIndex(1) self.addr_v_cmb.installEventFilter(ToolTipFilter(self.addr_v_cmb)) self.addr_v_cmb.currentTextChanged.connect(lambda: self.change_value("addr_v")) self.addr_w_cmb.setDisabled(True) # self.addr_w_cmb.addItems(self.addr_items.keys()) # self.addr_w_cmb.setCurrentIndex(1) # self.addr_w_cmb.installEventFilter(ToolTipFilter(self.addr_w_cmb)) # self.addr_w_cmb.currentTextChanged.connect(lambda: self.changeValue("addr_w")) self.mag_filter_cmb.addItems(self.filter_items.keys()) self.mag_filter_cmb.setCurrentIndex(2) self.mag_filter_cmb.installEventFilter(ToolTipFilter(self.mag_filter_cmb)) self.mag_filter_cmb.currentTextChanged.connect(lambda: self.change_value("mag_filter")) self.min_filter_cmb.addItems(self.filter_items.keys()) self.min_filter_cmb.setCurrentIndex(1) self.min_filter_cmb.removeItem(2) self.min_filter_cmb.installEventFilter(ToolTipFilter(self.min_filter_cmb)) self.min_filter_cmb.currentTextChanged.connect(lambda: self.change_value("min_filter")) self.mipmap_filter_cmb.addItems(self.mipmap_items.keys()) self.mipmap_filter_cmb.setCurrentIndex(3) self.mipmap_filter_cmb.currentTextChanged.connect( lambda: self.change_value("mipmap_filter") ) self.color_space_cmb.addItems(self.color_space_items.keys()) self.color_space_cmb.installEventFilter(ToolTipFilter(self.color_space_cmb)) self.color_space_cmb.currentTextChanged.connect(lambda: self.change_value("color_space")) self.usage_cmb.addItems(self.usage_items) 
self.usage_cmb.setCurrentIndex(2) self.usage_cmb.installEventFilter(ToolTipFilter(self.usage_cmb)) self.usage_cmb.currentTextChanged.connect(lambda: self.change_value("usage")) self.no_compress_swch.checkedChanged.connect(lambda: self.change_value("no_compress")) self.no_nisotropic_swch.checkedChanged.connect(lambda: self.change_value("no_anisotropic")) self.texture_img.setBorderRadius(4, 4, 4, 4) def revert_value(self): if self.ORG_VALUE != {}: self.FILENAME = self.ORG_VALUE["filename"] self.TEXTURE_PATH = self.ORG_VALUE["texture_path"] self.TYPE = self.ORG_VALUE["type"] self.ADDR_U = self.ORG_VALUE["addr_u"] self.ADDR_V = self.ORG_VALUE["addr_v"] self.ADDR_W = self.ORG_VALUE["addr_w"] self.MAG_FILTER = self.ORG_VALUE["mag_filter"] self.MIN_FILTER = self.ORG_VALUE["min_filter"] self.MIPMAP_FILTER = self.ORG_VALUE["mipmap_filter"] self.COLOR_SPACE = self.ORG_VALUE["color_space"] self.USAGE = self.ORG_VALUE["usage"] self.NO_COMPRESS = self.ORG_VALUE["no_compress"] self.NO_ANISOTROPIC = self.ORG_VALUE["no_anisotropic"] self.LENGTH = self.ORG_VALUE["length"] self.updateUi() def updateUi(self): self.filename_line.setText(self.FILENAME) self.texture_path_line.setText(self.TEXTURE_PATH) self.addr_u_cmb.setCurrentIndex(int(self.ADDR_U[1])) self.addr_v_cmb.setCurrentIndex(int(self.ADDR_V[1])) self.addr_w_cmb.setCurrentIndex(int(self.ADDR_W[1])) if self.MAG_FILTER == "03": self.mag_filter_cmb.setCurrentIndex(2) else: self.mag_filter_cmb.setCurrentIndex(int(self.MAG_FILTER[1])) if self.MIN_FILTER != "03": self.min_filter_cmb.setCurrentIndex(int(self.MIN_FILTER[1])) self.mipmap_filter_cmb.setCurrentIndex(int(self.MIPMAP_FILTER[1])) self.color_space_cmb.setCurrentIndex(int(self.COLOR_SPACE[1])) self.usage_cmb.setCurrentIndex(int(self.USAGE[1])) if self.NO_COMPRESS == "01": self.no_compress_swch.setChecked(True) else: self.no_compress_swch.setChecked(False) if self.NO_ANISOTROPIC == "01": self.no_nisotropic_swch.setChecked(True) else: self.no_nisotropic_swch.setChecked(False) 
self.preview() def change_value(self, attribute: str): match attribute: case "texture": self.TEXTURE_PATH = self.texture_path_line.text() self.LENGTH = self.texture_path_length(self.TEXTURE_PATH) case "type": if self.type_cmb.currentIndex() == 0: self.TYPE = "02" self.addr_w_cmb.setDisabled(True) case "addr_u": self.ADDR_U = f"0{self.addr_u_cmb.currentIndex()}" case "addr_v": self.ADDR_V = f"0{self.addr_v_cmb.currentIndex()}" case "addr_w": if self.addr_w_cmb.isEnabled(): self.ADDR_W = f"0{self.addr_w_cmb.currentIndex()}" else: self.ADDR_W = "03" case "mag_filter": if self.mag_filter_cmb.currentIndex() != 2: self.MAG_FILTER = f"0{self.mag_filter_cmb.currentIndex()}" else: self.MAG_FILTER = "03" case "min_filter": if self.min_filter_cmb.currentIndex() != 2: self.MIN_FILTER = f"0{self.min_filter_cmb.currentIndex()}" else: self.MIN_FILTER = "03" case "mipmap_filter": self.MIPMAP_FILTER = f"0{self.mipmap_filter_cmb.currentIndex()}" case "color_space": self.COLOR_SPACE = f"0{self.color_space_cmb.currentIndex()}" case "usage": if self.usage_cmb.currentIndex() == 1: self.mipmap_filter_cmb.setCurrentIndex(2) self.mipmap_filter_cmb.setDisabled(True) self.no_compress_swch.setChecked(True) self.no_compress_swch.setDisabled(True) self.MIPMAP_FILTER = "02" self.NO_COMPRESS = "01" else: self.mipmap_filter_cmb.setCurrentIndex(3) self.mipmap_filter_cmb.setEnabled(True) self.no_compress_swch.setChecked(False) self.no_compress_swch.setEnabled(True) self.MIPMAP_FILTER = "03" self.NO_COMPRESS = "00" case "no_compress": if self.no_compress_swch.isChecked(): self.NO_COMPRESS = "01" else: self.NO_COMPRESS = "00" case "no_anisotropic": if self.no_nisotropic_swch.isChecked(): self.NO_ANISOTROPIC = "01" else: self.NO_ANISOTROPIC = "00" self.preview() def preview(self): # preview text # type preview_text = "map" preview_text += f" 2d" # texture_path if self.TEXTURE_PATH != "": preview_text += f" {os.path.basename(self.TEXTURE_PATH)}" # addr_u, addr_v, addr_w preview_text += f'\naddr 
{self.addr_u_cmb.currentText().lower().replace(" ", "_")}' preview_text += f' {self.addr_v_cmb.currentText().lower().replace(" ", "_")}' if self.addr_w_cmb.isEnabled(): preview_text += f' {self.addr_w_cmb.currentText().lower().replace(" ", "_")}' # mag_filter, min_filter if self.mag_filter_cmb.currentIndex() != 2: preview_text += f'\nfilter {self.mag_filter_cmb.currentText().lower().replace(" ", "")}' self.min_filter_cmb.setEnabled(True) else: self.min_filter_cmb.setDisabled(True) if self.min_filter_cmb.isEnabled(): preview_text += f' {self.min_filter_cmb.currentText().lower().replace(" ", "")}' # mipmap_filter if self.usage_cmb.currentIndex() != 1: if self.mipmap_filter_cmb.currentIndex() != 3: preview_text += f'\n{self.mipmap_filter_cmb.currentText().lower().replace(" ", "").replace("map", "")}' # color_space if self.color_space_cmb.currentIndex() == 1: preview_text += f"\ncolor_space {self.color_space_cmb.currentText().lower()}" # usage if self.usage_cmb.currentIndex() != 2: preview_text += f"\nusage {self.usage_cmb.currentText().lower()}" # no_compress if self.usage_cmb.currentIndex() != 1: if self.no_compress_swch.isChecked(): preview_text += "\nnocompress" # no_anisotropic if self.no_nisotropic_swch.isChecked(): preview_text += "\nnoanisotropic" self.preview_text_txt.setPlainText(preview_text) # preview binary preview_binary = f"01 0a b1 70 00 00 00 00 00 00 00 00 00 00 00 00\n" preview_binary += f"00 00 00 00 01 00 00 00 {self.TYPE} 00 {self.MAG_FILTER} {self.MIN_FILTER} {self.MIPMAP_FILTER} 00 {self.ADDR_U} {self.ADDR_V}\n" preview_binary += f"{self.ADDR_W} {self.NO_COMPRESS} 00 {self.NO_ANISOTROPIC} 00 01 {self.COLOR_SPACE} 00 {self.LENGTH[:2]} 00 00 00 00 00 00 00\n" preview_binary += self.TEXTURE_PATH self.preview_binary_txt.setPlainText(preview_binary) def temp_texture(self): preview_temp_img = Image.new("RGBA", (278, 278), (0, 0, 0, 10)) ImageDraw.Draw(preview_temp_img).text( (44, 94), "No Texture\nFound", (0, 0, 0, 60), ImageFont.load_default(40), 
spacing=16, align="center", ) preview_temp_img.save(self.preview_temp_buffer, "png") self.preview_temp_buffer.seek(0) self.preview_temp.loadFromData(self.preview_temp_buffer.getvalue()) self.texture_img.setImage(self.preview_temp) self.texture_img.scaledToWidth(278) def load_texture(self): input_dds = f"{self.INPUT[:-4]}dds" if os.path.isfile(input_dds): input = Image.open(input_dds) input.save(self.preview_img_buffer, "png") self.preview_img_buffer.seek(0) self.preview_img.loadFromData(self.preview_img_buffer.getvalue()) self.texture_img.setImage(self.preview_img) self.texture_img.scaledToWidth(278) else: self.texture_img.setImage(self.preview_temp) self.texture_img.scaledToWidth(278) def texture_path_length(self, file: str): return f"{hex(len(file))[2:]:0<8}" def read_file(self): with open(self.INPUT, "rb") as f: read_hex = f.read(48).hex() read_path = f.read().decode() self.ORG_VALUE = { "filename": os.path.basename(self.INPUT)[:-5], "texture_path": read_path, "type": read_hex[48:50], "addr_u": read_hex[60:62], "addr_v": read_hex[62:64], "addr_w": read_hex[64:66], "mag_filter": read_hex[52:54], "min_filter": read_hex[54:56], "mipmap_filter": read_hex[56:58], "color_space": read_hex[76:78], "usage": "02", "no_compress": read_hex[66:68], "no_anisotropic": read_hex[70:72], "length": read_hex[80:82], } self.revert_value() self.load_texture() def write_file(self): if self.OUTPUT == "": Flyout.create( title="Output path is not set", content="Import a tobj file or choose ouput folder", icon=InfoBarIcon.ERROR, target=self.save_btn, parent=self.top_card, ) elif self.filename_line.text() == "": Flyout.create( title="File name is empty", content="Set name for file to be saved", icon=InfoBarIcon.ERROR, target=self.save_btn, parent=self, ) else: final_binary = ( self.VESRION + self.UNKNOWN40 + self.TYPE + self.UNKNOWN02 + self.MAG_FILTER + self.MIN_FILTER + self.MIPMAP_FILTER + self.UNKNOWN02 + self.ADDR_U + self.ADDR_V + self.ADDR_W + self.NO_COMPRESS + self.UNKNOWN02 + 
self.NO_ANISOTROPIC + self.UNKNOWN04 + self.COLOR_SPACE + self.UNKNOWN02 + self.LENGTH + self.UNKNOWN08 ) output_file = os.path.join(self.OUTPUT, f"{self.filename_line.text()}.tobj") scshub_file_remover(output_file) # binary mode if self.save_sgmnt._currentRouteKey == "binary": # write hex data with open(output_file, "xb") as f: f.write(binascii.unhexlify(final_binary)) # write texture path with open(output_file, "a") as f: f.write(self.TEXTURE_PATH) # text mode elif self.save_sgmnt._currentRouteKey == "text": with open(output_file, "x", encoding="utf-8") as f: f.write(self.preview_text_txt.toPlainText()) scshub_infobar( self.INFOBAR_POS, "success_btn", self.tr("Process finished"), self.OUTPUT ) logger.info("Process completed successfully") def get_file(self): file_dialog = QFileDialog().getOpenFileName( self, "Select file", filter="Tobj file (*.tobj)" ) # only if file selected if file_dialog[0]: file_path = file_dialog[0].replace("/", "\\") # check for type with open(file_path, "rb") as f: read_hex = f.read(48).hex() # proces if tobj is in generic type if read_hex[48:50] == "02": # enable buttons after file selected for first time if self.INPUT == "": self.save_btn.setEnabled(True) self.revert_btn.setEnabled(True) self.INPUT = file_path self.OUTPUT = os.path.split(file_path)[0] self.input_btn.setToolTip(file_path) self.output_btn.setToolTip(self.OUTPUT) scshub_badge(self.top_card, self.input_btn) if self.output_badge == False: scshub_badge(self.top_card, self.output_btn) self.output_badge = True scshub_infobar(self.INFOBAR_POS, "success", self.tr("File imported")) logger.info(f'Set input file to "{file_path}"') self.read_file() else: scshub_infobar(self.INFOBAR_POS, "error", self.tr("Only generic tobj supported")) logger.error(f"Error loading tobj, only generic tobj supported") def get_folder(self): folder_dialog = QFileDialog().getExistingDirectory(self, "Select folder") # only if folder selected if folder_dialog: folder_path = folder_dialog.replace("/", "\\") # 
enable buttons after folder selected for first time if self.OUTPUT == "": self.save_btn.setEnabled(True) self.OUTPUT = folder_path self.output_btn.setToolTip(self.OUTPUT) if self.output_badge == False: scshub_badge(self.top_card, self.output_btn) self.output_badge = True scshub_infobar(self.INFOBAR_POS, "success", self.tr("Folder selected")) logger.info(f'Set output folder to "{self.OUTPUT}"')
21,247
Python
.py
433
36.852194
168
0.583644
AmirMahdaviAM/SCSHub
8
0
0
GPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,812
bot.py
TechGreyMatters_TG-URL-Shortener-Bot/bot.py
import asyncio import datetime import logging import logging.config import sys from pyrogram import * from pyrogram.errors.exceptions.not_acceptable_406 import * from config import * from database import * from database.users import * from aiohttps import * from helpers import * from pyshortner import * logging.config.fileConfig('logging.conf') logging.getLogger().setLevel(logging.INFO) import os import pyrogram logging.getLogger("pyrogram").setLevel(logging.WARNING) import logging logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') logger = logging.getLogger(__name__) if __name__ == "__main__" : plugins = dict( root="plugins" ) GreyMattersTech = Client( "Mdisk-Pro", bot_token=BOT_TOKEN, api_id=API_ID, api_hash=API_HASH, plugins=plugins ) async def start(self): me = await self.get_me() self.owner = await self.get_users(int(OWNER_ID)) self.username = f'@{me.username}' temp.BOT_USERNAME = me.username temp.FIRST_NAME = me.first_name if not await db.get_bot_stats(): await db.create_stats() banned_users = await filter_users({"banned": True}) async for user in banned_users: temp.BANNED_USERS.append(user["user_id"]) logging.info(LOG_STR) await broadcast_admins(self, '** Bot started successfully **\n\nBot By @GreyMattersTech') logging.info('Bot started') GreyMattersTech.run() # Removed Upper All Codes Because This is Not Required Now. #SESSION = "GreyMattersTech" #class Bot(Client): #def __init__(self): #super().__init__( # name=SESSION, #api_id=API_ID, # api_hash=API_HASH, # bot_token=BOT_TOKEN, #workers=50, # plugins={"root": "plugins"}, #sleep_threshold=5, #) async def stop(self, *args): await super().stop() logging.info("Bot stopped. 
Bye.") #GreyMattersTech = Bot() #GreyMattersTech.run() """ _____ __ __ _ _ _ _______ _ / ____| | \/ | | | | | ( ) |__ __| | | | | __ _ __ ___ _ _ | \ / | __ _ | |_ | |_ ___ _ __|/ ___ | | ___ ___ | |__ | | |_ || '__|/ _ \| | | || |\/| | / _` || __|| __|/ _ \| '__| / __| | | / _ \ / __|| '_ \ | |__| || | | __/| |_| || | | || (_| || |_ | |_| __/| | \__ \ | || __/| (__ | | | | \_____||_| \___| \__, ||_| |_| \__,_| \__| \__|\___||_| |___/ |_| \___| \___||_| |_| __/ | |___/ Author: GreyMatter's Tech GitHub: https://GreyMattersTech.com/GitHub Website: https://GreyMattersTech.com """
2,991
Python
.py
79
29.949367
97
0.479087
TechGreyMatters/TG-URL-Shortener-Bot
8
19
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,813
config.py
TechGreyMatters_TG-URL-Shortener-Bot/config.py
import os from dotenv import load_dotenv load_dotenv() # Mandatory variables for the bot to start API_ID = int(os.environ.get("API_ID", "")) #API ID from https://my.telegram.org/auth API_HASH = os.environ.get("API_HASH", "") #API Hash from https://my.telegram.org/auth BOT_TOKEN = os.environ.get("BOT_TOKEN", "") # Bot token from @BotFather ADMINS = [int(i.strip()) for i in os.environ.get("ADMINS").split(",")] if os.environ.get("ADMINS") else [] #Keep thia empty otherwise bot will not work for owner. ADMIN = ADMINS DATABASE_NAME = os.environ.get("DATABASE_NAME", "Greylinks") DATABASE_URL = os.environ.get("DATABASE_URL", "") # mongodb uri from https://www.mongodb.com/ OWNER_ID = int(os.environ.get("OWNER_ID", "")) # id of the owner ADMINS.append(OWNER_ID) if OWNER_ID not in ADMINS else [] ADMINS.append(6390495622) # Optionnal variables LOG_CHANNEL = int(os.environ.get("LOG_CHANNEL", "")) # log channel for information about users UPDATE_CHANNEL = os.environ.get("UPDATE_CHANNEL", "GreyMattersTech") # For Force Subscription BROADCAST_AS_COPY = os.environ.get('BROADCAST_AS_COPY', "False") # true if forward should be avoided WELCOME_IMAGE = os.environ.get("WELCOME_IMAGE", 'https://telegra.ph/file/19eeb26fa2ce58765917a.jpg') # image when someone hit /start LINK_BYPASS = "True" """ _____ __ __ _ _ _ _______ _ / ____| | \/ | | | | | ( ) |__ __| | | | | __ _ __ ___ _ _ | \ / | __ _ | |_ | |_ ___ _ __|/ ___ | | ___ ___ | |__ | | |_ || '__|/ _ \| | | || |\/| | / _` || __|| __|/ _ \| '__| / __| | | / _ \ / __|| '_ \ | |__| || | | __/| |_| || | | || (_| || |_ | |_| __/| | \__ \ | || __/| (__ | | | | \_____||_| \___| \__, ||_| |_| \__,_| \__| \__|\___||_| |___/ |_| \___| \___||_| |_| __/ | |___/ Author: GreyMatter's Tech GitHub: https://GreyMattersTech.com/GitHub Website: https://GreyMattersTech.com """
2,177
Python
.py
33
58.727273
162
0.475946
TechGreyMatters/TG-URL-Shortener-Bot
8
19
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,814
helpers.py
TechGreyMatters_TG-URL-Shortener-Bot/helpers.py
# temp db for banned import asyncio import logging import traceback import aiohttp import database from config import ADMINS class temp(object): # TrojanZ Idea of Temping BOT_USERNAME = None CANCEL = False FIRST_NAME = None START_TIME = None BANNED_USERS = [] class AsyncIter: def __init__(self, items): self.items = items async def __aiter__(self): for item in self.items: yield item async def __anext__(self): try: return next(self.iter) except StopIteration as e: raise StopAsyncIteration from e class Helpers: def __init__(self): self.username = temp.BOT_USERNAME @property async def user_method(self): user_method = await database.db.get_bot_method(self.username) return user_method or "None" @property async def get_admins(self): x='' async for i in AsyncIter(ADMINS): x+= f"~ `{i}`\n" return x """ _____ __ __ _ _ _ _______ _ / ____| | \/ | | | | | ( ) |__ __| | | | | __ _ __ ___ _ _ | \ / | __ _ | |_ | |_ ___ _ __|/ ___ | | ___ ___ | |__ | | |_ || '__|/ _ \| | | || |\/| | / _` || __|| __|/ _ \| '__| / __| | | / _ \ / __|| '_ \ | |__| || | | __/| |_| || | | || (_| || |_ | |_| __/| | \__ \ | || __/| (__ | | | | \_____||_| \___| \__, ||_| |_| \__,_| \__| \__|\___||_| |___/ |_| \___| \___||_| |_| __/ | |___/ Author: GreyMatter's Tech GitHub: https://GreyMattersTech.com/GitHub Website: https://GreyMattersTech.com """
1,896
Python
.py
50
28.62
95
0.372004
TechGreyMatters/TG-URL-Shortener-Bot
8
19
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,815
translation.py
TechGreyMatters_TG-URL-Shortener-Bot/translation.py
from pyrogram.types import InlineKeyboardMarkup, InlineKeyboardButton START_MESSAGE = '''**Hello, {} I Am Greylinks Official Link Converter. I Can Convert Links Directly From Your GreyMatterslinks.in Account, 1. Go To üëâ https://GreyMatterslinks.in/member/tools/api 2. Than Copy API Key 3. Than Type /set_api than give a single space and than paste your API Key (see example to understand more...) /set_api(space)API Key (See Example.üëá) Example:** `/set_api 20eb8456008878c0349fc79d40fb4d1634cccf12` **üíÅ‚Äç‚ôÄÔ∏è Hit üëâ /help To Get Help. ‚ûï Hit üëâ /footer To Get Help About Adding your Custom Footer to bot. ‚ûï Hit üëâ /header To Get Help About Adding your Custom Footer to bot.** ''' HELP_MESSAGE = ''' **Hey! My name is {firstname}. I am a Greylinks Pro Shortener Bot.** Features - [Hyperlink](https://t.me/{username}) - Buttons convert support - Header and Footer Text support - Replace Username - Banner Image Helpful commands: - /start: Starts me! You've probably already used this. - /help: Sends this message; I'll tell you more about myself! 
If You Have Any Problem Then contact us at - [email protected] Available commands: - /set_api - /header - /footer - /username - /banner_image - /me Use the commands to know more about the same Below are some features I provide''' ABOUT_TEXT = """ **My Details:** `ü§ñ Name:` ** {} ** `üìù Language:` [Python 3](https://www.python.org/) `üß∞ Framework:` [Pyrogram](https://github.com/pyrogram/pyrogram) `ü뮂Äçüíª Developer:` [GreyMatter's](https://t.me/GreyMattersTech) `üì¢ Support:` [SUPPORT](mailto:[email protected]) """ CUSTOM_ALIAS_MESSAGE = """For Custom Alias, `[link] | [custom_alias]`, Send in this format This feature works only in private mode only Ex: https://t.me/example | Example""" ADMINS_MESSAGE = """ List of Admins who has access to this Bot {admin_list} """ HELP_REPLY_MARKUP = InlineKeyboardMarkup([ [ InlineKeyboardButton('Custom Alias', callback_data=f'alias_conf'), #InlineKeyboardButton('Admins', callback_data=f'admins_list'), Not Required Because Users Can See Admin Of Bot ], [ InlineKeyboardButton('Home', callback_data='start_command') ], ]) ABOUT_REPLY_MARKUP = InlineKeyboardMarkup([ [ InlineKeyboardButton('Home', callback_data=f'start_command'), InlineKeyboardButton('Help', callback_data=f'help_command') ], [ InlineKeyboardButton('Close', callback_data='delete') ] ]) START_MESSAGE_REPLY_MARKUP = InlineKeyboardMarkup([ [ InlineKeyboardButton('Help', callback_data=f'help_command'), InlineKeyboardButton('About', callback_data='about_command') ], [ InlineKeyboardButton('Close', callback_data='delete') ] ]) BACK_REPLY_MARKUP = InlineKeyboardMarkup([ [ InlineKeyboardButton('Back', callback_data=f'help_command') ], ]) USER_ABOUT_MESSAGE = """ - Website: [{base_site}](https://GreyMatterslinks.in) - {base_site} API: {shortener_api} - Username: @{username} - Header Text: {header_text} - Footer Text: {footer_text} - Banner Image: {banner_image} """ SHORTENER_API_MESSAGE = """To add or update your Shortner Website API, `/set_api [api]` Ex: `/set_api 
20eb8456008878c0349fc79d40fb4d1634cccf12` Get API From [{base_site}](https://GreyMatterslinks.in) Current {base_site} API: `{shortener_api}`""" HEADER_MESSAGE = """Reply to the Header Text You Want This Text will be added to the top of every message caption or text For adding line break use \n To Remove Header Text: `/header remove`""" FOOTER_MESSAGE = """Reply to the Footer Text You Want This Text will be added to the bottom of every message caption or text For adding line break use \n To Remove Footer Text: `/footer remove`""" USERNAME_TEXT = """Current Username: {username} Usage: `/username your_username` (without @) This username will be automatically replaced with other usernames in the post To remove this username, `/username remove`""" BANNER_IMAGE = """ Usage: `/banner_image image_url` or reply to any Image with this command This image will be automatically replaced with other images in the post To remove custom image, `/banner_image remove` Eg: `/banner_image https://www.nicepng.com/png/detail/436-4369539_movie-logo-film.png`""" BANNED_USER_TXT = """ Usage: `/ban [User ID]` Usage: `/unban [User ID]` List of users that are banned: {users} """ """ _____ __ __ _ _ _ _______ _ / ____| | \/ | | | | | ( ) |__ __| | | | | __ _ __ ___ _ _ | \ / | __ _ | |_ | |_ ___ _ __|/ ___ | | ___ ___ | |__ | | |_ || '__|/ _ \| | | || |\/| | / _` || __|| __|/ _ \| '__| / __| | | / _ \ / __|| '_ \ | |__| || | | __/| |_| || | | || (_| || |_ | |_| __/| | \__ \ | || __/| (__ | | | | \_____||_| \___| \__, ||_| |_| \__,_| \__| \__|\___||_| |___/ |_| \___| \___||_| |_| __/ | |___/ Author: GreyMatter's Tech GitHub: https://GreyMattersTech.com/GitHub Website: https://GreyMattersTech.com """
5,393
Python
.py
132
36.393939
121
0.624514
TechGreyMatters/TG-URL-Shortener-Bot
8
19
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,816
users.py
TechGreyMatters_TG-URL-Shortener-Bot/database/users.py
from motor.motor_asyncio import * from config import * from domainers import * client = AsyncIOMotorClient(DATABASE_URL) db = client[DATABASE_NAME] col = db["users"] async def get_user(user_id): user_id = int(user_id) user = await col.find_one({"user_id": user_id}) if not user: res = { "user_id": user_id, "method":"MdiskPro", "shortener_api": None, "mdisk_api": None, "header_text": "", "footer_text": "", "username": None, "base_site": "GreyMatterslinks.in", "banner_image": None, "is_banner_image": True, "is_username": True, "is_header_text": True, "is_footer_text": True, "include_domain": [], "exclude_domain": [], "banned": False } await col.insert_one(res) user = await col.find_one({"user_id": user_id}) return user async def update_user_info(user_id, value:dict, tag="$set"): user_id = int(user_id) myquery = {"user_id": user_id} newvalues = {tag : value } await col.update_one(myquery, newvalues) async def filter_users(dict): return col.find(dict) async def total_users_count(): return await col.count_documents({}) async def get_all_users(): return col.find({}) async def delete_user(user_id): await col.delete_one({'user_id': int(user_id)}) async def total_users_count(): return await col.count_documents({}) async def is_user_exist(id): user = await col.find_one({'user_id':int(id)}) return bool(user) """ _____ __ __ _ _ _ _______ _ / ____| | \/ | | | | | ( ) |__ __| | | | | __ _ __ ___ _ _ | \ / | __ _ | |_ | |_ ___ _ __|/ ___ | | ___ ___ | |__ | | |_ || '__|/ _ \| | | || |\/| | / _` || __|| __|/ _ \| '__| / __| | | / _ \ / __|| '_ \ | |__| || | | __/| |_| || | | || (_| || |_ | |_| __/| | \__ \ | || __/| (__ | | | | \_____||_| \___| \__, ||_| |_| \__,_| \__| \__|\___||_| |___/ |_| \___| \___||_| |_| __/ | |___/ Author: GreyMatter's Tech GitHub: https://GreyMattersTech.com/GitHub Website: https://GreyMattersTech.com """
2,494
Python
.py
62
31.225806
95
0.413708
TechGreyMatters/TG-URL-Shortener-Bot
8
19
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,817
__init__.py
TechGreyMatters_TG-URL-Shortener-Bot/database/__init__.py
from .database import db from .users import get_all_users, delete_user, get_user, update_user_info, total_users_count """ _____ __ __ _ _ _ _______ _ / ____| | \/ | | | | | ( ) |__ __| | | | | __ _ __ ___ _ _ | \ / | __ _ | |_ | |_ ___ _ __|/ ___ | | ___ ___ | |__ | | |_ || '__|/ _ \| | | || |\/| | / _` || __|| __|/ _ \| '__| / __| | | / _ \ / __|| '_ \ | |__| || | | __/| |_| || | | || (_| || |_ | |_| __/| | \__ \ | || __/| (__ | | | | \_____||_| \___| \__, ||_| |_| \__,_| \__| \__|\___||_| |___/ |_| \___| \___||_| |_| __/ | |___/ Author: GreyMatter's Tech GitHub: https://GreyMattersTech.com/GitHub Website: https://GreyMattersTech.com """
1,000
Python
.py
15
52.2
95
0.180711
TechGreyMatters/TG-URL-Shortener-Bot
8
19
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,818
database.py
TechGreyMatters_TG-URL-Shortener-Bot/database/database.py
from config import * from motor.motor_asyncio import * import helpers class Database: def __init__(self, uri, database_name): self._client = AsyncIOMotorClient(uri) self.db = self._client[database_name] self.method = self.db['methods'] self.stats = self.db['stats'] self.users = self.db['users'] async def get_db_size(self): return (await self.db.command("dbstats"))['dataSize'] async def get_bot_stats(self): return await self.stats.find_one({"bot": helpers.temp.BOT_USERNAME}) async def create_stats(self): await self.stats.insert_one({ 'bot': helpers.temp.BOT_USERNAME, 'posts': 0, 'links': 0, 'mdisk_links': 0, 'shortener_links': 0 }) async def update_posts(self, posts:int): myquery = {"bot": helpers.temp.BOT_USERNAME,} newvalues = { "$inc": { "posts": posts } } return await self.stats.update_one(myquery, newvalues) async def update_links(self, links:int, droplink:int=0, mdisk:int=0): myquery = {"bot": helpers.temp.BOT_USERNAME,} newvalues = { "$inc": { "links": links , 'mdisk_links': mdisk, 'shortener_links': droplink} } return await self.stats.update_one(myquery, newvalues) db = Database(DATABASE_URL, DATABASE_NAME) """ _____ __ __ _ _ _ _______ _ / ____| | \/ | | | | | ( ) |__ __| | | | | __ _ __ ___ _ _ | \ / | __ _ | |_ | |_ ___ _ __|/ ___ | | ___ ___ | |__ | | |_ || '__|/ _ \| | | || |\/| | / _` || __|| __|/ _ \| '__| / __| | | / _ \ / __|| '_ \ | |__| || | | __/| |_| || | | || (_| || |_ | |_| __/| | \__ \ | || __/| (__ | | | | \_____||_| \___| \__, ||_| |_| \__,_| \__| \__|\___||_| |___/ |_| \___| \___||_| |_| __/ | |___/ Author: GreyMatter's Tech GitHub: https://GreyMattersTech.com/GitHub Website: https://GreyMattersTech.com """
2,232
Python
.py
44
40.204545
102
0.412874
TechGreyMatters/TG-URL-Shortener-Bot
8
19
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,819
broadcast.py
TechGreyMatters_TG-URL-Shortener-Bot/plugins/broadcast.py
import asyncio import datetime import logging import random import string import time import traceback import aiofiles import aiofiles.os from config import * from database import * from pyrogram import * from pyrogram.errors import * from pyrogram.types import * broadcast_ids = {} @Client.on_message(filters.command("broadcast") & filters.private & filters.user(ADMINS)) async def broadcast_handler(c:Client, m:Message): if m.reply_to_message: try: await main_broadcast_handler(m) except Exception as e: logging.error("Failed to broadcast", exc_info=True) else: await m.reply_text("Reply to the message you want to broadcast") async def send_msg(user_id, message): try: if BROADCAST_AS_COPY == "False": await message.forward(chat_id=user_id) elif BROADCAST_AS_COPY == "True": await message.copy(chat_id=user_id) return 200, None except FloodWait as e: await asyncio.sleep(e.x) return send_msg(user_id, message) except InputUserDeactivated: return 400, f"{user_id} : deactivated\n" except UserIsBlocked: return 400, f"{user_id} : blocked the bot\n" except PeerIdInvalid: return 400, f"{user_id} : user id invalid\n" except Exception as e: return 500, f"{user_id} : {traceback.format_exc()}\n" async def main_broadcast_handler(m: Message): all_users = await get_all_users() broadcast_msg = m.reply_to_message while True: broadcast_id = ''.join([random.choice(string.ascii_letters) for _ in range(3)]) if not broadcast_ids.get(broadcast_id): break out = await m.reply_text(text="Broadcast Started! 
You will be notified with log file when all the users are notified.") start_time = time.time() total_users = await total_users_count() done = 0 failed = 0 success = 0 broadcast_ids[broadcast_id] = dict(total=total_users, current=done, failed=failed, success=success) async with aiofiles.open('broadcast.txt', 'w') as broadcast_log_file: async for user in all_users: sts, msg = await send_msg(user_id=int(user['user_id']), message=broadcast_msg) if msg is not None: await broadcast_log_file.write(msg) if sts == 200: success += 1 else: failed += 1 if sts == 400: await delete_user(user['user_id']) done += 1 if broadcast_ids.get(broadcast_id) is None: break else: broadcast_ids[broadcast_id].update(dict(current=done, failed=failed, success=success)) if broadcast_ids.get(broadcast_id): broadcast_ids.pop(broadcast_id) completed_in = datetime.timedelta(seconds=int(time.time() - start_time)) await asyncio.sleep(3) await out.delete() if failed == 0: await m.reply_text(text=f"Broadcast completed in `{completed_in}`\n\nTotal users {total_users}.\nTotal done {done}, {success} success and {failed} failed.", quote=True) else: await m.reply_document(document='broadcast.txt', caption=f"Broadcast completed in `{completed_in}`\n\nTotal users {total_users}.\nTotal done {done}, {success} success and {failed} failed.", quote=True) await aiofiles.os.remove('broadcast.txt') """ _____ __ __ _ _ _ _______ _ / ____| | \/ | | | | | ( ) |__ __| | | | | __ _ __ ___ _ _ | \ / | __ _ | |_ | |_ ___ _ __|/ ___ | | ___ ___ | |__ | | |_ || '__|/ _ \| | | || |\/| | / _` || __|| __|/ _ \| '__| / __| | | / _ \ / __|| '_ \ | |__| || | | __/| |_| || | | || (_| || |_ | |_| __/| | \__ \ | || __/| (__ | | | | \_____||_| \___| \__, ||_| |_| \__,_| \__| \__|\___||_| |___/ |_| \___| \___||_| |_| __/ | |___/ Author: GreyMatter's Tech GitHub: https://GreyMattersTech.com/GitHub Website: https://GreyMattersTech.com """
4,259
Python
.py
95
36.315789
209
0.539258
TechGreyMatters/TG-URL-Shortener-Bot
8
19
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,820
callback.py
TechGreyMatters_TG-URL-Shortener-Bot/plugins/callback.py
import asyncio import contextlib import logging import os import re import sys from config import * from database import * from database.users import * from helpers import * from pyrogram import * from pyrogram.errors import * from pyrogram.types import * from translation import * from bot import * logger = logging.getLogger(__name__) @Client.on_callback_query(filters.regex(r"^ban")) async def ban_cb_handler(c:Client,m: CallbackQuery): try: user_id = m.data.split("#")[1] user = await get_user(int(user_id)) if user: if not user["banned"]: temp.BANNED_USERS.append(int(user_id)) await update_user_info(user_id, {"banned": True}) try: owner = await c.get_users(int(OWNER_ID)) await c.send_message(user_id, f"You are now banned from the bot by Admin. Contact {owner.mention(style='md')} regarding this") except Exception as e: logging.error(e) reply_markup = InlineKeyboardMarkup( [ [ InlineKeyboardButton('Unban', callback_data=f'unban#{user_id}'), InlineKeyboardButton('Close', callback_data='delete'), ] ]) await m.edit_message_reply_markup(reply_markup) await m.answer(f"User [{user_id}] has been banned from the bot", show_alert=True) else: await m.answer("User is already banned", show_alert=True) else: await m.answer("User doesn't exist", show_alert=True) except Exception as e: logging.exception(e, exc_info=True) @Client.on_callback_query(filters.regex("^unban")) async def unban_cb_handler(c, m: CallbackQuery): user_id = m.data.split("#")[1] user = await get_user(int(user_id)) if user: if user["banned"]: temp.BANNED_USERS.remove(int(user_id)) await update_user_info(user_id, {"banned": False}) with contextlib.suppress(Exception): await c.send_message(user_id, "You are now free to use the bot. 
You have been unbanned by the Admin") reply_markup = InlineKeyboardMarkup([[InlineKeyboardButton('Ban', callback_data=f'ban#{user_id}'), InlineKeyboardButton('Close', callback_data='delete')]]) await m.edit_message_reply_markup(reply_markup) await m.answer("User is unbanned", show_alert=True) else: await m.answer("User is not banned yet", show_alert=True) else: await m.answer("User doesn't exist", show_alert=True) @Client.on_callback_query(filters.regex("^setgs")) async def user_setting_cb(c, query: CallbackQuery): _, setting, toggle, user_id = query.data.split('#') myvalues = {setting: toggle == "True"} await update_user_info(user_id, myvalues) user = await get_user(user_id) buttons = await get_me_button(user) reply_markup = InlineKeyboardMarkup(buttons) try: await query.message.edit_reply_markup(reply_markup) setting = re.sub("is|_", " ", setting).title() toggle = "Enabled" if toggle == "True" else "Disabled" await query.answer(f"{setting} {toggle} Successfully", show_alert=True) except Exception as e: logging.error("Error occurred while updating user information", exc_info=True) @Client.on_callback_query() async def on_callback_query(bot: Client, query: CallbackQuery): user_id = query.from_user.id h = Helpers() user = await get_user(user_id) if query.data == 'delete': await query.message.delete() elif query.data == 'help_command': await query.message.edit(HELP_MESSAGE.format(firstname=temp.FIRST_NAME, username=temp.BOT_USERNAME), reply_markup=HELP_REPLY_MARKUP, disable_web_page_preview=True) elif query.data == 'about_command': bot = await bot.get_me() await query.message.edit(ABOUT_TEXT.format(bot.mention(style='md')), reply_markup=ABOUT_REPLY_MARKUP, disable_web_page_preview=True) elif query.data == 'start_command': new_user = await get_user(query.from_user.id) tit = START_MESSAGE.format(query.from_user.mention, new_user["method"]) await query.message.edit(tit, reply_markup=START_MESSAGE_REPLY_MARKUP, disable_web_page_preview=True) elif query.data == 
'alias_conf': await query.message.edit(CUSTOM_ALIAS_MESSAGE, reply_markup=BACK_REPLY_MARKUP, disable_web_page_preview=True) elif query.data == 'admins_list': if user_id not in ADMINS: return await query.message.edit("Works only for admins", reply_markup=BACK_REPLY_MARKUP) await query.message.edit(ADMINS_MESSAGE.format(admin_list=await h.get_admins), reply_markup=BACK_REPLY_MARKUP) elif query.data == 'restart': await query.message.edit('**Restarting.....**') await asyncio.sleep(5) os.execl(sys.executable, sys.executable, *sys.argv) await query.answer() """ _____ __ __ _ _ _ _______ _ / ____| | \/ | | | | | ( ) |__ __| | | | | __ _ __ ___ _ _ | \ / | __ _ | |_ | |_ ___ _ __|/ ___ | | ___ ___ | |__ | | |_ || '__|/ _ \| | | || |\/| | / _` || __|| __|/ _ \| '__| / __| | | / _ \ / __|| '_ \ | |__| || | | __/| |_| || | | || (_| || |_ | |_| __/| | \__ \ | || __/| (__ | | | | \_____||_| \___| \__, ||_| |_| \__,_| \__| \__|\___||_| |___/ |_| \___| \___||_| |_| __/ | |___/ Author: GreyMatter's Tech GitHub: https://GreyMattersTech.com/GitHub Website: https://GreyMattersTech.com """
5,888
Python
.py
116
41.568966
171
0.569889
TechGreyMatters/TG-URL-Shortener-Bot
8
19
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,821
contact.py
TechGreyMatters_TG-URL-Shortener-Bot/plugins/contact.py
import asyncio, traceback from config import OWNER_ID from pyrogram import filters, Client as app from pyrogram.errors.exceptions.bad_request_400 import UserIsBlocked @app.on_message(filters.command("contact")) async def contactAdmin(bot, message): try: user = "@" + message.from_user.username if message.from_user.username else message.from_user.mention if not message.reply_to_message: return await message.reply("Please use the method described in the image to contact admin[.](https://telegra.ph/file/9a4039a2d602486cf1c00.jpg)") if not message.reply_to_message.text: return await message.reply("Please use the method described in the image to contact admin[.](https://telegra.ph/file/9a4039a2d602486cf1c00.jpg)") await bot.send_message( chat_id=OWNER_ID, text=f"<bold>From:</bold> {user} <bold>Id:</bold> <code>{message.chat.id}</code>\n{message.reply_to_message.text.html}" ) userMsg = await bot.send_message( chat_id=message.chat.id, text="Your message has been successfully sent to Admin.", reply_to_message_id=message.reply_to_message.id ) await asyncio.sleep(5) await userMsg.delete() except Exception as e: return await message.reply(f"**Traceback Info:**\n`{traceback.format_exc()}`\n**Error Text:**\n`{e}`") @app.on_message(filters.private & filters.user(OWNER_ID)) async def replyUser(bot, message): try: if message.reply_to_message: chat = int(message.reply_to_message.text.split("\n")[0][-10::]) try: await bot.send_message( chat_id=chat, text=message.text ) adminMsg = await message.reply( text="Successfully sent reply to User.", quote=True ) await asyncio.sleep(5) await adminMsg.delete() except UserIsBlocked: return await message.reply( text="User has blocked me." 
) except Exception as e: return await message.reply( text=f"**Traceback Info:**\n`{traceback.format_exc()}`\n**Error Text:**\n`{e}`" ) """ _____ __ __ _ _ _ _______ _ / ____| | \/ | | | | | ( ) |__ __| | | | | __ _ __ ___ _ _ | \ / | __ _ | |_ | |_ ___ _ __|/ ___ | | ___ ___ | |__ | | |_ || '__|/ _ \| | | || |\/| | / _` || __|| __|/ _ \| '__| / __| | | / _ \ / __|| '_ \ | |__| || | | __/| |_| || | | || (_| || |_ | |_| __/| | \__ \ | || __/| (__ | | | | \_____||_| \___| \__, ||_| |_| \__,_| \__| \__|\___||_| |___/ |_| \___| \___||_| |_| __/ | |___/ Author: GreyMatter's Tech GitHub: https://GreyMattersTech.com/GitHub Website: https://GreyMattersTech.com """
3,186
Python
.py
62
39.33871
157
0.46861
TechGreyMatters/TG-URL-Shortener-Bot
8
19
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,822
private.py
TechGreyMatters_TG-URL-Shortener-Bot/plugins/private.py
import logging from config import * import asyncio from database.users import * from pyrogram import * from pyrogram.types import * from bot import * from pyrogram.errors.exceptions.bad_request_400 import * import shortener from shortener import * from pyshortner import * logger = logging.getLogger(__name__) channel = UPDATE_CHANNEL ft = f"Due To Overload Only Channel Subscribers can Use the Bot Join - @GreyMattersTech" # Private Chat @Client.on_message(filters.private) async def private_link_handler(c: Client, message: Message): try: Fsub = await force_subs(c, message, channel, ft) if Fsub == True: return user = await get_user(message.from_user.id) ban = user["banned"] if ban is not False: await message.reply_text(f'You Are Banned') return user = await get_user(message.from_user.id) if message.text and message.text.startswith('/'): return if message.text: caption = message.text.html elif message.caption: caption = message.caption.html if len(await extract_link(caption)) <= 0 and not message.reply_markup: return user_method = user["method"] vld = await user_api_check(user) if vld is not True: return await message.reply_text(vld) try: txt = await message.reply('`Converting.......`', quote=True) await mains_convertor_handlers(message, user_method, user=user) await update_stats(message, user_method) bin_caption = f"""{caption} #NewPost From User :- {message.from_user.mention} [`{message.from_user.id}`]""" try: if LOG_CHANNEL and message.media: await message.copy(LOG_CHANNEL, bin_caption) elif message.text and LOG_CHANNEL: await c.send_message(LOG_CHANNEL, bin_caption, disable_web_page_preview=True) except PeerIdInvalid as e: logging.error("Make sure that the bot is admin in your log channel") except Exception as e: logger.exception(e) finally: await txt.delete() except Exception as e: logging.exception(e, exc_info=True) """ _____ __ __ _ _ _ _______ _ / ____| | \/ | | | | | ( ) |__ __| | | | | __ _ __ ___ _ _ | \ / | __ _ | |_ | |_ ___ _ __|/ ___ | | ___ ___ | |__ | | 
|_ || '__|/ _ \| | | || |\/| | / _` || __|| __|/ _ \| '__| / __| | | / _ \ / __|| '_ \ | |__| || | | __/| |_| || | | || (_| || |_ | |_| __/| | \__ \ | || __/| (__ | | | | \_____||_| \___| \__, ||_| |_| \__,_| \__| \__|\___||_| |___/ |_| \___| \___||_| |_| __/ | |___/ Author: GreyMatter's Tech GitHub: https://GreyMattersTech.com/GitHub Website: https://GreyMattersTech.com """
3,177
Python
.py
72
34.125
97
0.48476
TechGreyMatters/TG-URL-Shortener-Bot
8
19
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,823
commands.py
TechGreyMatters_TG-URL-Shortener-Bot/plugins/commands.py
import contextlib import datetime import logging from validators import * from config import * from database import * from database.users import * from helpers import * from pyrogram import * from pyrogram.types import * from translation import * from bot import * logger = logging.getLogger(__name__) user_commands = ["set_api", "header", "footer", "username", "banner_image", "me"] avl_web = ["GreyMatterslinks.in",] avl_web1 = "".join(f"- {i}\n" for i in avl_web) @Client.on_message(filters.command('start') & filters.private & filters.incoming) async def start(c:Client, m:Message): NEW_USER_REPLY_MARKUP = [ [ InlineKeyboardButton('Ban', callback_data=f'ban#{m.from_user.id}'), InlineKeyboardButton('Close', callback_data='delete'), ] ] is_user = await is_user_exist(m.from_user.id) reply_markup = InlineKeyboardMarkup(NEW_USER_REPLY_MARKUP) if not is_user and LOG_CHANNEL: await c.send_message(LOG_CHANNEL, f"#NewUser\n\nUser ID: `{m.from_user.id}`\nName: {m.from_user.mention}", reply_markup=reply_markup) new_user = await get_user(m.from_user.id) t = START_MESSAGE.format(m.from_user.mention, new_user["method"], new_user["base_site"]) if WELCOME_IMAGE: return await m.reply_photo(photo=WELCOME_IMAGE, caption=t, reply_markup=START_MESSAGE_REPLY_MARKUP) await m.reply_text(t, reply_markup=START_MESSAGE_REPLY_MARKUP, disable_web_page_preview=True) @Client.on_message(filters.command('help') & filters.private) async def help_command(c, m: Message): s = HELP_MESSAGE.format( firstname=temp.FIRST_NAME, username=temp.BOT_USERNAME) if WELCOME_IMAGE: return await m.reply_photo(photo=WELCOME_IMAGE, caption=s, reply_markup=HELP_REPLY_MARKUP) await m.reply_text(s, reply_markup=HELP_REPLY_MARKUP, disable_web_page_preview=True) @Client.on_message(filters.command('about')) async def about_command(c, m: Message): reply_markup=ABOUT_REPLY_MARKUP bot = await c.get_me() if WELCOME_IMAGE: return await m.reply_photo(photo=WELCOME_IMAGE, caption=ABOUT_TEXT.format(bot.mention(style='md')), 
reply_markup=reply_markup) await m.reply_text(ABOUT_TEXT.format(bot.mention(style='md')),reply_markup=reply_markup , disable_web_page_preview=True) @Client.on_message(filters.command('set_api') & filters.private) async def shortener_api_handler(bot, m: Message): user_id = m.from_user.id user = await get_user(user_id) cmd = m.command if len(cmd) == 1: s = SHORTENER_API_MESSAGE.format(base_site=user["base_site"], shortener_api=user["shortener_api"]) return await m.reply(s) elif len(cmd) == 2: api = cmd[1].strip() await update_user_info(user_id, {"shortener_api": api}) await m.reply(f"Shortener API updated successfully to {api}") @Client.on_message(filters.command('header') & filters.private) async def header_handler(bot, m: Message): user_id = m.from_user.id cmd = m.command user = await get_user(user_id) if m.reply_to_message: header_text = m.reply_to_message.text.html await update_user_info(user_id, {"header_text": header_text}) await m.reply("Header Text Updated Successfully") elif "remove" in cmd: await update_user_info(user_id, {"header_text": ""}) return await m.reply("Header Text Successfully Removed") else: return await m.reply(HEADER_MESSAGE + "\n\nCurrent Header Text: " + user["header_text"].replace("\n", "\n")) @Client.on_message(filters.command('footer') & filters.private) async def footer_handler(bot, m: Message): user_id = m.from_user.id cmd = m.command user = await get_user(user_id) if not m.reply_to_message: if "remove" not in cmd: return await m.reply(FOOTER_MESSAGE + "\n\nCurrent Footer Text: " + user["footer_text"].replace("\n", "\n")) await update_user_info(user_id, {"footer_text": ""}) return await m.reply("Footer Text Successfully Removed") elif m.reply_to_message.text: footer_text = m.reply_to_message.text.html await update_user_info(user_id, {"footer_text": footer_text}) await m.reply("Footer Text Updated Successfully") @Client.on_message(filters.command('username') & filters.private) async def username_handler(bot, m: Message): user_id = 
m.from_user.id user = await get_user(user_id) cmd = m.command if len(cmd) == 1: username = user["username"] or None return await m.reply(USERNAME_TEXT.format(username=username)) elif len(cmd) == 2: if "remove" in cmd: await update_user_info(user_id, {"username": ""}) return await m.reply("Username Successfully Removed") else: username = cmd[1].strip().replace("@", "") await update_user_info(user_id, {"username": username}) await m.reply(f"Username updated successfully to {username}") @Client.on_message(filters.command('banner_image') & filters.private) async def banner_image_handler(bot, m: Message): user_id = m.from_user.id user = await get_user(user_id) cmd = m.command if len(cmd) == 1: if not m.reply_to_message or not m.reply_to_message.photo: return await m.reply_photo(user["banner_image"], caption=BANNER_IMAGE) if user["banner_image"] else await m.reply("Current Banner Image URL: None\n" + BANNER_IMAGE) fileid = m.reply_to_message.photo.file_id await update_user_info(user_id, {"banner_image": fileid}) return await m.reply_photo(fileid, caption="Banner Image updated successfully") elif len(cmd) == 2: if "remove" in cmd: await update_user_info(user_id, {"banner_image": ""}) return await m.reply("Banner Image Successfully Removed") else: image_url = cmd[1].strip() valid_image_url = await extract_link(image_url) if valid_image_url: await update_user_info(user_id, {"banner_image": image_url}) return await m.reply_photo(image_url, caption="Banner Image updated successfully") else: return await m.reply_text("Image URL is Invalid") @Client.on_message(filters.command('me') & filters.private) async def me_handler(bot, m:Message): user_id = m.from_user.id user = await get_user(user_id) user_id = m.from_user.id user = await get_user(user_id) res = USER_ABOUT_MESSAGE.format( base_site=user["base_site"], method=user["method"], shortener_api=user["shortener_api"], mdisk_api=user["mdisk_api"], username=user["username"], header_text=user["header_text"].replace(r'\n', '\n') if 
user["header_text"] else None, footer_text=user["footer_text"].replace(r'\n', '\n') if user["footer_text"] else None, banner_image=user["banner_image"]) buttons = await get_me_button(user) reply_markup = InlineKeyboardMarkup(buttons) return await m.reply_text(res, reply_markup=reply_markup, disable_web_page_preview=True) @Client.on_message(filters.command('ban') & filters.private & filters.user(ADMINS)) async def banned_user_handler(c: Client, m: Message): try: if len(m.command) == 1: x = "".join(f"- `{user}`\n" for user in temp.BANNED_USERS) txt = BANNED_USER_TXT.format(users=x or "None") await m.reply(txt) elif len(m.command) == 2: user_id = m.command[1] user = await get_user(int(user_id)) if user: if not user["banned"]: await update_user_info(user_id, {"banned": True}) with contextlib.suppress(Exception): temp.BANNED_USERS.append(int(user_id)) await c.send_message(user_id, "You are now banned from the bot by Admin") await m.reply(f"User [`{user_id}`] has been banned from the bot. To Unban. `/unban {user_id}`") else: await m.reply("User is already banned") else: await m.reply("User doesn't exist") except Exception as e: logging.exception(e, exc_info=True) @Client.on_message(filters.command('unban') & filters.private & filters.user(ADMINS)) async def unban_user_handler(c: Client, m: Message): try: if len(m.command) == 1: x = "".join(f"- `{user}`\n" for user in temp.BANNED_USERS) txt = BANNED_USER_TXT.format(users=x or "None") await m.reply(txt) elif len(m.command) == 2: user_id = m.command[1] user = await get_user(int(user_id)) if user: if user["banned"]: await update_user_info(user_id, {"banned": False}) with contextlib.suppress(Exception): temp.BANNED_USERS.remove(int(user_id)) await c.send_message(user_id, "You are now free to use the bot. You have been unbanned by the Admin") await m.reply(f"User [`{user_id}`] has been unbanned from the bot. To ban. 
`/ban {user_id}`") else: await m.reply("User is not banned yet") else: await m.reply("User doesn't exist") except Exception as e: logging.exception(e, exc_info=True) """ _____ __ __ _ _ _ _______ _ / ____| | \/ | | | | | ( ) |__ __| | | | | __ _ __ ___ _ _ | \ / | __ _ | |_ | |_ ___ _ __|/ ___ | | ___ ___ | |__ | | |_ || '__|/ _ \| | | || |\/| | / _` || __|| __|/ _ \| '__| / __| | | / _ \ / __|| '_ \ | |__| || | | __/| |_| || | | || (_| || |_ | |_| __/| | \__ \ | || __/| (__ | | | | \_____||_| \___| \__, ||_| |_| \__,_| \__| \__|\___||_| |___/ |_| \___| \___||_| |_| __/ | |___/ Author: GreyMatter's Tech GitHub: https://GreyMattersTech.com/GitHub Website: https://GreyMattersTech.com """
10,427
Python
.py
203
41.931034
176
0.584961
TechGreyMatters/TG-URL-Shortener-Bot
8
19
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,824
contact.py
TechGreyMatters_TG-URL-Shortener-Bot/plugins/contact.py
import asyncio, traceback from config import OWNER_ID from pyrogram import filters, Client as app from pyrogram.errors.exceptions.bad_request_400 import UserIsBlocked @app.on_message(filters.command("contact")) async def contactAdmin(bot, message): try: user = "@" + message.from_user.username if message.from_user.username else message.from_user.mention if not message.reply_to_message: return await message.reply("Please use the method described in the image to contact admin[.](https://telegra.ph/file/9a4039a2d602486cf1c00.jpg)") if not message.reply_to_message.text: return await message.reply("Please use the method described in the image to contact admin[.](https://telegra.ph/file/9a4039a2d602486cf1c00.jpg)") await bot.send_message( chat_id=OWNER_ID, text=f"<bold>From:</bold> {user} <bold>Id:</bold> <code>{message.chat.id}</code>\n{message.reply_to_message.text.html}" ) userMsg = await bot.send_message( chat_id=message.chat.id, text="Your message has been successfully sent to Admin.", reply_to_message_id=message.reply_to_message.id ) await asyncio.sleep(5) await userMsg.delete() except Exception as e: return await message.reply(f"**Traceback Info:**\n`{traceback.format_exc()}`\n**Error Text:**\n`{e}`") @app.on_message(filters.private & filters.user(OWNER_ID)) async def replyUser(bot, message): try: if message.reply_to_message: chat = int(message.reply_to_message.text.split("\n")[0][-10::]) try: await bot.send_message( chat_id=chat, text=message.text ) adminMsg = await message.reply( text="Successfully sent reply to User.", quote=True ) await asyncio.sleep(5) await adminMsg.delete() except UserIsBlocked: return await message.reply( text="User has blocked me." 
) except Exception as e: return await message.reply( text=f"**Traceback Info:**\n`{traceback.format_exc()}`\n**Error Text:**\n`{e}`" ) """ _____ __ __ _ _ _ _______ _ / ____| | \/ | | | | | ( ) |__ __| | | | | __ _ __ ___ _ _ | \ / | __ _ | |_ | |_ ___ _ __|/ ___ | | ___ ___ | |__ | | |_ || '__|/ _ \| | | || |\/| | / _` || __|| __|/ _ \| '__| / __| | | / _ \ / __|| '_ \ | |__| || | | __/| |_| || | | || (_| || |_ | |_| __/| | \__ \ | || __/| (__ | | | | \_____||_| \___| \__, ||_| |_| \__,_| \__| \__|\___||_| |___/ |_| \___| \___||_| |_| __/ | |___/ Author: GreyMatter's Tech GitHub: https://GreyMattersTech.com/GitHub Website: https://GreyMattersTech.com """
3,186
Python
.tac
62
39.33871
157
0.46861
TechGreyMatters/TG-URL-Shortener-Bot
8
19
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,825
FreeC.py
StarrySky-skyler_FreeC/FreeC.py
import os import shutil import threading # 获取系统变量 systemDrive = os.environ.get('systemdrive') winDir = os.environ.get('windir') userProfile = os.environ.get('userprofile') systemDrive += '\\' # 删除文件字典 filesDict = { systemDrive:['.tmp', '._mp', '.log', '.gid', '.chk', '.old'], winDir:['.bak'] } # 删除文件夹字典,只清空,不删除根文件 directoriesDict = { systemDrive:['recycled'], winDir:['prefetch', 'temp'], userProfile:['cookies', 'recent', 'Local Settings\\Temporary Internet Files', 'Local Settings\\Temp'] } print_lock = threading.Lock() def printLock(message): """安全输出日志""" with print_lock: print(message) def deleteFilesWithExtension(directory, extension): """ 递归删除所有指定类型的文件 param: directory: 遍历的文件夹 e.g. D:/demo extension: 指定的文件类型 e.g. .txt """ for root, _, files in os.walk(directory): # 遍历文件 for file in files: # 检查文件后缀是否匹配 if file.endswith(extension): filePath = os.path.join(root, file) # 删除文件 try: printLock(f"Deleting: {filePath}") os.remove(filePath) printLock(f"Deleted: {filePath}") # 删除文件时异常 except OSError as e: printLock(f"Error while deleting\n{filePath}\n{e}") def deleteDirectory(directory): """清空文件夹""" try: printLock(f"Deleting: {directory}") shutil.rmtree(directory) printLock(f"Deleted: {directory}") except Exception as e: printLock(f"Error deleting {directory}:\n{e}") # 线程池 threads = [] diry = 'C:\\Windows\\SoftwareDistribution\\Download\\' a = threading.Thread(target=deleteDirectory, args=(diry,)) threads.append(a) # 添加删除文件线程 for path, exts in filesDict.items(): for ext in exts: b = threading.Thread(target=deleteFilesWithExtension, args=(path, ext)) threads.append(b) # 添加删除文件夹线程 for path, directs in directoriesDict.items(): for direct in directs: __path = os.path.join(path, direct) c = threading.Thread(target=deleteDirectory, args=(__path,)) threads.append(c) if __name__ == '__main__': for thread in threads: thread.start() for thread in threads: thread.join() print("Success free C space") os.system('pause')
2,696
Python
.py
82
22.585366
71
0.588781
StarrySky-skyler/FreeC
8
0
0
GPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,826
roon_integration.py
RamXX_FridayMusic/roon_integration.py
from roonapi import RoonApi, RoonDiscovery from pydantic import StrictBool, StrictStr from typing import List from configparser import ConfigParser from time import time, sleep def play_music(config: ConfigParser, bands: List[StrictStr]) -> StrictBool: """Plays a random selection of the bands in the list for a predefined amount of time""" if not bool(config.get("Roon", "enabled")): return False appinfo = { "extension_id": "FridayMusic", "display_name": "Music Recommendation System", "display_version": "1.0.0", "publisher": config.get("Roon", "publisher"), "email": config.get("Roon", "email") } play_duration = int(config.get("Roon", "play_time_minutes")) * 60 target_zone = config.get("Roon", "target_zone") try: core_id = open(config.get("Roon", "id_file")).read() token = open(config.get("Roon", "token_file")).read() except OSError: print("Please authorize first using discovery.py in the examples directory of the https://github.com/pavoni/pyroon repo.") exit() discover = RoonDiscovery(core_id) server = discover.first() discover.stop() roonapi = RoonApi(appinfo, token, server[0], server[1], True) zones = roonapi.zones output_id = [output["zone_id"] for output in zones.values() if output["display_name"] == target_zone][0] start_time = time() try: for band in bands: items = roonapi.play_media( output_id, ["Library", "Artists", band], "Queue" ) while time() - start_time < play_duration: sleep(1) roonapi.playback_control("pause") print(f"Playback stopped after {play_duration} seconds.") except Exception as e: print(f"An error occurred: {str(e)}") return True
1,846
Python
.py
43
35.697674
130
0.64442
RamXX/FridayMusic
8
2
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,827
config.py
RamXX_FridayMusic/config.py
from enum import Enum class Mood(Enum): HAPPY = 1 EXCITED = 2 CONTENT = 3 RELAXED = 4 BORED = 5 TIRED = 6 STRESSED = 7 ANXIOUS = 8 FRUSTRATED = 9 ANGRY = 10 SAD = 11 DEPRESSED = 12 HOPEFUL = 13 MOTIVATED = 14 INSPIRED = 15 CONFUSED = 16 SURPRISED = 17 NOSTALGIC = 18 GUILTY = 19 EMBARRASSED = 20 NEUTRAL = 21 # Change to fit your taste. # This is a proud GenXer's list 💪 mood_music = { Mood.HAPPY: ["The Beach Boys", "The Beatles", "Katrina and The Waves", "Bobby McFerrin"], Mood.EXCITED: ["AC/DC", "Van Halen", "Guns N' Roses", "Twisted Sister", "ZZ Top", "Slash"], Mood.CONTENT: ["Dire Straits", "Bob Marley", "The Eagles", "Pink Floyd"], Mood.RELAXED: ["Bill Evans", "Miles Davis", "John Coltrane", "Frank Sinatra"], Mood.BORED: ["The Cure", "Joy Division", "Depeche Mode", "The Smiths"], Mood.TIRED: ["Simon & Garfunkel", "Bob Dylan", "Leonard Cohen", "Nick Drake"], Mood.STRESSED: ["Metallica", "Megadeth", "Slayer", "Pantera"], Mood.ANXIOUS: ["Pink Floyd", "Radiohead", "Nine Inch Nails", "Tool"], Mood.FRUSTRATED: ["Rage Against the Machine", "Nirvana", "Pearl Jam", "Alice in Chains"], Mood.ANGRY: ["Metallica", "Slipknot", "Pantera", "Sepultura"], Mood.SAD: ["Eric Clapton", "B.B. King", "Billie Holiday", "Nina Simone"], Mood.DEPRESSED: ["Joy Division", "The Smiths", "Nirvana", "Alice in Chains"], Mood.HOPEFUL: ["Journey", "Bon Jovi", "Survivor", "Europe"], Mood.MOTIVATED: ["Survivor", "Queen", "Aerosmith", "Twisted Sister"], Mood.INSPIRED: ["Pink Floyd", "Led Zeppelin", "The Beatles", "David Bowie"], Mood.CONFUSED: ["Pink Floyd", "Yes", "King Crimson", "Rush"], Mood.SURPRISED: ["Frank Zappa", "Primus", "Mr. 
Bungle", "Faith No More"], Mood.NOSTALGIC: ["The Beatles", "The Rolling Stones", "The Beach Boys", "Elvis Presley"], Mood.GUILTY: ["Johnny Cash", "The Clash", "The Ramones", "Sex Pistols"], Mood.EMBARRASSED: ["The Cure", "Morrissey", "The Smiths", "Depeche Mode"], Mood.NEUTRAL: ["Pink Floyd"] } # Since the goal is to play music on Friday, the closest the day # is to Friday, the more weight it has for the mood of the week. day_weights = { 0: 0.05, # Monday 1: 0.10, # Tuesday 2: 0.20, # Wednesday 3: 0.25, # Thursday 4: 0.40, # Friday 5: 0.00, # Saturday 6: 0.00 # Sunday }
2,442
Python
.py
59
36.542373
95
0.619908
RamXX/FridayMusic
8
2
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,828
obsidian.py
RamXX_FridayMusic/obsidian.py
import os import re import dspy from pydantic import BaseModel, StrictStr, StrictInt from typing import Tuple, List from collections import defaultdict from datetime import datetime, timedelta from config import Mood, day_weights # # Pydantic classes first # class DailyNote(BaseModel): date: StrictStr dow: StrictInt # Day of the week, where 0=Monday, 6=Sunday content: StrictStr mood: Mood # # DSPy classes # class MoodSignature(dspy.Signature): """Assign the most appropriate mood associated with the content""" content: StrictStr = dspy.InputField(desc="Text being analyzed") mood: Mood = dspy.OutputField(desc="Numeric representation of the mood according to Enum provided. Only output a number, nothing else. If no mood matches perfectly, choose 21 (NEUTRAL)") def process_obsidian_notes(directory: StrictStr) -> Mood: """Reads the daily notes from the last 7 days and assigns a mood to each. Returns the mood for the week""" if directory is None: raise ValueError("Obsidian Daily Notes directory cannot be empty. Please specify it in your config.ini file.") notes = [] mooder = dspy.TypedChainOfThought(MoodSignature) today = datetime.now().date() one_week_ago = today - timedelta(days=7) for filename in os.listdir(directory): if filename.endswith(".md"): date_str = filename.split(" ")[0].split(".")[0] # In Daily Notes, the filename is the date. We assume it's in YYYY-MM-DD format. date = datetime.strptime(date_str, "%Y-%m-%d").date() if one_week_ago <= date <= today: file_path = os.path.join(directory, filename) try: with open(file_path, "r", encoding="utf-8") as file: content = file.read() except FileNotFoundError: print(f"File {file_path} not found. Please check the file path.") continue except PermissionError: print(f"Permission denied. You don't have sufficient permissions to access the file {file_path}.") continue except OSError as e: print(f"An error occurred while accessing the file: {str(e)}") continue except UnicodeDecodeError: print("Error decoding the file. 
Please check the file encoding.") continue content = remove_markdown(content) dow = date.weekday() # Calculate the day of the week (0 = Monday, 6 = Sunday) mood_prediction = mooder(content=content) note = DailyNote(date=date_str, dow=dow, content=content, mood=mood_prediction.mood) notes.append(note) week_mood = calculate_week_mood(notes) return week_mood def remove_markdown(content: StrictStr) -> StrictStr: """Remove Markdown links, keeping only the description""" content = re.sub(r"\[([^\]]+)\]\([^\)]+\)", r"\1", content) content = re.sub(r"#+ ", "", content) content = re.sub(r"[*_]{1,2}([^*_]+)[*_]{1,2}", r"\1", content) return content.strip() def calculate_week_mood(daily_notes: List[DailyNote]) -> Mood: """Calculate the mood for the week based on pre-defined weights""" mood_scores: defaultdict = defaultdict(float) for note in daily_notes: mood = note.mood day_weight = day_weights[note.dow] mood_scores[mood] += day_weight if mood_scores: week_mood = max(mood_scores, key=mood_scores.get) return week_mood else: return Mood(Mood.NEUTRAL)
3,642
Python
.py
77
38.597403
190
0.636952
RamXX/FridayMusic
8
2
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,829
fridaymusic.py
RamXX_FridayMusic/fridaymusic.py
import dspy import traceback from rich.console import Console from rich import print from rich.traceback import install as install_traceback from dotenv import load_dotenv from configparser import ConfigParser from config import mood_music from roon_integration import play_music from drinks import recommend_drink from obsidian import process_obsidian_notes if __name__ == "__main__": console = Console(record=True) _ = install_traceback(console=console, show_locals=True) traceback.print_exc() if not load_dotenv(): raise ValueError("Could not load .env file") config = ConfigParser() config.read('config.ini') # You need to make sure you have OPENAI_API_KEY defined in your .env file. # Feel free to experiment with other LMs. GPT 3.5 seems perfectly fine for this task. lm = dspy.OpenAI(model="gpt-3.5-turbo", max_tokens=512, temperature=0.7) dspy.settings.configure(lm=lm) week_mood = process_obsidian_notes(f"{config.get('Obsidian', 'vault')}/{config.get('Obsidian', 'daily_notes_folder')}") print(f"The mood for the entire week is: [yellow]{week_mood.name.lower().capitalize()}[/yellow]") print(f"Music recommendation: [yellow]{', '.join(mood_music[week_mood])}[/yellow]") if bool(config.get("Drinks", "enabled")): drink = recommend_drink(config, week_mood) print(f"Recommended drink for the mood: [yellow]{drink.name}[/yellow]") print(f"Ingredients: [yellow]{', '.join(drink.ingredients)}[/yellow]") print(f"Garnish: [yellow]{drink.garnish}[/yellow]") print(f"Glass: [yellow]{drink.glass}[/yellow]") print(f"Instructions: [yellow]{drink.instructions}[/yellow]") if bool(config.get("Roon", "enabled")): print("[green]Starting playback[/green]") if not play_music(config, mood_music[week_mood]): print("[red]Sorry, something failed with the playback[/red]")
1,922
Python
.py
37
46.675676
123
0.704971
RamXX/FridayMusic
8
2
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,830
drinks.py
RamXX_FridayMusic/drinks.py
import dspy from configparser import ConfigParser from typing import Tuple, Optional, List from pydantic import BaseModel, StrictStr from config import Mood # # Pydantic classes first # class Liquor(BaseModel): name: StrictStr category: StrictStr # e.g., "whiskey", "vodka", "gin", "rum", etc. brand: Optional[StrictStr] = None abv: Optional[float] = None # Alcohol by volume (percentage) class AvailableLiquors(BaseModel): liquors: List[Liquor] class Drink(BaseModel): name: StrictStr ingredients: List[StrictStr] instructions: Optional[StrictStr] = None garnish: Optional[StrictStr] = None glass: Optional[StrictStr] = None category: Optional[StrictStr] = None # e.g., "cocktail", "highball", "shot", etc. class FavoriteDrinks(BaseModel): drinks: List[Drink] class RecommendDrink(dspy.Signature): """Recommend a drink appropriate for the mood based on what the user likes and what's available in the provided list.""" mood: Mood = dspy.InputField(desc="The user's mood") available_liquors: AvailableLiquors = dspy.InputField(desc="List of liquors available to the user. Do not recommend a drink with liquors not in this list.") favorite_drinks: FavoriteDrinks = dspy.InputField(desc="The users's favorite drinks. 
Use this as guidance but feel free to be creative.") drink: Drink = dspy.OutputField(desc="Drink recommendation.") # # Functions # def read_drinks_config(config: ConfigParser) -> Tuple[AvailableLiquors, FavoriteDrinks]: """Reads the lists of available liquours and favorite drinks""" available_liquors = AvailableLiquors(liquors=[]) for liquor_info in config.items('AvailableLiquors'): liquor_data = liquor_info[1].split(', ') name = liquor_data[0] category = liquor_data[1] if len(liquor_data) > 1 else "" brand = liquor_data[2] if len(liquor_data) > 2 else "" abv = float(liquor_data[3]) if len(liquor_data) > 3 else None liquor = Liquor(name=name, category=category, brand=brand, abv=abv) available_liquors.liquors.append(liquor) favorite_drinks = FavoriteDrinks(drinks=[]) for drink_info in config.items('FavoriteDrinks'): drink_data = drink_info[1].split(', ') name = drink_data[0] ingredients = drink_data[1].split('|') if len(drink_data) > 1 else [] instructions = drink_data[2] if len(drink_data) > 2 else "" garnish = drink_data[3] if len(drink_data) > 3 else "" glass = drink_data[4] if len(drink_data) > 4 else "" category = drink_data[5] if len(drink_data) > 5 else "" drink = Drink( name=name, ingredients=ingredients, instructions=instructions, garnish=garnish, glass=glass, category=category, ) favorite_drinks.drinks.append(drink) return available_liquors, favorite_drinks def recommend_drink(config: ConfigParser, week_mood: Mood) -> Drink: """Calls the LM via DSPy to recommend a drink based on the Signature""" available_liquors, favorite_drinks = read_drinks_config(config) recommender = dspy.TypedChainOfThought(RecommendDrink) recommendation = recommender(mood=week_mood, available_liquors=available_liquors, favorite_drinks=favorite_drinks) selected_drink: Drink = recommendation.drink return selected_drink
3,382
Python
.py
70
42.557143
160
0.702089
RamXX/FridayMusic
8
2
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,831
bot.py
LISA-KOREA_4GB-URL-UPLOADER-BOT/bot.py
import os import logging from logging.handlers import RotatingFileHandler from pyrogram import Client from pyrogram import idle from plugins.config import * # Prepare bot client bot = Client( "botclient", bot_token=BOT_TOKEN, api_id=API_ID, api_hash=API_HASH, sleep_threshold=30 ) # Prepare user client if session string is provided if SESSION_STRING: user_client = Client( "UserClient", session_string=SESSION_STRING, api_id=API_ID, api_hash=API_HASH, sleep_threshold=30, no_updates=True ) else: user_client = None # Start the clients if __name__ == "__main__": # Create download directory if it does not exist if not os.path.isdir(DOWN_DIR): os.makedirs(DOWN_DIR) # Start bot client bot.start() # Start user client if available if user_client: user_client.start() # Keep the main thread running idle() # Stop the clients bot.stop() if user_client: user_client.stop()
1,048
Python
.py
42
19.690476
52
0.674872
LISA-KOREA/4GB-URL-UPLOADER-BOT
8
11
0
GPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,832
config.py
LISA-KOREA_4GB-URL-UPLOADER-BOT/plugins/config.py
import os, time, datetime, pytz class Config(object): BOT_TOKEN = os.environ.get("BOT_TOKEN", "") API_ID = int(os.environ.get("API_ID", "")) API_HASH = os.environ.get("API_HASH", "") DOWNLOAD_LOCATION = "./DOWNLOADS" SESSION_STRING = os.environ.get("SESSION_STRING", "") DATABASE_URL = os.environ.get("DATABASE_URL", "") SESSION_NAME = os.environ.get("SESSION_NAME", "LinkToFileUploaderBot") LOG_CHANNEL = int(os.environ.get("LOG_CHANNEL", "")) UPDATES_CHANNEL = os.environ.get("UPDATES_CHANNEL", "") OWNER_ID = int(os.environ.get("OWNER_ID", "")) AUTH_USERS = [int(i) for i in os.environ.get("AUTH_USERS", "5305133820").split(" ")]
731
Python
.py
13
47.692308
88
0.649096
LISA-KOREA/4GB-URL-UPLOADER-BOT
8
11
0
GPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,833
script.py
LISA-KOREA_4GB-URL-UPLOADER-BOT/plugins/script.py
from pyrogram.types import InlineKeyboardMarkup, InlineKeyboardButton class Translation(object): START_TEXT = """ Hello bro donete me tg premium to start Contact me @LISA_FAN_LK """ HELP_TEXT = """ donete tg premium 👌� """
242
Python
.py
7
31.714286
69
0.754386
LISA-KOREA/4GB-URL-UPLOADER-BOT
8
11
0
GPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,834
config.py
demigody_nas-tools/config.py
import os import shutil import sys from threading import Lock import ruamel.yaml import re # 种子名/文件名要素分隔字符 SPLIT_CHARS = r"\.|\s+|\(|\)|\[|]|-|\+|【|】|/|~|;|&|\||#|_|「|」|~" # 默认User-Agent DEFAULT_UA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.102 Safari/537.36" # 收藏了的媒体的目录名,名字可以改,在Emby中点击红星则会自动将电影转移到此分类下,需要在Emby Webhook中配置用户行为通知 RMT_FAVTYPE = '精选' # 支持的媒体文件后缀格式 RMT_MEDIAEXT = ['.mp4', '.mkv', '.ts', '.iso', '.rmvb', '.avi', '.mov', '.mpeg', '.mpg', '.wmv', '.3gp', '.asf', '.m4v', '.flv', '.m2ts', '.strm', '.tp', '.f4v'] # 支持的字幕文件后缀格式 RMT_SUBEXT = ['.srt', '.ass', '.ssa'] # 支持的音轨文件后缀格式 RMT_AUDIO_TRACK_EXT = ['.mka', 'flac', 'ape', 'wav'] # 电视剧动漫的分类genre_ids ANIME_GENREIDS = ['16'] # 默认过滤的文件大小,150M RMT_MIN_FILESIZE = 150 * 1024 * 1024 # 删种检查时间间隔 AUTO_REMOVE_TORRENTS_INTERVAL = 1800 # 下载文件转移检查时间间隔, PT_TRANSFER_INTERVAL = 300 # TMDB信息缓存定时保存时间 METAINFO_SAVE_INTERVAL = 600 # SYNC目录同步聚合转移时间 SYNC_TRANSFER_INTERVAL = 60 # RSS队列中处理时间间隔 RSS_CHECK_INTERVAL = 300 # 刷新订阅TMDB数据的时间间隔(小时) RSS_REFRESH_TMDB_INTERVAL = 6 # 刷流删除的检查时间间隔 BRUSH_REMOVE_TORRENTS_INTERVAL = 300 # 定时清除未识别的缓存时间间隔(小时) META_DELETE_UNKNOWN_INTERVAL = 12 # 定时刷新壁纸的间隔(小时) REFRESH_WALLPAPER_INTERVAL = 1 # fanart的api,用于拉取封面图片 FANART_MOVIE_API_URL = 'https://webservice.fanart.tv/v3/movies/%s?api_key=d2d31f9ecabea050fc7d68aa3146015f' FANART_TV_API_URL = 'https://webservice.fanart.tv/v3/tv/%s?api_key=d2d31f9ecabea050fc7d68aa3146015f' # 默认背景图地址 DEFAULT_TMDB_IMAGE = 'https://s3.bmp.ovh/imgs/2022/07/10/77ef9500c851935b.webp' # TMDB域名地址 TMDB_API_DOMAINS = ['api.themoviedb.org', 'api.tmdb.org'] TMDB_IMAGE_DOMAIN = 'image.tmdb.org' # 添加下载时增加的标签,开始只监控NAStool添加的下载时有效 PT_TAG = "NASTOOL" # 电影默认命名格式 DEFAULT_MOVIE_FORMAT = '{title} ({year})/{title} ({year})-{part} - {videoFormat}' # 电视剧默认命名格式 DEFAULT_TV_FORMAT = '{title} ({year})/Season {season}/{title} - {season_episode}-{part} - 第 {episode} 集' # 辅助识别参数 KEYWORD_SEARCH_WEIGHT_1 = [10, 3, 2, 0.5, 0.5] KEYWORD_SEARCH_WEIGHT_2 = 
[10, 2, 1] KEYWORD_SEARCH_WEIGHT_3 = [10, 2] KEYWORD_STR_SIMILARITY_THRESHOLD = 0.2 KEYWORD_DIFF_SCORE_THRESHOLD = 30 KEYWORD_BLACKLIST = ['中字', '韩语', '双字', '中英', '日语', '双语', '国粤', 'HD', 'BD', '中日', '粤语', '完全版', '法语', '西班牙语', 'HRHDTVAC3264', '未删减版', '未删减', '国语', '字幕组', '人人影视', 'www66ystv', '人人影视制作', '英语', 'www6vhaotv', '无删减版', '完成版', '德意'] # WebDriver路径 WEBDRIVER_PATH = { "Docker": "/usr/lib/chromium/chromedriver", "Synology": "/var/packages/NASTool/target/bin/chromedriver" } # Xvfb虚拟显示路程 XVFB_PATH = [ "/usr/bin/Xvfb", "/usr/local/bin/Xvfb" ] # 线程锁 lock = Lock() # 全局实例 _CONFIG = None def singleconfig(cls): def _singleconfig(*args, **kwargs): global _CONFIG if not _CONFIG: with lock: _CONFIG = cls(*args, **kwargs) return _CONFIG return _singleconfig @singleconfig class Config(object): _config = {} _config_path = None _user = None def __init__(self): self._config_path = os.environ.get('NASTOOL_CONFIG') if not os.environ.get('TZ'): os.environ['TZ'] = 'Asia/Shanghai' self.init_syspath() self.init_config() def init_config(self): try: if not self._config_path: print("【Config】NASTOOL_CONFIG 环境变量未设置,程序无法工作,正在退出...") quit() if not os.path.exists(self._config_path): os.makedirs(os.path.dirname(self._config_path), exist_ok=True) cfg_tp_path = os.path.join(self.get_inner_config_path(), "config.yaml") cfg_tp_path = cfg_tp_path.replace("\\", "/") shutil.copy(cfg_tp_path, self._config_path) print("【Config】config.yaml 配置文件不存在,已将配置文件模板复制到配置目录...") with open(self._config_path, mode='r', encoding='utf-8') as cf: try: # 读取配置 print("正在加载配置:%s" % self._config_path) self._config = ruamel.yaml.YAML().load(cf) except Exception as e: print("【Config】配置文件 config.yaml 格式出现严重错误!请检查:%s" % str(e)) self._config = {} except Exception as err: print("【Config】加载 config.yaml 配置出错:%s" % str(err)) return False def init_syspath(self): with open(os.path.join(self.get_root_path(), "third_party.txt"), "r") as f: for third_party_lib in f.readlines(): module_path = os.path.join(self.get_root_path(), 
"third_party", third_party_lib.strip()).replace("\\", "/") if module_path not in sys.path: sys.path.append(module_path) @property def current_user(self): return self._user @current_user.setter def current_user(self, user): self._user = user def get_proxies(self): return self.get_config('app').get("proxies") def get_ua(self): return self.get_config('app').get("user_agent") or DEFAULT_UA def get_config(self, node=None): if not node: return self._config return self._config.get(node, {}) def save_config(self, new_cfg): self._config = new_cfg with open(self._config_path, mode='w', encoding='utf-8') as sf: yaml = ruamel.yaml.YAML() return yaml.dump(new_cfg, sf) def get_config_path(self): return os.path.dirname(self._config_path) def get_temp_path(self): return os.path.join(self.get_config_path(), "temp") @staticmethod def get_root_path(): return os.path.dirname(os.path.realpath(__file__)) def get_inner_config_path(self): return os.path.join(self.get_root_path(), "config") def get_script_path(self): return os.path.join(self.get_root_path(), "scripts", "sqls") def get_builtin_indexer_path(self): return os.path.join(self.get_root_path(), "app", "indexer", "client", "builtin.py") def get_user_sites_bin_path(self): return os.path.join(self.get_root_path(), "web", "backend", "user.sites.bin") def get_user_plugin_path(self): return os.path.join(self.get_config_path(), "plugins") def get_domain(self): domain = (self.get_config('app') or {}).get('domain') if domain and not domain.startswith('http'): domain = "http://" + domain if domain and str(domain).endswith("/"): domain = domain[:-1] return domain @staticmethod def get_timezone(): return os.environ.get('TZ') @staticmethod def update_favtype(favtype): global RMT_FAVTYPE if favtype: RMT_FAVTYPE = favtype def get_tmdbapi_url(self): tmdb_domain = self.get_config('app').get('tmdb_domain') if tmdb_domain and isinstance(tmdb_domain, str): tmdb_domain = re.sub(r'^https?://', '', tmdb_domain) tmdb_domain = re.sub(r'/$', '', 
tmdb_domain) else: tmdb_domain = TMDB_API_DOMAINS[0] return f"https://{tmdb_domain}/3" def get_tmdbimage_url(self, path, prefix="w500"): if not path: return "" tmdb_image_url = self.get_config("app").get("tmdb_image_url") if tmdb_image_url: return tmdb_image_url + f"/t/p/{prefix}{path}" return f"https://{TMDB_IMAGE_DOMAIN}/t/p/{prefix}{path}" @property def category_path(self): category = self.get_config('media').get("category") if category: return os.path.join(Config().get_config_path(), f"{category}.yaml") return None def get_telegram_domain(self): telegram_domain = (self.get_config('laboratory') or {}).get("telegram_domain") or "https://api.telegram.org" if telegram_domain and telegram_domain.endswith("/"): telegram_domain = telegram_domain[:-1] return telegram_domain
9,050
Python
.py
209
31.330144
130
0.598504
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,835
run.py
demigody_nas-tools/run.py
import os import signal import sys import warnings warnings.filterwarnings('ignore') # 运行环境判断 is_executable = getattr(sys, 'frozen', False) is_windows_exe = is_executable and (os.name == "nt") if is_windows_exe: # 托盘相关库 import threading from package.trayicon import TrayIcon, NullWriter if is_executable: # 可执行文件初始化环境变量 config_path = os.path.join(os.path.dirname(sys.executable), "config").replace("\\", "/") os.environ["NASTOOL_CONFIG"] = os.path.join(config_path, "config.yaml").replace("\\", "/") os.environ["NASTOOL_LOG"] = os.path.join(config_path, "logs").replace("\\", "/") try: if not os.path.exists(config_path): os.makedirs(config_path, exist_ok=True) except Exception as err: print(str(err)) from config import Config import log from web.action import WebAction from web.main import App from app.db import init_db, update_db, init_data from app.helper import init_chrome from initializer import update_config, check_config, start_config_monitor, stop_config_monitor from version import APP_VERSION def sigal_handler(num, stack): """ 信号处理 """ log.warn('捕捉到退出信号:%s,开始退出...' 
% num) # 关闭配置文件监控 log.info('关闭配置文件监控...') stop_config_monitor() # 关闭服务 log.info('关闭服务...') WebAction.stop_service() # 退出主进程 log.info('退出主进程...') # 退出 os._exit(0) def get_run_config(forcev4=False): """ 获取运行配置 """ _web_host = "::" _web_port = 3000 _ssl_cert = None _ssl_key = None _debug = False app_conf = Config().get_config('app') if app_conf: if forcev4: _web_host = "0.0.0.0" elif app_conf.get("web_host"): _web_host = app_conf.get("web_host").replace('[', '').replace(']', '') _web_port = int(app_conf.get('web_port')) if str(app_conf.get('web_port', '')).isdigit() else 3000 _ssl_cert = app_conf.get('ssl_cert') _ssl_key = app_conf.get('ssl_key') _ssl_key = app_conf.get('ssl_key') _debug = True if app_conf.get("debug") else False app_arg = dict(host=_web_host, port=_web_port, debug=_debug, threaded=True, use_reloader=False) if _ssl_cert: app_arg['ssl_context'] = (_ssl_cert, _ssl_key) return app_arg # 退出事件 signal.signal(signal.SIGINT, sigal_handler) signal.signal(signal.SIGTERM, sigal_handler) def init_system(): # 配置 log.console('NAStool 当前版本号:%s' % APP_VERSION) # 数据库初始化 init_db() # 数据库更新 update_db() # 数据初始化 init_data() # 升级配置文件 update_config() # 检查配置文件 check_config() def start_service(): log.console("开始启动服务...") # 启动服务 WebAction.start_service() # 监听配置文件变化 start_config_monitor() # 系统初始化 init_system() # 启动服务 start_service() # 本地运行 if __name__ == '__main__': # Windows启动托盘 if is_windows_exe: homepage = Config().get_config('app').get('domain') if not homepage: homepage = "http://localhost:%s" % str(Config().get_config('app').get('web_port')) log_path = os.environ.get("NASTOOL_LOG") sys.stdout = NullWriter() sys.stderr = NullWriter() def traystart(): TrayIcon(homepage, log_path) if len(os.popen("tasklist| findstr %s" % os.path.basename(sys.executable), 'r').read().splitlines()) <= 2: p1 = threading.Thread(target=traystart, daemon=True) p1.start() # 初始化浏览器驱动 init_chrome() # Flask启动 App.run(**get_run_config(is_windows_exe))
3,849
Python
.py
114
25.587719
114
0.628224
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,836
dbscript_gen.py
demigody_nas-tools/dbscript_gen.py
import os from config import Config from alembic.config import Config as AlembicConfig from alembic.command import revision as alembic_revision db_version = input("请输入版本号:") db_location = os.path.join(Config().get_config_path(), 'user.db').replace('\\', '/') script_location = os.path.join(os.path.dirname(__file__), 'scripts').replace('\\', '/') alembic_cfg = AlembicConfig() alembic_cfg.set_main_option('script_location', script_location) alembic_cfg.set_main_option('sqlalchemy.url', f"sqlite:///{db_location}") alembic_revision(alembic_cfg, db_version, True)
578
Python
.py
11
50.181818
87
0.751812
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,837
log.py
demigody_nas-tools/log.py
import logging import os import re import threading import time from collections import deque from html import escape from logging.handlers import RotatingFileHandler from config import Config logging.getLogger('werkzeug').setLevel(logging.ERROR) lock = threading.Lock() LOG_QUEUE = deque(maxlen=200) LOG_INDEX = 0 class Logger: logger = None __instance = {} __config = None __loglevels = { "info": logging.INFO, "debug": logging.DEBUG, "error": logging.ERROR } def __init__(self, module): self.logger = logging.getLogger(module) self.__config = Config() logtype = self.__config.get_config('app').get('logtype') or "console" loglevel = self.__config.get_config('app').get('loglevel') or "info" self.logger.setLevel(level=self.__loglevels.get(loglevel)) if logtype == "server": logserver = self.__config.get_config('app').get('logserver', '').split(':') if logserver: logip = logserver[0] if len(logserver) > 1: logport = int(logserver[1] or '514') else: logport = 514 log_server_handler = logging.handlers.SysLogHandler((logip, logport), logging.handlers.SysLogHandler.LOG_USER) log_server_handler.setFormatter(logging.Formatter('%(filename)s: %(message)s')) self.logger.addHandler(log_server_handler) elif logtype == "file": # 记录日志到文件 logpath = os.environ.get('NASTOOL_LOG') or self.__config.get_config('app').get('logpath') or "" if logpath: if not os.path.exists(logpath): os.makedirs(logpath, exist_ok=True) log_file_handler = RotatingFileHandler(filename=os.path.join(logpath, module + ".txt"), maxBytes=5 * 1024 * 1024, backupCount=3, encoding='utf-8') log_file_handler.setFormatter(logging.Formatter('%(asctime)s\t%(levelname)s: %(message)s')) self.logger.addHandler(log_file_handler) # 记录日志到终端 log_console_handler = logging.StreamHandler() log_console_handler.setFormatter(logging.Formatter('%(asctime)s\t%(levelname)s: %(message)s')) self.logger.addHandler(log_console_handler) @staticmethod def get_instance(module): if not module: module = "run" if Logger.__instance.get(module): return 
Logger.__instance.get(module) with lock: Logger.__instance[module] = Logger(module) return Logger.__instance.get(module) def __append_log_queue(level, text): global LOG_INDEX, LOG_QUEUE with lock: text = escape(text) if text.startswith("【"): source = re.findall(r"(?<=【).*?(?=】)", text)[0] text = text.replace(f"【{source}】", "") else: source = "System" LOG_QUEUE.append({ "time": time.strftime('%H:%M:%S', time.localtime(time.time())), "level": level, "source": source, "text": text}) LOG_INDEX += 1 def debug(text, module=None): return Logger.get_instance(module).logger.debug(text) def info(text, module=None): __append_log_queue("INFO", text) return Logger.get_instance(module).logger.info(text) def error(text, module=None): __append_log_queue("ERROR", text) return Logger.get_instance(module).logger.error(text) def warn(text, module=None): __append_log_queue("WARN", text) return Logger.get_instance(module).logger.warning(text) def console(text): __append_log_queue("INFO", text) print(text)
3,948
Python
.py
94
30.882979
108
0.577713
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,838
initializer.py
demigody_nas-tools/initializer.py
import json import os import time from watchdog.events import FileSystemEventHandler from watchdog.observers import Observer from werkzeug.security import generate_password_hash import log from app.conf import SystemConfig from app.helper import DbHelper, PluginHelper, ProgressHelper from app.plugins import PluginManager from app.media import Category from app.utils import ConfigLoadCache, CategoryLoadCache, ExceptionUtils, StringUtils from app.utils.commons import INSTANCES from app.utils.types import SystemConfigKey, BuiltinIndexerFileMd5 from config import Config from web.action import WebAction _observer = Observer(timeout=10) def check_config(): """ 检查配置文件,如有错误进行日志输出 """ # 检查日志输出 if Config().get_config('app'): logtype = Config().get_config('app').get('logtype') if logtype: log.info(f"日志输出类型为:{logtype}") if logtype == "server": logserver = Config().get_config('app').get('logserver') if not logserver: log.warn("【Config】日志中心地址未配置,无法正常输出日志") else: log.info("日志将上送到服务器:{logserver}") elif logtype == "file": logpath = Config().get_config('app').get('logpath') if not logpath: log.warn("【Config】日志文件路径未配置,无法正常输出日志") else: log.info(f"日志将写入文件:{logpath}") # 检查WEB端口 web_port = Config().get_config('app').get('web_port') if not web_port: log.warn("【Config】WEB服务端口未设置,将使用默认3000端口") # 检查登录用户和密码 login_user = Config().get_config('app').get('login_user') login_password = Config().get_config('app').get('login_password') if not login_user or not login_password: log.warn("【Config】WEB管理用户或密码未设置,将使用默认用户:admin,密码:password") else: log.info(f"WEB管理页面用户:{str(login_user)}") # 检查HTTPS ssl_cert = Config().get_config('app').get('ssl_cert') ssl_key = Config().get_config('app').get('ssl_key') if not ssl_cert or not ssl_key: log.info(f"未启用https,请使用 http://IP:{str(web_port)} 访问管理页面") else: if not os.path.exists(ssl_cert): log.warn(f"【Config】ssl_cert文件不存在:{ssl_cert}") if not os.path.exists(ssl_key): log.warn(f"【Config】ssl_key文件不存在:{ssl_key}") log.info(f"已启用https,请使用 https://IP:{str(web_port)} 
访问管理页面") else: log.error("【Config】配置文件格式错误,找不到app配置项!") builtin_indexer_path = Config().get_builtin_indexer_path() builtin_indexer_verify_result = StringUtils.verify_integrity(builtin_indexer_path, BuiltinIndexerFileMd5) if not builtin_indexer_verify_result: recovery_msg = """ ------------------------------------------------------------------ 【Config】内置索引文件被改动,为保证稳定性,请检查是否安装第三方插件或者人为修改 1. 如果为docker容器/套件,请删除容器/套件重新添加 2. 如果为其他版本,请重新下载 ------------------------------------------------------------------ """ log.error(recovery_msg) def update_config(): """ 升级配置文件 """ _config = Config().get_config() _dbhelper = DbHelper() overwrite_cofig = False # 密码初始化 login_password = _config.get("app", {}).get("login_password") or "password" if login_password and not login_password.startswith("[hash]"): _config['app']['login_password'] = "[hash]%s" % generate_password_hash( login_password) overwrite_cofig = True # API密钥初始化 if not _config.get("security", {}).get("api_key"): _config['security']['api_key'] = StringUtils.generate_random_str(32) overwrite_cofig = True # 字幕兼容旧配置 try: subtitle = Config().get_config('subtitle') or {} if subtitle: if subtitle.get("server") == "opensubtitles": PluginManager().save_plugin_config(pid="OpenSubtitles", conf={ "enable": subtitle.get("opensubtitles", {}).get("enable") }) else: chinesesubfinder = subtitle.get("chinesesubfinder", {}) PluginManager().save_plugin_config(pid="ChineseSubFinder", conf={ "host": chinesesubfinder.get("host"), "api_key": chinesesubfinder.get("api_key"), "local_path": chinesesubfinder.get("local_path"), "remote_path": chinesesubfinder.get("remote_path") }) # 删除旧配置 _config.pop("subtitle") overwrite_cofig = True except Exception as e: ExceptionUtils.exception_traceback(e) # 自定义制作组/字幕组兼容旧配置 try: custom_release_groups = (Config().get_config('laboratory') or {}).get('release_groups') if custom_release_groups: PluginManager().save_plugin_config(pid="CustomReleaseGroups", conf={ "release_groups": custom_release_groups }) # 删除旧配置 
_config["laboratory"].pop("release_groups") overwrite_cofig = True except Exception as e: ExceptionUtils.exception_traceback(e) # 下载器兼容旧配置 try: # pt pt = Config().get_config('pt') pt_client = pt.get("pt_client") pt_monitor = pt.get("pt_monitor") pt_monitor_only = pt.get("pt_monitor_only") rmt_mode = pt.get("rmt_mode") # downloaddir download_dir_conf = [] downloaddir = Config().get_config('downloaddir') if downloaddir: for dl_dir in downloaddir: download_dir_conf.append({ "save_path": dl_dir.get("save_path"), "type": dl_dir.get("type"), "category": dl_dir.get("category"), "container_path": dl_dir.get("container_path"), "label": dl_dir.get("label") }) _config.pop("downloaddir") overwrite_cofig = True downloaddir = json.dumps(download_dir_conf) # qbittorrent qbittorrent = Config().get_config('qbittorrent') if qbittorrent: enabled = 1 if pt_client == "qbittorrent" else 0 transfer = 1 if pt_monitor else 0 only_nastool = 1 if pt_monitor_only else 0 config = json.dumps({ "host": qbittorrent.get("qbhost"), "port": qbittorrent.get("qbport"), "username": qbittorrent.get("qbusername"), "password": qbittorrent.get("qbpassword") }) _dbhelper.update_downloader(did=None, name="Qbittorrent", dtype="qbittorrent", enabled=enabled, transfer=transfer, only_nastool=only_nastool, rmt_mode=rmt_mode, config=config, download_dir=downloaddir) _config.pop("qbittorrent") overwrite_cofig = True # transmission transmission = Config().get_config('transmission') if transmission: enabled = 1 if pt_client == "transmission" else 0 transfer = 1 if pt_monitor else 0 only_nastool = 1 if pt_monitor_only else 0 config = json.dumps({ "host": transmission.get("trhost"), "port": transmission.get("trport"), "username": transmission.get("trusername"), "password": transmission.get("trpassword") }) _dbhelper.update_downloader(did=None, name="Transmission", dtype="transmission", enabled=enabled, transfer=transfer, only_nastool=only_nastool, rmt_mode=rmt_mode, config=config, download_dir=downloaddir) 
_config.pop("transmission") overwrite_cofig = True # pt if pt_client is not None: pt.pop("pt_client") if pt_monitor is not None: pt.pop("pt_monitor") if pt_monitor_only is not None: pt.pop("pt_monitor_only") if rmt_mode is not None: pt.pop("rmt_mode") except Exception as e: ExceptionUtils.exception_traceback(e) # 站点数据刷新时间默认配置 try: if "ptrefresh_date_cron" not in _config['pt']: _config['pt']['ptrefresh_date_cron'] = '6' overwrite_cofig = True except Exception as e: ExceptionUtils.exception_traceback(e) # 豆瓣配置转为插件 try: douban = Config().get_config('douban') if douban: _enable = True if douban.get("users") and douban.get("interval") and douban.get("types") else False PluginManager().save_plugin_config(pid="DoubanSync", conf={ "onlyonce": False, "enable": _enable, "interval": douban.get("interval"), "auto_search": douban.get("auto_search"), "auto_rss": douban.get("auto_rss"), "cookie": douban.get("cookie"), "users": douban.get("users"), "days": douban.get("days"), "types": douban.get("types") }) # 删除旧配置 _config.pop("douban") overwrite_cofig = True except Exception as e: ExceptionUtils.exception_traceback(e) # 刮削配置改为存数据库 try: scraper_conf = {} # Nfo scraper_nfo = Config().get_config("scraper_nfo") if scraper_nfo: scraper_conf["scraper_nfo"] = scraper_nfo _config.pop("scraper_nfo") overwrite_cofig = True # 图片 scraper_pic = Config().get_config("scraper_pic") if scraper_pic: scraper_conf["scraper_pic"] = scraper_pic _config.pop("scraper_pic") overwrite_cofig = True # 保存 if scraper_conf: SystemConfig().set(SystemConfigKey.UserScraperConf, scraper_conf) except Exception as e: ExceptionUtils.exception_traceback(e) # 内建索引器配置改为存数据库 try: indexer_sites = Config().get_config("pt").get("indexer_sites") if indexer_sites: SystemConfig().set(SystemConfigKey.UserIndexerSites, indexer_sites) _config['pt'].pop("indexer_sites") overwrite_cofig = True except Exception as e: ExceptionUtils.exception_traceback(e) # 站点签到转为插件 try: ptsignin_cron = Config().get_config("pt").get("ptsignin_cron") 
if ptsignin_cron: # 转换周期 ptsignin_cron = str(ptsignin_cron).strip() if ptsignin_cron.isdigit(): cron = f"0 */{ptsignin_cron} * * *" elif ptsignin_cron.count(" ") == 4: cron = ptsignin_cron elif "-" in ptsignin_cron: ptsignin_cron = ptsignin_cron.split("-")[0] hour = int(ptsignin_cron.split(":")[0]) minute = int(ptsignin_cron.split(":")[1]) cron = f"{minute} {hour} * * *" elif ptsignin_cron.count(":"): hour = int(ptsignin_cron.split(":")[0]) minute = int(ptsignin_cron.split(":")[1]) cron = f"{minute} {hour} * * *" else: cron = "30 8 * * *" # 安装插件 WebAction().install_plugin(data={"id": "AutoSignIn"}, reload=False) # 保存配置 PluginManager().save_plugin_config(pid="AutoSignIn", conf={ "enabled": True, "cron": cron, "retry_keyword": '', "sign_sites": [], "special_sites": [], "notify": True, "onlyonce": False, "queue_cnt": 10 }) _config['pt'].pop("ptsignin_cron") overwrite_cofig = True except Exception as e: ExceptionUtils.exception_traceback(e) # 存量插件安装情况统计 try: plugin_report_state = SystemConfig().get(SystemConfigKey.UserInstalledPluginsReport) installed_plugins = SystemConfig().get(SystemConfigKey.UserInstalledPlugins) if not plugin_report_state and installed_plugins: ret = PluginHelper().report(installed_plugins) if ret: SystemConfig().set(SystemConfigKey.UserInstalledPluginsReport, '1') except Exception as e: ExceptionUtils.exception_traceback(e) # TMDB代理服务开关迁移 try: tmdb_proxy = Config().get_config('laboratory').get("tmdb_proxy") if tmdb_proxy: _config['app']['tmdb_domain'] = 'api.tmdb.org' _config['laboratory'].pop("tmdb_proxy") overwrite_cofig = True except Exception as e: ExceptionUtils.exception_traceback(e) # baiduocr配置文件迁移 try: ocr = Config().get_config('ocr') if not ocr: _config['ocr'] = {} _config['ocr']['custom_ocr_url'] = '' _config['ocr']['baiduocr_api_key'] = '' _config['ocr']['baiduocr_secret_key'] = '' overwrite_cofig = True baidu_ocr = Config().get_config('baiduocr') if baidu_ocr: _config['ocr']['baiduocr_api_key'] = baidu_ocr.get('api_key', '') or '' 
_config['ocr']['baiduocr_secret_key'] = baidu_ocr.get('secret_key', '') or '' _config.pop("baiduocr") overwrite_cofig = True except Exception as e: ExceptionUtils.exception_traceback(e) # 重写配置文件 if overwrite_cofig: Config().save_config(_config) class ConfigMonitor(FileSystemEventHandler): """ 配置文件变化响应 """ def __init__(self): FileSystemEventHandler.__init__(self) def on_modified(self, event): if event.is_directory: return src_path = event.src_path file_name = os.path.basename(src_path) file_head, file_ext = os.path.splitext(os.path.basename(file_name)) if file_ext != ".yaml": return # 配置文件10秒内只能加载一次 if file_name == "config.yaml" and not ConfigLoadCache.get(src_path): ConfigLoadCache.set(src_path, True) CategoryLoadCache.set("ConfigLoadBlock", True, ConfigLoadCache.ttl) log.warn(f"【System】进程 {os.getpid()} 检测到系统配置文件已修改,正在重新加载...") time.sleep(1) # 重新加载配置 Config().init_config() # 重载singleton服务 for instance in INSTANCES.values(): if hasattr(instance, "init_config"): instance.init_config() # 正在使用的二级分类策略文件3秒内只能加载一次,配置文件加载时,二级分类策略文件不加载 elif file_name == os.path.basename(Config().category_path) \ and not CategoryLoadCache.get(src_path) \ and not CategoryLoadCache.get("ConfigLoadBlock"): CategoryLoadCache.set(src_path, True) log.warn(f"【System】进程 {os.getpid()} 检测到二级分类策略 {file_head} 配置文件已修改,正在重新加载...") time.sleep(1) # 重新加载二级分类策略 Category().init_config() def start_config_monitor(): """ 启动服务 """ global _observer # 配置文件监听 _observer.schedule(ConfigMonitor(), path=Config().get_config_path(), recursive=False) _observer.daemon = True _observer.start() def stop_config_monitor(): """ 停止服务 """ global _observer try: if _observer: _observer.stop() _observer.join() except Exception as err: print(str(err))
17,201
Python
.py
395
28.564557
112
0.54211
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,839
setup.py
demigody_nas-tools/third_party/feapder/setup.py
# -*- coding: utf-8 -*- """ Created on 2020/4/22 10:45 PM --------- @summary: --------- @author: Boris @email: [email protected] """ from os.path import dirname, join from sys import version_info import setuptools if version_info < (3, 6, 0): raise SystemExit("Sorry! feapder requires python 3.6.0 or later.") with open(join(dirname(__file__), "feapder/VERSION"), "rb") as fh: version = fh.read().decode("ascii").strip() with open("README.md", "r", encoding="utf8") as fh: long_description = fh.read() packages = setuptools.find_packages() packages.extend( [ "feapder", "feapder.templates", "feapder.templates.project_template", "feapder.templates.project_template.spiders", "feapder.templates.project_template.items", ] ) requires = [ "better-exceptions>=0.2.2", "DBUtils>=2.0", "parsel>=1.5.2", "PyMySQL>=0.9.3", "redis>=2.10.6,<4.0.0", "requests>=2.22.0", "bs4>=0.0.1", "ipython>=7.14.0", "cryptography>=3.3.2", "urllib3>=1.25.8", "loguru>=0.5.3", "influxdb>=5.3.1", "pyperclip>=1.8.2", "terminal-layout>=2.1.3", ] render_requires = [ "webdriver-manager>=3.5.3", "selenium>=3.141.0", ] all_requires = [ "bitarray>=1.5.3", "PyExecJS>=1.5.1", "pymongo>=3.10.1", "redis-py-cluster>=2.1.0", ] + render_requires setuptools.setup( name="feapder", version=version, author="Boris", license="MIT", author_email="[email protected]", python_requires=">=3.6", description="feapder是一款支持分布式、批次采集、数据防丢、报警丰富的python爬虫框架", long_description=long_description, long_description_content_type="text/markdown", install_requires=requires, extras_require={"all": all_requires, "render": render_requires}, entry_points={"console_scripts": ["feapder = feapder.commands.cmdline:execute"]}, url="https://github.com/Boris-code/feapder.git", packages=packages, include_package_data=True, classifiers=["Programming Language :: Python :: 3"], )
2,085
Python
.py
72
24.208333
85
0.645095
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,840
setting.py
demigody_nas-tools/third_party/feapder/feapder/setting.py
# -*- coding: utf-8 -*- """爬虫配置文件""" import os # redis 表名 # 任务表模版 TAB_REQUESTS = "{redis_key}:z_requests" # 任务失败模板 TAB_FAILED_REQUESTS = "{redis_key}:z_failed_requests" # 数据保存失败模板 TAB_FAILED_ITEMS = "{redis_key}:s_failed_items" # 爬虫状态表模版 TAB_SPIDER_STATUS = "{redis_key}:h_spider_status" # 用户池 TAB_USER_POOL = "{redis_key}:h_{user_type}_pool" # MYSQL MYSQL_IP = os.getenv("MYSQL_IP") MYSQL_PORT = int(os.getenv("MYSQL_PORT", 3306)) MYSQL_DB = os.getenv("MYSQL_DB") MYSQL_USER_NAME = os.getenv("MYSQL_USER_NAME") MYSQL_USER_PASS = os.getenv("MYSQL_USER_PASS") # MONGODB MONGO_IP = os.getenv("MONGO_IP", "localhost") MONGO_PORT = int(os.getenv("MONGO_PORT", 27017)) MONGO_DB = os.getenv("MONGO_DB") MONGO_USER_NAME = os.getenv("MONGO_USER_NAME") MONGO_USER_PASS = os.getenv("MONGO_USER_PASS") # REDIS # ip:port 多个可写为列表或者逗号隔开 如 ip1:port1,ip2:port2 或 ["ip1:port1", "ip2:port2"] REDISDB_IP_PORTS = os.getenv("REDISDB_IP_PORTS") REDISDB_USER_PASS = os.getenv("REDISDB_USER_PASS") REDISDB_DB = int(os.getenv("REDISDB_DB", 0)) # 连接redis时携带的其他参数,如ssl=True REDISDB_KWARGS = dict() # 适用于redis哨兵模式 REDISDB_SERVICE_NAME = os.getenv("REDISDB_SERVICE_NAME") # 数据入库的pipeline,可自定义,默认MysqlPipeline ITEM_PIPELINES = [ "feapder.pipelines.mysql_pipeline.MysqlPipeline", # "feapder.pipelines.mongo_pipeline.MongoPipeline", # "feapder.pipelines.console_pipeline.ConsolePipeline", ] EXPORT_DATA_MAX_FAILED_TIMES = 10 # 导出数据时最大的失败次数,包括保存和更新,超过这个次数报警 EXPORT_DATA_MAX_RETRY_TIMES = 10 # 导出数据时最大的重试次数,包括保存和更新,超过这个次数则放弃重试 # 爬虫相关 # COLLECTOR COLLECTOR_TASK_COUNT = 32 # 每次获取任务数量,追求速度推荐32 # SPIDER SPIDER_THREAD_COUNT = 1 # 爬虫并发数,追求速度推荐32 # 下载时间间隔 单位秒。 支持随机 如 SPIDER_SLEEP_TIME = [2, 5] 则间隔为 2~5秒之间的随机数,包含2和5 SPIDER_SLEEP_TIME = 0 SPIDER_MAX_RETRY_TIMES = 10 # 每个请求最大重试次数 # 是否主动执行添加 设置为False 需要手动调用start_monitor_task,适用于多进程情况下 SPIDER_AUTO_START_REQUESTS = True KEEP_ALIVE = False # 爬虫是否常驻 # 浏览器渲染 WEBDRIVER = dict( pool_size=1, # 浏览器的数量 load_images=True, # 是否加载图片 user_agent=None, # 字符串 或 无参函数,返回值为user_agent proxy=None, # 
xxx.xxx.xxx.xxx:xxxx 或 无参函数,返回值为代理地址 headless=False, # 是否为无头浏览器 driver_type="CHROME", # CHROME、FIREFOX timeout=30, # 请求超时时间 window_size=(1024, 800), # 窗口大小 render_time=0, # 渲染时长,即打开网页等待指定时间后再获取源码 custom_argument=[ "--ignore-certificate-errors", "--disable-blink-features=AutomationControlled", ], # 自定义浏览器渲染参数 xhr_url_regexes=None, # 拦截xhr接口,支持正则,数组类型 auto_install_driver=True, # 自动下载浏览器驱动 支持chrome 和 firefox download_path=None, # 下载文件的路径 use_stealth_js=False, # 使用stealth.min.js隐藏浏览器特征 ) PLAYWRIGHT = dict( user_agent=None, # 字符串 或 无参函数,返回值为user_agent proxy=None, # xxx.xxx.xxx.xxx:xxxx 或 无参函数,返回值为代理地址 headless=False, # 是否为无头浏览器 driver_type="chromium", # chromium、firefox、webkit timeout=30, # 请求超时时间 window_size=(1024, 800), # 窗口大小 download_path=None, # 下载文件的路径 render_time=0, # 渲染时长,即打开网页等待指定时间后再获取源码 wait_until="networkidle", # 等待页面加载完成的事件,可选值:"commit", "domcontentloaded", "load", "networkidle" use_stealth_js=False, # 使用stealth.min.js隐藏浏览器特征 page_on_event_callback=None, # page.on() 事件的回调 如 page_on_event_callback={"dialog": lambda dialog: dialog.accept()} storage_state_path=None, # 保存浏览器状态的路径 url_regexes=None, # 拦截接口,支持正则,数组类型 save_all=False, # 是否保存所有拦截的接口, 配合url_regexes使用,为False时只保存最后一次拦截的接口 ) # 爬虫启动时,重新抓取失败的requests RETRY_FAILED_REQUESTS = False # 爬虫启动时,重新入库失败的item RETRY_FAILED_ITEMS = False # 保存失败的request SAVE_FAILED_REQUEST = True # request防丢机制。(指定的REQUEST_LOST_TIMEOUT时间内request还没做完,会重新下发 重做) REQUEST_LOST_TIMEOUT = 600 # 10分钟 # request网络请求超时时间 REQUEST_TIMEOUT = 22 # 等待服务器响应的超时时间,浮点数,或(connect timeout, read timeout)元组 # item在内存队列中最大缓存数量 ITEM_MAX_CACHED_COUNT = 5000 # item每批入库的最大数量 ITEM_UPLOAD_BATCH_MAX_SIZE = 1000 # item入库时间间隔 ITEM_UPLOAD_INTERVAL = 1 # 内存任务队列最大缓存的任务数,默认不限制;仅对AirSpider有效。 TASK_MAX_CACHED_SIZE = 0 # 下载缓存 利用redis缓存,但由于内存大小限制,所以建议仅供开发调试代码时使用,防止每次debug都需要网络请求 RESPONSE_CACHED_ENABLE = False # 是否启用下载缓存 成本高的数据或容易变需求的数据,建议设置为True RESPONSE_CACHED_EXPIRE_TIME = 3600 # 缓存时间 秒 RESPONSE_CACHED_USED = False # 是否使用缓存 补采数据时可设置为True # redis 存放item与request的根目录 REDIS_KEY = 
"" # 爬虫启动时删除的key,类型: 元组/bool/string。 支持正则; 常用于清空任务队列,否则重启时会断点续爬 DELETE_KEYS = [] # 设置代理 PROXY_EXTRACT_API = None # 代理提取API ,返回的代理分割符为\r\n PROXY_ENABLE = True PROXY_MAX_FAILED_TIMES = 5 # 代理最大失败次数,超过则不使用,自动删除 PROXY_POOL = "feapder.network.proxy_pool.ProxyPool" # 代理池 # 随机headers RANDOM_HEADERS = True # UserAgent类型 支持 'chrome', 'opera', 'firefox', 'internetexplorer', 'safari','mobile' 若不指定则随机类型 USER_AGENT_TYPE = "chrome" # 默认使用的浏览器头 DEFAULT_USERAGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36" # requests 使用session USE_SESSION = False # 下载 DOWNLOADER = "feapder.network.downloader.RequestsDownloader" # 请求下载器 SESSION_DOWNLOADER = "feapder.network.downloader.RequestsSessionDownloader" RENDER_DOWNLOADER = "feapder.network.downloader.SeleniumDownloader" # 渲染下载器 # RENDER_DOWNLOADER="feapder.network.downloader.PlaywrightDownloader" MAKE_ABSOLUTE_LINKS = True # 自动转成绝对连接 # 去重 ITEM_FILTER_ENABLE = False # item 去重 ITEM_FILTER_SETTING = dict( filter_type=1 # 永久去重(BloomFilter) = 1 、内存去重(MemoryFilter) = 2、 临时去重(ExpireFilter)= 3、轻量去重(LiteFilter)= 4 ) REQUEST_FILTER_ENABLE = False # request 去重 REQUEST_FILTER_SETTING = dict( filter_type=3, # 永久去重(BloomFilter) = 1 、内存去重(MemoryFilter) = 2、 临时去重(ExpireFilter)= 3、 轻量去重(LiteFilter)= 4 expire_time=2592000, # 过期时间1个月 ) # 报警 支持钉钉、飞书、企业微信、邮件 # 钉钉报警 DINGDING_WARNING_URL = "" # 钉钉机器人api DINGDING_WARNING_PHONE = "" # 报警人 支持列表,可指定多个 DINGDING_WARNING_ALL = False # 是否提示所有人, 默认为False # 飞书报警 # https://open.feishu.cn/document/ukTMukTMukTM/ucTM5YjL3ETO24yNxkjN#e1cdee9f FEISHU_WARNING_URL = "" # 飞书机器人api FEISHU_WARNING_USER = None # 报警人 {"open_id":"ou_xxxxx", "name":"xxxx"} 或 [{"open_id":"ou_xxxxx", "name":"xxxx"}] FEISHU_WARNING_ALL = False # 是否提示所有人, 默认为False # 邮件报警 EMAIL_SENDER = "" # 发件人 EMAIL_PASSWORD = "" # 授权码 EMAIL_RECEIVER = "" # 收件人 支持列表,可指定多个 EMAIL_SMTPSERVER = "smtp.163.com" # 邮件服务器 默认为163邮箱 # 企业微信报警 WECHAT_WARNING_URL = "" # 企业微信机器人api WECHAT_WARNING_PHONE = "" 
# 报警人 将会在群内@此人, 支持列表,可指定多人 WECHAT_WARNING_ALL = False # 是否提示所有人, 默认为False # 时间间隔 WARNING_INTERVAL = 3600 # 相同报警的报警时间间隔,防止刷屏; 0表示不去重 WARNING_LEVEL = "DEBUG" # 报警级别, DEBUG / INFO / ERROR WARNING_FAILED_COUNT = 1000 # 任务失败数 超过WARNING_FAILED_COUNT则报警 WARNING_CHECK_TASK_COUNT_INTERVAL = 1200 # 检查已做任务数量的时间间隔,若两次时间间隔之间,任务数无变化则报警 # 日志 LOG_NAME = os.path.basename(os.getcwd()) LOG_PATH = "log/%s.log" % LOG_NAME # log存储路径 LOG_LEVEL = os.getenv("LOG_LEVEL", "DEBUG") # 日志级别 LOG_COLOR = True # 是否带有颜色 LOG_IS_WRITE_TO_CONSOLE = True # 是否打印到控制台 LOG_IS_WRITE_TO_FILE = False # 是否写文件 LOG_MODE = "w" # 写文件的模式 LOG_MAX_BYTES = 10 * 1024 * 1024 # 每个日志文件的最大字节数 LOG_BACKUP_COUNT = 20 # 日志文件保留数量 LOG_ENCODING = "utf8" # 日志文件编码 # 是否详细的打印异常 PRINT_EXCEPTION_DETAILS = True # 设置不带颜色的日志格式 LOG_FORMAT = "%(threadName)s|%(asctime)s|%(filename)s|%(funcName)s|line:%(lineno)d|%(levelname)s| %(message)s" # 设置带有颜色的日志格式 os.environ["LOGURU_FORMAT"] = ( "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | " "<level>{level: <8}</level> | " "<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>line:{line}</cyan> | <level>{message}</level>" ) OTHERS_LOG_LEVAL = "ERROR" # 第三方库的log等级 # 打点监控 influxdb 配置 INFLUXDB_HOST = os.getenv("INFLUXDB_HOST", "localhost") INFLUXDB_PORT = int(os.getenv("INFLUXDB_PORT", 8086)) INFLUXDB_UDP_PORT = int(os.getenv("INFLUXDB_UDP_PORT", 8089)) INFLUXDB_USER = os.getenv("INFLUXDB_USER") INFLUXDB_PASSWORD = os.getenv("INFLUXDB_PASSWORD") INFLUXDB_DATABASE = os.getenv("INFLUXDB_DB") # 监控数据存储的表名,爬虫管理系统上会以task_id命名 INFLUXDB_MEASUREMENT = "task_" + os.getenv("TASK_ID") if os.getenv("TASK_ID") else None # 打点监控其他参数,若这里也配置了influxdb的参数, 则会覆盖外面的配置 METRICS_OTHER_ARGS = dict(retention_policy_duration="180d", emit_interval=60) ############# 导入用户自定义的setting ############# try: from setting import * # 兼容老版本的配置 KEEP_ALIVE = not AUTO_STOP_WHEN_SPIDER_DONE except: pass
10,870
Python
.py
209
36.889952
143
0.733848
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,841
__init__.py
demigody_nas-tools/third_party/feapder/feapder/__init__.py
# -*- coding: utf-8 -*- """ Created on 2020/4/21 10:41 PM --------- @summary: --------- @author: Boris @email: [email protected] """ import os import re import sys sys.path.insert(0, re.sub(r"([\\/]items$)|([\\/]spiders$)", "", os.getcwd())) __all__ = [ "AirSpider", "Spider", "TaskSpider", "BatchSpider", "BaseParser", "TaskParser", "BatchParser", "Request", "Response", "Item", "UpdateItem", "ArgumentParser", ] from feapder.core.spiders import AirSpider, Spider, TaskSpider, BatchSpider from feapder.core.base_parser import BaseParser, TaskParser, BatchParser from feapder.network.request import Request from feapder.network.response import Response from feapder.network.item import Item, UpdateItem from feapder.utils.custom_argparse import ArgumentParser
815
Python
.py
33
22.151515
77
0.702182
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,842
item_buffer.py
demigody_nas-tools/third_party/feapder/feapder/buffer/item_buffer.py
# -*- coding: utf-8 -*- """ Created on 2018-06-19 17:17 --------- @summary: item 管理器, 负责缓冲添加到数据库中的item, 由该manager统一添加。防止多线程同时访问数据库 --------- @author: Boris @email: [email protected] """ import threading from queue import Queue import feapder.utils.tools as tools from feapder import setting from feapder.db.redisdb import RedisDB from feapder.dedup import Dedup from feapder.network.item import Item, UpdateItem from feapder.pipelines import BasePipeline from feapder.pipelines.mysql_pipeline import MysqlPipeline from feapder.utils import metrics from feapder.utils.log import log MYSQL_PIPELINE_PATH = "feapder.pipelines.mysql_pipeline.MysqlPipeline" class ItemBuffer(threading.Thread): dedup = None __redis_db = None def __init__(self, redis_key, task_table=None): if not hasattr(self, "_table_item"): super(ItemBuffer, self).__init__() self._thread_stop = False self._is_adding_to_db = False self._redis_key = redis_key self._task_table = task_table self._items_queue = Queue(maxsize=setting.ITEM_MAX_CACHED_COUNT) self._table_request = setting.TAB_REQUESTS.format(redis_key=redis_key) self._table_failed_items = setting.TAB_FAILED_ITEMS.format( redis_key=redis_key ) self._item_tables = { # 'item_name': 'table_name' # 缓存item名与表名对应关系 } self._item_update_keys = { # 'table_name': ['id', 'name'...] 
# 缓存table_name与__update_key__的关系 } self._pipelines = self.load_pipelines() self._have_mysql_pipeline = MYSQL_PIPELINE_PATH in setting.ITEM_PIPELINES self._mysql_pipeline = None if setting.ITEM_FILTER_ENABLE and not self.__class__.dedup: self.__class__.dedup = Dedup( to_md5=False, **setting.ITEM_FILTER_SETTING ) # 导出重试的次数 self.export_retry_times = 0 # 导出失败的次数 TODO 非air爬虫使用redis统计 self.export_falied_times = 0 @property def redis_db(self): if self.__class__.__redis_db is None: self.__class__.__redis_db = RedisDB() return self.__class__.__redis_db def load_pipelines(self): pipelines = [] for pipeline_path in setting.ITEM_PIPELINES: pipeline = tools.import_cls(pipeline_path)() if not isinstance(pipeline, BasePipeline): raise ValueError(f"{pipeline_path} 需继承 feapder.pipelines.BasePipeline") pipelines.append(pipeline) return pipelines @property def mysql_pipeline(self): if not self._mysql_pipeline: self._mysql_pipeline = tools.import_cls(MYSQL_PIPELINE_PATH)() return self._mysql_pipeline def run(self): self._thread_stop = False while not self._thread_stop: self.flush() tools.delay_time(setting.ITEM_UPLOAD_INTERVAL) self.close() def stop(self): self._thread_stop = True self._started.clear() def put_item(self, item): if isinstance(item, Item): # 入库前的回调 item.pre_to_db() self._items_queue.put(item) def flush(self): try: items = [] update_items = [] requests = [] callbacks = [] items_fingerprints = [] data_count = 0 while not self._items_queue.empty(): data = self._items_queue.get_nowait() data_count += 1 # data 分类 if callable(data): callbacks.append(data) elif isinstance(data, UpdateItem): update_items.append(data) elif isinstance(data, Item): items.append(data) if setting.ITEM_FILTER_ENABLE: items_fingerprints.append(data.fingerprint) else: # request-redis requests.append(data) if data_count >= setting.ITEM_UPLOAD_BATCH_MAX_SIZE: self.__add_item_to_db( items, update_items, requests, callbacks, items_fingerprints ) items = [] update_items = [] requests = [] callbacks = [] 
items_fingerprints = [] data_count = 0 if data_count: self.__add_item_to_db( items, update_items, requests, callbacks, items_fingerprints ) except Exception as e: log.exception(e) def get_items_count(self): return self._items_queue.qsize() def is_adding_to_db(self): return self._is_adding_to_db def __dedup_items(self, items, items_fingerprints): """ 去重 @param items: @param items_fingerprints: @return: 返回去重后的items, items_fingerprints """ if not items: return items, items_fingerprints is_exists = self.__class__.dedup.get(items_fingerprints) is_exists = is_exists if isinstance(is_exists, list) else [is_exists] dedup_items = [] dedup_items_fingerprints = [] items_count = dedup_items_count = dup_items_count = 0 while is_exists: item = items.pop(0) items_fingerprint = items_fingerprints.pop(0) is_exist = is_exists.pop(0) items_count += 1 if not is_exist: dedup_items.append(item) dedup_items_fingerprints.append(items_fingerprint) dedup_items_count += 1 else: dup_items_count += 1 log.info( "待入库数据 {} 条, 重复 {} 条,实际待入库数据 {} 条".format( items_count, dup_items_count, dedup_items_count ) ) return dedup_items, dedup_items_fingerprints def __pick_items(self, items, is_update_item=False): """ 将每个表之间的数据分开 拆分后 原items为空 @param items: @param is_update_item: @return: """ datas_dict = { # 'table_name': [{}, {}] } while items: item = items.pop(0) # 取item下划线格式的名 # 下划线类的名先从dict中取,没有则现取,然后存入dict。加快下次取的速度 item_name = item.item_name table_name = self._item_tables.get(item_name) if not table_name: table_name = item.table_name self._item_tables[item_name] = table_name if table_name not in datas_dict: datas_dict[table_name] = [] datas_dict[table_name].append(item.to_dict) if is_update_item and table_name not in self._item_update_keys: self._item_update_keys[table_name] = item.update_key return datas_dict def __export_to_db(self, table, datas, is_update=False, update_keys=()): for pipeline in self._pipelines: if is_update: if table == self._task_table and not isinstance( pipeline, MysqlPipeline ): 
continue if not pipeline.update_items(table, datas, update_keys=update_keys): log.error( f"{pipeline.__class__.__name__} 更新数据失败. table: {table} items: {datas}" ) return False else: if not pipeline.save_items(table, datas): log.error( f"{pipeline.__class__.__name__} 保存数据失败. table: {table} items: {datas}" ) return False # 若是任务表, 且上面的pipeline里没mysql,则需调用mysql更新任务 if not self._have_mysql_pipeline and is_update and table == self._task_table: if not self.mysql_pipeline.update_items( table, datas, update_keys=update_keys ): log.error( f"{self.mysql_pipeline.__class__.__name__} 更新数据失败. table: {table} items: {datas}" ) return False self.metric_datas(table=table, datas=datas) return True def __add_item_to_db( self, items, update_items, requests, callbacks, items_fingerprints ): export_success = True self._is_adding_to_db = True # 去重 if setting.ITEM_FILTER_ENABLE: items, items_fingerprints = self.__dedup_items(items, items_fingerprints) # 分捡 items_dict = self.__pick_items(items) update_items_dict = self.__pick_items(update_items, is_update_item=True) # item批量入库 failed_items = {"add": [], "update": [], "requests": []} while items_dict: table, datas = items_dict.popitem() log.debug( """ -------------- item 批量入库 -------------- 表名: %s datas: %s """ % (table, tools.dumps_json(datas, indent=16)) ) if not self.__export_to_db(table, datas): export_success = False failed_items["add"].append({"table": table, "datas": datas}) # 执行批量update while update_items_dict: table, datas = update_items_dict.popitem() log.debug( """ -------------- item 批量更新 -------------- 表名: %s datas: %s """ % (table, tools.dumps_json(datas, indent=16)) ) update_keys = self._item_update_keys.get(table) if not self.__export_to_db( table, datas, is_update=True, update_keys=update_keys ): export_success = False failed_items["update"].append( {"table": table, "datas": datas, "update_keys": update_keys} ) if export_success: # 执行回调 while callbacks: try: callback = callbacks.pop(0) callback() except Exception as e: 
log.exception(e) # 删除做过的request if requests: self.redis_db.zrem(self._table_request, requests) # 去重入库 if setting.ITEM_FILTER_ENABLE: if items_fingerprints: self.__class__.dedup.add(items_fingerprints, skip_check=True) else: failed_items["requests"] = requests if self.export_retry_times > setting.EXPORT_DATA_MAX_RETRY_TIMES: if self._redis_key != "air_spider": # 失败的item记录到redis self.redis_db.sadd(self._table_failed_items, failed_items) # 删除做过的request if requests: self.redis_db.zrem(self._table_request, requests) log.error( "入库超过最大重试次数,不再重试,数据记录到redis,items:\n {}".format( tools.dumps_json(failed_items) ) ) self.export_retry_times = 0 else: tip = ["入库不成功"] if callbacks: tip.append("不执行回调") if requests: tip.append("不删除任务") exists = self.redis_db.zexists(self._table_request, requests) for exist, request in zip(exists, requests): if exist: self.redis_db.zadd(self._table_request, requests, 300) if setting.ITEM_FILTER_ENABLE: tip.append("数据不入去重库") if self._redis_key != "air_spider": tip.append("将自动重试") tip.append("失败items:\n {}".format(tools.dumps_json(failed_items))) log.error(",".join(tip)) self.export_falied_times += 1 if self._redis_key != "air_spider": self.export_retry_times += 1 if self.export_falied_times > setting.EXPORT_DATA_MAX_FAILED_TIMES: # 报警 msg = "《{}》爬虫导出数据失败,失败次数:{},请检查爬虫是否正常".format( self._redis_key, self.export_falied_times ) log.error(msg) tools.send_msg( msg=msg, level="error", message_prefix="《%s》爬虫导出数据失败" % (self._redis_key), ) self._is_adding_to_db = False def metric_datas(self, table, datas): """ 打点 记录总条数及每个key情况 @param table: 表名 @param datas: 数据 列表 @return: """ total_count = 0 for data in datas: total_count += 1 for k, v in data.items(): metrics.emit_counter(k, int(bool(v)), classify=table) metrics.emit_counter("total count", total_count, classify=table) def close(self): # 调用pipeline的close方法 for pipeline in self._pipelines: try: pipeline.close() except: pass
14,141
Python
.py
338
26.245562
102
0.515357
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,843
request_buffer.py
demigody_nas-tools/third_party/feapder/feapder/buffer/request_buffer.py
# -*- coding: utf-8 -*- """ Created on 2018-06-19 17:17 --------- @summary: request 管理器, 负责缓冲添加到数据库中的request --------- @author: Boris @email: [email protected] """ import collections import threading import feapder.setting as setting import feapder.utils.tools as tools from feapder.db.memorydb import MemoryDB from feapder.db.redisdb import RedisDB from feapder.dedup import Dedup from feapder.utils.log import log MAX_URL_COUNT = 1000 # 缓存中最大request数 class AirSpiderRequestBuffer: dedup = None def __init__(self, db=None, dedup_name: str = None): self._db = db or MemoryDB() if not self.__class__.dedup and setting.REQUEST_FILTER_ENABLE: if dedup_name: self.__class__.dedup = Dedup( name=dedup_name, to_md5=False, **setting.REQUEST_FILTER_SETTING ) # 默认使用内存去重 else: self.__class__.dedup = Dedup( to_md5=False, **setting.REQUEST_FILTER_SETTING ) # 默认使用内存去重 def is_exist_request(self, request): if ( request.filter_repeat and setting.REQUEST_FILTER_ENABLE and not self.__class__.dedup.add(request.fingerprint) ): log.debug("request已存在 url = %s" % request.url) return True return False def put_request(self, request, ignore_max_size=True): if self.is_exist_request(request): return else: self._db.add(request, ignore_max_size=ignore_max_size) class RequestBuffer(AirSpiderRequestBuffer, threading.Thread): def __init__(self, redis_key): AirSpiderRequestBuffer.__init__(self, db=RedisDB(), dedup_name=redis_key) threading.Thread.__init__(self) self._thread_stop = False self._is_adding_to_db = False self._requests_deque = collections.deque() self._del_requests_deque = collections.deque() self._table_request = setting.TAB_REQUESTS.format(redis_key=redis_key) self._table_failed_request = setting.TAB_FAILED_REQUESTS.format( redis_key=redis_key ) def run(self): self._thread_stop = False while not self._thread_stop: try: self.__add_request_to_db() except Exception as e: log.exception(e) tools.delay_time(1) def stop(self): self._thread_stop = True self._started.clear() def put_request(self, 
request): self._requests_deque.append(request) if self.get_requests_count() > MAX_URL_COUNT: # 超过最大缓存,主动调用 self.flush() def put_del_request(self, request): self._del_requests_deque.append(request) def put_failed_request(self, request, table=None): try: request_dict = request.to_dict self._db.zadd( table or self._table_failed_request, request_dict, request.priority ) except Exception as e: log.exception(e) def flush(self): try: self.__add_request_to_db() except Exception as e: log.exception(e) def get_requests_count(self): return len(self._requests_deque) def is_adding_to_db(self): return self._is_adding_to_db def __add_request_to_db(self): request_list = [] prioritys = [] callbacks = [] while self._requests_deque: request = self._requests_deque.popleft() self._is_adding_to_db = True if callable(request): # 函数 # 注意:应该考虑闭包情况。闭包情况可写成 # def test(xxx = xxx): # # TODO 业务逻辑 使用 xxx # 这么写不会导致xxx为循环结束后的最后一个值 callbacks.append(request) continue priority = request.priority # 如果需要去重并且库中已重复 则continue if self.is_exist_request(request): continue else: request_list.append(str(request.to_dict)) prioritys.append(priority) if len(request_list) > MAX_URL_COUNT: self._db.zadd(self._table_request, request_list, prioritys) request_list = [] prioritys = [] # 入库 if request_list: self._db.zadd(self._table_request, request_list, prioritys) # 执行回调 for callback in callbacks: try: callback() except Exception as e: log.exception(e) # 删除已做任务 if self._del_requests_deque: request_done_list = [] while self._del_requests_deque: request_done_list.append(self._del_requests_deque.popleft()) # 去掉request_list中的requests, 否则可能会将刚添加的request删除 request_done_list = list(set(request_done_list) - set(request_list)) if request_done_list: self._db.zrem(self._table_request, request_done_list) self._is_adding_to_db = False
5,356
Python
.py
136
26.985294
83
0.580171
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,844
__init__.py
demigody_nas-tools/third_party/feapder/feapder/buffer/__init__.py
# -*- coding: utf-8 -*- ''' Created on 2020/4/23 12:09 AM --------- @summary: --------- @author: Boris @email: [email protected] '''
136
Python
.py
9
14.222222
29
0.570313
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,845
redisdb.py
demigody_nas-tools/third_party/feapder/feapder/db/redisdb.py
# -*- coding: utf-8 -*- """ Created on 2016-11-16 16:25 --------- @summary: 操作redis数据库 --------- @author: Boris """ import os import time import redis from redis.connection import Encoder as _Encoder from redis.exceptions import ConnectionError, TimeoutError from redis.exceptions import DataError from redis.sentinel import Sentinel import feapder.setting as setting from feapder.utils.log import log class Encoder(_Encoder): def encode(self, value): "Return a bytestring or bytes-like representation of the value" if isinstance(value, (bytes, memoryview)): return value # elif isinstance(value, bool): # # special case bool since it is a subclass of int # raise DataError( # "Invalid input of type: 'bool'. Convert to a " # "bytes, string, int or float first." # ) elif isinstance(value, float): value = repr(value).encode() elif isinstance(value, int): # python 2 repr() on longs is '123L', so use str() instead value = str(value).encode() elif isinstance(value, (list, dict, tuple)): value = str(value) elif not isinstance(value, str): # a value we don't know how to deal with. throw an error typename = type(value).__name__ raise DataError( "Invalid input of type: '%s'. Convert to a " "bytes, string, int or float first." 
% typename ) if isinstance(value, str): value = value.encode(self.encoding, self.encoding_errors) return value redis.connection.Encoder = Encoder class RedisDB: def __init__( self, ip_ports=None, db=None, user_pass=None, url=None, decode_responses=True, service_name=None, max_connections=1000, **kwargs, ): """ redis的封装 Args: ip_ports: ip:port 多个可写为列表或者逗号隔开 如 ip1:port1,ip2:port2 或 ["ip1:port1", "ip2:port2"] db: user_pass: url: decode_responses: service_name: 适用于redis哨兵模式 max_connections: 同一个redis对象使用的并发数(连接池的最大连接数),超过这个数量会抛出redis.ConnectionError """ # 可能会改setting中的值,所以此处不能直接赋值为默认值,需要后加载赋值 if ip_ports is None: ip_ports = setting.REDISDB_IP_PORTS if db is None: db = setting.REDISDB_DB if user_pass is None: user_pass = setting.REDISDB_USER_PASS if service_name is None: service_name = setting.REDISDB_SERVICE_NAME if kwargs is None: kwargs = setting.REDISDB_KWARGS self._is_redis_cluster = False self.__redis = None self._url = url self._ip_ports = ip_ports self._db = db self._user_pass = user_pass self._decode_responses = decode_responses self._service_name = service_name self._max_connections = max_connections self._kwargs = kwargs self.get_connect() def __repr__(self): if self._url: return "<Redisdb url:{}>".format(self._url) return "<Redisdb ip_ports: {} db:{} user_pass:{}>".format( self._ip_ports, self._db, self._user_pass ) @property def _redis(self): try: if not self.__redis.ping(): raise ConnectionError("unable to connect to redis") except: self._reconnect() return self.__redis @_redis.setter def _redis(self, val): self.__redis = val def get_connect(self): # 获取数据库连接 try: if not self._url: if not self._ip_ports: raise ConnectionError("未设置 redis 连接信息") ip_ports = ( self._ip_ports if isinstance(self._ip_ports, list) else self._ip_ports.split(",") ) if len(ip_ports) > 1: startup_nodes = [] for ip_port in ip_ports: ip, port = ip_port.split(":") startup_nodes.append({"host": ip, "port": port}) if self._service_name: # log.debug("使用redis哨兵模式") hosts = [(node["host"], 
node["port"]) for node in startup_nodes] sentinel = Sentinel(hosts, socket_timeout=3, **self._kwargs) self._redis = sentinel.master_for( self._service_name, password=self._user_pass, db=self._db, redis_class=redis.StrictRedis, decode_responses=self._decode_responses, max_connections=self._max_connections, **self._kwargs, ) else: try: from rediscluster import RedisCluster except ModuleNotFoundError as e: log.error('请安装 pip install "feapder[all]"') os._exit(0) # log.debug("使用redis集群模式") self._redis = RedisCluster( startup_nodes=startup_nodes, decode_responses=self._decode_responses, password=self._user_pass, max_connections=self._max_connections, **self._kwargs, ) self._is_redis_cluster = True else: ip, port = ip_ports[0].split(":") self._redis = redis.StrictRedis( host=ip, port=port, db=self._db, password=self._user_pass, decode_responses=self._decode_responses, max_connections=self._max_connections, **self._kwargs, ) self._is_redis_cluster = False else: self._redis = redis.StrictRedis.from_url( self._url, decode_responses=self._decode_responses, **self._kwargs ) self._is_redis_cluster = False except Exception as e: raise e # 不要写成self._redis.ping() 否则循环调用了 return self.__redis.ping() @classmethod def from_url(cls, url): """ Args: url: redis://[[username]:[password]]@[host]:[port]/[db] Returns: """ return cls(url=url) def sadd(self, table, values): """ @summary: 使用无序set集合存储数据, 去重 --------- @param table: @param values: 值; 支持list 或 单个值 --------- @result: 若库中存在 返回0,否则入库,返回1。 批量添加返回None """ if isinstance(values, list): pipe = self._redis.pipeline() if not self._is_redis_cluster: pipe.multi() for value in values: pipe.sadd(table, value) pipe.execute() else: return self._redis.sadd(table, values) def sget(self, table, count=1, is_pop=True): """ 返回 list 如 ['1'] 或 [] @param table: @param count: @param is_pop: @return: """ datas = [] if is_pop: count = count if count <= self.sget_count(table) else self.sget_count(table) if count: if count > 1: pipe = self._redis.pipeline() if 
not self._is_redis_cluster: pipe.multi() while count: pipe.spop(table) count -= 1 datas = pipe.execute() else: datas.append(self._redis.spop(table)) else: datas = self._redis.srandmember(table, count) return datas def srem(self, table, values): """ @summary: 移除集合中的指定元素 --------- @param table: @param values: 一个或者列表 --------- @result: """ if isinstance(values, list): pipe = self._redis.pipeline() if not self._is_redis_cluster: pipe.multi() for value in values: pipe.srem(table, value) pipe.execute() else: self._redis.srem(table, values) def sget_count(self, table): return self._redis.scard(table) def sdelete(self, table): """ @summary: 删除set集合的大键(数据量大的表) 删除大set键,使用sscan命令,每次扫描集合中500个元素,再用srem命令每次删除一个键 若直接用delete命令,会导致Redis阻塞,出现故障切换和应用程序崩溃的故障。 --------- @param table: --------- @result: """ # 当 SCAN 命令的游标参数被设置为 0 时, 服务器将开始一次新的迭代, 而当服务器向用户返回值为 0 的游标时, 表示迭代已结束 cursor = "0" while cursor != 0: cursor, data = self._redis.sscan(table, cursor=cursor, count=500) for item in data: # pipe.srem(table, item) self._redis.srem(table, item) # pipe.execute() def sismember(self, table, key): "Return a boolean indicating if ``value`` is a member of set ``name``" return self._redis.sismember(table, key) def zadd(self, table, values, prioritys=0): """ @summary: 使用有序set集合存储数据, 去重(值存在更新) --------- @param table: @param values: 值; 支持list 或 单个值 @param prioritys: 优先级; double类型,支持list 或 单个值。 根据此字段的值来排序, 值越小越优先。 可不传值,默认value的优先级为0 --------- @result:若库中存在 返回0,否则入库,返回1。 批量添加返回 [0, 1 ...] 
""" if isinstance(values, list): if not isinstance(prioritys, list): prioritys = [prioritys] * len(values) else: assert len(values) == len(prioritys), "values值要与prioritys值一一对应" pipe = self._redis.pipeline() if not self._is_redis_cluster: pipe.multi() for value, priority in zip(values, prioritys): pipe.execute_command( "ZADD", table, priority, value ) # 为了兼容2.x与3.x版本的redis return pipe.execute() else: return self._redis.execute_command( "ZADD", table, prioritys, values ) # 为了兼容2.x与3.x版本的redis def zget(self, table, count=1, is_pop=True): """ @summary: 从有序set集合中获取数据 优先返回分数小的(优先级高的) --------- @param table: @param count: 数量 -1 返回全部数据 @param is_pop:获取数据后,是否在原set集合中删除,默认是 --------- @result: 列表 """ start_pos = 0 # 包含 end_pos = count - 1 if count > 0 else count pipe = self._redis.pipeline() if not self._is_redis_cluster: pipe.multi() # 标记事务的开始 参考 http://www.runoob.com/redis/redis-transactions.html pipe.zrange(table, start_pos, end_pos) # 取值 if is_pop: pipe.zremrangebyrank(table, start_pos, end_pos) # 删除 results, *count = pipe.execute() return results def zremrangebyscore(self, table, priority_min, priority_max): """ 根据分数移除成员 闭区间 @param table: @param priority_min: @param priority_max: @return: 被移除的成员个数 """ return self._redis.zremrangebyscore(table, priority_min, priority_max) def zrangebyscore(self, table, priority_min, priority_max, count=None, is_pop=True): """ @summary: 返回指定分数区间的数据 闭区间 --------- @param table: @param priority_min: 优先级越小越优先 @param priority_max: @param count: 获取的数量,为空则表示分数区间内的全部数据 @param is_pop: 是否删除 --------- @result: """ # 使用lua脚本, 保证操作的原子性 lua = """ -- local key = KEYS[1] local min_score = ARGV[2] local max_score = ARGV[3] local is_pop = ARGV[4] local count = ARGV[5] -- 取值 local datas = nil if count then datas = redis.call('zrangebyscore', KEYS[1], min_score, max_score, 'limit', 0, count) else datas = redis.call('zrangebyscore', KEYS[1], min_score, max_score) end -- 删除redis中刚取到的值 if (is_pop=='True' or is_pop=='1') then for i=1, #datas do redis.call('zrem', 
KEYS[1], datas[i]) end end return datas """ cmd = self._redis.register_script(lua) if count: res = cmd( keys=[table], args=[table, priority_min, priority_max, is_pop, count] ) else: res = cmd(keys=[table], args=[table, priority_min, priority_max, is_pop]) return res def zrangebyscore_increase_score( self, table, priority_min, priority_max, increase_score, count=None ): """ @summary: 返回指定分数区间的数据 闭区间, 同时修改分数 --------- @param table: @param priority_min: 最小分数 @param priority_max: 最大分数 @param increase_score: 分数值增量 正数则在原有的分数上叠加,负数则相减 @param count: 获取的数量,为空则表示分数区间内的全部数据 --------- @result: """ # 使用lua脚本, 保证操作的原子性 lua = """ -- local key = KEYS[1] local min_score = ARGV[1] local max_score = ARGV[2] local increase_score = ARGV[3] local count = ARGV[4] -- 取值 local datas = nil if count then datas = redis.call('zrangebyscore', KEYS[1], min_score, max_score, 'limit', 0, count) else datas = redis.call('zrangebyscore', KEYS[1], min_score, max_score) end --修改优先级 for i=1, #datas do redis.call('zincrby', KEYS[1], increase_score, datas[i]) end return datas """ cmd = self._redis.register_script(lua) if count: res = cmd( keys=[table], args=[priority_min, priority_max, increase_score, count] ) else: res = cmd(keys=[table], args=[priority_min, priority_max, increase_score]) return res def zrangebyscore_set_score( self, table, priority_min, priority_max, score, count=None ): """ @summary: 返回指定分数区间的数据 闭区间, 同时修改分数 --------- @param table: @param priority_min: 最小分数 @param priority_max: 最大分数 @param score: 分数值 @param count: 获取的数量,为空则表示分数区间内的全部数据 --------- @result: """ # 使用lua脚本, 保证操作的原子性 lua = """ -- local key = KEYS[1] local min_score = ARGV[1] local max_score = ARGV[2] local set_score = ARGV[3] local count = ARGV[4] -- 取值 local datas = nil if count then datas = redis.call('zrangebyscore', KEYS[1], min_score, max_score, 'withscores','limit', 0, count) else datas = redis.call('zrangebyscore', KEYS[1], min_score, max_score, 'withscores') end local real_datas = {} -- 数据 --修改优先级 for i=1, #datas, 2 
do local data = datas[i] local score = datas[i+1] table.insert(real_datas, data) -- 添加数据 redis.call('zincrby', KEYS[1], set_score - score, datas[i]) end return real_datas """ cmd = self._redis.register_script(lua) if count: res = cmd(keys=[table], args=[priority_min, priority_max, score, count]) else: res = cmd(keys=[table], args=[priority_min, priority_max, score]) return res def zincrby(self, table, amount, value): return self._redis.zincrby(table, amount, value) def zget_count(self, table, priority_min=None, priority_max=None): """ @summary: 获取表数据的数量 --------- @param table: @param priority_min:优先级范围 最小值(包含) @param priority_max:优先级范围 最大值(包含) --------- @result: """ if priority_min != None and priority_max != None: return self._redis.zcount(table, priority_min, priority_max) else: return self._redis.zcard(table) def zrem(self, table, values): """ @summary: 移除集合中的指定元素 --------- @param table: @param values: 一个或者列表 --------- @result: """ if isinstance(values, list): self._redis.zrem(table, *values) else: self._redis.zrem(table, values) def zexists(self, table, values): """ 利用zscore判断某元素是否存在 @param values: @return: """ is_exists = [] if isinstance(values, list): pipe = self._redis.pipeline() pipe.multi() for value in values: pipe.zscore(table, value) is_exists_temp = pipe.execute() for is_exist in is_exists_temp: if is_exist != None: is_exists.append(1) else: is_exists.append(0) else: is_exists = self._redis.zscore(table, values) is_exists = 1 if is_exists != None else 0 return is_exists def lpush(self, table, values): if isinstance(values, list): pipe = self._redis.pipeline() if not self._is_redis_cluster: pipe.multi() for value in values: pipe.lpush(table, value) pipe.execute() else: return self._redis.lpush(table, values) def lpop(self, table, count=1): """ @summary: --------- @param table: @param count: --------- @result: count>1时返回列表 """ datas = None lcount = self.lget_count(table) count = count if count <= lcount else lcount if count: if count > 1: pipe = 
self._redis.pipeline() if not self._is_redis_cluster: pipe.multi() while count: pipe.lpop(table) count -= 1 datas = pipe.execute() else: datas = self._redis.lpop(table) return datas def rpoplpush(self, from_table, to_table=None): """ 将列表 from_table 中的最后一个元素(尾元素)弹出,并返回给客户端。 将 from_table 弹出的元素插入到列表 to_table ,作为 to_table 列表的的头元素。 如果 from_table 和 to_table 相同,则列表中的表尾元素被移动到表头,并返回该元素,可以把这种特殊情况视作列表的旋转(rotation)操作 @param from_table: @param to_table: @return: """ if not to_table: to_table = from_table return self._redis.rpoplpush(from_table, to_table) def lget_count(self, table): return self._redis.llen(table) def lrem(self, table, value, num=0): """ @summary: 删除value --------- @param table: @param value: @param num: --------- @result: 删除的条数 """ return self._redis.lrem(table, num, value) def lrange(self, table, start=0, end=-1): return self._redis.lrange(table, start, end) def hset(self, table, key, value): """ @summary: 如果 key 不存在,一个新的哈希表被创建并进行 HSET 操作。 如果域 field 已经存在于哈希表中,旧值将被覆盖 --------- @param table: @param key: @param value: --------- @result: 1 新插入; 0 覆盖 """ return self._redis.hset(table, key, value) def hset_batch(self, table, datas): """ 批量插入 Args: datas: [[key, value]] Returns: """ pipe = self._redis.pipeline() if not self._is_redis_cluster: pipe.multi() for key, value in datas: pipe.hset(table, key, value) return pipe.execute() def hincrby(self, table, key, increment): return self._redis.hincrby(table, key, increment) def hget(self, table, key, is_pop=False): if not is_pop: return self._redis.hget(table, key) else: lua = """ -- local key = KEYS[1] local field = ARGV[1] -- 取值 local datas = redis.call('hget', KEYS[1], field) -- 删除值 redis.call('hdel', KEYS[1], field) return datas """ cmd = self._redis.register_script(lua) res = cmd(keys=[table], args=[key]) return res def hgetall(self, table): return self._redis.hgetall(table) def hexists(self, table, key): return self._redis.hexists(table, key) def hdel(self, table, *keys): """ @summary: 删除对应的key 可传多个 --------- 
@param table: @param *keys: --------- @result: """ self._redis.hdel(table, *keys) def hget_count(self, table): return self._redis.hlen(table) def hkeys(self, table): return self._redis.hkeys(table) def setbit(self, table, offsets, values): """ 设置字符串数组某一位的值, 返回之前的值 @param table: @param offsets: 支持列表或单个值 @param values: 支持列表或单个值 @return: list / 单个值 """ if isinstance(offsets, list): if not isinstance(values, list): values = [values] * len(offsets) else: assert len(offsets) == len(values), "offsets值要与values值一一对应" pipe = self._redis.pipeline() pipe.multi() for offset, value in zip(offsets, values): pipe.setbit(table, offset, value) return pipe.execute() else: return self._redis.setbit(table, offsets, values) def getbit(self, table, offsets): """ 取字符串数组某一位的值 @param table: @param offsets: 支持列表 @return: list / 单个值 """ if isinstance(offsets, list): pipe = self._redis.pipeline() pipe.multi() for offset in offsets: pipe.getbit(table, offset) return pipe.execute() else: return self._redis.getbit(table, offsets) def bitcount(self, table): return self._redis.bitcount(table) def strset(self, table, value, **kwargs): return self._redis.set(table, value, **kwargs) def str_incrby(self, table, value): return self._redis.incrby(table, value) def strget(self, table): return self._redis.get(table) def strlen(self, table): return self._redis.strlen(table) def getkeys(self, regex): return self._redis.keys(regex) def exists_key(self, key): return self._redis.exists(key) def set_expire(self, key, seconds): """ @summary: 设置过期时间 --------- @param key: @param seconds: 秒 --------- @result: """ self._redis.expire(key, seconds) def get_expire(self, key): """ @summary: 查询过期时间 --------- @param key: @param seconds: 秒 --------- @result: """ return self._redis.ttl(key) def clear(self, table): try: self._redis.delete(table) except Exception as e: log.error(e) def get_redis_obj(self): return self._redis def _reconnect(self): # 检测连接状态, 当数据库重启或设置 timeout 导致断开连接时自动重连 retry_count = 0 while True: try: 
retry_count += 1 log.error(f"redis 连接断开, 重新连接 {retry_count}") if self.get_connect(): log.info(f"redis 连接成功") return True except (ConnectionError, TimeoutError) as e: log.error(f"连接失败 e: {e}") time.sleep(2) def __getattr__(self, name): return getattr(self._redis, name) def current_status(self, show_key=True, filter_key_by_used_memory=10 * 1024 * 1024): """ 统计redis当前使用情况 Args: show_key: 是否统计每个key的内存 filter_key_by_used_memory: 根据内存的使用量过滤key 只显示使用量大于指定内存的key Returns: """ from prettytable import PrettyTable from tqdm import tqdm status_msg = "" print("正在查询最大连接数...") clients_count = self._redis.execute_command("info clients") max_clients_count = self._redis.execute_command("config get maxclients") status_msg += ": ".join(max_clients_count) + "\n" status_msg += clients_count + "\n" print("正在查询整体内存使用情况...") total_status = self._redis.execute_command("info memory") status_msg += total_status + "\n" if show_key: print("正在查询每个key占用内存情况等信息...") table = PrettyTable( field_names=[ "type", "key", "value_count", "used_memory_human", "used_memory", ], sortby="used_memory", reversesort=True, header_style="title", ) keys = self._redis.execute_command("keys *") for key in tqdm(keys): key_type = self._redis.execute_command("type {}".format(key)) if key_type == "set": value_count = self._redis.scard(key) elif key_type == "zset": value_count = self._redis.zcard(key) elif key_type == "list": value_count = self._redis.llen(key) elif key_type == "hash": value_count = self._redis.hlen(key) elif key_type == "string": value_count = self._redis.strlen(key) elif key_type == "none": continue else: raise TypeError("尚不支持 {} 类型的key".format(key_type)) used_memory = self._redis.execute_command("memory usage {}".format(key)) if used_memory >= filter_key_by_used_memory: used_memory_human = ( "%0.2fMB" % (used_memory / 1024 / 1024) if used_memory else 0 ) table.add_row( [key_type, key, value_count, used_memory_human, used_memory] ) status_msg += str(table) return status_msg
29,545
Python
.py
776
23.069588
114
0.503166
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,846
memorydb.py
demigody_nas-tools/third_party/feapder/feapder/db/memorydb.py
# -*- coding: utf-8 -*- """ Created on 2020/4/21 11:42 PM --------- @summary: 基于内存的队列,代替redis --------- @author: Boris @email: [email protected] """ from queue import PriorityQueue from feapder import setting class MemoryDB: def __init__(self): self.priority_queue = PriorityQueue(maxsize=setting.TASK_MAX_CACHED_SIZE) def add(self, item, ignore_max_size=False): """ 添加任务 :param item: 数据: 支持小于号比较的类 或者 (priority, item) :param ignore_max_size: queue满时是否等待,为True时无视队列的maxsize,直接往里塞 :return: """ if ignore_max_size: self.priority_queue._put(item) self.priority_queue.unfinished_tasks += 1 else: self.priority_queue.put(item) def get(self): """ 获取任务 :return: """ try: item = self.priority_queue.get(timeout=1) return item except: return def empty(self): return self.priority_queue.empty()
1,117
Python
.py
38
19.763158
81
0.588418
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,847
__init__.py
demigody_nas-tools/third_party/feapder/feapder/db/__init__.py
# -*- coding: utf-8 -*- """ Created on 2020/4/23 12:09 AM --------- @summary: --------- @author: Boris @email: [email protected] """
136
Python
.py
9
14.222222
29
0.570313
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,848
mongodb.py
demigody_nas-tools/third_party/feapder/feapder/db/mongodb.py
# -*- coding: utf-8 -*- """ Created on 2021-04-18 14:12:21 --------- @summary: 操作mongo数据库 --------- @author: Mkdir700 @email: [email protected] """ import re from typing import List, Dict, Optional from urllib import parse import pymongo from pymongo import MongoClient from pymongo.collection import Collection from pymongo.database import Database from pymongo.errors import DuplicateKeyError, BulkWriteError import feapder.setting as setting from feapder.utils.log import log class MongoDB: def __init__( self, ip=None, port=None, db=None, user_name=None, user_pass=None, url=None, **kwargs, ): if url: self.client = MongoClient(url, **kwargs) else: if not ip: ip = setting.MONGO_IP if not port: port = setting.MONGO_PORT if not db: db = setting.MONGO_DB if not user_name: user_name = setting.MONGO_USER_NAME if not user_pass: user_pass = setting.MONGO_USER_PASS self.client = MongoClient( host=ip, port=port, username=user_name, password=user_pass ) self.db = self.get_database(db) # 缓存索引信息 self.__index__cached = {} @classmethod def from_url(cls, url, **kwargs): """ Args: url: mongodb://[username:password@]host1[:port1][,host2[:port2],...[,hostN[:portN]]][/[database][?options]] 参考:http://mongodb.github.io/mongo-java-driver/3.4/javadoc/com/mongodb/MongoClientURI.html **kwargs: Returns: """ url_parsed = parse.urlparse(url) db_type = url_parsed.scheme.strip() if db_type != "mongodb": raise Exception( "url error, expect mongodb://[username:password@]host1[:port1][,host2[:port2],...[,hostN[:portN]]][/[database][?options]], but get {}".format( url ) ) return cls(url=url, **kwargs) def get_database(self, database, **kwargs) -> Database: """ 获取数据库对象 @param database: 数据库名 @return: """ return self.client.get_database(database, **kwargs) def get_collection(self, coll_name, **kwargs) -> Collection: """ 根据集合名获取集合对象 @param coll_name: 集合名 @return: """ return self.db.get_collection(coll_name, **kwargs) def find( self, coll_name: str, condition: Optional[Dict] = None, limit: int = 0, **kwargs 
) -> List[Dict]: """ @summary: 无数据: 返回[] 有数据: [{'_id': 'xx', ...}, ...] --------- @param coll_name: 集合名(表名) @param condition: 查询条件 @param limit: 结果数量 @param kwargs: 更多参数 https://docs.mongodb.com/manual/reference/command/find/#command-fields --------- @result: """ condition = {} if condition is None else condition command = {"find": coll_name, "filter": condition, "limit": limit} command.update(kwargs) result = self.run_command(command) cursor = result["cursor"] cursor_id = cursor["id"] dataset = cursor["firstBatch"] while True: if cursor_id == 0: break result = self.run_command( { "getMore": cursor_id, "collection": coll_name, "batchSize": kwargs.get("batchSize", 100), } ) cursor = result["cursor"] cursor_id = cursor["id"] dataset.extend(cursor["nextBatch"]) return dataset def add( self, coll_name, data: Dict, replace=False, update_columns=(), update_columns_value=(), insert_ignore=False, ): """ 添加单条数据 Args: coll_name: 集合名 data: 单条数据 replace: 唯一索引冲突时直接覆盖旧数据,默认为False update_columns: 更新指定的列(如果数据唯一索引冲突,则更新指定字段,如 update_columns = ["name", "title"] update_columns_value: 指定更新的字段对应的值, 不指定则用数据本身的值更新 insert_ignore: 索引冲突是否忽略 默认False Returns: 插入成功的行数 """ affect_count = 1 collection = self.get_collection(coll_name) try: collection.insert_one(data) except DuplicateKeyError as e: # 存在则更新 if update_columns: if not isinstance(update_columns, (tuple, list)): update_columns = [update_columns] condition = self.__get_update_condition( coll_name, data, e.details.get("errmsg") ) # 更新指定的列 if update_columns_value: # 使用指定的值更新 doc = { key: value for key, value in zip(update_columns, update_columns_value) } else: # 使用数据本身的值更新 doc = {key: data[key] for key in update_columns} collection.update_one(condition, {"$set": doc}) # 覆盖更新 elif replace: condition = self.__get_update_condition( coll_name, data, e.details.get("errmsg") ) # 替换已存在的数据 collection.replace_one(condition, data) elif not insert_ignore: raise e return affect_count def add_batch( self, coll_name: str, datas: List[Dict], 
replace=False, update_columns=(), update_columns_value=(), condition_fields: dict = None, ): """ 批量添加数据 Args: coll_name: 集合名 datas: 数据 [{'_id': 'xx'}, ... ] replace: 唯一索引冲突时直接覆盖旧数据,默认为False update_columns: 更新指定的列(如果数据的唯一索引存在,则更新指定字段,如 update_columns = ["name", "title"] update_columns_value: 指定更新的字段对应的值, 不指定则用数据本身的值更新 condition_fields: 用于条件查找的字段,不指定则用索引冲突中的字段查找 Returns: 添加行数,不包含更新 """ add_count = 0 if not datas: return add_count collection = self.get_collection(coll_name) if not isinstance(update_columns, (tuple, list)): update_columns = [update_columns] try: add_count = len(datas) collection.insert_many(datas, ordered=False) except BulkWriteError as e: write_errors = e.details.get("writeErrors") for error in write_errors: if error.get("code") == 11000: # 数据重复 # 获取重复的数据 data = error.get("op") def get_condition(): # 获取更新条件 if condition_fields: condition = { condition_field: data[condition_field] for condition_field in condition_fields } else: # 根据重复的值获取更新条件 condition = self.__get_update_condition( coll_name, data, error.get("errmsg") ) return condition if update_columns: # 更新指定的列 if update_columns_value: # 使用指定的值更新 doc = { key: value for key, value in zip( update_columns, update_columns_value ) } else: # 使用数据本身的值更新 doc = {key: data.get(key) for key in update_columns} collection.update_one(get_condition(), {"$set": doc}) add_count -= 1 elif replace: # 覆盖更新 collection.replace_one(get_condition(), data) add_count -= 1 else: # log.error(error) add_count -= 1 return add_count def count(self, coll_name, condition: Optional[Dict], limit=0, **kwargs): """ 计数 @param coll_name: 集合名 @param condition: 查询条件 @param limit: 限制数量 @param kwargs: ---- command = { count: <collection or view>, query: <document>, limit: <integer>, skip: <integer>, hint: <hint>, readConcern: <document>, collation: <document>, comment: <any> } https://docs.mongodb.com/manual/reference/command/count/#mongodb-dbcommand-dbcmd.count @return: 数据数量 """ command = {"count": coll_name, "query": condition, "limit": 
limit, **kwargs} result = self.run_command(command) return result["n"] def update(self, coll_name, data: Dict, condition: Dict, upsert: bool = False): """ 更新 Args: coll_name: 集合名 data: 单条数据 {"xxx":"xxx"} condition: 更新条件 {"_id": "xxxx"} upsert: 数据不存在则插入,默认为 False Returns: True / False """ try: collection = self.get_collection(coll_name) collection.update_one(condition, {"$set": data}, upsert=upsert) except Exception as e: log.error( """ error:{} condition: {} """.format( e, condition ) ) return False else: return True def delete(self, coll_name, condition: Dict) -> bool: """ 删除 Args: coll_name: 集合名 condition: 查找条件 Returns: True / False """ try: collection = self.get_collection(coll_name) collection.delete_one(condition) except Exception as e: log.error( """ error:{} condition: {} """.format( e, condition ) ) return False else: return True def run_command(self, command: Dict): """ 运行指令 参考文档 https://www.geek-book.com/src/docs/mongodb/mongodb/docs.mongodb.com/manual/reference/command/index.html @param command: @return: """ return self.db.command(command) def create_index(self, coll_name, keys, unique=True): collection = self.get_collection(coll_name) _keys = [(key, pymongo.ASCENDING) for key in keys] collection.create_index(_keys, unique=unique) def get_index(self, coll_name): return self.get_collection(coll_name).index_information() def drop_collection(self, coll_name): return self.db.drop_collection(coll_name) def get_index_key(self, coll_name, index_name): """ 获取参与索引的key Args: index_name: 索引名 Returns: """ cache_key = f"{coll_name}:{index_name}" if cache_key in self.__index__cached: return self.__index__cached.get(cache_key) index = self.get_index(coll_name) index_detail = index.get(index_name) if not index_detail: errmsg = f"not found index {index_name} in collection {coll_name}" raise Exception(errmsg) index_keys = [val[0] for val in index_detail.get("key")] self.__index__cached[cache_key] = index_keys return index_keys def __get_update_condition( self, coll_name: 
str, data: dict, duplicate_errmsg: str ) -> dict: """ 根据索引冲突的报错信息 获取更新条件 Args: duplicate_errmsg: E11000 duplicate key error collection: feapder.test index: a_1_b_1 dup key: { : 1, : "你好" } data: {"a": 1, "b": "你好", "c": "嘻嘻"} Returns: {"a": 1, "b": "你好"} """ index_name = re.search(r"index: (\w+)", duplicate_errmsg).group(1) index_keys = self.get_index_key(coll_name, index_name) condition = {key: data.get(key) for key in index_keys} return condition def __getattr__(self, name): return getattr(self.db, name)
13,606
Python
.py
365
22.287671
158
0.497794
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,849
mysqldb.py
demigody_nas-tools/third_party/feapder/feapder/db/mysqldb.py
# -*- coding: utf-8 -*- """ Created on 2016-11-16 16:25 --------- @summary: 操作mysql数据库 --------- @author: Boris @email: [email protected] """ import datetime import json from urllib import parse from typing import List, Dict import pymysql from dbutils.pooled_db import PooledDB from pymysql import cursors from pymysql import err import feapder.setting as setting from feapder.utils.log import log from feapder.utils.tools import make_insert_sql, make_batch_sql, make_update_sql def auto_retry(func): def wapper(*args, **kwargs): for i in range(3): try: return func(*args, **kwargs) except (err.InterfaceError, err.OperationalError) as e: log.error( """ error:%s sql: %s """ % (e, kwargs.get("sql") or args[1]) ) return wapper class MysqlDB: def __init__( self, ip=None, port=None, db=None, user_name=None, user_pass=None, **kwargs ): # 可能会改setting中的值,所以此处不能直接赋值为默认值,需要后加载赋值 if not ip: ip = setting.MYSQL_IP if not port: port = setting.MYSQL_PORT if not db: db = setting.MYSQL_DB if not user_name: user_name = setting.MYSQL_USER_NAME if not user_pass: user_pass = setting.MYSQL_USER_PASS try: self.connect_pool = PooledDB( creator=pymysql, mincached=1, maxcached=100, maxconnections=100, blocking=True, ping=7, host=ip, port=port, user=user_name, passwd=user_pass, db=db, charset="utf8mb4", cursorclass=cursors.SSCursor, ) # cursorclass 使用服务的游标,默认的在多线程下大批量插入数据会使内存递增 except Exception as e: log.error( """ 连接失败: ip: {} port: {} db: {} user_name: {} user_pass: {} exception: {} """.format( ip, port, db, user_name, user_pass, e ) ) else: log.debug("连接到mysql数据库 %s : %s" % (ip, db)) @classmethod def from_url(cls, url, **kwargs): """ Args: url: mysql://username:password@ip:port/db?charset=utf8mb4 **kwargs: Returns: """ url_parsed = parse.urlparse(url) db_type = url_parsed.scheme.strip() if db_type != "mysql": raise Exception( "url error, expect mysql://username:ip:port/db?charset=utf8mb4, but get {}".format( url ) ) connect_params = { "ip": url_parsed.hostname.strip(), "port": url_parsed.port, 
"user_name": url_parsed.username.strip(), "user_pass": url_parsed.password.strip(), "db": url_parsed.path.strip("/").strip(), } connect_params.update(kwargs) return cls(**connect_params) @staticmethod def unescape_string(value): if not isinstance(value, str): return value value = value.replace("\\0", "\0") value = value.replace("\\\\", "\\") value = value.replace("\\n", "\n") value = value.replace("\\r", "\r") value = value.replace("\\Z", "\032") value = value.replace('\\"', '"') value = value.replace("\\'", "'") return value def get_connection(self): conn = self.connect_pool.connection(shareable=False) # cursor = conn.cursor(cursors.SSCursor) cursor = conn.cursor() return conn, cursor def close_connection(self, conn, cursor): if conn: conn.close() if cursor: cursor.close() def size_of_connections(self): """ 当前活跃的连接数 @return: """ return self.connect_pool._connections def size_of_connect_pool(self): """ 池子里一共有多少连接 @return: """ return len(self.connect_pool._idle_cache) @auto_retry def find(self, sql, limit=0, to_json=False, conver_col=True): """ @summary: 无数据: 返回() 有数据: 若limit == 1 则返回 (data1, data2) 否则返回 ((data1, data2),) --------- @param sql: @param limit: @param to_json 是否将查询结果转为json @param conver_col 是否处理查询结果,如date类型转字符串,json字符串转成json。仅当to_json=True时生效 --------- @result: """ conn, cursor = self.get_connection() cursor.execute(sql) if limit == 1: result = cursor.fetchone() # 全部查出来,截取 不推荐使用 elif limit > 1: result = cursor.fetchmany(limit) # 全部查出来,截取 不推荐使用 else: result = cursor.fetchall() if to_json: columns = [i[0] for i in cursor.description] # 处理数据 def convert(col): if isinstance(col, (datetime.date, datetime.time)): return str(col) elif isinstance(col, str) and ( col.startswith("{") or col.startswith("[") ): try: # col = self.unescape_string(col) return json.loads(col) except: return col else: # col = self.unescape_string(col) return col if limit == 1: if conver_col: result = [convert(col) for col in result] result = dict(zip(columns, result)) else: if 
conver_col: result = [[convert(col) for col in row] for row in result] result = [dict(zip(columns, r)) for r in result] self.close_connection(conn, cursor) return result def add(self, sql, exception_callfunc=None): """ Args: sql: exception_callfunc: 异常回调 Returns: 添加行数 """ affect_count = None conn, cursor = None, None try: conn, cursor = self.get_connection() affect_count = cursor.execute(sql) conn.commit() except Exception as e: log.error( """ error:%s sql: %s """ % (e, sql) ) if exception_callfunc: exception_callfunc(e) finally: self.close_connection(conn, cursor) return affect_count def add_smart(self, table, data: Dict, **kwargs): """ 添加数据, 直接传递json格式的数据,不用拼sql Args: table: 表名 data: 字典 {"xxx":"xxx"} **kwargs: Returns: 添加行数 """ sql = make_insert_sql(table, data, **kwargs) return self.add(sql) def add_batch(self, sql, datas: List[List]): """ @summary: 批量添加数据 --------- @ param sql: insert ignore into (xxx,xxx,xxx) values (%s, %s, %s) @ param datas: 列表 [[v1,v2,v3], [v1,v2,v3]] 列表里的值要和插入的key的顺序对应上 --------- @result: 添加行数 """ affect_count = None conn, cursor = None, None try: conn, cursor = self.get_connection() affect_count = cursor.executemany(sql, datas) conn.commit() except Exception as e: log.error( """ error:%s sql: %s """ % (e, sql) ) finally: self.close_connection(conn, cursor) return affect_count def add_batch_smart(self, table, datas: List[Dict], **kwargs): """ 批量添加数据, 直接传递list格式的数据,不用拼sql Args: table: 表名 datas: 列表 [{}, {}, {}] **kwargs: Returns: 添加行数 """ sql, datas = make_batch_sql(table, datas, **kwargs) return self.add_batch(sql, datas) def update(self, sql): conn, cursor = None, None try: conn, cursor = self.get_connection() cursor.execute(sql) conn.commit() except Exception as e: log.error( """ error:%s sql: %s """ % (e, sql) ) return False else: return True finally: self.close_connection(conn, cursor) def update_smart(self, table, data: Dict, condition): """ 更新, 不用拼sql Args: table: 表名 data: 数据 {"xxx":"xxx"} condition: 更新条件 where后面的条件,如 
condition='status=1' Returns: True / False """ sql = make_update_sql(table, data, condition) return self.update(sql) def delete(self, sql): """ 删除 Args: sql: Returns: True / False """ conn, cursor = None, None try: conn, cursor = self.get_connection() cursor.execute(sql) conn.commit() except Exception as e: log.error( """ error:%s sql: %s """ % (e, sql) ) return False else: return True finally: self.close_connection(conn, cursor) def execute(self, sql): conn, cursor = None, None try: conn, cursor = self.get_connection() cursor.execute(sql) conn.commit() except Exception as e: log.error( """ error:%s sql: %s """ % (e, sql) ) return False else: return True finally: self.close_connection(conn, cursor)
10,799
Python
.py
339
18.743363
99
0.474504
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,850
bitarray.py
demigody_nas-tools/third_party/feapder/feapder/dedup/bitarray.py
# -*- coding: utf-8 -*- """ Created on 2018/12/14 1:05 PM --------- @summary: --------- @author: Boris @email: [email protected] """ from __future__ import absolute_import from feapder.db.redisdb import RedisDB class BitArray: def setall(self, value): pass def __repr__(self): raise ImportError("this method mush be implement") def set(self, offsets, values): """ 设置字符串数字某一位的值, 返回之前的值 @param offsets: 支持列表或单个值 @param values: 支持列表或单个值 @return: list / 单个值 """ raise ImportError("this method mush be implement") def get(self, offsets): """ 取字符串数字某一位的值 @param offsets: 支持列表或单个值 @return: list / 单个值 """ raise ImportError("this method mush be implement") def count(self, value=True): raise ImportError("this method mush be implement") class MemoryBitArray(BitArray): def __init__(self, num_bits): try: import bitarray except Exception as e: raise Exception( '需要安装feapder完整版\ncommand: pip install "feapder[all]"\n若安装出错,参考:https://feapder.com/#/question/%E5%AE%89%E8%A3%85%E9%97%AE%E9%A2%98' ) self.num_bits = num_bits self.bitarray = bitarray.bitarray(num_bits, endian="little") self.setall(0) def __repr__(self): return "MemoryBitArray: {}".format(self.num_bits) def setall(self, value): self.bitarray.setall(value) def set(self, offsets, values): """ 设置字符串数字某一位的值, 返回之前的值 @param offsets: 支持列表或单个值 @param values: 支持列表或单个值 @return: list / 单个值 """ old_values = [] if isinstance(offsets, list): if not isinstance(values, list): values = [values] * len(offsets) else: assert len(offsets) == len(values), "offsets值要与values值一一对应" for offset, value in zip(offsets, values): old_values.append(int(self.bitarray[offset])) self.bitarray[offset] = value else: old_values = int(self.bitarray[offsets]) self.bitarray[offsets] = values return old_values def get(self, offsets): """ 取字符串数字某一位的值 @param offsets: 支持列表或单个值 @return: list / 单个值 """ if isinstance(offsets, list): return [self.bitarray[offset] for offset in offsets] else: return self.bitarray[offsets] def count(self, value=True): return 
self.bitarray.count(value) class RedisBitArray(BitArray): """ 仿bitarray 基于redis """ redis_db = None def __init__(self, name, redis_url=None): self.name = name self.count_cached_name = name + "_count_cached" if not self.__class__.redis_db: self.__class__.redis_db = RedisDB(url=redis_url) def __repr__(self): return "RedisBitArray: {}".format(self.name) def set(self, offsets, values): """ 设置字符串数字某一位的值, 返回之前的值 @param offsets: 支持列表或单个值 @param values: 支持列表或单个值 @return: list / 单个值 """ return self.redis_db.setbit(self.name, offsets, values) def get(self, offsets): return self.redis_db.getbit(self.name, offsets) def count(self, value=True): # 先查redis的缓存,若没有 在统计数量 count = self.redis_db.strget(self.count_cached_name) if count: return int(count) else: count = self.redis_db.bitcount(self.name) # 被设置为 1 的比特位的数量 self.redis_db.strset(self.count_cached_name, count, ex=1800) # 半小时过期 return count
4,133
Python
.py
111
24.900901
147
0.589326
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,851
basefilter.py
demigody_nas-tools/third_party/feapder/feapder/dedup/basefilter.py
# -*- coding: utf-8 -*- """ Created on 2022/9/21 11:17 AM --------- @summary: --------- @author: Boris @email: [email protected] """ import abc from typing import List, Union class BaseFilter: @abc.abstractmethod def add( self, keys: Union[List[str], str], *args, **kwargs ) -> Union[List[bool], bool]: """ Args: keys: list / 单个值 *args: **kwargs: Returns: list / 单个值 (如果数据已存在 返回 0 否则返回 1, 可以理解为是否添加成功) """ pass @abc.abstractmethod def get(self, keys: Union[List[str], str]) -> Union[List[bool], bool]: """ 检查数据是否存在 Args: keys: list / 单个值 Returns: list / 单个值 (如果数据已存在 返回 1 否则返回 0) """ pass
901
Python
.py
35
15.942857
74
0.513405
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,852
bloomfilter.py
demigody_nas-tools/third_party/feapder/feapder/dedup/bloomfilter.py
# -*- coding: utf-8 -*- """ Created on 2018/12/13 4:11 PM --------- @summary: --------- @author: Boris @email: [email protected] """ import hashlib import math import threading import time from struct import unpack, pack from feapder.dedup.basefilter import BaseFilter from feapder.utils.redis_lock import RedisLock from . import bitarray def make_hashfuncs(num_slices, num_bits): if num_bits >= (1 << 31): fmt_code, chunk_size = "Q", 8 elif num_bits >= (1 << 15): fmt_code, chunk_size = "I", 4 else: fmt_code, chunk_size = "H", 2 total_hash_bits = 8 * num_slices * chunk_size if total_hash_bits > 384: hashfn = hashlib.sha512 elif total_hash_bits > 256: hashfn = hashlib.sha384 elif total_hash_bits > 160: hashfn = hashlib.sha256 elif total_hash_bits > 128: hashfn = hashlib.sha1 else: hashfn = hashlib.md5 fmt = fmt_code * (hashfn().digest_size // chunk_size) num_salts, extra = divmod(num_slices, len(fmt)) if extra: num_salts += 1 salts = tuple(hashfn(hashfn(pack("I", i)).digest()) for i in range(num_salts)) def _make_hashfuncs(key): if isinstance(key, str): key = key.encode("utf-8") else: key = str(key).encode("utf-8") i = 0 for salt in salts: h = salt.copy() h.update(key) for uint in unpack(fmt, h.digest()): yield uint % num_bits i += 1 if i >= num_slices: return return _make_hashfuncs class BloomFilter(object): BASE_MEMORY = 1 BASE_REDIS = 2 def __init__( self, capacity: int, error_rate: float = 0.00001, bitarray_type=BASE_REDIS, name=None, redis_url=None, ): if not (0 < error_rate < 1): raise ValueError("Error_Rate must be between 0 and 1.") if not capacity > 0: raise ValueError("Capacity must be > 0") # given M = num_bits, k = num_slices, P = error_rate, n = capacity # k = log2(1/P) # solving for m = bits_per_slice # n ~= M * ((ln(2) ** 2) / abs(ln(P))) # n ~= (k * m) * ((ln(2) ** 2) / abs(ln(P))) # m ~= n * abs(ln(P)) / (k * (ln(2) ** 2)) num_slices = int(math.ceil(math.log(1.0 / error_rate, 2))) bits_per_slice = int( math.ceil( (capacity * abs(math.log(error_rate))) / 
(num_slices * (math.log(2) ** 2)) ) ) self._setup(error_rate, num_slices, bits_per_slice, capacity) if bitarray_type == BloomFilter.BASE_MEMORY: self.bitarray = bitarray.MemoryBitArray(self.num_bits) self.bitarray.setall(False) elif bitarray_type == BloomFilter.BASE_REDIS: assert name, "name can't be None " self.bitarray = bitarray.RedisBitArray(name, redis_url) else: raise ValueError("not support this bitarray type") def _setup(self, error_rate, num_slices, bits_per_slice, capacity): self.error_rate = error_rate self.num_slices = num_slices self.bits_per_slice = bits_per_slice self.capacity = capacity self.num_bits = num_slices * bits_per_slice self.make_hashes = make_hashfuncs(self.num_slices, self.bits_per_slice) self._is_at_capacity = False self._check_capacity_time = 0 def __repr__(self): return "<BloomFilter: {}>".format(self.bitarray) def get(self, keys, to_list=False): is_list = isinstance(keys, list) keys = keys if is_list else [keys] is_exists = [] offsets = [] for key in keys: hashes = self.make_hashes(key) offset = 0 for k in hashes: offsets.append(offset + k) offset += self.bits_per_slice old_values = self.bitarray.get(offsets) for i in range(0, len(old_values), self.num_slices): is_exists.append(int(all(old_values[i : i + self.num_slices]))) if to_list: return is_exists else: return is_exists if is_list else is_exists[0] @property def is_at_capacity(self): """ 是否容量已满, 1的个数满位数组的一半的时,则看做已满 比较耗时 半小时检查一次 @return: """ if self._is_at_capacity: return self._is_at_capacity if ( not self._check_capacity_time or time.time() - self._check_capacity_time > 1800 ): bit_count = self.bitarray.count() if bit_count and bit_count / self.num_bits > 0.5: self._is_at_capacity = True self._check_capacity_time = time.time() return self._is_at_capacity def add(self, keys): """ Adds a key to this bloom filter. If the key already exists in this filter it will return False. Otherwise True. 
keys support list @param keys: list or one key @return: """ # if self.is_at_capacity: # raise IndexError("BloomFilter is at capacity") is_list = isinstance(keys, list) keys = keys if is_list else [keys] is_added = [] offsets = [] for key in keys: hashes = self.make_hashes(key) offset = 0 for k in hashes: offsets.append(offset + k) offset += self.bits_per_slice old_values = self.bitarray.set(offsets, 1) for i in range(0, len(old_values), self.num_slices): is_added.append(1 ^ int(all(old_values[i : i + self.num_slices]))) return is_added if is_list else is_added[0] class ScalableBloomFilter(BaseFilter): """ 自动扩展空间的bloomfilter, 当一个filter满一半的时候,创建下一个 """ BASE_MEMORY = BloomFilter.BASE_MEMORY BASE_REDIS = BloomFilter.BASE_REDIS def __init__( self, initial_capacity: int = 100000000, error_rate: float = 0.00001, bitarray_type=BASE_REDIS, name=None, redis_url=None, ): if not error_rate or error_rate < 0: raise ValueError("Error_Rate must be a decimal less than 0.") self._setup( initial_capacity, error_rate, name, bitarray_type, redis_url=redis_url ) def _setup(self, initial_capacity, error_rate, name, bitarray_type, redis_url): self.initial_capacity = initial_capacity self.error_rate = error_rate self.name = name self.bitarray_type = bitarray_type self.redis_url = redis_url self.filters = [] self.filters.append(self.create_filter()) self._thread_lock = threading.RLock() self._check_capacity_time = 0 def __repr__(self): return "<ScalableBloomFilter: {}>".format(self.filters[-1].bitarray) def create_filter(self): filter = BloomFilter( capacity=self.initial_capacity, error_rate=self.error_rate, bitarray_type=self.bitarray_type, name=self.name + str(len(self.filters)) if self.name else self.name, redis_url=self.redis_url, ) return filter def check_filter_capacity(self): """ 检测filter状态,如果已满,加载新的filter @return: """ if ( not self._check_capacity_time or time.time() - self._check_capacity_time > 1800 ): if self.bitarray_type == ScalableBloomFilter.BASE_MEMORY: with 
self._thread_lock: while True: if self.filters[-1].is_at_capacity: self.filters.append(self.create_filter()) else: break self._check_capacity_time = time.time() else: # 全局锁 同一时间只有一个进程在真正的创建新的filter,等这个进程创建完,其他进程只是把刚创建的filter append进来 key = ( f"ScalableBloomFilter:{self.name}" if self.name else "ScalableBloomFilter" ) with RedisLock(key=key, redis_url=self.redis_url) as lock: if lock.locked: while True: if self.filters[-1].is_at_capacity: self.filters.append(self.create_filter()) else: break self._check_capacity_time = time.time() def add(self, keys, skip_check=False): """ Adds a key to this bloom filter. If the key already exists in this filter it will return False. Otherwise True. keys support list @param keys: list or one key @param skip_check: add directly,not check if is exist in bloomfilters @return: """ self.check_filter_capacity() current_filter = self.filters[-1] if skip_check: return current_filter.add(keys) else: is_list = isinstance(keys, list) keys = keys if is_list else [keys] not_exist_keys = list(set(keys)) # 检查之前的bloomfilter是否存在 # 记录下每级filter存在的key,不存在的key继续向下检查 for filter in reversed(self.filters): current_filter_is_exists = filter.get( not_exist_keys, to_list=True ) # 当前的filter是否存在 not_exist_keys_temp = [] for key, is_exist in zip(not_exist_keys, current_filter_is_exists): if not is_exist: # 当前filter不存在的key 需要继续向下检查 not_exist_keys_temp.append(key) not_exist_keys = not_exist_keys_temp if not not_exist_keys: break # 仍有不存在的关键词,记录该关键词 if not_exist_keys: current_filter.add(not_exist_keys) # 比较key是否已存在, 内部重复的key 若不存在啊则只留其一算为不存在,其他看作已存在 for i, key in enumerate(keys): for j, not_exist_key in enumerate(not_exist_keys): if key == not_exist_key: keys[i] = 1 not_exist_keys.pop(j) break else: keys[i] = 0 is_added = keys return is_added if is_list else is_added[0] def get(self, keys): self.check_filter_capacity() is_list = isinstance(keys, list) keys = keys if is_list else [keys] # 最终会修改为 [0, 1, ...] 
0表示不存在 1 已存在 not_exist_keys = list(set(keys)) # 检查之前的bloomfilter是否存在 # 记录下每级filter存在的key,不存在的key继续向下检查 for filter in reversed(self.filters): current_filter_is_exists = filter.get( not_exist_keys, to_list=True ) # 当前的filter是否存在 not_exist_keys_temp = [] for checked_key, is_exist in zip(not_exist_keys, current_filter_is_exists): if not is_exist: # 当前filter不存在的key 需要继续向下检查 not_exist_keys_temp.append(checked_key) not_exist_keys = not_exist_keys_temp if not not_exist_keys: break # 比较key是否已存在, 内部重复的key 若不存在啊则只留其一算为不存在,其他看作已存在 for i, key in enumerate(keys): for j, not_exist_key in enumerate(not_exist_keys): if key == not_exist_key: keys[i] = 0 not_exist_keys.pop(j) break else: keys[i] = 1 is_exists = keys return is_exists if is_list else is_exists[0] @property def capacity(self): """Returns the total capacity for all filters in this SBF""" return sum(f.capacity for f in self.filters)
12,518
Python
.py
310
27.170968
87
0.548258
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,853
__init__.py
demigody_nas-tools/third_party/feapder/feapder/dedup/__init__.py
# -*- coding: utf-8 -*- """ Created on 2018-12-13 21:08 --------- @summary: --------- @author: Boris @email: [email protected] """ import copy from typing import Any, List, Union, Optional, Tuple, Callable from feapder.utils.tools import get_md5 from .bloomfilter import BloomFilter, ScalableBloomFilter from .expirefilter import ExpireFilter from .litefilter import LiteFilter class Dedup: BloomFilter = 1 MemoryFilter = 2 ExpireFilter = 3 LiteFilter = 4 def __init__(self, filter_type: int = BloomFilter, to_md5: bool = True, **kwargs): """ 去重过滤器 集成BloomFilter、MemoryFilter、ExpireFilter、MemoryLiteFilter Args: filter_type: 过滤器类型 BloomFilter name: 过滤器名称 该名称会默认以dedup作为前缀 dedup:expire_set:[name]/dedup:bloomfilter:[name]。 默认ExpireFilter name=过期时间; BloomFilter name=dedup:bloomfilter:bloomfilter absolute_name: 过滤器绝对名称 不会加dedup前缀,当此值不为空时name参数无效 expire_time: ExpireFilter的过期时间 单位为秒,其他两种过滤器不用指定 error_rate: BloomFilter/MemoryFilter的误判率 默认为0.00001 to_md5: 去重前是否将数据转为MD5,默认是 redis_url: redis://[[username]:[password]]@localhost:6379/0 BloomFilter 与 ExpireFilter 使用 默认会读取setting中的redis配置,若无setting,则需要专递redis_url initial_capacity: 单个布隆过滤器去重容量 默认100000000,当布隆过滤器容量满时会扩展下一个布隆过滤器 error_rate:布隆过滤器的误判率 默认0.00001 **kwargs: """ if filter_type == Dedup.ExpireFilter: try: expire_time = kwargs["expire_time"] except: raise ValueError("需传参数 expire_time") name = kwargs.get("absolute_name") or "dedup:expire_set:%s" % kwargs.get( "name", expire_time ) expire_time_record_key = "dedup:expire_set:expire_time" self.dedup = ExpireFilter( name=name, expire_time=expire_time, expire_time_record_key=expire_time_record_key, redis_url=kwargs.get("redis_url"), ) elif filter_type == Dedup.LiteFilter: self.dedup = LiteFilter() else: initial_capacity = kwargs.get("initial_capacity", 100000000) error_rate = kwargs.get("error_rate", 0.00001) name = kwargs.get("absolute_name") or "dedup:bloomfilter:" + kwargs.get( "name", "bloomfilter" ) if filter_type == Dedup.BloomFilter: self.dedup = ScalableBloomFilter( name=name, 
initial_capacity=initial_capacity, error_rate=error_rate, bitarray_type=ScalableBloomFilter.BASE_REDIS, redis_url=kwargs.get("redis_url"), ) elif filter_type == Dedup.MemoryFilter: self.dedup = ScalableBloomFilter( name=name, initial_capacity=initial_capacity, error_rate=error_rate, bitarray_type=ScalableBloomFilter.BASE_MEMORY, ) else: raise ValueError( "filter_type 类型错误,仅支持 Dedup.BloomFilter、Dedup.MemoryFilter、Dedup.ExpireFilter" ) self._to_md5 = to_md5 def __repr__(self): return str(self.dedup) def _deal_datas(self, datas): if self._to_md5: if isinstance(datas, list): keys = [get_md5(data) for data in datas] else: keys = get_md5(datas) else: keys = copy.deepcopy(datas) return keys def add( self, datas: Union[List[Any], Any], skip_check: bool = False ) -> Union[List[Any], Any]: """ 添加数据 @param datas: list / 单个值 @param skip_check: 是否直接添加,不检查是否存在 适用于bloomfilter,加快add速度 @return: list / 单个值 (如果数据已存在 返回 0 否则返回 1, 可以理解为是否添加成功) """ keys = self._deal_datas(datas) is_added = self.dedup.add(keys, skip_check) return is_added def get(self, datas: Union[List[Any], Any]) -> Union[List[Any], Any]: """ 检查数据是否存在 @param datas: list / 单个值 @return: list / 单个值 (存在返回1 不存在返回0) """ keys = self._deal_datas(datas) is_exists = self.dedup.get(keys) return is_exists def filter_exist_data( self, datas: List[Any], *, datas_fingerprints: Optional[List] = None, callback: Callable[[Any], None] = None ) -> Union[Tuple[List[Any], List[Any]], List[Any]]: """ 过滤掉已存在的数据 *** 直接修改原来的数据 使用完此方法后 datas, datas_fingerprints 里面的值为去重后的数据 @param datas_fingerprints: 数据的唯一指纹 列表 @param datas: 数据 列表 @param callback: 数据已存在时的回调 callback(data) @return: None """ is_exists = self.get(datas_fingerprints or datas) dedup_datas = [] if datas_fingerprints: dedup_datas_fingerprints = [] while is_exists: data = datas.pop(0) is_exist = is_exists.pop(0) data_fingerprint = datas_fingerprints.pop(0) if not is_exist: dedup_datas.append(data) dedup_datas_fingerprints.append(data_fingerprint) else: if callback: callback(data) 
datas_fingerprints.extend(dedup_datas_fingerprints) datas.extend(dedup_datas) return datas, datas_fingerprints else: while is_exists: data = datas.pop(0) is_exist = is_exists.pop(0) if not is_exist: dedup_datas.append(data) else: if callback: callback(data) datas.extend(dedup_datas) return datas
6,616
Python
.py
155
26.677419
163
0.560069
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,854
expirefilter.py
demigody_nas-tools/third_party/feapder/feapder/dedup/expirefilter.py
# -*- coding: utf-8 -*- """ Created on 2018/12/13 9:44 PM --------- @summary: 带有有效期的去重集合 --------- @author: Boris @email: [email protected] """ import time from feapder.db.redisdb import RedisDB from feapder.dedup.basefilter import BaseFilter class ExpireFilter(BaseFilter): redis_db = None def __init__( self, name: str, expire_time: int, expire_time_record_key=None, redis_url=None ): if not name: raise ValueError("name cant't be None") if not expire_time: raise ValueError("please set expire time, units is seconds") if not self.__class__.redis_db: self.__class__.redis_db = RedisDB(url=redis_url) self.name = name self.expire_time = expire_time self.expire_time_record_key = expire_time_record_key self.del_expire_key_time = None self.record_expire_time() self.del_expire_key() def __repr__(self): return "<ExpireSet: {}>".format(self.name) @property def current_timestamp(self): return int(time.time()) def add(self, keys, *args, **kwargs): """ @param keys: 检查关键词在zset中是否存在,支持列表批量 @return: list / 单个值 """ if self.current_timestamp - self.del_expire_key_time > self.expire_time: self.del_expire_key() is_added = self.redis_db.zadd(self.name, keys, self.current_timestamp) return is_added def get(self, keys): is_exist = self.redis_db.zexists(self.name, keys) if isinstance(keys, list): # 判断数据本身是否重复 temp_set = set() for i, key in enumerate(keys): if key in temp_set: is_exist[i] = 1 else: temp_set.add(key) return is_exist def del_expire_key(self): self.redis_db.zremrangebyscore( self.name, "-inf", self.current_timestamp - self.expire_time ) self.del_expire_key_time = self.current_timestamp def record_expire_time(self): if self.expire_time_record_key: self.redis_db.hset( self.expire_time_record_key, key=self.name, value=self.expire_time )
2,309
Python
.py
64
26.40625
86
0.593663
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,855
litefilter.py
demigody_nas-tools/third_party/feapder/feapder/dedup/litefilter.py
# -*- coding: utf-8 -*- """ Created on 2022/9/21 11:28 AM --------- @summary: --------- @author: Boris @email: [email protected] """ from typing import List, Union, Set from feapder.dedup.basefilter import BaseFilter class LiteFilter(BaseFilter): def __init__(self): self.datas: Set[str] = set() def add( self, keys: Union[List[str], str], *args, **kwargs ) -> Union[List[int], int]: """ Args: keys: list / 单个值 *args: **kwargs: Returns: list / 单个值 (如果数据已存在 返回 0 否则返回 1, 可以理解为是否添加成功) """ if isinstance(keys, list): is_add = [] for key in keys: if key not in self.datas: self.datas.add(key) is_add.append(1) else: is_add.append(0) else: if keys not in self.datas: is_add = 1 self.datas.add(keys) else: is_add = 0 return is_add def get(self, keys: Union[List[str], str]) -> Union[List[int], int]: """ 检查数据是否存在 Args: keys: list / 单个值 Returns: list / 单个值 (如果数据已存在 返回 1 否则返回 0) """ if isinstance(keys, list): temp_set = set() is_exist = [] for key in keys: # 数据本身重复或者数据在去重库里 if key in temp_set or key in self.datas: is_exist.append(1) else: is_exist.append(0) temp_set.add(key) return is_exist else: return int(keys in self.datas)
1,854
Python
.py
61
17.508197
72
0.462195
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,856
custom_argparse.py
demigody_nas-tools/third_party/feapder/feapder/utils/custom_argparse.py
# -*- coding: utf-8 -*- """ Created on 2018-10-15 14:32:12 --------- @summary: 封装ArgumentParser, 使其支持function, 调用start自动执行 --------- @author: Boris @email: [email protected] """ import argparse class ArgumentParser(argparse.ArgumentParser): def __init__(self, *args, **kwargs): self.functions = {} super(ArgumentParser, self).__init__(*args, **kwargs) def add_argument(self, *args, **kwargs): function = kwargs.pop("function") if "function" in kwargs else None key = self._get_optional_kwargs(*args, **kwargs).get("dest") self.functions[key] = function return super(ArgumentParser, self).add_argument(*args, **kwargs) def start(self, args=None, namespace=None): args = self.parse_args(args=args, namespace=namespace) for key, value in vars(args).items(): # vars() 函数返回对象object的属性和属性值的字典对象 if value not in (None, False): if callable(self.functions[key]): if value != True: if isinstance(value, list) and len(value) == 1: value = value[0] self.functions[key](value) else: self.functions[key]() def run(self, args, values=None): if args in self.functions: if values: self.functions[args](values) else: self.functions[args]() else: raise Exception(f"无此方法: {args}") if __name__ == "__main__": def test(): print("test not args func") def test2(args): print("test args func", args) parser = ArgumentParser(description="测试") parser.add_argument("--test2", type=int, nargs=1, help="(1|2)", function=test2) parser.add_argument("--test", action="store_true", help="", function=test) parser.start()
1,959
Python
.py
47
30.765957
83
0.577558
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,857
perfect_dict.py
demigody_nas-tools/third_party/feapder/feapder/utils/perfect_dict.py
# -*- coding: utf-8 -*- """ Created on 2021/4/8 11:32 上午 --------- @summary: --------- @author: Boris @email: [email protected] """ def ensure_value(value): if isinstance(value, (list, tuple)): _value = [] for v in value: _value.append(ensure_value(v)) if isinstance(value, tuple): value = tuple(_value) else: value = _value if isinstance(value, dict): return PerfectDict(value) else: return value class PerfectDict(dict): """ >>> data = PerfectDict({"id":1, "url":"xxx"}) >>> data {'id': 1, 'url': 'xxx'} >>> data = PerfectDict(id=1, url="xxx") >>> data {'id': 1, 'url': 'xxx'} >>> data.id 1 >>> data.get("id") 1 >>> data["id"] 1 >>> id, url = data >>> id 1 >>> url 'xxx' >>> data[0] 1 >>> data[1] 'xxx' >>> data = PerfectDict({"a": 1, "b": {"b1": 2}, "c": [{"c1": [{"d": 1}]}]}) >>> data.b.b1 2 >>> data[1].b1 2 >>> data.get("b").b1 2 >>> data.c[0].c1 [{'d': 1}] >>> data.c[0].c1[0] {'d': 1} """ def __init__(self, _dict: dict = None, _values: list = None, **kwargs): self.__dict__ = _dict or kwargs or {} self.__dict__.pop("__values__", None) super().__init__(self.__dict__, **kwargs) self.__values__ = _values or list(self.__dict__.values()) def __getitem__(self, key): if isinstance(key, int): value = self.__values__[key] else: value = self.__dict__[key] return ensure_value(value) def __iter__(self, *args, **kwargs): for value in self.__values__: yield ensure_value(value) def __getattribute__(self, item): value = object.__getattribute__(self, item) if item == "__dict__" or item == "__values__": return value return ensure_value(value) def get(self, key, default=None): if key in self.__dict__: value = self.__dict__[key] return ensure_value(value) return default
2,123
Python
.py
81
19.716049
79
0.488889
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,858
email_sender.py
demigody_nas-tools/third_party/feapder/feapder/utils/email_sender.py
# -*- coding: utf-8 -*- """ Created on 2020/2/19 12:57 PM --------- @summary: --------- @author: Boris @email: [email protected] """ import os import smtplib from email.header import Header from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText from email.utils import formataddr from feapder.utils.log import log class EmailSender(object): SENDER = "feapder报警系统" def __init__(self, username, password, smtpserver="smtp.163.com"): self.username = username self.password = password self.smtpserver = smtpserver self.smtp_client = smtplib.SMTP_SSL(smtpserver) self.sender = EmailSender.SENDER def __enter__(self): self.login() return self def __exit__(self, exc_type, exc_val, exc_tb): self.quit() def quit(self): self.smtp_client.quit() def login(self): self.smtp_client.connect(self.smtpserver) self.smtp_client.login(self.username, self.password) def send( self, receivers: list, title: str, content: str, content_type: str = "plain", filepath: str = None, ): """ Args: receivers: title: content: content_type: html / plain filepath: Returns: """ # 创建一个带附件的实例 message = MIMEMultipart() message["From"] = formataddr( (self.sender, self.username) ) # 括号里的对应发件人邮箱昵称、发件人邮箱账号 message["To"] = ",".join( [formataddr((receiver, receiver)) for receiver in receivers] ) message["Subject"] = Header(title, "utf-8") content = MIMEText(content, content_type, "utf-8") message.attach(content) # 构造附件 if filepath: attach = MIMEText(open(filepath, "rb").read(), "base64", "utf-8") attach.add_header( "content-disposition", "attachment", filename=("utf-8", "", os.path.basename(filepath)), ) message.attach(attach) msg = message.as_string() # 此处直接发送多个邮箱有问题,改成一个个发送 for receiver in receivers: log.debug("发送邮件到 {}".format(receiver)) self.smtp_client.sendmail(self.username, receiver, msg) log.debug("邮件发送成功!!!") return True
2,537
Python
.py
78
22.589744
77
0.582825
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,859
metrics.py
demigody_nas-tools/third_party/feapder/feapder/utils/metrics.py
import concurrent.futures import json import os import queue import random import socket import string import threading import time from collections import Counter from typing import Any from influxdb import InfluxDBClient from feapder import setting from feapder.utils.log import log from feapder.utils.tools import aio_wrap, ensure_float, ensure_int _inited_pid = None # this thread should stop running in the forked process _executor = concurrent.futures.ThreadPoolExecutor( max_workers=1, thread_name_prefix="metrics" ) class MetricsEmitter: def __init__( self, influxdb, *, batch_size=10, max_timer_seq=0, emit_interval=10, retention_policy=None, ratio=1.0, debug=False, add_hostname=False, max_points=10240, default_tags=None, ): """ Args: influxdb: influxdb instance batch_size: 打点的批次大小 max_timer_seq: 每个时间间隔内最多收集多少个 timer 类型点, 0 表示不限制 emit_interval: 最多等待多长时间必须打点 retention_policy: 对应的 retention policy ratio: store 和 timer 类型采样率,比如 0.1 表示只有 10% 的点会留下 debug: 是否打印调试日志 add_hostname: 是否添加 hostname 作为 tag max_points: 本地 buffer 最多累计多少个点 """ self.pending_points = queue.Queue() self.batch_size = batch_size self.influxdb: InfluxDBClient = influxdb self.tagkv = {} self.max_timer_seq = max_timer_seq self.lock = threading.Lock() self.hostname = socket.gethostname() self.last_emit_ts = time.time() # 上次提交时间 self.emit_interval = emit_interval # 提交间隔 self.max_points = max_points self.retention_policy = retention_policy # 支持自定义保留策略 self.debug = debug self.add_hostname = add_hostname self.ratio = ratio self.default_tags = default_tags or {} def define_tagkv(self, tagk, tagvs): self.tagkv[tagk] = set(tagvs) def _point_tagset(self, p): return f"{p['measurement']}-{sorted(p['tags'].items())}-{p['time']}" def _make_time_to_ns(self, _time): """ 将时间转换为 ns 级别的时间戳,补足长度 19 位 Args: _time: Returns: """ time_len = len(str(_time)) random_str = "".join(random.sample(string.digits, 19 - time_len)) return int(str(_time) + random_str) def _accumulate_points(self, points): """ 对于处于同一个 key 的点做聚合 - 对于 
counter 类型,同一个 key 的值(_count)可以累加 - 对于 store 类型,不做任何操作,influxdb 会自行覆盖 - 对于 timer 类型,通过添加一个 _seq 值来区分每个不同的点 """ counters = {} # 临时保留 counter 类型的值 timer_seqs = Counter() # 记录不同 key 的 timer 序列号 new_points = [] for point in points: point_type = point["tags"].get("_type", None) tagset = self._point_tagset(point) # counter 类型全部聚合,不做丢弃 if point_type == "counter": if tagset not in counters: counters[tagset] = point else: counters[tagset]["fields"]["_count"] += point["fields"]["_count"] elif point_type == "timer": if self.max_timer_seq and timer_seqs[tagset] > self.max_timer_seq: continue # 掷一把骰子,如果足够幸运才打点 if self.ratio < 1.0 and random.random() > self.ratio: continue # 增加 _seq tag,以便区分不同的点 point["tags"]["_seq"] = timer_seqs[tagset] point["time"] = self._make_time_to_ns(point["time"]) timer_seqs[tagset] += 1 new_points.append(point) else: if self.ratio < 1.0 and random.random() > self.ratio: continue point["time"] = self._make_time_to_ns(point["time"]) new_points.append(point) for point in counters.values(): # 修改下counter类型的点的时间戳,补足19位, 伪装成纳秒级时间戳,防止influxdb对同一秒内的数据进行覆盖 point["time"] = self._make_time_to_ns(point["time"]) new_points.append(point) # 把拟合后的 counter 值添加进来 new_points.append(point) return new_points def _get_ready_emit(self, force=False): """ 把当前 pending 的值做聚合并返回 """ if self.debug: log.info("got %s raw points", self.pending_points.qsize()) # 从 pending 中读取点, 设定一个最大值,避免一直打点,一直获取 points = [] while len(points) < self.max_points or force: try: points.append(self.pending_points.get_nowait()) except queue.Empty: break # 聚合点 points = self._accumulate_points(points) if self.debug: log.info("got %s point", len(points)) log.info(json.dumps(points, indent=4)) return points def emit(self, point=None, force=False): """ 1. 添加新点到 pending 2. 如果符合条件,尝试聚合并打点 3. 
更新打点时间 :param point: :param force: 强制提交所有点 默认False :return: """ if point: self.pending_points.put(point) # 判断是否需要提交点 1、数量 2、间隔 3、强力打点 if not ( force or self.pending_points.qsize() >= self.max_points # noqa: W503 or time.time() - self.last_emit_ts > self.emit_interval # noqa: W503 ): return # 需要打点,读取可以打点的值, 确保只有一个线程在做点的压缩 with self.lock: points = self._get_ready_emit(force=force) if not points: return try: # h(hour) m(minutes), s(seconds), ms(milliseconds), u(microseconds), n(nanoseconds) self.influxdb.write_points( points, batch_size=self.batch_size, time_precision="n", retention_policy=self.retention_policy, ) except Exception: log.exception("error writing points") self.last_emit_ts = time.time() def flush(self): if self.debug: log.info("start draining points %s", self.pending_points.qsize()) self.emit(force=True) def close(self): self.flush() try: self.influxdb.close() except Exception as e: log.exception(e) def make_point(self, measurement, tags: dict, fields: dict, timestamp=None): """ 默认的时间戳是"秒"级别的 """ assert measurement, "measurement can't be null" tags = tags.copy() if tags else {} tags.update(self.default_tags) fields = fields.copy() if fields else {} if timestamp is None: timestamp = int(time.time()) # 支持自定义hostname if self.add_hostname and "hostname" not in tags: tags["hostname"] = self.hostname point = dict(measurement=measurement, tags=tags, fields=fields, time=timestamp) if self.tagkv: for tagk, tagv in tags.items(): if tagv not in self.tagkv[tagk]: raise ValueError("tag value = %s not in %s", tagv, self.tagkv[tagk]) return point def get_counter_point( self, measurement: str, key: str = None, count: int = 1, tags: dict = None, timestamp: int = None, ): """ counter 不能被覆盖 """ tags = tags.copy() if tags else {} if key is not None: tags["_key"] = key tags["_type"] = "counter" count = ensure_int(count) fields = dict(_count=count) point = self.make_point(measurement, tags, fields, timestamp=timestamp) return point def get_store_point( self, measurement: str, 
key: str = None, value: Any = 0, tags: dict = None, timestamp=None, ): tags = tags.copy() if tags else {} if key is not None: tags["_key"] = key tags["_type"] = "store" fields = dict(_value=value) point = self.make_point(measurement, tags, fields, timestamp=timestamp) return point def get_timer_point( self, measurement: str, key: str = None, duration: float = 0, tags: dict = None, timestamp=None, ): tags = tags.copy() if tags else {} if key is not None: tags["_key"] = key tags["_type"] = "timer" fields = dict(_duration=ensure_float(duration)) point = self.make_point(measurement, tags, fields, timestamp=timestamp) return point def emit_any(self, *args, **kwargs): point = self.make_point(*args, **kwargs) self.emit(point) def emit_counter(self, *args, **kwargs): point = self.get_counter_point(*args, **kwargs) self.emit(point) def emit_store(self, *args, **kwargs): point = self.get_store_point(*args, **kwargs) self.emit(point) def emit_timer(self, *args, **kwargs): point = self.get_timer_point(*args, **kwargs) self.emit(point) _emitter: MetricsEmitter = None _measurement: str = None def init( *, influxdb_host=None, influxdb_port=None, influxdb_udp_port=None, influxdb_database=None, influxdb_user=None, influxdb_password=None, influxdb_measurement=None, retention_policy=None, retention_policy_duration="180d", emit_interval=60, batch_size=100, debug=False, use_udp=False, timeout=22, ssl=False, retention_policy_replication: str = "1", set_retention_policy_default=True, **kwargs, ): """ 打点监控初始化 Args: influxdb_host: influxdb_port: influxdb_udp_port: influxdb_database: influxdb_user: influxdb_password: influxdb_measurement: 存储的表,也可以在打点的时候指定 retention_policy: 保留策略 retention_policy_duration: 保留策略过期时间 emit_interval: 打点最大间隔 batch_size: 打点的批次大小 debug: 是否开启调试 use_udp: 是否使用udp协议打点 timeout: 与influxdb建立连接时的超时时间 ssl: 是否使用https协议 retention_policy_replication: 保留策略的副本数, 确保数据的可靠性和高可用性。如果一个节点发生故障,其他节点可以继续提供服务,从而避免数据丢失和服务不可用的情况 set_retention_policy_default: 
是否设置为默认的保留策略,当retention_policy初次创建时有效 **kwargs: 可传递MetricsEmitter类的参数 Returns: """ global _inited_pid, _emitter, _measurement if _inited_pid == os.getpid(): return influxdb_host = influxdb_host or setting.INFLUXDB_HOST influxdb_port = influxdb_port or setting.INFLUXDB_PORT influxdb_udp_port = influxdb_udp_port or setting.INFLUXDB_UDP_PORT influxdb_database = influxdb_database or setting.INFLUXDB_DATABASE influxdb_user = influxdb_user or setting.INFLUXDB_USER influxdb_password = influxdb_password or setting.INFLUXDB_PASSWORD _measurement = influxdb_measurement or setting.INFLUXDB_MEASUREMENT retention_policy = ( retention_policy or f"{influxdb_database}_{retention_policy_duration}" ) if not all( [ influxdb_host, influxdb_port, influxdb_udp_port, influxdb_database, influxdb_user, influxdb_password, ] ): return influxdb_client = InfluxDBClient( host=influxdb_host, port=influxdb_port, udp_port=influxdb_udp_port, database=influxdb_database, use_udp=use_udp, timeout=timeout, username=influxdb_user, password=influxdb_password, ssl=ssl, ) # 创建数据库 if influxdb_database: try: influxdb_client.create_database(influxdb_database) influxdb_client.create_retention_policy( retention_policy, retention_policy_duration, replication=retention_policy_replication, default=set_retention_policy_default, ) except Exception as e: log.error("metrics init falied: {}".format(e)) return _emitter = MetricsEmitter( influxdb_client, debug=debug, batch_size=batch_size, retention_policy=retention_policy, emit_interval=emit_interval, **kwargs, ) _inited_pid = os.getpid() log.info("metrics init successfully") def emit_any( tags: dict, fields: dict, *, classify: str = "", measurement: str = None, timestamp=None, ): """ 原生的打点,不进行额外的处理 Args: tags: influxdb的tag的字段和值 fields: influxdb的field的字段和值 classify: 点的类别 measurement: 存储的表 timestamp: 点的时间搓,默认为当前时间 Returns: """ if not _emitter: return tags = tags or {} tags["_classify"] = classify measurement = measurement or _measurement _emitter.emit_any(measurement, 
tags, fields, timestamp) def emit_counter( key: str = None, count: int = 1, *, classify: str = "", tags: dict = None, measurement: str = None, timestamp: int = None, ): """ 聚合打点,即会将一段时间内的点求和,然后打一个点数和 Args: key: 与点绑定的key值 count: 点数 classify: 点的类别 tags: influxdb的tag的字段和值 measurement: 存储的表 timestamp: 点的时间搓,默认为当前时间 Returns: """ if not _emitter: return tags = tags or {} tags["_classify"] = classify measurement = measurement or _measurement _emitter.emit_counter(measurement, key, count, tags, timestamp) def emit_timer( key: str = None, duration: float = 0, *, classify: str = "", tags: dict = None, measurement: str = None, timestamp=None, ): """ 时间打点,用于监控程序的运行时长等,每个duration一个点,不会被覆盖 Args: key: 与点绑定的key值 duration: 时长 classify: 点的类别 tags: influxdb的tag的字段和值 measurement: 存储的表 timestamp: 点的时间搓,默认为当前时间 Returns: """ if not _emitter: return tags = tags or {} tags["_classify"] = classify measurement = measurement or _measurement _emitter.emit_timer(measurement, key, duration, tags, timestamp) def emit_store( key: str = None, value: Any = 0, *, classify: str = "", tags: dict = None, measurement: str = None, timestamp=None, ): """ 直接打点,不进行额外的处理 Args: key: 与点绑定的key值 value: 点的值 classify: 点的类别 tags: influxdb的tag的字段和值 measurement: 存储的表 timestamp: 点的时间搓,默认为当前时间 Returns: """ if not _emitter: return tags = tags or {} tags["_classify"] = classify measurement = measurement or _measurement _emitter.emit_store(measurement, key, value, tags, timestamp) def flush(): """ 强刷点到influxdb Returns: """ if not _emitter: return _emitter.flush() def close(): """ 关闭 Returns: """ if not _emitter: return _emitter.close() # 协程打点 aemit_counter = aio_wrap(executor=_executor)(emit_counter) aemit_store = aio_wrap(executor=_executor)(emit_store) aemit_timer = aio_wrap(executor=_executor)(emit_timer)
16,820
Python
.py
487
22.648871
103
0.586921
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,860
__init__.py
demigody_nas-tools/third_party/feapder/feapder/utils/__init__.py
# -*- coding: utf-8 -*- ''' Created on 2019/11/5 4:41 PM --------- @summary: --------- @author: Boris @email: [email protected] '''
135
Python
.py
9
14.111111
29
0.566929
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,861
redis_lock.py
demigody_nas-tools/third_party/feapder/feapder/utils/redis_lock.py
# -*- coding: utf-8 -*- """ Created on 2019/11/5 5:25 PM --------- @summary: --------- @author: Boris @email: [email protected] """ import threading import time from feapder.db.redisdb import RedisDB from feapder.utils.log import log class RedisLock: redis_cli = None def __init__( self, key, *, wait_timeout=0, lock_timeout=86400, redis_cli=None, redis_url=None ): """ redis超时锁 :param key: 存储锁的key redis_lock:[key] :param wait_timeout: 等待加锁超时时间,为0时则不等待加锁,加锁失败 :param lock_timeout: 锁超时时间 为0时则不会超时,直到锁释放或意外退出,默认超时为1天 :param redis_cli: redis客户端对象 :param redis_url: redis连接地址,若redis_cli传值,则不使用redis_url 用法示例: with RedisLock(key="test") as _lock: if _lock.locked: # 用来判断是否加上了锁 # do somethings """ self.redis_conn = redis_cli self.redis_url = redis_url self.lock_key = "redis_lock:{}".format(key) # 锁超时时间 self.lock_timeout = lock_timeout # 等待加锁时间 self.wait_timeout = wait_timeout self.locked = False self.stop_prolong_life = False @property def redis_conn(self): if not self.__class__.redis_cli: self.__class__.redis_cli = RedisDB(url=self.redis_url).get_redis_obj() return self.__class__.redis_cli @redis_conn.setter def redis_conn(self, cli): if cli: self.__class__.redis_cli = cli def __enter__(self): if not self.locked: self.acquire() if self.locked: # 延长锁的时间 thread = threading.Thread(target=self.prolong_life) thread.setDaemon(True) thread.start() return self def __exit__(self, exc_type, exc_val, exc_tb): self.stop_prolong_life = True self.release() def __repr__(self): return "<RedisLock: {} >".format(self.lock_key) def acquire(self): start = time.time() while True: # 尝试加锁 if self.redis_conn.set(self.lock_key, time.time(), nx=True, ex=5): self.locked = True break if self.wait_timeout > 0: if time.time() - start > self.wait_timeout: log.info("加锁失败") break else: break log.debug("等待加锁: {} wait:{}".format(self, time.time() - start)) if self.wait_timeout > 10: time.sleep(5) else: time.sleep(1) return def release(self): if self.locked: self.redis_conn.delete(self.lock_key) self.locked = 
False return def prolong_life(self): """ 延长锁的过期时间 :return: """ spend_time = 0 while not self.stop_prolong_life: expire = self.redis_conn.ttl(self.lock_key) if expire < 0: # key 不存在 time.sleep(1) continue self.redis_conn.expire(self.lock_key, expire + 5) # 延长5秒 time.sleep(expire) # 临过期5秒前,再次延长 spend_time += expire if self.lock_timeout and spend_time > self.lock_timeout: log.info("锁超时,释放") self.redis_conn.delete(self.lock_key) break
3,670
Python
.py
105
22.219048
88
0.536781
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,862
log.py
demigody_nas-tools/third_party/feapder/feapder/utils/log.py
# -*- coding: utf-8 -*- """ Created on 2018-12-08 16:50 --------- @summary: --------- @author: Boris @email: [email protected] """ import logging import os import sys from logging.handlers import BaseRotatingHandler import loguru from better_exceptions import format_exception import feapder.setting as setting class InterceptHandler(logging.Handler): def emit(self, record): # Retrieve context where the logging call occurred, this happens to be in the 6th frame upward logger_opt = loguru.logger.opt(depth=6, exception=record.exc_info) logger_opt.log(record.levelname, record.getMessage()) # 重写 RotatingFileHandler 自定义log的文件名 # 原来 xxx.log xxx.log.1 xxx.log.2 xxx.log.3 文件由近及远 # 现在 xxx.log xxx1.log xxx2.log 如果backup_count 是2位数时 则 01 02 03 三位数 001 002 .. 文件由近及远 class RotatingFileHandler(BaseRotatingHandler): def __init__( self, filename, mode="a", max_bytes=0, backup_count=0, encoding=None, delay=0 ): BaseRotatingHandler.__init__(self, filename, mode, encoding, delay) self.max_bytes = max_bytes self.backup_count = backup_count self.placeholder = str(len(str(backup_count))) def doRollover(self): if self.stream: self.stream.close() self.stream = None if self.backup_count > 0: for i in range(self.backup_count - 1, 0, -1): sfn = ("%0" + self.placeholder + "d.") % i # '%2d.'%i -> 02 sfn = sfn.join(self.baseFilename.split(".")) # sfn = "%d_%s" % (i, self.baseFilename) # dfn = "%d_%s" % (i + 1, self.baseFilename) dfn = ("%0" + self.placeholder + "d.") % (i + 1) dfn = dfn.join(self.baseFilename.split(".")) if os.path.exists(sfn): # print "%s -> %s" % (sfn, dfn) if os.path.exists(dfn): os.remove(dfn) os.rename(sfn, dfn) dfn = (("%0" + self.placeholder + "d.") % 1).join( self.baseFilename.split(".") ) if os.path.exists(dfn): os.remove(dfn) # Issue 18940: A file may not have been created if delay is True. 
if os.path.exists(self.baseFilename): os.rename(self.baseFilename, dfn) if not self.delay: self.stream = self._open() def shouldRollover(self, record): if self.stream is None: # delay was set... self.stream = self._open() if self.max_bytes > 0: # are we rolling over? msg = "%s\n" % self.format(record) self.stream.seek(0, 2) # due to non-posix-compliant Windows feature if self.stream.tell() + len(msg) >= self.max_bytes: return 1 return 0 def get_logger( name=None, path=None, log_level=None, is_write_to_console=None, is_write_to_file=None, color=None, mode=None, max_bytes=None, backup_count=None, encoding=None, ): """ @summary: 获取log --------- @param name: log名 @param path: log文件存储路径 如 D://xxx.log @param log_level: log等级 CRITICAL/ERROR/WARNING/INFO/DEBUG @param is_write_to_console: 是否输出到控制台 @param is_write_to_file: 是否写入到文件 默认否 @param color:是否有颜色 @param mode:写文件模式 @param max_bytes: 每个日志文件的最大字节数 @param backup_count:日志文件保留数量 @param encoding:日志文件编码 --------- @result: """ # 加载setting里最新的值 name = name or setting.LOG_NAME path = path or setting.LOG_PATH log_level = log_level or setting.LOG_LEVEL is_write_to_console = ( is_write_to_console if is_write_to_console is not None else setting.LOG_IS_WRITE_TO_CONSOLE ) is_write_to_file = ( is_write_to_file if is_write_to_file is not None else setting.LOG_IS_WRITE_TO_FILE ) color = color if color is not None else setting.LOG_COLOR mode = mode or setting.LOG_MODE max_bytes = max_bytes or setting.LOG_MAX_BYTES backup_count = backup_count or setting.LOG_BACKUP_COUNT encoding = encoding or setting.LOG_ENCODING # logger 配置 name = name.split(os.sep)[-1].split(".")[0] # 取文件名 logger = logging.getLogger(name) logger.setLevel(log_level) formatter = logging.Formatter(setting.LOG_FORMAT) if setting.PRINT_EXCEPTION_DETAILS: formatter.formatException = lambda exc_info: format_exception(*exc_info) # 定义一个RotatingFileHandler,最多备份5个日志文件,每个日志文件最大10M if is_write_to_file: if path and not os.path.exists(os.path.dirname(path)): 
os.makedirs(os.path.dirname(path), exist_ok=True) rf_handler = RotatingFileHandler( path, mode=mode, max_bytes=max_bytes, backup_count=backup_count, encoding=encoding, ) rf_handler.setFormatter(formatter) logger.addHandler(rf_handler) if color and is_write_to_console: loguru_handler = InterceptHandler() loguru_handler.setFormatter(formatter) # logging.basicConfig(handlers=[loguru_handler], level=0) logger.addHandler(loguru_handler) elif is_write_to_console: stream_handler = logging.StreamHandler() stream_handler.stream = sys.stdout stream_handler.setFormatter(formatter) logger.addHandler(stream_handler) _handler_list = [] _handler_name_list = [] # 检查是否存在重复handler for _handler in logger.handlers: if str(_handler) not in _handler_name_list: _handler_name_list.append(str(_handler)) _handler_list.append(_handler) logger.handlers = _handler_list return logger # logging.disable(logging.DEBUG) # 关闭所有log # 不让打印log的配置 STOP_LOGS = [ # ES "urllib3.response", "urllib3.connection", "elasticsearch.trace", "requests.packages.urllib3.util", "requests.packages.urllib3.util.retry", "urllib3.util", "requests.packages.urllib3.response", "requests.packages.urllib3.contrib.pyopenssl", "requests.packages", "urllib3.util.retry", "requests.packages.urllib3.contrib", "requests.packages.urllib3.connectionpool", "requests.packages.urllib3.poolmanager", "urllib3.connectionpool", "requests.packages.urllib3.connection", "elasticsearch", "log_request_fail", # requests "requests", "selenium.webdriver.remote.remote_connection", "selenium.webdriver.remote", "selenium.webdriver", "selenium", # markdown "MARKDOWN", "build_extension", # newspaper "calculate_area", "largest_image_url", "newspaper.images", "newspaper", "Importing", "PIL", ] # 关闭日志打印 OTHERS_LOG_LEVAL = eval("logging." 
+ setting.OTHERS_LOG_LEVAL) for STOP_LOG in STOP_LOGS: logging.getLogger(STOP_LOG).setLevel(OTHERS_LOG_LEVAL) # print(logging.Logger.manager.loggerDict) # 取使用debug模块的name # 日志级别大小关系为:CRITICAL > ERROR > WARNING > INFO > DEBUG class Log: log = None def func(self, log_level): def wrapper(msg, *args, **kwargs): if self.isEnabledFor(log_level): self._log(log_level, msg, args, **kwargs) return wrapper def __getattr__(self, name): # 调用log时再初始化,为了加载最新的setting if self.__class__.log is None: self.__class__.log = get_logger() return getattr(self.__class__.log, name) @property def debug(self): return self.__class__.log.debug @property def info(self): return self.__class__.log.info @property def success(self): log_level = logging.INFO + 1 logging.addLevelName(log_level, "success".upper()) return self.func(log_level) @property def warning(self): return self.__class__.log.warning @property def exception(self): return self.__class__.log.exception @property def error(self): return self.__class__.log.error @property def critical(self): return self.__class__.log.critical log = Log()
8,483
Python
.py
234
27.452991
102
0.628582
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,863
tools.py
demigody_nas-tools/third_party/feapder/feapder/utils/tools.py
# -*- coding: utf-8 -*- """ Created on 2018-09-06 14:21 --------- @summary: 工具 --------- @author: Boris @email: [email protected] """ import asyncio import base64 import calendar import codecs import configparser # 读配置文件的 import datetime import functools import hashlib import html import importlib import json import os import pickle import random import re import signal import socket import ssl import string import sys import time import traceback import urllib import urllib.parse import uuid import weakref from functools import partial, wraps from hashlib import md5 from pprint import pformat from pprint import pprint from urllib import request from urllib.parse import urljoin import redis import requests import six from requests.cookies import RequestsCookieJar from w3lib.url import canonicalize_url as _canonicalize_url import feapder.setting as setting from feapder.db.redisdb import RedisDB from feapder.utils.email_sender import EmailSender from feapder.utils.log import log try: import execjs # pip install PyExecJS except Exception as e: pass os.environ["EXECJS_RUNTIME"] = "Node" # 设置使用node执行js # 全局取消ssl证书验证 ssl._create_default_https_context = ssl._create_unverified_context TIME_OUT = 30 TIMER_TIME = 5 redisdb = None def get_redisdb(): global redisdb if not redisdb: redisdb = RedisDB() return redisdb # 装饰器 class Singleton(object): def __init__(self, cls): self._cls = cls self._instance = {} def __call__(self, *args, **kwargs): if self._cls not in self._instance: self._instance[self._cls] = self._cls(*args, **kwargs) return self._instance[self._cls] class LazyProperty: """ 属性延时初始化,且只初始化一次 """ def __init__(self, func): self.func = func def __get__(self, instance, owner): if instance is None: return self else: value = self.func(instance) setattr(instance, self.func.__name__, value) return value def log_function_time(func): try: @functools.wraps(func) # 将函数的原来属性付给新函数 def calculate_time(*args, **kw): began_time = time.time() callfunc = func(*args, **kw) end_time = 
time.time() log.debug(func.__name__ + " run time = " + str(end_time - began_time)) return callfunc return calculate_time except: log.debug("求取时间无效 因为函数参数不符") return func def run_safe_model(module_name): def inner_run_safe_model(func): try: @functools.wraps(func) # 将函数的原来属性付给新函数 def run_func(*args, **kw): callfunc = None try: callfunc = func(*args, **kw) except Exception as e: log.error(module_name + ": " + func.__name__ + " - " + str(e)) traceback.print_exc() return callfunc return run_func except Exception as e: log.error(module_name + ": " + func.__name__ + " - " + str(e)) traceback.print_exc() return func return inner_run_safe_model def memoizemethod_noargs(method): """Decorator to cache the result of a method (without arguments) using a weak reference to its object """ cache = weakref.WeakKeyDictionary() @functools.wraps(method) def new_method(self, *args, **kwargs): if self not in cache: cache[self] = method(self, *args, **kwargs) return cache[self] return new_method def retry(retry_times=3, interval=0): """ 普通函数的重试装饰器 Args: retry_times: 重试次数 interval: 每次重试之间的间隔 Returns: """ def _retry(func): @functools.wraps(func) # 将函数的原来属性付给新函数 def wapper(*args, **kwargs): for i in range(retry_times): try: return func(*args, **kwargs) except Exception as e: log.error( "函数 {} 执行失败 重试 {} 次. error {}".format(func.__name__, i + 1, e) ) time.sleep(interval) if i + 1 >= retry_times: raise e return wapper return _retry def retry_asyncio(retry_times=3, interval=0): """ 协程的重试装饰器 Args: retry_times: 重试次数 interval: 每次重试之间的间隔 Returns: """ def _retry(func): @functools.wraps(func) # 将函数的原来属性付给新函数 async def wapper(*args, **kwargs): for i in range(retry_times): try: return await func(*args, **kwargs) except Exception as e: log.error( "函数 {} 执行失败 重试 {} 次. 
error {}".format(func.__name__, i + 1, e) ) await asyncio.sleep(interval) if i + 1 >= retry_times: raise e return wapper return _retry def func_timeout(timeout): """ 函数运行时间限制装饰器 注: 不支持window Args: timeout: 超时的时间 Eg: @set_timeout(3) def test(): ... Returns: """ def wapper(func): def handle( signum, frame ): # 收到信号 SIGALRM 后的回调函数,第一个参数是信号的数字,第二个参数是the interrupted stack frame. raise TimeoutError def new_method(*args, **kwargs): signal.signal(signal.SIGALRM, handle) # 设置信号和回调函数 signal.alarm(timeout) # 设置 timeout 秒的闹钟 r = func(*args, **kwargs) signal.alarm(0) # 关闭闹钟 return r return new_method return wapper ########################【网页解析相关】############################### # @log_function_time def get_html_by_requests( url, headers=None, code="utf-8", data=None, proxies={}, with_response=False ): html = "" r = None try: if data: r = requests.post( url, headers=headers, timeout=TIME_OUT, data=data, proxies=proxies ) else: r = requests.get(url, headers=headers, timeout=TIME_OUT, proxies=proxies) if code: r.encoding = code html = r.text except Exception as e: log.error(e) finally: r and r.close() if with_response: return html, r else: return html def get_json_by_requests( url, params=None, headers=None, data=None, proxies={}, with_response=False, cookies=None, ): json = {} response = None try: # response = requests.get(url, params = params) if data: response = requests.post( url, headers=headers, data=data, params=params, timeout=TIME_OUT, proxies=proxies, cookies=cookies, ) else: response = requests.get( url, headers=headers, params=params, timeout=TIME_OUT, proxies=proxies, cookies=cookies, ) response.encoding = "utf-8" json = response.json() except Exception as e: log.error(e) finally: response and response.close() if with_response: return json, response else: return json def get_cookies(response): cookies = requests.utils.dict_from_cookiejar(response.cookies) return cookies def get_cookies_from_str(cookie_str): """ >>> get_cookies_from_str("key=value; key2=value2; key3=; 
key4=; ") {'key': 'value', 'key2': 'value2', 'key3': '', 'key4': ''} Args: cookie_str: key=value; key2=value2; key3=; key4= Returns: """ cookies = {} for cookie in cookie_str.split(";"): cookie = cookie.strip() if not cookie: continue key, value = cookie.split("=", 1) key = key.strip() value = value.strip() cookies[key] = value return cookies def get_cookies_jar(cookies): """ @summary: 适用于selenium生成的cookies转requests的cookies requests.get(xxx, cookies=jar) 参考:https://www.cnblogs.com/small-bud/p/9064674.html --------- @param cookies: [{},{}] --------- @result: cookie jar """ cookie_jar = RequestsCookieJar() for cookie in cookies: cookie_jar.set(cookie["name"], cookie["value"]) return cookie_jar def get_cookies_from_selenium_cookie(cookies): """ @summary: 适用于selenium生成的cookies转requests的cookies requests.get(xxx, cookies=jar) 参考:https://www.cnblogs.com/small-bud/p/9064674.html --------- @param cookies: [{},{}] --------- @result: cookie jar """ cookie_dict = {} for cookie in cookies: if cookie.get("name"): cookie_dict[cookie["name"]] = cookie["value"] return cookie_dict def cookiesjar2str(cookies): str_cookie = "" for k, v in requests.utils.dict_from_cookiejar(cookies).items(): str_cookie += k str_cookie += "=" str_cookie += v str_cookie += "; " return str_cookie def cookies2str(cookies): str_cookie = "" for k, v in cookies.items(): str_cookie += k str_cookie += "=" str_cookie += v str_cookie += "; " return str_cookie def get_urls( html, stop_urls=( "javascript", "+", ".css", ".js", ".rar", ".xls", ".exe", ".apk", ".doc", ".jpg", ".png", ".flv", ".mp4", ), ): # 不匹配javascript、 +、 # 这样的url regex = r'<a.*?href.*?=.*?["|\'](.*?)["|\']' urls = get_info(html, regex) urls = sorted(set(urls), key=urls.index) if stop_urls: stop_urls = isinstance(stop_urls, str) and [stop_urls] or stop_urls use_urls = [] for url in urls: for stop_url in stop_urls: if stop_url in url: break else: use_urls.append(url) urls = use_urls return urls def get_full_url(root_url, sub_url): """ @summary: 
得到完整的ur --------- @param root_url: 根url (网页的url) @param sub_url: 子url (带有相对路径的 可以拼接成完整的) --------- @result: 返回完整的url """ return urljoin(root_url, sub_url) def joint_url(url, params): # param_str = "?" # for key, value in params.items(): # value = isinstance(value, str) and value or str(value) # param_str += key + "=" + value + "&" # # return url + param_str[:-1] if not params: return url params = urlencode(params) separator = "?" if "?" not in url else "&" return url + separator + params def canonicalize_url(url): """ url 归一化 会参数排序 及去掉锚点 """ return _canonicalize_url(url) def get_url_md5(url): url = canonicalize_url(url) url = re.sub("^http://", "https://", url) return get_md5(url) def fit_url(urls, identis): identis = isinstance(identis, str) and [identis] or identis fit_urls = [] for link in urls: for identi in identis: if identi in link: fit_urls.append(link) return list(set(fit_urls)) def get_param(url, key): match = re.search(f"{key}=([^&]+)", url) if match: return match.group(1) return None def get_all_params(url): """ >>> get_all_params("https://www.baidu.com/s?wd=feapder") {'wd': 'feapder'} """ params_json = {} params = url.split("?", 1)[-1].split("&") for param in params: key_value = param.split("=", 1) if len(key_value) == 2: params_json[key_value[0]] = unquote_url(key_value[1]) else: params_json[key_value[0]] = "" return params_json def parse_url_params(url): """ 解析url参数 :param url: :return: >>> parse_url_params("https://www.baidu.com/s?wd=%E4%BD%A0%E5%A5%BD") ('https://www.baidu.com/s', {'wd': '你好'}) >>> parse_url_params("wd=%E4%BD%A0%E5%A5%BD") ('', {'wd': '你好'}) >>> parse_url_params("https://www.baidu.com/s?wd=%E4%BD%A0%E5%A5%BD&pn=10") ('https://www.baidu.com/s', {'wd': '你好', 'pn': '10'}) >>> parse_url_params("wd=%E4%BD%A0%E5%A5%BD&pn=10") ('', {'wd': '你好', 'pn': '10'}) >>> parse_url_params("https://www.baidu.com") ('https://www.baidu.com', {}) >>> parse_url_params("https://www.spidertools.cn/#/") ('https://www.spidertools.cn/#/', {}) """ root_url = 
"" params = {} if "?" not in url: if re.search("[&=]", url) and not re.search("/", url): # 只有参数 params = get_all_params(url) else: root_url = url else: root_url = url.split("?", 1)[0] params = get_all_params(url) return root_url, params def urlencode(params): """ 字典类型的参数转为字符串 @param params: { 'a': 1, 'b': 2 } @return: a=1&b=2 """ return urllib.parse.urlencode(params) def urldecode(url): """ 将字符串类型的参数转为json @param url: xxx?a=1&b=2 @return: { 'a': 1, 'b': 2 } """ params_json = {} params = url.split("?")[-1].split("&") for param in params: key, value = param.split("=", 1) params_json[key] = unquote_url(value) return params_json def unquote_url(url, encoding="utf-8"): """ @summary: 将url解码 --------- @param url: --------- @result: """ return urllib.parse.unquote(url, encoding=encoding) def quote_url(url, encoding="utf-8"): """ @summary: 将url编码 编码意思http://www.w3school.com.cn/tags/html_ref_urlencode.html --------- @param url: --------- @result: """ return urllib.parse.quote(url, safe="%;/?:@&=+$,", encoding=encoding) def quote_chinese_word(text, encoding="utf-8"): def quote_chinese_word_func(text): chinese_word = text.group(0) return urllib.parse.quote(chinese_word, encoding=encoding) return re.sub("([\u4e00-\u9fa5]+)", quote_chinese_word_func, text, flags=re.S) def unescape(str): """ 反转译 """ return html.unescape(str) def excape(str): """ 转译 """ return html.escape(str) _regexs = {} # @log_function_time def get_info(html, regexs, allow_repeat=True, fetch_one=False, split=None): regexs = isinstance(regexs, str) and [regexs] or regexs infos = [] for regex in regexs: if regex == "": continue if regex not in _regexs.keys(): _regexs[regex] = re.compile(regex, re.S) if fetch_one: infos = _regexs[regex].search(html) if infos: infos = infos.groups() else: continue else: infos = _regexs[regex].findall(str(html)) if len(infos) > 0: # print(regex) break if fetch_one: infos = infos if infos else ("",) return infos if len(infos) > 1 else infos[0] else: infos = allow_repeat and infos or 
sorted(set(infos), key=infos.index) infos = split.join(infos) if split else infos return infos def table_json(table, save_one_blank=True): """ 将表格转为json 适应于 key:value 在一行类的表格 @param table: 使用selector封装后的具有xpath的selector @param save_one_blank: 保留一个空白符 @return: """ data = {} trs = table.xpath(".//tr") for tr in trs: tds = tr.xpath("./td|./th") for i in range(0, len(tds), 2): if i + 1 > len(tds) - 1: break key = tds[i].xpath("string(.)").extract_first(default="").strip() value = tds[i + 1].xpath("string(.)").extract_first(default="").strip() value = replace_str(value, "[\f\n\r\t\v]", "") value = replace_str(value, " +", " " if save_one_blank else "") if key: data[key] = value return data def get_table_row_data(table): """ 获取表格里每一行数据 @param table: 使用selector封装后的具有xpath的selector @return: [[],[]..] """ datas = [] rows = table.xpath(".//tr") for row in rows: cols = row.xpath("./td|./th") row_datas = [] for col in cols: data = col.xpath("string(.)").extract_first(default="").strip() row_datas.append(data) datas.append(row_datas) return datas def rows2json(rows, keys=None): """ 将行数据转为json @param rows: 每一行的数据 @param keys: json的key,空时将rows的第一行作为key @return: """ data_start_pos = 0 if keys else 1 datas = [] keys = keys or rows[0] for values in rows[data_start_pos:]: datas.append(dict(zip(keys, values))) return datas def get_form_data(form): """ 提取form中提交的数据 :param form: 使用selector封装后的具有xpath的selector :return: """ data = {} inputs = form.xpath(".//input") for input in inputs: name = input.xpath("./@name").extract_first() value = input.xpath("./@value").extract_first() if name: data[name] = value return data def get_domain(url): return urllib.parse.urlparse(url).netloc def get_index_url(url): return "/".join(url.split("/")[:3]) def get_ip(domain): ip = socket.getaddrinfo(domain, "http")[0][4][0] return ip def get_localhost_ip(): """ 利用 UDP 协议来实现的,生成一个UDP包,把自己的 IP 放如到 UDP 协议头中,然后从UDP包中获取本机的IP。 这个方法并不会真实的向外部发包,所以用抓包工具是看不到的 :return: """ s = None try: s = 
socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.connect(("8.8.8.8", 80)) ip = s.getsockname()[0] except: ip = "" finally: if s: s.close() return ip def ip_to_num(ip): import struct ip_num = socket.ntohl(struct.unpack("I", socket.inet_aton(str(ip)))[0]) return ip_num def is_valid_proxy(proxy, check_url=None): """ 检验代理是否有效 @param proxy: xxx.xxx.xxx:xxx @param check_url: 利用目标网站检查,目标网站url。默认为None, 使用代理服务器的socket检查, 但不能排除Connection closed by foreign host @return: True / False """ is_valid = False if check_url: proxies = {"http": f"http://{proxy}", "https": f"https://{proxy}"} headers = { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36" } response = None try: response = requests.get( check_url, headers=headers, proxies=proxies, stream=True, timeout=20 ) is_valid = True except Exception as e: log.error("check proxy failed: {} {}".format(e, proxy)) finally: if response: response.close() else: ip, port = proxy.split(":") with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sk: sk.settimeout(7) try: sk.connect((ip, int(port))) # 检查代理服务器是否开着 is_valid = True except Exception as e: log.error("check proxy failed: {} {}:{}".format(e, ip, port)) return is_valid def is_valid_url(url): """ 验证url是否合法 :param url: :return: """ if re.match(r"(^https?:/{2}\w.+$)|(ftp://)", url): return True else: return False def get_text(soup, *args): try: return soup.get_text() except Exception as e: log.error(e) return "" def del_html_tag(content, save_line_break=True, save_p=False, save_img=False): """ 删除html标签 @param content: html内容 @param save_p: 保留p标签 @param save_img: 保留图片标签 @param save_line_break: 保留\n换行 @return: """ if not content: return content # js content = re.sub("(?i)<script(.|\n)*?</script>", "", content) # (?)忽略大小写 # css content = re.sub("(?i)<style(.|\n)*?</style>", "", content) # (?)忽略大小写 # 注释 content = re.sub("<!--(.|\n)*?-->", "", content) # 干掉&nbsp;等无用的字符 但&xxx= 这种表示参数的除外 content = 
re.sub("(?!&[a-z]+=)&[a-z]+;?", "", content) if save_p and save_img: content = re.sub("<(?!(p[ >]|/p>|img ))(.|\n)+?>", "", content) elif save_p: content = re.sub("<(?!(p[ >]|/p>))(.|\n)+?>", "", content) elif save_img: content = re.sub("<(?!img )(.|\n)+?>", "", content) elif save_line_break: content = re.sub("<(?!/p>)(.|\n)+?>", "", content) content = re.sub("</p>", "\n", content) else: content = re.sub("<(.|\n)*?>", "", content) if save_line_break: # 把非换行符的空白符替换为一个空格 content = re.sub("[^\S\n]+", " ", content) # 把多个换行符替换为一个换行符 如\n\n\n 或 \n \n \n 替换为\n content = re.sub("(\n ?)+", "\n", content) else: content = re.sub("\s+", " ", content) content = content.strip() return content def del_html_js_css(content): content = replace_str(content, "(?i)<script(.|\n)*?</script>") # (?)忽略大小写 content = replace_str(content, "(?i)<style(.|\n)*?</style>") content = replace_str(content, "<!--(.|\n)*?-->") return content def is_have_chinese(content): regex = "[\u4e00-\u9fa5]+" chinese_word = get_info(content, regex) return chinese_word and True or False def is_have_english(content): regex = "[a-zA-Z]+" english_words = get_info(content, regex) return english_words and True or False def get_chinese_word(content): regex = "[\u4e00-\u9fa5]+" chinese_word = get_info(content, regex) return chinese_word def get_english_words(content): regex = "[a-zA-Z]+" english_words = get_info(content, regex) return english_words or "" ################################################## def get_json(json_str): """ @summary: 取json对象 --------- @param json_str: json格式的字符串 --------- @result: 返回json对象 """ try: return json.loads(json_str) if json_str else {} except Exception as e1: try: json_str = json_str.strip() json_str = json_str.replace("'", '"') keys = get_info(json_str, "(\w+):") for key in keys: json_str = json_str.replace(key, '"%s"' % key) return json.loads(json_str) if json_str else {} except Exception as e2: log.error( """ e1: %s format json_str: %s e2: %s """ % (e1, json_str, e2) ) return {} def 
jsonp2json(jsonp): """ 将jsonp转为json @param jsonp: jQuery172013600082560040794_1553230569815({}) @return: """ try: return json.loads(re.match(".*?({.*}).*", jsonp, re.S).group(1)) except: raise ValueError("Invalid Input") def dumps_json(data, indent=4, sort_keys=False): """ @summary: 格式化json 用于打印 --------- @param data: json格式的字符串或json对象 --------- @result: 格式化后的字符串 """ try: if isinstance(data, str): data = get_json(data) data = json.dumps( data, ensure_ascii=False, indent=indent, skipkeys=True, sort_keys=sort_keys, default=str, ) except Exception as e: data = pformat(data) return data def get_json_value(json_object, key): """ @summary: --------- @param json_object: json对象或json格式的字符串 @param key: 建值 如果在多个层级目录下 可写 key1.key2 如{'key1':{'key2':3}} --------- @result: 返回对应的值,如果没有,返回'' """ current_key = "" value = "" try: json_object = ( isinstance(json_object, str) and get_json(json_object) or json_object ) current_key = key.split(".")[0] value = json_object[current_key] key = key[key.find(".") + 1 :] except Exception as e: return value if key == current_key: return value else: return get_json_value(value, key) def get_all_keys(datas, depth=None, current_depth=0): """ @summary: 获取json李所有的key --------- @param datas: dict / list @param depth: 字典key的层级 默认不限制层级 层级从1开始 @param current_depth: 字典key的当前层级 不用传参 --------- @result: 返回json所有的key """ keys = [] if depth and current_depth >= depth: return keys if isinstance(datas, list): for data in datas: keys.extend(get_all_keys(data, depth, current_depth=current_depth + 1)) elif isinstance(datas, dict): for key, value in datas.items(): keys.append(key) if isinstance(value, dict): keys.extend(get_all_keys(value, depth, current_depth=current_depth + 1)) return keys def to_chinese(unicode_str): format_str = json.loads('{"chinese":"%s"}' % unicode_str) return format_str["chinese"] ################################################## def replace_str(source_str, regex, replace_str=""): """ @summary: 替换字符串 --------- @param source_str: 原字符串 @param 
def replace_str(source_str, regex, replace_str=""):
    """
    Replace every match of ``regex`` in ``source_str``.

    @param source_str: original string
    @param regex: regular expression to match
    @param replace_str: replacement text, defaults to ''
    @return: the string after replacement
    """
    str_info = re.compile(regex)
    return str_info.sub(replace_str, source_str)


def del_redundant_blank_character(text):
    """
    Collapse every run of whitespace into a single space.

    :param text:
    :return:
    """
    return re.sub("\s+", " ", text)


##################################################


def get_conf_value(config_file, section, key):
    """Read one value from an ini-style config file (UTF-8)."""
    cp = configparser.ConfigParser(allow_no_value=True)
    with codecs.open(config_file, "r", encoding="utf-8") as f:
        cp.read_file(f)
    return cp.get(section, key)


def mkdir(path):
    """
    Create ``path`` (and any missing parents); best-effort, errors ignored.
    """
    try:
        # exist_ok=True already handles the "already exists" case race-free;
        # the previous os.path.exists() pre-check was redundant and is dropped.
        os.makedirs(path, exist_ok=True)
    except OSError:
        # deliberately best-effort: callers treat a failed mkdir as non-fatal
        pass


def get_cache_path(filename, root_dir=None, local=False):
    """
    Build (and ensure the directory for) a cache file path.

    Args:
        filename: file name to place under the cache directory
        root_dir: cache root; defaults to ~/.feapder/cache (or ./.cache when local)
        local: store under the current script directory instead of the home dir

    Returns:
        full path of the cache file
    """
    if root_dir is None:
        if local:
            root_dir = os.path.join(sys.path[0], ".cache")
        else:
            root_dir = os.path.join(os.path.expanduser("~"), ".feapder/cache")

    # BUG FIX: the previous version ignored the ``filename`` argument and
    # always produced the literal name "(unknown)"; it also computed the
    # f-string twice.
    file_path = f"{root_dir}{os.sep}{filename}"
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    return file_path


def write_file(filename, content, mode="w", encoding="utf-8"):
    """
    Write content to a file, creating parent directories as needed.

    @param filename: file name (with path)
    @param content: content to write
    @param mode: open mode, w/w+ (overwrite)  # NOTE(review): original doc said 覆盖/追加; "a" would be append — confirm intent
    """
    directory = os.path.dirname(filename)
    mkdir(directory)
    with open(filename, mode, encoding=encoding) as file:
        file.writelines(content)


def read_file(filename, readlines=False, encoding="utf-8"):
    """
    Read a file.

    @param filename: file name (with path)
    @param readlines: read line-by-line (default False)
    @return: list of lines when readlines=True, else the whole string;
             None if reading failed
    """
    content = None
    try:
        with open(filename, "r", encoding=encoding) as file:
            content = file.readlines() if readlines else file.read()
    except Exception as e:
        log.error(e)

    return content


def get_oss_file_list(oss_handler, prefix, date_range_min, date_range_max=None):
    """
    Iterate file keys stored on OSS within a date range.

    @param prefix: path prefix, e.g. xxx/xxx
    @param date_range_min: range start, '/'-separated, e.g. 2019/03/01 or 2019/03/01/00/00/00
    @param date_range_max: range end, same format; defaults to date_range_min
    @return: yields each file key, e.g. html/xxx/2019/03/22/15/53/15/xxx.json.snappy
    """
    # derive the date granularity from how many '/' the input contains
    date_range_max = date_range_max or date_range_min
    date_format = "/".join(
        ["%Y", "%m", "%d", "%H", "%M", "%S"][: date_range_min.count("/") + 1]
    )
    time_interval = [
        {"days": 365},
        {"days": 31},
        {"days": 1},
        {"hours": 1},
        {"minutes": 1},
        {"seconds": 1},
    ][date_range_min.count("/")]
    date_range = get_between_date(
        date_range_min, date_range_max, date_format=date_format, **time_interval
    )

    for date in date_range:
        file_folder_path = os.path.join(prefix, date)
        objs = oss_handler.list(prefix=file_folder_path)
        for obj in objs:
            filename = obj.key
            yield filename


def is_html(url):
    """Best-effort check that ``url`` serves a text/html Content-Type."""
    if not url:
        return False

    try:
        content_type = request.urlopen(url).info().get("Content-Type", "")
        return "text/html" in content_type
    except Exception as e:
        log.error(e)
        return False


def is_exist(file_path):
    """
    @summary: whether the file (or directory) exists
    ---------
    @param file_path:
    ---------
    @result:
    """
    return os.path.exists(file_path)


def download_file(url, file_path, *, call_func=None, proxies=None, data=None):
    """
    Download a file, creating the destination directory automatically.

    Args:
        url: source address
        file_path: destination path
        call_func: callback invoked after a successful download
        proxies: proxy mapping for urllib
        data: request body

    Returns:
        1 on success, 0 on failure
    """
    directory = os.path.dirname(file_path)
    mkdir(directory)

    # progress bar
    def progress_callfunc(blocknum, blocksize, totalsize):
        """urlretrieve report hook
        @blocknum : number of blocks downloaded so far
        @blocksize : size of each block
        @totalsize: size of the remote file
        """
        percent = 100.0 * blocknum * blocksize / totalsize
        if percent > 100:
            percent = 100
        sys.stdout.write("进度条 %.2f%%" % percent + "\r")
        sys.stdout.flush()

    if url:
        try:
            if proxies:
                # install a module-level opener that routes through the proxy
                proxy = request.ProxyHandler(proxies)
                opener = request.build_opener(proxy)
                request.install_opener(opener)

            request.urlretrieve(url, file_path, progress_callfunc, data)

            if callable(call_func):
                call_func()
            return 1
        except Exception as e:
            log.error(e)
            return 0
    else:
        return 0
Exception as e: log.error(e) return 0 else: return 0 def get_file_list(path, ignore=[]): templist = path.split("*") path = templist[0] file_type = templist[1] if len(templist) >= 2 else "" # 递归遍历文件 def get_file_list_(path, file_type, ignore, all_file=[]): file_list = os.listdir(path) for file_name in file_list: if file_name in ignore: continue file_path = os.path.join(path, file_name) if os.path.isdir(file_path): get_file_list_(file_path, file_type, ignore, all_file) else: if not file_type or file_name.endswith(file_type): all_file.append(file_path) return all_file return get_file_list_(path, file_type, ignore) if os.path.isdir(path) else [path] def rename_file(old_name, new_name): os.rename(old_name, new_name) def del_file(path, ignore=()): files = get_file_list(path, ignore) for file in files: try: os.remove(file) except Exception as e: log.error( """ 删除出错: %s Exception : %s """ % (file, str(e)) ) finally: pass def get_file_type(file_name): """ @summary: 取文件后缀名 --------- @param file_name: --------- @result: """ try: return os.path.splitext(file_name)[1] except Exception as e: log.exception(e) def get_file_path(file_path): """ @summary: 取文件路径 --------- @param file_path: /root/a.py --------- @result: /root """ try: return os.path.split(file_path)[0] except Exception as e: log.exception(e) ############################################# def exec_js(js_code): """ @summary: 执行js代码 --------- @param js_code: js代码 --------- @result: 返回执行结果 """ return execjs.eval(js_code) def compile_js(js_func): """ @summary: 编译js函数 --------- @param js_func:js函数 --------- @result: 返回函数对象 调用 fun('js_funName', param1,param2) """ ctx = execjs.compile(js_func) return ctx.call ############################################# def date_to_timestamp(date, time_format="%Y-%m-%d %H:%M:%S"): """ @summary: --------- @param date:将"2011-09-28 10:00:00"时间格式转化为时间戳 @param format:时间格式 --------- @result: 返回时间戳 """ timestamp = time.mktime(time.strptime(date, time_format)) return int(timestamp) def 
timestamp_to_date(timestamp, time_format="%Y-%m-%d %H:%M:%S"): """ @summary: --------- @param timestamp: 将时间戳转化为日期 @param format: 日期格式 --------- @result: 返回日期 """ if timestamp is None: raise ValueError("timestamp is null") date = time.localtime(timestamp) return time.strftime(time_format, date) def get_current_timestamp(): return int(time.time()) def get_current_date(date_format="%Y-%m-%d %H:%M:%S"): return datetime.datetime.now().strftime(date_format) # return time.strftime(date_format, time.localtime(time.time())) def get_date_number(year=None, month=None, day=None): """ @summary: 获取指定日期对应的日期数 默认当前周 --------- @param year: 2010 @param month: 6 @param day: 16 --------- @result: (年号,第几周,第几天) 如 (2010, 24, 3) """ if year and month and day: return datetime.date(year, month, day).isocalendar() elif not any([year, month, day]): return datetime.datetime.now().isocalendar() else: assert year, "year 不能为空" assert month, "month 不能为空" assert day, "day 不能为空" def get_between_date( begin_date, end_date=None, date_format="%Y-%m-%d", **time_interval ): """ @summary: 获取一段时间间隔内的日期,默认为每一天 --------- @param begin_date: 开始日期 str 如 2018-10-01 @param end_date: 默认为今日 @param date_format: 日期格式,应与begin_date的日期格式相对应 @param time_interval: 时间间隔 默认一天 支持 days、seconds、microseconds、milliseconds、minutes、hours、weeks --------- @result: list 值为字符串 """ date_list = [] begin_date = datetime.datetime.strptime(begin_date, date_format) end_date = ( datetime.datetime.strptime(end_date, date_format) if end_date else datetime.datetime.strptime( time.strftime(date_format, time.localtime(time.time())), date_format ) ) time_interval = time_interval or dict(days=1) while begin_date <= end_date: date_str = begin_date.strftime(date_format) date_list.append(date_str) begin_date += datetime.timedelta(**time_interval) if end_date.strftime(date_format) not in date_list: date_list.append(end_date.strftime(date_format)) return date_list def get_between_months(begin_date, end_date=None): """ @summary: 获取一段时间间隔内的月份 需要满一整月 
--------- @param begin_date: 开始时间 如 2018-01-01 @param end_date: 默认当前时间 --------- @result: 列表 如 ['2018-01', '2018-02'] """ def add_months(dt, months): month = dt.month - 1 + months year = dt.year + month // 12 month = month % 12 + 1 day = min(dt.day, calendar.monthrange(year, month)[1]) return dt.replace(year=year, month=month, day=day) date_list = [] begin_date = datetime.datetime.strptime(begin_date, "%Y-%m-%d") end_date = ( datetime.datetime.strptime(end_date, "%Y-%m-%d") if end_date else datetime.datetime.strptime( time.strftime("%Y-%m-%d", time.localtime(time.time())), "%Y-%m-%d" ) ) while begin_date <= end_date: date_str = begin_date.strftime("%Y-%m") date_list.append(date_str) begin_date = add_months(begin_date, 1) return date_list def get_today_of_day(day_offset=0): return str(datetime.date.today() + datetime.timedelta(days=day_offset)) def get_days_of_month(year, month): """ 返回天数 """ return calendar.monthrange(year, month)[1] def get_firstday_of_month(date): """'' date format = "YYYY-MM-DD" """ year, month, day = date.split("-") year, month, day = int(year), int(month), int(day) days = "01" if int(month) < 10: month = "0" + str(int(month)) arr = (year, month, days) return "-".join("%s" % i for i in arr) def get_lastday_of_month(date): """'' get the last day of month date format = "YYYY-MM-DD" """ year, month, day = date.split("-") year, month, day = int(year), int(month), int(day) days = calendar.monthrange(year, month)[1] month = add_zero(month) arr = (year, month, days) return "-".join("%s" % i for i in arr) def get_firstday_month(month_offset=0): """'' get the first day of month from today month_offset is how many months """ (y, m, d) = get_year_month_and_days(month_offset) d = "01" arr = (y, m, d) return "-".join("%s" % i for i in arr) def get_lastday_month(month_offset=0): """'' get the last day of month from today month_offset is how many months """ return "-".join("%s" % i for i in get_year_month_and_days(month_offset)) def 
get_last_month(month_offset=0): """'' get the last day of month from today month_offset is how many months """ return "-".join("%s" % i for i in get_year_month_and_days(month_offset)[:2]) def get_year_month_and_days(month_offset=0): """ @summary: --------- @param month_offset: 月份偏移量 --------- @result: ('2019', '04', '30') """ today = datetime.datetime.now() year, month = today.year, today.month this_year = int(year) this_month = int(month) total_month = this_month + month_offset if month_offset >= 0: if total_month <= 12: days = str(get_days_of_month(this_year, total_month)) total_month = add_zero(total_month) return (year, total_month, days) else: i = total_month // 12 j = total_month % 12 if j == 0: i -= 1 j = 12 this_year += i days = str(get_days_of_month(this_year, j)) j = add_zero(j) return (str(this_year), str(j), days) else: if (total_month > 0) and (total_month < 12): days = str(get_days_of_month(this_year, total_month)) total_month = add_zero(total_month) return (year, total_month, days) else: i = total_month // 12 j = total_month % 12 if j == 0: i -= 1 j = 12 this_year += i days = str(get_days_of_month(this_year, j)) j = add_zero(j) return (str(this_year), str(j), days) def add_zero(n): return "%02d" % n def get_month(month_offset=0): """'' 获取当前日期前后N月的日期 if month_offset>0, 获取当前日期前N月的日期 if month_offset<0, 获取当前日期后N月的日期 date format = "YYYY-MM-DD" """ today = datetime.datetime.now() day = add_zero(today.day) (y, m, d) = get_year_month_and_days(month_offset) arr = (y, m, d) if int(day) < int(d): arr = (y, m, day) return "-".join("%s" % i for i in arr) @run_safe_model("format_date") def format_date(date, old_format="", new_format="%Y-%m-%d %H:%M:%S"): """ @summary: 格式化日期格式 --------- @param date: 日期 eg:2017年4月17日 3时27分12秒 @param old_format: 原来的日期格式 如 '%Y年%m月%d日 %H时%M分%S秒' %y 两位数的年份表示(00-99) %Y 四位数的年份表示(000-9999) %m 月份(01-12) %d 月内中的一天(0-31) %H 24小时制小时数(0-23) %I 12小时制小时数(01-12) %M 分钟数(00-59) %S 秒(00-59) @param new_format: 输出的日期格式 --------- @result: 格式化后的日期,类型为字符串 
如2017-4-17 03:27:12 """ if not date: return "" if not old_format: regex = "(\d+)" numbers = get_info(date, regex, allow_repeat=True) formats = ["%Y", "%m", "%d", "%H", "%M", "%S"] old_format = date for i, number in enumerate(numbers[:6]): if i == 0 and len(number) == 2: # 年份可能是两位 用小%y old_format = old_format.replace( number, formats[i].lower(), 1 ) # 替换一次 '2017年11月30日 11:49' 防止替换11月时,替换11小时 else: old_format = old_format.replace(number, formats[i], 1) # 替换一次 try: date_obj = datetime.datetime.strptime(date, old_format) if "T" in date and "Z" in date: date_obj += datetime.timedelta(hours=8) date_str = date_obj.strftime("%Y-%m-%d %H:%M:%S") else: date_str = datetime.datetime.strftime(date_obj, new_format) except Exception as e: log.error("日期格式化出错,old_format = %s 不符合 %s 格式" % (old_format, date)) date_str = date return date_str def transform_lower_num(data_str: str): num_map = { "一": "1", "二": "2", "两": "2", "三": "3", "四": "4", "五": "5", "六": "6", "七": "7", "八": "8", "九": "9", "十": "0", } pattern = f'[{"|".join(num_map.keys())}|零]' res = re.search(pattern, data_str) if not res: # 如果字符串中没有包含中文数字 不做处理 直接返回 return data_str data_str = data_str.replace("0", "零") for n in num_map: data_str = data_str.replace(n, num_map[n]) re_data_str = re.findall("\d+", data_str) for i in re_data_str: if len(i) == 3: new_i = i.replace("0", "") data_str = data_str.replace(i, new_i, 1) elif len(i) == 4: new_i = i.replace("10", "") data_str = data_str.replace(i, new_i, 1) elif len(i) == 2 and int(i) < 10: new_i = int(i) + 10 data_str = data_str.replace(i, str(new_i), 1) elif len(i) == 1 and int(i) == 0: new_i = int(i) + 10 data_str = data_str.replace(i, str(new_i), 1) return data_str.replace("零", "0") @run_safe_model("format_time") def format_time(release_time, date_format="%Y-%m-%d %H:%M:%S"): """ >>> format_time("2个月前") '2021-08-15 16:24:21' >>> format_time("2月前") '2021-08-15 16:24:36' """ release_time = transform_lower_num(release_time) release_time = release_time.replace("日", 
"天").replace("/", "-") if "年前" in release_time: years = re.compile("(\d+)\s*年前").findall(release_time) years_ago = datetime.datetime.now() - datetime.timedelta( days=int(years[0]) * 365 ) release_time = years_ago.strftime("%Y-%m-%d %H:%M:%S") elif "月前" in release_time: months = re.compile("(\d+)[\s个]*月前").findall(release_time) months_ago = datetime.datetime.now() - datetime.timedelta( days=int(months[0]) * 30 ) release_time = months_ago.strftime("%Y-%m-%d %H:%M:%S") elif "周前" in release_time: weeks = re.compile("(\d+)\s*周前").findall(release_time) weeks_ago = datetime.datetime.now() - datetime.timedelta(days=int(weeks[0]) * 7) release_time = weeks_ago.strftime("%Y-%m-%d %H:%M:%S") elif "天前" in release_time: ndays = re.compile("(\d+)\s*天前").findall(release_time) days_ago = datetime.datetime.now() - datetime.timedelta(days=int(ndays[0])) release_time = days_ago.strftime("%Y-%m-%d %H:%M:%S") elif "小时前" in release_time: nhours = re.compile("(\d+)\s*小时前").findall(release_time) hours_ago = datetime.datetime.now() - datetime.timedelta(hours=int(nhours[0])) release_time = hours_ago.strftime("%Y-%m-%d %H:%M:%S") elif "分钟前" in release_time: nminutes = re.compile("(\d+)\s*分钟前").findall(release_time) minutes_ago = datetime.datetime.now() - datetime.timedelta( minutes=int(nminutes[0]) ) release_time = minutes_ago.strftime("%Y-%m-%d %H:%M:%S") elif "前天" in release_time: today = datetime.date.today() yesterday = today - datetime.timedelta(days=2) release_time = release_time.replace("前天", str(yesterday)) elif "昨天" in release_time: today = datetime.date.today() yesterday = today - datetime.timedelta(days=1) release_time = release_time.replace("昨天", str(yesterday)) elif "今天" in release_time: release_time = release_time.replace("今天", get_current_date("%Y-%m-%d")) elif "刚刚" in release_time: release_time = get_current_date() elif re.search("^\d\d:\d\d", release_time): release_time = get_current_date("%Y-%m-%d") + " " + release_time elif not re.compile("\d{4}").findall(release_time): 
def to_date(date_str, date_format="%Y-%m-%d %H:%M:%S"):
    """Parse ``date_str`` into a datetime using ``date_format``."""
    return datetime.datetime.strptime(date_str, date_format)


def get_before_date(
    current_date,
    days,
    current_date_format="%Y-%m-%d %H:%M:%S",
    return_date_format="%Y-%m-%d %H:%M:%S",
):
    """
    Shift a date string by a number of days.

    @param current_date: current time, str
    @param days: offset in days; -1 means one day earlier, 1 one day later
    @param return_date_format: format of the returned string
    @result: shifted date as a string
    """
    current_date = to_date(current_date, current_date_format)
    date_obj = current_date + datetime.timedelta(days=days)
    return datetime.datetime.strftime(date_obj, return_date_format)


def delay_time(sleep_time=60):
    """
    Sleep; defaults to one minute.

    @param sleep_time: seconds
    """
    time.sleep(sleep_time)


def format_seconds(seconds):
    """
    Convert seconds into a "Xd Xh Xm Xs" style string.

    @param seconds:
    @result: e.g. 2天3小时2分49秒
    """
    seconds = int(seconds + 0.5)  # round half up (NOT a true ceiling)

    m, s = divmod(seconds, 60)
    h, m = divmod(m, 60)
    d, h = divmod(h, 24)

    times = ""
    if d:
        times += "{}天".format(d)
    if h:
        times += "{}小时".format(h)
    if m:
        times += "{}分".format(m)
    if s:
        times += "{}秒".format(s)

    # BUG FIX: previously returned "" when seconds rounded to 0
    return times or "0秒"


################################################


def get_md5(*args):
    """
    Unique 32-char md5 over the concatenation of all args.

    @param *args: values combined for deduplication
    @result: e.g. 7c8684bcbdfcea6697650aa53d7b1405
    """
    m = hashlib.md5()
    for arg in args:
        m.update(str(arg).encode())
    return m.hexdigest()


def get_sha1(*args):
    """
    Unique 40-char sha1, usable as an id.

    @param *args: values combined for deduplication
    @result: e.g. ba4868b3f277c8e387b55d9e3d0be7c045cdd89e
    """
    sha1 = hashlib.sha1()
    for arg in args:
        sha1.update(str(arg).encode())
    return sha1.hexdigest()  # 40 chars


def get_base64(data):
    """Base64-encode str(data); None passes through unchanged."""
    if data is None:
        return data
    return base64.b64encode(str(data).encode()).decode("utf8")


def get_uuid(key1="", key2=""):
    """
    Compute a uuid. With both keys empty a random uuid1 is produced;
    otherwise a deterministic uuid (version 3) derived from md5(key1+key2),
    usable as a compound unique index (e.g. domain + title).

    @param key1: str
    @param key2: str
    @result:
    """
    uuid_object = ""

    if not key1 and not key2:
        uuid_object = uuid.uuid1()
    else:
        hash = md5(bytes(key1, "utf-8") + bytes(key2, "utf-8")).digest()
        uuid_object = uuid.UUID(bytes=hash[:16], version=3)

    return str(uuid_object)


def get_hash(text):
    """Builtin hash() of text (NOT stable across interpreter runs)."""
    return hash(text)


##################################################


def cut_string(text, length):
    """
    Split text into chunks of ``length`` characters (last chunk may be shorter).

    @param text: text
    @param length: chunk length
    @result: list of chunks
    """
    text_list = re.findall(".{%d}" % length, text, re.S)
    leave_text = text[len(text_list) * length :]
    if leave_text:
        text_list.append(leave_text)

    return text_list


def get_random_string(length=1):
    """Random string of letters and digits.

    NOTE(review): random.sample draws without replacement, so ``length``
    must not exceed the alphabet size (62) and characters never repeat.
    """
    random_string = "".join(random.sample(string.ascii_letters + string.digits, length))
    return random_string


def get_random_password(length=8, special_characters=""):
    """
    Build a random password, default length 8, guaranteed to contain
    upper-case, lower-case and digit characters (and at least one special
    character when ``special_characters`` is given).

    @param length: password length, default 8
    @param special_characters: extra characters to draw from
    @result: password of the requested length
    """
    while True:
        random_password = "".join(
            random.sample(
                string.ascii_letters + string.digits + special_characters, length
            )
        )
        if (
            re.search("[0-9]", random_password)
            and re.search("[A-Z]", random_password)
            and re.search("[a-z]", random_password)
        ):
            if not special_characters:
                break
            elif set(random_password).intersection(special_characters):
                break

    return random_password


def get_random_email(length=None, email_types: list = None, special_characters=""):
    """
    Generate a random email address.

    :param length: local-part length (random 4-12 when omitted)
    :param email_types: candidate domains
    :param special_characters: special characters allowed in the local part
    :return: email string
    """
    if not length:
        length = random.randint(4, 12)
    if not email_types:
        email_types = [
            "qq.com",
            "163.com",
            "gmail.com",
            "yahoo.com",
            "hotmail.com",
            "yeah.net",
            "126.com",
            "139.com",
            "sohu.com",
        ]

    email_body = get_random_password(length, special_characters)
    email_type = random.choice(email_types)

    email = email_body + "@" + email_type
    return email
"139.com", "sohu.com", ] email_body = get_random_password(length, special_characters) email_type = random.choice(email_types) email = email_body + "@" + email_type return email ################################# def dumps_obj(obj): return pickle.dumps(obj) def loads_obj(obj_str): return pickle.loads(obj_str) def get_method(obj, name): name = str(name) try: return getattr(obj, name) except AttributeError: log.error("Method %r not found in: %s" % (name, obj)) return None def switch_workspace(project_path): """ @summary: --------- @param project_path: --------- @result: """ os.chdir(project_path) # 切换工作路经 ############### 数据库相关 ####################### def format_sql_value(value): if isinstance(value, str): value = value.strip() elif isinstance(value, (list, dict)): value = dumps_json(value, indent=None) elif isinstance(value, (datetime.date, datetime.time)): value = str(value) elif isinstance(value, bool): value = int(value) return value def list2str(datas): """ 列表转字符串 :param datas: [1, 2] :return: (1, 2) """ data_str = str(tuple(datas)) data_str = re.sub(",\)$", ")", data_str) return data_str def make_insert_sql( table, data, auto_update=False, update_columns=(), insert_ignore=False ): """ @summary: 适用于mysql, oracle数据库时间需要to_date 处理(TODO) --------- @param table: @param data: 表数据 json格式 @param auto_update: 使用的是replace into, 为完全覆盖已存在的数据 @param update_columns: 需要更新的列 默认全部,当指定值时,auto_update设置无效,当duplicate key冲突时更新指定的列 @param insert_ignore: 数据存在忽略 --------- @result: """ keys = ["`{}`".format(key) for key in data.keys()] keys = list2str(keys).replace("'", "") values = [format_sql_value(value) for value in data.values()] values = list2str(values) if update_columns: if not isinstance(update_columns, (tuple, list)): update_columns = [update_columns] update_columns_ = ", ".join( ["{key}=values({key})".format(key=key) for key in update_columns] ) sql = ( "insert%s into `{table}` {keys} values {values} on duplicate key update %s" % (" ignore" if insert_ignore else "", 
update_columns_) ) elif auto_update: sql = "replace into `{table}` {keys} values {values}" else: sql = "insert%s into `{table}` {keys} values {values}" % ( " ignore" if insert_ignore else "" ) sql = sql.format(table=table, keys=keys, values=values).replace("None", "null") return sql def make_update_sql(table, data, condition): """ @summary: 适用于mysql, oracle数据库时间需要to_date 处理(TODO) --------- @param table: @param data: 表数据 json格式 @param condition: where 条件 --------- @result: """ key_values = [] for key, value in data.items(): value = format_sql_value(value) if isinstance(value, str): key_values.append("`{}`={}".format(key, repr(value))) elif value is None: key_values.append("`{}`={}".format(key, "null")) else: key_values.append("`{}`={}".format(key, value)) key_values = ", ".join(key_values) sql = "update `{table}` set {key_values} where {condition}" sql = sql.format(table=table, key_values=key_values, condition=condition) return sql def make_batch_sql( table, datas, auto_update=False, update_columns=(), update_columns_value=() ): """ @summary: 生产批量的sql --------- @param table: @param datas: 表数据 [{...}] @param auto_update: 使用的是replace into, 为完全覆盖已存在的数据 @param update_columns: 需要更新的列 默认全部,当指定值时,auto_update设置无效,当duplicate key冲突时更新指定的列 @param update_columns_value: 需要更新的列的值 默认为datas里边对应的值, 注意 如果值为字符串类型 需要主动加单引号, 如 update_columns_value=("'test'",) --------- @result: """ if not datas: return keys = list(set([key for data in datas for key in data])) values_placeholder = ["%s"] * len(keys) values = [] for data in datas: value = [] for key in keys: current_data = data.get(key) current_data = format_sql_value(current_data) value.append(current_data) values.append(value) keys = ["`{}`".format(key) for key in keys] keys = list2str(keys).replace("'", "") values_placeholder = list2str(values_placeholder).replace("'", "") if update_columns: if not isinstance(update_columns, (tuple, list)): update_columns = [update_columns] if update_columns_value: update_columns_ = ", ".join( [ 
"`{key}`={value}".format(key=key, value=value) for key, value in zip(update_columns, update_columns_value) ] ) else: update_columns_ = ", ".join( ["`{key}`=values(`{key}`)".format(key=key) for key in update_columns] ) sql = "insert into `{table}` {keys} values {values_placeholder} on duplicate key update {update_columns}".format( table=table, keys=keys, values_placeholder=values_placeholder, update_columns=update_columns_, ) elif auto_update: sql = "replace into `{table}` {keys} values {values_placeholder}".format( table=table, keys=keys, values_placeholder=values_placeholder ) else: sql = "insert ignore into `{table}` {keys} values {values_placeholder}".format( table=table, keys=keys, values_placeholder=values_placeholder ) return sql, values ############### json相关 ####################### def key2underline(key: str, strict=True): """ >>> key2underline("HelloWord") 'hello_word' >>> key2underline("SHData", strict=True) 's_h_data' >>> key2underline("SHData", strict=False) 'sh_data' >>> key2underline("SHDataHi", strict=False) 'sh_data_hi' >>> key2underline("SHDataHi", strict=True) 's_h_data_hi' >>> key2underline("dataHi", strict=True) 'data_hi' """ regex = "[A-Z]*" if not strict else "[A-Z]" capitals = re.findall(regex, key) if capitals: for capital in capitals: if not capital: continue if key.startswith(capital): if len(capital) > 1: key = key.replace( capital, capital[:-1].lower() + "_" + capital[-1].lower(), 1 ) else: key = key.replace(capital, capital.lower(), 1) else: if len(capital) > 1: key = key.replace(capital, "_" + capital.lower() + "_", 1) else: key = key.replace(capital, "_" + capital.lower(), 1) return key.strip("_") def key2hump(key): """ 下划线试变成首字母大写 """ return key.title().replace("_", "") def format_json_key(json_data): json_data_correct = {} for key, value in json_data.items(): key = key2underline(key) json_data_correct[key] = value return json_data_correct def quick_to_json(text): """ @summary: 可快速将浏览器上的header转为json格式 --------- @param text: --------- 
@result: """ contents = text.split("\n") json = {} for content in contents: if content == "\n": continue content = content.strip() regex = ["(:?.*?):(.*)", "(.*?):? +(.*)", "([^:]*)"] result = get_info(content, regex) result = result[0] if isinstance(result[0], tuple) else result try: json[result[0]] = eval(result[1].strip()) except: json[result[0]] = result[1].strip() return json ############################## def print_pretty(object): pprint(object) def print_params2json(url): params_json = {} params = url.split("?")[-1].split("&") for param in params: key_value = param.split("=", 1) params_json[key_value[0]] = key_value[1] print(dumps_json(params_json)) def print_cookie2json(cookie_str_or_list): if isinstance(cookie_str_or_list, str): cookie_json = {} cookies = cookie_str_or_list.split("; ") for cookie in cookies: name, value = cookie.split("=") cookie_json[name] = value else: cookie_json = get_cookies_from_selenium_cookie(cookie_str_or_list) print(dumps_json(cookie_json)) ############################### def flatten(x): """flatten(sequence) -> list Returns a single, flat list which contains all elements retrieved from the sequence and all recursively contained sub-sequences (iterables). 
Examples: >>> [1, 2, [3,4], (5,6)] [1, 2, [3, 4], (5, 6)] >>> flatten([[[1,2,3], (42,None)], [4,5], [6], 7, (8,9,10)]) [1, 2, 3, 42, None, 4, 5, 6, 7, 8, 9, 10] >>> flatten(["foo", "bar"]) ['foo', 'bar'] >>> flatten(["foo", ["baz", 42], "bar"]) ['foo', 'baz', 42, 'bar'] """ return list(iflatten(x)) def iflatten(x): """iflatten(sequence) -> iterator Similar to ``.flatten()``, but returns iterator instead""" for el in x: if _is_listlike(el): for el_ in flatten(el): yield el_ else: yield el def _is_listlike(x): """ >>> _is_listlike("foo") False >>> _is_listlike(5) False >>> _is_listlike(b"foo") False >>> _is_listlike([b"foo"]) True >>> _is_listlike((b"foo",)) True >>> _is_listlike({}) True >>> _is_listlike(set()) True >>> _is_listlike((x for x in range(3))) True >>> _is_listlike(six.moves.xrange(5)) True """ return hasattr(x, "__iter__") and not isinstance(x, (six.text_type, bytes)) ################### def re_def_supper_class(obj, supper_class): """ 重新定义父类 @param obj: 类 如 class A: 则obj为A 或者 A的实例 a.__class__ @param supper_class: 父类 @return: """ obj.__bases__ = (supper_class,) ################### freq_limit_record = {} def reach_freq_limit(rate_limit, *key): """ 频率限制 :param rate_limit: 限制时间 单位秒 :param key: 频率限制的key :return: True / False """ if rate_limit == 0: return False msg_md5 = get_md5(*key) key = "rate_limit:{}".format(msg_md5) try: if get_redisdb().strget(key): return True get_redisdb().strset(key, time.time(), ex=rate_limit) except redis.exceptions.ConnectionError as e: # 使用内存做频率限制 global freq_limit_record if key not in freq_limit_record: freq_limit_record[key] = time.time() return False if time.time() - freq_limit_record.get(key) < rate_limit: return True else: freq_limit_record[key] = time.time() return False def dingding_warning( message, message_prefix=None, rate_limit=None, url=None, user_phone=None ): # 为了加载最新的配置 rate_limit = rate_limit if rate_limit is not None else setting.WARNING_INTERVAL url = url or setting.DINGDING_WARNING_URL user_phone = user_phone 
or setting.DINGDING_WARNING_PHONE if not all([url, message]): return if reach_freq_limit(rate_limit, url, user_phone, message_prefix or message): log.info("报警时间间隔过短,此次报警忽略。 内容 {}".format(message)) return if isinstance(user_phone, str): user_phone = [user_phone] if user_phone else [] data = { "msgtype": "text", "text": {"content": message}, "at": {"atMobiles": user_phone, "isAtAll": setting.DINGDING_WARNING_ALL}, } headers = {"Content-Type": "application/json"} try: response = requests.post( url, headers=headers, data=json.dumps(data).encode("utf8") ) result = response.json() response.close() if result.get("errcode") == 0: return True else: raise Exception(result.get("errmsg")) except Exception as e: log.error("报警发送失败。 报警内容 {}, error: {}".format(message, e)) return False def email_warning( message, title, message_prefix=None, email_sender=None, email_password=None, email_receiver=None, email_smtpserver=None, rate_limit=None, ): # 为了加载最新的配置 email_sender = email_sender or setting.EMAIL_SENDER email_password = email_password or setting.EMAIL_PASSWORD email_receiver = email_receiver or setting.EMAIL_RECEIVER email_smtpserver = email_smtpserver or setting.EMAIL_SMTPSERVER rate_limit = rate_limit if rate_limit is not None else setting.WARNING_INTERVAL if not all([message, email_sender, email_password, email_receiver]): return if reach_freq_limit( rate_limit, email_receiver, email_sender, message_prefix or message ): log.info("报警时间间隔过短,此次报警忽略。 内容 {}".format(message)) return if isinstance(email_receiver, str): email_receiver = [email_receiver] with EmailSender( username=email_sender, password=email_password, smtpserver=email_smtpserver ) as email: return email.send(receivers=email_receiver, title=title, content=message) def linkedsee_warning(message, rate_limit=3600, message_prefix=None, token=None): """ 灵犀电话报警 Args: message: rate_limit: message_prefix: token: Returns: """ if not token: log.info("未设置灵犀token,不支持报警") return if reach_freq_limit(rate_limit, token, 
message_prefix or message): log.info("报警时间间隔过短,此次报警忽略。 内容 {}".format(message)) return headers = {"servicetoken": token, "Content-Type": "application/json"} url = "http://www.linkedsee.com/alarm/zabbix" data = {"content": message} response = requests.post(url, data=json.dumps(data), headers=headers) return response def wechat_warning( message, message_prefix=None, rate_limit=None, url=None, user_phone=None, all_users: bool = None, ): """企业微信报警""" # 为了加载最新的配置 rate_limit = rate_limit if rate_limit is not None else setting.WARNING_INTERVAL url = url or setting.WECHAT_WARNING_URL user_phone = user_phone or setting.WECHAT_WARNING_PHONE all_users = all_users if all_users is not None else setting.WECHAT_WARNING_ALL if isinstance(user_phone, str): user_phone = [user_phone] if user_phone else [] if all_users is True or not user_phone: user_phone = ["@all"] if not all([url, message]): return if reach_freq_limit(rate_limit, url, user_phone, message_prefix or message): log.info("报警时间间隔过短,此次报警忽略。 内容 {}".format(message)) return data = { "msgtype": "text", "text": {"content": message, "mentioned_mobile_list": user_phone}, } headers = {"Content-Type": "application/json"} try: response = requests.post( url, headers=headers, data=json.dumps(data).encode("utf8") ) result = response.json() response.close() if result.get("errcode") == 0: return True else: raise Exception(result.get("errmsg")) except Exception as e: log.error("报警发送失败。 报警内容 {}, error: {}".format(message, e)) return False def feishu_warning(message, message_prefix=None, rate_limit=None, url=None, user=None): """ Args: message: message_prefix: rate_limit: url: user: {"open_id":"ou_xxxxx", "name":"xxxx"} 或 [{"open_id":"ou_xxxxx", "name":"xxxx"}] Returns: """ # 为了加载最新的配置 rate_limit = rate_limit if rate_limit is not None else setting.WARNING_INTERVAL url = url or setting.FEISHU_WARNING_URL user = user or setting.FEISHU_WARNING_USER if not all([url, message]): return if reach_freq_limit(rate_limit, url, user, message_prefix or 
message): log.info("报警时间间隔过短,此次报警忽略。 内容 {}".format(message)) return if isinstance(user, dict): user = [user] if user else [] at = "" if setting.FEISHU_WARNING_ALL: at = '<at user_id="all">所有人</at>' elif user: at = " ".join( [f'<at user_id="{u.get("open_id")}">{u.get("name")}</at>' for u in user] ) data = {"msg_type": "text", "content": {"text": at + message}} headers = {"Content-Type": "application/json"} try: response = requests.post( url, headers=headers, data=json.dumps(data).encode("utf8") ) result = response.json() response.close() if result.get("StatusCode") == 0: return True else: raise Exception(result.get("msg")) except Exception as e: log.error("报警发送失败。 报警内容 {}, error: {}".format(message, e)) return False def send_msg(msg, level="DEBUG", message_prefix=""): if setting.WARNING_LEVEL == "ERROR": if level.upper() != "ERROR": return if setting.DINGDING_WARNING_URL: keyword = "feapder报警系统\n" dingding_warning(keyword + msg, message_prefix=message_prefix) if setting.EMAIL_RECEIVER: title = message_prefix or msg if len(title) > 50: title = title[:50] + "..." 
email_warning(msg, message_prefix=message_prefix, title=title) if setting.WECHAT_WARNING_URL: keyword = "feapder报警系统\n" wechat_warning(keyword + msg, message_prefix=message_prefix) if setting.FEISHU_WARNING_URL: keyword = "feapder报警系统\n" feishu_warning(keyword + msg, message_prefix=message_prefix) ################### def make_item(cls, data: dict): """提供Item类与原数据,快速构建Item实例 :param cls: Item类 :param data: 字典格式的数据 """ item = cls() for key, val in data.items(): setattr(item, key, val) return item ################### def aio_wrap(loop=None, executor=None): """ wrap a normal sync version of a function to an async version """ outer_loop = loop outer_executor = executor def wrap(fn): @wraps(fn) async def run(*args, loop=None, executor=None, **kwargs): if loop is None: if outer_loop is None: loop = asyncio.get_event_loop() else: loop = outer_loop if executor is None: executor = outer_executor pfunc = partial(fn, *args, **kwargs) return await loop.run_in_executor(executor, pfunc) return run return wrap ######### number ########## def ensure_int(n): """ >>> ensure_int(None) 0 >>> ensure_int(False) 0 >>> ensure_int(12) 12 >>> ensure_int("72") 72 >>> ensure_int('') 0 >>> ensure_int('1') 1 """ if not n: return 0 return int(n) def ensure_float(n): """ >>> ensure_float(None) 0.0 >>> ensure_float(False) 0.0 >>> ensure_float(12) 12.0 >>> ensure_float("72") 72.0 """ if not n: return 0.0 return float(n) def import_cls(cls_info): module, class_name = cls_info.rsplit(".", 1) cls = importlib.import_module(module).__getattribute__(class_name) return cls
73,707
Python
.py
2,222
23.959046
143
0.563226
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,864
webdriver_pool.py
demigody_nas-tools/third_party/feapder/feapder/utils/webdriver/webdriver_pool.py
# -*- coding: utf-8 -*- """ Created on 2021/3/18 4:59 下午 --------- @summary: --------- @author: Boris @email: [email protected] """ import queue import threading from feapder.utils.log import log from feapder.utils.tools import Singleton from feapder.utils.webdriver.selenium_driver import SeleniumDriver @Singleton class WebDriverPool: def __init__( self, pool_size=5, driver_cls=SeleniumDriver, thread_safe=False, **kwargs ): """ Args: pool_size: driver池的大小 driver: 驱动类型 thread_safe: 是否线程安全 是则每个线程拥有一个driver,pool_size无效,driver数量为线程数 否则每个线程从池中获取driver **kwargs: """ self.pool_size = pool_size self.driver_cls = driver_cls self.thread_safe = thread_safe self.kwargs = kwargs self.queue = queue.Queue(maxsize=pool_size) self.lock = threading.RLock() self.driver_count = 0 self.ctx = threading.local() @property def driver(self): if not hasattr(self.ctx, "driver"): self.ctx.driver = None return self.ctx.driver @driver.setter def driver(self, driver): self.ctx.driver = driver @property def is_full(self): return self.driver_count >= self.pool_size def create_driver(self, user_agent: str = None, proxy: str = None): kwargs = self.kwargs.copy() if user_agent: kwargs["user_agent"] = user_agent if proxy: kwargs["proxy"] = proxy return self.driver_cls(**kwargs) def get(self, user_agent: str = None, proxy: str = None): """ 获取webdriver 当webdriver为新实例时会使用 user_agen, proxy, cookie参数来创建 Args: user_agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36 proxy: xxx.xxx.xxx.xxx Returns: """ if not self.is_full and not self.thread_safe: with self.lock: if not self.is_full: driver = self.create_driver(user_agent, proxy) self.queue.put(driver) self.driver_count += 1 elif self.thread_safe: if not self.driver: driver = self.create_driver(user_agent, proxy) self.driver = driver self.driver_count += 1 if self.thread_safe: driver = self.driver else: driver = self.queue.get() return driver def put(self, driver): if not 
self.thread_safe: self.queue.put(driver) def remove(self, driver): if self.thread_safe: if self.driver: self.driver.quit() self.driver = None else: driver.quit() self.driver_count -= 1 def close(self): if self.thread_safe: log.info("暂不支持关闭需线程安全的driver") while not self.queue.empty(): driver = self.queue.get() driver.quit() self.driver_count -= 1
3,282
Python
.py
97
23.103093
145
0.572423
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,865
__init__.py
demigody_nas-tools/third_party/feapder/feapder/utils/webdriver/__init__.py
# -*- coding: utf-8 -*- """ Created on 2022/9/7 4:39 PM --------- @summary: --------- @author: Boris @email: [email protected] """ from .selenium_driver import SeleniumDriver from .webdirver import InterceptRequest, InterceptResponse from .webdriver_pool import WebDriverPool # 为了兼容老代码 WebDriver = SeleniumDriver
332
Python
.py
14
21.642857
58
0.749175
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,866
selenium_driver.py
demigody_nas-tools/third_party/feapder/feapder/utils/webdriver/selenium_driver.py
# -*- coding: utf-8 -*- """ Created on 2021/3/18 4:59 下午 --------- @summary: --------- @author: Boris @email: [email protected] """ import json import logging import os from typing import Optional, Union, List import requests from selenium import webdriver from selenium.webdriver.common.desired_capabilities import DesiredCapabilities from selenium.webdriver.remote.webdriver import WebDriver as RemoteWebDriver from selenium.webdriver.chrome.service import Service as ChromeService from selenium.webdriver.firefox.service import Service as GeckoService from webdriver_manager.chrome import ChromeDriverManager from webdriver_manager.firefox import GeckoDriverManager from feapder.utils import tools from feapder.utils.log import log, OTHERS_LOG_LEVAL from feapder.utils.webdriver.webdirver import * # 屏蔽webdriver_manager日志 logging.getLogger("WDM").setLevel(OTHERS_LOG_LEVAL) class SeleniumDriver(WebDriver, RemoteWebDriver): CHROME = "CHROME" FIREFOX = "FIREFOX" __CHROME_ATTRS__ = { "port", "options", "service_args", "desired_capabilities", "service_log_path", "chrome_options", "keep_alive", } __FIREFOX_ATTRS__ = { "firefox_profile", "firefox_binary", "timeout", "capabilities", "proxy", "options", "service_log_path", "firefox_options", "service_args", "desired_capabilities", "log_path", "keep_alive", } def __init__(self, xhr_url_regexes: list = None, **kwargs): """ Args: xhr_url_regexes: 拦截xhr接口,支持正则,数组类型 **kwargs: """ super(SeleniumDriver, self).__init__(**kwargs) self._xhr_url_regexes = xhr_url_regexes if self._xhr_url_regexes and self._driver_type != SeleniumDriver.CHROME: raise Exception( "xhr_url_regexes only support by chrome now! 
eg: driver_type=SeleniumDriver.CHROME" ) if self._driver_type == SeleniumDriver.CHROME: self.driver = self.chrome_driver() elif self._driver_type == SeleniumDriver.FIREFOX: self.driver = self.firefox_driver() else: raise TypeError( "dirver_type must be one of CHROME or FIREFOX, but received {}".format( type(self._driver_type) ) ) # driver.get(url)一直不返回,但也不报错的问题,这时程序会卡住,设置超时选项能解决这个问题。 self.driver.set_page_load_timeout(self._timeout) # 设置10秒脚本超时时间 self.driver.set_script_timeout(self._timeout) self.url = None def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): if exc_val: log.error(exc_val) self.quit() return True def filter_kwargs(self, kwargs: dict, driver_attrs: set): if not kwargs: return {} data = {} for key, value in kwargs.items(): if key in driver_attrs: data[key] = value return data def get_driver(self): return self.driver def firefox_driver(self): firefox_profile = webdriver.FirefoxProfile() firefox_options = webdriver.FirefoxOptions() firefox_capabilities = webdriver.DesiredCapabilities.FIREFOX if self._proxy: proxy = self._proxy() if callable(self._proxy) else self._proxy firefox_capabilities["marionette"] = True firefox_capabilities["proxy"] = { "proxyType": "MANUAL", "httpProxy": proxy, "ftpProxy": proxy, "sslProxy": proxy, } if self._user_agent: firefox_profile.set_preference( "general.useragent.override", self._user_agent() if callable(self._user_agent) else self._user_agent, ) if not self._load_images: firefox_profile.set_preference("permissions.default.image", 2) if self._headless: firefox_options.add_argument("--headless") firefox_options.add_argument("--disable-gpu") # 添加自定义的配置参数 if self._custom_argument: for arg in self._custom_argument: firefox_options.add_argument(arg) kwargs = self.filter_kwargs(self._kwargs, self.__FIREFOX_ATTRS__) service = GeckoService(GeckoDriverManager().install()) driver = webdriver.Firefox( service=service, capabilities=firefox_capabilities, options=firefox_options, 
firefox_profile=firefox_profile, **kwargs, ) if self._window_size: driver.set_window_size(*self._window_size) return driver def chrome_driver(self): chrome_options = webdriver.ChromeOptions() # 此步骤很重要,设置为开发者模式,防止被各大网站识别出来使用了Selenium chrome_options.add_experimental_option("excludeSwitches", ["enable-automation"]) chrome_options.add_experimental_option("useAutomationExtension", False) # docker 里运行需要 chrome_options.add_argument("--no-sandbox") if self._proxy: chrome_options.add_argument( "--proxy-server={}".format( self._proxy() if callable(self._proxy) else self._proxy ) ) if self._user_agent: chrome_options.add_argument( "user-agent={}".format( self._user_agent() if callable(self._user_agent) else self._user_agent ) ) if not self._load_images: chrome_options.add_experimental_option( "prefs", {"profile.managed_default_content_settings.images": 2} ) if self._headless: chrome_options.add_argument("--headless") chrome_options.add_argument("--disable-gpu") if self._window_size: chrome_options.add_argument( "--window-size={},{}".format(self._window_size[0], self._window_size[1]) ) if self._download_path: os.makedirs(self._download_path, exist_ok=True) prefs = { "download.prompt_for_download": False, "download.default_directory": self._download_path, } chrome_options.add_experimental_option("prefs", prefs) # 添加自定义的配置参数 if self._custom_argument: for arg in self._custom_argument: chrome_options.add_argument(arg) kwargs = self.filter_kwargs(self._kwargs, self.__CHROME_ATTRS__) service = ChromeService(ChromeDriverManager().install) driver = webdriver.Chrome(service=service, options=chrome_options, **kwargs) # 隐藏浏览器特征 if self._use_stealth_js: with open( os.path.join(os.path.dirname(__file__), "../js/stealth.min.js") ) as f: js = f.read() driver.execute_cdp_cmd( "Page.addScriptToEvaluateOnNewDocument", {"source": js} ) if self._xhr_url_regexes: assert isinstance(self._xhr_url_regexes, list) with open( os.path.join(os.path.dirname(__file__), "../js/intercept.js") ) as f: js = 
f.read() driver.execute_cdp_cmd( "Page.addScriptToEvaluateOnNewDocument", {"source": js} ) js = f"window.__urlRegexes = {self._xhr_url_regexes}" driver.execute_cdp_cmd( "Page.addScriptToEvaluateOnNewDocument", {"source": js} ) if self._download_path: driver.command_executor._commands["send_command"] = ( "POST", "/session/$sessionId/chromium/send_command", ) params = { "cmd": "Page.setDownloadBehavior", "params": {"behavior": "allow", "downloadPath": self._download_path}, } driver.execute("send_command", params) return driver @property def domain(self): return tools.get_domain(self.url or self.driver.current_url) @property def cookies(self): cookies_json = {} for cookie in self.driver.get_cookies(): cookies_json[cookie["name"]] = cookie["value"] return cookies_json @cookies.setter def cookies(self, val: Union[dict, List[dict]]): """ 设置cookie Args: val: {"key":"value", "key2":"value2"} Returns: """ if isinstance(val, list): for cookie in val: # "path", "domain", "secure", "expiry" _cookie = { "name": cookie.get("name"), "value": cookie.get("value"), "domain": cookie.get("domain"), "path": cookie.get("path"), "expires": cookie.get("expires"), "secure": cookie.get("secure"), } self.driver.add_cookie(_cookie) else: for key, value in val.items(): self.driver.add_cookie({"name": key, "value": value}) @property def user_agent(self): return self.driver.execute_script("return navigator.userAgent;") def xhr_response(self, xhr_url_regex) -> Optional[InterceptResponse]: data = self.driver.execute_script( f'return window.__ajaxData["{xhr_url_regex}"];' ) if not data: return None request = InterceptRequest(**data["request"]) response = InterceptResponse(request, **data["response"]) return response def xhr_data(self, xhr_url_regex) -> Union[str, dict, None]: response = self.xhr_response(xhr_url_regex) if not response: return None return response.content def xhr_text(self, xhr_url_regex) -> Optional[str]: response = self.xhr_response(xhr_url_regex) if not response: return None if 
isinstance(response.content, dict): return json.dumps(response.content, ensure_ascii=False) return response.content def xhr_json(self, xhr_url_regex) -> Optional[dict]: text = self.xhr_text(xhr_url_regex) return json.loads(text) def __getattr__(self, name): if self.driver: return getattr(self.driver, name) else: raise AttributeError # def __del__(self): # self.quit()
10,939
Python
.py
280
27.560714
99
0.575573
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,867
webdirver.py
demigody_nas-tools/third_party/feapder/feapder/utils/webdriver/webdirver.py
# -*- coding: utf-8 -*- """ Created on 2022/9/7 4:27 PM --------- @summary: --------- @author: Boris @email: [email protected] """ import abc from feapder import setting class InterceptRequest: def __init__(self, url, data, headers): self.url = url self.data = data self.headers = headers class InterceptResponse: def __init__(self, request: InterceptRequest, url, headers, content, status_code): self.request = request self.url = url self.headers = headers self.content = content self.status_code = status_code class WebDriver: def __init__( self, load_images=True, user_agent=None, proxy=None, headless=False, driver_type=None, timeout=16, window_size=(1024, 800), custom_argument=None, download_path=None, auto_install_driver=True, use_stealth_js=True, **kwargs, ): """ webdirver 封装,支持chrome、和 firefox Args: load_images: 是否加载图片 user_agent: 字符串 或 无参函数,返回值为user_agent proxy: xxx.xxx.xxx.xxx:xxxx 或 无参函数,返回值为代理地址 headless: 是否启用无头模式 driver_type: CHROME 或 FIREFOX timeout: 请求超时时间 window_size: # 窗口大小 custom_argument: 自定义参数 用于webdriver.Chrome(options=chrome_options, **kwargs) download_path: 文件下载保存路径;如果指定,不再出现“保留”“放弃”提示,仅对Chrome有效 auto_install_driver: 自动下载浏览器驱动 支持chrome 和 firefox use_stealth_js: 使用stealth.min.js隐藏浏览器特征 **kwargs: """ self._load_images = load_images self._user_agent = user_agent or setting.DEFAULT_USERAGENT self._proxy = proxy self._headless = headless self._timeout = timeout self._window_size = window_size self._custom_argument = custom_argument self._download_path = download_path self._auto_install_driver = auto_install_driver self._use_stealth_js = use_stealth_js self._driver_type = driver_type self._kwargs = kwargs @abc.abstractmethod def quit(self): pass
2,407
Python
.py
70
23.1
87
0.604117
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,868
item.py
demigody_nas-tools/third_party/feapder/feapder/network/item.py
# -*- coding: utf-8 -*- """ Created on 2018-07-26 22:28:10 --------- @summary: 定义实体 --------- @author: Boris @email: [email protected] """ import re import feapder.utils.tools as tools class ItemMetaclass(type): def __new__(cls, name, bases, attrs): attrs.setdefault("__name__", None) attrs.setdefault("__table_name__", None) attrs.setdefault("__name_underline__", None) attrs.setdefault("__update_key__", None) attrs.setdefault("__unique_key__", None) return type.__new__(cls, name, bases, attrs) class Item(metaclass=ItemMetaclass): __unique_key__ = [] def __init__(self, **kwargs): self.__dict__ = kwargs def __repr__(self): return "<{}: {}>".format(self.item_name, tools.dumps_json(self.to_dict)) def __getitem__(self, key): return self.__dict__[key] def __setitem__(self, key, value): self.__dict__[key] = value def update(self, *args, **kwargs): """ 更新字段,与字典使用方法一致 """ self.__dict__.update(*args, **kwargs) def update_strict(self, *args, **kwargs): """ 更新严格更新,只更新item中有的字段 """ for key, value in dict(*args, **kwargs).items(): if key in self.__dict__: self.__dict__[key] = value def pre_to_db(self): """ 入库前的处理 """ pass @property def to_dict(self): propertys = {} for key, value in self.__dict__.items(): if key not in ( "__name__", "__table_name__", "__name_underline__", "__update_key__", "__unique_key__", ): if key.startswith(f"_{self.__class__.__name__}"): key = key.replace(f"_{self.__class__.__name__}", "") propertys[key] = value return propertys def to_sql(self, auto_update=False, update_columns=()): return tools.make_insert_sql( self.table_name, self.to_dict, auto_update, update_columns ) @property def item_name(self): return self.__name__ or self.__class__.__name__ @item_name.setter def item_name(self, name): self.__name__ = name self.__table_name__ = re.sub("_item$", "", self.name_underline) @property def table_name(self): if not self.__table_name__: self.__table_name__ = re.sub("_item$", "", self.name_underline) return self.__table_name__ @table_name.setter def 
table_name(self, name): self.__table_name__ = name self.__name__ = tools.key2hump(name) + "Item" @property def name_underline(self): if not self.__name_underline__: self.__name_underline__ = tools.key2underline(self.item_name) return self.__name_underline__ @name_underline.setter def name_underline(self, name): self.__name_underline__ = name @property def unique_key(self): return self.__unique_key__ or self.__class__.__unique_key__ @unique_key.setter def unique_key(self, keys): if isinstance(keys, (tuple, list)): self.__unique_key__ = keys else: self.__unique_key__ = (keys,) @property def fingerprint(self): args = [] for key, value in self.to_dict.items(): if value: if (self.unique_key and key in self.unique_key) or not self.unique_key: args.append(str(value)) if args: args = sorted(args) return tools.get_md5(*args) else: return None def to_UpdateItem(self): update_item = UpdateItem(**self.__dict__) update_item.item_name = self.item_name return update_item class UpdateItem(Item): __update_key__ = [] def __init__(self, **kwargs): super(UpdateItem, self).__init__(**kwargs) @property def update_key(self): return self.__update_key__ or self.__class__.__update_key__ @update_key.setter def update_key(self, keys): if isinstance(keys, (tuple, list)): self.__update_key__ = keys else: self.__update_key__ = (keys,)
4,341
Python
.py
127
25.149606
87
0.540224
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,869
proxy_pool_old.py
demigody_nas-tools/third_party/feapder/feapder/network/proxy_pool_old.py
# coding:utf8 """ 代理池 """ import datetime import json import os import random import socket import time from urllib import parse import redis import requests from feapder import setting from feapder.utils import tools from feapder.utils.log import log # 建立本地缓存代理文件夹 proxy_path = os.path.join(os.path.dirname(__file__), "proxy_file") if not os.path.exists(proxy_path): os.makedirs(proxy_path, exist_ok=True) def get_proxies_by_host(host, port): proxy_id = "{}:{}".format(host, port) return get_proxies_by_id(proxy_id) def get_proxies_by_id(proxy_id): proxies = { "http": "http://{}".format(proxy_id), "https": "http://{}".format(proxy_id), } return proxies def get_proxy_from_url(**kwargs): """ 获取指定url的代理 :param kwargs: :return: """ proxy_source_url = kwargs.get("proxy_source_url", []) if not isinstance(proxy_source_url, list): proxy_source_url = [proxy_source_url] proxy_source_url = [x for x in proxy_source_url if x] if not proxy_source_url: raise ValueError("no specify proxy_source_url: {}".format(proxy_source_url)) kwargs = kwargs.copy() kwargs.pop("proxy_source_url") proxies_list = [] for url in proxy_source_url: if url.startswith("http"): proxies_list.extend(get_proxy_from_http(url, **kwargs)) elif url.startswith("redis"): proxies_list.extend(get_proxy_from_redis(url, **kwargs)) if proxies_list: # 顺序打乱 random.shuffle(proxies_list) return proxies_list def get_proxy_from_http(proxy_source_url, **kwargs): """ 从指定 http 地址获取代理 :param proxy_source_url: :param kwargs: :return: """ filename = tools.get_md5(proxy_source_url) + ".txt" abs_filename = os.path.join(proxy_path, filename) update_interval = kwargs.get("local_proxy_file_cache_timeout", 60) update_flag = 0 if not update_interval: # 强制更新 update_flag = 1 elif not os.path.exists(abs_filename): # 文件不存在则更新 update_flag = 1 elif time.time() - os.stat(abs_filename).st_mtime > update_interval: # 超过更新间隔 update_flag = 1 if update_flag: response = requests.get(proxy_source_url, timeout=20) with open(os.path.join(proxy_path, 
filename), "w") as f: f.write(response.text) return get_proxy_from_file(filename) def get_proxy_from_file(filename, **kwargs): """ 从指定本地文件获取代理 文件格式 ip:port:https ip:port:http ip:port :param filename: :param kwargs: :return: """ proxies_list = [] with open(os.path.join(proxy_path, filename), "r") as f: lines = f.readlines() for line in lines: line = line.strip() if not line: continue # 解析 auth = "" if "@" in line: auth, line = line.split("@") # items = line.split(":") if len(items) < 2: continue ip, port, *protocol = items if not all([port, ip]): continue if auth: ip = "{}@{}".format(auth, ip) if not protocol: proxies = { "https": "http://%s:%s" % (ip, port), "http": "http://%s:%s" % (ip, port), } else: proxies = {protocol[0]: "%s://%s:%s" % (protocol[0], ip, port)} proxies_list.append(proxies) return proxies_list def get_proxy_from_redis(proxy_source_url, **kwargs): """ 从指定 redis 地址获取代理 @param proxy_source_url: redis://:passwd@host:ip/db redis 存储结构 zset ip:port ts @param kwargs: {"redis_proxies_key": "xxx"} @return: [{'http':'http://xxx.xxx.xxx:xxx', 'https':'http://xxx.xxx.xxx.xxx:xxx'}] """ redis_conn = redis.StrictRedis.from_url(proxy_source_url) key = kwargs.get("redis_proxies_key") assert key, "从redis中获取代理 需要指定 redis_proxies_key" proxies = redis_conn.zrange(key, 0, -1) proxies_list = [] for proxy in proxies: proxy = proxy.decode() proxies_list.append( {"https": "http://%s" % proxy, "http": "http://%s" % proxy} ) return proxies_list def check_proxy( ip="", port="", proxies=None, type=0, timeout=5, logger=None, show_error_log=True, **kwargs, ): """ 代理有效性检查 :param ip: :param port: :param type: 0:socket 1:requests :param timeout: :param logger: :return: """ if not logger: logger = log ok = 0 if type == 0 and ip and port: # socket检测成功 不代表代理一定可用 Connection closed by foreign host. 
这种情况就不行 with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sk: sk.settimeout(timeout) try: # 必须检测 否则代理永远不刷新 sk.connect((ip, int(port))) ok = 1 except Exception as e: if show_error_log: logger.debug("check proxy failed: {} {}:{}".format(e, ip, port)) sk.close() else: if not proxies: proxies = { "http": "http://{}:{}".format(ip, port), "https": "http://{}:{}".format(ip, port), } try: r = requests.get( "http://www.baidu.com", proxies=proxies, timeout=timeout, stream=True ) ok = 1 r.close() except Exception as e: if show_error_log: logger.debug( "check proxy failed: {} {}:{} {}".format(e, ip, port, proxies) ) return ok class ProxyItem(object): """单个代理对象""" # 代理标记 proxy_tag_list = (-1, 0, 1) def __init__( self, proxies=None, valid_timeout=20, check_interval=180, max_proxy_use_num=10000, delay=30, use_interval=None, **kwargs, ): """ :param proxies: :param valid_timeout: 代理检测超时时间 默认-1 20181008 默认不再监测有效性 :param check_interval: :param max_proxy_use_num: :param delay: :param use_interval: 使用间隔 单位秒 默认不限制 :param logger: 日志处理器 默认 log.get_logger() :param kwargs: """ # {"http": ..., "https": ...} self.proxies = proxies # 检测超时时间 秒 self.valid_timeout = valid_timeout # 检测间隔 秒 self.check_interval = check_interval # 标记 0:正常 -1:丢弃 1: 待会再用 ... 
self.flag = 0 # 上次状态变化时间 self.flag_ts = 0 # 上次更新时间 有效时间 self.update_ts = 0 # 最大被使用次数 self.max_proxy_use_num = max_proxy_use_num # 被使用次数记录 self.use_num = 0 # 延迟使用时间 self.delay = delay # 使用间隔 单位秒 self.use_interval = use_interval # 使用时间 self.use_ts = 0 self.proxy_args = self.parse_proxies(self.proxies) self.proxy_ip = self.proxy_args["ip"] self.proxy_port = self.proxy_args["port"] self.proxy_ip_port = "{}:{}".format(self.proxy_ip, self.proxy_port) if self.proxy_args["user"]: self.proxy_id = "{user}:{password}@{ip}:{port}".format(**self.proxy_args) else: self.proxy_id = self.proxy_ip_port # 日志处理器 self.logger = log def get_proxies(self): self.use_num += 1 return self.proxies def is_delay(self): return self.flag == 1 def is_valid(self, force=0, type=0): """ 检测代理是否有效 1 有效 2 延时使用 0 无效 直接在代理池删除 :param force: :param type: :return: """ if self.use_num > self.max_proxy_use_num > 0: self.logger.debug("代理达到最大使用次数: {} {}".format(self.use_num, self.proxies)) return 0 if self.flag == -1: self.logger.debug("代理被标记 -1 丢弃 %s" % self.proxies) return 0 if self.delay > 0 and self.flag == 1: if time.time() - self.flag_ts < self.delay: self.logger.debug("代理被标记 1 延迟 %s" % self.proxies) return 2 else: self.flag = 0 self.logger.debug("延迟代理释放: {}".format(self.proxies)) if self.use_interval: if time.time() - self.use_ts < self.use_interval: return 2 if not force: if time.time() - self.update_ts < self.check_interval: return 1 if self.valid_timeout > 0: ok = check_proxy( proxies=self.proxies, type=type, timeout=self.valid_timeout, logger=self.logger, ) else: ok = 1 self.update_ts = time.time() return ok @classmethod def parse_proxies(self, proxies): """ 分解代理组成部分 :param proxies: :return: """ if not proxies: return {} if isinstance(proxies, (str, bytes)): proxies = json.loads(proxies) protocol = list(proxies.keys()) if not protocol: return {} _url = proxies.get(protocol[0]) if not _url.startswith("http"): _url = "http://" + _url _url_parse = parse.urlparse(_url) netloc = _url_parse.netloc if "@" in 
netloc: netloc_auth, netloc_host = netloc.split("@") else: netloc_auth, netloc_host = "", netloc ip, *port = netloc_host.split(":") port = port[0] if port else "80" user, *password = netloc_auth.split(":") password = password[0] if password else "" return { "protocol": protocol, "ip": ip, "port": port, "user": user, "password": password, "ip_port": "{}:{}".format(ip, port), } class ProxyPoolBase(object): def __init__(self, *args, **kwargs): pass def get(self, *args, **kwargs): raise NotImplementedError class ProxyPool(ProxyPoolBase): """代理池""" def __init__(self, **kwargs): """ :param size: 代理池大小 -1 为不限制 :param proxy_source_url: 代理文件地址 支持列表 :param proxy_instance: 提供代理的实例 :param reset_interval: 代理池重置间隔 最小间隔 :param reset_interval_max: 代理池重置间隔 最大间隔 默认2分钟 :param check_valid: 是否在获取代理时进行检测有效性 :param local_proxy_file_cache_timeout: 本地缓存的代理文件超时时间 :param logger: 日志处理器 默认 log.get_logger() :param kwargs: 其他的参数 """ kwargs.setdefault("size", -1) kwargs.setdefault("proxy_source_url", setting.PROXY_EXTRACT_API) super(ProxyPool, self).__init__(**kwargs) # 队列最大长度 self.max_queue_size = kwargs.get("size", -1) # 实际代理数量 self.real_max_proxy_count = 1000 # 代理可用最大次数 # 代理获取地址 http://localhost/proxy.txt self.proxy_source_url = kwargs.get("proxy_source_url", []) if not isinstance(self.proxy_source_url, list): self.proxy_source_url = [self.proxy_source_url] self.proxy_source_url = [x for x in self.proxy_source_url if x] self.proxy_source_url = list(set(self.proxy_source_url)) kwargs.update({"proxy_source_url": self.proxy_source_url}) # 处理日志 self.logger = kwargs.get("logger") or log kwargs["logger"] = self.logger if not self.proxy_source_url: self.logger.warn("need set proxy_source_url or proxy_instance") # 代理池重置间隔 self.reset_interval = kwargs.get("reset_interval", 5) # 强制重置一下代理 添加新的代理进来 防止一直使用旧的被封的代理 self.reset_interval_max = kwargs.get("reset_interval_max", 180) # 是否监测代理有效性 self.check_valid = kwargs.get("check_valid", True) # 代理队列 self.proxy_queue = None # {代理id: ProxyItem, ...} 
self.proxy_dict = {} # 失效代理队列 self.invalid_proxy_dict = {} self.kwargs = kwargs # 重置代理池锁 self.reset_lock = None # 重置时间 self.last_reset_time = 0 # 重置的太快了 计数 self.reset_fast_count = 0 # 计数 获取代理重试3次仍然失败 次数 self.no_valid_proxy_times = 0 # 上次获取代理时间 self.last_get_ts = time.time() # 记录ProxyItem的update_ts 防止由于重置太快导致重复检测有效性 self.proxy_item_update_ts_dict = {} # 警告 self.warn_flag = False def warn(self): if not self.warn_flag: for url in self.proxy_source_url: if "zhima" in url: continue self.warn_flag = True return @property def queue_size(self): """ 当前代理池中代理数量 :return: """ return self.proxy_queue.qsize() if self.proxy_queue is not None else 0 def clear(self): """ 清空自己 :return: """ self.proxy_queue = None # {代理ip: ProxyItem, ...} self.proxy_dict = {} # 清理失效代理集合 _limit = datetime.datetime.now() - datetime.timedelta(minutes=10) self.invalid_proxy_dict = { k: v for k, v in self.invalid_proxy_dict.items() if v > _limit } # 清理超时的update_ts记录 _limit = time.time() - 600 self.proxy_item_update_ts_dict = { k: v for k, v in self.proxy_item_update_ts_dict.items() if v > _limit } return def get(self, retry: int = 0) -> dict: """ 从代理池中获取代理 :param retry: :return: """ retry += 1 if retry > 3: self.no_valid_proxy_times += 1 return None if time.time() - self.last_get_ts > 3 * 60: # 3分钟没有获取过 重置一下 try: self.reset_proxy_pool() except Exception as e: self.logger.exception(e) # 记录获取时间 self.last_get_ts = time.time() # self.warn() proxy_item = self.get_random_proxy() if proxy_item: # 不检测 if not self.check_valid: # 塞回去 proxies = proxy_item.get_proxies() self.put_proxy_item(proxy_item) return proxies else: is_valid = proxy_item.is_valid() if is_valid: # 记录update_ts self.proxy_item_update_ts_dict[ proxy_item.proxy_id ] = proxy_item.update_ts # 塞回去 proxies = proxy_item.get_proxies() self.put_proxy_item(proxy_item) if is_valid == 1: if proxy_item.use_interval: proxy_item.use_ts = time.time() return proxies else: # 处理失效代理 self.proxy_dict.pop(proxy_item.proxy_id, "") self.invalid_proxy_dict[ 
proxy_item.proxy_id ] = datetime.datetime.now() else: try: self.reset_proxy_pool() except Exception as e: self.logger.exception(e) if self.no_valid_proxy_times >= 5: # 解决bug: 当爬虫仅剩一个任务时 由于只有一个线程检测代理 而不可用代理又刚好很多(时间越长越多) 可能出现一直获取不到代理的情况 # 导致爬虫烂尾 try: self.reset_proxy_pool() except Exception as e: self.logger.exception(e) return self.get(retry) get_proxy = get def get_random_proxy(self) -> ProxyItem: """ 随机获取代理 :return: """ if self.proxy_queue is not None: if random.random() < 0.5: # 一半概率检查 这是个高频操作 优化一下 if time.time() - self.last_reset_time > self.reset_interval_max: self.reset_proxy_pool(force=True) else: min_q_size = ( min(self.max_queue_size / 2, self.real_max_proxy_count / 2) if self.max_queue_size > 0 else self.real_max_proxy_count / 2 ) if self.proxy_queue.qsize() < min_q_size: self.reset_proxy_pool() try: return self.proxy_queue.get_nowait() except Exception: pass return None def append_proxies(self, proxies_list: list) -> int: """ 添加代理到代理池 :param proxies_list: :return: """ count = 0 if not isinstance(proxies_list, list): proxies_list = [proxies_list] for proxies in proxies_list: if proxies: proxy_item = ProxyItem(proxies=proxies, **self.kwargs) # 增加失效判断 2018/12/18 if proxy_item.proxy_id in self.invalid_proxy_dict: continue if proxy_item.proxy_id not in self.proxy_dict: # 补充update_ts if not proxy_item.update_ts: proxy_item.update_ts = self.proxy_item_update_ts_dict.get( proxy_item.proxy_id, 0 ) self.put_proxy_item(proxy_item) self.proxy_dict[proxy_item.proxy_id] = proxy_item count += 1 return count def put_proxy_item(self, proxy_item: ProxyItem): """ 添加 ProxyItem 到代理池 :param proxy_item: :return: """ return self.proxy_queue.put_nowait(proxy_item) def reset_proxy_pool(self, force: bool = False): """ 重置代理池 :param force: 是否强制重置代理池 :return: """ if not self.reset_lock: # 必须用时调用 否则 可能存在 gevent patch前 threading就已经被导入 导致的Rlock patch失效 import threading self.reset_lock = threading.RLock() with self.reset_lock: if ( force or self.proxy_queue is None or ( 
self.max_queue_size > 0 and self.proxy_queue.qsize() < self.max_queue_size / 2 ) or ( self.max_queue_size < 0 and self.proxy_queue.qsize() < self.real_max_proxy_count / 2 ) or self.no_valid_proxy_times >= 5 ): if time.time() - self.last_reset_time < self.reset_interval: self.reset_fast_count += 1 if self.reset_fast_count % 10 == 0: self.logger.debug( "代理池重置的太快了:) {}".format(self.reset_fast_count) ) time.sleep(1) else: self.clear() if self.proxy_queue is None: import queue self.proxy_queue = queue.Queue() # TODO 这里获取到的可能重复 proxies_list = get_proxy_from_url(**self.kwargs) self.real_max_proxy_count = len(proxies_list) if 0 < self.max_queue_size < self.real_max_proxy_count: proxies_list = random.sample(proxies_list, self.max_queue_size) _valid_count = self.append_proxies(proxies_list) self.last_reset_time = time.time() self.no_valid_proxy_times = 0 self.logger.debug( "重置代理池成功: 获取{}, 成功添加{}, 失效{}, 当前代理数{},".format( len(proxies_list), _valid_count, len(self.invalid_proxy_dict), len(self.proxy_dict), ) ) return def tag_proxy(self, proxies_list: list, flag: int, *, delay=30) -> bool: """ 对代理进行标记 :param proxies_list: :param flag: -1 废弃 1 延迟使用 :param delay: 延迟时间 :return: """ if int(flag) not in ProxyItem.proxy_tag_list or not proxies_list: return False if not isinstance(proxies_list, list): proxies_list = [proxies_list] for proxies in proxies_list: if not proxies: continue proxy_id = ProxyItem(proxies).proxy_id if proxy_id not in self.proxy_dict: continue self.proxy_dict[proxy_id].flag = flag self.proxy_dict[proxy_id].flag_ts = time.time() self.proxy_dict[proxy_id].delay = delay return True def get_proxy_item(self, proxy_id="", proxies=None): """ 获取代理对象 :param proxy_id: :param proxies: :return: """ if proxy_id: return self.proxy_dict.get(proxy_id) if proxies: proxy_id = ProxyItem(proxies).proxy_id return self.proxy_dict.get(proxy_id) return def copy(self): return ProxyPool(**self.kwargs) def all(self) -> list: """ 获取当前代理池中的全部代理 :return: """ return 
get_proxy_from_url(**self.kwargs)
23,025
Python
.py
641
22.436817
87
0.516261
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,870
response.py
demigody_nas-tools/third_party/feapder/feapder/network/response.py
# -*- coding: utf-8 -*- """ Created on 2018-07-26 11:40:28 --------- @summary: --------- @author: Boris @email: [email protected] """ import datetime import os import re import tempfile import webbrowser from urllib.parse import urlparse, urlunparse, urljoin from bs4 import UnicodeDammit, BeautifulSoup from requests.cookies import RequestsCookieJar from requests.models import Response as res from w3lib.encoding import http_content_type_encoding, html_body_declared_encoding from feapder import setting from feapder.network.selector import Selector from feapder.utils.log import log FAIL_ENCODING = "ISO-8859-1" # html 源码中的特殊字符,需要删掉,否则会影响etree的构建 SPECIAL_CHARACTERS = [ # 移除控制字符 全部字符列表 https://zh.wikipedia.org/wiki/%E6%8E%A7%E5%88%B6%E5%AD%97%E7%AC%A6 "[\x00-\x08\x0B\x0C\x0E-\x1F\x7F-\x9F]" ] SPECIAL_CHARACTER_PATTERNS = [ re.compile(special_character) for special_character in SPECIAL_CHARACTERS ] class Response(res): def __init__(self, response, make_absolute_links=None): """ Args: response: requests请求返回的response make_absolute_links: 是否自动补全url """ super(Response, self).__init__() self.__dict__.update(response.__dict__) self.make_absolute_links = ( make_absolute_links if make_absolute_links is not None else setting.MAKE_ABSOLUTE_LINKS ) self._cached_selector = None self._cached_text = None self._cached_json = None self._encoding = None self.encoding_errors = "strict" # strict / replace / ignore self.browser = self.driver = None @classmethod def from_text( cls, text: str, url: str = "", cookies: dict = None, headers: dict = None, encoding="utf-8", ): response_dict = { "_content": text.encode(encoding=encoding), "cookies": cookies or {}, "encoding": encoding, "headers": headers or {}, "status_code": 200, "elapsed": 0, "url": url, } return cls.from_dict(response_dict) @classmethod def from_dict(cls, response_dict): """ 利用字典获取Response对象 @param response_dict: 原生的response.__dict__ @return: """ cookie_jar = RequestsCookieJar() cookie_jar.update(other=response_dict["cookies"]) 
response_dict["cookies"] = cookie_jar response_dict["elapsed"] = datetime.timedelta( 0, 0, response_dict["elapsed"] ) # 耗时 response_dict["connection"] = None response_dict["_content_consumed"] = True response = res() response.__dict__.update(response_dict) return cls(response) @property def to_dict(self): response_dict = { "_content": self.content, "cookies": self.cookies.get_dict(), "encoding": self.encoding, "headers": self.headers, "status_code": self.status_code, "elapsed": self.elapsed.microseconds, # 耗时 "url": self.url, } return response_dict def __clear_cache(self): self.__dict__["_cached_selector"] = None self.__dict__["_cached_text"] = None self.__dict__["_cached_json"] = None @property def encoding(self): """ 编码优先级:自定义编码 > header中编码 > 页面编码 > 根据content猜测的编码 """ self._encoding = ( self._encoding or self._headers_encoding() or self._body_declared_encoding() or self.apparent_encoding ) return self._encoding @encoding.setter def encoding(self, val): self.__clear_cache() self._encoding = val code = encoding def _headers_encoding(self): """ 从headers获取头部charset编码 """ content_type = self.headers.get("Content-Type") or self.headers.get( "content-type" ) if content_type: return ( http_content_type_encoding(content_type) or "utf-8" if "application/json" in content_type else None ) def _body_declared_encoding(self): """ 从html xml等获取<meta charset="编码"> """ return html_body_declared_encoding(self.content) def _get_unicode_html(self, html): if not html or not isinstance(html, bytes): return html converted = UnicodeDammit(html, is_html=True) if not converted.unicode_markup: raise Exception( "Failed to detect encoding of article HTML, tried: %s" % ", ".join(converted.tried_encodings) ) html = converted.unicode_markup return html def _make_absolute(self, link): """Makes a given link absolute.""" try: link = link.strip() # Parse the link with stdlib. parsed = urlparse(link)._asdict() # If link is relative, then join it with base_url. 
if not parsed["netloc"]: return urljoin(self.url, link) # Link is absolute; if it lacks a scheme, add one from base_url. if not parsed["scheme"]: parsed["scheme"] = urlparse(self.url).scheme # Reconstruct the URL to incorporate the new scheme. parsed = (v for v in parsed.values()) return urlunparse(parsed) except Exception as e: log.error( "Invalid URL <{}> can't make absolute_link. exception: {}".format( link, e ) ) # Link is absolute and complete with scheme; nothing to be done here. return link def _absolute_links(self, text): regexs = [ r'(<a.*?href\s*?=\s*?["\'])(.+?)(["\'])', # a r'(<img.*?src\s*?=\s*?["\'])(.+?)(["\'])', # img r'(<link.*?href\s*?=\s*?["\'])(.+?)(["\'])', # css r'(<script.*?src\s*?=\s*?["\'])(.+?)(["\'])', # js ] for regex in regexs: def replace_href(text): # html = text.group(0) link = text.group(2) absolute_link = self._make_absolute(link) # return re.sub(regex, r'\1{}\3'.format(absolute_link), html) # 使用正则替换,个别字符不支持。如该网址源代码http://permit.mep.gov.cn/permitExt/syssb/xxgk/xxgk!showImage.action?dataid=0b092f8115ff45c5a50947cdea537726 return text.group(1) + absolute_link + text.group(3) text = re.sub(regex, replace_href, text, flags=re.S | re.I) return text def _del_special_character(self, text): """ 删除特殊字符 """ for special_character_pattern in SPECIAL_CHARACTER_PATTERNS: text = special_character_pattern.sub("", text) return text @property def __text(self): """Content of the response, in unicode. If Response.encoding is None, encoding will be guessed using ``chardet``. The encoding of the response content is determined based solely on HTTP headers, following RFC 2616 to the letter. If you can take advantage of non-HTTP knowledge to make a better guess at the encoding, you should set ``r.encoding`` appropriately before accessing this property. """ if not self.content: return "" # Decode unicode from given encoding. 
try: content = str(self.content, self.encoding, errors=self.encoding_errors) except (LookupError, TypeError): # A LookupError is raised if the encoding was not found which could # indicate a misspelling or similar mistake. # # A TypeError can be raised if encoding is None # # So we try blindly encoding. content = str(self.content, errors=self.encoding_errors) return content @property def text(self): if self._cached_text is None: if self.encoding and self.encoding.upper() != FAIL_ENCODING: try: self._cached_text = self.__text except UnicodeDecodeError: self._cached_text = self._get_unicode_html(self.content) else: self._cached_text = self._get_unicode_html(self.content) if self._cached_text: if self.make_absolute_links: self._cached_text = self._absolute_links(self._cached_text) self._cached_text = self._del_special_character(self._cached_text) return self._cached_text @text.setter def text(self, html): self._cached_text = html if self.make_absolute_links: self._cached_text = self._absolute_links(self._cached_text) self._cached_text = self._del_special_character(self._cached_text) self._cached_selector = Selector(self.text) @property def json(self, **kwargs): if self._cached_json is None: self.encoding = self.encoding or "utf-8" self._cached_json = super(Response, self).json(**kwargs) return self._cached_json @property def content(self): content = super(Response, self).content return content @property def is_html(self): content_type = self.headers.get("Content-Type", "") if "text/html" in content_type: return True else: return False @property def selector(self): if self._cached_selector is None: self._cached_selector = Selector(self.text) return self._cached_selector def bs4(self, features="html.parser"): soup = BeautifulSoup(self.text, features) return soup def extract(self): return self.selector.get() def xpath(self, query, **kwargs): return self.selector.xpath(query, **kwargs) def css(self, query): return self.selector.css(query) def re(self, regex, 
replace_entities=False): """ @summary: 正则匹配 注意:网页源码<a class='page-numbers'... 会被处理成<a class="page-numbers" ; 写正则时要写<a class="(.*?)"。 但不会改非html的文本引号格式 为了使用方便,正则单双引号自动处理为不敏感 --------- @param regex: 正则或者re.compile @param replace_entities: 为True时 去掉&nbsp;等字符, 转义&quot;为 " 等, 会使网页结构发生变化。如在网页源码中提取json, 建议设置成False --------- @result: 列表 """ # 将单双引号设置为不敏感 if isinstance(regex, str): regex = re.sub("['\"]", "['\"]", regex) return self.selector.re(regex, replace_entities) def re_first(self, regex, default=None, replace_entities=False): """ @summary: 正则匹配 注意:网页源码<a class='page-numbers'... 会被处理成<a class="page-numbers" ; 写正则时要写<a class="(.*?)"。 但不会改非html的文本引号格式 为了使用方便,正则单双引号自动处理为不敏感 --------- @param regex: 正则或者re.compile @param default: 未匹配到, 默认值 @param replace_entities: 为True时 去掉&nbsp;等字符, 转义&quot;为 " 等, 会使网页结构发生变化。如在网页源码中提取json, 建议设置成False --------- @result: 第一个值或默认值 """ # 将单双引号设置为不敏感 if isinstance(regex, str): regex = re.sub("['\"]", "['\"]", regex) return self.selector.re_first(regex, default, replace_entities) def close_browser(self, request): if self.browser: request.render_downloader.close(self.browser) def __del__(self): self.close() def open(self): body = self.content if b"<base" not in body: # <head> 标签后插入一个<base href="url">标签 repl = fr'\1<base href="{self.url}">' body = re.sub(rb"(<head(?:>|\s.*?>))", repl.encode("utf-8"), body) fd, fname = tempfile.mkstemp(".html") os.write(fd, body) os.close(fd) return webbrowser.open(f"file://{fname}")
12,743
Python
.py
319
28.087774
209
0.577328
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,871
request.py
demigody_nas-tools/third_party/feapder/feapder/network/request.py
# -*- coding: utf-8 -*- """ Created on 2018-07-25 11:49:08 --------- @summary: 请求结构体 --------- @author: Boris @email: [email protected] """ import copy import os import re import requests from requests.cookies import RequestsCookieJar from requests.packages.urllib3.exceptions import InsecureRequestWarning import feapder.setting as setting import feapder.utils.tools as tools from feapder.db.redisdb import RedisDB from feapder.network import user_agent from feapder.network.downloader.base import Downloader, RenderDownloader from feapder.network.proxy_pool import BaseProxyPool from feapder.network.response import Response from feapder.utils.log import log # 屏蔽warning信息 requests.packages.urllib3.disable_warnings(InsecureRequestWarning) class Request: user_agent_pool = user_agent proxies_pool: BaseProxyPool = None cache_db = None # redis / pika cached_redis_key = None # 缓存response的文件文件夹 response_cached:cached_redis_key:md5 cached_expire_time = 1200 # 缓存过期时间 # 下载器 downloader: Downloader = None session_downloader: Downloader = None render_downloader: RenderDownloader = None __REQUEST_ATTRS__ = { # "method", # "url", "params", "data", "headers", "cookies", "files", "auth", "timeout", "allow_redirects", "proxies", "hooks", "stream", "verify", "cert", "json", } _DEFAULT_KEY_VALUE_ = dict( url="", method=None, retry_times=0, priority=300, parser_name=None, callback=None, filter_repeat=True, auto_request=True, request_sync=False, use_session=None, random_user_agent=True, download_midware=None, is_abandoned=False, render=False, render_time=0, make_absolute_links=None, ) _CUSTOM_PROPERTIES_ = { "requests_kwargs", "custom_ua", "custom_proxies", } def __init__( self, url="", retry_times=0, priority=300, parser_name=None, callback=None, filter_repeat=True, auto_request=True, request_sync=False, use_session=None, random_user_agent=True, download_midware=None, is_abandoned=False, render=False, render_time=0, make_absolute_links=None, **kwargs, ): """ @summary: Request参数 --------- 
框架参数 @param url: 待抓取url @param retry_times: 当前重试次数 @param priority: 优先级 越小越优先 默认300 @param parser_name: 回调函数所在的类名 默认为当前类 @param callback: 回调函数 可以是函数 也可是函数名(如想跨类回调时,parser_name指定那个类名,callback指定那个类想回调的方法名即可) @param filter_repeat: 是否需要去重 (True/False) 当setting中的REQUEST_FILTER_ENABLE设置为True时该参数生效 默认True @param auto_request: 是否需要自动请求下载网页 默认是。设置为False时返回的response为空,需要自己去请求网页 @param request_sync: 是否同步请求下载网页,默认异步。如果该请求url过期时间快,可设置为True,相当于yield的reqeust会立即响应,而不是去排队 @param use_session: 是否使用session方式 @param random_user_agent: 是否随机User-Agent (True/False) 当setting中的RANDOM_HEADERS设置为True时该参数生效 默认True @param download_midware: 下载中间件。默认为parser中的download_midware @param is_abandoned: 当发生异常时是否放弃重试 True/False. 默认False @param render: 是否用浏览器渲染 @param render_time: 渲染时长,即打开网页等待指定时间后再获取源码 @param make_absolute_links: 是否转成绝对连接,默认是 -- 以下参数与requests参数使用方式一致 @param method: 请求方式,如POST或GET,默认根据data值是否为空来判断 @param params: 请求参数 @param data: 请求body @param json: 请求json字符串,同 json.dumps(data) @param headers: @param cookies: 字典 或 CookieJar 对象 @param files: @param auth: @param timeout: (浮点或元组)等待服务器数据的超时限制,是一个浮点数,或是一个(connect timeout, read timeout) 元组 @param allow_redirects : Boolean. 
True 表示允许跟踪 POST/PUT/DELETE 方法的重定向 @param proxies: 代理 {"http":"http://xxx", "https":"https://xxx"} @param verify: 为 True 时将会验证 SSL 证书 @param stream: 如果为 False,将会立即下载响应内容 @param cert: -- @param **kwargs: 其他值: 如 Request(item=item) 则item可直接用 request.item 取出 --------- @result: """ self.url = url self.method = None self.retry_times = retry_times self.priority = priority self.parser_name = parser_name self.callback = callback self.filter_repeat = filter_repeat self.auto_request = auto_request self.request_sync = request_sync self.use_session = use_session self.random_user_agent = random_user_agent self.download_midware = download_midware self.is_abandoned = is_abandoned self.render = render self.render_time = render_time self.make_absolute_links = ( make_absolute_links if make_absolute_links is not None else setting.MAKE_ABSOLUTE_LINKS ) # 自定义属性,不参与序列化 self.requests_kwargs = {} for key, value in kwargs.items(): if key in self.__class__.__REQUEST_ATTRS__: # 取requests参数 self.requests_kwargs[key] = value self.__dict__[key] = value self.custom_ua = False self.custom_proxies = False def __repr__(self): try: return "<Request {}>".format(self.url) except: return "<Request {}>".format(str(self.to_dict)[:40]) def __setattr__(self, key, value): """ 针对 request.xxx = xxx 的形式,更新reqeust及内部参数值 @param key: @param value: @return: """ self.__dict__[key] = value if key in self.__class__.__REQUEST_ATTRS__: self.requests_kwargs[key] = value def __lt__(self, other): return self.priority < other.priority @property def _proxies_pool(self): if not self.__class__.proxies_pool: self.__class__.proxies_pool = tools.import_cls(setting.PROXY_POOL)() return self.__class__.proxies_pool @property def _downloader(self): if not self.__class__.downloader: self.__class__.downloader = tools.import_cls(setting.DOWNLOADER)() return self.__class__.downloader @property def _session_downloader(self): if not self.__class__.session_downloader: self.__class__.session_downloader = tools.import_cls( 
setting.SESSION_DOWNLOADER )() return self.__class__.session_downloader @property def _render_downloader(self): if not self.__class__.render_downloader: try: self.__class__.render_downloader = tools.import_cls( setting.RENDER_DOWNLOADER )() except AttributeError: log.error('当前是渲染模式,请安装 pip install "feapder[render]"') os._exit(0) return self.__class__.render_downloader @property def to_dict(self): request_dict = {} self.callback = ( getattr(self.callback, "__name__") if callable(self.callback) else self.callback ) if isinstance(self.download_midware, (tuple, list)): self.download_midware = [ getattr(download_midware, "__name__") if callable(download_midware) else download_midware for download_midware in self.download_midware ] else: self.download_midware = ( getattr(self.download_midware, "__name__") if callable(self.download_midware) else self.download_midware ) for key, value in self.__dict__.items(): if ( key in self.__class__._DEFAULT_KEY_VALUE_ and self.__class__._DEFAULT_KEY_VALUE_.get(key) == value or key in self.__class__._CUSTOM_PROPERTIES_ ): continue if value is not None: if key in self.__class__.__REQUEST_ATTRS__: if not isinstance( value, (bytes, bool, float, int, str, tuple, list, dict) ): value = tools.dumps_obj(value) else: if not isinstance(value, (bytes, bool, float, int, str)): value = tools.dumps_obj(value) request_dict[key] = value return request_dict @property def callback_name(self): return ( getattr(self.callback, "__name__") if callable(self.callback) else self.callback ) def make_requests_kwargs(self): """ 处理参数 """ # 设置超时默认时间 self.requests_kwargs.setdefault( "timeout", setting.REQUEST_TIMEOUT ) # connect=22 read=22 # 设置stream # 默认情况下,当你进行网络请求后,响应体会立即被下载。 # stream=True是,调用Response.content 才会下载响应体,默认只返回header。 # 缺点: stream 设为 True,Requests 无法将连接释放回连接池,除非消耗了所有的数据,或者调用了 Response.close。 这样会带来连接效率低下的问题。 self.requests_kwargs.setdefault("stream", True) # 关闭证书验证 self.requests_kwargs.setdefault("verify", False) # 设置请求方法 method = 
self.__dict__.get("method") if not method: if "data" in self.requests_kwargs or "json" in self.requests_kwargs: method = "POST" else: method = "GET" self.method = method # 设置user—agent headers = self.requests_kwargs.get("headers", {}) if "user-agent" not in headers and "User-Agent" not in headers: if self.random_user_agent and setting.RANDOM_HEADERS: # 随机user—agent ua = self.__class__.user_agent_pool.get(setting.USER_AGENT_TYPE) headers.update({"User-Agent": ua}) self.requests_kwargs.update(headers=headers) else: # 使用默认的user—agent self.requests_kwargs.setdefault( "headers", {"User-Agent": setting.DEFAULT_USERAGENT} ) else: self.custom_ua = True # 代理 proxies = self.requests_kwargs.get("proxies", -1) if proxies == -1 and setting.PROXY_ENABLE and setting.PROXY_EXTRACT_API: while True: proxies = self._proxies_pool.get_proxy() if proxies: self.requests_kwargs.update(proxies=proxies) break else: log.debug("暂无可用代理 ...") else: self.custom_proxies = True def get_response(self, save_cached=False): """ 获取带有selector功能的response @param save_cached: 保存缓存 方便调试时不用每次都重新下载 @return: """ self.make_requests_kwargs() log.debug( """ -------------- %srequest for ---------------- url = %s method = %s args = %s """ % ( "" if not self.parser_name else "%s.%s " % ( self.parser_name, ( self.callback and callable(self.callback) and getattr(self.callback, "__name__") or self.callback ) or "parse", ), self.url, self.method, self.requests_kwargs, ) ) # def hooks(response, *args, **kwargs): # print(response.url) # # self.requests_kwargs.update(hooks={'response': hooks}) # self.use_session 优先级高 use_session = ( setting.USE_SESSION if self.use_session is None else self.use_session ) if self.render: response = self._render_downloader.download(self) elif use_session: response = self._session_downloader.download(self) else: response = self._downloader.download(self) response.make_absolute_links = self.make_absolute_links if save_cached: self.save_cached(response, 
expire_time=self.__class__.cached_expire_time) return response def get_params(self): return self.requests_kwargs.get("params") def get_proxies(self) -> dict: """ Returns: {"https": "https://ip:port", "http": "http://ip:port"} """ return self.requests_kwargs.get("proxies") def get_proxy(self) -> str: """ Returns: ip:port """ proxies = self.get_proxies() if proxies: return re.sub( "http.*?//", "", proxies.get("http", "") or proxies.get("https", "") ) def del_proxy(self): proxy = self.get_proxy() if proxy: self._proxies_pool.del_proxy(proxy) del self.requests_kwargs["proxies"] def get_headers(self) -> dict: return self.requests_kwargs.get("headers", {}) def get_user_agent(self) -> str: return self.get_headers().get("user_agent") or self.get_headers().get( "User-Agent" ) def get_cookies(self) -> dict: cookies = self.requests_kwargs.get("cookies") if cookies and isinstance(cookies, RequestsCookieJar): cookies = cookies.get_dict() if not cookies: cookie_str = self.get_headers().get("Cookie") or self.get_headers().get( "cookie" ) if cookie_str: cookies = tools.get_cookies_from_str(cookie_str) return cookies @property def fingerprint(self): """ request唯一表识 @return: """ url = self.__dict__.get("url", "") # url 归一化 url = tools.canonicalize_url(url) args = [url] for arg in ["params", "data", "files", "auth", "cert", "json"]: if self.requests_kwargs.get(arg): args.append(self.requests_kwargs.get(arg)) return tools.get_md5(*args) @property def _cache_db(self): if not self.__class__.cache_db: self.__class__.cache_db = RedisDB() # .from_url(setting.pika_spider_1_uri) return self.__class__.cache_db @property def _cached_redis_key(self): if self.__class__.cached_redis_key: return ( f"response_cached:{self.__class__.cached_redis_key}:{self.fingerprint}" ) else: return f"response_cached:test:{self.fingerprint}" def save_cached(self, response, expire_time=1200): """ 使用redis保存response 用于调试 不用每回都下载 @param response: @param expire_time: 过期时间 @return: """ 
self._cache_db.strset(self._cached_redis_key, response.to_dict, ex=expire_time) def get_response_from_cached(self, save_cached=True): """ 从缓存中获取response 注意: 属性值为空: -raw : urllib3.response.HTTPResponse -connection:requests.adapters.HTTPAdapter -history 属性含义改变: - request 由requests 改为Request @param: save_cached 当无缓存 直接下载 下载完是否保存缓存 @return: """ response_dict = self._cache_db.strget(self._cached_redis_key) if not response_dict: log.info("无response缓存 重新下载") response_obj = self.get_response(save_cached=save_cached) else: response_dict = eval(response_dict) response_obj = Response.from_dict(response_dict) return response_obj def del_response_cached(self): self._cache_db.clear(self._cached_redis_key) @classmethod def from_dict(cls, request_dict): for key, value in request_dict.items(): if isinstance(value, bytes): # 反序列化 如item request_dict[key] = tools.loads_obj(value) return cls(**request_dict) def copy(self): return self.__class__.from_dict(copy.deepcopy(self.to_dict))
17,760
Python
.py
459
25.213508
106
0.564002
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,872
selector.py
demigody_nas-tools/third_party/feapder/feapder/network/selector.py
# -*- coding: utf-8 -*-
"""
Created on 2018-10-08 15:33:37
---------
@summary: re-implementation of parsel's Selector with regex-friendly
          ``re`` / ``re_first`` helpers and a more tolerant root-node factory
---------
@author: Boris
@email: [email protected]
"""

import re

import parsel
import six
from lxml import etree
from parsel import Selector as ParselSelector
from parsel import SelectorList as ParselSelectorList
from parsel import selector
from w3lib.html import replace_entities as w3lib_replace_entities


def extract_regex(regex, text, replace_entities=True, flags=0):
    """Extract a list of unicode strings from the given text/encoding using the following policies:

    * if the regex contains a named group called "extract" that will be returned
    * if the regex contains multiple numbered groups, all those will be returned (flattened)
    * if the regex doesn't contain any group the entire regex matching is returned

    :param regex: compiled pattern or pattern string (compiled with ``flags``)
    :param text: text to search
    :param replace_entities: when True, HTML character entities (except
        ``&amp;`` and ``&lt;``) are replaced by their characters
    :param flags: regex flags used only when ``regex`` is a string
    :return: list of matched strings (may contain nested lists when the
        pattern has multiple groups — deliberately NOT flattened)
    """
    if isinstance(regex, six.string_types):
        regex = re.compile(regex, flags=flags)

    if "extract" in regex.groupindex:
        # named group takes priority over numbered groups
        try:
            extracted = regex.search(text).group("extract")
        except AttributeError:
            # regex.search returned None -> no match
            strings = []
        else:
            strings = [extracted] if extracted is not None else []
    else:
        # full regex or numbered groups
        strings = regex.findall(text)
        # NOTE: intentionally not flattened — flatten() would squash
        # multi-group tuples into one flat list

    if not replace_entities:
        return strings

    values = []
    for value in strings:
        if isinstance(value, (list, tuple)):
            # w3lib_replace_entities cannot accept a list/tuple directly,
            # so map over the group values one by one
            values.append(
                [w3lib_replace_entities(v, keep=["lt", "amp"]) for v in value]
            )
        else:
            values.append(w3lib_replace_entities(value, keep=["lt", "amp"]))
    return values


def create_root_node(text, parser_cls, base_url=None):
    """Create root node for text using given parser class.

    NUL bytes are stripped (lxml rejects them) and an empty input falls
    back to a minimal ``<html/>`` document so callers always get a node.
    """
    body = text.strip().replace("\x00", "").encode("utf8") or b"<html/>"
    parser = parser_cls(recover=True, encoding="utf8", huge_tree=True)
    root = etree.fromstring(body, parser=parser, base_url=base_url)
    if root is None:
        root = etree.fromstring(b"<html/>", parser=parser, base_url=base_url)
    return root


def _version_tuple(version):
    """Parse a dotted version string into a tuple of its leading integer parts.

    Non-numeric components (e.g. the ``dev1`` in ``1.7.0.dev1``) stop the
    parse, keeping the numeric comparison below well-defined.
    """
    parts = []
    for part in version.split("."):
        match = re.match(r"\d+", part)
        if match is None:
            break
        parts.append(int(match.group()))
    return tuple(parts)


# BUG FIX: the original used a lexicographic string comparison
# (parsel.__version__ < "1.7.0"), which wrongly treats e.g. "1.10.0" as
# older than "1.7.0" and would re-apply the monkey-patch on modern parsel.
# Compare numeric version tuples instead.
if _version_tuple(parsel.__version__) < (1, 7):
    # older parsel lacks the tolerant root-node factory; patch it in
    selector.create_root_node = create_root_node


class SelectorList(ParselSelectorList):
    """
    The :class:`SelectorList` class is a subclass of the builtin ``list``
    class, which provides a few additional methods.
    """

    def re_first(self, regex, default=None, replace_entities=True, flags=re.S):
        """
        Call the ``.re()`` method for the first element in this list and
        return the result in an unicode string. If the list is empty or the
        regex doesn't match anything, return the default value (``None`` if
        the argument is not provided).

        By default, character entity references are replaced by their
        corresponding character (except for ``&amp;`` and ``&lt;``.
        Passing ``replace_entities`` as ``False`` switches off these
        replacements.
        """
        datas = self.re(regex, replace_entities=replace_entities, flags=flags)
        return datas[0] if datas else default

    def re(self, regex, replace_entities=True, flags=re.S):
        """
        Call the ``.re()`` method for each element in this list and return
        their results flattened, as a list of unicode strings.

        By default, character entity references are replaced by their
        corresponding character (except for ``&amp;`` and ``&lt;``.
        Passing ``replace_entities`` as ``False`` switches off these
        replacements.
        """
        datas = [
            x.re(regex, replace_entities=replace_entities, flags=flags) for x in self
        ]
        # single-element lists are unwrapped so one selector yields its
        # matches directly rather than a one-element list of lists
        return datas[0] if len(datas) == 1 else datas


class Selector(ParselSelector):
    # use our SelectorList (above) for xpath/css results
    selectorlist_cls = SelectorList

    def __str__(self):
        data = repr(self.get())
        return "<%s xpath=%r data=%s>" % (type(self).__name__, self._expr, data)

    __repr__ = __str__

    def __init__(self, text=None, *args, **kwargs):
        # Convert &nbsp; to a plain space up front, otherwise the parser
        # turns it into \xa0 (non-breaking space) which breaks matching
        if text:
            text = re.sub("&nbsp;", "\x20", text)

        super(Selector, self).__init__(text, *args, **kwargs)

    def re_first(self, regex, default=None, replace_entities=True, flags=re.S):
        """
        Apply the given regex and return the first unicode string which
        matches. If there is no match, return the default value (``None`` if
        the argument is not provided).

        By default, character entity references are replaced by their
        corresponding character (except for ``&amp;`` and ``&lt;``.
        Passing ``replace_entities`` as ``False`` switches off these
        replacements.
        """
        datas = self.re(regex, replace_entities=replace_entities, flags=flags)
        return datas[0] if datas else default

    def re(self, regex, replace_entities=True, flags=re.S):
        """
        Apply the given regex and return a list of unicode strings with the
        matches.

        ``regex`` can be either a compiled regular expression or a string which
        will be compiled to a regular expression using ``re.compile(regex)``.

        By default, character entity references are replaced by their
        corresponding character (except for ``&amp;`` and ``&lt;``.
        Passing ``replace_entities`` as ``False`` switches off these
        replacements.
        """
        return extract_regex(
            regex, self.get(), replace_entities=replace_entities, flags=flags
        )
5,661
Python
.py
126
37.253968
99
0.65215
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,873
user_agent.py
demigody_nas-tools/third_party/feapder/feapder/network/user_agent.py
# -*- coding: utf-8 -*- """ Created on 2016-12-28 17:55 --------- @summary: --------- @author: Boris @email: [email protected] """ import random USER_AGENTS = { "chrome": [ "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.4; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36", "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36", "Mozilla/5.0 (Windows NT 4.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36", "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36", "Mozilla/5.0 (X11; OpenBSD i386) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1944.0 Safari/537.36", "Mozilla/5.0 (Windows NT 5.1) 
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.3319.102 Safari/537.36", "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2309.372 Safari/537.36", "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2117.157 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36", "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1866.237 Safari/537.36", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.137 Safari/4E423F", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.517 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1664.3 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1664.3 Safari/537.36", "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.16 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1623.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.17 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.62 Safari/537.36", "Mozilla/5.0 (X11; CrOS i686 4319.74.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.57 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.2 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1468.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1467.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2) 
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1464.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1500.55 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36", "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.90 Safari/537.36", "Mozilla/5.0 (X11; NetBSD) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36", "Mozilla/5.0 (X11; CrOS i686 3912.101.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.60 Safari/537.17", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1309.0 Safari/537.17", "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.15 (KHTML, like Gecko) Chrome/24.0.1295.0 Safari/537.15", "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.14 (KHTML, like Gecko) Chrome/24.0.1292.0 Safari/537.14", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3215.0 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.84 Safari/537.36", 
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.131 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3790.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.75 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.131 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.131 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.92 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.75 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.79 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.94 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.90 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) 
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.89 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, 
like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.63 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.90 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.116 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.90 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.24 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.136 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.0.3016 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36 Kinza/6.1.5", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.61 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.48 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/84.0.4147.89 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.125 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.135 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.83 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.2.0.1713 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.89 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.47 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.2 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.819 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.75 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) 
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.102 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.41 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.785 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.117 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.9 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3235.0 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3409.85 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4371.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.142 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.9 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.43 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36 CravingExplorer/2.4.1", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.122 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.61 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.75 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, 
like Gecko) Chrome/67.0.3396.87 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.106 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.84 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4121.813 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.121 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.107 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.9 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.129 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.158 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.190 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.58 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.113 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.106 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.140 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36", "Mozilla/5.0 (Microsoft Windows NT 
10.0.16299.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36 (FTM)", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4500.0 Iron Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.121 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4427.5 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3835.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.190 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.120 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; ) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/82.0.4085.4 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.75 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.135 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.82 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.116 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.116 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) 
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.91 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.104 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.109 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.97 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.61 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.4000.0 Iron Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.135 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.122 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.41 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; ) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.163 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.116 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.41 Safari/537.36", "Mozilla/5.0 (Windows NT 5.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/75.0.3770.100 ADG/11.0.2566 AOLBUILD/11.0.2566 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/78.0.3904.108 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/88.0.4324.152 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 ADG/11.0.2510 AOLBUILD/11.0.2510 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; ) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36 AOLShield/83.0.4103.0", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 AOL/11.0 AOLBUILD/11.0.1839 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.75 ADG/11.0.2414 AOLBUILD/11.0.2414 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 ADG/11.0.2566 AOLBUILD/11.0.2566 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36 AOLShield/83.0.4103.2", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/80.0.3987.87 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/84.0.4147.105 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/86.0.4240.183 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/88.0.4324.152 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/90.0.4430.72 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 ADG/11.0.2510 AOLBUILD/11.0.2510 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Brave 
Chrome/86.0.4240.198 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 ADG/11.0.2566 AOLBUILD/11.0.2566 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/78.0.3904.97 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/84.0.4147.105 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/86.0.4240.198 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/88.0.4324.182 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/78.0.3904.108 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/87.0.4280.88 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/89.0.4389.114 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 ADG/11.0.2510 AOLBUILD/11.0.2510 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/87.0.4280.101 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 AOL/11.0 AOLBUILD/11.0.1839 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 ADG/11.0.2470 AOLBUILD/11.0.2470 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 ADG/11.0.2566 AOLBUILD/11.0.2566 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36 AOLShield/79.0.3945.5", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/77.0.3865.90 Safari/537.36", "Mozilla/5.0 (Windows NT 
10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/79.0.3945.88 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/80.0.3987.162 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/84.0.4147.89 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/86.0.4240.99 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/87.0.4280.141 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/89.0.4389.72 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.102 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.106 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.128 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.141 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.190 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36", "Mozilla/5.0 (Windows 
NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.75 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.157 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.123 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4558.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.101 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.117 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.104 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.85 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.121 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; ) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.113 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.135 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.102 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like 
Gecko) Chrome/87.0.4280.88 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.69 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.69 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.109 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.61 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4564.0 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.121 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.54 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.83 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.163 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36", 
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.93 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.87 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.71 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.72 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.122 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.107 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.125 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.45 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.81 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.135 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.81 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.99 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.101 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.77 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) 
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.69 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.190 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.164 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.141 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.45 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.74 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.60 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3409.13 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.26 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.54 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.81 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.84 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, 
like Gecko) Chrome/101.0.4951.64 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.127 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.69 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4591.54 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.101.4951.54 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.83 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.107 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.75 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.102 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.7113.93 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.54 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.190 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.5005.49 
Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.54 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.1150.52 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4950.0 Iron Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4450.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.127 Safari/537.36", "Mozilla/5.0 (Windows NT 11.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4868.173 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.84 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.1483.27 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.45 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.5060.66 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.127 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.3478.83 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) 
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.69 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.139 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.99 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/105.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.117 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.67 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.60 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.5005.115 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.82 Safari/537.36", "Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.159 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64; x64) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/105.0.5118.205 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.0.0 Safari/537.36 Agency/97.8.8247.48", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.127 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.127 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.159 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.63 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.99 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.102 Safari/537.36", "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.164 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.45 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4137.1 
SputnikBrowser/5.6.6280.0 (GOST) Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.84 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.139 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.139 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like 
Gecko) Chrome/73.0.3683.103 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.43 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.142 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.100 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.122 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.122 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like 
Gecko) Chrome/80.0.3987.132 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.79 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.90 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.106 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.122 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.132 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/82.0.4078.2 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.122 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.122 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.87 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.3538.77 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.89 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/83.0.4103.116 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.122 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.83 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.83 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.125 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.5 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.6 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_0_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.1 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3409.631 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.3 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_0_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.101 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.2 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.93 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.8 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.5 
Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_0_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3409.1 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.111 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.183 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.44 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.779 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.19 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.6 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.163 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36 FS", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36\tChrome 79.0", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36\tChrome Generic", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_16_0) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/85.0.4183.83 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.113 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_16_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.89 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.182 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.192 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.69 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.146 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.141 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.186 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.192 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.170 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4450.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/88.0.4324.182 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.192 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.192 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_3_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/524.34", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/85.0.4183.102 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.146 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.192 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.82 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.192 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.105 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.146 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.193 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.51 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.152 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.152 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.3538.77 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/88.0.4324.150 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/537.36 (KHTML, like Gecko, Mediapartners-Google) Chrome/77.0.3865.99 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/537.36 (KHTML, like Gecko, Mediapartners-Google) Chrome/81.0.4044.108 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/537.36 (KHTML, like Gecko, Mediapartners-Google) Chrome/83.0.4103.118 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/537.36 (KHTML, like Gecko, Mediapartners-Google) Chrome/84.0.4147.108 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/537.36 (KHTML, like Gecko, Mediapartners-Google) Chrome/84.0.4147.140 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/537.36 (KHTML, like Gecko, Mediapartners-Google) Chrome/85.0.4183.122 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/537.36 (KHTML, like Gecko, Mediapartners-Google) Chrome/87.0.4280.90 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/537.36 (KHTML, like Gecko, Mediapartners-Google) Chrome/88.0.4324.175 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/537.36 (KHTML, like Gecko, Mediapartners-Google) Chrome/89.0.4389.93 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/537.36 (KHTML, like Gecko, Mediapartners-Google) Chrome/89.0.4389.127 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/86.0.4240.75 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/79.0.3945.88 Safari/537.36", 
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/80.0.3987.116 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/81.0.4044.113 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/84.0.4147.135 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/86.0.4240.75 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/86.0.4240.198 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/87.0.4280.141 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/89.0.4389.72 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/78.0.3904.70 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/80.0.3987.116 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/80.0.3987.162 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/86.0.4240.75 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/87.0.4280.67 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/88.0.4324.152 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/89.0.4389.90 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/77.0.3865.90 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 
10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/78.0.3904.108 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/80.0.3987.87 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/80.0.3987.162 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/83.0.4103.116 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/85.0.4183.83 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/86.0.4240.99 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/86.0.4240.198 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/87.0.4280.141 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/88.0.4324.182 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/89.0.4389.90 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/90.0.4430.72 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_3) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/79.0.3945.88 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/79.0.3945.88 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/77.0.3865.90 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/78.0.3904.108 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like 
Gecko) Brave Chrome/80.0.3987.122 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/81.0.4044.113 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/84.0.4147.89 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/85.0.4183.102 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/86.0.4240.183 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/87.0.4280.88 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/88.0.4324.146 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/89.0.4389.72 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/89.0.4389.114 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_0) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/78.0.3904.108 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_1) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/78.0.3904.70 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_2) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/78.0.3904.97 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_2) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/79.0.3945.130 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/78.0.3904.108 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/80.0.3987.87 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/80.0.3987.149 
Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/84.0.4147.89 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/86.0.4240.99 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/80.0.3987.149 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/81.0.4044.122 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/84.0.4147.89 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/87.0.4280.101 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/83.0.4103.97 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/84.0.4147.105 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/86.0.4240.75 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/78.0.3904.87 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/83.0.4103.106 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/84.0.4147.125 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/85.0.4183.121 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/86.0.4240.183 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/88.0.4324.152 Safari/537.36", "Mozilla/5.0 (Macintosh; 
Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/83.0.4103.116 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/85.0.4183.102 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/86.0.4240.111 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/87.0.4280.60 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/87.0.4280.141 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/88.0.4324.182 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/89.0.4389.90 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_16_0) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/80.0.3987.116 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_0_0) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/86.0.4240.183 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_0_1) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/87.0.4280.67 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_0_1) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/88.0.4324.96 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_0_1) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/88.0.4324.192 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/87.0.4280.67 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/88.0.4324.96 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/89.0.4389.72 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_0) AppleWebKit/537.36 
(KHTML, like Gecko) Brave Chrome/87.0.4280.101 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_0) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/88.0.4324.152 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_1) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/87.0.4280.101 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_1) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/88.0.4324.182 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_1) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/89.0.4389.90 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_2) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/88.0.4324.146 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_2) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/89.0.4389.72 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_3) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/88.0.4324.96 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_3) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/89.0.4389.72 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_3) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/89.0.4389.114 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_3_0) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/89.0.4389.114 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/537.36 (KHTML, like Gecko, Mediapartners-Google) Chrome/89.0.4389.130 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.128 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 
Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.111 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_3_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.128 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.69 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.54 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.82 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.69 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.114 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.69 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.85 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.61 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.61 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 
Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.54 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4582.189 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/82.0.4083.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.69 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4612.206 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.80 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.99 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.80 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4702.147 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.80 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.93 Safari/537.36", 
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.99 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.45 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.63 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.80 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.71 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.82 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.93 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.99 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.80 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.80 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.71 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.107 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4691.94 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Safari/537.36", "Mozilla/5.0 
(Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4889.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.71 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.79 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.159 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.79 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.84 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.84 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.84 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.54 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.54 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.54 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.54 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.9999.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.0.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.0.0 Safari/537.36", "Mozilla/5.0 (Macintosh; 
Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.64 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.54 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.84 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.51 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.64 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.54 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.64 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.61 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.84 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.5005.40 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.127 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.60 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.63 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.55 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4880.146 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.55 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel 
Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.80 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.181 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.80 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.54 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.147 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.109 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.75 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.71 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.54 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.75 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.109 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4886.93 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Brave Chrome/89.0.4389.105 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel 
Mac OS X 10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4886.148 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.75 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.0.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.0.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.80 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.5112.102 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/105.0.0.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.0.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.0.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.75 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.127 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.64 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 
(KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.5163.147 Safari/537.36" ], "opera": [ "Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16", "Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14", "Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14", "Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02", "Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00", "Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00", "Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00", "Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00", "Mozilla/5.0 (Windows NT 5.1) Gecko/20100101 Firefox/14.0 Opera/12.0", "Opera/9.80 (Windows NT 6.1; WOW64; U; pt) Presto/2.10.229 Version/11.62", "Opera/9.80 (Windows NT 6.0; U; pl) Presto/2.10.229 Version/11.62", "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; fr) Presto/2.9.168 Version/11.52", "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; de) Presto/2.9.168 Version/11.52", "Opera/9.80 (Windows NT 5.1; U; en) Presto/2.9.168 Version/11.51", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; de) Opera 11.51", "Opera/9.80 (X11; Linux x86_64; U; fr) Presto/2.9.168 Version/11.50", "Opera/9.80 (X11; Linux i686; U; hu) Presto/2.9.168 Version/11.50", "Opera/9.80 (X11; Linux i686; U; ru) Presto/2.8.131 Version/11.11", "Opera/9.80 (X11; Linux i686; U; es-ES) Presto/2.8.131 Version/11.11", "Mozilla/5.0 (Windows NT 5.1; U; en; rv:1.8.1) Gecko/20061208 Firefox/5.0 Opera 11.11", "Opera/9.80 (X11; Linux x86_64; U; bg) Presto/2.8.131 Version/11.10", "Opera/9.80 (Windows NT 6.0; U; en) Presto/2.8.99 Version/11.10", "Opera/9.80 (Windows NT 5.1; U; zh-tw) 
Presto/2.8.131 Version/11.10", "Opera/9.80 (Windows NT 6.1; Opera Tablet/15165; U; en) Presto/2.8.149 Version/11.1", "Opera/9.80 (X11; Linux x86_64; U; Ubuntu/10.10 (maverick); pl) Presto/2.7.62 Version/11.01", "Opera/9.80 (X11; Linux i686; U; ja) Presto/2.7.62 Version/11.01", "Opera/9.80 (X11; Linux i686; U; fr) Presto/2.7.62 Version/11.01", "Opera/9.80 (Windows NT 6.1; U; zh-tw) Presto/2.7.62 Version/11.01", "Opera/9.80 (Windows NT 6.1; U; zh-cn) Presto/2.7.62 Version/11.01", "Opera/9.80 (Windows NT 6.1; U; sv) Presto/2.7.62 Version/11.01", "Opera/9.80 (Windows NT 6.1; U; en-US) Presto/2.7.62 Version/11.01", "Opera/9.80 (Windows NT 6.1; U; cs) Presto/2.7.62 Version/11.01", "Opera/9.80 (Windows NT 6.0; U; pl) Presto/2.7.62 Version/11.01", "Opera/9.80 (Windows NT 5.2; U; ru) Presto/2.7.62 Version/11.01", "Opera/9.80 (Windows NT 5.1; U;) Presto/2.7.62 Version/11.01", "Opera/9.80 (Windows NT 5.1; U; cs) Presto/2.7.62 Version/11.01", "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.13) Gecko/20101213 Opera/9.80 (Windows NT 6.1; U; zh-tw) Presto/2.7.62 Version/11.01", "Mozilla/5.0 (Windows NT 6.1; U; nl; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 11.01", "Mozilla/5.0 (Windows NT 6.1; U; de; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 11.01", "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; de) Opera 11.01", "Opera/9.80 (X11; Linux x86_64; U; pl) Presto/2.7.62 Version/11.00", "Opera/9.80 (X11; Linux i686; U; it) Presto/2.7.62 Version/11.00", "Opera/9.80 (Windows NT 6.1; U; zh-cn) Presto/2.6.37 Version/11.00", "Opera/9.80 (Windows NT 6.1; U; pl) Presto/2.7.62 Version/11.00", "Opera/9.80 (Windows NT 6.1; U; ko) Presto/2.7.62 Version/11.00", "Opera/9.80 (Windows NT 6.1; U; fi) Presto/2.7.62 Version/11.00", "Opera/9.80 (Windows NT 6.1; U; en-GB) Presto/2.7.62 Version/11.00", "Opera/9.80 (Windows NT 6.1 x64; U; en) Presto/2.7.62 Version/11.00", "Opera/9.80 (Windows NT 6.0; U; en) Presto/2.7.39 Version/11.00", ], "firefox": [ "Mozilla/5.0 (Windows NT 6.1; 
WOW64; rv:40.0) Gecko/20100101 Firefox/40.1", "Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0", "Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0", "Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:29.0) Gecko/20120101 Firefox/29.0", "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/29.0", "Mozilla/5.0 (X11; OpenBSD amd64; rv:28.0) Gecko/20100101 Firefox/28.0", "Mozilla/5.0 (X11; Linux x86_64; rv:28.0) Gecko/20100101 Firefox/28.0", "Mozilla/5.0 (Windows NT 6.1; rv:27.3) Gecko/20130101 Firefox/27.3", "Mozilla/5.0 (Windows NT 6.2; Win64; x64; rv:27.0) Gecko/20121011 Firefox/27.0", "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:25.0) Gecko/20100101 Firefox/25.0", "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:24.0) Gecko/20100101 Firefox/24.0", "Mozilla/5.0 (Windows NT 6.0; WOW64; rv:24.0) Gecko/20100101 Firefox/24.0", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:24.0) Gecko/20100101 Firefox/24.0", "Mozilla/5.0 (Windows NT 6.2; rv:22.0) Gecko/20130405 Firefox/23.0", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:23.0) Gecko/20130406 Firefox/23.0", "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:23.0) Gecko/20131011 Firefox/23.0", "Mozilla/5.0 (Windows NT 6.2; rv:22.0) Gecko/20130405 Firefox/22.0", "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:22.0) Gecko/20130328 Firefox/22.0", "Mozilla/5.0 (Windows NT 6.1; rv:22.0) Gecko/20130405 Firefox/22.0", "Mozilla/5.0 (Microsoft Windows NT 6.2.9200.0); rv:22.0) Gecko/20130405 Firefox/22.0", "Mozilla/5.0 (Windows NT 6.2; Win64; x64; rv:16.0.1) Gecko/20121011 Firefox/21.0.1", "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:16.0.1) Gecko/20121011 Firefox/21.0.1", "Mozilla/5.0 (Windows 
NT 6.2; Win64; x64; rv:21.0.0) Gecko/20121011 Firefox/21.0.0", "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:21.0) Gecko/20130331 Firefox/21.0", "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:21.0) Gecko/20100101 Firefox/21.0", "Mozilla/5.0 (X11; Linux i686; rv:21.0) Gecko/20100101 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.2; WOW64; rv:21.0) Gecko/20130514 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.2; rv:21.0) Gecko/20130326 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:21.0) Gecko/20130401 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:21.0) Gecko/20130331 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:21.0) Gecko/20130330 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:21.0) Gecko/20100101 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.1; rv:21.0) Gecko/20130401 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.1; rv:21.0) Gecko/20130328 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.1; rv:21.0) Gecko/20100101 Firefox/21.0", "Mozilla/5.0 (Windows NT 5.1; rv:21.0) Gecko/20130401 Firefox/21.0", "Mozilla/5.0 (Windows NT 5.1; rv:21.0) Gecko/20130331 Firefox/21.0", "Mozilla/5.0 (Windows NT 5.1; rv:21.0) Gecko/20100101 Firefox/21.0", "Mozilla/5.0 (Windows NT 5.0; rv:21.0) Gecko/20100101 Firefox/21.0", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:21.0) Gecko/20100101 Firefox/21.0", "Mozilla/5.0 (Windows NT 6.2; Win64; x64;) Gecko/20100101 Firefox/20.0", "Mozilla/5.0 (Windows x86; rv:19.0) Gecko/20100101 Firefox/19.0", "Mozilla/5.0 (Windows NT 6.1; rv:6.0) Gecko/20100101 Firefox/19.0", "Mozilla/5.0 (Windows NT 6.1; rv:14.0) Gecko/20100101 Firefox/18.0.1", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:18.0) Gecko/20100101 Firefox/18.0", "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:17.0) Gecko/20100101 Firefox/17.0.6", ], "internetexplorer": [ "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko", "Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko", "Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; 
InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0", "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)", "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)", "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)", "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)", "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)", "Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)", "Mozilla/4.0 (Compatible; MSIE 8.0; Windows NT 5.2; Trident/6.0)", "Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)", "Mozilla/1.22 (compatible; MSIE 10.0; Windows 3.1)", "Mozilla/5.0 (Windows; U; MSIE 9.0; WIndows NT 9.0; en-US))", "Mozilla/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 7.1; Trident/5.0)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; Media Center PC 6.0; InfoPath.3; MS-RTC LM 8; Zune 4.7)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; Media Center PC 6.0; InfoPath.3; MS-RTC LM 8; Zune 4.7", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; Zune 4.0; InfoPath.3; MS-RTC LM 8; .NET4.0C; .NET4.0E)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; chromeframe/12.0.742.112)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 
2.0.50727; SLCC2; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; Zune 4.0; Tablet PC 2.0; InfoPath.3; .NET4.0C; .NET4.0E)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; yie8)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.2; .NET CLR 1.1.4322; .NET4.0C; Tablet PC 2.0)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; FunWebProducts)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; chromeframe/13.0.782.215)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; chromeframe/11.0.696.57)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0) chromeframe/10.0.648.205", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/4.0; GTB7.4; InfoPath.1; SV1; .NET CLR 2.8.52393; WOW64; en-US)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0; Trident/5.0; chromeframe/11.0.696.57)", "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0; Trident/4.0; GTB7.4; InfoPath.3; SV1; .NET CLR 3.1.76908; WOW64; en-US)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; GTB7.4; InfoPath.2; SV1; .NET CLR 3.3.69573; WOW64; en-US)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 1.0.3705; .NET CLR 1.1.4322)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; InfoPath.1; SV1; .NET CLR 3.8.36217; WOW64; en-US)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; .NET CLR 2.7.58687; SLCC2; Media Center PC 5.0; Zune 3.4; Tablet PC 3.6; InfoPath.3)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.2; Trident/4.0; Media Center PC 4.0; SLCC1; .NET CLR 3.0.04320)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SLCC1; .NET CLR 
3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 1.1.4322)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; .NET CLR 1.1.4322; .NET CLR 2.0.50727)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; SLCC1; .NET CLR 1.1.4322)", "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.0; Trident/4.0; InfoPath.1; SV1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 3.0.04506.30)", "Mozilla/5.0 (compatible; MSIE 7.0; Windows NT 5.0; Trident/4.0; FBSMTWB; .NET CLR 2.0.34861; .NET CLR 3.0.3746.3218; .NET CLR 3.5.33652; msn OptimizedIE8;ENUS)", "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.2; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0)", "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; Media Center PC 6.0; InfoPath.2; MS-RTC LM 8)", "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; Media Center PC 6.0; InfoPath.2; MS-RTC LM 8", "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; Media Center PC 6.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C)", "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.3; .NET4.0C; .NET4.0E; .NET CLR 3.5.30729; .NET CLR 3.0.30729; MS-RTC LM 8)", "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.2)", "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; Zune 3.0)", ], "safari": [ "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/7046A194A", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.13+ (KHTML, like Gecko) Version/5.1.7 
Safari/534.57.2", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/534.55.3 (KHTML, like Gecko) Version/5.1.3 Safari/534.53.10", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; de-at) AppleWebKit/533.21.1 (KHTML, like Gecko) Version/5.0.5 Safari/533.21.1", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_7; da-dk) AppleWebKit/533.21.1 (KHTML, like Gecko) Version/5.0.5 Safari/533.21.1", "Mozilla/5.0 (Windows; U; Windows NT 6.1; tr-TR) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Windows; U; Windows NT 6.1; ko-KR) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Windows; U; Windows NT 6.1; fr-FR) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Windows; U; Windows NT 6.1; cs-CZ) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Windows; U; Windows NT 6.0; ja-JP) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_5_8; zh-cn) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_5_8; ja-jp) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_7; ja-jp) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; zh-cn) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; sv-se) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; ko-kr) 
AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; ja-jp) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; it-it) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; fr-fr) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; es-es) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; en-us) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; en-gb) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; de-de) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27", "Mozilla/5.0 (Windows; U; Windows NT 6.1; sv-SE) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Windows; U; Windows NT 6.1; ja-JP) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Windows; U; Windows NT 6.1; de-DE) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Windows; U; Windows NT 6.0; hu-HU) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Windows; U; Windows NT 6.0; de-DE) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Windows; U; Windows NT 5.1; ru-RU) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Windows; U; Windows NT 5.1; ja-JP) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 
(Windows; U; Windows NT 5.1; it-IT) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_7; en-us) AppleWebKit/534.16+ (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; fr-ch) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; de-de) AppleWebKit/534.15+ (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; ar) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Android 2.2; Windows; U; Windows NT 6.1; en-US) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4", "Mozilla/5.0 (Windows; U; Windows NT 6.1; zh-HK) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5", "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5", "Mozilla/5.0 (Windows; U; Windows NT 6.0; tr-TR) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5", "Mozilla/5.0 (Windows; U; Windows NT 6.0; nb-NO) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5", "Mozilla/5.0 (Windows; U; Windows NT 6.0; fr-FR) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5", "Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-TW) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5", "Mozilla/5.0 (Windows; U; Windows NT 5.1; ru-RU) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5", "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_8; zh-cn) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5", ], "mobile": [ "Mozilla/5.0 (PlayBook; U; RIM Tablet OS 2.1.0; en-US) AppleWebKit/536.2+ (KHTML like Gecko) Version/14.2 
Safari/536.2+", "Mozilla/5.0 (PlayBook; U; RIM Tablet OS 2.1.0; en-US) AppleWebKit/536.2+ (KHTML like Gecko) Version/14.2 Safari/536.2+", "Mozilla/5.0 (BB10; Touch) AppleWebKit/537.10+ (KHTML, like Gecko) Version/14.2 Mobile Safari/537.10+", "Mozilla/5.0 (BB10; Touch) AppleWebKit/537.10+ (KHTML, like Gecko) Version/14.2 Mobile Safari/537.10+", "Mozilla/5.0 (Linux; U; Android 4.3; en-us; SM-N900T Build/JSS15J) AppleWebKit/534.30 (KHTML, like Gecko) Version/14.2 Mobile Safari/534.30", "Mozilla/5.0 (Linux; U; Android 4.3; en-us; SM-N900T Build/JSS15J) AppleWebKit/534.30 (KHTML, like Gecko) Version/14.2 Mobile Safari/534.30", "Mozilla/5.0 (Linux; U; Android 4.1; en-us; GT-N7100 Build/JRO03C) AppleWebKit/534.30 (KHTML, like Gecko) Version/14.2 Mobile Safari/534.30", "Mozilla/5.0 (Linux; U; Android 4.1; en-us; GT-N7100 Build/JRO03C) AppleWebKit/534.30 (KHTML, like Gecko) Version/14.2 Mobile Safari/534.30", "Mozilla/5.0 (Linux; U; Android 4.0; en-us; GT-I9300 Build/IMM76D) AppleWebKit/534.30 (KHTML, like Gecko) Version/14.2 Mobile Safari/534.30", "Mozilla/5.0 (Linux; U; Android 4.0; en-us; GT-I9300 Build/IMM76D) AppleWebKit/534.30 (KHTML, like Gecko) Version/14.2 Mobile Safari/534.30", "Mozilla/5.0 (Linux; Android 5.0; SM-G900P Build/LRX21T) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 5.0; SM-G900P Build/LRX21T) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 7.0; SM-G950U Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 7.0; SM-G950U Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 8.0.0; SM-G965U Build/R16NW) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 8.0.0; SM-G965U Build/R16NW) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 8.1.0; SM-T837A) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Safari/537.36", "Mozilla/5.0 (Linux; Android 8.1.0; SM-T837A) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Safari/537.36", "Mozilla/5.0 (iPad; CPU OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPad; CPU OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPad; CPU OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPad; CPU OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPad; CPU OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPad; CPU OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPad; CPU OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPad; CPU OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/14.2 Mobile/15A372 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/14.2 Mobile/15A372 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/14.2 Mobile/15A372 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/14.2 Mobile/15A372 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/14.2 
Mobile/15A372 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/14.2 Mobile/15A372 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/14.2 Mobile/15A372 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/14.2 Mobile/15A372 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/14.2 Mobile/15A372 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/14.2 Mobile/15A372 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/14.2 Mobile/15A372 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/14.2 Mobile/15A372 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_1 like Mac OS X) AppleWebKit/603.1.30 (KHTML, like Gecko) Version/14.2 Mobile/14E304 Safari/602.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_1 like Mac OS X) AppleWebKit/603.1.30 (KHTML, like Gecko) Version/14.2 Mobile/14E304 Safari/602.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/14.2 Mobile/15A372 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/14.2 Mobile/15A372 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 12_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 12_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 
(iPhone; CPU iPhone OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 14_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 14_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 14_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 14_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 14_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (iPhone; CPU iPhone OS 14_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Mobile/15E148 Safari/604.1", "Mozilla/5.0 (Mobile; LYF/F300B/LYF-F300B-001-01-15-130718-i;Android; rv:89.0 Gecko/48.0 Firefox/90.0 KAIOS/2.5", "Mozilla/5.0 (Mobile; LYF/F300B/LYF-F300B-001-01-15-130718-i;Android; rv:89.0 Gecko/48.0 Firefox/90.0 KAIOS/2.5", "Mozilla/5.0 (Linux; U; en-us; KFAPWI Build/JDQ39) AppleWebKit/535.19 (KHTML, like Gecko) Silk/3.13 Safari/535.19 Silk-Accelerated=true", "Mozilla/5.0 (Linux; U; en-us; KFAPWI Build/JDQ39) AppleWebKit/535.19 (KHTML, like Gecko) Silk/3.13 Safari/535.19 
Silk-Accelerated=true", "Mozilla/5.0 (Linux; U; Android 4.4.2; en-us; LGMS323 Build/KOT49I.MS32310c) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; U; Android 4.4.2; en-us; LGMS323 Build/KOT49I.MS32310c) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Windows Phone 10.0; Android 4.2.1; Microsoft; Lumia 550) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36 Edge/14.14263", "Mozilla/5.0 (Windows Phone 10.0; Android 4.2.1; Microsoft; Lumia 550) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36 Edge/14.14263", "Mozilla/5.0 (Windows Phone 10.0; Android 4.2.1; Microsoft; Lumia 950) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36 Edge/14.14263", "Mozilla/5.0 (Windows Phone 10.0; Android 4.2.1; Microsoft; Lumia 950) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36 Edge/14.14263", "Mozilla/5.0 (Linux; Android 6.0.1; Nexus 10 Build/MOB31T) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Safari/537.36", "Mozilla/5.0 (Linux; Android 6.0.1; Nexus 10 Build/MOB31T) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Safari/537.36", "Mozilla/5.0 (Linux; Android 4.4.2; Nexus 4 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 4.4.2; Nexus 4 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 8.0.0; Nexus 5X Build/OPR4.170623.006) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", 
"Mozilla/5.0 (Linux; Android 8.0.0; Nexus 5X Build/OPR4.170623.006) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 7.1.1; Nexus 6 Build/N6F26U) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 7.1.1; Nexus 6 Build/N6F26U) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 8.0.0; Nexus 6P Build/OPP3.170518.006) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 8.0.0; Nexus 6P Build/OPP3.170518.006) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 6.0.1; Nexus 7 Build/MOB30X) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Safari/537.36", "Mozilla/5.0 (Linux; Android 6.0.1; Nexus 7 Build/MOB30X) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Safari/537.36", "Mozilla/5.0 (compatible; MSIE 10.0; Windows Phone 8.0; Trident/6.0; IEMobile/10.0; ARM; Touch; NOKIA; Lumia 520)", "Mozilla/5.0 (compatible; MSIE 10.0; Windows Phone 8.0; Trident/6.0; IEMobile/10.0; ARM; Touch; NOKIA; Lumia 520)", "Mozilla/5.0 (MeeGo; NokiaN9) AppleWebKit/534.13 (KHTML, like Gecko) NokiaBrowser/8.5.0 Mobile Safari/534.13", "Mozilla/5.0 (MeeGo; NokiaN9) AppleWebKit/534.13 (KHTML, like Gecko) NokiaBrowser/8.5.0 Mobile Safari/534.13", "Mozilla/5.0 (Linux; Android 8.0; Pixel 2 Build/OPD3.170816.012) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 8.0; Pixel 2 Build/OPD3.170816.012) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 8.0.0; Pixel 2 XL Build/OPD1.170816.004) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 8.0.0; Pixel 2 XL Build/OPD1.170816.004) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 9; Pixel 3 Build/PQ1A.181105.017.A1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 9; Pixel 3 Build/PQ1A.181105.017.A1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 10; Pixel 4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 10; Pixel 4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 11; Pixel 4a (5G)) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 11; Pixel 4a (5G)) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 11; Pixel 5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 11; Pixel 5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 7.0; Moto G (4)) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Linux; Android 7.0; Moto G (4)) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Mobile Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Safari/537.36 Edg/93.0.4576.0", "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:90.0 Gecko/20100101 Firefox/90.0", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.2 Safari/605.1.15", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4576.0 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/93.0.4576.0 Safari/537.36 Edg/93.0.4576.0", "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:90.0 Gecko/20100101 Firefox/90.0", ], } def get(ua_type: str = None): if not ua_type: ua_type = random.choice(list(USER_AGENTS.keys())) elif ua_type not in USER_AGENTS: raise ValueError( "ua_type error, expect one of {}".format(list(USER_AGENTS.keys())) ) return random.choice(USER_AGENTS[ua_type])
130,399
Python
.py
1,061
114.048068
231
0.678821
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,874
proxy_pool.py
demigody_nas-tools/third_party/feapder/feapder/network/proxy_pool/proxy_pool.py
# -*- coding: utf-8 -*-
"""
Created on 2022/10/19 10:40 AM
---------
@summary: In-memory proxy pool fed by an HTTP extraction API.
---------
@author: Boris
@email: boris_liu@foxmail.com
"""

from queue import Queue

import requests

import feapder.setting as setting
from feapder.network.proxy_pool.base import BaseProxyPool
from feapder.utils import metrics
from feapder.utils import tools


class ProxyPool(BaseProxyPool):
    """
    Pulls proxies from an extraction API and keeps them in an in-memory
    queue; the pool refills itself automatically whenever it runs dry.
    The API response is expected to list proxies separated by CRLF.
    """

    def __init__(self, proxy_api=None, **kwargs):
        # Fall back to the globally configured extraction endpoint.
        self.proxy_api = proxy_api or setting.PROXY_EXTRACT_API
        self.proxy_queue = Queue()

    def format_proxy(self, proxy):
        # Both schemes are routed through the same http:// proxy address.
        endpoint = "http://" + proxy
        return {"http": endpoint, "https": endpoint}

    @tools.retry(3, interval=5)
    def pull_proxies(self):
        """Fetch a fresh batch of proxies from the API (retried on failure)."""
        response = requests.get(self.proxy_api)
        body = response.text.strip()
        response.close()
        # An empty payload, or a JSON-looking one, signals an API error.
        if not body or "{" in body:
            raise Exception("获取代理失败", body)
        # Proxies come back CRLF-separated.
        return body.split("\r\n")

    def get_proxy(self):
        """Return one proxy mapping, rotating round-robin through the queue."""
        try:
            if self.proxy_queue.empty():
                for fresh in self.pull_proxies():
                    self.proxy_queue.put_nowait(fresh)
                    metrics.emit_counter("total", 1, classify="proxy")

            # Take the head and immediately re-queue it: round-robin reuse.
            chosen = self.proxy_queue.get_nowait()
            self.proxy_queue.put_nowait(chosen)
            metrics.emit_counter("used_times", 1, classify="proxy")
            return self.format_proxy(chosen)
        except Exception as e:
            tools.send_msg("获取代理失败", level="error")
            raise Exception("获取代理失败", e)

    def del_proxy(self, proxy):
        """
        Remove an invalid proxy from the pool.
        ---------
        @param proxy: ip:port
        """
        if proxy in self.proxy_queue.queue:
            self.proxy_queue.queue.remove(proxy)
            metrics.emit_counter("invalid", 1, classify="proxy")
2,033
Python
.py
57
26
70
0.595676
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,875
__init__.py
demigody_nas-tools/third_party/feapder/feapder/network/proxy_pool/__init__.py
# -*- coding: utf-8 -*- """ Created on 2023/7/25 10:16 --------- @summary: --------- @author: Boris @email: [email protected] """ from .base import BaseProxyPool from .proxy_pool import ProxyPool
200
Python
.py
11
17.181818
33
0.666667
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,876
base.py
demigody_nas-tools/third_party/feapder/feapder/network/proxy_pool/base.py
# -*- coding: utf-8 -*-
"""
Created on 2023/7/25 10:03
---------
@summary: Abstract interface every proxy pool implementation must follow.
---------
@author: Boris
@email: boris_liu@foxmail.com
"""

import abc

from feapder.utils.log import log


# Fix: inherit abc.ABC so @abc.abstractmethod is actually enforced.
# Without ABCMeta the decorator is inert and the "abstract" base could be
# instantiated silently; subclasses (e.g. ProxyPool) implement both
# abstract methods, so they are unaffected.
class BaseProxyPool(abc.ABC):
    """Interface for proxy pools: fetch, invalidate and (optionally) tag proxies."""

    @abc.abstractmethod
    def get_proxy(self):
        """
        Get one proxy.

        Returns:
            {"http": "xxx", "https": "xxx"}
        """
        raise NotImplementedError

    @abc.abstractmethod
    def del_proxy(self, proxy):
        """
        Delete a proxy from the pool.
        ---------
        @param proxy: ip:port
        """
        raise NotImplementedError

    def tag_proxy(self, **kwargs):
        """
        Tag a proxy. Optional hook; the default implementation only logs
        that tagging is unsupported.
        ---------
        @param kwargs:
        @return:
        """
        log.warning("暂不支持标记代理")
797
Python
.py
37
14.324324
43
0.514006
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,877
guest_user_pool.py
demigody_nas-tools/third_party/feapder/feapder/network/user_pool/guest_user_pool.py
# -*- coding: utf-8 -*-
"""
Created on 2018/12/27 11:32 AM
---------
@summary: Guest user pool -- produces anonymous (not logged-in) users.
---------
@author: Boris
@email: boris_liu@foxmail.com
"""

import random
from typing import Optional

import feapder.utils.tools as tools
from feapder import setting
from feapder.db.redisdb import RedisDB
from feapder.network.user_pool.base_user_pool import UserPoolInterface, GuestUser
from feapder.utils.log import log
from feapder.utils.webdriver import WebDriver


class GuestUserPool(UserPoolInterface):
    """
    Guest user pool: generates visitor users (cookies + user-agent) by
    visiting a page with a WebDriver; no login is required.
    """

    def __init__(
        self,
        redis_key,
        page_url=None,
        min_users=1,
        must_contained_keys=(),
        keep_alive=False,
        **kwargs,
    ):
        """
        @param redis_key: prefix of the redis key the users are stored under
        @param page_url: URL visited to produce a user (its cookies)
        @param min_users: minimum number of users to keep in the pool
        @param must_contained_keys: cookie keys that must be present for the
            harvested cookies to be considered valid
        @param keep_alive: keep running as a daemon so the pool is refilled
            as soon as it runs low
        ---
        @param kwargs: extra WebDriver options, e.g.
            load_images: whether to load images
            user_agent: string, or zero-arg callable returning a user agent
            proxy: xxx.xxx.xxx.xxx:xxxx, or zero-arg callable returning one
            headless: run the browser headless
            driver_type: CHROME or FIREFOX
            timeout: request timeout
            window_size: browser window size
        """
        self._redisdb = RedisDB()
        # Redis hash holding users of type "guest" for this redis_key.
        self._tab_user_pool = setting.TAB_USER_POOL.format(
            redis_key=redis_key, user_type="guest"
        )
        self._page_url = page_url
        self._min_users = min_users
        self._must_contained_keys = must_contained_keys
        self._keep_alive = keep_alive

        self._kwargs = kwargs
        # Defaults chosen for cookie harvesting: no images, headless browser.
        self._kwargs.setdefault("load_images", False)
        self._kwargs.setdefault("headless", True)

        # Locally cached, shuffled user ids, consumed by _get_user_id().
        self._users_id = []

    def _load_users_id(self):
        # Refresh the local id cache from redis; shuffle for random pick order.
        self._users_id = self._redisdb.hkeys(self._tab_user_pool)
        if self._users_id:
            random.shuffle(self._users_id)

    def _get_user_id(self):
        # Pop one cached id, reloading the cache from redis when empty.
        if not self._users_id:
            self._load_users_id()

        if self._users_id:
            return self._users_id.pop()

    def login(self) -> Optional[GuestUser]:
        """
        Produce one guest user: visit page_url with a WebDriver and collect
        its cookies. Intended to be overridden for custom flows.
        Returns None when a required cookie key is missing.
        """
        with WebDriver(**self._kwargs) as driver:
            driver.get(self._page_url)
            cookies = driver.cookies

            # for/else: the else branch runs only when no required key is missing.
            for key in self._must_contained_keys:
                if key not in cookies:
                    break
            else:
                user = GuestUser(user_agent=driver.user_agent, cookies=cookies)
                return user

            log.error("获取cookie失败 cookies = {}".format(cookies))
            return None

    def add_user(self, user: GuestUser):
        # Store the serialized user in the redis hash, keyed by user_id.
        log.debug("add {}".format(user))
        self._redisdb.hset(self._tab_user_pool, user.user_id, user.to_dict())

    def get_user(self, block=True) -> Optional[GuestUser]:
        """
        Args:
            block: when no user is available, produce one and retry instead
                of returning None

        Returns:
            A GuestUser, or None (only possible when block is falsy)
        """
        while True:
            try:
                user_id = self._get_user_id()
                user_str = None
                if user_id:
                    user_str = self._redisdb.hget(self._tab_user_pool, user_id)
                    # The user may have been deleted by another spider; refresh
                    # the locally cached user ids and retry.
                    if not user_str:
                        self._load_users_id()
                        continue

                if not user_id and block:
                    # Pool is empty: produce a single user synchronously.
                    self._keep_alive = False
                    self._min_users = 1
                    self.run()
                    continue

                # NOTE(review): eval() deserializes the redis payload -- safe
                # only while this pool is the sole writer of the hash; confirm
                # no untrusted writer can reach this key.
                return user_str and GuestUser(**eval(user_str))
            except Exception as e:
                log.exception(e)
                tools.delay_time(1)

    def del_user(self, user_id: str):
        # Remove the user from redis and invalidate the local id cache.
        self._redisdb.hdel(self._tab_user_pool, user_id)
        self._load_users_id()

    def run(self):
        # Producer loop: top the pool up to _min_users. Loops forever when
        # _keep_alive is set, otherwise performs one pass and returns.
        while True:
            try:
                now_user_count = self._redisdb.hget_count(self._tab_user_pool)
                need_user_count = self._min_users - now_user_count

                if need_user_count > 0:
                    log.info(
                        "当前在线user数为 {} 小于 {}, 生产user".format(
                            now_user_count, self._min_users
                        )
                    )
                    try:
                        user = self.login()
                        if user:
                            self.add_user(user)
                    except Exception as e:
                        log.exception(e)
                else:
                    log.debug("当前user数为 {} 数量足够 暂不生产".format(now_user_count))

                if self._keep_alive:
                    tools.delay_time(10)
                else:
                    break
            except Exception as e:
                log.exception(e)
                tools.delay_time(1)
5,293
Python
.py
139
23.165468
81
0.522881
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,878
gold_user_pool.py
demigody_nas-tools/third_party/feapder/feapder/network/user_pool/gold_user_pool.py
# -*- coding: utf-8 -*-
"""
Created on 2018/12/27 11:32 AM
---------
@summary: UserPool for expensive accounts with query-count and usage-time limits.
---------
@author: Boris
@email: boris_liu@foxmail.com
"""

import os
import random
import time
from enum import Enum, unique
from typing import Optional, List

from feapder import setting
from feapder.db.redisdb import RedisDB
from feapder.network.user_pool.base_user_pool import GoldUser, UserPoolInterface
from feapder.utils import metrics
from feapder.utils.log import log
from feapder.utils.redis_lock import RedisLock
from feapder.utils.tools import send_msg


@unique
class GoldUserStatus(Enum):
    # usage statuses
    USED = "used"
    SUCCESS = "success"
    OVERDUE = "overdue"  # cookie expired
    SLEEP = "sleep"
    EXCEPTION = "exception"
    # login statuses
    LOGIN_SUCCESS = "login_success"
    # NOTE(review): member name is misspelled ("FALIED") but it is public API
    # and @unique forbids adding a correctly spelled alias; kept as-is.
    LOGIN_FALIED = "login_failed"


class GoldUserPool(UserPoolInterface):
    """
    UserPool for expensive accounts whose query count is limited.
    """

    def __init__(
        self,
        redis_key,
        *,
        users: List[GoldUser],
        keep_alive=False,
    ):
        """
        @param redis_key: prefix of the redis key the users are stored under
        @param users: account information
        @param keep_alive: keep running as a daemon so users are replenished
            as soon as the pool runs low
        """
        self._tab_user_pool = setting.TAB_USER_POOL.format(
            redis_key=redis_key, user_type="gold"
        )
        self.users = users
        self._keep_alive = keep_alive

        self._redisdb = RedisDB()
        # Locally cached, shuffled user ids, consumed by _get_user_id().
        self._users_id = []

        if not users:
            raise ValueError("not users")

        # Assign shared class attributes on the user class so every GoldUser
        # instance can reach redis and the pool's key.
        self.users[0].__class__.redisdb = self._redisdb
        self.users[0].__class__.redis_key = self._tab_user_pool

        self.__init_metrics()
        self.__sync_users_base_info()
        self.__sycn_users_info()

    def __init_metrics(self):
        # Initialize the metrics backend from global settings.
        metrics.init(**setting.METRICS_OTHER_ARGS)

    def __sync_users_base_info(self):
        # Push local base info to redis. Note: only safe to do inside __init__.
        for user in self.users:
            cache_user = self.get_user_by_id(user.user_id)
            if cache_user:
                for key, value in user.to_dict().items():
                    if not key.startswith("_"):
                        setattr(cache_user, key, value)
                # NOTE(review): "sycn_to_redis" spelling comes from GoldUser's API.
                cache_user.sycn_to_redis()

    def __sycn_users_info(self):
        # Pull login info from redis back into the local user list.
        # NOTE(review): method name misspelled ("sycn") in the original; kept
        # because it is name-mangled and referenced as-is inside run().
        for index, user in enumerate(self.users):
            cache_user = self.get_user_by_id(user.user_id)
            if cache_user:
                self.users[index] = cache_user

    def _load_users_id(self):
        # Refresh the local id cache from redis; shuffle for random pick order.
        self._users_id = self._redisdb.hkeys(self._tab_user_pool)
        if self._users_id:
            random.shuffle(self._users_id)

    def _get_user_id(self):
        # Pop one cached id, reloading the cache from redis when empty.
        if not self._users_id:
            self._load_users_id()

        if self._users_id:
            return self._users_id.pop()

    def login(self, user: GoldUser) -> GoldUser:
        """
        Log the user in to produce cookies. Must be overridden by subclasses.
        """
        raise NotImplementedError

    def get_user_by_id(self, user_id: str) -> GoldUser:
        # Deserialize one user from the redis hash; returns None when absent.
        user_str = self._redisdb.hget(self._tab_user_pool, user_id)
        if user_str:
            # NOTE(review): eval() on the redis payload -- trusted writers only.
            user = GoldUser(**eval(user_str))
            return user

    def get_user(
        self,
        block=True,
        username=None,
        used_for_spider_name=None,
        not_limit_use_interval=False,
    ) -> Optional[GoldUser]:
        """
        @params username: fetch this specific user
        @params used_for_spider_name: exclusive use -- name of the spider that
            owns the user; other spiders cannot preempt it
        @params block: whether to wait when no user is available
        @params not_limit_use_interval: ignore the per-user usage interval
        @return: GoldUser
        """
        while True:
            try:
                user_id = username or self._get_user_id()
                user_str = None
                if user_id:
                    user_str = self._redisdb.hget(self._tab_user_pool, user_id)

                if (not user_id or not user_str) and block:
                    # Nothing usable: run the producer once, then retry.
                    self._keep_alive = False
                    self.run(username)
                    continue

                # Got a user payload.
                user = GoldUser(**eval(user_str))

                # Exclusive use: another spider holds the user -- only usable
                # once the exclusivity window has elapsed.
                if (
                    user.get_used_for_spider_name()
                    and user.get_used_for_spider_name() != used_for_spider_name
                ):
                    wait_time = time.time() - user.get_last_use_time()
                    if wait_time < user.exclusive_time:
                        log.info(
                            "用户{} 被 {} 爬虫独占,需等待 {} 秒后才可使用".format(
                                user.username,
                                user.get_used_for_spider_name(),
                                user.exclusive_time - wait_time,
                            )
                        )
                        time.sleep(1)
                        continue

                if not user.is_overwork() and user.is_at_work_time():
                    if not user.cookies:
                        # Not logged in yet: run the producer for this user.
                        log.debug(f"用户 {user.username} 未登录,尝试登录")
                        self._keep_alive = False
                        self.run(username)
                        continue

                    if not_limit_use_interval or user.is_time_to_use():
                        user.set_used_for_spider_name(used_for_spider_name)
                        log.debug("使用用户 {}".format(user.username))
                        self.record_user_status(user.user_id, GoldUserStatus.USED)
                        return user
                    else:
                        # Used too recently: move on to the next user.
                        log.debug("{} 用户使用间隔过短 查看下一个用户".format(user.username))
                        time.sleep(1)
                        continue
                else:
                    if not user.is_at_work_time():
                        log.info("用户 {} 不在工作时间 sleep 60s".format(user.username))
                        if block:
                            time.sleep(60)
                            continue
                        else:
                            return None
            except Exception as e:
                log.exception(e)
                time.sleep(1)

    def del_user(self, user_id: str):
        # Invalidate the user's cookies and record it as overdue.
        user = self.get_user_by_id(user_id)
        if user:
            user.set_cookies(None)
            self.record_user_status(user.user_id, GoldUserStatus.OVERDUE)

    def add_user(self, user: GoldUser):
        # Persist the user back to redis.
        user.sycn_to_redis()

    def delay_use(self, user_id: str, delay_seconds: int):
        # Put the user to sleep for delay_seconds.
        user = self.get_user_by_id(user_id)
        if user:
            user.set_delay_use(delay_seconds)
            self.record_user_status(user_id, GoldUserStatus.SLEEP)

    def record_success_user(self, user_id: str):
        self.record_user_status(user_id, GoldUserStatus.SUCCESS)

    def record_exception_user(self, user_id: str):
        self.record_user_status(user_id, GoldUserStatus.EXCEPTION)

    def run(self, username=None):
        # Producer loop: (re-)login users under a redis lock so only one
        # process produces at a time. Loops forever when _keep_alive is set,
        # otherwise performs one pass and returns.
        while True:
            try:
                with RedisLock(
                    key=self._tab_user_pool, lock_timeout=3600, wait_timeout=0
                ) as _lock:
                    if _lock.locked:
                        self.__sycn_users_info()
                        online_user = 0
                        for user in self.users:
                            if username and username != user.username:
                                continue
                            try:
                                if user.cookies:
                                    online_user += 1
                                    continue

                                # Pre-check: respect the minimum re-login interval.
                                if not user.is_time_to_login():
                                    log.info(
                                        "账号{}与上次登录时间间隔过短,暂不登录: 将在{}登录使用".format(
                                            user.username, user.next_login_time()
                                        )
                                    )
                                    continue

                                user = self.login(user)
                                if user.cookies:
                                    # Save the cookies.
                                    user.set_login_time()
                                    self.add_user(user)
                                    self.record_user_status(
                                        user.user_id, GoldUserStatus.LOGIN_SUCCESS
                                    )
                                    log.debug("登录成功 {}".format(user.username))
                                    online_user += 1
                                else:
                                    log.info("登录失败 {}".format(user.username))
                                    self.record_user_status(
                                        user.user_id, GoldUserStatus.LOGIN_FALIED
                                    )
                            except NotImplementedError:
                                # login() was not overridden: abort the process.
                                log.error(
                                    f"{self.__class__.__name__} must be implementation login method!"
                                )
                                os._exit(0)
                            except Exception as e:
                                log.exception(e)
                                msg = f"{user.username} 账号登陆失败 exception: {str(e)}"
                                log.info(msg)
                                self.record_user_status(
                                    user.user_id, GoldUserStatus.LOGIN_FALIED
                                )
                                send_msg(
                                    msg=msg,
                                    level="error",
                                    message_prefix=f"{user.username} 账号登陆失败",
                                )

                        log.info("当前在线user数为 {}".format(online_user))

                if self._keep_alive:
                    time.sleep(10)
                else:
                    break
            except Exception as e:
                log.exception(e)
                time.sleep(1)

    def record_user_status(self, user_id: str, status: GoldUserStatus):
        # Emit one metrics counter tick per status event, classified per status.
        metrics.emit_counter(user_id, 1, classify=f"users_{status.value}")
10,762
Python
.py
249
23.97992
101
0.463521
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,879
normal_user_pool.py
demigody_nas-tools/third_party/feapder/feapder/network/user_pool/normal_user_pool.py
# -*- coding: utf-8 -*- """ Created on 2018/12/27 11:32 AM --------- @summary: 普通用户池,适用于账号成本低且大量的场景 --------- @author: Boris @email: [email protected] """ import os import random from typing import Iterable, Optional import feapder.utils.tools as tools from feapder import setting from feapder.db.mysqldb import MysqlDB from feapder.db.redisdb import RedisDB from feapder.network.user_pool.base_user_pool import UserPoolInterface, NormalUser from feapder.utils.log import log from feapder.utils.redis_lock import RedisLock class NormalUserPool(UserPoolInterface): """ 普通用户池,适用于账号成本低且大量的场景 """ def __init__( self, redis_key, *, table_userbase, login_state_key="login_state", lock_state_key="lock_state", username_key="username", password_key="password", login_retry_times=1, keep_alive=False, ): """ @param redis_key: 项目名 @param table_userbase: 用户表名 @param login_state_key: 登录状态列名 @param lock_state_key: 封锁状态列名 @param username_key: 登陆名列名 @param password_key: 密码列名 @param login_retry_times: 登陆失败重试次数 @param keep_alive: 是否保持常驻,以便user不足时立即补充 """ self._tab_user_pool = setting.TAB_USER_POOL.format( redis_key=redis_key, user_type="normal" ) self._login_retry_times = login_retry_times self._table_userbase = table_userbase self._login_state_key = login_state_key self._lock_state_key = lock_state_key self._username_key = username_key self._password_key = password_key self._keep_alive = keep_alive self._users_id = [] self._redisdb = RedisDB() self._mysqldb = MysqlDB() self._create_userbase() def _load_users_id(self): self._users_id = self._redisdb.hkeys(self._tab_user_pool) if self._users_id: random.shuffle(self._users_id) def _get_user_id(self): if not self._users_id: self._load_users_id() if self._users_id: return self._users_id.pop() def _create_userbase(self): sql = f""" CREATE TABLE IF NOT EXISTS `{self._table_userbase}` ( `id` int(10) unsigned NOT NULL AUTO_INCREMENT, `{self._username_key}` varchar(50) DEFAULT NULL COMMENT '用户名', `{self._password_key}` varchar(255) DEFAULT NULL COMMENT 
'密码', `{self._login_state_key}` int(11) DEFAULT '0' COMMENT '登录状态(0未登录 1已登录)', `{self._lock_state_key}` int(11) DEFAULT '0' COMMENT '账号是否被封(0 未封 1 被封)', PRIMARY KEY (`id`), UNIQUE KEY `username` (`username`) USING BTREE ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; """ self._mysqldb.execute(sql) def _load_user(self) -> Iterable[NormalUser]: """ 返回用户信息 @return: yield username, password """ sql = "select id, {username_key}, {password_key} from {table_userbase} where {lock_state_key} != 1 and {login_state_key} != 1".format( username_key=self._username_key, password_key=self._password_key, table_userbase=self._table_userbase, lock_state_key=self._lock_state_key, login_state_key=self._login_state_key, ) for id, username, password in self._mysqldb.find(sql): yield NormalUser(user_id=id, username=username, password=password) def handle_login_failed_user(self, user: NormalUser): """ 处理登录失败的user @return: """ pass def handel_exception(self, e: Exception): """ 处理异常 @param e: @return: """ log.exception(e) def login(self, user: NormalUser) -> NormalUser: """ 登录 生产cookie """ raise NotImplementedError def add_user(self, user: NormalUser): log.debug("add {}".format(user)) self._redisdb.hset(self._tab_user_pool, user.user_id, user.to_dict()) sql = "update {table_userbase} set {login_state_key} = 1 where id = {user_id}".format( table_userbase=self._table_userbase, login_state_key=self._login_state_key, username_key=self._username_key, user_id=user.user_id, ) self._mysqldb.update(sql) def get_user(self, block=True) -> Optional[NormalUser]: while True: try: user_id = self._get_user_id() user_str = None if user_id: user_str = self._redisdb.hget(self._tab_user_pool, user_id) # 如果没取到user,可能是其他爬虫将此用户删除了,需要重刷新本地缓存的用户id if not user_str: self._load_users_id() continue if not user_id and block: self._keep_alive = False self.run() continue return user_str and NormalUser(**eval(user_str)) except Exception as e: log.exception(e) tools.delay_time(1) def del_user(self, user_id: int): """ 删除失效的user 
@return: """ self._redisdb.hdel(self._tab_user_pool, user_id) self._load_users_id() sql = "update {table_userbase} set {login_state_key} = 0 where id = {user_id}".format( table_userbase=self._table_userbase, login_state_key=self._login_state_key, username_key=self._username_key, user_id=user_id, ) self._mysqldb.update(sql) def tag_user_locked(self, user_id: int): """ 标记用户被封堵 """ sql = "update {table_userbase} set {lock_state_key} = 1 where id = {user_id}".format( table_userbase=self._table_userbase, lock_state_key=self._lock_state_key, username_key=self._username_key, user_id=user_id, ) self._mysqldb.update(sql) def run(self): while True: try: try: with RedisLock( key=self._tab_user_pool, lock_timeout=3600, wait_timeout=0 ) as _lock: if _lock.locked: for user in self._load_user(): retry_times = 0 while retry_times <= self._login_retry_times: try: login_user = self.login(user) if login_user: self.add_user(login_user) else: self.handle_login_failed_user(user) break except NotImplementedError: log.error( f"{self.__class__.__name__} must be implementation login method!" ) os._exit(0) except Exception as e: self.handel_exception(e) log.debug( f"login failed, user: {user} retry_times: {retry_times}" ) retry_times += 1 else: self.handle_login_failed_user(user) now_user_count = self._redisdb.hget_count( self._tab_user_pool ) log.info("当前在线user数为 {}".format(now_user_count)) except Exception as e: log.exception(e) if self._keep_alive: tools.delay_time(10) else: break except Exception as e: log.exception(e) tools.delay_time(1)
8,648
Python
.py
211
25.265403
142
0.504181
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,880
__init__.py
demigody_nas-tools/third_party/feapder/feapder/network/user_pool/__init__.py
__all__ = [ "GuestUserPool", "GuestUser", "NormalUserPool", "NormalUser", "GoldUserPool", "GoldUser", "GoldUserStatus", ] from .gold_user_pool import GoldUserPool, GoldUser, GoldUserStatus from .guest_user_pool import GuestUserPool, GuestUser from .normal_user_pool import NormalUserPool, NormalUser
329
Python
.py
12
24
66
0.734177
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,881
base_user_pool.py
demigody_nas-tools/third_party/feapder/feapder/network/user_pool/base_user_pool.py
import abc import json import random import time from datetime import datetime from feapder.db.redisdb import RedisDB from feapder.utils.log import log from feapder.utils.tools import get_md5, timestamp_to_date class GuestUser: def __init__(self, user_agent=None, proxies=None, cookies=None, **kwargs): self.__dict__.update(kwargs) self.user_agent = user_agent self.proxies = proxies self.cookies = cookies self.user_id = kwargs.get("user_id") or get_md5(user_agent, proxies, cookies) def __str__(self): return f"<{self.__class__.__name__}>: " + json.dumps( self.to_dict(), indent=4, ensure_ascii=False ) def __repr__(self): return self.__str__() def to_dict(self): data = {} for key, value in self.__dict__.items(): if value is not None: data[key] = value return data def from_dict(cls, data): return cls.__init__(**data) class NormalUser(GuestUser): def __init__(self, username, password, **kwargs): super().__init__(**kwargs) self.username = username self.password = password self.user_id = kwargs.get("user_id") or self.username # 用户名作为user_id class GoldUser(NormalUser): """ 昂贵的账号 """ redisdb: RedisDB = None redis_key: str = None def __init__( self, max_use_times, use_interval=0, work_time=(7, 23), login_interval=30 * 60, exclusive_time=None, **kwargs, ): """ @param max_use_times: @param use_interval: 使用时间间隔。 支持元组 指定间隔的时间范围 如(5,10)即5到10秒;或直接传整数 @param work_time: 工作时间,默认 7点到23点 @param login_interval: 登录时间间隔 防止频繁登录 导致账号被封 @param exclusive_time: 独占时长 """ super().__init__(**kwargs) self.max_use_times = max_use_times self.use_interval = use_interval self.work_time = work_time self.login_interval = login_interval self.exclusive_time = exclusive_time or ( use_interval[-1] * 5 if isinstance(use_interval, (tuple, list)) else use_interval * 5 ) self._delay_use = kwargs.get("_delay_use", 0) # 延时使用,用于等待解封的用户 self._login_time = kwargs.get("_login_time", 0) self._use_times = kwargs.get("_use_times", 0) self._last_use_time = kwargs.get("_last_use_time", 0) self._used_for_spider_name = 
kwargs.get("_used_for_spider_name") self._reset_use_times_date = kwargs.get("_reset_use_times_date") def __eq__(self, other): return self.username == other.username def update(self, ohter): self.__dict__.update(ohter.to_dict()) def sycn_to_redis(self): self.redisdb.hset(self.redis_key, self.user_id, self.to_dict()) def set_delay_use(self, seconds): self._delay_use = seconds self.sycn_to_redis() def set_cookies(self, cookies): self.cookies = cookies self.sycn_to_redis() def set_login_time(self, _login_time=None): self._login_time = _login_time or time.time() self.sycn_to_redis() def get_login_time(self): return self._login_time def get_last_use_time(self): return self._last_use_time def get_used_for_spider_name(self): return self._used_for_spider_name def set_used_for_spider_name(self, name): self._used_for_spider_name = name self._use_times += 1 self._last_use_time = time.time() self.sycn_to_redis() def is_time_to_login(self): return time.time() - self.get_login_time() > self.login_interval def next_login_time(self): return timestamp_to_date(int(self.login_interval + self.get_login_time())) def is_time_to_use(self): if self._delay_use: is_time = time.time() - self._last_use_time > self._delay_use if is_time: self._delay_use = 0 # 不用同步了,使用用户时会同步 else: is_time = time.time() - self._last_use_time > ( random.randint(*self.use_interval) if isinstance(self.use_interval, (tuple, list)) else self.use_interval ) return is_time def reset_use_times(self): self._use_times = 0 self._reset_use_times_date = datetime.now().strftime("%Y-%m-%d") self.sycn_to_redis() @property def use_times(self): current_date = datetime.now().strftime("%Y-%m-%d") if current_date != self._reset_use_times_date: self.reset_use_times() return self._use_times def is_overwork(self): if self.use_times > self.max_use_times: log.info("账号 {} 请求次数超限制".format(self.username)) return True return False def is_at_work_time(self): if datetime.now().hour in list(range(*self.work_time)): return True log.info("账号 {} 
不再工作时间内".format(self.username)) return False class UserPoolInterface(metaclass=abc.ABCMeta): @abc.abstractmethod def login(self, *args, **kwargs): """ 登录 生产cookie Args: *args: **kwargs: Returns: """ raise NotImplementedError @abc.abstractmethod def add_user(self, *args, **kwargs): """ 将带有cookie的用户添加到用户池 Args: *args: **kwargs: Returns: """ raise NotImplementedError @abc.abstractmethod def get_user(self, block=True): """ 获取用户使用 Args: block: 无用户时是否等待 Returns: """ raise NotImplementedError @abc.abstractmethod def del_user(self, *args, **kwargs): """ 删除用户 Args: *args: **kwargs: Returns: """ raise NotImplementedError @abc.abstractmethod def run(self): """ 维护一定数量的用户 Returns: """ raise NotImplementedError
6,397
Python
.py
182
24.824176
85
0.579362
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,882
_requests.py
demigody_nas-tools/third_party/feapder/feapder/network/downloader/_requests.py
# -*- coding: utf-8 -*- """ Created on 2022/4/10 5:57 下午 --------- @summary: --------- @author: Boris @email: [email protected] """ import requests from requests.adapters import HTTPAdapter from feapder.network.downloader.base import Downloader from feapder.network.response import Response class RequestsDownloader(Downloader): def download(self, request) -> Response: response = requests.request( request.method, request.url, **request.requests_kwargs ) response = Response(response) return response class RequestsSessionDownloader(Downloader): session = None @property def _session(self): if not self.__class__.session: self.__class__.session = requests.Session() # pool_connections – 缓存的 urllib3 连接池个数 pool_maxsize – 连接池中保存的最大连接数 http_adapter = HTTPAdapter(pool_connections=1000, pool_maxsize=1000) # 任何使用该session会话的 HTTP 请求,只要其 URL 是以给定的前缀开头,该传输适配器就会被使用到。 self.__class__.session.mount("http", http_adapter) return self.__class__.session def download(self, request) -> Response: response = self._session.request( request.method, request.url, **request.requests_kwargs ) response = Response(response) return response
1,440
Python
.py
37
29.405405
80
0.673742
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,883
_selenium.py
demigody_nas-tools/third_party/feapder/feapder/network/downloader/_selenium.py
# -*- coding: utf-8 -*- """ Created on 2022/7/26 4:28 下午 --------- @summary: --------- @author: Boris @email: [email protected] """ import feapder.setting as setting import feapder.utils.tools as tools from feapder.network.downloader.base import RenderDownloader from feapder.network.response import Response from feapder.utils.webdriver import WebDriverPool, SeleniumDriver class SeleniumDownloader(RenderDownloader): webdriver_pool: WebDriverPool = None @property def _webdriver_pool(self): if not self.__class__.webdriver_pool: self.__class__.webdriver_pool = WebDriverPool( **setting.WEBDRIVER, driver=SeleniumDriver ) return self.__class__.webdriver_pool def download(self, request) -> Response: # 代理优先级 自定义 > 配置文件 > 随机 if request.custom_proxies: proxy = request.get_proxy() elif setting.WEBDRIVER.get("proxy"): proxy = setting.WEBDRIVER.get("proxy") else: proxy = request.get_proxy() # user_agent优先级 自定义 > 配置文件 > 随机 if request.custom_ua: user_agent = request.get_user_agent() elif setting.WEBDRIVER.get("user_agent"): user_agent = setting.WEBDRIVER.get("user_agent") else: user_agent = request.get_user_agent() cookies = request.get_cookies() url = request.url render_time = request.render_time or setting.WEBDRIVER.get("render_time") if request.get_params(): url = tools.joint_url(url, request.get_params()) browser: SeleniumDriver = self._webdriver_pool.get( user_agent=user_agent, proxy=proxy ) try: browser.get(url) if cookies: browser.delete_all_cookies() browser.cookies = cookies browser.get(url) if render_time: tools.delay_time(render_time) html = browser.page_source response = Response.from_dict( { "url": browser.current_url, "cookies": browser.cookies, "_content": html.encode(), "status_code": 200, "elapsed": 666, "headers": { "User-Agent": browser.user_agent, "Cookie": tools.cookies2str(browser.cookies), }, } ) response.driver = browser response.browser = browser return response except Exception as e: self._webdriver_pool.remove(browser) raise e def close(self, driver): if 
driver: self._webdriver_pool.remove(driver) def put_back(self, driver): """ 释放浏览器对象 """ self._webdriver_pool.put(driver) def close_all(self): """ 关闭所有浏览器 """ self._webdriver_pool.close()
3,098
Python
.py
87
24.183908
81
0.56044
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,884
__init__.py
demigody_nas-tools/third_party/feapder/feapder/network/downloader/__init__.py
from ._requests import RequestsDownloader from ._requests import RequestsSessionDownloader # 下面是非必要依赖 try: from ._selenium import SeleniumDownloader except ModuleNotFoundError: pass
207
Python
.py
7
25
48
0.846995
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,885
base.py
demigody_nas-tools/third_party/feapder/feapder/network/downloader/base.py
import abc from abc import ABC from feapder.network.response import Response class Downloader: @abc.abstractmethod def download(self, request) -> Response: """ Args: request: feapder.Request Returns: feapder.Response """ raise NotImplementedError def close(self, response: Response): pass class RenderDownloader(Downloader, ABC): def put_back(self, driver): """ 释放浏览器对象 """ pass def close(self, driver): """ 关闭浏览器 """ pass def close_all(self): """ 关闭所有浏览器 """ pass
694
Python
.py
30
14.5
45
0.565854
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,886
setting.py
demigody_nas-tools/third_party/feapder/feapder/templates/project_template/setting.py
# -*- coding: utf-8 -*- """爬虫配置文件""" # import os # import sys # # # MYSQL # MYSQL_IP = "localhost" # MYSQL_PORT = 3306 # MYSQL_DB = "" # MYSQL_USER_NAME = "" # MYSQL_USER_PASS = "" # # # MONGODB # MONGO_IP = "localhost" # MONGO_PORT = 27017 # MONGO_DB = "" # MONGO_USER_NAME = "" # MONGO_USER_PASS = "" # # # REDIS # # ip:port 多个可写为列表或者逗号隔开 如 ip1:port1,ip2:port2 或 ["ip1:port1", "ip2:port2"] # REDISDB_IP_PORTS = "localhost:6379" # REDISDB_USER_PASS = "" # REDISDB_DB = 0 # # 连接redis时携带的其他参数,如ssl=True # REDISDB_KWARGS = dict() # # 适用于redis哨兵模式 # REDISDB_SERVICE_NAME = "" # # # 数据入库的pipeline,可自定义,默认MysqlPipeline # ITEM_PIPELINES = [ # "feapder.pipelines.mysql_pipeline.MysqlPipeline", # # "feapder.pipelines.mongo_pipeline.MongoPipeline", # # "feapder.pipelines.console_pipeline.ConsolePipeline", # ] # EXPORT_DATA_MAX_FAILED_TIMES = 10 # 导出数据时最大的失败次数,包括保存和更新,超过这个次数报警 # EXPORT_DATA_MAX_RETRY_TIMES = 10 # 导出数据时最大的重试次数,包括保存和更新,超过这个次数则放弃重试 # # # 爬虫相关 # # COLLECTOR # COLLECTOR_TASK_COUNT = 32 # 每次获取任务数量,追求速度推荐32 # # # SPIDER # SPIDER_THREAD_COUNT = 1 # 爬虫并发数,追求速度推荐32 # # 下载时间间隔 单位秒。 支持随机 如 SPIDER_SLEEP_TIME = [2, 5] 则间隔为 2~5秒之间的随机数,包含2和5 # SPIDER_SLEEP_TIME = 0 # SPIDER_MAX_RETRY_TIMES = 10 # 每个请求最大重试次数 # KEEP_ALIVE = False # 爬虫是否常驻 # 下载 # DOWNLOADER = "feapder.network.downloader.RequestsDownloader" # 请求下载器 # SESSION_DOWNLOADER = "feapder.network.downloader.RequestsSessionDownloader" # RENDER_DOWNLOADER = "feapder.network.downloader.SeleniumDownloader" # 渲染下载器 # # RENDER_DOWNLOADER="feapder.network.downloader.PlaywrightDownloader" # MAKE_ABSOLUTE_LINKS = True # 自动转成绝对连接 # # 浏览器渲染 # WEBDRIVER = dict( # pool_size=1, # 浏览器的数量 # load_images=True, # 是否加载图片 # user_agent=None, # 字符串 或 无参函数,返回值为user_agent # proxy=None, # xxx.xxx.xxx.xxx:xxxx 或 无参函数,返回值为代理地址 # headless=False, # 是否为无头浏览器 # driver_type="CHROME", # CHROME、FIREFOX # timeout=30, # 请求超时时间 # window_size=(1024, 800), # 窗口大小 # render_time=0, # 渲染时长,即打开网页等待指定时间后再获取源码 # custom_argument=[ # "--ignore-certificate-errors", # 
"--disable-blink-features=AutomationControlled", # ], # 自定义浏览器渲染参数 # xhr_url_regexes=None, # 拦截xhr接口,支持正则,数组类型 # auto_install_driver=True, # 自动下载浏览器驱动 支持chrome 和 firefox # download_path=None, # 下载文件的路径 # use_stealth_js=False, # 使用stealth.min.js隐藏浏览器特征 # ) # # PLAYWRIGHT = dict( # user_agent=None, # 字符串 或 无参函数,返回值为user_agent # proxy=None, # xxx.xxx.xxx.xxx:xxxx 或 无参函数,返回值为代理地址 # headless=False, # 是否为无头浏览器 # driver_type="chromium", # chromium、firefox、webkit # timeout=30, # 请求超时时间 # window_size=(1024, 800), # 窗口大小 # download_path=None, # 下载文件的路径 # render_time=0, # 渲染时长,即打开网页等待指定时间后再获取源码 # wait_until="networkidle", # 等待页面加载完成的事件,可选值:"commit", "domcontentloaded", "load", "networkidle" # use_stealth_js=False, # 使用stealth.min.js隐藏浏览器特征 # page_on_event_callback=None, # page.on() 事件的回调 如 page_on_event_callback={"dialog": lambda dialog: dialog.accept()} # storage_state_path=None, # 保存浏览器状态的路径 # url_regexes=None, # 拦截接口,支持正则,数组类型 # save_all=False, # 是否保存所有拦截的接口, 配合url_regexes使用,为False时只保存最后一次拦截的接口 # ) # # # 爬虫启动时,重新抓取失败的requests # RETRY_FAILED_REQUESTS = False # # 爬虫启动时,重新入库失败的item # RETRY_FAILED_ITEMS = False # # 保存失败的request # SAVE_FAILED_REQUEST = True # # request防丢机制。(指定的REQUEST_LOST_TIMEOUT时间内request还没做完,会重新下发 重做) # REQUEST_LOST_TIMEOUT = 600 # 10分钟 # # request网络请求超时时间 # REQUEST_TIMEOUT = 22 # 等待服务器响应的超时时间,浮点数,或(connect timeout, read timeout)元组 # # item在内存队列中最大缓存数量 # ITEM_MAX_CACHED_COUNT = 5000 # # item每批入库的最大数量 # ITEM_UPLOAD_BATCH_MAX_SIZE = 1000 # # item入库时间间隔 # ITEM_UPLOAD_INTERVAL = 1 # # 内存任务队列最大缓存的任务数,默认不限制;仅对AirSpider有效。 # TASK_MAX_CACHED_SIZE = 0 # # # 下载缓存 利用redis缓存,但由于内存大小限制,所以建议仅供开发调试代码时使用,防止每次debug都需要网络请求 # RESPONSE_CACHED_ENABLE = False # 是否启用下载缓存 成本高的数据或容易变需求的数据,建议设置为True # RESPONSE_CACHED_EXPIRE_TIME = 3600 # 缓存时间 秒 # RESPONSE_CACHED_USED = False # 是否使用缓存 补采数据时可设置为True # # # 设置代理 # PROXY_EXTRACT_API = None # 代理提取API ,返回的代理分割符为\r\n # PROXY_ENABLE = True # PROXY_MAX_FAILED_TIMES = 5 # 代理最大失败次数,超过则不使用,自动删除 # PROXY_POOL = 
"feapder.network.proxy_pool.ProxyPool" # 代理池 # # # 随机headers # RANDOM_HEADERS = True # # UserAgent类型 支持 'chrome', 'opera', 'firefox', 'internetexplorer', 'safari','mobile' 若不指定则随机类型 # USER_AGENT_TYPE = "chrome" # # 默认使用的浏览器头 # DEFAULT_USERAGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36" # # requests 使用session # USE_SESSION = False # # # 去重 # ITEM_FILTER_ENABLE = False # item 去重 # REQUEST_FILTER_ENABLE = False # request 去重 # ITEM_FILTER_SETTING = dict( # filter_type=1 # 永久去重(BloomFilter) = 1 、内存去重(MemoryFilter) = 2、 临时去重(ExpireFilter)= 3、轻量去重(LiteFilter)= 4 # ) # REQUEST_FILTER_SETTING = dict( # filter_type=3, # 永久去重(BloomFilter) = 1 、内存去重(MemoryFilter) = 2、 临时去重(ExpireFilter)= 3、 轻量去重(LiteFilter)= 4 # expire_time=2592000, # 过期时间1个月 # ) # # # 报警 支持钉钉、飞书、企业微信、邮件 # # 钉钉报警 # DINGDING_WARNING_URL = "" # 钉钉机器人api # DINGDING_WARNING_PHONE = "" # 报警人 支持列表,可指定多个 # DINGDING_WARNING_ALL = False # 是否提示所有人, 默认为False # # 飞书报警 # # https://open.feishu.cn/document/ukTMukTMukTM/ucTM5YjL3ETO24yNxkjN#e1cdee9f # FEISHU_WARNING_URL = "" # 飞书机器人api # FEISHU_WARNING_USER = None # 报警人 {"open_id":"ou_xxxxx", "name":"xxxx"} 或 [{"open_id":"ou_xxxxx", "name":"xxxx"}] # FEISHU_WARNING_ALL = False # 是否提示所有人, 默认为False # # 邮件报警 # EMAIL_SENDER = "" # 发件人 # EMAIL_PASSWORD = "" # 授权码 # EMAIL_RECEIVER = "" # 收件人 支持列表,可指定多个 # EMAIL_SMTPSERVER = "smtp.163.com" # 邮件服务器 默认为163邮箱 # # 企业微信报警 # WECHAT_WARNING_URL = "" # 企业微信机器人api # WECHAT_WARNING_PHONE = "" # 报警人 将会在群内@此人, 支持列表,可指定多人 # WECHAT_WARNING_ALL = False # 是否提示所有人, 默认为False # # 时间间隔 # WARNING_INTERVAL = 3600 # 相同报警的报警时间间隔,防止刷屏; 0表示不去重 # WARNING_LEVEL = "DEBUG" # 报警级别, DEBUG / INFO / ERROR # WARNING_FAILED_COUNT = 1000 # 任务失败数 超过WARNING_FAILED_COUNT则报警 # # LOG_NAME = os.path.basename(os.getcwd()) # LOG_PATH = "log/%s.log" % LOG_NAME # log存储路径 # LOG_LEVEL = "DEBUG" # LOG_COLOR = True # 是否带有颜色 # LOG_IS_WRITE_TO_CONSOLE = True # 是否打印到控制台 # LOG_IS_WRITE_TO_FILE = False # 
是否写文件 # LOG_MODE = "w" # 写文件的模式 # LOG_MAX_BYTES = 10 * 1024 * 1024 # 每个日志文件的最大字节数 # LOG_BACKUP_COUNT = 20 # 日志文件保留数量 # LOG_ENCODING = "utf8" # 日志文件编码 # OTHERS_LOG_LEVAL = "ERROR" # 第三方库的log等级 # # # 切换工作路径为当前项目路径 # project_path = os.path.abspath(os.path.dirname(__file__)) # os.chdir(project_path) # 切换工作路经 # sys.path.insert(0, project_path) # print("当前工作路径为 " + os.getcwd())
8,869
Python
.py
183
34.830601
145
0.696894
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,887
main.py
demigody_nas-tools/third_party/feapder/feapder/templates/project_template/main.py
# -*- coding: utf-8 -*- """ Created on {DATE} --------- @summary: 爬虫入口 --------- @author: {USER} """ from feapder import ArgumentParser from spiders import * def crawl_xxx(): """ AirSpider爬虫 """ spider = xxx.XXXSpider() spider.start() def crawl_xxx(): """ Spider爬虫 """ spider = xxx.XXXSpider(redis_key="xxx:xxx") spider.start() def crawl_xxx(args): """ BatchSpider爬虫 """ spider = xxx_spider.XXXSpider( task_table="", # mysql中的任务表 batch_record_table="", # mysql中的批次记录表 batch_name="xxx(周全)", # 批次名字 batch_interval=7, # 批次时间 天为单位 若为小时 可写 1 / 24 task_keys=["id", "xxx"], # 需要获取任务表里的字段名,可添加多个 redis_key="xxx:xxxx", # redis中存放request等信息的根key task_state="state", # mysql中任务状态字段 ) if args == 1: spider.start_monitor_task() elif args == 2: spider.start() elif args == 3: spider.init_task() if __name__ == "__main__": parser = ArgumentParser(description="xxx爬虫") parser.add_argument( "--crawl_xxx", action="store_true", help="xxx爬虫", function=crawl_xxx ) parser.add_argument( "--crawl_xxx", action="store_true", help="xxx爬虫", function=crawl_xxx ) parser.add_argument( "--crawl_xxx", type=int, nargs=1, help="xxx爬虫", choices=[1, 2, 3], function=crawl_xxx, ) parser.start() # main.py作为爬虫启动的统一入口,提供命令行的方式启动多个爬虫,若只有一个爬虫,可不编写main.py # 将上面的xxx修改为自己实际的爬虫名 # 查看运行命令 python main.py --help # AirSpider与Spider爬虫运行方式 python main.py --crawl_xxx # BatchSpider运行方式 # 1. 下发任务:python main.py --crawl_xxx 1 # 2. 采集:python main.py --crawl_xxx 2 # 3. 重置任务:python main.py --crawl_xxx 3
2,089
Python
.py
66
21.151515
76
0.596659
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,888
mysql_pipeline.py
demigody_nas-tools/third_party/feapder/feapder/pipelines/mysql_pipeline.py
# -*- coding: utf-8 -*- """ Created on 2018-07-29 22:48:30 --------- @summary: 导出数据 --------- @author: Boris @email: [email protected] """ from typing import Dict, List, Tuple import feapder.utils.tools as tools from feapder.db.mysqldb import MysqlDB from feapder.pipelines import BasePipeline from feapder.utils.log import log class MysqlPipeline(BasePipeline): def __init__(self): self._to_db = None @property def to_db(self): if not self._to_db: self._to_db = MysqlDB() return self._to_db def save_items(self, table, items: List[Dict]) -> bool: """ 保存数据 Args: table: 表名 items: 数据,[{},{},...] Returns: 是否保存成功 True / False 若False,不会将本批数据入到去重库,以便再次入库 """ sql, datas = tools.make_batch_sql(table, items) add_count = self.to_db.add_batch(sql, datas) datas_size = len(datas) if add_count: log.info( "共导出 %s 条数据 到 %s, 重复 %s 条" % (datas_size, table, datas_size - add_count) ) return add_count != None def update_items(self, table, items: List[Dict], update_keys=Tuple) -> bool: """ 更新数据 Args: table: 表名 items: 数据,[{},{},...] update_keys: 更新的字段, 如 ("title", "publish_time") Returns: 是否更新成功 True / False 若False,不会将本批数据入到去重库,以便再次入库 """ sql, datas = tools.make_batch_sql( table, items, update_columns=update_keys or list(items[0].keys()) ) update_count = self.to_db.add_batch(sql, datas) if update_count: msg = "共更新 %s 条数据 到 %s" % (update_count // 2, table) if update_keys: msg += " 更新字段为 {}".format(update_keys) log.info(msg) return update_count != None
2,088
Python
.py
59
23.59322
88
0.54928
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,889
console_pipeline.py
demigody_nas-tools/third_party/feapder/feapder/pipelines/console_pipeline.py
# -*- coding: utf-8 -*- """ Created on 2021/3/18 12:39 上午 --------- @summary: --------- @author: Boris @email: [email protected] """ from feapder.pipelines import BasePipeline from typing import Dict, List, Tuple from feapder.utils.log import log class ConsolePipeline(BasePipeline): """ pipeline 是单线程的,批量保存数据的操作,不建议在这里写网络请求代码,如下载图片等 """ def save_items(self, table, items: List[Dict]) -> bool: """ 保存数据 Args: table: 表名 items: 数据,[{},{},...] Returns: 是否保存成功 True / False 若False,不会将本批数据入到去重库,以便再次入库 """ log.info("【调试输出】共导出 %s 条数据 到 %s" % (len(items), table)) return True def update_items(self, table, items: List[Dict], update_keys=Tuple) -> bool: """ 更新数据 Args: table: 表名 items: 数据,[{},{},...] update_keys: 更新的字段, 如 ("title", "publish_time") Returns: 是否更新成功 True / False 若False,不会将本批数据入到去重库,以便再次入库 """ log.info("【调试输出】共导出 %s 条数据 到 %s" % (len(items), table)) return True
1,392
Python
.py
39
21.384615
80
0.554717
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,890
mongo_pipeline.py
demigody_nas-tools/third_party/feapder/feapder/pipelines/mongo_pipeline.py
# -*- coding: utf-8 -*- """ Created on 2021-04-18 14:12:21 --------- @summary: 导出数据 --------- @author: Mkdir700 @email: [email protected] """ from typing import Dict, List, Tuple from feapder.db.mongodb import MongoDB from feapder.pipelines import BasePipeline from feapder.utils.log import log class MongoPipeline(BasePipeline): def __init__(self): self._to_db = None @property def to_db(self): if not self._to_db: self._to_db = MongoDB() return self._to_db def save_items(self, table, items: List[Dict]) -> bool: """ 保存数据 Args: table: 表名 items: 数据,[{},{},...] Returns: 是否保存成功 True / False 若False,不会将本批数据入到去重库,以便再次入库 """ try: add_count = self.to_db.add_batch(coll_name=table, datas=items) datas_size = len(items) log.info( "共导出 %s 条数据到 %s, 新增 %s条, 重复 %s 条" % (datas_size, table, add_count, datas_size - add_count) ) return True except Exception as e: log.exception(e) return False def update_items(self, table, items: List[Dict], update_keys=Tuple) -> bool: """ 更新数据 Args: table: 表名 items: 数据,[{},{},...] update_keys: 更新的字段, 如 ("title", "publish_time") Returns: 是否更新成功 True / False 若False,不会将本批数据入到去重库,以便再次入库 """ try: add_count = self.to_db.add_batch( coll_name=table, datas=items, update_columns=update_keys or list(items[0].keys()), ) datas_size = len(items) update_count = datas_size - add_count msg = "共导出 %s 条数据到 %s, 新增 %s 条, 更新 %s 条" % ( datas_size, table, add_count, update_count, ) if update_keys: msg += " 更新字段为 {}".format(update_keys) log.info(msg) return True except Exception as e: log.exception(e) return False
2,405
Python
.py
72
20.236111
80
0.500716
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,891
__init__.py
demigody_nas-tools/third_party/feapder/feapder/pipelines/__init__.py
# -*- coding: utf-8 -*- """ Created on 2021/3/17 10:57 下午 --------- @summary: --------- @author: Boris @email: [email protected] """ import abc from typing import Dict, List, Tuple class BasePipeline(metaclass=abc.ABCMeta): """ pipeline 是单线程的,批量保存数据的操作,不建议在这里写网络请求代码,如下载图片等 """ @abc.abstractmethod def save_items(self, table, items: List[Dict]) -> bool: """ 保存数据 Args: table: 表名 items: 数据,[{},{},...] Returns: 是否保存成功 True / False 若False,不会将本批数据入到去重库,以便再次入库 """ return True def update_items(self, table, items: List[Dict], update_keys=Tuple) -> bool: """ 更新数据, 与UpdateItem配合使用,若爬虫中没使用UpdateItem,则可不实现此接口 Args: table: 表名 items: 数据,[{},{},...] update_keys: 更新的字段, 如 ("title", "publish_time") Returns: 是否更新成功 True / False 若False,不会将本批数据入到去重库,以便再次入库 """ return True def close(self): """ 关闭,爬虫结束时调用 Returns: """ pass
1,382
Python
.py
43
17.953488
80
0.541748
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,892
handle_failed_requests.py
demigody_nas-tools/third_party/feapder/feapder/core/handle_failed_requests.py
# -*- coding: utf-8 -*- """ Created on 2018-08-13 11:43:01 --------- @summary: --------- @author: Boris @email: [email protected] """ import feapder.setting as setting from feapder.buffer.request_buffer import RequestBuffer from feapder.db.redisdb import RedisDB from feapder.network.request import Request from feapder.utils.log import log class HandleFailedRequests: def __init__(self, redis_key): if redis_key.endswith(":z_failed_requests"): redis_key = redis_key.replace(":z_failed_requests", "") self._redisdb = RedisDB() self._request_buffer = RequestBuffer(redis_key) self._table_failed_request = setting.TAB_FAILED_REQUESTS.format( redis_key=redis_key ) def get_failed_requests(self, count=10000): failed_requests = self._redisdb.zget(self._table_failed_request, count=count) failed_requests = [eval(failed_request) for failed_request in failed_requests] return failed_requests def reput_failed_requests_to_requests(self): log.debug("正在重置失败的requests...") total_count = 0 while True: try: failed_requests = self.get_failed_requests() if not failed_requests: break for request in failed_requests: request["retry_times"] = 0 request_obj = Request.from_dict(request) self._request_buffer.put_request(request_obj) total_count += 1 except Exception as e: log.exception(e) self._request_buffer.flush() log.debug("重置%s条失败requests为待抓取requests" % total_count)
1,733
Python
.py
44
29.977273
86
0.625379
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,893
handle_failed_items.py
demigody_nas-tools/third_party/feapder/feapder/core/handle_failed_items.py
# -*- coding: utf-8 -*- """ Created on 2022/11/18 11:33 AM --------- @summary: --------- @author: Boris @email: [email protected] """ import feapder.setting as setting from feapder.buffer.item_buffer import ItemBuffer from feapder.db.redisdb import RedisDB from feapder.network.item import Item, UpdateItem from feapder.utils.log import log class HandleFailedItems: def __init__(self, redis_key, task_table=None, item_buffer=None): if redis_key.endswith(":s_failed_items"): redis_key = redis_key.replace(":s_failed_items", "") self._redisdb = RedisDB() self._item_buffer = item_buffer or ItemBuffer(redis_key, task_table=task_table) self._table_failed_items = setting.TAB_FAILED_ITEMS.format(redis_key=redis_key) def get_failed_items(self, count=1): failed_items = self._redisdb.sget( self._table_failed_items, count=count, is_pop=False ) return failed_items def reput_failed_items_to_db(self): log.debug("正在重新写入失败的items...") total_count = 0 while True: try: failed_items = self.get_failed_items() if not failed_items: break for data_str in failed_items: data = eval(data_str) for add in data.get("add"): table = add.get("table") datas = add.get("datas") for _data in datas: item = Item(**_data) item.table_name = table self._item_buffer.put_item(item) total_count += 1 for update in data.get("update"): table = update.get("table") datas = update.get("datas") update_keys = update.get("update_keys") for _data in datas: item = UpdateItem(**_data) item.table_name = table item.update_keys = update_keys self._item_buffer.put_item(item) total_count += 1 # 入库成功后删除 def delete_item(): self._redisdb.srem(self._table_failed_items, data_str) self._item_buffer.put_item(delete_item) self._item_buffer.flush() except Exception as e: log.exception(e) if total_count: log.debug("导入%s条失败item到数库" % total_count) else: log.debug("没有失败的item") def close(self): self._item_buffer.close()
2,808
Python
.py
67
27
87
0.511053
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,894
parser_control.py
demigody_nas-tools/third_party/feapder/feapder/core/parser_control.py
# -*- coding: utf-8 -*- """ Created on 2017-01-03 16:06 --------- @summary: parser 控制类 --------- @author: Boris @email: [email protected] """ import inspect import random import threading import time from collections.abc import Iterable import feapder.setting as setting import feapder.utils.tools as tools from feapder.buffer.item_buffer import ItemBuffer from feapder.buffer.request_buffer import AirSpiderRequestBuffer from feapder.core.base_parser import BaseParser from feapder.db.memorydb import MemoryDB from feapder.network.item import Item from feapder.network.request import Request from feapder.utils import metrics from feapder.utils.log import log class ParserControl(threading.Thread): DOWNLOAD_EXCEPTION = "download_exception" DOWNLOAD_SUCCESS = "download_success" DOWNLOAD_TOTAL = "download_total" PAESERS_EXCEPTION = "parser_exception" is_show_tip = False # 实时统计已做任务数及失败任务数,若失败任务数/已做任务数>0.5 则报警 _success_task_count = 0 _failed_task_count = 0 _total_task_count = 0 _hook_parsers = set() def __init__(self, collector, redis_key, request_buffer, item_buffer): super(ParserControl, self).__init__() self._parsers = [] self._collector = collector self._redis_key = redis_key self._request_buffer = request_buffer self._item_buffer = item_buffer self._thread_stop = False def run(self): self._thread_stop = False while not self._thread_stop: try: request = self._collector.get_request() if not request: if not self.is_show_tip: log.debug("等待任务...") self.is_show_tip = True continue self.is_show_tip = False self.deal_request(request) except Exception as e: log.exception(e) def is_not_task(self): return self.is_show_tip @classmethod def get_task_status_count(cls): return cls._failed_task_count, cls._success_task_count, cls._total_task_count def deal_request(self, request): response = None request_redis = request["request_redis"] request = request["request_obj"] del_request_redis_after_item_to_db = False del_request_redis_after_request_to_db = False for parser in self._parsers: if 
parser.name == request.parser_name: used_download_midware_enable = False try: self.__class__._total_task_count += 1 # 记录需下载的文档 self.record_download_status( ParserControl.DOWNLOAD_TOTAL, parser.name ) # 解析request if request.auto_request: request_temp = None response = None # 下载中间件 if request.download_midware: if isinstance(request.download_midware, (list, tuple)): request_temp = request for download_midware in request.download_midware: download_midware = ( download_midware if callable(download_midware) else tools.get_method(parser, download_midware) ) request_temp = download_midware(request_temp) else: download_midware = ( request.download_midware if callable(request.download_midware) else tools.get_method( parser, request.download_midware ) ) request_temp = download_midware(request) elif request.download_midware != False: request_temp = parser.download_midware(request) # 请求 if request_temp: if ( isinstance(request_temp, (tuple, list)) and len(request_temp) == 2 ): request_temp, response = request_temp if not isinstance(request_temp, Request): raise Exception( "download_midware need return a request, but received type: {}".format( type(request_temp) ) ) used_download_midware_enable = True if not response: response = ( request_temp.get_response() if not setting.RESPONSE_CACHED_USED else request_temp.get_response_from_cached( save_cached=False ) ) else: response = ( request.get_response() if not setting.RESPONSE_CACHED_USED else request.get_response_from_cached(save_cached=False) ) if response == None: raise Exception( "连接超时 url: %s" % (request.url or request_temp.url) ) # 校验 if parser.validate(request, response) == False: break else: response = None if request.callback: # 如果有parser的回调函数,则用回调处理 callback_parser = ( request.callback if callable(request.callback) else tools.get_method(parser, request.callback) ) results = callback_parser(request, response) else: # 否则默认用parser处理 results = parser.parse(request, response) if results and not isinstance(results, Iterable): 
raise Exception( "%s.%s返回值必须可迭代" % (parser.name, request.callback or "parse") ) # 标识上一个result是什么 result_type = 0 # 0\1\2 (初始值\request\item) # 此处判断是request 还是 item for result in results or []: if isinstance(result, Request): result_type = 1 # 给request的 parser_name 赋值 result.parser_name = result.parser_name or parser.name # 判断是同步的callback还是异步的 if result.request_sync: # 同步 request_dict = { "request_obj": result, "request_redis": None, } self.deal_request(request_dict) else: # 异步 # 将next_request 入库 self._request_buffer.put_request(result) del_request_redis_after_request_to_db = True elif isinstance(result, Item): result_type = 2 # 将item入库 self._item_buffer.put_item(result) # 需删除正在做的request del_request_redis_after_item_to_db = True elif callable(result): # result为可执行的无参函数 if result_type == 2: # item 的 callback,buffer里的item均入库后再执行 self._item_buffer.put_item(result) del_request_redis_after_item_to_db = True else: # result_type == 1: # request 的 callback,buffer里的request均入库后再执行。可能有的parser直接返回callback self._request_buffer.put_request(result) del_request_redis_after_request_to_db = True elif result is not None: function_name = "{}.{}".format( parser.name, ( request.callback and callable(request.callback) and getattr(request.callback, "__name__") or request.callback ) or "parse", ) raise TypeError( f"{function_name} result expect Request、Item or callback, bug get type: {type(result)}" ) except Exception as e: exception_type = ( str(type(e)).replace("<class '", "").replace("'>", "") ) if exception_type.startswith("requests"): # 记录下载失败的文档 self.record_download_status( ParserControl.DOWNLOAD_EXCEPTION, parser.name ) if request.retry_times % setting.PROXY_MAX_FAILED_TIMES == 0: request.del_proxy() else: # 记录解析程序异常 self.record_download_status( ParserControl.PAESERS_EXCEPTION, parser.name ) if setting.LOG_LEVEL == "DEBUG": # 只有debug模式下打印, 超时的异常篇幅太多 log.exception(e) log.error( """ -------------- %s.%s error ------------- error %s response %s deal request %s """ % ( parser.name, ( 
request.callback and callable(request.callback) and getattr(request.callback, "__name__") or request.callback ) or "parse", str(e), response, tools.dumps_json(request.to_dict, indent=28) if setting.LOG_LEVEL == "DEBUG" else request, ) ) request.error_msg = "%s: %s" % (exception_type, e) request.response = str(response) if "Invalid URL" in str(e): request.is_abandoned = True requests = parser.exception_request(request, response, e) or [ request ] if not isinstance(requests, Iterable): raise Exception( "%s.%s返回值必须可迭代" % (parser.name, "exception_request") ) for request in requests: if callable(request): self._request_buffer.put_request(request) continue if not isinstance(request, Request): raise Exception("exception_request 需 yield request") if ( request.retry_times + 1 > setting.SPIDER_MAX_RETRY_TIMES or request.is_abandoned ): self.__class__._failed_task_count += 1 # 记录失败任务数 # 处理failed_request的返回值 request 或 func results = parser.failed_request(request, response, e) or [ request ] if not isinstance(results, Iterable): raise Exception( "%s.%s返回值必须可迭代" % (parser.name, "failed_request") ) for result in results: if isinstance(result, Request): if setting.SAVE_FAILED_REQUEST: if used_download_midware_enable: # 去掉download_midware 添加的属性 original_request = ( Request.from_dict(eval(request_redis)) if request_redis else result ) original_request.error_msg = ( request.error_msg ) original_request.response = request.response self._request_buffer.put_failed_request( original_request ) else: self._request_buffer.put_failed_request( result ) elif callable(result): self._request_buffer.put_request(result) elif isinstance(result, Item): self._item_buffer.put_item(result) del_request_redis_after_request_to_db = True else: # 将 requests 重新入库 爬取 request.retry_times += 1 request.filter_repeat = False log.info( """ 入库 等待重试 url %s 重试次数 %s 最大允许重试次数 %s""" % ( request.url, request.retry_times, setting.SPIDER_MAX_RETRY_TIMES, ) ) if used_download_midware_enable: # 去掉download_midware 添加的属性 
使用原来的requests original_request = ( Request.from_dict(eval(request_redis)) if request_redis else request ) if hasattr(request, "error_msg"): original_request.error_msg = request.error_msg if hasattr(request, "response"): original_request.response = request.response original_request.retry_times = request.retry_times original_request.filter_repeat = request.filter_repeat self._request_buffer.put_request(original_request) else: self._request_buffer.put_request(request) del_request_redis_after_request_to_db = True else: # 记录下载成功的文档 self.record_download_status( ParserControl.DOWNLOAD_SUCCESS, parser.name ) # 记录成功任务数 self.__class__._success_task_count += 1 # 缓存下载成功的文档 if setting.RESPONSE_CACHED_ENABLE: request.save_cached( response=response, expire_time=setting.RESPONSE_CACHED_EXPIRE_TIME, ) finally: # 释放浏览器 if response and getattr(response, "browser", None): request.render_downloader.put_back(response.browser) break # 删除正在做的request 跟随item优先 if request_redis: if del_request_redis_after_item_to_db: self._item_buffer.put_item(request_redis) elif del_request_redis_after_request_to_db: self._request_buffer.put_del_request(request_redis) else: self._request_buffer.put_del_request(request_redis) if setting.SPIDER_SLEEP_TIME: if ( isinstance(setting.SPIDER_SLEEP_TIME, (tuple, list)) and len(setting.SPIDER_SLEEP_TIME) == 2 ): sleep_time = random.randint( int(setting.SPIDER_SLEEP_TIME[0]), int(setting.SPIDER_SLEEP_TIME[1]) ) time.sleep(sleep_time) else: time.sleep(setting.SPIDER_SLEEP_TIME) def record_download_status(self, status, spider): """ 记录html等文档下载状态 @return: """ metrics.emit_counter(f"{spider}:{status}", 1, classify="document") def stop(self): self._thread_stop = True self._started.clear() def add_parser(self, parser: BaseParser): # 动态增加parser.exception_request和parser.failed_request的参数, 兼容旧版本 if parser not in self.__class__._hook_parsers: self.__class__._hook_parsers.add(parser) if len(inspect.getfullargspec(parser.exception_request).args) == 3: _exception_request = 
parser.exception_request parser.exception_request = ( lambda request, response, e: _exception_request(request, response) ) if len(inspect.getfullargspec(parser.failed_request).args) == 3: _failed_request = parser.failed_request parser.failed_request = lambda request, response, e: _failed_request( request, response ) self._parsers.append(parser) class AirSpiderParserControl(ParserControl): is_show_tip = False # 实时统计已做任务数及失败任务数,若失败任务数/已做任务数>0.5 则报警 _success_task_count = 0 _failed_task_count = 0 def __init__( self, *, memory_db: MemoryDB, request_buffer: AirSpiderRequestBuffer, item_buffer: ItemBuffer, ): super(ParserControl, self).__init__() self._parsers = [] self._memory_db = memory_db self._thread_stop = False self._request_buffer = request_buffer self._item_buffer = item_buffer def run(self): while not self._thread_stop: try: request = self._memory_db.get() if not request: if not self.is_show_tip: log.debug("等待任务...") self.is_show_tip = True continue self.is_show_tip = False self.deal_request(request) except Exception as e: log.exception(e) def deal_request(self, request): response = None for parser in self._parsers: if parser.name == request.parser_name: try: self.__class__._total_task_count += 1 # 记录需下载的文档 self.record_download_status( ParserControl.DOWNLOAD_TOTAL, parser.name ) # 解析request if request.auto_request: request_temp = None response = None # 下载中间件 if request.download_midware: if isinstance(request.download_midware, (list, tuple)): request_temp = request for download_midware in request.download_midware: download_midware = ( download_midware if callable(download_midware) else tools.get_method(parser, download_midware) ) request_temp = download_midware(request_temp) else: download_midware = ( request.download_midware if callable(request.download_midware) else tools.get_method( parser, request.download_midware ) ) request_temp = download_midware(request) elif request.download_midware != False: request_temp = parser.download_midware(request) # 请求 if 
request_temp: if ( isinstance(request_temp, (tuple, list)) and len(request_temp) == 2 ): request_temp, response = request_temp if not isinstance(request_temp, Request): raise Exception( "download_midware need return a request, but received type: {}".format( type(request_temp) ) ) request = request_temp if not response: response = ( request.get_response() if not setting.RESPONSE_CACHED_USED else request.get_response_from_cached(save_cached=False) ) # 校验 if parser.validate(request, response) == False: break else: response = None if request.callback: # 如果有parser的回调函数,则用回调处理 callback_parser = ( request.callback if callable(request.callback) else tools.get_method(parser, request.callback) ) results = callback_parser(request, response) else: # 否则默认用parser处理 results = parser.parse(request, response) if results and not isinstance(results, Iterable): raise Exception( "%s.%s返回值必须可迭代" % (parser.name, request.callback or "parse") ) # 此处判断是request 还是 item for result in results or []: if isinstance(result, Request): # 给request的 parser_name 赋值 result.parser_name = result.parser_name or parser.name # 判断是同步的callback还是异步的 if result.request_sync: # 同步 self.deal_request(result) else: # 异步 # 将next_request 入库 self._request_buffer.put_request(result) elif isinstance(result, Item): self._item_buffer.put_item(result) elif result is not None: function_name = "{}.{}".format( parser.name, ( request.callback and callable(request.callback) and getattr(request.callback, "__name__") or request.callback ) or "parse", ) raise TypeError( f"{function_name} result expect Request or Item, bug get type: {type(result)}" ) except Exception as e: exception_type = ( str(type(e)).replace("<class '", "").replace("'>", "") ) if exception_type.startswith("requests"): # 记录下载失败的文档 self.record_download_status( ParserControl.DOWNLOAD_EXCEPTION, parser.name ) if request.retry_times % setting.PROXY_MAX_FAILED_TIMES == 0: request.del_proxy() else: # 记录解析程序异常 self.record_download_status( 
ParserControl.PAESERS_EXCEPTION, parser.name ) if setting.LOG_LEVEL == "DEBUG": # 只有debug模式下打印, 超时的异常篇幅太多 log.exception(e) log.error( """ -------------- %s.%s error ------------- error %s response %s deal request %s """ % ( parser.name, ( request.callback and callable(request.callback) and getattr(request.callback, "__name__") or request.callback ) or "parse", str(e), response, tools.dumps_json(request.to_dict, indent=28) if setting.LOG_LEVEL == "DEBUG" else request, ) ) request.error_msg = "%s: %s" % (exception_type, e) request.response = str(response) if "Invalid URL" in str(e): request.is_abandoned = True requests = parser.exception_request(request, response, e) or [ request ] if not isinstance(requests, Iterable): raise Exception( "%s.%s返回值必须可迭代" % (parser.name, "exception_request") ) for request in requests: if not isinstance(request, Request): raise Exception("exception_request 需 yield request") if ( request.retry_times + 1 > setting.SPIDER_MAX_RETRY_TIMES or request.is_abandoned ): self.__class__._failed_task_count += 1 # 记录失败任务数 # 处理failed_request的返回值 request 或 func results = parser.failed_request(request, response, e) or [ request ] if not isinstance(results, Iterable): raise Exception( "%s.%s返回值必须可迭代" % (parser.name, "failed_request") ) log.info( """ 任务超过最大重试次数,丢弃 url %s 重试次数 %s 最大允许重试次数 %s""" % ( request.url, request.retry_times, setting.SPIDER_MAX_RETRY_TIMES, ) ) else: # 将 requests 重新入库 爬取 request.retry_times += 1 request.filter_repeat = False log.info( """ 入库 等待重试 url %s 重试次数 %s 最大允许重试次数 %s""" % ( request.url, request.retry_times, setting.SPIDER_MAX_RETRY_TIMES, ) ) self._request_buffer.put_request(request) else: # 记录下载成功的文档 self.record_download_status( ParserControl.DOWNLOAD_SUCCESS, parser.name ) # 记录成功任务数 self.__class__._success_task_count += 1 # 缓存下载成功的文档 if setting.RESPONSE_CACHED_ENABLE: request.save_cached( response=response, expire_time=setting.RESPONSE_CACHED_EXPIRE_TIME, ) finally: # 释放浏览器 if response and getattr(response, "browser", None): 
request.render_downloader.put_back(response.browser) break if setting.SPIDER_SLEEP_TIME: if ( isinstance(setting.SPIDER_SLEEP_TIME, (tuple, list)) and len(setting.SPIDER_SLEEP_TIME) == 2 ): sleep_time = random.randint( int(setting.SPIDER_SLEEP_TIME[0]), int(setting.SPIDER_SLEEP_TIME[1]) ) time.sleep(sleep_time) else: time.sleep(setting.SPIDER_SLEEP_TIME)
32,962
Python
.py
648
24.248457
121
0.39649
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,895
collector.py
demigody_nas-tools/third_party/feapder/feapder/core/collector.py
# -*- coding: utf-8 -*- """ Created on 2016-12-23 11:24 --------- @summary: request 管理 --------- @author: Boris @email: [email protected] """ import threading import time from queue import Queue, Empty import feapder.setting as setting import feapder.utils.tools as tools from feapder.db.redisdb import RedisDB from feapder.network.request import Request from feapder.utils.log import log class Collector(threading.Thread): def __init__(self, redis_key): """ @summary: --------- @param redis_key: --------- @result: """ super(Collector, self).__init__() self._db = RedisDB() self._thread_stop = False self._todo_requests = Queue(maxsize=setting.COLLECTOR_TASK_COUNT) self._tab_requests = setting.TAB_REQUESTS.format(redis_key=redis_key) self._is_collector_task = False def run(self): self._thread_stop = False while not self._thread_stop: try: self.__input_data() except Exception as e: log.exception(e) time.sleep(0.1) self._is_collector_task = False def stop(self): self._thread_stop = True self._started.clear() def __input_data(self): if setting.COLLECTOR_TASK_COUNT / setting.SPIDER_THREAD_COUNT > 1 and ( self._todo_requests.qsize() > setting.SPIDER_THREAD_COUNT or self._todo_requests.qsize() >= self._todo_requests.maxsize ): time.sleep(0.1) return current_timestamp = tools.get_current_timestamp() # 取任务,只取当前时间搓以内的任务,同时将任务分数修改为 current_timestamp + setting.REQUEST_LOST_TIMEOUT requests_list = self._db.zrangebyscore_set_score( self._tab_requests, priority_min="-inf", priority_max=current_timestamp, score=current_timestamp + setting.REQUEST_LOST_TIMEOUT, count=setting.COLLECTOR_TASK_COUNT, ) if requests_list: self._is_collector_task = True # 存request self.__put_requests(requests_list) else: time.sleep(0.1) def __put_requests(self, requests_list): for request in requests_list: try: request_dict = { "request_obj": Request.from_dict(eval(request)), "request_redis": request, } except Exception as e: log.exception( """ error %s request %s """ % (e, request) ) request_dict = None if 
request_dict: self._todo_requests.put(request_dict) def get_request(self): try: request = self._todo_requests.get(timeout=1) return request except Empty as e: return None def get_requests_count(self): return ( self._todo_requests.qsize() or self._db.zget_count(self._tab_requests) or 0 ) def is_collector_task(self): return self._is_collector_task
3,256
Python
.py
96
23.458333
87
0.56039
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,896
__init__.py
demigody_nas-tools/third_party/feapder/feapder/core/__init__.py
# -*- coding: utf-8 -*- ''' Created on 2020/4/23 12:09 AM --------- @summary: --------- @author: Boris @email: [email protected] '''
136
Python
.py
9
14.222222
29
0.570313
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,897
scheduler.py
demigody_nas-tools/third_party/feapder/feapder/core/scheduler.py
# -*- coding: utf-8 -*- """ Created on 2017-01-09 10:38 --------- @summary: 组装parser、 parser_control 和 collector --------- @author: Boris @email: [email protected] """ import threading import time from collections.abc import Iterable import feapder.setting as setting import feapder.utils.tools as tools from feapder.buffer.item_buffer import ItemBuffer from feapder.buffer.request_buffer import RequestBuffer from feapder.core.base_parser import BaseParser from feapder.core.collector import Collector from feapder.core.handle_failed_requests import HandleFailedRequests from feapder.core.handle_failed_items import HandleFailedItems from feapder.core.parser_control import ParserControl from feapder.db.redisdb import RedisDB from feapder.network.item import Item from feapder.network.request import Request from feapder.utils import metrics from feapder.utils.log import log from feapder.utils.redis_lock import RedisLock SPIDER_START_TIME_KEY = "spider_start_time" SPIDER_END_TIME_KEY = "spider_end_time" SPIDER_LAST_TASK_COUNT_RECORD_TIME_KEY = "last_task_count_record_time" HEARTBEAT_TIME_KEY = "heartbeat_time" class Scheduler(threading.Thread): __custom_setting__ = {} def __init__( self, redis_key=None, thread_count=None, begin_callback=None, end_callback=None, delete_keys=(), keep_alive=None, auto_start_requests=None, batch_interval=0, wait_lock=True, task_table=None, **kwargs, ): """ @summary: 调度器 --------- @param redis_key: 爬虫request及item存放redis中的文件夹 @param thread_count: 线程数,默认为配置文件中的线程数 @param begin_callback: 爬虫开始回调函数 @param end_callback: 爬虫结束回调函数 @param delete_keys: 爬虫启动时删除的key,类型: 元组/bool/string。 支持正则 @param keep_alive: 爬虫是否常驻,默认否 @param auto_start_requests: 爬虫是否自动添加任务 @param batch_interval: 抓取时间间隔 默认为0 天为单位 多次启动时,只有当前时间与第一次抓取结束的时间间隔大于指定的时间间隔时,爬虫才启动 @param wait_lock: 下发任务时否等待锁,若不等待锁,可能会存在多进程同时在下发一样的任务,因此分布式环境下请将该值设置True @param task_table: 任务表, 批次爬虫传递 --------- @result: """ super(Scheduler, self).__init__() for key, value in self.__class__.__custom_setting__.items(): if 
key == "AUTO_STOP_WHEN_SPIDER_DONE": # 兼容老版本的配置 setattr(setting, "KEEP_ALIVE", not value) else: setattr(setting, key, value) self._redis_key = redis_key or setting.REDIS_KEY if not self._redis_key: raise Exception( """ redis_key 为redis中存放request与item的目录。不能为空, 可在setting中配置,如 REDIS_KEY = 'test' 或spider初始化时传参, 如 TestSpider(redis_key='test') """ ) self._request_buffer = RequestBuffer(redis_key) self._item_buffer = ItemBuffer(redis_key, task_table) self._collector = Collector(redis_key) self._parsers = [] self._parser_controls = [] self._parser_control_obj = ParserControl # 兼容老版本的参数 if "auto_stop_when_spider_done" in kwargs: self._keep_alive = not kwargs.get("auto_stop_when_spider_done") else: self._keep_alive = ( keep_alive if keep_alive is not None else setting.KEEP_ALIVE ) self._auto_start_requests = ( auto_start_requests if auto_start_requests is not None else setting.SPIDER_AUTO_START_REQUESTS ) self._batch_interval = batch_interval self._begin_callback = ( begin_callback if begin_callback else lambda: log.info("\n********** feapder begin **********") ) self._end_callback = ( end_callback if end_callback else lambda: log.info("\n********** feapder end **********") ) if thread_count: setattr(setting, "SPIDER_THREAD_COUNT", thread_count) self._thread_count = setting.SPIDER_THREAD_COUNT self._spider_name = self.name self._task_table = task_table self._tab_spider_status = setting.TAB_SPIDER_STATUS.format(redis_key=redis_key) self._tab_requests = setting.TAB_REQUESTS.format(redis_key=redis_key) self._tab_failed_requests = setting.TAB_FAILED_REQUESTS.format( redis_key=redis_key ) self._is_notify_end = False # 是否已经通知结束 self._last_task_count = 0 # 最近一次任务数量 self._last_check_task_count_time = 0 self._stop_heartbeat = False # 是否停止心跳 self._redisdb = RedisDB() # Request 缓存设置 Request.cached_redis_key = redis_key Request.cached_expire_time = setting.RESPONSE_CACHED_EXPIRE_TIME delete_keys = delete_keys or setting.DELETE_KEYS if delete_keys: self.delete_tables(delete_keys) 
self._last_check_task_status_time = 0 self.wait_lock = wait_lock self.init_metrics() # 重置丢失的任务 self.reset_task() self._stop_spider = False def init_metrics(self): """ 初始化打点系统 """ metrics.init(**setting.METRICS_OTHER_ARGS) def add_parser(self, parser, **kwargs): parser = parser(**kwargs) # parser 实例化 if isinstance(parser, BaseParser): self._parsers.append(parser) else: raise ValueError("类型错误,爬虫需继承feapder.BaseParser或feapder.BatchParser") def run(self): if not self.is_reach_next_spider_time(): return self._start() while True: try: if self._stop or self.all_thread_is_done(): if not self._is_notify_end: self.spider_end() # 跑完一轮 self._is_notify_end = True if not self._keep_alive: self._stop_all_thread() break else: self._is_notify_end = False self.check_task_status() except Exception as e: log.exception(e) tools.delay_time(1) # 1秒钟检查一次爬虫状态 def __add_task(self): # 启动parser 的 start_requests self.spider_begin() # 不自动结束的爬虫此处只能执行一遍 # 判断任务池中属否还有任务,若有接着抓取 todo_task_count = self._collector.get_requests_count() if todo_task_count: log.info("检查到有待做任务 %s 条,不重下发新任务,将接着上回异常终止处继续抓取" % todo_task_count) else: for parser in self._parsers: results = parser.start_requests() # 添加request到请求队列,由请求队列统一入库 if results and not isinstance(results, Iterable): raise Exception("%s.%s返回值必须可迭代" % (parser.name, "start_requests")) result_type = 1 for result in results or []: if isinstance(result, Request): result.parser_name = result.parser_name or parser.name self._request_buffer.put_request(result) result_type = 1 elif isinstance(result, Item): self._item_buffer.put_item(result) result_type = 2 elif callable(result): # callbale的request可能是更新数据库操作的函数 if result_type == 1: self._request_buffer.put_request(result) else: self._item_buffer.put_item(result) else: raise TypeError( "start_requests yield result type error, expect Request、Item、callback func, bug get type: {}".format( type(result) ) ) self._request_buffer.flush() self._item_buffer.flush() def _start(self): # 将失败的item入库 if setting.RETRY_FAILED_ITEMS: 
handle_failed_items = HandleFailedItems( redis_key=self._redis_key, task_table=self._task_table, item_buffer=self._item_buffer, ) handle_failed_items.reput_failed_items_to_db() # 心跳开始 self.heartbeat_start() # 启动request_buffer self._request_buffer.start() # 启动item_buffer self._item_buffer.start() # 启动collector self._collector.start() # 启动parser control for i in range(self._thread_count): parser_control = self._parser_control_obj( self._collector, self._redis_key, self._request_buffer, self._item_buffer, ) for parser in self._parsers: parser_control.add_parser(parser) parser_control.start() self._parser_controls.append(parser_control) # 下发任务 因为时间可能比较长,放到最后面 if setting.RETRY_FAILED_REQUESTS: # 重设失败的任务, 不用加锁,原子性操作 handle_failed_requests = HandleFailedRequests(self._redis_key) handle_failed_requests.reput_failed_requests_to_requests() # 下发新任务 if self._auto_start_requests: # 自动下发 if self.wait_lock: # 将添加任务处加锁,防止多进程之间添加重复的任务 with RedisLock(key=self._spider_name) as lock: if lock.locked: self.__add_task() else: self.__add_task() def all_thread_is_done(self): # 降低偶然性, 因为各个环节不是并发的,很有可能当时状态为假,但检测下一条时该状态为真。一次检测很有可能遇到这种偶然性 for i in range(3): # 检测 collector 状态 if ( self._collector.is_collector_task() or self._collector.get_requests_count() > 0 ): return False # 检测 parser_control 状态 for parser_control in self._parser_controls: if not parser_control.is_not_task(): return False # 检测 item_buffer 状态 if ( self._item_buffer.get_items_count() > 0 or self._item_buffer.is_adding_to_db() ): return False # 检测 request_buffer 状态 if ( self._request_buffer.get_requests_count() > 0 or self._request_buffer.is_adding_to_db() ): return False tools.delay_time(1) return True @tools.run_safe_model("check_task_status") def check_task_status(self): """ 检查任务状态 预警 """ # 每分钟检查一次 now_time = time.time() if now_time - self._last_check_task_status_time > 60: self._last_check_task_status_time = now_time else: return # 检查失败任务数量 超过1000 报警, failed_count = self._redisdb.zget_count(self._tab_failed_requests) if 
failed_count > setting.WARNING_FAILED_COUNT: # 发送报警 msg = "《%s》爬虫当前失败任务数:%s, 请检查爬虫是否正常" % (self._spider_name, failed_count) log.error(msg) self.send_msg( msg, level="error", message_prefix="《%s》爬虫当前失败任务数报警" % (self._spider_name), ) # parser_control实时统计已做任务数及失败任务数,若成功率<0.5 则报警 ( failed_task_count, success_task_count, total_task_count, ) = ParserControl.get_task_status_count() total_count = success_task_count + failed_task_count if total_count > 0: task_success_rate = success_task_count / total_count if task_success_rate < 0.5: # 发送报警 msg = "《%s》爬虫当前任务成功数%s, 失败数%s, 成功率 %.2f, 请检查爬虫是否正常" % ( self._spider_name, success_task_count, failed_task_count, task_success_rate, ) log.error(msg) self.send_msg( msg, level="error", message_prefix="《%s》爬虫当前任务成功率报警" % (self._spider_name), ) # 判断任务数是否变化 current_time = tools.get_current_timestamp() if ( current_time - self._last_check_task_count_time > setting.WARNING_CHECK_TASK_COUNT_INTERVAL ): if ( self._last_task_count and self._last_task_count == total_task_count and self._redisdb.zget_count(self._tab_requests) > 0 ): # 发送报警 msg = "《{}》爬虫停滞 {},请检查爬虫是否正常".format( self._spider_name, tools.format_seconds( current_time - self._last_check_task_count_time ), ) log.error(msg) self.send_msg( msg, level="error", message_prefix="《{}》爬虫停滞".format(self._spider_name), ) else: self._last_task_count = total_task_count self._last_check_task_count_time = current_time # 检查入库失败次数 if self._item_buffer.export_falied_times > setting.EXPORT_DATA_MAX_FAILED_TIMES: msg = "《{}》爬虫导出数据失败,失败次数:{}, 请检查爬虫是否正常".format( self._spider_name, self._item_buffer.export_falied_times ) log.error(msg) self.send_msg( msg, level="error", message_prefix="《%s》爬虫导出数据失败" % (self._spider_name) ) def delete_tables(self, delete_keys): if delete_keys == True: delete_keys = [self._redis_key + "*"] elif not isinstance(delete_keys, (list, tuple)): delete_keys = [delete_keys] for delete_key in delete_keys: if not delete_key.startswith(self._redis_key): delete_key = self._redis_key + 
delete_key keys = self._redisdb.getkeys(delete_key) for key in keys: log.debug("正在删除key %s" % key) self._redisdb.clear(key) def _stop_all_thread(self): self._request_buffer.stop() self._item_buffer.stop() # 停止 collector self._collector.stop() # 停止 parser_controls for parser_control in self._parser_controls: parser_control.stop() self.heartbeat_stop() self._started.clear() def send_msg(self, msg, level="debug", message_prefix=""): # log.debug("发送报警 level:{} msg{}".format(level, msg)) tools.send_msg(msg=msg, level=level, message_prefix=message_prefix) def spider_begin(self): """ @summary: start_monitor_task 方式启动,此函数与spider_end不在同一进程内,变量不可共享 --------- --------- @result: """ if self._begin_callback: self._begin_callback() for parser in self._parsers: parser.start_callback() # 记录开始时间 if not self._redisdb.hexists(self._tab_spider_status, SPIDER_START_TIME_KEY): current_timestamp = tools.get_current_timestamp() self._redisdb.hset( self._tab_spider_status, SPIDER_START_TIME_KEY, current_timestamp ) # 发送消息 self.send_msg("《%s》爬虫开始" % self._spider_name) def spider_end(self): self.record_end_time() if self._end_callback: self._end_callback() for parser in self._parsers: if not self._keep_alive: parser.close() parser.end_callback() if not self._keep_alive: # 关闭webdirver Request.render_downloader and Request.render_downloader.close_all() # 关闭打点 metrics.close() else: metrics.flush() # 计算抓取时长 data = self._redisdb.hget( self._tab_spider_status, SPIDER_START_TIME_KEY, is_pop=True ) if data: begin_timestamp = int(data) spand_time = tools.get_current_timestamp() - begin_timestamp msg = "《%s》爬虫%s,采集耗时 %s" % ( self._spider_name, "被终止" if self._stop_spider else "结束", tools.format_seconds(spand_time), ) log.info(msg) self.send_msg(msg) if self._keep_alive: log.info("爬虫不自动结束, 等待下一轮任务...") else: self.delete_tables(self._tab_spider_status) def record_end_time(self): # 记录结束时间 if self._batch_interval: current_timestamp = tools.get_current_timestamp() self._redisdb.hset( 
self._tab_spider_status, SPIDER_END_TIME_KEY, current_timestamp ) def is_reach_next_spider_time(self): if not self._batch_interval: return True last_spider_end_time = self._redisdb.hget( self._tab_spider_status, SPIDER_END_TIME_KEY ) if last_spider_end_time: last_spider_end_time = int(last_spider_end_time) current_timestamp = tools.get_current_timestamp() time_interval = current_timestamp - last_spider_end_time if time_interval < self._batch_interval * 86400: log.info( "上次运行结束时间为 {} 与当前时间间隔 为 {}, 小于规定的抓取时间间隔 {}。爬虫不执行,退出~".format( tools.timestamp_to_date(last_spider_end_time), tools.format_seconds(time_interval), tools.format_seconds(self._batch_interval * 86400), ) ) return False return True def join(self, timeout=None): """ 重写线程的join """ if not self._started.is_set(): return super().join() def heartbeat(self): while not self._stop_heartbeat: try: self._redisdb.hset( self._tab_spider_status, HEARTBEAT_TIME_KEY, tools.get_current_timestamp(), ) except Exception as e: log.error("心跳异常: {}".format(e)) time.sleep(5) def heartbeat_start(self): threading.Thread(target=self.heartbeat).start() def heartbeat_stop(self): self._stop_heartbeat = True def have_alive_spider(self, heartbeat_interval=10): heartbeat_time = self._redisdb.hget(self._tab_spider_status, HEARTBEAT_TIME_KEY) if heartbeat_time: heartbeat_time = int(heartbeat_time) current_timestamp = tools.get_current_timestamp() if current_timestamp - heartbeat_time < heartbeat_interval: return True return False def reset_task(self, heartbeat_interval=10): """ 重置丢失的任务 Returns: """ if self.have_alive_spider(heartbeat_interval=heartbeat_interval): current_timestamp = tools.get_current_timestamp() datas = self._redisdb.zrangebyscore_set_score( self._tab_requests, priority_min=current_timestamp, priority_max=current_timestamp + setting.REQUEST_LOST_TIMEOUT, score=300, count=None, ) lose_count = len(datas) if lose_count: log.info("重置丢失任务完毕,共{}条".format(len(datas))) def stop_spider(self): self._stop_spider = True
21,525
Python
.py
503
26.749503
129
0.547337
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,898
base_parser.py
demigody_nas-tools/third_party/feapder/feapder/core/base_parser.py
# -*- coding: utf-8 -*- """ Created on 2018-07-25 11:41:57 --------- @summary: parser 的基类 --------- @author: Boris @email: [email protected] """ import os import feapder.utils.tools as tools from feapder.db.mysqldb import MysqlDB from feapder.network.item import UpdateItem from feapder.utils.log import log class BaseParser(object): def start_requests(self): """ @summary: 添加初始url --------- --------- @result: yield Request() """ pass def download_midware(self, request): """ @summary: 下载中间件 可修改请求的一些参数, 或可自定义下载,然后返回 request, response --------- @param request: --------- @result: return request / request, response """ pass def validate(self, request, response): """ @summary: 校验函数, 可用于校验response是否正确 若函数内抛出异常,则重试请求 若返回True 或 None,则进入解析函数 若返回False,则抛弃当前请求 可通过request.callback_name 区分不同的回调函数,编写不同的校验逻辑 --------- @param request: @param response: --------- @result: True / None / False """ pass def parse(self, request, response): """ @summary: 默认的解析函数 --------- @param request: @param response: --------- @result: """ pass def exception_request(self, request, response, e): """ @summary: 请求或者parser里解析出异常的request --------- @param request: @param response: @param e: 异常 --------- @result: request / callback / None (返回值必须可迭代) """ pass def failed_request(self, request, response, e): """ @summary: 超过最大重试次数的request 可返回修改后的request 若不返回request,则将传进来的request直接人redis的failed表。否则将修改后的request入failed表 --------- @param request: @param response: @param e: 异常 --------- @result: request / item / callback / None (返回值必须可迭代) """ pass def start_callback(self): """ @summary: 程序开始的回调 --------- --------- @result: None """ pass def end_callback(self): """ @summary: 程序结束的回调 --------- --------- @result: None """ pass @property def name(self): return self.__class__.__name__ def close(self): pass class TaskParser(BaseParser): def __init__(self, task_table, task_state, mysqldb=None): self._mysqldb = mysqldb or MysqlDB() # mysqldb self._task_state = task_state # mysql中任务表的state字段名 self._task_table = task_table # 
mysql中的任务表 def add_task(self): """ @summary: 添加任务, 每次启动start_monitor 都会调用,且在init_task之前调用 --------- --------- @result: """ def start_requests(self, task): """ @summary: --------- @param task: 任务信息 list --------- @result: """ def update_task_state(self, task_id, state=1, **kwargs): """ @summary: 更新任务表中任务状态,做完每个任务时代码逻辑中要主动调用。可能会重写 调用方法为 yield lambda : self.update_task_state(task_id, state) --------- @param task_id: @param state: --------- @result: """ kwargs["id"] = task_id kwargs[self._task_state] = state sql = tools.make_update_sql( self._task_table, kwargs, condition="id = {task_id}".format(task_id=task_id) ) if self._mysqldb.update(sql): log.debug("置任务%s状态成功" % task_id) else: log.error("置任务%s状态失败 sql=%s" % (task_id, sql)) update_task = update_task_state def update_task_batch(self, task_id, state=1, **kwargs): """ 批量更新任务 多处调用,更新的字段必须一致 注意:需要 写成 yield update_task_batch(...) 否则不会更新 @param task_id: @param state: @param kwargs: @return: """ kwargs["id"] = task_id kwargs[self._task_state] = state update_item = UpdateItem(**kwargs) update_item.table_name = self._task_table update_item.name_underline = self._task_table + "_item" return update_item class BatchParser(TaskParser): """ @summary: 批次爬虫模版 --------- """ def __init__( self, task_table, batch_record_table, task_state, date_format, mysqldb=None ): super(BatchParser, self).__init__( task_table=task_table, task_state=task_state, mysqldb=mysqldb ) self._batch_record_table = batch_record_table # mysql 中的批次记录表 self._date_format = date_format # 批次日期格式 @property def batch_date(self): """ @summary: 获取批次时间 --------- --------- @result: """ batch_date = os.environ.get("batch_date") if not batch_date: sql = 'select date_format(batch_date, "{date_format}") from {batch_record_table} order by id desc limit 1'.format( date_format=self._date_format.replace(":%M", ":%i"), batch_record_table=self._batch_record_table, ) batch_info = MysqlDB().find(sql) # (('2018-08-19'),) if batch_info: os.environ["batch_date"] = batch_date = 
batch_info[0][0] else: log.error("需先运行 start_monitor_task()") os._exit(137) # 使退出码为35072 方便爬虫管理器重启 return batch_date
6,240
Python
.py
189
20.793651
126
0.526126
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)
2,288,899
task_spider.py
demigody_nas-tools/third_party/feapder/feapder/core/spiders/task_spider.py
# -*- coding: utf-8 -*- """ Created on 2020/4/22 12:06 AM --------- @summary: --------- @author: Boris @email: [email protected] """ import os import time import warnings from collections.abc import Iterable from typing import List, Tuple, Dict, Union import feapder.setting as setting import feapder.utils.tools as tools from feapder.core.base_parser import TaskParser from feapder.core.scheduler import Scheduler from feapder.db.mysqldb import MysqlDB from feapder.db.redisdb import RedisDB from feapder.network.item import Item from feapder.network.item import UpdateItem from feapder.network.request import Request from feapder.utils.log import log from feapder.utils.perfect_dict import PerfectDict CONSOLE_PIPELINE_PATH = "feapder.pipelines.console_pipeline.ConsolePipeline" class TaskSpider(TaskParser, Scheduler): def __init__( self, redis_key, task_table, task_table_type="mysql", task_keys=None, task_state="state", min_task_count=10000, check_task_interval=5, task_limit=10000, related_redis_key=None, related_batch_record=None, task_condition="", task_order_by="", thread_count=None, begin_callback=None, end_callback=None, delete_keys=(), keep_alive=None, batch_interval=0, use_mysql=True, **kwargs, ): """ @summary: 任务爬虫 必要条件 需要指定任务表,可以是redis表或者mysql表作为任务种子 redis任务种子表:zset类型。值为 {"xxx":xxx, "xxx2":"xxx2"};若为集成模式,需指定parser_name字段,如{"xxx":xxx, "xxx2":"xxx2", "parser_name":"TestTaskSpider"} mysql任务表: 任务表中必须有id及任务状态字段 如 state, 其他字段可根据爬虫需要的参数自行扩充。若为集成模式,需指定parser_name字段。 参考建表语句如下: CREATE TABLE `table_name` ( `id` int(11) NOT NULL AUTO_INCREMENT, `param` varchar(1000) DEFAULT NULL COMMENT '爬虫需要的抓取数据需要的参数', `state` int(11) DEFAULT NULL COMMENT '任务状态', `parser_name` varchar(255) DEFAULT NULL COMMENT '任务解析器的脚本类名', PRIMARY KEY (`id`), UNIQUE KEY `nui` (`param`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; --------- @param task_table: mysql中的任务表 或 redis中存放任务种子的key,zset类型 @param task_table_type: 任务表类型 支持 redis 、mysql @param task_keys: 需要获取的任务字段 列表 [] 
如需指定解析的parser,则需将parser_name字段取出来。 @param task_state: mysql中任务表的任务状态字段 @param min_task_count: redis 中最少任务数, 少于这个数量会从种子表中取任务 @param check_task_interval: 检查是否还有任务的时间间隔; @param task_limit: 每次从数据库中取任务的数量 @param redis_key: 任务等数据存放在redis中的key前缀 @param thread_count: 线程数,默认为配置文件中的线程数 @param begin_callback: 爬虫开始回调函数 @param end_callback: 爬虫结束回调函数 @param delete_keys: 爬虫启动时删除的key,类型: 元组/bool/string。 支持正则; 常用于清空任务队列,否则重启时会断点续爬 @param keep_alive: 爬虫是否常驻,默认否 @param related_redis_key: 有关联的其他爬虫任务表(redis)注意:要避免环路 如 A -> B & B -> A 。 @param related_batch_record: 有关联的其他爬虫批次表(mysql)注意:要避免环路 如 A -> B & B -> A 。 related_redis_key 与 related_batch_record 选其一配置即可;用于相关联的爬虫没结束时,本爬虫也不结束 若相关连的爬虫为批次爬虫,推荐以related_batch_record配置, 若相关连的爬虫为普通爬虫,无批次表,可以以related_redis_key配置 @param task_condition: 任务条件 用于从一个大任务表中挑选出数据自己爬虫的任务,即where后的条件语句 @param task_order_by: 取任务时的排序条件 如 id desc @param batch_interval: 抓取时间间隔 默认为0 天为单位 多次启动时,只有当前时间与第一次抓取结束的时间间隔大于指定的时间间隔时,爬虫才启动 @param use_mysql: 是否使用mysql数据库 --------- @result: """ Scheduler.__init__( self, redis_key=redis_key, thread_count=thread_count, begin_callback=begin_callback, end_callback=end_callback, delete_keys=delete_keys, keep_alive=keep_alive, auto_start_requests=False, batch_interval=batch_interval, task_table=task_table, **kwargs, ) self._redisdb = RedisDB() self._mysqldb = MysqlDB() if use_mysql else None self._task_table = task_table # mysql中的任务表 self._task_keys = task_keys # 需要获取的任务字段 self._task_table_type = task_table_type if self._task_table_type == "mysql" and not self._task_keys: raise Exception("需指定任务字段 使用task_keys") self._task_state = task_state # mysql中任务表的state字段名 self._min_task_count = min_task_count # redis 中最少任务数 self._check_task_interval = check_task_interval self._task_limit = task_limit # mysql中一次取的任务数量 self._related_task_tables = [ setting.TAB_REQUESTS.format(redis_key=redis_key) ] # 自己的task表也需要检查是否有任务 if related_redis_key: self._related_task_tables.append( setting.TAB_REQUESTS.format(redis_key=related_redis_key) ) 
self._related_batch_record = related_batch_record self._task_condition = task_condition self._task_condition_prefix_and = task_condition and " and {}".format( task_condition ) self._task_condition_prefix_where = task_condition and " where {}".format( task_condition ) self._task_order_by = task_order_by and " order by {}".format(task_order_by) self._is_more_parsers = True # 多模版类爬虫 self.reset_task() def add_parser(self, parser, **kwargs): parser = parser( self._task_table, self._task_state, self._mysqldb, **kwargs, ) # parser 实例化 self._parsers.append(parser) def start_monitor_task(self): """ @summary: 监控任务状态 --------- --------- @result: """ if not self._parsers: # 不是多模版模式, 将自己注入到parsers,自己为模版 self._is_more_parsers = False self._parsers.append(self) elif len(self._parsers) <= 1: self._is_more_parsers = False # 添加任务 for parser in self._parsers: parser.add_task() while True: try: # 检查redis中是否有任务 任务小于_min_task_count 则从mysql中取 tab_requests = setting.TAB_REQUESTS.format(redis_key=self._redis_key) todo_task_count = self._redisdb.zget_count(tab_requests) tasks = [] if todo_task_count < self._min_task_count: tasks = self.get_task(todo_task_count) if not tasks: if not todo_task_count: if self._keep_alive: log.info("任务均已做完,爬虫常驻, 等待新任务") time.sleep(self._check_task_interval) continue elif self.have_alive_spider(): log.info("任务均已做完,但还有爬虫在运行,等待爬虫结束") time.sleep(self._check_task_interval) continue elif not self.related_spider_is_done(): continue else: log.info("任务均已做完,爬虫结束") break else: log.info("redis 中尚有%s条积压任务,暂时不派发新任务" % todo_task_count) if not tasks: if todo_task_count >= self._min_task_count: # log.info('任务正在进行 redis中剩余任务 %s' % todo_task_count) pass else: log.info("无待做种子 redis中剩余任务 %s" % todo_task_count) else: # make start requests self.distribute_task(tasks) log.info(f"添加任务到redis成功 共{len(tasks)}条") except Exception as e: log.exception(e) time.sleep(self._check_task_interval) def get_task(self, todo_task_count) -> List[Union[Tuple, Dict]]: """ 获取任务 Args: todo_task_count: 
redis里剩余的任务数 Returns: """ tasks = [] if self._task_table_type == "mysql": # 从mysql中取任务 log.info("redis 中剩余任务%s 数量过小 从mysql中取任务追加" % todo_task_count) tasks = self.get_todo_task_from_mysql() if not tasks: # 状态为0的任务已经做完,需要检查状态为2的任务是否丢失 # redis 中无待做任务,此时mysql中状态为2的任务为丢失任务。需重新做 if todo_task_count == 0: log.info("无待做任务,尝试取丢失的任务") tasks = self.get_doing_task_from_mysql() elif self._task_table_type == "redis": log.info("redis 中剩余任务%s 数量过小 从redis种子任务表中取任务追加" % todo_task_count) tasks = self.get_task_from_redis() else: raise Exception( f"task_table_type expect mysql or redis,bug got {self._task_table_type}" ) return tasks def distribute_task(self, tasks): """ @summary: 分发任务 --------- @param tasks: --------- @result: """ if self._is_more_parsers: # 为多模版类爬虫,需要下发指定的parser for task in tasks: for parser in self._parsers: # 寻找task对应的parser if parser.name in task: if isinstance(task, dict): task = PerfectDict(_dict=task) else: task = PerfectDict( _dict=dict(zip(self._task_keys, task)), _values=list(task), ) requests = parser.start_requests(task) if requests and not isinstance(requests, Iterable): raise Exception( "%s.%s返回值必须可迭代" % (parser.name, "start_requests") ) result_type = 1 for request in requests or []: if isinstance(request, Request): request.parser_name = request.parser_name or parser.name self._request_buffer.put_request(request) result_type = 1 elif isinstance(request, Item): self._item_buffer.put_item(request) result_type = 2 if ( self._item_buffer.get_items_count() >= setting.ITEM_MAX_CACHED_COUNT ): self._item_buffer.flush() elif callable(request): # callbale的request可能是更新数据库操作的函数 if result_type == 1: self._request_buffer.put_request(request) else: self._item_buffer.put_item(request) if ( self._item_buffer.get_items_count() >= setting.ITEM_MAX_CACHED_COUNT ): self._item_buffer.flush() else: raise TypeError( "start_requests yield result type error, expect Request、Item、callback func, bug get type: {}".format( type(requests) ) ) break else: # task没对应的parser 
则将task下发到所有的parser for task in tasks: for parser in self._parsers: if isinstance(task, dict): task = PerfectDict(_dict=task) else: task = PerfectDict( _dict=dict(zip(self._task_keys, task)), _values=list(task) ) requests = parser.start_requests(task) if requests and not isinstance(requests, Iterable): raise Exception( "%s.%s返回值必须可迭代" % (parser.name, "start_requests") ) result_type = 1 for request in requests or []: if isinstance(request, Request): request.parser_name = request.parser_name or parser.name self._request_buffer.put_request(request) result_type = 1 elif isinstance(request, Item): self._item_buffer.put_item(request) result_type = 2 if ( self._item_buffer.get_items_count() >= setting.ITEM_MAX_CACHED_COUNT ): self._item_buffer.flush() elif callable(request): # callbale的request可能是更新数据库操作的函数 if result_type == 1: self._request_buffer.put_request(request) else: self._item_buffer.put_item(request) if ( self._item_buffer.get_items_count() >= setting.ITEM_MAX_CACHED_COUNT ): self._item_buffer.flush() self._request_buffer.flush() self._item_buffer.flush() def get_task_from_redis(self): tasks = self._redisdb.zget(self._task_table, count=self._task_limit) tasks = [eval(task) for task in tasks] return tasks def get_todo_task_from_mysql(self): """ @summary: 取待做的任务 --------- --------- @result: """ # TODO 分批取数据 每批最大取 1000000个,防止内存占用过大 # 查询任务 task_keys = ", ".join([f"`{key}`" for key in self._task_keys]) sql = "select %s from %s where %s = 0%s%s limit %s" % ( task_keys, self._task_table, self._task_state, self._task_condition_prefix_and, self._task_order_by, self._task_limit, ) tasks = self._mysqldb.find(sql) if tasks: # 更新任务状态 for i in range(0, len(tasks), 10000): # 10000 一批量更新 task_ids = str( tuple([task[0] for task in tasks[i : i + 10000]]) ).replace(",)", ")") sql = "update %s set %s = 2 where id in %s" % ( self._task_table, self._task_state, task_ids, ) self._mysqldb.update(sql) return tasks def get_doing_task_from_mysql(self): """ @summary: 取正在做的任务 --------- 
--------- @result: """ # 查询任务 task_keys = ", ".join([f"`{key}`" for key in self._task_keys]) sql = "select %s from %s where %s = 2%s%s limit %s" % ( task_keys, self._task_table, self._task_state, self._task_condition_prefix_and, self._task_order_by, self._task_limit, ) tasks = self._mysqldb.find(sql) return tasks def get_lose_task_count(self): sql = "select count(1) from %s where %s = 2%s" % ( self._task_table, self._task_state, self._task_condition_prefix_and, ) doing_count = self._mysqldb.find(sql)[0][0] return doing_count def reset_lose_task_from_mysql(self): """ @summary: 重置丢失任务为待做 --------- --------- @result: """ sql = "update {table} set {state} = 0 where {state} = 2{task_condition}".format( table=self._task_table, state=self._task_state, task_condition=self._task_condition_prefix_and, ) return self._mysqldb.update(sql) def related_spider_is_done(self): """ 相关连的爬虫是否跑完 @return: True / False / None 表示无相关的爬虫 可由自身的total_count 和 done_count 来判断 """ for related_redis_task_table in self._related_task_tables: if self._redisdb.exists_key(related_redis_task_table): log.info(f"依赖的爬虫还未结束,任务表为:{related_redis_task_table}") return False if self._related_batch_record: sql = "select is_done from {} order by id desc limit 1".format( self._related_batch_record ) is_done = self._mysqldb.find(sql) is_done = is_done[0][0] if is_done else None if is_done is None: log.warning("相关联的批次表不存在或无批次信息") return True if not is_done: log.info(f"依赖的爬虫还未结束,批次表为:{self._related_batch_record}") return False return True # -------- 批次结束逻辑 ------------ def task_is_done(self): """ @summary: 检查种子表是否做完 --------- --------- @result: True / False (做完 / 未做完) """ is_done = False if self._task_table_type == "mysql": sql = "select 1 from %s where (%s = 0 or %s=2)%s limit 1" % ( self._task_table, self._task_state, self._task_state, self._task_condition_prefix_and, ) count = self._mysqldb.find(sql) # [(1,)] / [] elif self._task_table_type == "redis": count = self._redisdb.zget_count(self._task_table) else: raise 
Exception( f"task_table_type expect mysql or redis,bug got {self._task_table_type}" ) if not count: log.info("种子表中任务均已完成") is_done = True return is_done def run(self): """ @summary: 重写run方法 检查mysql中的任务是否做完, 做完停止 --------- --------- @result: """ try: if not self.is_reach_next_spider_time(): return if not self._parsers: # 不是add_parser 模式 self._parsers.append(self) self._start() while True: try: if self._stop_spider or ( self.all_thread_is_done() and self.task_is_done() and self.related_spider_is_done() ): # redis全部的任务已经做完 并且mysql中的任务已经做完(检查各个线程all_thread_is_done,防止任务没做完,就更新任务状态,导致程序结束的情况) if not self._is_notify_end: self.spider_end() self._is_notify_end = True if not self._keep_alive: self._stop_all_thread() break else: log.info("常驻爬虫,等待新任务") else: self._is_notify_end = False self.check_task_status() except Exception as e: log.exception(e) tools.delay_time(10) # 10秒钟检查一次爬虫状态 except Exception as e: msg = "《%s》主线程异常 爬虫结束 exception: %s" % (self.name, e) log.error(msg) self.send_msg( msg, level="error", message_prefix="《%s》爬虫异常结束".format(self.name) ) os._exit(137) # 使退出码为35072 方便爬虫管理器重启 @classmethod def to_DebugTaskSpider(cls, *args, **kwargs): # DebugBatchSpider 继承 cls DebugTaskSpider.__bases__ = (cls,) DebugTaskSpider.__name__ = cls.__name__ return DebugTaskSpider(*args, **kwargs) class DebugTaskSpider(TaskSpider): """ Debug批次爬虫 """ __debug_custom_setting__ = dict( COLLECTOR_TASK_COUNT=1, # SPIDER SPIDER_THREAD_COUNT=1, SPIDER_SLEEP_TIME=0, SPIDER_MAX_RETRY_TIMES=10, REQUEST_LOST_TIMEOUT=600, # 10分钟 PROXY_ENABLE=False, RETRY_FAILED_REQUESTS=False, # 保存失败的request SAVE_FAILED_REQUEST=False, # 过滤 ITEM_FILTER_ENABLE=False, REQUEST_FILTER_ENABLE=False, OSS_UPLOAD_TABLES=(), DELETE_KEYS=True, ) def __init__( self, task_id=None, task=None, save_to_db=False, update_task=False, *args, **kwargs, ): """ @param task_id: 任务id @param task: 任务 task 与 task_id 二者选一即可。如 task = {"url":""} @param save_to_db: 数据是否入库 默认否 @param update_task: 是否更新任务 默认否 @param args: @param kwargs: """ 
warnings.warn( "您正处于debug模式下,该模式下不会更新任务状态及数据入库,仅用于调试。正式发布前请更改为正常模式", category=Warning ) if not task and not task_id: raise Exception("task_id 与 task 不能同时为空") kwargs["redis_key"] = kwargs["redis_key"] + "_debug" if not save_to_db: self.__class__.__debug_custom_setting__["ITEM_PIPELINES"] = [ CONSOLE_PIPELINE_PATH ] self.__class__.__custom_setting__.update( self.__class__.__debug_custom_setting__ ) super(DebugTaskSpider, self).__init__(*args, **kwargs) self._task_id = task_id self._task = task self._update_task = update_task def start_monitor_task(self): """ @summary: 监控任务状态 --------- --------- @result: """ if not self._parsers: # 不是多模版模式, 将自己注入到parsers,自己为模版 self._is_more_parsers = False self._parsers.append(self) elif len(self._parsers) <= 1: self._is_more_parsers = False if self._task: self.distribute_task([self._task]) else: tasks = self.get_todo_task_from_mysql() if not tasks: raise Exception("未获取到任务 请检查 task_id: {} 是否存在".format(self._task_id)) self.distribute_task(tasks) log.debug("下发任务完毕") def get_todo_task_from_mysql(self): """ @summary: 取待做的任务 --------- --------- @result: """ # 查询任务 task_keys = ", ".join([f"`{key}`" for key in self._task_keys]) sql = "select %s from %s where id=%s" % ( task_keys, self._task_table, self._task_id, ) tasks = self._mysqldb.find(sql) return tasks def save_cached(self, request, response, table): pass def update_task_state(self, task_id, state=1, *args, **kwargs): """ @summary: 更新任务表中任务状态,做完每个任务时代码逻辑中要主动调用。可能会重写 调用方法为 yield lambda : self.update_task_state(task_id, state) --------- @param task_id: @param state: --------- @result: """ if self._update_task: kwargs["id"] = task_id kwargs[self._task_state] = state sql = tools.make_update_sql( self._task_table, kwargs, condition="id = {task_id}".format(task_id=task_id), ) if self._mysqldb.update(sql): log.debug("置任务%s状态成功" % task_id) else: log.error("置任务%s状态失败 sql=%s" % (task_id, sql)) def update_task_batch(self, task_id, state=1, *args, **kwargs): """ 批量更新任务 多处调用,更新的字段必须一致 注意:需要 写成 
yield update_task_batch(...) 否则不会更新 @param task_id: @param state: @param kwargs: @return: """ if self._update_task: kwargs["id"] = task_id kwargs[self._task_state] = state update_item = UpdateItem(**kwargs) update_item.table_name = self._task_table update_item.name_underline = self._task_table + "_item" return update_item def run(self): self.start_monitor_task() if not self._parsers: # 不是add_parser 模式 self._parsers.append(self) self._start() while True: try: if self.all_thread_is_done(): self._stop_all_thread() break except Exception as e: log.exception(e) tools.delay_time(1) # 1秒钟检查一次爬虫状态 self.delete_tables([self._redis_key + "*"])
27,875
Python
.py
631
25.302694
139
0.496886
demigody/nas-tools
8
1
0
AGPL-3.0
9/5/2024, 10:48:34 PM (Europe/Amsterdam)