navpan2 commited on
Commit
94ecfcc
·
1 Parent(s): 6912a0e

Upload 42 files

Browse files
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ assets/instagram.png filter=lfs diff=lfs merge=lfs -text
Dockerfile ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Container image for the Streamlit front-end (app.py).
FROM python:3.10

# Set up a new user named "user" with user ID 1000
RUN useradd -m -u 1000 user

# Switch to the "user" user
USER user
WORKDIR /code

# pip installs below run as "user", so console scripts (e.g. `streamlit`)
# land in /home/user/.local/bin, which is NOT on PATH in the base image.
# Without this line the CMD fails with "streamlit: command not found".
ENV PATH="/home/user/.local/bin:${PATH}"

COPY --chown=user ./requirements.txt /code/requirements.txt

RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt

COPY --chown=user . .

# Expose the port on which Streamlit will run
EXPOSE 7860

# Set the entry point for the Streamlit app
CMD ["streamlit", "run", "app.py", "--server.port", "7860"]
LICENSE ADDED
@@ -0,0 +1,201 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Apache License
2
+ Version 2.0, January 2004
3
+ http://www.apache.org/licenses/
4
+
5
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+ 1. Definitions.
8
+
9
+ "License" shall mean the terms and conditions for use, reproduction,
10
+ and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+ "Licensor" shall mean the copyright owner or entity authorized by
13
+ the copyright owner that is granting the License.
14
+
15
+ "Legal Entity" shall mean the union of the acting entity and all
16
+ other entities that control, are controlled by, or are under common
17
+ control with that entity. For the purposes of this definition,
18
+ "control" means (i) the power, direct or indirect, to cause the
19
+ direction or management of such entity, whether by contract or
20
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+ outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+ "You" (or "Your") shall mean an individual or Legal Entity
24
+ exercising permissions granted by this License.
25
+
26
+ "Source" form shall mean the preferred form for making modifications,
27
+ including but not limited to software source code, documentation
28
+ source, and configuration files.
29
+
30
+ "Object" form shall mean any form resulting from mechanical
31
+ transformation or translation of a Source form, including but
32
+ not limited to compiled object code, generated documentation,
33
+ and conversions to other media types.
34
+
35
+ "Work" shall mean the work of authorship, whether in Source or
36
+ Object form, made available under the License, as indicated by a
37
+ copyright notice that is included in or attached to the work
38
+ (an example is provided in the Appendix below).
39
+
40
+ "Derivative Works" shall mean any work, whether in Source or Object
41
+ form, that is based on (or derived from) the Work and for which the
42
+ editorial revisions, annotations, elaborations, or other modifications
43
+ represent, as a whole, an original work of authorship. For the purposes
44
+ of this License, Derivative Works shall not include works that remain
45
+ separable from, or merely link (or bind by name) to the interfaces of,
46
+ the Work and Derivative Works thereof.
47
+
48
+ "Contribution" shall mean any work of authorship, including
49
+ the original version of the Work and any modifications or additions
50
+ to that Work or Derivative Works thereof, that is intentionally
51
+ submitted to Licensor for inclusion in the Work by the copyright owner
52
+ or by an individual or Legal Entity authorized to submit on behalf of
53
+ the copyright owner. For the purposes of this definition, "submitted"
54
+ means any form of electronic, verbal, or written communication sent
55
+ to the Licensor or its representatives, including but not limited to
56
+ communication on electronic mailing lists, source code control systems,
57
+ and issue tracking systems that are managed by, or on behalf of, the
58
+ Licensor for the purpose of discussing and improving the Work, but
59
+ excluding communication that is conspicuously marked or otherwise
60
+ designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+ "Contributor" shall mean Licensor and any individual or Legal Entity
63
+ on behalf of whom a Contribution has been received by Licensor and
64
+ subsequently incorporated within the Work.
65
+
66
+ 2. Grant of Copyright License. Subject to the terms and conditions of
67
+ this License, each Contributor hereby grants to You a perpetual,
68
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69
+ copyright license to reproduce, prepare Derivative Works of,
70
+ publicly display, publicly perform, sublicense, and distribute the
71
+ Work and such Derivative Works in Source or Object form.
72
+
73
+ 3. Grant of Patent License. Subject to the terms and conditions of
74
+ this License, each Contributor hereby grants to You a perpetual,
75
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+ (except as stated in this section) patent license to make, have made,
77
+ use, offer to sell, sell, import, and otherwise transfer the Work,
78
+ where such license applies only to those patent claims licensable
79
+ by such Contributor that are necessarily infringed by their
80
+ Contribution(s) alone or by combination of their Contribution(s)
81
+ with the Work to which such Contribution(s) was submitted. If You
82
+ institute patent litigation against any entity (including a
83
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
84
+ or a Contribution incorporated within the Work constitutes direct
85
+ or contributory patent infringement, then any patent licenses
86
+ granted to You under this License for that Work shall terminate
87
+ as of the date such litigation is filed.
88
+
89
+ 4. Redistribution. You may reproduce and distribute copies of the
90
+ Work or Derivative Works thereof in any medium, with or without
91
+ modifications, and in Source or Object form, provided that You
92
+ meet the following conditions:
93
+
94
+ (a) You must give any other recipients of the Work or
95
+ Derivative Works a copy of this License; and
96
+
97
+ (b) You must cause any modified files to carry prominent notices
98
+ stating that You changed the files; and
99
+
100
+ (c) You must retain, in the Source form of any Derivative Works
101
+ that You distribute, all copyright, patent, trademark, and
102
+ attribution notices from the Source form of the Work,
103
+ excluding those notices that do not pertain to any part of
104
+ the Derivative Works; and
105
+
106
+ (d) If the Work includes a "NOTICE" text file as part of its
107
+ distribution, then any Derivative Works that You distribute must
108
+ include a readable copy of the attribution notices contained
109
+ within such NOTICE file, excluding those notices that do not
110
+ pertain to any part of the Derivative Works, in at least one
111
+ of the following places: within a NOTICE text file distributed
112
+ as part of the Derivative Works; within the Source form or
113
+ documentation, if provided along with the Derivative Works; or,
114
+ within a display generated by the Derivative Works, if and
115
+ wherever such third-party notices normally appear. The contents
116
+ of the NOTICE file are for informational purposes only and
117
+ do not modify the License. You may add Your own attribution
118
+ notices within Derivative Works that You distribute, alongside
119
+ or as an addendum to the NOTICE text from the Work, provided
120
+ that such additional attribution notices cannot be construed
121
+ as modifying the License.
122
+
123
+ You may add Your own copyright statement to Your modifications and
124
+ may provide additional or different license terms and conditions
125
+ for use, reproduction, or distribution of Your modifications, or
126
+ for any such Derivative Works as a whole, provided Your use,
127
+ reproduction, and distribution of the Work otherwise complies with
128
+ the conditions stated in this License.
129
+
130
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
131
+ any Contribution intentionally submitted for inclusion in the Work
132
+ by You to the Licensor shall be under the terms and conditions of
133
+ this License, without any additional terms or conditions.
134
+ Notwithstanding the above, nothing herein shall supersede or modify
135
+ the terms of any separate license agreement you may have executed
136
+ with Licensor regarding such Contributions.
137
+
138
+ 6. Trademarks. This License does not grant permission to use the trade
139
+ names, trademarks, service marks, or product names of the Licensor,
140
+ except as required for reasonable and customary use in describing the
141
+ origin of the Work and reproducing the content of the NOTICE file.
142
+
143
+ 7. Disclaimer of Warranty. Unless required by applicable law or
144
+ agreed to in writing, Licensor provides the Work (and each
145
+ Contributor provides its Contributions) on an "AS IS" BASIS,
146
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147
+ implied, including, without limitation, any warranties or conditions
148
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149
+ PARTICULAR PURPOSE. You are solely responsible for determining the
150
+ appropriateness of using or redistributing the Work and assume any
151
+ risks associated with Your exercise of permissions under this License.
152
+
153
+ 8. Limitation of Liability. In no event and under no legal theory,
154
+ whether in tort (including negligence), contract, or otherwise,
155
+ unless required by applicable law (such as deliberate and grossly
156
+ negligent acts) or agreed to in writing, shall any Contributor be
157
+ liable to You for damages, including any direct, indirect, special,
158
+ incidental, or consequential damages of any character arising as a
159
+ result of this License or out of the use or inability to use the
160
+ Work (including but not limited to damages for loss of goodwill,
161
+ work stoppage, computer failure or malfunction, or any and all
162
+ other commercial damages or losses), even if such Contributor
163
+ has been advised of the possibility of such damages.
164
+
165
+ 9. Accepting Warranty or Additional Liability. While redistributing
166
+ the Work or Derivative Works thereof, You may choose to offer,
167
+ and charge a fee for, acceptance of support, warranty, indemnity,
168
+ or other liability obligations and/or rights consistent with this
169
+ License. However, in accepting such obligations, You may act only
170
+ on Your own behalf and on Your sole responsibility, not on behalf
171
+ of any other Contributor, and only if You agree to indemnify,
172
+ defend, and hold each Contributor harmless for any liability
173
+ incurred by, or claims asserted against, such Contributor by reason
174
+ of your accepting any such warranty or additional liability.
175
+
176
+ END OF TERMS AND CONDITIONS
177
+
178
+ APPENDIX: How to apply the Apache License to your work.
179
+
180
+ To apply the Apache License to your work, attach the following
181
+ boilerplate notice, with the fields enclosed by brackets "[]"
182
+ replaced with your own identifying information. (Don't include
183
+ the brackets!) The text should be enclosed in the appropriate
184
+ comment syntax for the file format. We also recommend that a
185
+ file or class name and description of purpose be included on the
186
+ same "printed page" as the copyright notice for easier
187
+ identification within third-party archives.
188
+
189
+ Copyright [yyyy] [name of copyright owner]
190
+
191
+ Licensed under the Apache License, Version 2.0 (the "License");
192
+ you may not use this file except in compliance with the License.
193
+ You may obtain a copy of the License at
194
+
195
+ http://www.apache.org/licenses/LICENSE-2.0
196
+
197
+ Unless required by applicable law or agreed to in writing, software
198
+ distributed under the License is distributed on an "AS IS" BASIS,
199
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200
+ See the License for the specific language governing permissions and
201
+ limitations under the License.
Makefile ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Helper targets for building and running ytdlbot with docker-compose.

# YAML snippet appended to worker.yml by `make nolog`; disables Docker
# log collection for the worker service.
define NOLOGGING

    logging:
        driver: none
endef
export NOLOGGING

# Default: pull the prebuilt image from Docker Hub.
default:
	docker pull bennythink/ytdlbot

# Start the bot stack, then reclaim disk space from unused images/volumes.
bot:
	make
	docker-compose up -d
	docker system prune -a --volumes -f

# Start a worker stack.
worker:
	make
	docker-compose -f worker.yml up -d
	docker system prune -a --volumes -f
	sleep 5

# Start a worker with compose `--compatibility` mode (applies deploy
# resource limits on non-swarm hosts).
weak-worker:
	make
	docker-compose --compatibility -f worker.yml up -d
	docker system prune -a --volumes -f
	sleep 5

# Upgrade every worker via the helper script.
upgrade-all-worker:
	bash upgrade_worker.sh

# Tag the current commit as vYYYY-MM-DD_<short-sha> and push tags.
tag:
	git tag -a v$(shell date "+%Y-%m-%d")_$(shell git rev-parse --short HEAD) -m v$(shell date "+%Y-%m-%d")
	git push --tags

# Append the NOLOGGING snippet (above) to worker.yml.
nolog:
	echo "$$NOLOGGING">> worker.yml

# Create an empty dbm database file used by Flower for persistence.
flower:
	echo 'import dbm;dbm.open("data/flower","n");exit()'| python3

# Build the image locally and start bot + worker together.
up:
	docker build -t bennythink/ytdlbot:latest .
	docker-compose -f docker-compose.yml -f worker.yml up -d

# Show status of bot + worker services.
ps:
	docker-compose -f docker-compose.yml -f worker.yml ps

# Stop bot + worker services.
down:
	docker-compose -f docker-compose.yml -f worker.yml down

# Tail logs of the worker and ytdl services.
logs:
	docker-compose -f docker-compose.yml -f worker.yml logs -f worker ytdl
Procfile ADDED
@@ -0,0 +1 @@
 
 
1
+ worker: python ytdlbot/ytdl_bot.py
app.json ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "YouTube-Downloader",
3
+ "description": "A Telegrambot to download youtube video",
4
+ "repository": "https://github.com/tgbot-collection/ytdlbot",
5
+ "logo": "https://avatars.githubusercontent.com/u/73354211?s=200&v=4",
6
+ "keywords": [
7
+ "telegram",
8
+ "youtube-dl"
9
+ ],
10
+ "env": {
11
+ "TOKEN": {
12
+ "description": "Bot token",
13
+ "value": "token"
14
+ },
15
+ "APP_ID": {
16
+ "description": "APP ID",
17
+ "value": "12345"
18
+ },
19
+ "APP_HASH": {
20
+ "description": "APP HASH",
21
+ "value": "12345abc"
22
+ },
23
+ "OWNER": {
24
+ "description": "Your telegram username",
25
+ "value": "username",
26
+ "required": false
27
+ }
28
+ },
29
+ "formation": {
30
+ "worker": {
31
+ "quantity": 1,
32
+ "size": "eco"
33
+ }
34
+ },
35
+ "buildpacks": [
36
+ {
37
+ "url": "https://github.com/heroku/heroku-buildpack-python.git"
38
+ },
39
+ {
40
+ "url": "https://github.com/jonathanong/heroku-buildpack-ffmpeg-latest.git"
41
+ }
42
+ ]
43
+ }
app.py ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import streamlit as st
3
+
4
def run_python_file(file_path):
    """Run *file_path* with the current Python interpreter and report the
    outcome in the Streamlit UI.

    Bug fixed: the original used ``os.system(f"python {file_path}")``, which
    (a) never raises on a non-zero exit status, so the ``try/except`` was dead
    code and "Script executed successfully!" was shown even when the script
    failed, (b) could invoke a different ``python`` than the one running this
    app, and (c) built a shell command from a path (shell-injection prone).
    ``subprocess.run`` with an argument list and ``sys.executable`` fixes all
    three while keeping the same signature and UI messages.
    """
    import subprocess
    import sys

    try:
        st.text(f"Running {file_path}...")
        # Argument list -> no shell involved; sys.executable -> same interpreter.
        result = subprocess.run([sys.executable, file_path])
        if result.returncode == 0:
            st.success("Script executed successfully!")
        else:
            st.error(f"Error: script exited with status {result.returncode}")
    except Exception as e:
        st.error(f"Error: {e}")
11
+
12
def main():
    """Streamlit entry point: locate ytdlbot/ytdl_bot.py and execute it."""
    st.title("YTDLBot Runner")

    # Build the path to the bot script inside the "ytdlbot" package directory.
    script_path = os.path.join("ytdlbot", "ytdl_bot.py")

    st.text(f"Selected file: {script_path}")

    # Launch the bot as soon as the app starts.
    run_python_file(script_path)


if __name__ == "__main__":
    main()
assets/1.jpeg ADDED
assets/2.jpeg ADDED
assets/CNY.png ADDED
assets/USD.png ADDED
assets/instagram.png ADDED

Git LFS Details

  • SHA256: 403808b9b818ec3ad934a4b7b4b1689c179d318eb34a3cabbe5e00b1b90fb14a
  • Pointer size: 132 Bytes
  • Size of remote file: 1.58 MB
assets/tron.png ADDED
conf/YouTube Download Celery.json ADDED
@@ -0,0 +1,794 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "__inputs": [
3
+ {
4
+ "name": "DS_CELERY",
5
+ "label": "celery",
6
+ "description": "",
7
+ "type": "datasource",
8
+ "pluginId": "influxdb",
9
+ "pluginName": "InfluxDB"
10
+ }
11
+ ],
12
+ "__elements": [],
13
+ "__requires": [
14
+ {
15
+ "type": "grafana",
16
+ "id": "grafana",
17
+ "name": "Grafana",
18
+ "version": "8.3.1"
19
+ },
20
+ {
21
+ "type": "datasource",
22
+ "id": "influxdb",
23
+ "name": "InfluxDB",
24
+ "version": "1.0.0"
25
+ },
26
+ {
27
+ "type": "panel",
28
+ "id": "timeseries",
29
+ "name": "Time series",
30
+ "version": ""
31
+ }
32
+ ],
33
+ "annotations": {
34
+ "list": [
35
+ {
36
+ "builtIn": 1,
37
+ "datasource": "-- Grafana --",
38
+ "enable": true,
39
+ "hide": true,
40
+ "iconColor": "rgba(0, 211, 255, 1)",
41
+ "name": "Annotations & Alerts",
42
+ "target": {
43
+ "limit": 100,
44
+ "matchAny": false,
45
+ "tags": [],
46
+ "type": "dashboard"
47
+ },
48
+ "type": "dashboard"
49
+ }
50
+ ]
51
+ },
52
+ "editable": true,
53
+ "fiscalYearStartMonth": 0,
54
+ "graphTooltip": 0,
55
+ "id": null,
56
+ "iteration": 1644554238421,
57
+ "links": [],
58
+ "liveNow": false,
59
+ "panels": [
60
+ {
61
+ "datasource": {
62
+ "type": "influxdb",
63
+ "uid": "${DS_CELERY}"
64
+ },
65
+ "fieldConfig": {
66
+ "defaults": {
67
+ "color": {
68
+ "mode": "palette-classic"
69
+ },
70
+ "custom": {
71
+ "axisLabel": "",
72
+ "axisPlacement": "auto",
73
+ "barAlignment": 0,
74
+ "drawStyle": "line",
75
+ "fillOpacity": 5,
76
+ "gradientMode": "none",
77
+ "hideFrom": {
78
+ "legend": false,
79
+ "tooltip": false,
80
+ "viz": false
81
+ },
82
+ "lineInterpolation": "linear",
83
+ "lineWidth": 1,
84
+ "pointSize": 5,
85
+ "scaleDistribution": {
86
+ "type": "linear"
87
+ },
88
+ "showPoints": "auto",
89
+ "spanNulls": true,
90
+ "stacking": {
91
+ "group": "A",
92
+ "mode": "none"
93
+ },
94
+ "thresholdsStyle": {
95
+ "mode": "off"
96
+ }
97
+ },
98
+ "mappings": [],
99
+ "thresholds": {
100
+ "mode": "absolute",
101
+ "steps": [
102
+ {
103
+ "color": "green",
104
+ "value": null
105
+ },
106
+ {
107
+ "color": "red",
108
+ "value": 80
109
+ }
110
+ ]
111
+ }
112
+ },
113
+ "overrides": []
114
+ },
115
+ "gridPos": {
116
+ "h": 8,
117
+ "w": 12,
118
+ "x": 0,
119
+ "y": 0
120
+ },
121
+ "id": 2,
122
+ "options": {
123
+ "legend": {
124
+ "calcs": [],
125
+ "displayMode": "list",
126
+ "placement": "bottom"
127
+ },
128
+ "tooltip": {
129
+ "mode": "single"
130
+ }
131
+ },
132
+ "targets": [
133
+ {
134
+ "alias": "Active",
135
+ "groupBy": [
136
+ {
137
+ "params": [
138
+ "$__interval"
139
+ ],
140
+ "type": "time"
141
+ },
142
+ {
143
+ "params": [
144
+ "null"
145
+ ],
146
+ "type": "fill"
147
+ }
148
+ ],
149
+ "measurement": "active",
150
+ "orderByTime": "ASC",
151
+ "policy": "default",
152
+ "query": "SELECT mean(\"active\") FROM \"active\" WHERE $timeFilter GROUP BY time($__interval) ",
153
+ "rawQuery": true,
154
+ "refId": "A",
155
+ "resultFormat": "time_series",
156
+ "select": [
157
+ [
158
+ {
159
+ "params": [
160
+ "active"
161
+ ],
162
+ "type": "field"
163
+ },
164
+ {
165
+ "params": [],
166
+ "type": "mean"
167
+ }
168
+ ]
169
+ ],
170
+ "tags": []
171
+ },
172
+ {
173
+ "alias": "$tag_hostname",
174
+ "hide": false,
175
+ "query": "\nSELECT \nmean(\"active\") AS active\nFROM \"tasks\" WHERE (\"hostname\" =~ /^$hostname$/) AND $timeFilter GROUP BY time($__interval) ,* ORDER BY asc ",
176
+ "rawQuery": true,
177
+ "refId": "B",
178
+ "resultFormat": "time_series"
179
+ }
180
+ ],
181
+ "title": "Active Jobs",
182
+ "type": "timeseries"
183
+ },
184
+ {
185
+ "datasource": {
186
+ "type": "influxdb",
187
+ "uid": "${DS_CELERY}"
188
+ },
189
+ "fieldConfig": {
190
+ "defaults": {
191
+ "color": {
192
+ "mode": "palette-classic"
193
+ },
194
+ "custom": {
195
+ "axisLabel": "",
196
+ "axisPlacement": "auto",
197
+ "barAlignment": 0,
198
+ "drawStyle": "line",
199
+ "fillOpacity": 5,
200
+ "gradientMode": "none",
201
+ "hideFrom": {
202
+ "legend": false,
203
+ "tooltip": false,
204
+ "viz": false
205
+ },
206
+ "lineInterpolation": "smooth",
207
+ "lineWidth": 1,
208
+ "pointSize": 5,
209
+ "scaleDistribution": {
210
+ "type": "linear"
211
+ },
212
+ "showPoints": "auto",
213
+ "spanNulls": true,
214
+ "stacking": {
215
+ "group": "A",
216
+ "mode": "none"
217
+ },
218
+ "thresholdsStyle": {
219
+ "mode": "off"
220
+ }
221
+ },
222
+ "mappings": [],
223
+ "thresholds": {
224
+ "mode": "absolute",
225
+ "steps": [
226
+ {
227
+ "color": "green",
228
+ "value": null
229
+ },
230
+ {
231
+ "color": "red",
232
+ "value": 80
233
+ }
234
+ ]
235
+ },
236
+ "unit": "percent"
237
+ },
238
+ "overrides": []
239
+ },
240
+ "gridPos": {
241
+ "h": 8,
242
+ "w": 12,
243
+ "x": 12,
244
+ "y": 0
245
+ },
246
+ "id": 10,
247
+ "options": {
248
+ "legend": {
249
+ "calcs": [],
250
+ "displayMode": "list",
251
+ "placement": "bottom"
252
+ },
253
+ "tooltip": {
254
+ "mode": "single"
255
+ }
256
+ },
257
+ "targets": [
258
+ {
259
+ "alias": "$col",
260
+ "datasource": {
261
+ "type": "influxdb",
262
+ "uid": "${DS_CELERY}"
263
+ },
264
+ "groupBy": [
265
+ {
266
+ "params": [
267
+ "$__interval"
268
+ ],
269
+ "type": "time"
270
+ },
271
+ {
272
+ "params": [
273
+ "null"
274
+ ],
275
+ "type": "fill"
276
+ }
277
+ ],
278
+ "measurement": "metrics",
279
+ "orderByTime": "ASC",
280
+ "policy": "default",
281
+ "query": "\nSELECT \nmean(\"today_audio_success\")/mean(\"today_audio_request\")*100 as audio_success,\nmean(\"today_video_success\")/mean(\"today_video_request\")*100 as video_success\n\nFROM \"metrics\" WHERE $timeFilter GROUP BY time($__interval), * ORDER BY asc ",
282
+ "rawQuery": true,
283
+ "refId": "A",
284
+ "resultFormat": "time_series",
285
+ "select": [
286
+ [
287
+ {
288
+ "params": [
289
+ "today_audio_success"
290
+ ],
291
+ "type": "field"
292
+ },
293
+ {
294
+ "params": [],
295
+ "type": "mean"
296
+ }
297
+ ]
298
+ ],
299
+ "tags": []
300
+ }
301
+ ],
302
+ "title": "Video & Audio Success Rate",
303
+ "type": "timeseries"
304
+ },
305
+ {
306
+ "datasource": {
307
+ "type": "influxdb",
308
+ "uid": "${DS_CELERY}"
309
+ },
310
+ "fieldConfig": {
311
+ "defaults": {
312
+ "color": {
313
+ "mode": "palette-classic"
314
+ },
315
+ "custom": {
316
+ "axisLabel": "",
317
+ "axisPlacement": "auto",
318
+ "barAlignment": 0,
319
+ "drawStyle": "line",
320
+ "fillOpacity": 5,
321
+ "gradientMode": "none",
322
+ "hideFrom": {
323
+ "legend": false,
324
+ "tooltip": false,
325
+ "viz": false
326
+ },
327
+ "lineInterpolation": "smooth",
328
+ "lineWidth": 1,
329
+ "pointSize": 5,
330
+ "scaleDistribution": {
331
+ "type": "linear"
332
+ },
333
+ "showPoints": "auto",
334
+ "spanNulls": true,
335
+ "stacking": {
336
+ "group": "A",
337
+ "mode": "none"
338
+ },
339
+ "thresholdsStyle": {
340
+ "mode": "off"
341
+ }
342
+ },
343
+ "mappings": [],
344
+ "thresholds": {
345
+ "mode": "absolute",
346
+ "steps": [
347
+ {
348
+ "color": "green",
349
+ "value": null
350
+ },
351
+ {
352
+ "color": "red",
353
+ "value": 80
354
+ }
355
+ ]
356
+ }
357
+ },
358
+ "overrides": []
359
+ },
360
+ "gridPos": {
361
+ "h": 8,
362
+ "w": 12,
363
+ "x": 0,
364
+ "y": 8
365
+ },
366
+ "id": 6,
367
+ "options": {
368
+ "legend": {
369
+ "calcs": [],
370
+ "displayMode": "list",
371
+ "placement": "bottom"
372
+ },
373
+ "tooltip": {
374
+ "mode": "single"
375
+ }
376
+ },
377
+ "targets": [
378
+ {
379
+ "alias": "$tag_hostname:$col",
380
+ "query": "SELECT mean(\"load1\") AS load1,mean(\"load5\") AS load5,mean(\"load15\") AS load15\nFROM \"tasks\" WHERE (\"hostname\" =~ /^$hostname$/) AND $timeFilter GROUP BY time($__interval) ,* ORDER BY asc \n\n",
381
+ "rawQuery": true,
382
+ "refId": "A",
383
+ "resultFormat": "time_series"
384
+ }
385
+ ],
386
+ "title": "Load Average",
387
+ "type": "timeseries"
388
+ },
389
+ {
390
+ "datasource": {
391
+ "type": "influxdb",
392
+ "uid": "${DS_CELERY}"
393
+ },
394
+ "fieldConfig": {
395
+ "defaults": {
396
+ "color": {
397
+ "mode": "palette-classic"
398
+ },
399
+ "custom": {
400
+ "axisLabel": "",
401
+ "axisPlacement": "auto",
402
+ "barAlignment": 0,
403
+ "drawStyle": "line",
404
+ "fillOpacity": 5,
405
+ "gradientMode": "none",
406
+ "hideFrom": {
407
+ "legend": false,
408
+ "tooltip": false,
409
+ "viz": false
410
+ },
411
+ "lineInterpolation": "smooth",
412
+ "lineWidth": 1,
413
+ "pointSize": 5,
414
+ "scaleDistribution": {
415
+ "type": "linear"
416
+ },
417
+ "showPoints": "auto",
418
+ "spanNulls": true,
419
+ "stacking": {
420
+ "group": "A",
421
+ "mode": "none"
422
+ },
423
+ "thresholdsStyle": {
424
+ "mode": "off"
425
+ }
426
+ },
427
+ "mappings": [],
428
+ "thresholds": {
429
+ "mode": "absolute",
430
+ "steps": [
431
+ {
432
+ "color": "green",
433
+ "value": null
434
+ },
435
+ {
436
+ "color": "red",
437
+ "value": 80
438
+ }
439
+ ]
440
+ },
441
+ "unit": "percent"
442
+ },
443
+ "overrides": []
444
+ },
445
+ "gridPos": {
446
+ "h": 8,
447
+ "w": 12,
448
+ "x": 12,
449
+ "y": 8
450
+ },
451
+ "id": 9,
452
+ "options": {
453
+ "legend": {
454
+ "calcs": [],
455
+ "displayMode": "list",
456
+ "placement": "bottom"
457
+ },
458
+ "tooltip": {
459
+ "mode": "single"
460
+ }
461
+ },
462
+ "targets": [
463
+ {
464
+ "alias": "$tag_hostname:$col",
465
+ "datasource": {
466
+ "type": "influxdb",
467
+ "uid": "${DS_CELERY}"
468
+ },
469
+ "groupBy": [
470
+ {
471
+ "params": [
472
+ "$__interval"
473
+ ],
474
+ "type": "time"
475
+ },
476
+ {
477
+ "params": [
478
+ "null"
479
+ ],
480
+ "type": "fill"
481
+ }
482
+ ],
483
+ "measurement": "tasks",
484
+ "orderByTime": "ASC",
485
+ "policy": "default",
486
+ "query": "\nSELECT mean(\"task-succeeded\")/mean(\"task-received\")*100 AS success_rate, mean(\"task-failed\")/mean(\"task-received\")*100 AS fail_rate\n\nFROM \"tasks\" WHERE (\"hostname\" =~ /^$hostname$/) AND $timeFilter GROUP BY time($__interval) ,* ORDER BY asc ",
487
+ "rawQuery": true,
488
+ "refId": "A",
489
+ "resultFormat": "time_series",
490
+ "select": [
491
+ [
492
+ {
493
+ "params": [
494
+ "task-received"
495
+ ],
496
+ "type": "field"
497
+ }
498
+ ]
499
+ ],
500
+ "tags": [
501
+ {
502
+ "key": "hostname",
503
+ "operator": "=~",
504
+ "value": "/^$hostname$/"
505
+ }
506
+ ]
507
+ }
508
+ ],
509
+ "title": "Task Rate",
510
+ "type": "timeseries"
511
+ },
512
+ {
513
+ "datasource": {
514
+ "type": "influxdb",
515
+ "uid": "${DS_CELERY}"
516
+ },
517
+ "fieldConfig": {
518
+ "defaults": {
519
+ "color": {
520
+ "mode": "palette-classic"
521
+ },
522
+ "custom": {
523
+ "axisLabel": "",
524
+ "axisPlacement": "auto",
525
+ "barAlignment": 0,
526
+ "drawStyle": "line",
527
+ "fillOpacity": 5,
528
+ "gradientMode": "none",
529
+ "hideFrom": {
530
+ "legend": false,
531
+ "tooltip": false,
532
+ "viz": false
533
+ },
534
+ "lineInterpolation": "smooth",
535
+ "lineWidth": 1,
536
+ "pointSize": 5,
537
+ "scaleDistribution": {
538
+ "type": "linear"
539
+ },
540
+ "showPoints": "auto",
541
+ "spanNulls": true,
542
+ "stacking": {
543
+ "group": "A",
544
+ "mode": "none"
545
+ },
546
+ "thresholdsStyle": {
547
+ "mode": "off"
548
+ }
549
+ },
550
+ "mappings": [],
551
+ "thresholds": {
552
+ "mode": "absolute",
553
+ "steps": [
554
+ {
555
+ "color": "green",
556
+ "value": null
557
+ },
558
+ {
559
+ "color": "red",
560
+ "value": 80
561
+ }
562
+ ]
563
+ },
564
+ "unit": "none"
565
+ },
566
+ "overrides": []
567
+ },
568
+ "gridPos": {
569
+ "h": 8,
570
+ "w": 12,
571
+ "x": 0,
572
+ "y": 16
573
+ },
574
+ "id": 13,
575
+ "options": {
576
+ "legend": {
577
+ "calcs": [],
578
+ "displayMode": "list",
579
+ "placement": "bottom"
580
+ },
581
+ "tooltip": {
582
+ "mode": "single"
583
+ }
584
+ },
585
+ "targets": [
586
+ {
587
+ "alias": "$tag_hostname:$col",
588
+ "datasource": {
589
+ "type": "influxdb",
590
+ "uid": "${DS_CELERY}"
591
+ },
592
+ "groupBy": [
593
+ {
594
+ "params": [
595
+ "$__interval"
596
+ ],
597
+ "type": "time"
598
+ },
599
+ {
600
+ "params": [
601
+ "null"
602
+ ],
603
+ "type": "fill"
604
+ }
605
+ ],
606
+ "measurement": "tasks",
607
+ "orderByTime": "ASC",
608
+ "policy": "default",
609
+ "query": "\nSELECT mean(\"task-received\") AS received, mean(\"task-started\") AS started,mean(\"task-succeeded\") AS succeeded,mean(\"task-failed\") AS failed\n\nFROM \"tasks\" WHERE (\"hostname\" =~ /^$hostname$/) AND $timeFilter GROUP BY time($__interval) ,* ORDER BY asc ",
610
+ "rawQuery": true,
611
+ "refId": "A",
612
+ "resultFormat": "time_series",
613
+ "select": [
614
+ [
615
+ {
616
+ "params": [
617
+ "task-received"
618
+ ],
619
+ "type": "field"
620
+ }
621
+ ]
622
+ ],
623
+ "tags": [
624
+ {
625
+ "key": "hostname",
626
+ "operator": "=~",
627
+ "value": "/^$hostname$/"
628
+ }
629
+ ]
630
+ }
631
+ ],
632
+ "title": "Task Status",
633
+ "type": "timeseries"
634
+ },
635
+ {
636
+ "datasource": {
637
+ "type": "influxdb",
638
+ "uid": "${DS_CELERY}"
639
+ },
640
+ "fieldConfig": {
641
+ "defaults": {
642
+ "color": {
643
+ "mode": "palette-classic"
644
+ },
645
+ "custom": {
646
+ "axisLabel": "",
647
+ "axisPlacement": "auto",
648
+ "barAlignment": 0,
649
+ "drawStyle": "line",
650
+ "fillOpacity": 5,
651
+ "gradientMode": "none",
652
+ "hideFrom": {
653
+ "legend": false,
654
+ "tooltip": false,
655
+ "viz": false
656
+ },
657
+ "lineInterpolation": "smooth",
658
+ "lineWidth": 1,
659
+ "pointSize": 5,
660
+ "scaleDistribution": {
661
+ "type": "linear"
662
+ },
663
+ "showPoints": "auto",
664
+ "spanNulls": true,
665
+ "stacking": {
666
+ "group": "A",
667
+ "mode": "none"
668
+ },
669
+ "thresholdsStyle": {
670
+ "mode": "off"
671
+ }
672
+ },
673
+ "mappings": [],
674
+ "thresholds": {
675
+ "mode": "absolute",
676
+ "steps": [
677
+ {
678
+ "color": "green",
679
+ "value": null
680
+ },
681
+ {
682
+ "color": "red",
683
+ "value": 80
684
+ }
685
+ ]
686
+ }
687
+ },
688
+ "overrides": []
689
+ },
690
+ "gridPos": {
691
+ "h": 8,
692
+ "w": 12,
693
+ "x": 12,
694
+ "y": 16
695
+ },
696
+ "id": 8,
697
+ "options": {
698
+ "legend": {
699
+ "calcs": [],
700
+ "displayMode": "list",
701
+ "placement": "bottom"
702
+ },
703
+ "tooltip": {
704
+ "mode": "single"
705
+ }
706
+ },
707
+ "targets": [
708
+ {
709
+ "alias": "$col",
710
+ "datasource": {
711
+ "type": "influxdb",
712
+ "uid": "${DS_CELERY}"
713
+ },
714
+ "groupBy": [
715
+ {
716
+ "params": [
717
+ "$__interval"
718
+ ],
719
+ "type": "time"
720
+ },
721
+ {
722
+ "params": [
723
+ "null"
724
+ ],
725
+ "type": "fill"
726
+ }
727
+ ],
728
+ "measurement": "metrics",
729
+ "orderByTime": "ASC",
730
+ "policy": "default",
731
+ "query": "SELECT \nmean(\"today_audio_request\") as audio_request,\nmean(\"today_audio_success\") as audio_success,\n\nmean(\"today_bad_request\") as bad_request,\n\nmean(\"today_video_request\") as video_request,\nmean(\"today_video_success\") as video_success\nFROM \"metrics\" WHERE $timeFilter GROUP BY time($__interval), * ORDER BY asc ",
732
+ "rawQuery": true,
733
+ "refId": "A",
734
+ "resultFormat": "time_series",
735
+ "select": [
736
+ [
737
+ {
738
+ "params": [
739
+ "today_audio_success"
740
+ ],
741
+ "type": "field"
742
+ },
743
+ {
744
+ "params": [],
745
+ "type": "mean"
746
+ }
747
+ ]
748
+ ],
749
+ "tags": []
750
+ }
751
+ ],
752
+ "title": "Video & Audio",
753
+ "type": "timeseries"
754
+ }
755
+ ],
756
+ "refresh": "",
757
+ "schemaVersion": 33,
758
+ "style": "dark",
759
+ "tags": [],
760
+ "templating": {
761
+ "list": [
762
+ {
763
+ "current": {},
764
+ "datasource": {
765
+ "type": "influxdb",
766
+ "uid": "${DS_CELERY}"
767
+ },
768
+ "definition": "show tag values with KEY=\"hostname\"",
769
+ "hide": 0,
770
+ "includeAll": true,
771
+ "label": "hostname",
772
+ "multi": true,
773
+ "name": "hostname",
774
+ "options": [],
775
+ "query": "show tag values with KEY=\"hostname\"",
776
+ "refresh": 1,
777
+ "regex": "",
778
+ "skipUrlSync": false,
779
+ "sort": 1,
780
+ "type": "query"
781
+ }
782
+ ]
783
+ },
784
+ "time": {
785
+ "from": "now-15m",
786
+ "to": "now"
787
+ },
788
+ "timepicker": {},
789
+ "timezone": "",
790
+ "title": "YouTube Download Celery",
791
+ "uid": "9yXGmc1nk",
792
+ "version": 14,
793
+ "weekStart": ""
794
+ }
conf/supervisor_main.conf ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [supervisord]
2
+ nodaemon=true
3
+ logfile=/dev/null
4
+ logfile_maxbytes=0
5
+ user=root
6
+
7
+
8
+ [program:vnstat]
9
+ command=vnstatd -n
10
+ autorestart=true
11
+
12
+
13
+ [program:ytdl]
14
+ directory=/ytdlbot/ytdlbot/
15
+ command=python ytdl_bot.py
16
+ autorestart=true
17
+ priority=900
18
+ stopasgroup=true
19
+ startsecs = 30
20
+ startretries = 2
21
+
22
+ redirect_stderr=true
23
+ stdout_logfile_maxbytes = 50MB
24
+ stdout_logfile_backups = 2
25
+ stdout_logfile = /var/log/ytdl.log
26
+
27
+ [program:log]
28
+ command=tail -f /var/log/ytdl.log
29
+ autorestart=true
30
+ priority=999
31
+
32
+ redirect_stderr=true
33
+ stdout_logfile=/dev/fd/1
34
+ stdout_logfile_maxbytes=0
conf/supervisor_worker.conf ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [supervisord]
2
+ nodaemon=true
3
+ logfile=/dev/null
4
+ logfile_maxbytes=0
5
+ user=root
6
+
7
+
8
+ [program:vnstat]
9
+ command=vnstatd -n
10
+ autorestart=true
11
+
12
+ [program:worker]
13
+ directory=/ytdlbot/ytdlbot/
14
+ command=python tasks.py
15
+ autorestart=true
16
+ priority=900
17
+ stopasgroup=true
18
+ startsecs = 5
19
+ startretries = 5
20
+
21
+ redirect_stderr=true
22
+ stdout_logfile_maxbytes = 50MB
23
+ stdout_logfile_backups = 2
24
+ stdout_logfile = /var/log/ytdl.log
25
+
26
+ [program:log]
27
+ command=tail -f /var/log/ytdl.log
28
+ autorestart=true
29
+ priority=999
30
+
31
+ redirect_stderr=true
32
+ stdout_logfile=/dev/fd/1
33
+ stdout_logfile_maxbytes=0
k8s.md ADDED
@@ -0,0 +1,200 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ## Kubernetes
2
+
3
+ Kubernetes, also known as K8s, is an open-source system for automating deployment, scaling, and management of
4
+ containerized applications
5
+
6
+ # Complete deployment guide for k8s deployment
7
+
8
+ * contains every functionality
9
+ * compatible with amd64, arm64 and armv7l
10
+
11
+ ## First. Get all file in k8s folder
12
+
13
+ Download `k8s` file to a directory on your k8s server and go to this folder
14
+
15
+ ## 1. Create Redis deployment
16
+
17
+ ```shell
18
+ kubectl apply -f 01.redis.yml
19
+ ```
20
+
21
+ This command will create ytdl namespace, redis pod and redis service
22
+
23
+ ## 2. Create MariaDB deployment
24
+
25
+ ```shell
26
+ kubectl apply -f 02.mariadb.yml
27
+ ```
28
+
29
+ This deployment will claim 10GB storage from storageClassName: longhorn. Please replace longhorn with your
30
+ storageClassName before apply.
31
+
32
+ ## 3. Set environment variables
33
+
34
+ Create configMap for env
35
+
36
+ ### 3.1 Edit configmap.yml
37
+
38
+ ```shell
39
+ vim 03.configmap.yml
40
+ ```
41
+
42
+ you can configure all the following environment variables:
43
+
44
+ * PYRO_WORKERS: number of workers for pyrogram, default is 100
45
+ * WORKERS: workers count for celery
46
+ * APP_ID: **REQUIRED**, get it from https://core.telegram.org/
47
+ * APP_HASH: **REQUIRED**
48
+ * TOKEN: **REQUIRED**
49
+ * REDIS: **REQUIRED if you need VIP mode and cache** ⚠️ Don't publish your redis server on the internet. ⚠️
50
+
51
+ * OWNER: owner username
52
+ * QUOTA: quota in bytes
53
+ * EX: quota expire time
54
+ * MULTIPLY: vip quota comparing to normal quota
55
+ * USD2CNY: exchange rate
56
+ * VIP: VIP mode, default: disable
57
+ * AFD_LINK
58
+ * COFFEE_LINK
59
+ * COFFEE_TOKEN
60
+ * AFD_TOKEN
61
+ * AFD_USER_ID
62
+
63
+ * AUTHORIZED_USER: users that could use this bot, user_id, separated with `,`
64
+ * REQUIRED_MEMBERSHIP: group or channel username, user must join this group to use the bot. Could be use with
65
+ above `AUTHORIZED_USER`
66
+
67
+ * ENABLE_CELERY: Distribution mode, default: disable. You can set up workers in different locations.
68
+ * ENABLE_FFMPEG: enable ffmpeg so Telegram can stream
69
+ * MYSQL_HOST: you'll have to setup MySQL if you enable VIP mode
70
+ * MYSQL_USER
71
+ * MYSQL_PASS
72
+ * GOOGLE_API_KEY: YouTube API key, required for YouTube video subscription.
73
+ * AUDIO_FORMAT: audio format, default is m4a. You can set to any known and supported format for ffmpeg. For
74
+ example,`mp3`, `flac`, etc. ⚠️ m4a is the fastest. Other formats may affect performance.
75
+ * ARCHIVE_ID: group or channel id/username. All downloads will send to this group first and then forward to end user.
76
+ **Inline button will be lost during the forwarding.**
77
+
78
+ ### 3.2 Apply configMap for environment variables
79
+
80
+ ```shell
81
+ kubectl apply -f 03.configmap.yml
82
+ ```
83
+
84
+ ## 4. Run Master Celery
85
+
86
+ ```shell
87
+ kubectl apply -f 04.ytdl-master.yml
88
+ ```
89
+
90
+ This deployment will create ytdl-pvc PersistentVolumeClaim on storageClassName: longhorn. This claim will contain vnstat,
91
+ cookies folder and flower database. Please replace longhorn with your storageClassName before apply
92
+
93
+ ### 4.1 Setup instagram cookies
94
+
95
+ Required if you want to support instagram.
96
+
97
+ You can use this extension
98
+ [Get cookies.txt](https://chrome.google.com/webstore/detail/get-cookiestxt/bgaddhkoddajcdgocldbbfleckgcbcid)
99
+ to get instagram cookies
100
+
101
+ Get pod running ytdl master:
102
+
103
+ ```shell
104
+ kubectl get pods --namespace ytdl
105
+ ```
106
+
107
+ Name should be ytdl-xxxxxxxx
108
+
109
+ Access to pod
110
+
111
+ ```shell
112
+ kubectl --namespace=ytdl exec --stdin --tty ytdl-xxx -- sh
113
+ ```
114
+
115
+ (replace ytdl-xxx by your pod name)
116
+
117
+ Go to ytdl-pvc mounted folder
118
+
119
+ ```shell
120
+ cd /ytdlbot/ytdlbot/data/
121
+ vim instagram.com_cookies.txt
122
+ # paste your cookies
123
+ ```
124
+
125
+ ## 5. Run Worker Celery
126
+
127
+ ```shell
128
+ kubectl apply -f 05.ytdl-worker.yml
129
+ ```
130
+
131
+ ## 6. Run Flower image (OPTIONAL)
132
+
133
+ ### 6.1 Setup flower db
134
+
135
+ Get pod running ytdl master:
136
+
137
+ ```shell
138
+ kubectl get pods --namespace ytdl
139
+ ```
140
+
141
+ Name should be ytdl-xxxxxxxx
142
+
143
+ Access to pod
144
+
145
+ ```shell
146
+ kubectl --namespace=ytdl exec --stdin --tty ytdl-xxx -- sh
147
+ ```
148
+
149
+ (replace ytdl-xxx by your pod name)
150
+
151
+ Go to ytdl-pvc mounted folder
152
+
153
+ ```shell
154
+ cd /var/lib/vnstat/
155
+ ```
156
+
157
+ Create flower database file
158
+
159
+ ```shell
160
+ {} ~ python3
161
+ Python 3.9.9 (main, Nov 21 2021, 03:22:47)
162
+ [Clang 12.0.0 (clang-1200.0.32.29)] on darwin
163
+ Type "help", "copyright", "credits" or "license" for more information.
164
+ >>> import dbm;dbm.open("flower","n");exit()
165
+ ```
166
+
167
+ ### 6.2 Config Flower Ingress
168
+
169
+ This step need config ingress from line 51 of file 06.flower.yml with your ingress service. Need for access from
170
+ internet.
171
+ YML file should be adjusted depending on your load balancing, ingress and network system
172
+
173
+ For active SSL
174
+
175
+ ```yml
176
+ cert-manager.io/cluster-issuer: letsencrypt-prod
177
+ ```
178
+
179
+ Replace nginx by your ingress service
180
+
181
+ ```yml
182
+ ingressClassName: nginx
183
+ ```
184
+
185
+ Add your domain, example
186
+
187
+ ```yml
188
+ tls:
189
+ - hosts:
190
+ - flower.benny.com
191
+ secretName: flower-tls
192
+ rules:
193
+ - host: flower.benny.com
194
+ ```
195
+
196
+ ### 6.3 Apply Flower deployment
197
+
198
+ ```shell
199
+ kubectl apply -f 06.flower.yml
200
+ ```
k8s/01.redis.yml ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ apiVersion: v1
2
+ kind: Namespace
3
+ metadata:
4
+ name: ytdl
5
+
6
+ ---
7
+ apiVersion: apps/v1
8
+ kind: Deployment
9
+ metadata:
10
+ creationTimestamp: null
11
+ labels:
12
+ ytdl: redis
13
+ name: redis
14
+ namespace: ytdl
15
+ spec:
16
+ replicas: 1
17
+ selector:
18
+ matchLabels:
19
+ ytdl: redis
20
+ strategy: {}
21
+ template:
22
+ metadata:
23
+ creationTimestamp: null
24
+ labels:
25
+ ytdl: redis
26
+ spec:
27
+ containers:
28
+ - image: redis:7-alpine
29
+ name: redis
30
+ ports:
31
+ - containerPort: 6379
32
+ resources: {}
33
+ restartPolicy: Always
34
+ status: {}
35
+
36
+ ---
37
+ apiVersion: v1
38
+ kind: Service
39
+ metadata:
40
+ creationTimestamp: null
41
+ labels:
42
+ ytdl: redis
43
+ name: redis
44
+ namespace: ytdl
45
+ spec:
46
+ ports:
47
+ - name: "6379"
48
+ port: 6379
49
+ targetPort: 6379
50
+ selector:
51
+ ytdl: redis
52
+ status:
53
+ loadBalancer: {}
k8s/02.mariadb.yml ADDED
@@ -0,0 +1,80 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ apiVersion: v1
2
+ kind: PersistentVolumeClaim
3
+ metadata:
4
+ creationTimestamp: null
5
+ labels:
6
+ ytdl: mariadb-pvc
7
+ name: mariadb-pvc
8
+ namespace: ytdl
9
+ spec:
10
+ accessModes:
11
+ - ReadWriteOnce
12
+ storageClassName: longhorn
13
+ resources:
14
+ requests:
15
+ storage: 10Gi
16
+ status: {}
17
+
18
+ ---
19
+ apiVersion: apps/v1
20
+ kind: Deployment
21
+ metadata:
22
+ annotations:
23
+ creationTimestamp: null
24
+ labels:
25
+ ytdl: mariadb
26
+ name: mariadb
27
+ namespace: ytdl
28
+ spec:
29
+ replicas: 1
30
+ selector:
31
+ matchLabels:
32
+ ytdl: mariadb
33
+ strategy:
34
+ type: Recreate
35
+ template:
36
+ metadata:
37
+ creationTimestamp: null
38
+ labels:
39
+ ytdl: mariadb
40
+ spec:
41
+ containers:
42
+ - env:
43
+ - name: MYSQL_ROOT_PASSWORD
44
+ value: ro0tP4sSworD
45
+ - name: MYSQL_DATABASE
46
+ value: ytdl
47
+ image: mariadb:latest
48
+ name: mariadb
49
+ ports:
50
+ - containerPort: 3306
51
+ resources: {}
52
+ volumeMounts:
53
+ - mountPath: /var/lib/mysql
54
+ name: "mariadb-persistent-storage"
55
+ restartPolicy: Always
56
+ volumes:
57
+ - name: mariadb-persistent-storage
58
+ persistentVolumeClaim:
59
+ claimName: mariadb-pvc
60
+ status: {}
61
+
62
+ ---
63
+ apiVersion: v1
64
+ kind: Service
65
+ metadata:
66
+ creationTimestamp: null
67
+ labels:
68
+ ytdl: mariadb
69
+ name: mariadb-svc
70
+ namespace: ytdl
71
+ spec:
72
+ ports:
73
+ - name: "3306"
74
+ port: 3306
75
+ targetPort: 3306
76
+ selector:
77
+ ytdl: mariadb
78
+ status:
79
+ loadBalancer: {}
80
+
k8s/03.configmap.yml ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ apiVersion: v1
2
+ kind: ConfigMap
3
+ metadata:
4
+ name: ytdlenv
5
+ namespace: ytdl
6
+ annotations:
7
+ data:
8
+ APP_HASH:
9
+ APP_ID:
10
+ TOKEN:
11
+ ARCHIVE_ID:
12
+ ENABLE_CELERY: 'True'
13
+ ENABLE_FFMPEG: 'True'
14
+ MYSQL_HOST: mariadb-svc
15
+ MYSQL_PASS: ro0tP4sSworD
16
+ MYSQL_USER: root
17
+ REDIS: redis
k8s/04.ytdl-master.yml ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ apiVersion: v1
3
+ kind: PersistentVolumeClaim
4
+ metadata:
5
+ name: ytdl-pvc
6
+ namespace: ytdl
7
+ creationTimestamp: null
8
+ labels:
9
+ ytdl: ytdl-pvc
10
+ spec:
11
+ accessModes:
12
+ - ReadWriteMany
13
+ storageClassName: longhorn
14
+ resources:
15
+ requests:
16
+ storage: 10Gi
17
+ status: {}
18
+
19
+ ---
20
+ apiVersion: apps/v1
21
+ kind: Deployment
22
+ metadata:
23
+ name: ytdl
24
+ namespace: ytdl
25
+ creationTimestamp: null
26
+ labels:
27
+ ytdl: ytdl
28
+ spec:
29
+ replicas: 1
30
+ selector:
31
+ matchLabels:
32
+ ytdl: ytdl
33
+ template:
34
+ metadata:
35
+ creationTimestamp: null
36
+ labels:
37
+ ytdl: ytdl
38
+ spec:
39
+ volumes:
40
+ - name: ytdl-pvc
41
+ persistentVolumeClaim:
42
+ claimName: ytdl-pvc
43
+ containers:
44
+ - name: ytdl
45
+ image: bennythink/ytdlbot
46
+ envFrom:
47
+ - configMapRef:
48
+ name: ytdlenv
49
+ resources: {}
50
+ volumeMounts:
51
+ - name: ytdl-pvc
52
+ mountPath: /var/lib/vnstat/
53
+ subPath: vnstat/
54
+ - name: ytdl-pvc
55
+ mountPath: /ytdlbot/ytdlbot/data/
56
+ subPath: data/
57
+ terminationMessagePath: /dev/termination-log
58
+ terminationMessagePolicy: File
59
+ imagePullPolicy: Always
60
+ restartPolicy: Always
61
+ terminationGracePeriodSeconds: 30
62
+ dnsPolicy: ClusterFirst
63
+ securityContext: {}
64
+ schedulerName: default-scheduler
65
+ status: {}
k8s/05.ytdl-worker.yml ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ apiVersion: apps/v1
2
+ kind: Deployment
3
+ metadata:
4
+ creationTimestamp: null
5
+ labels:
6
+ ytdl: ytdl-worker
7
+ name: ytdl-worker
8
+ namespace: ytdl
9
+ spec:
10
+ replicas: 4
11
+ selector:
12
+ matchLabels:
13
+ ytdl: ytdl-worker
14
+ template:
15
+ metadata:
16
+ creationTimestamp: null
17
+ labels:
18
+ ytdl: ytdl-worker
19
+ spec:
20
+ volumes:
21
+ - name: ytdl-pvc
22
+ persistentVolumeClaim:
23
+ claimName: ytdl-pvc
24
+ containers:
25
+ - name: ytdl-worker
26
+ image: bennythink/ytdlbot
27
+ args:
28
+ - /usr/local/bin/supervisord
29
+ - '-c'
30
+ - /ytdlbot/conf/supervisor_worker.conf
31
+ envFrom:
32
+ - configMapRef:
33
+ name: ytdlenv
34
+ resources: {}
35
+ volumeMounts:
36
+ - name: ytdl-pvc
37
+ mountPath: /ytdlbot/ytdlbot/data/
38
+ subPath: data/
39
+ terminationMessagePath: /dev/termination-log
40
+ terminationMessagePolicy: File
41
+ imagePullPolicy: Always
42
+ restartPolicy: Always
43
+ terminationGracePeriodSeconds: 30
44
+ dnsPolicy: ClusterFirst
45
+ securityContext: {}
46
+ schedulerName: default-scheduler
47
+ status: {}
k8s/06.flower.yml ADDED
@@ -0,0 +1,101 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ apiVersion: apps/v1
2
+ kind: Deployment
3
+ metadata:
4
+ creationTimestamp: null
5
+ labels:
6
+ ytdl: flower
7
+ name: flower
8
+ namespace: ytdl
9
+ spec:
10
+ replicas: 1
11
+ selector:
12
+ matchLabels:
13
+ ytdl: flower
14
+ strategy:
15
+ type: Recreate
16
+ template:
17
+ metadata:
18
+ creationTimestamp: null
19
+ labels:
20
+ ytdl: flower
21
+ spec:
22
+ containers:
23
+ - envFrom:
24
+ - configMapRef:
25
+ name: ytdlenv
26
+ args:
27
+ - /usr/local/bin/celery
28
+ - -A
29
+ - flower_tasks
30
+ - flower
31
+ - --basic_auth=bennythink:123456
32
+ - --address=0.0.0.0
33
+ - --persistent
34
+ - --purge_offline_workers=3600
35
+ image: bennythink/ytdlbot
36
+ name: flower
37
+ ports:
38
+ - containerPort: 5555
39
+ resources: {}
40
+ volumeMounts:
41
+ - name: ytdl-pvc
42
+ mountPath: /ytdlbot/ytdlbot/flower
43
+ subPath: vnstat/flower
44
+ restartPolicy: Always
45
+ volumes:
46
+ - name: ytdl-pvc
47
+ persistentVolumeClaim:
48
+ claimName: ytdl-pvc
49
+ status: {}
50
+
51
+ # THIS IS OPTION IF YOU WANT PUBLIC FLOWER PAGE TO INTERNET.
52
+ # should be adjusted depending on your load balancing system machine
53
+ ---
54
+ apiVersion: v1
55
+ kind: Service
56
+ metadata:
57
+ creationTimestamp: null
58
+ labels:
59
+ ytdl: flower
60
+ name: flower-svc
61
+ namespace: ytdl
62
+ spec:
63
+ type: NodePort
64
+ ports:
65
+ - name: "5555"
66
+ protocol: TCP
67
+ port: 5555
68
+ targetPort: 5555
69
+ selector:
70
+ ytdl: flower
71
+ status:
72
+ loadBalancer: {}
73
+
74
+ ---
75
+ apiVersion: networking.k8s.io/v1
76
+ kind: Ingress
77
+ metadata:
78
+ name: nginx-flower-ingress
79
+ namespace: ytdl
80
+ annotations:
81
+ # cert-manager.io/cluster-issuer: letsencrypt-prod
82
+ nginx.ingress.kubernetes.io/rewrite-target: /
83
+ # nginx.ingress.kubernetes.io/whitelist-source-range: 14.161.27.151 limit by ipaddresss
84
+
85
+ spec:
86
+ ingressClassName: nginx
87
+ tls:
88
+ - hosts:
89
+ - your-domain
90
+ secretName: flower-tls
91
+ rules:
92
+ - host: your-domain
93
+ http:
94
+ paths:
95
+ - path: /
96
+ pathType: Prefix
97
+ backend:
98
+ service:
99
+ name: flower-svc
100
+ port:
101
+ number: 5555
main.py ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from fastapi import FastAPI, HTTPException
import subprocess


def run_python_verbose() -> bool:
    """Launch the ytdl bot script as a blocking subprocess.

    Returns:
        True when ``ytdlbot/ytdl_bot.py`` exits with status 0, False otherwise.
    """
    try:
        print("okk")
        # check=True makes a non-zero exit raise CalledProcessError
        subprocess.run(["python", "ytdlbot/ytdl_bot.py"], check=True)
        return True
    except subprocess.CalledProcessError as e:
        # original message referenced 'python -v', which is not what runs here
        print(f"Error running 'python ytdlbot/ytdl_bot.py': {e}")
        return False


app = FastAPI()


@app.get("/")
async def root():
    """Simple health-check endpoint."""
    return {"message": "Hello World"}


@app.get("/okk")
async def okk():
    """Start the bot subprocess and report whether it launched cleanly.

    NOTE(review): subprocess.run blocks the event loop for the lifetime of the
    bot process; consider a background task or subprocess.Popen for production.
    """
    started = run_python_verbose()
    return {"started": started}
requirements.txt ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ pyrogram
2
+ tgcrypto
3
+ yt-dlp
4
+ APScheduler
5
+ beautifultable
6
+ ffmpeg-python
7
+ PyMySQL
8
+ celery
9
+ filetype
10
+ flower
11
+ psutil
12
+ influxdb
13
+ beautifulsoup4
14
+ fakeredis
15
+ supervisor
16
+ tgbot-ping
17
+ redis
18
+ requests
19
+ tqdm
20
+ requests-toolbelt
21
+ ffpb
22
+ youtube-search-python
23
+ token-bucket
24
+ coloredlogs
25
+ tronpy
26
+ mnemonic
27
+ qrcode
28
+ blinker
29
+ flask
30
+ streamlit
scripts/low_id.sh ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/bin/bash
# Restart the ytdl docker-compose service when Telegram's "The msg_id is too low"
# error appears in the recent logs (a known stale-session symptom).
# Intended to be run unattended (e.g. from cron), hence the explicit PATH.
export PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin:/usr/local/go/bin:/opt/bin

# Check the last 100 log lines for the given string
if docker-compose logs --tail=100 ytdl | grep -q "The msg_id is too low"; then
    # If the string is found, recreate the ytdl service.
    # -f is required: without it `docker-compose rm` prompts for confirmation
    # and would hang a non-interactive run.
    echo "Restarting ytdl service: 'The msg_id is too low' found in logs."
    docker-compose stop ytdl && docker-compose rm -f ytdl && docker-compose up -d
else
    echo "String not found in logs."
fi
scripts/migrate_to_mysql.py ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/local/bin/python3
# coding: utf-8

# ytdlbot - migrate_to_mysql.py
# 12/29/21 15:28
#
# One-shot migration of the VIP and settings tables from SQLite to MySQL.

__author__ = "Benny <benny.think@gmail.com>"

import sqlite3

import pymysql

mysql_con = pymysql.connect(host="localhost", user="root", passwd="root", db="vip", charset="utf8mb4")
sqlite_con = sqlite3.connect("vip.sqlite")

# Reuse one cursor instead of allocating a new one per row (the original
# created an unclosed cursor for every INSERT).
with mysql_con.cursor() as cur:
    # Column count in SQLite must match the 6 placeholders below.
    for vip in sqlite_con.execute("SELECT * FROM VIP"):
        cur.execute("INSERT INTO vip VALUES (%s, %s, %s, %s, %s, %s)", vip)

    for setting in sqlite_con.execute("SELECT * FROM settings"):
        cur.execute("INSERT INTO settings VALUES (%s,%s,%s)", setting)

mysql_con.commit()
mysql_con.close()
sqlite_con.close()
scripts/start.sh ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Launch the ytdlbot container as a celery worker under supervisord.
# NOTE(review): TOKEN/APP_ID/APP_HASH below are placeholders — supply real
# Telegram credentials; REDIS and MYSQL_HOST must point at reachable services.
docker run -d --restart unless-stopped --name ytdl \
    --net host \
    -e TOKEN=12345 \
    -e APP_ID=123123 \
    -e APP_HASH=4990 \
    -e ENABLE_CELERY=True \
    -e REDIS=192.168.6.1 \
    -e MYSQL_HOST=192.168.6.1 \
    -e WORKERS=4 \
    -e VIP=True \
    -e CUSTOM_TEXT=#StandWithUkraine \
    bennythink/ytdlbot \
    /usr/local/bin/supervisord -c "/ytdlbot/conf/supervisor_worker.conf"
scripts/transfer.py ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env python3
# coding: utf-8

# ytdlbot - transfer.py
# 2023-12-07 18:21
# Interactive helper: send TRX from a mnemonic-derived wallet on the nile testnet.
from tronpy import Tron
from tronpy.hdwallet import seed_from_mnemonic, key_from_seed
from tronpy.keys import PrivateKey

mnemonic = "web horse smile ramp olive slush blue property world physical donkey pumpkin"

client = Tron(network="nile")

from_ = client.generate_address_from_mnemonic(mnemonic, account_path="m/44'/195'/0'/0/0")["base58check_address"]
balance = client.get_account_balance(from_)
print("my addr: ", from_, "balance: ", balance)
to = input("to: ")
amount = int(input("amount in TRX: "))


def mnemonic_to_private_key():
    """Derive the sending account's private key from the module-level mnemonic."""
    seed = seed_from_mnemonic(mnemonic, passphrase="")
    return PrivateKey(key_from_seed(seed, account_path="m/44'/195'/0'/0/0"))


# 1 TRX == 1_000_000 sun (the chain's base unit)
t = client.trx.transfer(from_, to, amount * 1_000_000).build().sign(mnemonic_to_private_key()).broadcast()

print(t.wait())
worker.yml ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ version: '3.1'
2
+
3
+ services:
4
+ worker:
5
+ image: bennythink/ytdlbot
6
+ env_file:
7
+ - env/ytdl.env
8
+ restart: always
9
+ command: [ "/usr/local/bin/supervisord", "-c" ,"/ytdlbot/conf/supervisor_worker.conf" ]
10
+ # network_mode: "host"
11
+ # deploy:
12
+ # resources:
13
+ # limits:
14
+ # cpus: '0.3'
15
+ # memory: 1500M
ytdlbot/channel.py ADDED
@@ -0,0 +1,181 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ # coding: utf-8
3
+ import http
4
+ import logging
5
+ import os
6
+ import re
7
+
8
+ import requests
9
+ from bs4 import BeautifulSoup
10
+
11
+ from config import ENABLE_VIP
12
+ from limit import Payment
13
+
14
+
15
class Channel(Payment):
    """YouTube channel subscription management backed by MySQL and Redis."""

    def subscribe_channel(self, user_id: int, share_link: str) -> str:
        """Subscribe a user to a YouTube channel.

        Raises ValueError for non-YouTube links or duplicate subscriptions.
        Returns a human-readable status message.
        """
        if not re.findall(r"youtube\.com|youtu\.be", share_link):
            raise ValueError("Is this a valid YouTube Channel link?")
        if ENABLE_VIP:
            self.cur.execute("select count(user_id) from subscribe where user_id=%s", (user_id,))
            usage = int(self.cur.fetchone()[0])
            if usage >= 10:
                logging.warning("User %s has subscribed %s channels", user_id, usage)
                # message now matches the limit actually enforced above (was "Maximum 5")
                return "You have subscribed too many channels. Maximum 10 channels."

        data = self.get_channel_info(share_link)
        channel_id = data["channel_id"]

        self.cur.execute("select user_id from subscribe where user_id=%s and channel_id=%s", (user_id, channel_id))
        if self.cur.fetchall():
            raise ValueError("You have already subscribed this channel.")

        self.cur.execute(
            "INSERT IGNORE INTO channel values"
            "(%(link)s,%(title)s,%(description)s,%(channel_id)s,%(playlist)s,%(last_video)s)",
            data,
        )
        self.cur.execute("INSERT INTO subscribe values(%s,%s, NULL)", (user_id, channel_id))
        self.con.commit()
        logging.info("User %s subscribed channel %s", user_id, data["title"])
        return "Subscribed to {}".format(data["title"])

    def unsubscribe_channel(self, user_id: int, channel_id: str) -> int:
        """Remove one subscription; returns the number of rows deleted (0 or 1)."""
        affected_rows = self.cur.execute(
            "DELETE FROM subscribe WHERE user_id=%s AND channel_id=%s", (user_id, channel_id)
        )
        self.con.commit()
        logging.info("User %s tried to unsubscribe channel %s", user_id, channel_id)
        return affected_rows

    @staticmethod
    def extract_canonical_link(url: str) -> str:
        """Resolve *url* to its canonical form by reading <link rel=...> tags.

        Falls back to returning *url* unchanged on any failure.
        """
        # canonical link works for many websites. It will strip out unnecessary stuff
        props = ["canonical", "alternate", "shortlinkUrl"]
        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.163 Safari/537.36"
        }
        cookie = {"CONSENT": "PENDING+197"}
        # send head request first; timeout added so an unattended worker cannot hang
        r = requests.head(url, headers=headers, allow_redirects=True, cookies=cookie, timeout=5)
        if r.status_code != http.HTTPStatus.METHOD_NOT_ALLOWED and "text/html" not in r.headers.get("content-type", ""):
            # get content-type, if it's not text/html, there's no need to issue a GET request
            logging.warning("%s Content-type is not text/html, no need to GET for extract_canonical_link", url)
            return url

        html_doc = requests.get(url, headers=headers, cookies=cookie, timeout=5).text
        soup = BeautifulSoup(html_doc, "html.parser")
        for prop in props:
            element = soup.find("link", rel=prop)
            try:
                href = element["href"]
                if href not in ["null", "", None, "https://consent.youtube.com/m"]:
                    return href
            except Exception as e:
                # was: "Canonical exception %s %s e" — stray trailing 'e' token removed
                logging.debug("Canonical exception %s %s", url, e)

        return url

    def get_channel_info(self, url: str) -> dict:
        """Fetch channel metadata via the YouTube Data API (needs GOOGLE_API_KEY)."""
        api_key = os.getenv("GOOGLE_API_KEY")
        canonical_link = self.extract_canonical_link(url)
        try:
            channel_id = canonical_link.split("youtube.com/channel/")[1]
        except IndexError:
            # handle/short links: last path segment stands in for the channel id
            channel_id = canonical_link.split("/")[-1]
        channel_api = (
            f"https://www.googleapis.com/youtube/v3/channels?part=snippet,contentDetails&id={channel_id}&key={api_key}"
        )

        data = requests.get(channel_api, timeout=10).json()
        snippet = data["items"][0]["snippet"]
        title = snippet["title"]
        description = snippet["description"]
        playlist = data["items"][0]["contentDetails"]["relatedPlaylists"]["uploads"]

        return {
            "link": url,
            "title": title,
            "description": description,
            "channel_id": channel_id,
            "playlist": playlist,
            "last_video": self.get_latest_video(playlist),
        }

    @staticmethod
    def get_latest_video(playlist_id: str) -> str:
        """Return the watch URL of the newest video in *playlist_id*."""
        api_key = os.getenv("GOOGLE_API_KEY")
        video_api = (
            f"https://www.googleapis.com/youtube/v3/playlistItems?part=snippet&maxResults=1&"
            f"playlistId={playlist_id}&key={api_key}"
        )
        data = requests.get(video_api, timeout=10).json()
        video_id = data["items"][0]["snippet"]["resourceId"]["videoId"]
        # was an f-string carrying %s placeholders; use lazy %-style args instead
        logging.info("Latest video %s from %s", video_id, data["items"][0]["snippet"]["channelTitle"])
        return f"https://www.youtube.com/watch?v={video_id}"

    def has_newer_update(self, channel_id: str) -> str:
        """Return the new video URL if the channel has one, else implicitly None."""
        self.cur.execute("SELECT playlist,latest_video FROM channel WHERE channel_id=%s", (channel_id,))
        data = self.cur.fetchone()
        playlist_id = data[0]
        old_video = data[1]
        newest_video = self.get_latest_video(playlist_id)
        if old_video != newest_video:
            logging.info("Newer update found for %s %s", channel_id, newest_video)
            self.cur.execute("UPDATE channel SET latest_video=%s WHERE channel_id=%s", (newest_video, channel_id))
            self.con.commit()
            return newest_video

    def get_user_subscription(self, user_id: int) -> str:
        """Markdown list of everything *user_id* is subscribed to."""
        self.cur.execute(
            """
            select title, link, channel.channel_id from channel, subscribe
            where subscribe.user_id = %s and channel.channel_id = subscribe.channel_id
            """,
            (user_id,),
        )
        data = self.cur.fetchall()
        text = ""
        for item in data:
            text += "[{}]({}) `{}\n`".format(*item)
        return text

    def group_subscriber(self) -> dict:
        """Return {"channel_id": [user_id, ...]} for all valid subscriptions."""
        self.cur.execute("select * from subscribe where is_valid=1")
        data = self.cur.fetchall()
        group = {}
        for item in data:
            group.setdefault(item[1], []).append(item[0])
        logging.info("Checking periodic subscriber...")
        return group

    def deactivate_user_subscription(self, user_id: int):
        """Soft-disable all of a user's subscriptions (is_valid=0)."""
        self.cur.execute("UPDATE subscribe set is_valid=0 WHERE user_id=%s", (user_id,))
        self.con.commit()

    def sub_count(self) -> str:
        """Admin summary of every subscription in the system."""
        sql = """
        select user_id, channel.title, channel.link
        from subscribe, channel where subscribe.channel_id = channel.channel_id
        """
        self.cur.execute(sql)
        data = self.cur.fetchall()
        text = f"Total {len(data)} subscriptions found.\n\n"
        for item in data:
            text += "{} ==> [{}]({})\n".format(*item)
        return text

    def del_cache(self, user_link: str) -> int:
        """Drop all Redis "cache" entries whose key starts with the canonical link."""
        unique = self.extract_canonical_link(user_link)
        caches = self.r.hgetall("cache")
        count = 0
        for key in caches:
            if key.startswith(unique):
                count += self.del_send_cache(key)
        return count
177
+
178
+
179
if __name__ == "__main__":
    # quick manual check of canonical-link extraction (hits the network)
    print(Channel.extract_canonical_link("https://www.youtube.com/shorts/KkbYbknjPBM"))
ytdlbot/client_init.py ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/local/bin/python3
2
+ # coding: utf-8
3
+
4
+ # ytdlbot - client_init.py
5
+ # 12/29/21 16:20
6
+ #
7
+
8
+ __author__ = "Benny <[email protected]>"
9
+
10
+ from pyrogram import Client
11
+
12
+ from config import APP_HASH, APP_ID, PYRO_WORKERS, TOKEN, IPv6
13
+
14
+
15
def create_app(name: str, workers: int = PYRO_WORKERS) -> Client:
    """Return a pyrogram bot Client configured from the config module.

    Credentials (APP_ID, APP_HASH, TOKEN) and the IPv6 preference are taken
    from config; only the session name and worker count vary per caller.
    """
    options = dict(bot_token=TOKEN, workers=workers, ipv6=IPv6)
    return Client(name, APP_ID, APP_HASH, **options)
ytdlbot/config.py ADDED
@@ -0,0 +1,79 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/local/bin/python3
# coding: utf-8

# ytdlbot - config.py
# 8/28/21 15:01
#
# Central configuration: every tunable is read from the environment with a
# fallback default, so a deployment can override values without code changes.

__author__ = "Benny <benny.think@gmail.com>"

import os

from blinker import signal

# general settings
WORKERS: int = int(os.getenv("WORKERS", 10))
PYRO_WORKERS: int = int(os.getenv("PYRO_WORKERS", 100))
# SECURITY(review): these Telegram credentials were committed in plain text and
# must be rotated; env vars now take precedence over the committed defaults.
APP_ID: int = int(os.getenv("APP_ID", 20295350))
APP_HASH: str = os.getenv("APP_HASH", "805a0a86f3b382d904617d0a2fd4fb6f")
TOKEN: str = os.getenv("TOKEN", "6718500924:AAHljBZcqaegxtDMOODLomnbeR625E-s5uw")

REDIS = os.getenv("REDIS", "redis")

# Telegram bot upload hard limit (2 GB)
TG_MAX_SIZE = 2000 * 1024 * 1024
# TG_MAX_SIZE = 10 * 1024 * 1024

# cache expiry in seconds (24 h)
EXPIRE = 24 * 3600

ENABLE_VIP = os.getenv("VIP", False)
OWNER: str = os.getenv("OWNER", "navpan18")

# limitation settings
AUTHORIZED_USER: str = os.getenv("AUTHORIZED_USER", "")
# membership requires: the format could be username(without @ sign)/chat_id of channel or group.
# You need to add the bot to this group/channel as admin
REQUIRED_MEMBERSHIP: str = os.getenv("REQUIRED_MEMBERSHIP", "")

# celery related
IS_BACKUP_BOT = os.getenv("IS_BACKUP_BOT")
ENABLE_CELERY = os.getenv("ENABLE_CELERY", False)
# backup bot uses redis db 1 so the two bots never share a broker queue
if IS_BACKUP_BOT:
    BROKER = os.getenv("BROKER", f"redis://{REDIS}:6379/1")
else:
    BROKER = os.getenv("BROKER", f"redis://{REDIS}:6379/0")

MYSQL_HOST = os.getenv("MYSQL_HOST", "mysql")
MYSQL_USER = os.getenv("MYSQL_USER", "root")
MYSQL_PASS = os.getenv("MYSQL_PASS", "root")

AUDIO_FORMAT = os.getenv("AUDIO_FORMAT")
# group/channel every download is sent to before being forwarded to the user
ARCHIVE_ID = int(os.getenv("ARCHIVE_ID", -1002047782676))

IPv6 = os.getenv("IPv6", False)
ENABLE_FFMPEG = os.getenv("ENABLE_FFMPEG", False)

PLAYLIST_SUPPORT = True
M3U8_SUPPORT = True
ENABLE_ARIA2 = os.getenv("ENABLE_ARIA2", False)

RATE_LIMIT = os.getenv("RATE_LIMIT", 120)
RCLONE_PATH = os.getenv("RCLONE")
# This will set the value for the tmpfile path(download path) if it is set.
# If TMPFILE is not set, it will return None and use system's default temporary file path.
# Please ensure that the directory exists and you have necessary permissions to write to it.
# If you don't know what this is just leave it as it is.
TMPFILE_PATH = os.getenv("TMPFILE")

# payment settings
AFD_LINK = os.getenv("AFD_LINK", "https://afdian.net/@BennyThink")
COFFEE_LINK = os.getenv("COFFEE_LINK", "https://www.buymeacoffee.com/bennythink")
COFFEE_TOKEN = os.getenv("COFFEE_TOKEN")
AFD_TOKEN = os.getenv("AFD_TOKEN")
AFD_USER_ID = os.getenv("AFD_USER_ID")
PROVIDER_TOKEN = os.getenv("PROVIDER_TOKEN") or "1234"
FREE_DOWNLOAD = 20000
TOKEN_PRICE = os.getenv("BUY_UNIT", 20)  # one USD=20 credits
TRONGRID_KEY = os.getenv("TRONGRID_KEY", "").split(",")
# the default mnemonic is for nile testnet
TRON_MNEMONIC = os.getenv("TRON_MNEMONIC", "cram floor today legend service drill pitch leaf car govern harvest soda")
TRX_SIGNAL = signal("trx_received")
ytdlbot/constant.py ADDED
@@ -0,0 +1,117 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/local/bin/python3
2
+ # coding: utf-8
3
+
4
+ # ytdlbot - constant.py
5
+ # 8/16/21 16:59
6
+ #
7
+
8
+ __author__ = "Benny <[email protected]>"
9
+
10
+ import os
11
+
12
+ from config import (
13
+ AFD_LINK,
14
+ COFFEE_LINK,
15
+ ENABLE_CELERY,
16
+ FREE_DOWNLOAD,
17
+ REQUIRED_MEMBERSHIP,
18
+ TOKEN_PRICE,
19
+ )
20
+ from database import InfluxDB
21
+ from utils import get_func_queue
22
+
23
+
24
class BotText:
    """All user-facing message templates for the bot, plus two helpers that
    render dynamic status text (queue position, worker health).
    """

    start = """
Welcome to YouTube Download bot. Type /help for more information.
Backup bot: @benny_2ytdlbot
Join https://t.me/+OGRC8tp9-U9mZDZl for updates."""

    help = f"""
1. If the bot doesn't work, try again or join https://t.me/+OGRC8tp9-U9mZDZl for updates.

2. Source code: https://github.com/tgbot-collection/ytdlbot
    """

    about = "YouTube Downloader by @BennyThink.\n\nOpen source on GitHub: https://github.com/tgbot-collection/ytdlbot"

    # Purchase terms/prices; interpolates the configured free quota and token price.
    buy = f"""
**Terms:**
1. You can use this bot to download video for {FREE_DOWNLOAD} times within a 24-hour period.

2. You can buy additional download tokens, valid permanently.

3. Refunds are possible, contact me if you need that @BennyThink

4. Download for paid user will be automatically changed to Local mode to avoid queuing.

**Price:**
valid permanently
1. 1 USD == {TOKEN_PRICE} tokens
2. 7 CNY == {TOKEN_PRICE} tokens
3. 10 TRX == {TOKEN_PRICE} tokens

**Payment options:**
Pay any amount you want. For example you can send 20 TRX for {TOKEN_PRICE * 2} tokens.
1. AFDIAN(AliPay, WeChat Pay and PayPal): {AFD_LINK}
2. Buy me a coffee: {COFFEE_LINK}
3. Telegram Bot Payment(Stripe), please click Bot Payment button.
4. TRON(TRX), please click TRON(TRX) button.

**After payment:**
1. Afdian: attach order number with /redeem command (e.g., `/redeem 123456`).
2. Buy Me a Coffee: attach email with /redeem command (e.g., `/redeem [email protected]`). **Use different email each time.**
3. Telegram Payment & Tron(TRX): automatically activated within 60s. Check /start to see your balance.

Want to buy more token with Telegram payment? Let's say 100? Here you go! `/buy 123`
    """

    private = "This bot is for private use"

    membership_require = f"You need to join this group or channel to use this bot\n\nhttps://t.me/{REQUIRED_MEMBERSHIP}"

    # Formatted with .format(video_quality, sending_format) by the caller.
    settings = """
Please choose the preferred format and video quality for your video. These settings only **apply to YouTube videos**.

High quality is recommended. Medium quality aims to 720P, while low quality is 480P.

If you choose to send the video as a document, it will not be possible to stream it.

Your current settings:
Video quality: **{0}**
Sending format: **{1}**
    """
    # Extra operator-defined text appended to messages (empty by default).
    custom_text = os.getenv("CUSTOM_TEXT", "")

    @staticmethod
    def get_receive_link_text() -> str:
        """Acknowledgement text shown when a link is accepted, mentioning the
        reserved-queue backlog when Celery mode has one."""
        reserved = get_func_queue("reserved")
        if ENABLE_CELERY and reserved:
            text = f"Your tasks was added to the reserved queue {reserved}. Processing...\n\n"
        else:
            text = "Your task was added to active queue.\nProcessing...\n\n"

        return text

    @staticmethod
    def ping_worker() -> str:
        """One line of health info per Celery worker: status, active tasks,
        load averages and the code revision each worker reports."""
        # Imported lazily to avoid a circular import at module load time.
        from tasks import app as celery_app

        workers = InfluxDB().extract_dashboard_data()
        # [{'celery@BennyのMBP': 'abc'}, {'celery@BennyのMBP': 'abc'}]
        response = celery_app.control.broadcast("ping_revision", reply=True)
        revision = {}
        for item in response:
            revision.update(item)

        text = ""
        for worker in workers:
            fields = worker["fields"]
            hostname = worker["tags"]["hostname"]
            # ✅ only for an exact True status; anything else renders ❌.
            status = {True: "✅"}.get(fields["status"], "❌")
            active = fields["active"]
            load = "{},{},{}".format(fields["load1"], fields["load5"], fields["load15"])
            rev = revision.get(hostname, "")
            text += f"{status}{hostname} **{active}** {load} {rev}\n"

        return text
ytdlbot/database.py ADDED
@@ -0,0 +1,375 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/local/bin/python3
2
+ # coding: utf-8
3
+
4
+ # ytdlbot - database.py
5
+ # 12/7/21 16:57
6
+ #
7
+
8
+ __author__ = "Benny <[email protected]>"
9
+
10
+ import base64
11
+ import contextlib
12
+ import datetime
13
+ import logging
14
+ import os
15
+ import re
16
+ import sqlite3
17
+ import subprocess
18
+ import time
19
+ from io import BytesIO
20
+
21
+ import fakeredis
22
+ import pymysql
23
+ import redis
24
+ import requests
25
+ from beautifultable import BeautifulTable
26
+ from influxdb import InfluxDBClient
27
+
28
+ from config import MYSQL_HOST, MYSQL_PASS, MYSQL_USER, REDIS, IS_BACKUP_BOT
29
+
30
# Shared in-memory SQLite connection backing the MySQL fallback (FakeMySQL/Cursor).
# check_same_thread=False because cursors may be created from multiple threads.
init_con = sqlite3.connect(":memory:", check_same_thread=False)
31
+
32
+
33
+ class FakeMySQL:
34
+ @staticmethod
35
+ def cursor() -> "Cursor":
36
+ return Cursor()
37
+
38
+ def commit(self):
39
+ pass
40
+
41
+ def close(self):
42
+ pass
43
+
44
+ def ping(self, reconnect):
45
+ pass
46
+
47
+
48
class Cursor:
    """Cursor adapter that rewrites MySQL-dialect SQL so it runs on the
    shared in-memory SQLite fallback connection.
    """

    def __init__(self):
        # Every fallback cursor shares the module-level SQLite connection.
        self.con = init_con
        self.cur = self.con.cursor()

    def execute(self, *args, **kwargs):
        """Execute a MySQL-style statement after rewriting it for SQLite.

        Statements SQLite cannot parse are silently ignored so MySQL-only
        DDL does not crash the fallback path.
        """
        sql = self.sub(args[0])
        new_args = (sql,) + args[1:]
        with contextlib.suppress(sqlite3.OperationalError):
            return self.cur.execute(*new_args, **kwargs)

    def fetchall(self):
        return self.cur.fetchall()

    def fetchone(self):
        return self.cur.fetchone()

    @staticmethod
    def sub(sql):
        """Rewrite a MySQL statement into SQLite dialect.

        * strips trailing ``CHARSET=...`` table options (any letter case);
        * converts ``%s`` placeholders to SQLite's ``?``.

        Bug fix: the original passed ``re.IGNORECASE`` as ``re.sub``'s
        positional *count* argument, so the flag was never applied (mixed
        case like ``Charset`` survived) and at most two occurrences were
        replaced. ``flags=`` is now passed explicitly.
        """
        sql = re.sub(r"CHARSET.*", "", sql, flags=re.IGNORECASE)
        sql = sql.replace("%s", "?")
        return sql
70
+
71
+
72
class Redis:
    """Redis wrapper for metrics counters, the sent-file cache and the
    operator usage report; transparently falls back to an in-process
    fakeredis instance when the real server is unreachable.
    """

    def __init__(self):
        # The backup bot isolates its data in redis db 1.
        db = 1 if IS_BACKUP_BOT else 0
        try:
            self.r = redis.StrictRedis(host=REDIS, db=db, decode_responses=True)
            self.r.ping()
        except Exception:
            # Same API, no server required.
            self.r = fakeredis.FakeStrictRedis(host=REDIS, db=db, decode_responses=True)

        db_banner = "=" * 20 + "DB data" + "=" * 20
        quota_banner = "=" * 20 + "Celery" + "=" * 20
        metrics_banner = "=" * 20 + "Metrics" + "=" * 20
        usage_banner = "=" * 20 + "Usage" + "=" * 20
        vnstat_banner = "=" * 20 + "vnstat" + "=" * 20
        # %-style template filled by show_usage() with the five report sections.
        self.final_text = f"""
{db_banner}
%s


{vnstat_banner}
%s


{quota_banner}
%s


{metrics_banner}
%s


{usage_banner}
%s
    """
        super().__init__()

    def __del__(self):
        self.r.close()

    def update_metrics(self, metrics: str):
        """Increment both the all-time and the per-day counter for *metrics*."""
        logging.info(f"Setting metrics: {metrics}")
        all_ = f"all_{metrics}"
        today = f"today_{metrics}"
        self.r.hincrby("metrics", all_)
        self.r.hincrby("metrics", today)

    @staticmethod
    def generate_table(header, all_data: list):
        """Build a BeautifulTable with 1-based row numbers from rows of cells."""
        table = BeautifulTable()
        for data in all_data:
            table.rows.append(data)
        table.columns.header = header
        table.rows.header = [str(i) for i in range(1, len(all_data) + 1)]
        return table

    def show_usage(self):
        """Assemble the full operator report: payments, vnstat traffic,
        Celery worker health, metric counters and per-user usage."""
        db = MySQL()
        db.cur.execute("select user_id,payment_amount,old_user,token from payment")
        data = db.cur.fetchall()
        fd = []
        for item in data:
            fd.append([item[0], item[1], item[2], item[3]])
        db_text = self.generate_table(["ID", "pay amount", "old user", "token"], fd)

        # Aggregate counters: keys prefixed "today"/"all".
        fd = []
        hash_keys = self.r.hgetall("metrics")
        for key, value in hash_keys.items():
            if re.findall(r"^today|all", key):
                fd.append([key, value])
        fd.sort(key=lambda x: x[0])
        metrics_text = self.generate_table(["name", "count"], fd)

        # Per-user counters: keys that contain digits (user ids).
        fd = []
        for key, value in hash_keys.items():
            if re.findall(r"\d+", key):
                fd.append([key, value])
        fd.sort(key=lambda x: int(x[-1]), reverse=True)
        usage_text = self.generate_table(["UserID", "count"], fd)

        worker_data = InfluxDB.get_worker_data()
        fd = []
        for item in worker_data["data"]:
            fd.append(
                [
                    item.get("hostname", 0),
                    item.get("status", 0),
                    item.get("active", 0),
                    item.get("processed", 0),
                    item.get("task-failed", 0),
                    item.get("task-succeeded", 0),
                    ",".join(str(i) for i in item.get("loadavg", [])),
                ]
            )

        worker_text = self.generate_table(
            ["worker name", "status", "active", "processed", "failed", "succeeded", "Load Average"], fd
        )

        # vnstat
        # NOTE(review): interface names (en0/eth0) and binary paths are
        # hard-coded per platform — confirm they match the host.
        if os.uname().sysname == "Darwin":
            cmd = "/opt/homebrew/bin/vnstat -i en0".split()
        else:
            cmd = "/usr/bin/vnstat -i eth0".split()
        vnstat_text = subprocess.check_output(cmd).decode("u8")
        return self.final_text % (db_text, vnstat_text, worker_text, metrics_text, usage_text)

    def reset_today(self):
        """Drop all per-day counters (run from a daily cron)."""
        pairs = self.r.hgetall("metrics")
        for k in pairs:
            if k.startswith("today"):
                self.r.hdel("metrics", k)

    def user_count(self, user_id):
        # Per-user download counter, keyed directly by user id.
        self.r.hincrby("metrics", user_id)

    def generate_file(self):
        """Return the usage report as an in-memory text file named by timestamp."""
        text = self.show_usage()
        file = BytesIO()
        file.write(text.encode("u8"))
        date = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
        file.name = f"{date}.txt"
        return file

    def add_send_cache(self, unique: str, file_id: str):
        # Cache Telegram file_id so repeated downloads can be forwarded instantly.
        self.r.hset("cache", unique, file_id)

    def get_send_cache(self, unique) -> str:
        return self.r.hget("cache", unique)

    def del_send_cache(self, unique):
        return self.r.hdel("cache", unique)
203
+
204
+
205
class MySQL:
    """MySQL access layer. Creates the schema on first use and degrades to
    the in-memory SQLite stand-in (FakeMySQL) when the server is down.
    """

    # Payment ledger: one row per purchase/grant for a user.
    vip_sql = """
    CREATE TABLE if not exists `payment`
    (
        `user_id`        bigint NOT NULL,
        `payment_amount` float        DEFAULT NULL,
        `payment_id`     varchar(256) DEFAULT NULL,
        `old_user`       tinyint(1)   DEFAULT NULL,
        `token`          int          DEFAULT NULL,
        UNIQUE KEY `payment_id` (`payment_id`)
    ) CHARSET = utf8mb4
    """

    # Per-user preferences: quality, send method and download mode.
    settings_sql = """
    create table if not exists settings
    (
        user_id    bigint          not null,
        resolution varchar(128) null,
        method     varchar(64) null,
        mode       varchar(32) default 'Celery' null,
        constraint settings_pk
            primary key (user_id)
    );
    """

    # Subscribed channels and the last video seen per channel.
    channel_sql = """
    create table if not exists channel
    (
        link         varchar(256) null,
        title        varchar(256) null,
        description  text null,
        channel_id   varchar(256),
        playlist     varchar(256) null,
        latest_video varchar(256) null,
        constraint channel_pk
            primary key (channel_id)
    ) CHARSET=utf8mb4;
    """

    # Many-to-many user ↔ channel subscriptions.
    subscribe_sql = """
    create table if not exists subscribe
    (
        user_id    bigint null,
        channel_id varchar(256) null,
        is_valid   boolean default 1 null
    ) CHARSET=utf8mb4;
    """

    def __init__(self):
        try:
            self.con = pymysql.connect(
                host=MYSQL_HOST, user=MYSQL_USER, passwd=MYSQL_PASS, db="ytdl", charset="utf8mb4"
            )
        except Exception:
            # MySQL unreachable — fall back to the SQLite-backed stub.
            self.con = FakeMySQL()

        self.con.ping(reconnect=True)
        self.cur = self.con.cursor()
        self.init_db()
        super().__init__()

    def init_db(self):
        """Create all tables; idempotent (CREATE TABLE IF NOT EXISTS)."""
        self.cur.execute(self.vip_sql)
        self.cur.execute(self.settings_sql)
        self.cur.execute(self.channel_sql)
        self.cur.execute(self.subscribe_sql)
        self.con.commit()

    def __del__(self):
        self.con.close()

    def get_user_settings(self, user_id: int) -> tuple:
        """Return the settings row for *user_id*, or the defaults
        (100, "high", "video", "Celery") when no row exists."""
        self.cur.execute("SELECT * FROM settings WHERE user_id = %s", (user_id,))
        data = self.cur.fetchone()
        if data is None:
            return 100, "high", "video", "Celery"
        return data

    def set_user_settings(self, user_id: int, field: str, value: str):
        """Upsert a single settings *field* for *user_id*.

        NOTE(review): *field* is spliced into the UPDATE statement via an
        f-string — callers must only ever pass trusted column names.
        """
        cur = self.con.cursor()
        cur.execute("SELECT * FROM settings WHERE user_id = %s", (user_id,))
        data = cur.fetchone()
        if data is None:
            # First write for this user: fill in a sensible sibling default.
            resolution = method = ""
            if field == "resolution":
                method = "video"
                resolution = value
            if field == "method":
                method = value
                resolution = "high"
            cur.execute("INSERT INTO settings VALUES (%s,%s,%s,%s)", (user_id, resolution, method, "Celery"))
        else:
            cur.execute(f"UPDATE settings SET {field} =%s WHERE user_id = %s", (value, user_id))
        self.con.commit()
300
+
301
+
302
class InfluxDB:
    """Scrapes Celery worker stats from Flower's dashboard JSON and writes
    them, plus bot metrics from Redis, into an InfluxDB database.
    """

    def __init__(self):
        self.client = InfluxDBClient(host=os.getenv("INFLUX_HOST", "192.168.7.233"), database="celery")
        # Last dashboard snapshot; populated by collect_data()/extract_dashboard_data().
        self.data = None

    def __del__(self):
        self.client.close()

    @staticmethod
    def get_worker_data() -> dict:
        """Fetch Flower's dashboard JSON (basic auth); empty data on failure."""
        username = os.getenv("FLOWER_USERNAME", "benny")
        password = os.getenv("FLOWER_PASSWORD", "123456abc")
        token = base64.b64encode(f"{username}:{password}".encode()).decode()
        headers = {"Authorization": f"Basic {token}"}
        r = requests.get("https://celery.dmesg.app/dashboard?json=1", headers=headers)
        if r.status_code != 200:
            return dict(data=[])
        return r.json()

    def extract_dashboard_data(self):
        """Convert the Flower snapshot into InfluxDB point dicts, one per worker."""
        self.data = self.get_worker_data()
        json_body = []
        for worker in self.data["data"]:
            load1, load5, load15 = worker["loadavg"]
            t = {
                "measurement": "tasks",
                "tags": {
                    "hostname": worker["hostname"],
                },
                "time": datetime.datetime.utcnow(),
                "fields": {
                    "task-received": worker.get("task-received", 0),
                    "task-started": worker.get("task-started", 0),
                    "task-succeeded": worker.get("task-succeeded", 0),
                    "task-failed": worker.get("task-failed", 0),
                    "active": worker.get("active", 0),
                    "status": worker.get("status", False),
                    "load1": load1,
                    "load5": load5,
                    "load15": load15,
                },
            }
            json_body.append(t)
        return json_body

    def __fill_worker_data(self):
        json_body = self.extract_dashboard_data()
        self.client.write_points(json_body)

    def __fill_overall_data(self):
        # One aggregate point: total active tasks across all workers.
        active = sum([i["active"] for i in self.data["data"]])
        json_body = [{"measurement": "active", "time": datetime.datetime.utcnow(), "fields": {"active": active}}]
        self.client.write_points(json_body)

    def __fill_redis_metrics(self):
        # Mirror all per-day ("today*") Redis counters into InfluxDB.
        json_body = [{"measurement": "metrics", "time": datetime.datetime.utcnow(), "fields": {}}]
        r = Redis().r
        hash_keys = r.hgetall("metrics")
        for key, value in hash_keys.items():
            if re.findall(r"^today", key):
                json_body[0]["fields"][key] = int(value)

        self.client.write_points(json_body)

    def collect_data(self):
        """Cron entry point: best-effort collection, silent when INFLUX_HOST unset."""
        if os.getenv("INFLUX_HOST") is None:
            return

        with contextlib.suppress(Exception):
            self.data = self.get_worker_data()
            self.__fill_worker_data()
            self.__fill_overall_data()
            self.__fill_redis_metrics()
            logging.debug("InfluxDB data was collected.")
ytdlbot/downloader.py ADDED
@@ -0,0 +1,281 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/local/bin/python3
2
+ # coding: utf-8
3
+
4
+ # ytdlbot - downloader.py
5
+ # 8/14/21 16:53
6
+ #
7
+
8
+ __author__ = "Benny <[email protected]>"
9
+
10
+ import logging
11
+ import os
12
+ import pathlib
13
+ import random
14
+ import re
15
+ import subprocess
16
+ import time
17
+ import traceback
18
+ from io import StringIO
19
+ from unittest.mock import MagicMock
20
+
21
+ import fakeredis
22
+ import ffmpeg
23
+ import ffpb
24
+ import filetype
25
+ import requests
26
+ import yt_dlp as ytdl
27
+ from pyrogram import types
28
+ from tqdm import tqdm
29
+
30
+ from config import AUDIO_FORMAT, ENABLE_ARIA2, ENABLE_FFMPEG, TG_MAX_SIZE, IPv6
31
+ from limit import Payment
32
+ from utils import adjust_formats, apply_log_formatter, current_time, sizeof_fmt
33
+
34
# Process-local fake Redis used purely as a TTL cache to rate-limit Telegram
# message edits — no external server needed.
r = fakeredis.FakeStrictRedis()
apply_log_formatter()
36
+
37
+
38
def edit_text(bot_msg: types.Message, text: str):
    """Edit *bot_msg* in place, throttled to at most one edit per ~3 seconds.

    A short-lived key in the local fake-redis marks messages that were just
    edited; updates arriving while the key is alive are silently dropped.
    """
    throttle_key = f"{bot_msg.chat.id}-{bot_msg.id}"
    if r.exists(throttle_key):
        # An edit went out recently for this message — skip this update.
        return
    # Small random jitter so concurrent workers don't edit simultaneously.
    time.sleep(random.random())
    r.set(throttle_key, "ok", ex=3)
    bot_msg.edit_text(text)
45
+
46
+
47
def tqdm_progress(desc, total, finished, speed="", eta=""):
    """Render a Markdown progress message (bar + counts + optional speed/ETA).

    A throwaway tqdm instance is pointed at a StringIO buffer so we can
    reuse tqdm's bar rendering without writing to a real terminal.
    """

    def more(title, initial):
        # Only emit a "Label value" line when a value was supplied.
        if initial:
            return f"{title} {initial}"
        else:
            return ""

    f = StringIO()
    tqdm(
        total=total,
        initial=finished,
        file=f,
        ascii=False,
        unit_scale=True,
        ncols=30,
        bar_format="{l_bar}{bar} |{n_fmt}/{total_fmt} ",
    )
    raw_output = f.getvalue()
    # bar_format above guarantees two "|" separators: [percent+bar | counts].
    tqdm_output = raw_output.split("|")
    progress = f"`[{tqdm_output[1]}]`"
    # Strip the stray ANSI cursor-up fragment tqdm leaves behind.
    detail = tqdm_output[2].replace("[A", "")
    text = f"""
{desc}

{progress}
{detail}
{more("Speed:", speed)}
{more("ETA:", eta)}
"""
    f.close()
    return text
78
+
79
+
80
def remove_bash_color(text):
    """Strip the ANSI escape/colour fragments yt-dlp embeds in its
    progress strings, leaving only the human-readable text."""
    ansi_bits = re.compile(r"\u001b|\[0;94m|\u001b\[0m|\[0;32m|\[0m|\[0;33m")
    return ansi_bits.sub("", text)
82
+
83
+
84
def download_hook(d: dict, bot_msg):
    """yt-dlp progress hook: mirror download progress into the Telegram message.

    Celery workers may run far from the user, so message edits are throttled
    via ``edit_text`` plus a short-lived per-user+URL key.
    """
    if d["status"] != "downloading":
        return

    # the key is user_id + download_link
    origin = d["info_dict"]["original_url"]
    throttle_key = f"{bot_msg.chat.id}-{origin}"

    done = d.get("downloaded_bytes", 0)
    size = d.get("total_bytes") or d.get("total_bytes_estimate", 0)
    # Abort early — Telegram would reject the upload anyway.
    if size > TG_MAX_SIZE:
        raise Exception(f"Your download file size {sizeof_fmt(size)} is too large for Telegram.")

    speed = remove_bash_color(d.get("_speed_str", "N/A"))
    eta = remove_bash_color(d.get("_eta_str", d.get("eta")))
    edit_text(bot_msg, tqdm_progress("Downloading...", size, done, speed, eta))
    r.set(throttle_key, "ok", ex=5)
103
+
104
+
105
def upload_hook(current, total, bot_msg):
    """Pyrogram upload progress callback: mirror progress into the message."""
    edit_text(bot_msg, tqdm_progress("Uploading...", total, current))
108
+
109
+
110
def convert_to_mp4(video_paths: list, bot_msg):
    """Re-encode flv/webm entries of *video_paths* to mp4, replacing them
    in place.

    Telegram cannot stream flv/webm, so only those mime types are converted;
    everything else is left untouched. Aborts the whole batch if ffmpeg is
    disabled for this deployment.
    """
    needs_conversion = ["video/x-flv", "video/webm"]
    for path in video_paths:
        # if we can't guess file type, we assume it's video/mp4
        mime = getattr(filetype.guess(path), "mime", "video/mp4")
        if mime not in needs_conversion:
            continue
        if not can_convert_mp4(path, bot_msg.chat.id):
            logging.warning("Conversion abort for %s", bot_msg.chat.id)
            bot_msg._client.send_message(bot_msg.chat.id, "Can't convert your video. ffmpeg has been disabled.")
            break
        edit_text(bot_msg, f"{current_time()}: Converting {path.name} to mp4. Please wait.")
        logging.info("Detected %s, converting to mp4...", mime)
        target = path.with_suffix(".mp4")
        run_ffmpeg_progressbar(["ffmpeg", "-y", "-i", path, target], bot_msg)
        # Swap the original entry so callers see the converted file.
        video_paths[video_paths.index(path)] = target
127
+
128
+
129
class ProgressBar(tqdm):
    # Class-level slot used to smuggle the Telegram message into instances:
    # ffpb instantiates the tqdm subclass itself, so the message cannot be
    # passed through __init__ — run_ffmpeg_progressbar assigns ProgressBar.b
    # right before invoking ffpb.
    b = None

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Capture whatever message is currently stashed on the class.
        self.bot_msg = self.b

    def update(self, n=1):
        """Advance the bar and mirror conversion progress into Telegram."""
        super().update(n)
        t = tqdm_progress("Converting...", self.total, self.n)
        edit_text(self.bot_msg, t)
140
+
141
+
142
def run_ffmpeg_progressbar(cmd_list: list, bm):
    """Run an ffmpeg command with progress mirrored into the Telegram message.

    ffpb invokes its own ``ffmpeg`` binary, so the leading "ffmpeg" element
    of *cmd_list* is dropped before handing the arguments over.
    """
    ffmpeg_args = list(cmd_list)[1:]
    # Hand the message to ProgressBar instances via its class attribute.
    ProgressBar.b = bm
    ffpb.main(ffmpeg_args, tqdm=ProgressBar)
146
+
147
+
148
def can_convert_mp4(video_path, uid):
    """Whether ffmpeg conversion is enabled for this deployment.

    *video_path* and *uid* are currently unused but kept so the call sites
    (and a potential per-user policy) stay stable.
    """
    return bool(ENABLE_FFMPEG)
152
+
153
+
154
def ytdl_download(url: str, tempdir: str, bm, **kwargs) -> list:
    """Download *url* into *tempdir* with yt-dlp and return the file paths.

    Tries a list of format selectors (and, with IPv6 enabled, both address
    families) until one succeeds, then applies the user's send-type
    conversion (mp4 re-encode or audio extraction).

    :param url: media URL; Google Drive and Instagram get special handling.
    :param tempdir: directory all downloaded files are written into.
    :param bm: the bot's status message, updated with progress.
    :param kwargs: ``hijack`` may force a specific format selector.
    :raises Exception: with the last traceback when every attempt fails.
    """
    payment = Payment()
    chat_id = bm.chat.id
    hijack = kwargs.get("hijack")
    output = pathlib.Path(tempdir, "%(title).70s.%(ext)s").as_posix()
    ydl_opts = {
        "progress_hooks": [lambda d: download_hook(d, bm)],
        "outtmpl": output,
        "restrictfilenames": False,
        "quiet": True,
    }
    if ENABLE_ARIA2:
        ydl_opts["external_downloader"] = "aria2c"
        ydl_opts["external_downloader_args"] = [
            "--min-split-size=1M",
            "--max-connection-per-server=16",
            "--max-concurrent-downloads=16",
            "--split=16",
        ]
    if url.startswith("https://drive.google.com"):
        # Always use the `source` format for Google Drive URLs.
        formats = ["source"]
    else:
        # Use the default formats for other URLs.
        formats = [
            # webm , vp9 and av01 are not streamable on telegram, so we'll extract only mp4
            "bestvideo[ext=mp4][vcodec!*=av01][vcodec!*=vp09]+bestaudio[ext=m4a]/bestvideo+bestaudio",
            "bestvideo[vcodec^=avc]+bestaudio[acodec^=mp4a]/best[vcodec^=avc]/best",
            None,
        ]
    adjust_formats(chat_id, url, formats, hijack)
    # Instagram is served by a dedicated helper; skip yt-dlp entirely.
    if download_instagram(url, tempdir):
        return list(pathlib.Path(tempdir).glob("*"))

    address = ["::", "0.0.0.0"] if IPv6 else [None]
    error = None
    video_paths = None
    for format_ in formats:
        ydl_opts["format"] = format_
        for addr in address:
            # IPv6 goes first in each format
            ydl_opts["source_address"] = addr
            try:
                logging.info("Downloading for %s with format %s", url, format_)
                with ytdl.YoutubeDL(ydl_opts) as ydl:
                    ydl.download([url])
                video_paths = list(pathlib.Path(tempdir).glob("*"))
                break
            except Exception:
                error = traceback.format_exc()
                logging.error("Download failed for %s - %s, try another way", format_, url)
        # Bug fix: stop on the first successful download. The original
        # checked `error is None`, which stays set after any earlier failed
        # attempt, so a success with a later format still fell through to
        # the remaining formats and re-downloaded the file.
        if video_paths:
            break

    if not video_paths:
        raise Exception(error)

    # convert format if necessary
    settings = payment.get_user_settings(chat_id)
    if settings[2] == "video" or isinstance(settings[2], MagicMock):
        # only convert if send type is video
        convert_to_mp4(video_paths, bm)
    if settings[2] == "audio" or hijack == "bestaudio[ext=m4a]":
        convert_audio_format(video_paths, bm)
    # split_large_video(video_paths)
    return video_paths
220
+
221
+
222
def convert_audio_format(video_paths: list, bm):
    """Turn each downloaded file into the desired audio file, in place.

    Three cases per file (AUDIO_FORMAT comes from config; None means "keep
    the source codec"):
    """
    # 1. file is audio, default format
    # 2. file is video, default format
    # 3. non default format

    for path in video_paths:
        streams = ffmpeg.probe(path)["streams"]
        if AUDIO_FORMAT is None and len(streams) == 1 and streams[0]["codec_type"] == "audio":
            # Already a bare audio file and no target format requested.
            logging.info("%s is audio, default format, no need to convert", path)
        elif AUDIO_FORMAT is None and len(streams) >= 2:
            logging.info("%s is video, default format, need to extract audio", path)
            # Fallback container if no audio stream is found below.
            audio_stream = {"codec_name": "m4a"}
            for stream in streams:
                if stream["codec_type"] == "audio":
                    audio_stream = stream
                    break
            # Name the output after the source codec so "-acodec copy" is valid.
            ext = audio_stream["codec_name"]
            new_path = path.with_suffix(f".{ext}")
            run_ffmpeg_progressbar(["ffmpeg", "-y", "-i", path, "-vn", "-acodec", "copy", new_path], bm)
            path.unlink()
            index = video_paths.index(path)
            video_paths[index] = new_path
        else:
            # Explicit target format: full re-encode.
            logging.info("Not default format, converting %s to %s", path, AUDIO_FORMAT)
            new_path = path.with_suffix(f".{AUDIO_FORMAT}")
            run_ffmpeg_progressbar(["ffmpeg", "-y", "-i", path, new_path], bm)
            path.unlink()
            index = video_paths.index(path)
            video_paths[index] = new_path
251
+
252
+
253
def split_large_video(video_paths: list):
    """Split any file above Telegram's size limit into smaller chunks.

    Each oversized file is handed to ``split-video.sh`` with a target chunk
    size of 95% of the limit, then deleted.

    :returns: the directory listing containing the chunk files when at least
        one split happened, otherwise ``None`` (original contract preserved).
    """
    original_video = None
    split = False
    for original_video in video_paths:
        size = os.stat(original_video).st_size
        if size > TG_MAX_SIZE:
            split = True
            logging.warning("file is too large %s, splitting...", size)
            # Bug fix: build argv as a list instead of str.split() on an
            # f-string — the old form broke for paths containing whitespace.
            cmd = ["sh", "split-video.sh", str(original_video), str(TG_MAX_SIZE * 0.95)]
            subprocess.check_output(cmd)
            os.remove(original_video)

    if split and original_video:
        return [i for i in pathlib.Path(original_video).parent.glob("*")]
266
+
267
+
268
def download_instagram(url: str, tempdir: str):
    """Fetch Instagram media through a local helper service into *tempdir*.

    Returns False for non-Instagram URLs so the caller falls through to
    yt-dlp; returns True for Instagram URLs even when the helper returned no
    media — NOTE(review): confirm an empty result should still short-circuit
    the yt-dlp path.
    """
    if not url.startswith("https://www.instagram.com"):
        return False

    # NOTE(review): hard-coded LAN endpoint — presumably a sidecar service
    # that resolves direct media URLs; verify it exists on the deploy host.
    resp = requests.get(f"http://192.168.6.1:15000/?url={url}").json()
    if url_results := resp.get("data"):
        for link in url_results:
            content = requests.get(link, stream=True).content
            ext = filetype.guess_extension(content)
            # id(link) only serves as a unique-ish filename inside tempdir.
            save_path = pathlib.Path(tempdir, f"{id(link)}.{ext}")
            with open(save_path, "wb") as f:
                f.write(content)

    return True
ytdlbot/flower_tasks.py ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/local/bin/python3
# coding: utf-8

# ytdlbot - flower_tasks.py
# 1/2/22 10:17
#

__author__ = "Benny <[email protected]>"

from celery import Celery

from config import BROKER

# Minimal Celery app used by Flower for monitoring: it only needs the broker
# URL, not the bot's full task configuration.
app = Celery("tasks", broker=BROKER, timezone="Europe/London")
ytdlbot/limit.py ADDED
@@ -0,0 +1,260 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/local/bin/python3
2
+ # coding: utf-8
3
+
4
+ # ytdlbot - limit.py
5
+ # 8/15/21 18:23
6
+ #
7
+
8
+ __author__ = "Benny <[email protected]>"
9
+
10
+ import hashlib
11
+ import logging
12
+ import time
13
+
14
+ import requests
15
+ from tronpy import Tron
16
+ from tronpy.exceptions import TransactionError, ValidationError
17
+ from tronpy.hdwallet import key_from_seed, seed_from_mnemonic
18
+ from tronpy.keys import PrivateKey
19
+ from tronpy.providers import HTTPProvider
20
+
21
+ from config import (
22
+ AFD_TOKEN,
23
+ AFD_USER_ID,
24
+ COFFEE_TOKEN,
25
+ EXPIRE,
26
+ FREE_DOWNLOAD,
27
+ OWNER,
28
+ TOKEN_PRICE,
29
+ TRON_MNEMONIC,
30
+ TRONGRID_KEY,
31
+ TRX_SIGNAL,
32
+ )
33
+ from database import MySQL, Redis
34
+ from utils import apply_log_formatter, current_time
35
+
36
+ apply_log_formatter()
37
+
38
+
39
class BuyMeACoffee:
    """Verify supporter payments via the Buy Me a Coffee API."""

    def __init__(self):
        self._token = COFFEE_TOKEN
        self._url = "https://developers.buymeacoffee.com/api/v1/supporters"
        # Accumulates supporter records across paginated API responses.
        self._data = []

    def _get_data(self, url):
        # Recursively walk the paginated supporters endpoint.
        d = requests.get(url, headers={"Authorization": f"Bearer {self._token}"}).json()
        self._data.extend(d["data"])
        next_page = d["next_page_url"]
        if next_page:
            self._get_data(next_page)

    def _get_bmac_status(self, email: str) -> dict:
        """Return the supporter record matching *email*, or {} when absent."""
        self._get_data(self._url)
        for user in self._data:
            if user["payer_email"] == email or user["support_email"] == email:
                return user
        return {}

    def get_user_payment(self, email: str) -> tuple[float, str]:
        """Return ``(amount_in_usd, email)``; amount is 0.0 when no order
        matches *email*."""
        order = self._get_bmac_status(email)
        price = float(order.get("support_coffee_price", 0))
        cups = float(order.get("support_coffees", 1))
        amount = price * cups
        return amount, email
65
+
66
+
67
class Afdian:
    """Verify Afdian orders via its signed open API."""

    def __init__(self):
        self._token = AFD_TOKEN
        self._user_id = AFD_USER_ID
        self._url = "https://afdian.net/api/open/query-order"

    def _generate_signature(self):
        """Build the request body with the md5 signature Afdian requires
        (over token + params + ts + user_id)."""
        data = {
            "user_id": self._user_id,
            "params": '{"x":0}',
            "ts": int(time.time()),
        }
        sign_text = "{token}params{params}ts{ts}user_id{user_id}".format(
            token=self._token, params=data["params"], ts=data["ts"], user_id=data["user_id"]
        )

        md5 = hashlib.md5(sign_text.encode("u8"))
        md5 = md5.hexdigest()
        data["sign"] = md5

        return data

    def _get_afdian_status(self, trade_no: str) -> dict:
        """Return the order matching *trade_no* among the API's recent
        orders, or {} when not found."""
        req_data = self._generate_signature()
        data = requests.post(self._url, json=req_data).json()
        # latest 50
        for order in data["data"]["list"]:
            if order["out_trade_no"] == trade_no:
                return order

        return {}

    def get_user_payment(self, trade_no: str) -> tuple[float, str]:
        """Return ``(amount_in_usd, trade_no)``; amount is 0.0 for unknown
        orders."""
        order = self._get_afdian_status(trade_no)
        amount = float(order.get("show_amount", 0))
        # convert to USD
        return amount / 7, trade_no
104
+
105
+
106
+ class TronTrx:
107
+ def __init__(self):
108
+ if TRON_MNEMONIC == "cram floor today legend service drill pitch leaf car govern harvest soda":
109
+ logging.warning("Using nile testnet")
110
+ provider = HTTPProvider(endpoint_uri="https://nile.trongrid.io")
111
+ network = "nile"
112
+ else:
113
+ provider = HTTPProvider(api_key=TRONGRID_KEY)
114
+ network = "mainnet"
115
+ self.client = Tron(provider, network=network)
116
+
117
+ def central_transfer(self, from_, index, amount: int):
118
+ logging.info("Generated key with index %s", index)
119
+ seed = seed_from_mnemonic(TRON_MNEMONIC, passphrase="")
120
+ key = PrivateKey(key_from_seed(seed, account_path=f"m/44'/195'/1'/0/{index}"))
121
+ central = self.central_wallet()
122
+ logging.info("Transfer %s TRX from %s to %s", amount, from_, central)
123
+ try:
124
+ self.client.trx.transfer(from_, central, amount).build().sign(key).broadcast()
125
+ except (TransactionError, ValidationError):
126
+ logging.error("Failed to transfer %s TRX to %s. Lower and try again.", amount, from_)
127
+ if amount > 1_100_000:
128
+ # 1.1 trx transfer fee
129
+ self.client.trx.transfer(from_, central, amount - 1_100_000).build().sign(key).broadcast()
130
+
131
+ def central_wallet(self):
132
+ wallet = self.client.generate_address_from_mnemonic(TRON_MNEMONIC, account_path="m/44'/195'/0'/0/0")
133
+ return wallet["base58check_address"]
134
+
135
def get_payment_address(self, user_id):
    """Create, persist and return a fresh TRON deposit address for `user_id`.

    The row is stored with payment_id "tron,0,<addr>,<index>" where the
    second field 0 means "unpaid"; check_payment() later flips it to 1.
    """
    # payment_id is like tron,0,TN8Mn9KKv3cSrKyrt6Xx5L18nmezbpiW31,index where 0 means unpaid
    db = MySQL()
    con = db.con
    cur = db.cur
    cur.execute("select user_id from payment where payment_id like 'tron,%'")
    data = cur.fetchall()
    # NOTE(review): using the current row count as the next derivation index
    # is race-prone if two users request addresses concurrently — confirm
    # callers are serialized, or derive the index from an auto-increment key.
    index = len(data)
    path = f"m/44'/195'/1'/0/{index}"
    logging.info("Generating address for user %s with path %s", user_id, path)
    addr = self.client.generate_address_from_mnemonic(TRON_MNEMONIC, account_path=path)["base58check_address"]
    # add row in db, unpaid
    cur.execute("insert into payment values (%s,%s,%s,%s,%s)", (user_id, 0, f"tron,0,{addr},{index}", 0, 0))
    con.commit()
    return addr
150
+
151
def check_payment(self):
    """Poll every unpaid TRON deposit address and credit tokens for funded ones.

    For each row whose payment_id is still "tron,0,<addr>,<index>":
      * read the on-chain balance (treated as 0 on any lookup failure),
      * if funded: convert the balance to tokens (balance / 10 * TOKEN_PRICE),
        mark the row paid, switch the user's mode to Local, sweep the funds to
        the central wallet and notify the user via TRX_SIGNAL.
    """
    db = MySQL()
    con = db.con
    cur = db.cur

    cur.execute("select user_id, payment_id from payment where payment_id like 'tron,0,T%'")
    data = cur.fetchall()
    for row in data:
        logging.info("Checking user payment %s", row)
        user_id = row[0]
        addr, index = row[1].split(",")[2:]
        try:
            balance = self.client.get_account_balance(addr)
        except Exception as e:
            # Fix: was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt. Unfunded/unknown accounts commonly raise
            # here, so any lookup error is treated as zero balance — but log
            # it instead of hiding it completely.
            logging.debug("Balance lookup failed for %s: %s", addr, e)
            balance = 0
        if balance:
            logging.info("User %s has %s TRX", user_id, balance)
            # paid, calc token count
            token_count = int(balance / 10 * TOKEN_PRICE)
            cur.execute(
                "update payment set token=%s,payment_id=%s where user_id=%s and payment_id like %s",
                (token_count, f"tron,1,{addr},{index}", user_id, f"tron,%{addr}%"),
            )
            cur.execute("UPDATE settings SET mode='Local' WHERE user_id=%s", (user_id,))
            con.commit()
            # get_account_balance returns TRX; central_transfer expects SUN.
            self.central_transfer(addr, index, int(balance * 1_000_000))
            logging.debug("Dispatch signal now....")
            TRX_SIGNAL.send("cron", user_id=user_id, text=f"{balance} TRX received, {token_count} tokens added.")
179
+
180
+
181
class Payment(Redis, MySQL):
    """Token accounting: free daily quota kept in Redis, purchased tokens in MySQL."""

    def check_old_user(self, user_id: int) -> tuple:
        """Return the payment row marking this user as a pre-migration user, or None."""
        self.cur.execute("SELECT * FROM payment WHERE user_id=%s AND old_user=1", (user_id,))
        data = self.cur.fetchone()
        return data

    def get_pay_token(self, user_id: int) -> int:
        """Sum the user's purchased tokens; demote token-less new users to Celery mode."""
        self.cur.execute("SELECT token, old_user FROM payment WHERE user_id=%s", (user_id,))
        data = self.cur.fetchall() or [(0, False)]
        number = sum([i[0] for i in data if i[0]])
        if number == 0 and data[0][1] != 1:
            # not old user, no token
            logging.warning("User %s has no token, set download mode to Celery", user_id)
            # change download mode to Celery
            self.set_user_settings(user_id, "mode", "Celery")
        return number

    def get_free_token(self, user_id: int) -> int:
        """Return the remaining free quota, initializing it to FREE_DOWNLOAD on first use."""
        if self.r.exists(user_id):
            return int(self.r.get(user_id))
        else:
            # set and return
            self.r.set(user_id, FREE_DOWNLOAD, ex=EXPIRE)
            return FREE_DOWNLOAD

    def get_token(self, user_id: int):
        """Return (free tokens, paid tokens, local time when the free quota resets)."""
        ttl = self.r.ttl(user_id)
        return self.get_free_token(user_id), self.get_pay_token(user_id), current_time(time.time() + ttl)

    def use_free_token(self, user_id: int):
        """Consume one free token, initializing the quota if missing."""
        if self.r.exists(user_id):
            self.r.decr(user_id, 1)
        else:
            # first time download
            # Fix: was hard-coded `5 - 1`, silently disagreeing with the
            # FREE_DOWNLOAD quota that get_free_token() advertises.
            self.r.set(user_id, FREE_DOWNLOAD - 1, ex=EXPIRE)

    def use_pay_token(self, user_id: int):
        """Consume one purchased token from the first payment row that still has some."""
        # a user may pay multiple times, so we'll need to filter the first payment with valid token
        self.cur.execute("SELECT payment_id FROM payment WHERE user_id=%s AND token>0", (user_id,))
        data = self.cur.fetchone()
        # NOTE(review): assumes a row with token>0 exists when this is called
        # (use_token only routes here after the free quota is exhausted) —
        # confirm, otherwise `data[0]` raises TypeError.
        payment_id = data[0]
        logging.info("User %s use pay token with payment_id %s", user_id, payment_id)
        self.cur.execute("UPDATE payment SET token=token-1 WHERE payment_id=%s", (payment_id,))
        self.con.commit()

    def use_token(self, user_id: int):
        """Spend one token, preferring the free quota over purchased tokens."""
        free = self.get_free_token(user_id)
        if free > 0:
            self.use_free_token(user_id)
        else:
            self.use_pay_token(user_id)

    def add_pay_user(self, pay_data: list):
        """Insert a verified payment row and switch the payer to Local download mode."""
        self.cur.execute("INSERT INTO payment VALUES (%s,%s,%s,%s,%s)", pay_data)
        self.set_user_settings(pay_data[0], "mode", "Local")
        self.con.commit()

    def verify_payment(self, user_id: int, unique: str) -> str:
        """Verify a BuyMeACoffee (email) or Afdian (order id) payment and credit tokens.

        :return: a human-readable status message for the user.
        """
        pay = BuyMeACoffee() if "@" in unique else Afdian()
        self.cur.execute("SELECT * FROM payment WHERE payment_id=%s ", (unique,))
        data = self.cur.fetchone()
        if data:
            # TODO what if a user pay twice with the same email address?
            return (
                f"Failed. Payment has been verified by other users. Please contact @{OWNER} if you have any questions."
            )

        amount, pay_id = pay.get_user_payment(unique)
        logging.info("User %s paid %s, identifier is %s", user_id, amount, unique)
        # amount is already in USD
        if amount == 0:
            return "Payment not found. Please check your payment ID or email address"
        self.add_pay_user([user_id, amount, pay_id, 0, amount * TOKEN_PRICE])
        return "Thanks! Your payment has been verified. /start to get your token details"
255
+
256
+
257
+ if __name__ == "__main__":
258
+ a = TronTrx()
259
+ # a.central_wallet()
260
+ a.check_payment()
ytdlbot/main.session ADDED
Binary file (28.7 kB). View file
 
ytdlbot/split-video.sh ADDED
@@ -0,0 +1,60 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/bin/bash
# Short script to split videos by filesize using ffmpeg by LukeLR
#
# Re-encodes the source in sequential parts, each capped at SIZELIMIT bytes
# via ffmpeg's -fs option, until the summed part durations cover the source.

# Bug fix: the script documents and reads THREE parameters ($3 = FFMPEG_ARGS)
# but the original check rejected anything except exactly two. Accept 2 or 3.
if [ $# -lt 2 ] || [ $# -gt 3 ]; then
    echo 'Illegal number of parameters. Needs 2 or 3 parameters:'
    echo 'Usage:'
    echo './split-video.sh FILE SIZELIMIT "FFMPEG_ARGS"'   # fixed unbalanced quote
    echo
    echo 'Parameters:'
    echo ' - FILE: Name of the video file to split'
    echo ' - SIZELIMIT: Maximum file size of each part (in bytes)'
    echo ' - FFMPEG_ARGS: Additional arguments to pass to each ffmpeg-call'
    echo ' (video format and quality options etc.)'
    exit 1
fi

FILE="$1"
SIZELIMIT="$2"
FFMPEG_ARGS="$3"

# Duration of the source video (integer seconds; fractional part truncated)
DURATION=$(ffprobe -i "$FILE" -show_entries format=duration -v quiet -of default=noprint_wrappers=1:nokey=1|cut -d. -f1)

# Duration that has been encoded so far
CUR_DURATION=0

# Filename of the source video (without extension)
BASENAME="${FILE%.*}"

# Extension for the video parts
#EXTENSION="${FILE##*.}"
EXTENSION="mp4"

# Number of the current video part
i=1

# Filename of the next video part
NEXTFILENAME="$BASENAME-$i.$EXTENSION"

echo "Duration of source video: $DURATION"

# Until the duration of all partial videos has reached the duration of the source video.
# NOTE: if a part comes out with zero duration this would loop forever;
# ffprobe failures on a part should be investigated rather than ignored.
while [[ $CUR_DURATION -lt $DURATION ]]; do
    # Encode next part ($FFMPEG_ARGS deliberately unquoted so multiple
    # arguments word-split into separate ffmpeg options)
    echo ffmpeg -i "$FILE" -ss "$CUR_DURATION" -fs "$SIZELIMIT" $FFMPEG_ARGS "$NEXTFILENAME"
    ffmpeg -ss "$CUR_DURATION" -i "$FILE" -fs "$SIZELIMIT" $FFMPEG_ARGS "$NEXTFILENAME"

    # Duration of the new part
    NEW_DURATION=$(ffprobe -i "$NEXTFILENAME" -show_entries format=duration -v quiet -of default=noprint_wrappers=1:nokey=1|cut -d. -f1)

    # Total duration encoded so far
    CUR_DURATION=$((CUR_DURATION + NEW_DURATION))

    i=$((i + 1))

    echo "Duration of $NEXTFILENAME: $NEW_DURATION"
    echo "Part No. $i starts at $CUR_DURATION"

    NEXTFILENAME="$BASENAME-$i.$EXTENSION"
done
ytdlbot/tasks.py ADDED
@@ -0,0 +1,491 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/local/bin/python3
2
+ # coding: utf-8
3
+
4
+ # ytdlbot - tasks.py
5
+ # 12/29/21 14:57
6
+ #
7
+
8
+ __author__ = "Benny <[email protected]>"
9
+
10
+ import asyncio
11
+ import logging
12
+ import os
13
+ import pathlib
14
+ import re
15
+ import shutil
16
+ import subprocess
17
+ import tempfile
18
+ import threading
19
+ import time
20
+ import traceback
21
+ import typing
22
+ from typing import Any
23
+ from urllib.parse import quote_plus
24
+
25
+ import filetype
26
+ import psutil
27
+ import pyrogram.errors
28
+ import requests
29
+ from apscheduler.schedulers.background import BackgroundScheduler
30
+ from celery import Celery
31
+ from celery.worker.control import Panel
32
+ from pyrogram import Client, enums, idle, types
33
+
34
+ from channel import Channel
35
+ from client_init import create_app
36
+ from config import (
37
+ ARCHIVE_ID,
38
+ BROKER,
39
+ ENABLE_CELERY,
40
+ ENABLE_VIP,
41
+ OWNER,
42
+ RATE_LIMIT,
43
+ RCLONE_PATH,
44
+ TMPFILE_PATH,
45
+ WORKERS,
46
+ )
47
+ from constant import BotText
48
+ from database import Redis
49
+ from downloader import edit_text, tqdm_progress, upload_hook, ytdl_download
50
+ from limit import Payment
51
+ from utils import (
52
+ apply_log_formatter,
53
+ auto_restart,
54
+ customize_logger,
55
+ get_metadata,
56
+ get_revision,
57
+ sizeof_fmt,
58
+ )
59
+
60
+ customize_logger(["pyrogram.client", "pyrogram.session.session", "pyrogram.connection.connection"])
61
+ apply_log_formatter()
62
+ bot_text = BotText()
63
+ logging.getLogger("apscheduler.executors.default").propagate = False
64
+
65
+ app = Celery("tasks", broker=BROKER)
66
+ bot = create_app("tasks")
67
+ channel = Channel()
68
+
69
+
70
def retrieve_message(chat_id: int, message_id: int) -> types.Message | Any:
    """Fetch a message via the celery-side bot client, starting it on demand.

    Celery workers share the module-level `bot`; if it was never started,
    get_messages raises ConnectionError, in which case the bot is started and
    the fetch retried once.
    """
    # this should only be called by celery tasks
    try:
        return bot.get_messages(chat_id, message_id)
    except ConnectionError as e:
        logging.critical("BOT IS NOT STARTED YET: %s", e)
        bot.start()
        return bot.get_messages(chat_id, message_id)
78
+
79
+
80
@app.task(rate_limit=f"{RATE_LIMIT}/m")
def ytdl_download_task(chat_id: int, message_id: int, url: str):
    """Celery task: run a yt-dlp download for `url`, rate-limited per minute.

    The status message is re-fetched by id so the task payload stays small.
    """
    logging.info("YouTube celery tasks started for %s", url)
    bot_msg = retrieve_message(chat_id, message_id)
    ytdl_normal_download(bot, bot_msg, url)
    logging.info("YouTube celery tasks ended.")
86
+
87
+
88
@app.task()
def audio_task(chat_id: int, message_id: int):
    """Celery task: convert a previously sent video message to audio."""
    logging.info("Audio celery tasks started for %s-%s", chat_id, message_id)
    bot_msg = retrieve_message(chat_id, message_id)
    normal_audio(bot, bot_msg)
    logging.info("Audio celery tasks ended.")
94
+
95
+
96
@app.task()
def direct_download_task(chat_id: int, message_id: int, url: str):
    """Celery task: download `url` over plain HTTP and send it as a document."""
    logging.info("Direct download celery tasks started for %s", url)
    bot_msg = retrieve_message(chat_id, message_id)
    direct_normal_download(bot, bot_msg, url)
    logging.info("Direct download celery tasks ended.")
102
+
103
+
104
def get_unique_clink(original_url: str, user_id: int):
    """Build the cache key for a download: canonical link plus the user's
    quality/format settings (different settings must not share a cache entry).
    """
    payment = Payment()
    settings = payment.get_user_settings(user_id)
    clink = channel.extract_canonical_link(original_url)
    try:
        # different user may have different resolution settings
        unique = "{}?p={}{}".format(clink, *settings[1:])
    except IndexError:
        # settings row too short — fall back to the bare canonical link
        unique = clink
    return unique
114
+
115
+
116
def forward_video(client, bot_msg: types.Message | Any, url: str, cached_fid: str):
    """Serve a cache hit: re-send the cached Telegram file id and refresh the
    caption on the delivered message. Always returns True."""
    delivered = upload_processor(client, bot_msg, url, cached_fid)
    media = delivered.document or delivered.video or delivered.audio or delivered.animation or delivered.photo

    caption, _ = gen_cap(bot_msg, url, media)
    delivered.edit_text(caption, reply_markup=gen_video_markup())
    bot_msg.edit_text(f"Download success!✅")
    return True
124
+
125
+
126
def ytdl_download_entrance(client: Client, bot_msg: types.Message, url: str, mode=None):
    """Entry point for a yt-dlp download request.

    Serves from the Redis file-id cache when possible; otherwise dispatches to
    celery (when enabled and the user's mode allows) or downloads inline.
    Any failure is reported back into `bot_msg` with a truncated traceback.
    """
    # in Local node and forward mode, we pass client from main
    # in celery mode, we need to use our own client called bot
    payment = Payment()
    redis = Redis()
    chat_id = bot_msg.chat.id
    unique = get_unique_clink(url, chat_id)
    cached_fid = redis.get_send_cache(unique)

    try:
        if cached_fid:
            forward_video(client, bot_msg, url, cached_fid)
            redis.update_metrics("cache_hit")
            return
        redis.update_metrics("cache_miss")
        # explicit `mode` argument wins over the user's stored setting
        mode = mode or payment.get_user_settings(chat_id)[-1]
        if ENABLE_CELERY and mode in [None, "Celery"]:
            ytdl_download_task.delay(chat_id, bot_msg.id, url)
        else:
            ytdl_normal_download(client, bot_msg, url)
    except Exception as e:
        logging.error("Failed to download %s, error: %s", url, e)
        bot_msg.edit_text(f"Download failed!❌\n\n`{traceback.format_exc()[0:4000]}`", disable_web_page_preview=True)
149
+
150
+
151
def direct_download_entrance(client: Client, bot_msg: typing.Union[types.Message, typing.Coroutine], url: str):
    """Entry point for a direct HTTP download.

    Both branches of the original ran the same inline download — the celery
    dispatch was commented out — so the dead if/else is collapsed.
    """
    # TODO: re-enable queued dispatch when the celery path is ready:
    # if ENABLE_CELERY:
    #     direct_download_task.delay(bot_msg.chat.id, bot_msg.id, url)
    direct_normal_download(client, bot_msg, url)
157
+
158
+
159
def audio_entrance(client: Client, bot_msg: types.Message):
    """Route an audio-conversion request: celery queue when enabled, else inline."""
    if not ENABLE_CELERY:
        normal_audio(client, bot_msg)
    else:
        audio_task.delay(bot_msg.chat.id, bot_msg.id)
164
+
165
+
166
def direct_normal_download(client: Client, bot_msg: typing.Union[types.Message, typing.Coroutine], url: str):
    """Download `url` over plain HTTP (streamed) and send it back as a document.

    The filename is taken from the Content-Disposition header, falling back to
    the last URL path segment, then to the quoted URL. Progress is streamed
    into `bot_msg` via tqdm_progress/edit_text.
    """
    chat_id = bot_msg.chat.id
    headers = {
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.3987.149 Safari/537.36"
    }
    length = 0

    req = None
    try:
        req = requests.get(url, headers=headers, stream=True)
        length = int(req.headers.get("content-length"))
        filename = re.findall("filename=(.+)", req.headers.get("content-disposition"))[0]
    except TypeError:
        # Missing content-length/content-disposition header: fall back to the
        # last path segment of the (possibly redirected) URL.
        filename = getattr(req, "url", "").rsplit("/")[-1]
    except Exception as e:
        bot_msg.edit_text(f"Download failed!❌\n\n```{e}```", disable_web_page_preview=True)
        return

    if not filename:
        filename = quote_plus(url)

    with tempfile.TemporaryDirectory(prefix="ytdl-", dir=TMPFILE_PATH) as f:
        # Fix: the file was previously written under a hard-coded placeholder
        # name, so the document sent to the user lost its real filename.
        # NOTE(review): `filename` comes from a remote header — consider
        # stripping path separators/quotes before trusting it.
        filepath = f"{f}/{filename}"
        # consume req.content in 1 MiB chunks; open the file once instead of
        # re-opening it in append mode on every chunk
        downloaded = 0
        with open(filepath, "wb") as fp:
            for chunk in req.iter_content(1024 * 1024):
                text = tqdm_progress("Downloading...", length, downloaded)
                edit_text(bot_msg, text)
                fp.write(chunk)
                downloaded += len(chunk)
        logging.info("Downloaded file %s", filename)
        st_size = os.stat(filepath).st_size

        client.send_chat_action(chat_id, enums.ChatAction.UPLOAD_DOCUMENT)
        client.send_document(
            bot_msg.chat.id,
            filepath,
            caption=f"filesize: {sizeof_fmt(st_size)}",
            progress=upload_hook,
            progress_args=(bot_msg,),
        )
        bot_msg.edit_text("Download success!✅")
209
+
210
+
211
def normal_audio(client: Client, bot_msg: typing.Union[types.Message, typing.Coroutine]):
    """Convert an already-delivered video back to audio by re-downloading the
    original URL (extracted from the message caption) as bestaudio[m4a]."""
    chat_id = bot_msg.chat.id
    # fn = getattr(bot_msg.video, "file_name", None) or getattr(bot_msg.document, "file_name", None)
    status_msg: typing.Union[types.Message, typing.Coroutine] = bot_msg.reply_text(
        "Converting to audio...please wait patiently", quote=True
    )
    # the caption always embeds the source URL (see gen_cap)
    orig_url: str = re.findall(r"https?://.*", bot_msg.caption)[0]
    with tempfile.TemporaryDirectory(prefix="ytdl-", dir=TMPFILE_PATH) as tmp:
        client.send_chat_action(chat_id, enums.ChatAction.RECORD_AUDIO)
        # just try to download the audio using yt-dlp
        filepath = ytdl_download(orig_url, tmp, status_msg, hijack="bestaudio[ext=m4a]")
        status_msg.edit_text("Sending audio now...")
        client.send_chat_action(chat_id, enums.ChatAction.UPLOAD_AUDIO)
        for f in filepath:
            client.send_audio(chat_id, f)
        status_msg.edit_text("✅ Conversion complete.")
        Redis().update_metrics("audio_success")
228
+
229
+
230
def ytdl_normal_download(client: Client, bot_msg: types.Message | typing.Any, url: str):
    """
    This function is called by celery task or directly by bot
    :param client: bot client, either from main or bot(celery)
    :param bot_msg: bot message
    :param url: url to download
    """
    chat_id = bot_msg.chat.id
    temp_dir = tempfile.TemporaryDirectory(prefix="ytdl-", dir=TMPFILE_PATH)

    video_paths = ytdl_download(url, temp_dir.name, bot_msg)
    logging.info("Download complete.")
    client.send_chat_action(chat_id, enums.ChatAction.UPLOAD_DOCUMENT)
    bot_msg.edit_text("Download complete. Sending now...")
    try:
        upload_processor(client, bot_msg, url, video_paths)
    except pyrogram.errors.Flood as e:
        # Rate limited by Telegram: tell the user and owner, wait out the
        # FloodWait, then retry the upload exactly once.
        logging.critical("FloodWait from Telegram: %s", e)
        client.send_message(
            chat_id,
            f"I'm being rate limited by Telegram. Your video will come after {e} seconds. Please wait patiently.",
        )
        client.send_message(OWNER, f"CRITICAL INFO: {e}")
        time.sleep(e.value)
        upload_processor(client, bot_msg, url, video_paths)

    bot_msg.edit_text("Download success!✅")

    # setup rclone environment var to back up the downloaded file
    if RCLONE_PATH:
        for item in os.listdir(temp_dir.name):
            logging.info("Copying %s to %s", item, RCLONE_PATH)
            shutil.copy(os.path.join(temp_dir.name, item), RCLONE_PATH)
    # NOTE(review): cleanup is skipped when an earlier call raises; the
    # TemporaryDirectory finalizer removes it eventually, but an explicit
    # try/finally would be safer — confirm before changing behavior.
    temp_dir.cleanup()
264
+
265
+
266
def generate_input_media(file_paths: list, cap: str) -> list:
    """Wrap local file paths into pyrogram InputMedia objects for a media group.

    The caption is attached to the first item only (Telegram media-group
    convention). Files whose MIME type cannot be detected are sent as
    documents.
    """
    input_media = []
    for path in file_paths:
        # Fix: filetype.guess_mime returns None for unrecognized content,
        # which made the original crash on `"video" in None`. Treat unknown
        # types as plain documents instead.
        mime = filetype.guess_mime(path) or ""
        if "video" in mime:
            input_media.append(pyrogram.types.InputMediaVideo(media=path))
        elif "image" in mime:
            input_media.append(pyrogram.types.InputMediaPhoto(media=path))
        elif "audio" in mime:
            input_media.append(pyrogram.types.InputMediaAudio(media=path))
        else:
            input_media.append(pyrogram.types.InputMediaDocument(media=path))

    input_media[0].caption = cap
    return input_media
281
+
282
+
283
def upload_processor(client: Client, bot_msg: types.Message, url: str, vp_or_fid: str | list):
    """Send downloaded media (or a cached Telegram file id) to the user.

    :param vp_or_fid: a Telegram file id (str), or a list of local paths; a
        multi-file list is sent as a media group (2-20 items), a single-item
        list is unwrapped and sent per the user's document/video/audio setting.
    :return: the sent message (the first message of a media group).

    Fallback chain for "video" mode: send_video → send_animation → send_photo,
    since Telegram rejects some files for the more specific methods.
    """
    redis = Redis()
    # raise pyrogram.errors.exceptions.FloodWait(13)
    # if is str, it's a file id; else it's a list of paths
    payment = Payment()
    chat_id = bot_msg.chat.id
    markup = gen_video_markup()
    if isinstance(vp_or_fid, list) and len(vp_or_fid) > 1:
        # just generate the first for simplicity, send as media group(2-20)
        cap, meta = gen_cap(bot_msg, url, vp_or_fid[0])
        res_msg: list["types.Message"] | Any = client.send_media_group(chat_id, generate_input_media(vp_or_fid, cap))
        # TODO no cache for now
        return res_msg[0]
    elif isinstance(vp_or_fid, list) and len(vp_or_fid) == 1:
        # normal download, just contains one file in video_paths
        vp_or_fid = vp_or_fid[0]
        cap, meta = gen_cap(bot_msg, url, vp_or_fid)
    else:
        # just a file id as string
        cap, meta = gen_cap(bot_msg, url, vp_or_fid)

    settings = payment.get_user_settings(chat_id)
    if ARCHIVE_ID and isinstance(vp_or_fid, pathlib.Path):
        # fresh local files go to the archive channel first, then get
        # forwarded back to the user at the bottom of this function
        chat_id = ARCHIVE_ID

    if settings[2] == "document":
        logging.info("Sending as document")
        try:
            # send as document could be sent as video even if it's a document
            res_msg = client.send_document(
                chat_id,
                vp_or_fid,
                caption=cap,
                progress=upload_hook,
                progress_args=(bot_msg,),
                reply_markup=markup,
                thumb=meta["thumb"],
                force_document=True,
            )
        except ValueError:
            logging.error("Retry to send as video")
            res_msg = client.send_video(
                chat_id,
                vp_or_fid,
                supports_streaming=True,
                caption=cap,
                progress=upload_hook,
                progress_args=(bot_msg,),
                reply_markup=markup,
                **meta,
            )
    elif settings[2] == "audio":
        logging.info("Sending as audio")
        res_msg = client.send_audio(
            chat_id,
            vp_or_fid,
            caption=cap,
            progress=upload_hook,
            progress_args=(bot_msg,),
        )
    else:
        # settings==video
        logging.info("Sending as video")
        try:
            res_msg = client.send_video(
                chat_id,
                vp_or_fid,
                supports_streaming=True,
                caption=cap,
                progress=upload_hook,
                progress_args=(bot_msg,),
                reply_markup=markup,
                **meta,
            )
        except Exception:
            # try to send as annimation, photo
            try:
                logging.warning("Retry to send as animation")
                res_msg = client.send_animation(
                    chat_id,
                    vp_or_fid,
                    caption=cap,
                    progress=upload_hook,
                    progress_args=(bot_msg,),
                    reply_markup=markup,
                    **meta,
                )
            except Exception:
                # this is likely a photo
                logging.warning("Retry to send as photo")
                res_msg = client.send_photo(
                    chat_id,
                    vp_or_fid,
                    caption=cap,
                    progress=upload_hook,
                    progress_args=(bot_msg,),
                )

    unique = get_unique_clink(url, bot_msg.chat.id)
    obj = res_msg.document or res_msg.video or res_msg.audio or res_msg.animation or res_msg.photo
    # cache the Telegram file id so the next identical request skips download
    redis.add_send_cache(unique, getattr(obj, "file_id", None))
    redis.update_metrics("video_success")
    if ARCHIVE_ID and isinstance(vp_or_fid, pathlib.Path):
        client.forward_messages(bot_msg.chat.id, ARCHIVE_ID, res_msg.id)
    return res_msg
388
+
389
+
390
def gen_cap(bm, url, video_path):
    """Build the caption text and media metadata for an upload.

    :param bm: bot status message, used for chat/user info
    :param url: original download URL (embedded in the caption)
    :param video_path: either a pathlib.Path to a local file, or a pyrogram
        media object (cached file) from which name/size/dimensions are read
    :return: (caption string, dict with width/height/duration/thumb)
    """
    payment = Payment()
    chat_id = bm.chat.id
    user = bm.chat
    try:
        # Fix: `user.first_name or "" + user.last_name or ""` parsed as
        # `first_name or ("" + last_name) or ""`, which raised TypeError
        # (silently blanking user_info via the except below) whenever
        # last_name was None.
        name = (user.first_name or "") + (user.last_name or "")
        user_info = "@{}({})-{}".format(user.username or "N/A", name, user.id)
    except Exception:
        user_info = ""

    if isinstance(video_path, pathlib.Path):
        meta = get_metadata(video_path)
        file_name = video_path.name
        file_size = sizeof_fmt(os.stat(video_path).st_size)
    else:
        file_name = getattr(video_path, "file_name", "")
        file_size = sizeof_fmt(getattr(video_path, "file_size", (2 << 2) + ((2 << 2) + 1) + (2 << 5)))
        meta = dict(
            width=getattr(video_path, "width", 0),
            height=getattr(video_path, "height", 0),
            duration=getattr(video_path, "duration", 0),
            thumb=getattr(video_path, "thumb", None),
        )
    free = payment.get_free_token(chat_id)
    pay = payment.get_pay_token(chat_id)
    if ENABLE_VIP:
        remain = f"Download token count: free {free}, pay {pay}"
    else:
        remain = ""

    if worker_name := os.getenv("WORKER_NAME"):
        worker = f"Downloaded by {worker_name}"
    else:
        worker = ""
    cap = (
        f"{user_info}\n{file_name}\n\n{url}\n\nInfo: {meta['width']}x{meta['height']} {file_size}\t"
        f"{meta['duration']}s\n{remain}\n{worker}\n{bot_text.custom_text}"
    )
    return cap, meta
428
+
429
+
430
def gen_video_markup():
    """Inline keyboard with a single 'convert to audio' button.

    Pressing it fires a callback query with data "convert".
    """
    convert_button = types.InlineKeyboardButton(
        "convert to audio", callback_data="convert"
    )
    return types.InlineKeyboardMarkup([[convert_button]])
441
+
442
+
443
@Panel.register
def ping_revision(*args):
    """Celery remote-control command: report the running git revision."""
    return get_revision()
446
+
447
+
448
@Panel.register
def hot_patch(*args):
    """Celery remote-control command: update the checkout in place.

    Unsets the GitHub auth header, pulls (unshallowing first if needed),
    reinstalls requirements, then kills this worker so the supervisor
    restarts it on the new code.
    """
    app_path = pathlib.Path().cwd().parent
    logging.info("Hot patching on path %s...", app_path)

    pip_install = "pip install -r requirements.txt"
    unset = "git config --unset http.https://github.com/.extraheader"
    pull_unshallow = "git pull origin --unshallow"
    pull = "git pull"

    subprocess.call(unset, shell=True, cwd=app_path)
    if subprocess.call(pull_unshallow, shell=True, cwd=app_path) != 0:
        # non-zero exit: already a full clone, a plain pull suffices
        logging.info("Already unshallow, pulling now...")
        subprocess.call(pull, shell=True, cwd=app_path)

    logging.info("Code is updated, applying hot patch now...")
    subprocess.call(pip_install, shell=True, cwd=app_path)
    # Die on purpose: the process manager restarts the worker with fresh code.
    psutil.Process().kill()
466
+
467
+
468
def purge_tasks():
    """Drop every queued (not yet running) celery task; return a summary string."""
    purged_count = app.control.purge()
    return f"purged {purged_count} tasks."
471
+
472
+
473
def run_celery():
    """Start the celery worker (threads pool) inside this process.

    A fresh asyncio event loop is installed because this is executed in a
    side thread, which has no default loop.
    """
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    worker_name = os.getenv("WORKER_NAME", "")
    argv = ["-A", "tasks", "worker", "--loglevel=info", "--pool=threads", f"--concurrency={WORKERS}", "-n", worker_name]
    app.worker_main(argv)
479
+
480
+
481
+ if __name__ == "__main__":
482
+ print("Bootstrapping Celery worker now.....")
483
+ time.sleep(5)
484
+ threading.Thread(target=run_celery, daemon=True).start()
485
+
486
+ scheduler = BackgroundScheduler(timezone="Europe/London")
487
+ scheduler.add_job(auto_restart, "interval", seconds=900)
488
+ scheduler.start()
489
+
490
+ idle()
491
+ bot.stop()
ytdlbot/utils.py ADDED
@@ -0,0 +1,216 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/local/bin/python3
2
+ # coding: utf-8
3
+
4
+ # ytdlbot - utils.py
5
+ # 9/1/21 22:50
6
+ #
7
+
8
+ __author__ = "Benny <[email protected]>"
9
+
10
+ import contextlib
11
+ import inspect as pyinspect
12
+ import logging
13
+ import os
14
+ import pathlib
15
+ import shutil
16
+ import subprocess
17
+ import tempfile
18
+ import time
19
+ import uuid
20
+
21
+ import coloredlogs
22
+ import ffmpeg
23
+ import psutil
24
+
25
+ from config import TMPFILE_PATH
26
+ from flower_tasks import app
27
+
28
+ inspect = app.control.inspect()
29
+
30
+
31
def apply_log_formatter():
    """Install the project-wide colored log format at INFO level."""
    coloredlogs.install(
        level=logging.INFO,
        fmt="[%(asctime)s %(filename)s:%(lineno)d %(levelname).1s] %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )
37
+
38
+
39
def customize_logger(logger: list):
    """Raise each named logger to INFO level (silences chatty DEBUG output)."""
    for name in logger:
        logging.getLogger(name).setLevel(level=logging.INFO)
42
+
43
+
44
def sizeof_fmt(num: int, suffix="B"):
    """Render a byte count as a human-readable string, e.g. 1536 -> '1.5KiB'."""
    value = num
    for prefix in ("", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"):
        if abs(value) < 1024.0:
            return "%3.1f%s%s" % (value, prefix, suffix)
        value /= 1024.0
    return "%.1f%s%s" % (value, "Yi", suffix)
50
+
51
+
52
def is_youtube(url: str) -> bool:
    """Return True for canonical YouTube URLs (www.youtube.com / youtu.be).

    Fix: the original fell off the end and implicitly returned None for
    non-YouTube URLs; an explicit bool is backward-compatible for all
    truthiness-based callers and clearer to read. A single startswith with a
    tuple of prefixes replaces the `or` chain.
    """
    return url.startswith(("https://www.youtube.com/", "https://youtu.be/"))
55
+
56
+
57
def adjust_formats(user_id: int, url: str, formats: list, hijack=None):
    """Prepend yt-dlp format selectors based on the user's quality setting.

    Mutates `formats` in place; returns None.

    :param hijack: when given, it is forced to the front and everything else
        is skipped.
    """
    from database import MySQL

    # high: best quality 1080P, 2K, 4K, 8K
    # medium: 720P
    # low: 480P
    if hijack:
        formats.insert(0, hijack)
        return

    mapping = {"high": [], "medium": [720], "low": [480]}
    settings = MySQL().get_user_settings(user_id)
    if settings and is_youtube(url):
        for m in mapping.get(settings[1], []):
            formats.insert(0, f"bestvideo[ext=mp4][height={m}]+bestaudio[ext=m4a]")
            formats.insert(1, f"bestvideo[vcodec^=avc][height={m}]+bestaudio[acodec^=mp4a]/best[vcodec^=avc]/best")

    # NOTE(review): this index assumes get_user_settings never returns a
    # falsy/short value here — if it could, this line raises; confirm the
    # upstream guarantee (the diff source does not show the indentation
    # unambiguously, reconstructed at function level).
    if settings[2] == "audio":
        formats.insert(0, "bestaudio[ext=m4a]")
76
+
77
+
78
def get_metadata(video_path):
    """Probe a video with ffmpeg and return dict(height, width, duration, thumb).

    Falls back to 1280x720 / 0s when probing fails. `thumb` is the path of a
    PNG grabbed at the midpoint (scaled to the probed width), or None when
    thumbnail extraction fails.
    """
    width, height, duration = 1280, 720, 0
    try:
        video_streams = ffmpeg.probe(video_path, select_streams="v")
        # last stream wins if there are several video streams
        for item in video_streams.get("streams", []):
            height = item["height"]
            width = item["width"]
        duration = int(float(video_streams["format"]["duration"]))
    except Exception as e:
        logging.error(e)
    try:
        # (typo "thunmnail" kept on purpose: it is only a temp file name)
        thumb = pathlib.Path(video_path).parent.joinpath(f"{uuid.uuid4().hex}-thunmnail.png").as_posix()
        ffmpeg.input(video_path, ss=duration / 2).filter("scale", width, -1).output(thumb, vframes=1).run()
    except ffmpeg._run.Error:
        thumb = None

    return dict(height=height, width=width, duration=duration, thumb=thumb)
95
+
96
+
97
def current_time(ts=None):
    """Format a Unix timestamp (default: now) as 'YYYY-MM-DD HH:MM:SS' local time."""
    stamp_format = "%Y-%m-%d %H:%M:%S"
    return time.strftime(stamp_format, time.localtime(ts))
99
+
100
+
101
def get_revision():
    """Return the short git SHA of the parent checkout, or 'unknown' on failure."""
    cmd = "git -C ../ rev-parse --short HEAD".split()
    try:
        return subprocess.check_output(cmd).decode("u8").replace("\n", "")
    except subprocess.SubprocessError:
        return "unknown"
105
+
106
+
107
def get_func_queue(func) -> int:
    """Count tasks across workers for a celery Inspect method name
    (e.g. "active", "reserved"); returns 0 on any failure.

    Relies on the module-level `inspect` (a celery Inspect instance defined
    at the top of this file, shadowing the stdlib module name).
    """
    try:
        count = 0
        data = getattr(inspect, func)() or {}
        # data maps worker name -> list of task dicts
        for _, task in data.items():
            count += len(task)
        return count
    except Exception:
        return 0
116
+
117
+
118
def tail_log(f, lines=1, _buffer=4098):
    """Tail a file and get X lines from the end

    Seeks backwards from the end in growing _buffer-sized steps until at
    least `lines` lines are in view (or the whole file has been read), then
    returns the last `lines` entries of readlines().
    """
    # placeholder for the lines found
    lines_found = []

    # block counter will be multiplied by buffer
    # to get the block size from the end
    block_counter = -1

    # loop until we find X lines
    while len(lines_found) < lines:
        try:
            f.seek(block_counter * _buffer, os.SEEK_END)
        except IOError:  # either file is too small, or too many lines requested
            f.seek(0)
            lines_found = f.readlines()
            break

        lines_found = f.readlines()

        # we found enough lines, get out
        # Removed this line because it was redundant the while will catch
        # it, I left it for history
        # if len(lines_found) > lines:
        # break

        # decrement the block counter to get the
        # next X bytes
        block_counter -= 1

    return lines_found[-lines:]
149
+
150
+
151
class Detector:
    """Scans a chunk of log text for known fatal patterns.

    Every method whose name ends in `_detector` is a crash predicate that
    returns True when its signature is found (and None otherwise);
    auto_restart() discovers them by that suffix.
    """

    def __init__(self, logs: str):
        self.logs = logs

    @staticmethod
    def func_name():
        # Name of the calling detector method, used only for log attribution.
        with contextlib.suppress(Exception):
            return pyinspect.stack()[1][3]
        return "N/A"

    def updates_too_long_detector(self):
        # If you're seeing this, that means you have logged more than 10 device
        # and the earliest account was kicked out. Restart the program could get you back in.
        crash_markers = (
            "types.UpdatesTooLong",
            "Got shutdown from remote",
            "Code is updated",
            "OSError: Connection lost",
            "[Errno -3] Try again",
            "MISCONF",
        )
        matched = next((marker for marker in crash_markers if marker in self.logs), None)
        if matched is not None:
            logging.critical("kick out crash: %s", self.func_name())
            return True
        logging.debug("No crash detected.")

    def next_salt_detector(self):
        marker = "Next salt in"
        if self.logs.count(marker) >= 5:
            logging.critical("Next salt crash: %s", self.func_name())
            return True

    def connection_reset_detector(self):
        marker = "Send exception: ConnectionResetError Connection lost"
        if marker in self.logs:
            logging.critical("connection lost: %s ", self.func_name())
            return True
189
+
190
+
191
def auto_restart():
    """Scan the tail of /var/log/ytdl.log for known fatal patterns and, when
    one fires, wipe ytdl-* temp dirs and kill this process so the supervisor
    restarts it. No-op when the log file doesn't exist."""
    log_path = "/var/log/ytdl.log"
    if not os.path.exists(log_path):
        return
    with open(log_path) as f:
        logs = "".join(tail_log(f, lines=100))

    det = Detector(logs)
    # every Detector method named *_detector is a crash predicate
    method_list = [getattr(det, func) for func in dir(det) if func.endswith("_detector")]
    for method in method_list:
        if method():
            logging.critical("%s bye bye world!☠️", method)
            for item in pathlib.Path(TMPFILE_PATH or tempfile.gettempdir()).glob("ytdl-*"):
                shutil.rmtree(item, ignore_errors=True)
            time.sleep(5)
            psutil.Process().kill()
207
+
208
+
209
def clean_tempfile():
    """Delete ytdl-* temp directories older than one hour."""
    cutoff = time.time() - 3600
    base = pathlib.Path(TMPFILE_PATH or tempfile.gettempdir())
    for entry in base.glob("ytdl-*"):
        if entry.stat().st_ctime < cutoff:
            shutil.rmtree(entry, ignore_errors=True)
213
+
214
+
215
+ if __name__ == "__main__":
216
+ auto_restart()
ytdlbot/ytdl_bot.py ADDED
@@ -0,0 +1,562 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/local/bin/python3
2
+ # coding: utf-8
3
+
4
+ # ytdlbot - new.py
5
+ # 8/14/21 14:37
6
+ #
7
+
8
+ __author__ = "Benny <[email protected]>"
9
+
10
+ import contextlib
11
+ import logging
12
+ import os
13
+ import random
14
+ import re
15
+ import tempfile
16
+ import time
17
+ import traceback
18
+ from io import BytesIO
19
+ from typing import Any
20
+
21
+ import pyrogram.errors
22
+ import qrcode
23
+ import yt_dlp
24
+ from apscheduler.schedulers.background import BackgroundScheduler
25
+ from pyrogram import Client, enums, filters, types
26
+ from pyrogram.errors.exceptions.bad_request_400 import UserNotParticipant
27
+ from pyrogram.raw import functions
28
+ from pyrogram.raw import types as raw_types
29
+ from tgbot_ping import get_runtime
30
+ from youtubesearchpython import VideosSearch
31
+
32
+ from channel import Channel
33
+ from client_init import create_app
34
+ from config import (
35
+ AUTHORIZED_USER,
36
+ ENABLE_CELERY,
37
+ ENABLE_FFMPEG,
38
+ ENABLE_VIP,
39
+ IS_BACKUP_BOT,
40
+ M3U8_SUPPORT,
41
+ OWNER,
42
+ PLAYLIST_SUPPORT,
43
+ PROVIDER_TOKEN,
44
+ REQUIRED_MEMBERSHIP,
45
+ TOKEN_PRICE,
46
+ TRX_SIGNAL,
47
+ )
48
+ from constant import BotText
49
+ from database import InfluxDB, MySQL, Redis
50
+ from limit import Payment, TronTrx
51
+ from tasks import app as celery_app
52
+ from tasks import (
53
+ audio_entrance,
54
+ direct_download_entrance,
55
+ hot_patch,
56
+ purge_tasks,
57
+ ytdl_download_entrance,
58
+ )
59
+ from utils import auto_restart, clean_tempfile, customize_logger, get_revision
60
+
61
+ logging.info("Authorized users are %s", AUTHORIZED_USER)
62
+ customize_logger(["pyrogram.client", "pyrogram.session.session", "pyrogram.connection.connection"])
63
+ logging.getLogger("apscheduler.executors.default").propagate = False
64
+
65
+ app = create_app("main")
66
+ channel = Channel()
67
+
68
+
69
def private_use(func):
    """Decorator that gates a message handler.

    Enforced in order: (1) in non-private chats only explicit /ytdl commands are
    handled; (2) if AUTHORIZED_USER is set, only listed user ids may proceed;
    (3) if REQUIRED_MEMBERSHIP is set, the sender must be a member/admin/owner
    of that group or channel. Any failed check replies (where appropriate) and
    swallows the update.
    """

    def wrapper(client: Client, message: types.Message):
        # from_user can be absent (e.g. channel posts), hence getattr
        chat_id = getattr(message.from_user, "id", None)

        # message type check
        if message.chat.type != enums.ChatType.PRIVATE and not message.text.lower().startswith("/ytdl"):
            logging.debug("%s, it's annoying me...🙄️ ", message.text)
            return

        # authorized users check — AUTHORIZED_USER is a comma-separated id list
        if AUTHORIZED_USER:
            users = [int(i) for i in AUTHORIZED_USER.split(",")]
        else:
            users = []

        if users and chat_id and chat_id not in users:
            message.reply_text(BotText.private, quote=True)
            return

        if REQUIRED_MEMBERSHIP:
            try:
                member: types.ChatMember | Any = app.get_chat_member(REQUIRED_MEMBERSHIP, chat_id)
                if member.status not in [
                    enums.ChatMemberStatus.ADMINISTRATOR,
                    enums.ChatMemberStatus.MEMBER,
                    enums.ChatMemberStatus.OWNER,
                ]:
                    # restricted/left/banned statuses are treated as "not a participant"
                    raise UserNotParticipant()
                else:
                    logging.info("user %s check passed for group/channel %s.", chat_id, REQUIRED_MEMBERSHIP)
            except UserNotParticipant:
                logging.warning("user %s is not a member of group/channel %s", chat_id, REQUIRED_MEMBERSHIP)
                message.reply_text(BotText.membership_require, quote=True)
                return

        return func(client, message)

    return wrapper
+ return wrapper
107
+
108
+
109
@app.on_message(filters.command(["start"]))
def start_handler(client: Client, message: types.Message):
    """Handle /start: greet the user; include token balance when VIP mode is on."""
    payment = Payment()
    from_id = message.from_user.id
    logging.info("%s welcome to youtube-dl bot!", message.from_user.id)
    client.send_chat_action(from_id, enums.ChatAction.TYPING)
    is_old_user = payment.check_old_user(from_id)
    # NOTE(review): this assignment is immediately overwritten by the
    # ENABLE_VIP branch below — confirm whether the old-user branch was meant
    # to short-circuit the VIP balance lookup.
    if is_old_user:
        info = ""
    if ENABLE_VIP:
        free_token, pay_token, reset = payment.get_token(from_id)
        info = f"Free token: {free_token}, Pay token: {pay_token}, Reset: {reset}"
    else:
        info = ""
    text = f"{BotText.start}\n\n{info}\n{BotText.custom_text}"
    client.send_message(message.chat.id, text, disable_web_page_preview=True)
125
+
126
+
127
@app.on_message(filters.command(["help"]))
def help_handler(client: Client, message: types.Message):
    """Reply to /help with the bot's usage text."""
    target = message.chat.id
    client.send_chat_action(target, enums.ChatAction.TYPING)
    client.send_message(target, BotText.help, disable_web_page_preview=True)
132
+
133
+
134
@app.on_message(filters.command(["about"]))
def about_handler(client: Client, message: types.Message):
    """Reply to /about with the bot's about text."""
    target = message.chat.id
    client.send_chat_action(target, enums.ChatAction.TYPING)
    client.send_message(target, BotText.about)
139
+
140
+
141
@app.on_message(filters.command(["sub"]))
def subscribe_handler(client: Client, message: types.Message):
    """Handle /sub: bare command lists subscriptions, with an argument it subscribes."""
    chat_id = message.chat.id
    client.send_chat_action(chat_id, enums.ChatAction.TYPING)
    if message.text == "/sub":
        # no argument -> show the user's current subscriptions
        outcome = channel.get_user_subscription(chat_id)
    else:
        channel_link = message.text.split()[1]
        try:
            outcome = channel.subscribe_channel(chat_id, channel_link)
        except (IndexError, ValueError):
            outcome = f"Error: \n{traceback.format_exc()}"
    client.send_message(chat_id, outcome or "You have no subscription.", disable_web_page_preview=True)
154
+
155
+
156
@app.on_message(filters.command(["unsub"]))
def unsubscribe_handler(client: Client, message: types.Message):
    """Handle /unsub: remove the user's subscription to the given channel id."""
    chat_id = message.chat.id
    client.send_chat_action(chat_id, enums.ChatAction.TYPING)
    parts = message.text.split(" ")
    if len(parts) == 1:
        # no channel id supplied -> show usage
        client.send_message(chat_id, "/unsub channel_id", disable_web_page_preview=True)
        return

    affected = channel.unsubscribe_channel(chat_id, parts[1])
    reply = f"Unsubscribed from {parts[1]}" if affected else "Unable to find the channel."
    client.send_message(chat_id, reply, disable_web_page_preview=True)
171
+
172
+
173
@app.on_message(filters.command(["patch"]))
def patch_handler(client: Client, message: types.Message):
    """Handle /patch (owner only): hot-patch this process and all celery workers."""
    if message.from_user.username != OWNER:
        return
    chat_id = message.chat.id
    celery_app.control.broadcast("hot_patch")
    client.send_chat_action(chat_id, enums.ChatAction.TYPING)
    client.send_message(chat_id, "Oorah!")
    hot_patch()
182
+
183
+
184
@app.on_message(filters.command(["uncache"]))
def uncache_handler(client: Client, message: types.Message):
    """Handle /uncache (owner only): drop cached entries for the given link."""
    sender = message.from_user.username
    # argument is parsed before the owner check, matching the command contract
    target_link = message.text.split()[1]
    if sender == OWNER:
        removed = channel.del_cache(target_link)
        message.reply_text(f"{removed} cache(s) deleted.", quote=True)
191
+
192
+
193
@app.on_message(filters.command(["purge"]))
def purge_handler(client: Client, message: types.Message):
    """Handle /purge (owner only): purge queued tasks and report the result."""
    if message.from_user.username == OWNER:
        message.reply_text(purge_tasks(), quote=True)
198
+
199
+
200
@app.on_message(filters.command(["ping"]))
def ping_handler(client: Client, message: types.Message):
    """Handle /ping: report runtime info; the owner also gets worker stats as a file."""
    redis = Redis()
    chat_id = message.chat.id
    client.send_chat_action(chat_id, enums.ChatAction.TYPING)
    # get_runtime is skipped on macOS and Heroku — presumably it needs the
    # container runtime, which isn't available there (TODO confirm)
    if os.uname().sysname == "Darwin" or ".heroku" in os.getenv("PYTHONHOME", ""):
        bot_info = "ping unavailable."
    else:
        bot_info = get_runtime("ytdlbot_ytdl_1", "YouTube-dl")
    if message.chat.username == OWNER:
        # cap worker stats so the document caption stays within Telegram limits
        stats = BotText.ping_worker()[:1000]
        client.send_document(chat_id, redis.generate_file(), caption=f"{bot_info}\n\n{stats}")
    else:
        # non-owners only see the part of the report before the CPU section
        client.send_message(chat_id, f"{bot_info.split('CPU')[0]}")
214
+
215
+
216
@app.on_message(filters.command(["sub_count"]))
def sub_count_handler(client: Client, message: types.Message):
    """Handle /sub_count (owner only): send subscription statistics as a text file."""
    if message.from_user.username != OWNER:
        return
    chat_id = message.chat.id
    with BytesIO() as report:
        report.write(channel.sub_count().encode("u8"))
        report.name = "subscription count.txt"
        client.send_document(chat_id, report)
225
+
226
+
227
@app.on_message(filters.command(["direct"]))
def direct_handler(client: Client, message: types.Message):
    """Handle /direct: queue a direct (non-yt-dlp) download of an http(s) link."""
    metrics = Redis()
    chat_id = message.from_user.id
    client.send_chat_action(chat_id, enums.ChatAction.TYPING)
    url = re.sub(r"/direct\s*", "", message.text)
    logging.info("direct start %s", url)
    # reject anything that is not an http(s) URL
    if not url.lower().startswith(("http://", "https://")):
        metrics.update_metrics("bad_request")
        message.reply_text("Send me a DIRECT LINK.", quote=True)
        return

    bot_msg = message.reply_text("Request received.", quote=True)
    metrics.update_metrics("direct_request")
    direct_download_entrance(client, bot_msg, url)
242
+
243
+
244
@app.on_message(filters.command(["settings"]))
def settings_handler(client: Client, message: types.Message):
    """Handle /settings: show the per-user settings inline keyboard.

    Rows: send type (document/video/audio), quality (high/medium/low), and —
    for the owner or paying users — a toggle between Local and Celery
    download modes.
    """
    chat_id = message.chat.id
    payment = Payment()
    client.send_chat_action(chat_id, enums.ChatAction.TYPING)
    data = MySQL().get_user_settings(chat_id)
    set_mode = data[-1]
    # the button offers the *other* mode as the switch target
    text = {"Local": "Celery", "Celery": "Local"}.get(set_mode, "Local")
    mode_text = f"Download mode: **{set_mode}**"
    if message.chat.username == OWNER or payment.get_pay_token(chat_id):
        extra = [types.InlineKeyboardButton(f"Change download mode to {text}", callback_data=text)]
    else:
        extra = []

    markup = types.InlineKeyboardMarkup(
        [
            [  # First row: how files are sent back
                types.InlineKeyboardButton("send as document", callback_data="document"),
                types.InlineKeyboardButton("send as video", callback_data="video"),
                types.InlineKeyboardButton("send as audio", callback_data="audio"),
            ],
            [  # second row: download quality
                types.InlineKeyboardButton("High Quality", callback_data="high"),
                types.InlineKeyboardButton("Medium Quality", callback_data="medium"),
                types.InlineKeyboardButton("Low Quality", callback_data="low"),
            ],
            extra,
        ]
    )

    try:
        client.send_message(chat_id, BotText.settings.format(data[1], data[2]) + mode_text, reply_markup=markup)
    # Fixed: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt. On failure, retry with slightly altered text —
    # presumably to dodge Telegram rejecting an identical message (TODO confirm).
    except Exception:
        client.send_message(
            chat_id, BotText.settings.format(data[1] + ".", data[2] + ".") + mode_text, reply_markup=markup
        )
280
+
281
+
282
@app.on_message(filters.command(["buy"]))
def buy_handler(client: Client, message: types.Message):
    """Handle /buy: offer the available payment methods for download tokens."""
    # process as chat.id, not from_user.id
    chat_id = message.chat.id
    client.send_chat_action(chat_id, enums.ChatAction.TYPING)
    # currency USD
    token_count = message.text.replace("/buy", "").strip()
    if token_count.isdigit():
        # price is presumably in USD cents (Telegram invoice convention).
        # NOTE(review): this divides by TOKEN_PRICE — verify the intended
        # relation between token count and price (multiply vs divide).
        price = int(int(token_count) / TOKEN_PRICE * 100)
    else:
        # no/invalid count -> default price of 100 (presumably $1.00)
        price = 100

    markup = types.InlineKeyboardMarkup(
        [
            [
                types.InlineKeyboardButton("Bot Payments", callback_data=f"bot-payments-{price}"),
                types.InlineKeyboardButton("TRON(TRX)", callback_data="tron-trx"),
            ],
        ]
    )
    client.send_message(chat_id, BotText.buy, disable_web_page_preview=True, reply_markup=markup)
303
+
304
+
305
@app.on_callback_query(filters.regex(r"tron-trx"))
def tronpayment_btn_calback(client: Client, callback_query: types.CallbackQuery):
    """Reply to the TRON(TRX) payment button with a QR code of the deposit address."""
    callback_query.answer("Generating QR code...")
    chat_id = callback_query.message.chat.id
    client.send_chat_action(chat_id, enums.ChatAction.TYPING)

    addr = TronTrx().get_payment_address(chat_id)
    with BytesIO() as bio:
        qr = qrcode.make(addr)
        qr.save(bio)
        # Fixed: qr.save leaves the stream positioned at EOF, so the upload
        # would read zero bytes. Rewind and give the buffer a filename so the
        # photo is sent with actual image content.
        bio.name = "qr.png"
        bio.seek(0)
        client.send_photo(chat_id, bio, caption=f"Send any amount of TRX to `{addr}`")
316
+
317
+
318
@app.on_callback_query(filters.regex(r"bot-payments-.*"))
def bot_payment_btn_calback(client: Client, callback_query: types.CallbackQuery):
    """Send a Telegram bot-payments invoice for the price encoded in the callback data."""
    callback_query.answer("Generating invoice...")
    chat_id = callback_query.message.chat.id
    client.send_chat_action(chat_id, enums.ChatAction.TYPING)

    data = callback_query.data
    # callback data looks like "bot-payments-<price>"
    price = int(data.split("-")[-1])
    payload = f"{chat_id}-buy"
    invoice = generate_invoice(price, f"Buy {TOKEN_PRICE} download tokens", "Pay by card", payload)
    # the invoice is sent via the raw MTProto API
    # NOTE(review): access_hash=0 relies on the peer being resolvable without
    # a real access hash — confirm this works for all users
    app.invoke(
        functions.messages.SendMedia(
            peer=(raw_types.InputPeerUser(user_id=chat_id, access_hash=0)),
            media=invoice,
            random_id=app.rnd_id(),
            message="Buy more download token",
        )
    )
336
+
337
+
338
@app.on_message(filters.command(["redeem"]))
def redeem_handler(client: Client, message: types.Message):
    """Handle /redeem: verify a payment code and report the result to the user."""
    payment = Payment()
    chat_id = message.chat.id
    code = message.text.strip().replace("/redeem", "").strip()
    verdict = payment.verify_payment(chat_id, code)
    message.reply_text(verdict, quote=True)
346
+
347
+
348
def generate_invoice(amount: int, title: str, description: str, payload: str):
    """Build a raw Telegram InputMediaInvoice priced in USD."""
    price_tag = raw_types.LabeledPrice(label="price", amount=amount)
    return raw_types.input_media_invoice.InputMediaInvoice(
        invoice=raw_types.invoice.Invoice(currency="USD", prices=[price_tag]),
        title=title,
        description=description,
        provider=PROVIDER_TOKEN,
        provider_data=raw_types.DataJSON(data="{}"),
        payload=payload.encode(),
        start_param=payload,
    )
361
+
362
+
363
def link_checker(url: str) -> str:
    """Return a rejection reason for unsupported links; falsy means the link is allowed."""
    # Instagram is handled elsewhere: accept immediately
    if url.startswith("https://www.instagram.com"):
        return ""
    ytdl = yt_dlp.YoutubeDL()

    if not PLAYLIST_SUPPORT:
        # canonical link is only resolved when playlists are disabled
        canonical = Channel.extract_canonical_link(url)
        if re.findall(r"^https://www\.youtube\.com/channel/", canonical) or "list" in url:
            return "Playlist or channel links are disabled."

    if not M3U8_SUPPORT and re.findall(r"m3u8|\.m3u8|\.m3u$", url.lower()):
        return "m3u8 links are disabled."

    # probe metadata; download errors are ignored and the link passes through
    with contextlib.suppress(yt_dlp.utils.DownloadError):
        if ytdl.extract_info(url, download=False).get("live_status") == "is_live":
            return "Live stream links are disabled. Please download it after the stream ends."
379
+
380
+
381
def search_ytb(kw: str):
    """Search YouTube for *kw* and return a numbered "title / link" listing.

    Fixed: the original used ``results.index(item)`` inside the loop, which is
    O(n) per item (quadratic overall) and returns the wrong position when two
    results compare equal; ``enumerate`` gives the correct ordinal in O(1).
    String assembly uses join instead of repeated ``+=``.
    """
    videos_search = VideosSearch(kw, limit=10)
    results = videos_search.result()["result"]
    lines = []
    for index, item in enumerate(results, start=1):
        lines.append(f"{index}. {item.get('title')}\n{item.get('link')}\n\n")
    return "".join(lines)
391
+
392
+
393
@app.on_message(filters.incoming & (filters.text | filters.document))
@private_use
def download_handler(client: Client, message: types.Message):
    """Main entry point: accept a URL (or a document of URLs) and queue downloads.

    Non-URL text falls back to a YouTube search. When VIP mode is enabled,
    non-grandfathered users spend one token per request. Flood waits are
    reported to the user via a small text document and to the owner directly.
    """
    redis = Redis()
    payment = Payment()
    chat_id = message.from_user.id
    client.send_chat_action(chat_id, enums.ChatAction.TYPING)
    redis.user_count(chat_id)
    if message.document:
        # a document is treated as a whitespace-separated list of URLs
        with tempfile.NamedTemporaryFile(mode="r+") as tf:
            logging.info("Downloading file to %s", tf.name)
            message.download(tf.name)
            # Fixed: the file was opened without ever being closed
            # (`open(tf.name).read()` — the old comment read "don't know why").
            # Re-open by name because message.download writes through its own
            # handle, so tf's buffered position can't be trusted.
            with open(tf.name, "r") as fp:
                contents = fp.read()
        urls = contents.split()
    else:
        urls = [re.sub(r"/ytdl\s*", "", message.text)]
    logging.info("start %s", urls)

    for url in urls:
        # plain text that is not a URL -> treat it as a YouTube search query
        if not re.findall(r"^https?://", url.lower()):
            redis.update_metrics("bad_request")
            text = search_ytb(url)
            message.reply_text(text, quote=True, disable_web_page_preview=True)
            return

        # NOTE(review): `return` here aborts any remaining URLs from a batch
        # document — confirm that is intended rather than `continue`.
        if text := link_checker(url):
            message.reply_text(text, quote=True)
            redis.update_metrics("reject_link_checker")
            return

        # old user is not limited by token
        if ENABLE_VIP and not payment.check_old_user(chat_id):
            free, pay, reset = payment.get_token(chat_id)
            if free + pay <= 0:
                message.reply_text(f"You don't have enough token. Please wait until {reset} or /buy .", quote=True)
                redis.update_metrics("reject_token")
                return
            payment.use_token(chat_id)

        redis.update_metrics("video_request")

        text = BotText.get_receive_link_text()
        try:
            bot_msg: types.Message | Any = message.reply_text(text, quote=True)
        except pyrogram.errors.Flood as e:
            # flood-limited: explain via a document (another text reply would flood again)
            f = BytesIO()
            f.write(str(e).encode())
            f.write(b"Your job will be done soon. Just wait! Don't rush.")
            f.name = "Please don't flood me.txt"
            bot_msg = message.reply_document(
                f, caption=f"Flood wait! Please wait {e} seconds...." f"Your job will start automatically", quote=True
            )
            f.close()
            client.send_message(OWNER, f"Flood wait! 🙁 {e} seconds....")
            time.sleep(e.value)

        client.send_chat_action(chat_id, enums.ChatAction.UPLOAD_VIDEO)
        # the job reports progress by editing bot_msg; give it the real chat
        bot_msg.chat = message.chat
        ytdl_download_entrance(client, bot_msg, url)
455
+
456
+
457
@app.on_callback_query(filters.regex(r"document|video|audio"))
def send_method_callback(client: Client, callback_query: types.CallbackQuery):
    """Persist the user's preferred upload type (document/video/audio)."""
    chat_id = callback_query.message.chat.id
    choice = callback_query.data
    logging.info("Setting %s file type to %s", chat_id, choice)
    MySQL().set_user_settings(chat_id, "method", choice)
    callback_query.answer(f"Your send type was set to {callback_query.data}")
464
+
465
+
466
@app.on_callback_query(filters.regex(r"high|medium|low"))
def download_resolution_callback(client: Client, callback_query: types.CallbackQuery):
    """Persist the user's preferred download quality (high/medium/low)."""
    chat_id = callback_query.message.chat.id
    choice = callback_query.data
    logging.info("Setting %s file type to %s", chat_id, choice)
    MySQL().set_user_settings(chat_id, "resolution", choice)
    callback_query.answer(f"Your default download quality was set to {callback_query.data}")
473
+
474
+
475
@app.on_callback_query(filters.regex(r"convert"))
def audio_callback(client: Client, callback_query: types.CallbackQuery):
    """Convert the replied-to video into audio, when ffmpeg support is enabled."""
    redis = Redis()
    if not ENABLE_FFMPEG:
        # conversion is switched off by configuration
        callback_query.answer("Request rejected.")
        callback_query.message.reply_text("Audio conversion is disabled now.")
        return

    callback_query.answer(f"Converting to audio...please wait patiently")
    redis.update_metrics("audio_request")
    audio_entrance(client, callback_query.message)
486
+
487
+
488
@app.on_callback_query(filters.regex(r"Local|Celery"))
def owner_local_callback(client: Client, callback_query: types.CallbackQuery):
    """Switch the user's download mode between Local and Celery."""
    target = callback_query.message.chat.id
    MySQL().set_user_settings(target, "mode", callback_query.data)
    callback_query.answer(f"Download mode was changed to {callback_query.data}")
493
+
494
+
495
def periodic_sub_check():
    """Scheduled job: notify subscribers when their channels publish a new video.

    Deactivates subscriptions for users who blocked/deleted the bot, and
    paces sends with a short random sleep to stay under rate limits.
    """
    exceptions = pyrogram.errors.exceptions
    # group_subscriber maps channel id -> list of subscribed user ids
    for cid, uids in channel.group_subscriber().items():
        video_url = channel.has_newer_update(cid)
        if video_url:
            logging.info(f"periodic update:{video_url} - {uids}")
            for uid in uids:
                try:
                    app.send_message(uid, f"{video_url} is out. Watch it on YouTube")
                except (exceptions.bad_request_400.PeerIdInvalid, exceptions.bad_request_400.UserIsBlocked) as e:
                    # unreachable user -> stop notifying them
                    logging.warning("User is blocked or deleted. %s", e)
                    channel.deactivate_user_subscription(uid)
                except Exception:
                    logging.error("Unknown error when sending message to user. %s", traceback.format_exc())
                finally:
                    # random 0-3s pause between sends, success or failure
                    time.sleep(random.random() * 3)
511
+
512
+
513
@app.on_raw_update()
def raw_update(client: Client, update, users, chats):
    """Handle raw MTProto updates for the payments flow.

    Approves pre-checkout queries, and on a completed payment credits the
    paying user with tokens and confirms by message.
    """
    payment = Payment()
    action = getattr(getattr(update, "message", None), "action", None)
    if update.QUALNAME == "types.UpdateBotPrecheckoutQuery":
        # always approve the pre-checkout step
        client.invoke(
            functions.messages.SetBotPrecheckoutResults(
                query_id=update.query_id,
                success=True,
            )
        )
    elif action and action.QUALNAME == "types.MessageActionPaymentSentMe":
        logging.info("Payment received. %s", action)
        uid = update.message.peer_id.user_id
        # total_amount is presumably in cents, hence /100 — TODO confirm
        amount = action.total_amount / 100
        payment.add_pay_user([uid, amount, action.charge.provider_charge_id, 0, amount * TOKEN_PRICE])
        client.send_message(uid, f"Thank you {uid}. Payment received: {amount} {action.currency}")
530
+
531
+
532
def trx_notify(_, **kwargs):
    """TRX_SIGNAL receiver: forward a TRON payment notification to the user."""
    uid = kwargs.get("user_id")
    note = kwargs.get("text")
    logging.info("Sending trx notification to %s", uid)
    app.send_message(uid, note)
537
+
538
+
539
if __name__ == "__main__":
    # warm up the database connection/schema before anything else
    MySQL()
    # wire TRON payment notifications to the Telegram sender
    TRX_SIGNAL.connect(trx_notify)
    scheduler = BackgroundScheduler(timezone="Europe/London", job_defaults={"max_instances": 6})
    # watchdog + temp-dir housekeeping
    scheduler.add_job(auto_restart, "interval", seconds=600)
    scheduler.add_job(clean_tempfile, "interval", seconds=120)
    if not IS_BACKUP_BOT:
        # primary bot only: daily quota reset, metrics, payment polling, sub checks
        scheduler.add_job(Redis().reset_today, "cron", hour=0, minute=0)
        scheduler.add_job(InfluxDB().collect_data, "interval", seconds=120)
        scheduler.add_job(TronTrx().check_payment, "interval", seconds=60, max_instances=1)
        # default quota allocation of 10,000 units per day
        scheduler.add_job(periodic_sub_check, "interval", seconds=3600)
    scheduler.start()
    banner = f"""
▌ ▌ ▀▛▘ ▌ ▛▀▖ ▜ ▌
▝▞ ▞▀▖ ▌ ▌ ▌ ▌ ▌ ▛▀▖ ▞▀▖ ▌ ▌ ▞▀▖ ▌ ▌ ▛▀▖ ▐ ▞▀▖ ▝▀▖ ▞▀▌
▌ ▌ ▌ ▌ ▌ ▌ ▌ ▌ ▌ ▌ ▛▀ ▌ ▌ ▌ ▌ ▐▐▐ ▌ ▌ ▐ ▌ ▌ ▞▀▌ ▌ ▌
▘ ▝▀ ▝▀▘ ▘ ▝▀▘ ▀▀ ▝▀▘ ▀▀ ▝▀ ▘▘ ▘ ▘ ▘ ▝▀ ▝▀▘ ▝▀▘

By @BennyThink, VIP mode: {ENABLE_VIP}, Celery Mode: {ENABLE_CELERY}
Version: {get_revision()}
"""
    print(banner)
    # blocks until the client is stopped
    app.run()