navpan2 committed
Commit 8e43f8d · 1 Parent(s): 8c3c0f1

Upload 65 files

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full set of 65 files.
Files changed (50)
  1. .dockerignore +5 -0
  2. .gitattributes +1 -0
  3. .gitignore +175 -0
  4. Dockerfile +11 -0
  5. LICENSE +201 -0
  6. Makefile +52 -0
  7. Procfile +1 -0
  8. app.json +43 -0
  9. app.py +26 -0
  10. assets/1.jpeg +0 -0
  11. assets/2.jpeg +0 -0
  12. assets/CNY.png +0 -0
  13. assets/USD.png +0 -0
  14. assets/instagram.png +3 -0
  15. assets/tron.png +0 -0
  16. conf/YouTube Download Celery.json +794 -0
  17. conf/supervisor_main.conf +34 -0
  18. conf/supervisor_worker.conf +33 -0
  19. docker-compose.yml +60 -0
  20. k8s.md +200 -0
  21. k8s/01.redis.yml +53 -0
  22. k8s/02.mariadb.yml +80 -0
  23. k8s/03.configmap.yml +17 -0
  24. k8s/04.ytdl-master.yml +65 -0
  25. k8s/05.ytdl-worker.yml +47 -0
  26. k8s/06.flower.yml +101 -0
  27. main.py +21 -0
  28. requirements.txt +31 -0
  29. scripts/low_id.sh +12 -0
  30. scripts/migrate_to_mysql.py +27 -0
  31. scripts/start.sh +13 -0
  32. scripts/transfer.py +29 -0
  33. worker.yml +15 -0
  34. ytdlbot/__pycache__/channel.cpython-310.pyc +0 -0
  35. ytdlbot/__pycache__/channel.cpython-38.pyc +0 -0
  36. ytdlbot/__pycache__/client_init.cpython-310.pyc +0 -0
  37. ytdlbot/__pycache__/client_init.cpython-38.pyc +0 -0
  38. ytdlbot/__pycache__/config.cpython-310.pyc +0 -0
  39. ytdlbot/__pycache__/config.cpython-38.pyc +0 -0
  40. ytdlbot/__pycache__/constant.cpython-310.pyc +0 -0
  41. ytdlbot/__pycache__/constant.cpython-38.pyc +0 -0
  42. ytdlbot/__pycache__/database.cpython-310.pyc +0 -0
  43. ytdlbot/__pycache__/database.cpython-38.pyc +0 -0
  44. ytdlbot/__pycache__/downloader.cpython-310.pyc +0 -0
  45. ytdlbot/__pycache__/downloader.cpython-38.pyc +0 -0
  46. ytdlbot/__pycache__/flower_tasks.cpython-310.pyc +0 -0
  47. ytdlbot/__pycache__/flower_tasks.cpython-38.pyc +0 -0
  48. ytdlbot/__pycache__/limit.cpython-310.pyc +0 -0
  49. ytdlbot/__pycache__/limit.cpython-38.pyc +0 -0
  50. ytdlbot/__pycache__/tasks.cpython-310.pyc +0 -0
.dockerignore ADDED
@@ -0,0 +1,5 @@
+ env
+ venv
+ db_data
+ .ash_history
+ .DS_Store
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ assets/instagram.png filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1,175 @@
+ # Byte-compiled / optimized / DLL files
+ __pycache__/
+ *.py[cod]
+ *$py.class
+
+ # C extensions
+ *.so
+
+ # Distribution / packaging
+ .Python
+ build/
+ develop-eggs/
+ dist/
+ downloads/
+ eggs/
+ .eggs/
+ lib/
+ lib64/
+ parts/
+ sdist/
+ var/
+ wheels/
+ pip-wheel-metadata/
+ share/python-wheels/
+ *.egg-info/
+ .installed.cfg
+ *.egg
+ MANIFEST
+
+ # PyInstaller
+ # Usually these files are written by a python script from a template
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
+ *.manifest
+ *.spec
+
+ # Installer logs
+ pip-log.txt
+ pip-delete-this-directory.txt
+
+ # Unit test / coverage reports
+ htmlcov/
+ .tox/
+ .nox/
+ .coverage
+ .coverage.*
+ .cache
+ nosetests.xml
+ coverage.xml
+ *.cover
+ *.py,cover
+ .hypothesis/
+ .pytest_cache/
+
+ # Translations
+ *.mo
+ *.pot
+
+ # Django stuff:
+ *.log
+ local_settings.py
+ db.sqlite3
+ db.sqlite3-journal
+
+ # Flask stuff:
+ instance/
+ .webassets-cache
+
+ # Scrapy stuff:
+ .scrapy
+
+ # Sphinx documentation
+ docs/_build/
+
+ # PyBuilder
+ target/
+
+ # Jupyter Notebook
+ .ipynb_checkpoints
+
+ # IPython
+ profile_default/
+ ipython_config.py
+
+ # pyenv
+ .python-version
+
+ # pipenv
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
+ # install all needed dependencies.
+ #Pipfile.lock
+
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow
+ __pypackages__/
+
+ # Celery stuff
+ celerybeat-schedule
+ celerybeat.pid
+
+ # SageMath parsed files
+ *.sage.py
+
+ # Environments
+ .env
+ .venv
+ env/
+ venv/
+ ENV/
+ env.bak/
+ venv.bak/
+
+ # Spyder project settings
+ .spyderproject
+ .spyproject
+
+ # Rope project settings
+ .ropeproject
+
+ # mkdocs documentation
+ /site
+
+ # mypy
+ .mypy_cache/
+ .dmypy.json
+ dmypy.json
+
+ # Pyre type checker
+ .pyre/
+ /.idea/modules.xml
+ /.idea/inspectionProfiles/profiles_settings.xml
+ /.idea/inspectionProfiles/Project_Default.xml
+ /.idea/vcs.xml
+ /.idea/ytdl-bot.iml
+ /.idea/misc.xml
+ /.idea/workspace.xml
+ /.idea/jsonSchemas.xml
+ /*.session
+ /.idea/ytdlbot.iml
+ /*.sqlite
+ /.idea/dataSources.xml
+ /.idea/sqldialects.xml
+ /.idea/.gitignore
+ /.idea/dataSources/bf75f0a6-c774-4ecf-9448-2086f57b70df.xml
+ /.idea/dataSources.local.xml
+ /.idea/dataSources/bf75f0a6-c774-4ecf-9448-2086f57b70df/entities/entities.dat
+ /.idea/dataSources/bf75f0a6-c774-4ecf-9448-2086f57b70df/entities/entities.dat.len
+ /.idea/dataSources/bf75f0a6-c774-4ecf-9448-2086f57b70df/entities/entities.dat.values
+ /.idea/dataSources/bf75f0a6-c774-4ecf-9448-2086f57b70df/entities/entities.dat.values.at
+ /.idea/dataSources/bf75f0a6-c774-4ecf-9448-2086f57b70df/entities/entities.dat.values.s
+ /.idea/dataSources/bf75f0a6-c774-4ecf-9448-2086f57b70df/entities/entities.dat_i
+ /.idea/dataSources/bf75f0a6-c774-4ecf-9448-2086f57b70df/entities/entities.dat_i.len
+ /.idea/dataSources/bf75f0a6-c774-4ecf-9448-2086f57b70df/storage_v2/_src_/schema/main.uQUzAA.meta
+ db_data/*
+ env/*
+ .ash_history
+ .DS_Store
+ ytdlbot/ytdl.session
+ data/*
+ upgrade_worker.sh
+ ytdl.session
+ reinforcement/*
+ /ytdlbot/session/celery.session
+ /.idea/prettier.xml
+ /.idea/watcherTasks.xml
+ /ytdlbot/session/ytdl.session-journal
+ /ytdlbot/unknown_errors.txt
+ /ytdlbot/ytdl.session-journal
+ /ytdlbot/ytdl-main.session-journal
+ /ytdlbot/ytdl-main.session
+ /ytdlbot/ytdl-celery.session-journal
+ /ytdlbot/ytdl-celery.session
+ /ytdlbot/main.session
+ /ytdlbot/tasks.session
+ /ytdlbot/tasks.session-journal
Dockerfile ADDED
@@ -0,0 +1,11 @@
+ FROM python:3.10
+
+ WORKDIR /code
+
+ COPY ./requirements.txt /code/requirements.txt
+
+ RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+ COPY . .
+
+ CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "7860"]
LICENSE ADDED
@@ -0,0 +1,201 @@
1
+ Apache License
2
+ Version 2.0, January 2004
3
+ http://www.apache.org/licenses/
4
+
5
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+ 1. Definitions.
8
+
9
+ "License" shall mean the terms and conditions for use, reproduction,
10
+ and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+ "Licensor" shall mean the copyright owner or entity authorized by
13
+ the copyright owner that is granting the License.
14
+
15
+ "Legal Entity" shall mean the union of the acting entity and all
16
+ other entities that control, are controlled by, or are under common
17
+ control with that entity. For the purposes of this definition,
18
+ "control" means (i) the power, direct or indirect, to cause the
19
+ direction or management of such entity, whether by contract or
20
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+ outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+ "You" (or "Your") shall mean an individual or Legal Entity
24
+ exercising permissions granted by this License.
25
+
26
+ "Source" form shall mean the preferred form for making modifications,
27
+ including but not limited to software source code, documentation
28
+ source, and configuration files.
29
+
30
+ "Object" form shall mean any form resulting from mechanical
31
+ transformation or translation of a Source form, including but
32
+ not limited to compiled object code, generated documentation,
33
+ and conversions to other media types.
34
+
35
+ "Work" shall mean the work of authorship, whether in Source or
36
+ Object form, made available under the License, as indicated by a
37
+ copyright notice that is included in or attached to the work
38
+ (an example is provided in the Appendix below).
39
+
40
+ "Derivative Works" shall mean any work, whether in Source or Object
41
+ form, that is based on (or derived from) the Work and for which the
42
+ editorial revisions, annotations, elaborations, or other modifications
43
+ represent, as a whole, an original work of authorship. For the purposes
44
+ of this License, Derivative Works shall not include works that remain
45
+ separable from, or merely link (or bind by name) to the interfaces of,
46
+ the Work and Derivative Works thereof.
47
+
48
+ "Contribution" shall mean any work of authorship, including
49
+ the original version of the Work and any modifications or additions
50
+ to that Work or Derivative Works thereof, that is intentionally
51
+ submitted to Licensor for inclusion in the Work by the copyright owner
52
+ or by an individual or Legal Entity authorized to submit on behalf of
53
+ the copyright owner. For the purposes of this definition, "submitted"
54
+ means any form of electronic, verbal, or written communication sent
55
+ to the Licensor or its representatives, including but not limited to
56
+ communication on electronic mailing lists, source code control systems,
57
+ and issue tracking systems that are managed by, or on behalf of, the
58
+ Licensor for the purpose of discussing and improving the Work, but
59
+ excluding communication that is conspicuously marked or otherwise
60
+ designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+ "Contributor" shall mean Licensor and any individual or Legal Entity
63
+ on behalf of whom a Contribution has been received by Licensor and
64
+ subsequently incorporated within the Work.
65
+
66
+ 2. Grant of Copyright License. Subject to the terms and conditions of
67
+ this License, each Contributor hereby grants to You a perpetual,
68
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69
+ copyright license to reproduce, prepare Derivative Works of,
70
+ publicly display, publicly perform, sublicense, and distribute the
71
+ Work and such Derivative Works in Source or Object form.
72
+
73
+ 3. Grant of Patent License. Subject to the terms and conditions of
74
+ this License, each Contributor hereby grants to You a perpetual,
75
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+ (except as stated in this section) patent license to make, have made,
77
+ use, offer to sell, sell, import, and otherwise transfer the Work,
78
+ where such license applies only to those patent claims licensable
79
+ by such Contributor that are necessarily infringed by their
80
+ Contribution(s) alone or by combination of their Contribution(s)
81
+ with the Work to which such Contribution(s) was submitted. If You
82
+ institute patent litigation against any entity (including a
83
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
84
+ or a Contribution incorporated within the Work constitutes direct
85
+ or contributory patent infringement, then any patent licenses
86
+ granted to You under this License for that Work shall terminate
87
+ as of the date such litigation is filed.
88
+
89
+ 4. Redistribution. You may reproduce and distribute copies of the
90
+ Work or Derivative Works thereof in any medium, with or without
91
+ modifications, and in Source or Object form, provided that You
92
+ meet the following conditions:
93
+
94
+ (a) You must give any other recipients of the Work or
95
+ Derivative Works a copy of this License; and
96
+
97
+ (b) You must cause any modified files to carry prominent notices
98
+ stating that You changed the files; and
99
+
100
+ (c) You must retain, in the Source form of any Derivative Works
101
+ that You distribute, all copyright, patent, trademark, and
102
+ attribution notices from the Source form of the Work,
103
+ excluding those notices that do not pertain to any part of
104
+ the Derivative Works; and
105
+
106
+ (d) If the Work includes a "NOTICE" text file as part of its
107
+ distribution, then any Derivative Works that You distribute must
108
+ include a readable copy of the attribution notices contained
109
+ within such NOTICE file, excluding those notices that do not
110
+ pertain to any part of the Derivative Works, in at least one
111
+ of the following places: within a NOTICE text file distributed
112
+ as part of the Derivative Works; within the Source form or
113
+ documentation, if provided along with the Derivative Works; or,
114
+ within a display generated by the Derivative Works, if and
115
+ wherever such third-party notices normally appear. The contents
116
+ of the NOTICE file are for informational purposes only and
117
+ do not modify the License. You may add Your own attribution
118
+ notices within Derivative Works that You distribute, alongside
119
+ or as an addendum to the NOTICE text from the Work, provided
120
+ that such additional attribution notices cannot be construed
121
+ as modifying the License.
122
+
123
+ You may add Your own copyright statement to Your modifications and
124
+ may provide additional or different license terms and conditions
125
+ for use, reproduction, or distribution of Your modifications, or
126
+ for any such Derivative Works as a whole, provided Your use,
127
+ reproduction, and distribution of the Work otherwise complies with
128
+ the conditions stated in this License.
129
+
130
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
131
+ any Contribution intentionally submitted for inclusion in the Work
132
+ by You to the Licensor shall be under the terms and conditions of
133
+ this License, without any additional terms or conditions.
134
+ Notwithstanding the above, nothing herein shall supersede or modify
135
+ the terms of any separate license agreement you may have executed
136
+ with Licensor regarding such Contributions.
137
+
138
+ 6. Trademarks. This License does not grant permission to use the trade
139
+ names, trademarks, service marks, or product names of the Licensor,
140
+ except as required for reasonable and customary use in describing the
141
+ origin of the Work and reproducing the content of the NOTICE file.
142
+
143
+ 7. Disclaimer of Warranty. Unless required by applicable law or
144
+ agreed to in writing, Licensor provides the Work (and each
145
+ Contributor provides its Contributions) on an "AS IS" BASIS,
146
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147
+ implied, including, without limitation, any warranties or conditions
148
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149
+ PARTICULAR PURPOSE. You are solely responsible for determining the
150
+ appropriateness of using or redistributing the Work and assume any
151
+ risks associated with Your exercise of permissions under this License.
152
+
153
+ 8. Limitation of Liability. In no event and under no legal theory,
154
+ whether in tort (including negligence), contract, or otherwise,
155
+ unless required by applicable law (such as deliberate and grossly
156
+ negligent acts) or agreed to in writing, shall any Contributor be
157
+ liable to You for damages, including any direct, indirect, special,
158
+ incidental, or consequential damages of any character arising as a
159
+ result of this License or out of the use or inability to use the
160
+ Work (including but not limited to damages for loss of goodwill,
161
+ work stoppage, computer failure or malfunction, or any and all
162
+ other commercial damages or losses), even if such Contributor
163
+ has been advised of the possibility of such damages.
164
+
165
+ 9. Accepting Warranty or Additional Liability. While redistributing
166
+ the Work or Derivative Works thereof, You may choose to offer,
167
+ and charge a fee for, acceptance of support, warranty, indemnity,
168
+ or other liability obligations and/or rights consistent with this
169
+ License. However, in accepting such obligations, You may act only
170
+ on Your own behalf and on Your sole responsibility, not on behalf
171
+ of any other Contributor, and only if You agree to indemnify,
172
+ defend, and hold each Contributor harmless for any liability
173
+ incurred by, or claims asserted against, such Contributor by reason
174
+ of your accepting any such warranty or additional liability.
175
+
176
+ END OF TERMS AND CONDITIONS
177
+
178
+ APPENDIX: How to apply the Apache License to your work.
179
+
180
+ To apply the Apache License to your work, attach the following
181
+ boilerplate notice, with the fields enclosed by brackets "[]"
182
+ replaced with your own identifying information. (Don't include
183
+ the brackets!) The text should be enclosed in the appropriate
184
+ comment syntax for the file format. We also recommend that a
185
+ file or class name and description of purpose be included on the
186
+ same "printed page" as the copyright notice for easier
187
+ identification within third-party archives.
188
+
189
+ Copyright [yyyy] [name of copyright owner]
190
+
191
+ Licensed under the Apache License, Version 2.0 (the "License");
192
+ you may not use this file except in compliance with the License.
193
+ You may obtain a copy of the License at
194
+
195
+ http://www.apache.org/licenses/LICENSE-2.0
196
+
197
+ Unless required by applicable law or agreed to in writing, software
198
+ distributed under the License is distributed on an "AS IS" BASIS,
199
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200
+ See the License for the specific language governing permissions and
201
+ limitations under the License.
Makefile ADDED
@@ -0,0 +1,52 @@
+ define NOLOGGING
+
+ logging:
+ driver: none
+ endef
+ export NOLOGGING
+
+ default:
+ docker pull bennythink/ytdlbot
+
+ bot:
+ make
+ docker-compose up -d
+ docker system prune -a --volumes -f
+
+ worker:
+ make
+ docker-compose -f worker.yml up -d
+ docker system prune -a --volumes -f
+ sleep 5
+
+ weak-worker:
+ make
+ docker-compose --compatibility -f worker.yml up -d
+ docker system prune -a --volumes -f
+ sleep 5
+
+ upgrade-all-worker:
+ bash upgrade_worker.sh
+
+ tag:
+ git tag -a v$(shell date "+%Y-%m-%d")_$(shell git rev-parse --short HEAD) -m v$(shell date "+%Y-%m-%d")
+ git push --tags
+
+ nolog:
+ echo "$$NOLOGGING">> worker.yml
+
+ flower:
+ echo 'import dbm;dbm.open("data/flower","n");exit()'| python3
+
+ up:
+ docker build -t bennythink/ytdlbot:latest .
+ docker-compose -f docker-compose.yml -f worker.yml up -d
+
+ ps:
+ docker-compose -f docker-compose.yml -f worker.yml ps
+
+ down:
+ docker-compose -f docker-compose.yml -f worker.yml down
+
+ logs:
+ docker-compose -f docker-compose.yml -f worker.yml logs -f worker ytdl
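A short usage sketch for the targets above; which host runs which target is an assumption based on the bot/worker split implied by `docker-compose.yml` and `worker.yml`.

```shell
# on the main bot host
make bot
# on a worker host (weak-worker applies the compose --compatibility resource limits)
make weak-worker
# pre-create Flower's persistent database file before bringing everything up with `make up`
make flower
```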
Procfile ADDED
@@ -0,0 +1 @@
+ worker: python ytdlbot/ytdl_bot.py
app.json ADDED
@@ -0,0 +1,43 @@
+ {
+   "name": "YouTube-Downloader",
+   "description": "A Telegrambot to download youtube video",
+   "repository": "https://github.com/tgbot-collection/ytdlbot",
+   "logo": "https://avatars.githubusercontent.com/u/73354211?s=200&v=4",
+   "keywords": [
+     "telegram",
+     "youtube-dl"
+   ],
+   "env": {
+     "TOKEN": {
+       "description": "Bot token",
+       "value": "token"
+     },
+     "APP_ID": {
+       "description": "APP ID",
+       "value": "12345"
+     },
+     "APP_HASH": {
+       "description": "APP HASH",
+       "value": "12345abc"
+     },
+     "OWNER": {
+       "description": "Your telegram username",
+       "value": "username",
+       "required": false
+     }
+   },
+   "formation": {
+     "worker": {
+       "quantity": 1,
+       "size": "eco"
+     }
+   },
+   "buildpacks": [
+     {
+       "url": "https://github.com/heroku/heroku-buildpack-python.git"
+     },
+     {
+       "url": "https://github.com/jonathanong/heroku-buildpack-ffmpeg-latest.git"
+     }
+   ]
+ }
app.py ADDED
@@ -0,0 +1,26 @@
+ import os
+ import streamlit as st
+
+ def run_python_file(file_path):
+     try:
+         st.text(f"Running {file_path}...")
+         os.system(f"python {file_path}")
+         st.success("Script executed successfully!")
+     except Exception as e:
+         st.error(f"Error: {e}")
+
+ def main():
+     st.title("YTDLBot Runner")
+
+     # Specify the directory and file name
+     directory = "ytdlbot"
+     file_name = "ytdl_bot.py"
+     file_path = os.path.join(directory, file_name)
+
+     st.text(f"Selected file: {file_path}")
+
+     # Run the Python file automatically when the app starts
+     run_python_file(file_path)
+
+ if __name__ == "__main__":
+     main()
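A brief usage note: since `app.py` is a Streamlit wrapper that launches the bot on startup, it would typically be started with the standard Streamlit CLI (a sketch, assuming Streamlit is installed alongside the listed requirements):

```shell
streamlit run app.py
```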
assets/1.jpeg ADDED
assets/2.jpeg ADDED
assets/CNY.png ADDED
assets/USD.png ADDED
assets/instagram.png ADDED

Git LFS Details

  • SHA256: 403808b9b818ec3ad934a4b7b4b1689c179d318eb34a3cabbe5e00b1b90fb14a
  • Pointer size: 132 Bytes
  • Size of remote file: 1.58 MB
assets/tron.png ADDED
conf/YouTube Download Celery.json ADDED
@@ -0,0 +1,794 @@
1
+ {
2
+ "__inputs": [
3
+ {
4
+ "name": "DS_CELERY",
5
+ "label": "celery",
6
+ "description": "",
7
+ "type": "datasource",
8
+ "pluginId": "influxdb",
9
+ "pluginName": "InfluxDB"
10
+ }
11
+ ],
12
+ "__elements": [],
13
+ "__requires": [
14
+ {
15
+ "type": "grafana",
16
+ "id": "grafana",
17
+ "name": "Grafana",
18
+ "version": "8.3.1"
19
+ },
20
+ {
21
+ "type": "datasource",
22
+ "id": "influxdb",
23
+ "name": "InfluxDB",
24
+ "version": "1.0.0"
25
+ },
26
+ {
27
+ "type": "panel",
28
+ "id": "timeseries",
29
+ "name": "Time series",
30
+ "version": ""
31
+ }
32
+ ],
33
+ "annotations": {
34
+ "list": [
35
+ {
36
+ "builtIn": 1,
37
+ "datasource": "-- Grafana --",
38
+ "enable": true,
39
+ "hide": true,
40
+ "iconColor": "rgba(0, 211, 255, 1)",
41
+ "name": "Annotations & Alerts",
42
+ "target": {
43
+ "limit": 100,
44
+ "matchAny": false,
45
+ "tags": [],
46
+ "type": "dashboard"
47
+ },
48
+ "type": "dashboard"
49
+ }
50
+ ]
51
+ },
52
+ "editable": true,
53
+ "fiscalYearStartMonth": 0,
54
+ "graphTooltip": 0,
55
+ "id": null,
56
+ "iteration": 1644554238421,
57
+ "links": [],
58
+ "liveNow": false,
59
+ "panels": [
60
+ {
61
+ "datasource": {
62
+ "type": "influxdb",
63
+ "uid": "${DS_CELERY}"
64
+ },
65
+ "fieldConfig": {
66
+ "defaults": {
67
+ "color": {
68
+ "mode": "palette-classic"
69
+ },
70
+ "custom": {
71
+ "axisLabel": "",
72
+ "axisPlacement": "auto",
73
+ "barAlignment": 0,
74
+ "drawStyle": "line",
75
+ "fillOpacity": 5,
76
+ "gradientMode": "none",
77
+ "hideFrom": {
78
+ "legend": false,
79
+ "tooltip": false,
80
+ "viz": false
81
+ },
82
+ "lineInterpolation": "linear",
83
+ "lineWidth": 1,
84
+ "pointSize": 5,
85
+ "scaleDistribution": {
86
+ "type": "linear"
87
+ },
88
+ "showPoints": "auto",
89
+ "spanNulls": true,
90
+ "stacking": {
91
+ "group": "A",
92
+ "mode": "none"
93
+ },
94
+ "thresholdsStyle": {
95
+ "mode": "off"
96
+ }
97
+ },
98
+ "mappings": [],
99
+ "thresholds": {
100
+ "mode": "absolute",
101
+ "steps": [
102
+ {
103
+ "color": "green",
104
+ "value": null
105
+ },
106
+ {
107
+ "color": "red",
108
+ "value": 80
109
+ }
110
+ ]
111
+ }
112
+ },
113
+ "overrides": []
114
+ },
115
+ "gridPos": {
116
+ "h": 8,
117
+ "w": 12,
118
+ "x": 0,
119
+ "y": 0
120
+ },
121
+ "id": 2,
122
+ "options": {
123
+ "legend": {
124
+ "calcs": [],
125
+ "displayMode": "list",
126
+ "placement": "bottom"
127
+ },
128
+ "tooltip": {
129
+ "mode": "single"
130
+ }
131
+ },
132
+ "targets": [
133
+ {
134
+ "alias": "Active",
135
+ "groupBy": [
136
+ {
137
+ "params": [
138
+ "$__interval"
139
+ ],
140
+ "type": "time"
141
+ },
142
+ {
143
+ "params": [
144
+ "null"
145
+ ],
146
+ "type": "fill"
147
+ }
148
+ ],
149
+ "measurement": "active",
150
+ "orderByTime": "ASC",
151
+ "policy": "default",
152
+ "query": "SELECT mean(\"active\") FROM \"active\" WHERE $timeFilter GROUP BY time($__interval) ",
153
+ "rawQuery": true,
154
+ "refId": "A",
155
+ "resultFormat": "time_series",
156
+ "select": [
157
+ [
158
+ {
159
+ "params": [
160
+ "active"
161
+ ],
162
+ "type": "field"
163
+ },
164
+ {
165
+ "params": [],
166
+ "type": "mean"
167
+ }
168
+ ]
169
+ ],
170
+ "tags": []
171
+ },
172
+ {
173
+ "alias": "$tag_hostname",
174
+ "hide": false,
175
+ "query": "\nSELECT \nmean(\"active\") AS active\nFROM \"tasks\" WHERE (\"hostname\" =~ /^$hostname$/) AND $timeFilter GROUP BY time($__interval) ,* ORDER BY asc ",
176
+ "rawQuery": true,
177
+ "refId": "B",
178
+ "resultFormat": "time_series"
179
+ }
180
+ ],
181
+ "title": "Active Jobs",
182
+ "type": "timeseries"
183
+ },
184
+ {
185
+ "datasource": {
186
+ "type": "influxdb",
187
+ "uid": "${DS_CELERY}"
188
+ },
189
+ "fieldConfig": {
190
+ "defaults": {
191
+ "color": {
192
+ "mode": "palette-classic"
193
+ },
194
+ "custom": {
195
+ "axisLabel": "",
196
+ "axisPlacement": "auto",
197
+ "barAlignment": 0,
198
+ "drawStyle": "line",
199
+ "fillOpacity": 5,
200
+ "gradientMode": "none",
201
+ "hideFrom": {
202
+ "legend": false,
203
+ "tooltip": false,
204
+ "viz": false
205
+ },
206
+ "lineInterpolation": "smooth",
207
+ "lineWidth": 1,
208
+ "pointSize": 5,
209
+ "scaleDistribution": {
210
+ "type": "linear"
211
+ },
212
+ "showPoints": "auto",
213
+ "spanNulls": true,
214
+ "stacking": {
215
+ "group": "A",
216
+ "mode": "none"
217
+ },
218
+ "thresholdsStyle": {
219
+ "mode": "off"
220
+ }
221
+ },
222
+ "mappings": [],
223
+ "thresholds": {
224
+ "mode": "absolute",
225
+ "steps": [
226
+ {
227
+ "color": "green",
228
+ "value": null
229
+ },
230
+ {
231
+ "color": "red",
232
+ "value": 80
233
+ }
234
+ ]
235
+ },
236
+ "unit": "percent"
237
+ },
238
+ "overrides": []
239
+ },
240
+ "gridPos": {
241
+ "h": 8,
242
+ "w": 12,
243
+ "x": 12,
244
+ "y": 0
245
+ },
246
+ "id": 10,
247
+ "options": {
248
+ "legend": {
249
+ "calcs": [],
250
+ "displayMode": "list",
251
+ "placement": "bottom"
252
+ },
253
+ "tooltip": {
254
+ "mode": "single"
255
+ }
256
+ },
257
+ "targets": [
258
+ {
259
+ "alias": "$col",
260
+ "datasource": {
261
+ "type": "influxdb",
262
+ "uid": "${DS_CELERY}"
263
+ },
264
+ "groupBy": [
265
+ {
266
+ "params": [
267
+ "$__interval"
268
+ ],
269
+ "type": "time"
270
+ },
271
+ {
272
+ "params": [
273
+ "null"
274
+ ],
275
+ "type": "fill"
276
+ }
277
+ ],
278
+ "measurement": "metrics",
279
+ "orderByTime": "ASC",
280
+ "policy": "default",
281
+ "query": "\nSELECT \nmean(\"today_audio_success\")/mean(\"today_audio_request\")*100 as audio_success,\nmean(\"today_video_success\")/mean(\"today_video_request\")*100 as video_success\n\nFROM \"metrics\" WHERE $timeFilter GROUP BY time($__interval), * ORDER BY asc ",
282
+ "rawQuery": true,
283
+ "refId": "A",
284
+ "resultFormat": "time_series",
285
+ "select": [
286
+ [
287
+ {
288
+ "params": [
289
+ "today_audio_success"
290
+ ],
291
+ "type": "field"
292
+ },
293
+ {
294
+ "params": [],
295
+ "type": "mean"
296
+ }
297
+ ]
298
+ ],
299
+ "tags": []
300
+ }
301
+ ],
302
+ "title": "Video & Audio Success Rate",
303
+ "type": "timeseries"
304
+ },
305
+ {
306
+ "datasource": {
307
+ "type": "influxdb",
308
+ "uid": "${DS_CELERY}"
309
+ },
310
+ "fieldConfig": {
311
+ "defaults": {
312
+ "color": {
313
+ "mode": "palette-classic"
314
+ },
315
+ "custom": {
316
+ "axisLabel": "",
317
+ "axisPlacement": "auto",
318
+ "barAlignment": 0,
319
+ "drawStyle": "line",
320
+ "fillOpacity": 5,
321
+ "gradientMode": "none",
322
+ "hideFrom": {
323
+ "legend": false,
324
+ "tooltip": false,
325
+ "viz": false
326
+ },
327
+ "lineInterpolation": "smooth",
328
+ "lineWidth": 1,
329
+ "pointSize": 5,
330
+ "scaleDistribution": {
331
+ "type": "linear"
332
+ },
333
+ "showPoints": "auto",
334
+ "spanNulls": true,
335
+ "stacking": {
336
+ "group": "A",
337
+ "mode": "none"
338
+ },
339
+ "thresholdsStyle": {
340
+ "mode": "off"
341
+ }
342
+ },
343
+ "mappings": [],
344
+ "thresholds": {
345
+ "mode": "absolute",
346
+ "steps": [
347
+ {
348
+ "color": "green",
349
+ "value": null
350
+ },
351
+ {
352
+ "color": "red",
353
+ "value": 80
354
+ }
355
+ ]
356
+ }
357
+ },
358
+ "overrides": []
359
+ },
360
+ "gridPos": {
361
+ "h": 8,
362
+ "w": 12,
363
+ "x": 0,
364
+ "y": 8
365
+ },
366
+ "id": 6,
367
+ "options": {
368
+ "legend": {
369
+ "calcs": [],
370
+ "displayMode": "list",
371
+ "placement": "bottom"
372
+ },
373
+ "tooltip": {
374
+ "mode": "single"
375
+ }
376
+ },
377
+ "targets": [
378
+ {
379
+ "alias": "$tag_hostname:$col",
380
+ "query": "SELECT mean(\"load1\") AS load1,mean(\"load5\") AS load5,mean(\"load15\") AS load15\nFROM \"tasks\" WHERE (\"hostname\" =~ /^$hostname$/) AND $timeFilter GROUP BY time($__interval) ,* ORDER BY asc \n\n",
381
+ "rawQuery": true,
382
+ "refId": "A",
383
+ "resultFormat": "time_series"
384
+ }
385
+ ],
386
+ "title": "Load Average",
387
+ "type": "timeseries"
388
+ },
389
+ {
390
+ "datasource": {
391
+ "type": "influxdb",
392
+ "uid": "${DS_CELERY}"
393
+ },
394
+ "fieldConfig": {
395
+ "defaults": {
396
+ "color": {
397
+ "mode": "palette-classic"
398
+ },
399
+ "custom": {
400
+ "axisLabel": "",
401
+ "axisPlacement": "auto",
402
+ "barAlignment": 0,
403
+ "drawStyle": "line",
404
+ "fillOpacity": 5,
405
+ "gradientMode": "none",
406
+ "hideFrom": {
407
+ "legend": false,
408
+ "tooltip": false,
409
+ "viz": false
410
+ },
411
+ "lineInterpolation": "smooth",
412
+ "lineWidth": 1,
413
+ "pointSize": 5,
414
+ "scaleDistribution": {
415
+ "type": "linear"
416
+ },
417
+ "showPoints": "auto",
418
+ "spanNulls": true,
419
+ "stacking": {
420
+ "group": "A",
421
+ "mode": "none"
422
+ },
423
+ "thresholdsStyle": {
424
+ "mode": "off"
425
+ }
426
+ },
427
+ "mappings": [],
428
+ "thresholds": {
429
+ "mode": "absolute",
430
+ "steps": [
431
+ {
432
+ "color": "green",
433
+ "value": null
434
+ },
435
+ {
436
+ "color": "red",
437
+ "value": 80
438
+ }
439
+ ]
440
+ },
441
+ "unit": "percent"
442
+ },
443
+ "overrides": []
444
+ },
445
+ "gridPos": {
446
+ "h": 8,
447
+ "w": 12,
448
+ "x": 12,
449
+ "y": 8
450
+ },
451
+ "id": 9,
452
+ "options": {
453
+ "legend": {
454
+ "calcs": [],
455
+ "displayMode": "list",
456
+ "placement": "bottom"
457
+ },
458
+ "tooltip": {
459
+ "mode": "single"
460
+ }
461
+ },
462
+ "targets": [
463
+ {
464
+ "alias": "$tag_hostname:$col",
465
+ "datasource": {
466
+ "type": "influxdb",
467
+ "uid": "${DS_CELERY}"
468
+ },
469
+ "groupBy": [
470
+ {
471
+ "params": [
472
+ "$__interval"
473
+ ],
474
+ "type": "time"
475
+ },
476
+ {
477
+ "params": [
478
+ "null"
479
+ ],
480
+ "type": "fill"
481
+ }
482
+ ],
483
+ "measurement": "tasks",
484
+ "orderByTime": "ASC",
485
+ "policy": "default",
486
+ "query": "\nSELECT mean(\"task-succeeded\")/mean(\"task-received\")*100 AS success_rate, mean(\"task-failed\")/mean(\"task-received\")*100 AS fail_rate\n\nFROM \"tasks\" WHERE (\"hostname\" =~ /^$hostname$/) AND $timeFilter GROUP BY time($__interval) ,* ORDER BY asc ",
487
+ "rawQuery": true,
488
+ "refId": "A",
489
+ "resultFormat": "time_series",
490
+ "select": [
491
+ [
492
+ {
493
+ "params": [
494
+ "task-received"
495
+ ],
496
+ "type": "field"
497
+ }
498
+ ]
499
+ ],
500
+ "tags": [
501
+ {
502
+ "key": "hostname",
503
+ "operator": "=~",
504
+ "value": "/^$hostname$/"
505
+ }
506
+ ]
507
+ }
508
+ ],
509
+ "title": "Task Rate",
510
+ "type": "timeseries"
511
+ },
512
+ {
513
+ "datasource": {
514
+ "type": "influxdb",
515
+ "uid": "${DS_CELERY}"
516
+ },
517
+ "fieldConfig": {
518
+ "defaults": {
519
+ "color": {
520
+ "mode": "palette-classic"
521
+ },
522
+ "custom": {
523
+ "axisLabel": "",
524
+ "axisPlacement": "auto",
525
+ "barAlignment": 0,
526
+ "drawStyle": "line",
527
+ "fillOpacity": 5,
528
+ "gradientMode": "none",
529
+ "hideFrom": {
530
+ "legend": false,
531
+ "tooltip": false,
532
+ "viz": false
533
+ },
534
+ "lineInterpolation": "smooth",
535
+ "lineWidth": 1,
536
+ "pointSize": 5,
537
+ "scaleDistribution": {
538
+ "type": "linear"
539
+ },
540
+ "showPoints": "auto",
541
+ "spanNulls": true,
542
+ "stacking": {
543
+ "group": "A",
544
+ "mode": "none"
545
+ },
546
+ "thresholdsStyle": {
547
+ "mode": "off"
548
+ }
549
+ },
550
+ "mappings": [],
551
+ "thresholds": {
552
+ "mode": "absolute",
553
+ "steps": [
554
+ {
555
+ "color": "green",
556
+ "value": null
557
+ },
558
+ {
559
+ "color": "red",
560
+ "value": 80
561
+ }
562
+ ]
563
+ },
564
+ "unit": "none"
565
+ },
566
+ "overrides": []
567
+ },
568
+ "gridPos": {
569
+ "h": 8,
570
+ "w": 12,
571
+ "x": 0,
572
+ "y": 16
573
+ },
574
+ "id": 13,
575
+ "options": {
576
+ "legend": {
577
+ "calcs": [],
578
+ "displayMode": "list",
579
+ "placement": "bottom"
580
+ },
581
+ "tooltip": {
582
+ "mode": "single"
583
+ }
584
+ },
585
+ "targets": [
586
+ {
587
+ "alias": "$tag_hostname:$col",
588
+ "datasource": {
589
+ "type": "influxdb",
590
+ "uid": "${DS_CELERY}"
591
+ },
592
+ "groupBy": [
593
+ {
594
+ "params": [
595
+ "$__interval"
596
+ ],
597
+ "type": "time"
598
+ },
599
+ {
600
+ "params": [
601
+ "null"
602
+ ],
603
+ "type": "fill"
604
+ }
605
+ ],
606
+ "measurement": "tasks",
607
+ "orderByTime": "ASC",
608
+ "policy": "default",
609
+ "query": "\nSELECT mean(\"task-received\") AS received, mean(\"task-started\") AS started,mean(\"task-succeeded\") AS succeeded,mean(\"task-failed\") AS failed\n\nFROM \"tasks\" WHERE (\"hostname\" =~ /^$hostname$/) AND $timeFilter GROUP BY time($__interval) ,* ORDER BY asc ",
610
+ "rawQuery": true,
611
+ "refId": "A",
612
+ "resultFormat": "time_series",
613
+ "select": [
614
+ [
615
+ {
616
+ "params": [
617
+ "task-received"
618
+ ],
619
+ "type": "field"
620
+ }
621
+ ]
622
+ ],
623
+ "tags": [
624
+ {
625
+ "key": "hostname",
626
+ "operator": "=~",
627
+ "value": "/^$hostname$/"
628
+ }
629
+ ]
630
+ }
631
+ ],
632
+ "title": "Task Status",
633
+ "type": "timeseries"
634
+ },
635
+ {
636
+ "datasource": {
637
+ "type": "influxdb",
638
+ "uid": "${DS_CELERY}"
639
+ },
640
+ "fieldConfig": {
641
+ "defaults": {
642
+ "color": {
643
+ "mode": "palette-classic"
644
+ },
645
+ "custom": {
646
+ "axisLabel": "",
647
+ "axisPlacement": "auto",
648
+ "barAlignment": 0,
649
+ "drawStyle": "line",
650
+ "fillOpacity": 5,
651
+ "gradientMode": "none",
652
+ "hideFrom": {
653
+ "legend": false,
654
+ "tooltip": false,
655
+ "viz": false
656
+ },
657
+ "lineInterpolation": "smooth",
658
+ "lineWidth": 1,
659
+ "pointSize": 5,
660
+ "scaleDistribution": {
661
+ "type": "linear"
662
+ },
663
+ "showPoints": "auto",
664
+ "spanNulls": true,
665
+ "stacking": {
666
+ "group": "A",
667
+ "mode": "none"
668
+ },
669
+ "thresholdsStyle": {
670
+ "mode": "off"
671
+ }
672
+ },
673
+ "mappings": [],
674
+ "thresholds": {
675
+ "mode": "absolute",
676
+ "steps": [
677
+ {
678
+ "color": "green",
679
+ "value": null
680
+ },
681
+ {
682
+ "color": "red",
683
+ "value": 80
684
+ }
685
+ ]
686
+ }
687
+ },
688
+ "overrides": []
689
+ },
690
+ "gridPos": {
691
+ "h": 8,
692
+ "w": 12,
693
+ "x": 12,
694
+ "y": 16
695
+ },
696
+ "id": 8,
697
+ "options": {
698
+ "legend": {
699
+ "calcs": [],
700
+ "displayMode": "list",
701
+ "placement": "bottom"
702
+ },
703
+ "tooltip": {
704
+ "mode": "single"
705
+ }
706
+ },
707
+ "targets": [
708
+ {
709
+ "alias": "$col",
710
+ "datasource": {
711
+ "type": "influxdb",
712
+ "uid": "${DS_CELERY}"
713
+ },
714
+ "groupBy": [
715
+ {
716
+ "params": [
717
+ "$__interval"
718
+ ],
719
+ "type": "time"
720
+ },
721
+ {
722
+ "params": [
723
+ "null"
724
+ ],
725
+ "type": "fill"
726
+ }
727
+ ],
728
+ "measurement": "metrics",
729
+ "orderByTime": "ASC",
730
+ "policy": "default",
731
+ "query": "SELECT \nmean(\"today_audio_request\") as audio_request,\nmean(\"today_audio_success\") as audio_success,\n\nmean(\"today_bad_request\") as bad_request,\n\nmean(\"today_video_request\") as video_request,\nmean(\"today_video_success\") as video_success\nFROM \"metrics\" WHERE $timeFilter GROUP BY time($__interval), * ORDER BY asc ",
732
+ "rawQuery": true,
733
+ "refId": "A",
734
+ "resultFormat": "time_series",
735
+ "select": [
736
+ [
737
+ {
738
+ "params": [
739
+ "today_audio_success"
740
+ ],
741
+ "type": "field"
742
+ },
743
+ {
744
+ "params": [],
745
+ "type": "mean"
746
+ }
747
+ ]
748
+ ],
749
+ "tags": []
750
+ }
751
+ ],
752
+ "title": "Video & Audio",
753
+ "type": "timeseries"
754
+ }
755
+ ],
756
+ "refresh": "",
757
+ "schemaVersion": 33,
758
+ "style": "dark",
759
+ "tags": [],
760
+ "templating": {
761
+ "list": [
762
+ {
763
+ "current": {},
764
+ "datasource": {
765
+ "type": "influxdb",
766
+ "uid": "${DS_CELERY}"
767
+ },
768
+ "definition": "show tag values with KEY=\"hostname\"",
769
+ "hide": 0,
770
+ "includeAll": true,
771
+ "label": "hostname",
772
+ "multi": true,
773
+ "name": "hostname",
774
+ "options": [],
775
+ "query": "show tag values with KEY=\"hostname\"",
776
+ "refresh": 1,
777
+ "regex": "",
778
+ "skipUrlSync": false,
779
+ "sort": 1,
780
+ "type": "query"
781
+ }
782
+ ]
783
+ },
784
+ "time": {
785
+ "from": "now-15m",
786
+ "to": "now"
787
+ },
788
+ "timepicker": {},
789
+ "timezone": "",
790
+ "title": "YouTube Download Celery",
791
+ "uid": "9yXGmc1nk",
792
+ "version": 14,
793
+ "weekStart": ""
794
+ }
conf/supervisor_main.conf ADDED
@@ -0,0 +1,34 @@
+ [supervisord]
+ nodaemon=true
+ logfile=/dev/null
+ logfile_maxbytes=0
+ user=root
+
+
+ [program:vnstat]
+ command=vnstatd -n
+ autorestart=true
+
+
+ [program:ytdl]
+ directory=/ytdlbot/ytdlbot/
+ command=python ytdl_bot.py
+ autorestart=true
+ priority=900
+ stopasgroup=true
+ startsecs = 30
+ startretries = 2
+
+ redirect_stderr=true
+ stdout_logfile_maxbytes = 50MB
+ stdout_logfile_backups = 2
+ stdout_logfile = /var/log/ytdl.log
+
+ [program:log]
+ command=tail -f /var/log/ytdl.log
+ autorestart=true
+ priority=999
+
+ redirect_stderr=true
+ stdout_logfile=/dev/fd/1
+ stdout_logfile_maxbytes=0
conf/supervisor_worker.conf ADDED
@@ -0,0 +1,33 @@
+ [supervisord]
+ nodaemon=true
+ logfile=/dev/null
+ logfile_maxbytes=0
+ user=root
+
+
+ [program:vnstat]
+ command=vnstatd -n
+ autorestart=true
+
+ [program:worker]
+ directory=/ytdlbot/ytdlbot/
+ command=python tasks.py
+ autorestart=true
+ priority=900
+ stopasgroup=true
+ startsecs = 5
+ startretries = 5
+
+ redirect_stderr=true
+ stdout_logfile_maxbytes = 50MB
+ stdout_logfile_backups = 2
+ stdout_logfile = /var/log/ytdl.log
+
+ [program:log]
+ command=tail -f /var/log/ytdl.log
+ autorestart=true
+ priority=999
+
+ redirect_stderr=true
+ stdout_logfile=/dev/fd/1
+ stdout_logfile_maxbytes=0
docker-compose.yml ADDED
@@ -0,0 +1,60 @@
+ version: '3.1'
+
+ services:
+   socat:
+     image: bennythink/socat
+     restart: always
+     volumes:
+       - /var/run/docker.sock:/var/run/docker.sock
+     entrypoint: [ "socat", "tcp-listen:2375,fork,reuseaddr","unix-connect:/var/run/docker.sock" ]
+
+   redis:
+     image: redis:7-alpine
+     restart: always
+     logging:
+       driver: none
+
+   mysql:
+     image: ubuntu/mysql:8.0-22.04_beta
+     restart: always
+     volumes:
+       - ./db_data:/var/lib/mysql
+     environment:
+       MYSQL_ROOT_PASSWORD: 'root'
+     command: --default-authentication-plugin=mysql_native_password
+     logging:
+       driver: none
+
+   ytdl:
+     image: bennythink/ytdlbot
+     env_file:
+       - env/ytdl.env
+     restart: always
+     depends_on:
+       - socat
+       - redis
+     volumes:
+       - ./data/vnstat/:/var/lib/vnstat/
+     labels:
+       - "com.centurylinklabs.watchtower.enable=true"
+
+   flower:
+     image: bennythink/ytdlbot
+     env_file:
+       - env/ytdl.env
+     restart: unless-stopped
+     command: [ "/usr/local/bin/celery",
+                "-A", "flower_tasks", "flower",
+                "--basic_auth=benny:123456",
+                "--address=0.0.0.0", "--persistent","--purge_offline_workers=3600" ]
+     volumes:
+       - ./data/flower:/ytdlbot/ytdlbot/flower
+     ports:
+       - "127.0.0.1:15555:5555"
+
+   instagram:
+     image: bennythink/ytdlbot
+     env_file:
+       - env/ytdl.env
+     restart: always
+     command: [ "/usr/local/bin/python", "/ytdlbot/ytdlbot/instagram.py" ]
k8s.md ADDED
@@ -0,0 +1,200 @@
+ ## Kubernetes
+
+ Kubernetes, also known as K8s, is an open-source system for automating deployment, scaling, and management of
+ containerized applications.
+
+ # Complete deployment guide for k8s deployment
+
+ * Contains every functionality
+ * Compatible with amd64, arm64 and armv7l
+
+ ## First: Get all files in the k8s folder
+
+ Download the `k8s` folder to a directory on your k8s server and change into that folder.
+
+ ## 1. Create the Redis deployment
+
+ ```shell
+ kubectl apply -f 01.redis.yml
+ ```
+
+ This command creates the ytdl namespace, the redis pod and the redis service.
+
+ ## 2. Create the MariaDB deployment
+
+ ```shell
+ kubectl apply -f 02.mariadb.yml
+ ```
+
+ This deployment claims 10GB of storage from storageClassName: longhorn. Please replace longhorn with your
+ storageClassName before applying.
+
+ ## 3. Set environment variables
+
+ Create a ConfigMap for the environment variables.
+
+ ### 3.1 Edit configmap.yml
+
+ ```shell
+ vim 03.configmap.yml
+ ```
+
+ You can configure the following environment variables:
+
+ * PYRO_WORKERS: number of workers for pyrogram, default is 100
+ * WORKERS: worker count for Celery
+ * APP_ID: **REQUIRED**, get it from https://core.telegram.org/
+ * APP_HASH: **REQUIRED**
+ * TOKEN: **REQUIRED**
+ * REDIS: **REQUIRED if you need VIP mode and cache** ⚠️ Don't publish your Redis server on the internet. ⚠️
+
+ * OWNER: owner username
+ * QUOTA: quota in bytes
+ * EX: quota expiry time
+ * MULTIPLY: VIP quota compared to the normal quota
+ * USD2CNY: exchange rate
+ * VIP: VIP mode, default: disable
+ * AFD_LINK
+ * COFFEE_LINK
+ * COFFEE_TOKEN
+ * AFD_TOKEN
+ * AFD_USER_ID
+
+ * AUTHORIZED_USER: users that can use this bot; user IDs, separated with `,`
+ * REQUIRED_MEMBERSHIP: group or channel username; users must join this group to use the bot. Can be used together
+ with `AUTHORIZED_USER` above.
+
+ * ENABLE_CELERY: distribution mode, default: disable. You can set up workers in different locations.
+ * ENABLE_FFMPEG: enable ffmpeg so Telegram can stream
+ * MYSQL_HOST: you'll have to set up MySQL if you enable VIP mode
+ * MYSQL_USER
+ * MYSQL_PASS
+ * GOOGLE_API_KEY: YouTube API key, required for YouTube video subscription.
+ * AUDIO_FORMAT: audio format, default is m4a. You can set it to any format known to and supported by ffmpeg, for
+ example `mp3`, `flac`, etc. ⚠️ m4a is the fastest; other formats may affect performance.
+ * ARCHIVE_ID: group or channel id/username. All downloads are sent to this group first and then forwarded to the end user.
+ **Inline buttons are lost during the forwarding.**
+
+ ### 3.2 Apply the ConfigMap for environment variables
+
+ ```shell
+ kubectl apply -f 03.configmap.yml
+ ```
+
+ ## 4. Run the Celery master
+
+ ```shell
+ kubectl apply -f 04.ytdl-master.yml
+ ```
+
+ This deployment creates the ytdl-pvc PersistentVolumeClaim on storageClassName: longhorn. This claim will contain the
+ vnstat and cookies folders and the flower database. Please replace longhorn with your storageClassName before applying.
+
+ ### 4.1 Set up Instagram cookies
+
+ Required if you want to support Instagram.
+
+ You can use the extension
+ [Get cookies.txt](https://chrome.google.com/webstore/detail/get-cookiestxt/bgaddhkoddajcdgocldbbfleckgcbcid)
+ to export Instagram cookies.
+
+ Find the pod running the ytdl master:
+
+ ```shell
+ kubectl get pods --namespace ytdl
+ ```
+
+ The name should look like ytdl-xxxxxxxx.
+
+ Access the pod:
+
+ ```shell
+ kubectl --namespace=ytdl exec --stdin --tty ytdl-xxx -- sh
+ ```
+
+ (replace ytdl-xxx with your pod name)
+
+ Go to the folder where ytdl-pvc is mounted:
+
+ ```shell
+ cd /ytdlbot/ytdlbot/data/
+ vim instagram.com_cookies.txt
+ # paste your cookies
+ ```
+
+ ## 5. Run the Celery workers
+
+ ```shell
+ kubectl apply -f 05.ytdl-worker.yml
+ ```
+
+ ## 6. Run the Flower image (OPTIONAL)
+
+ ### 6.1 Set up the flower db
+
+ Find the pod running the ytdl master:
+
+ ```shell
+ kubectl get pods --namespace ytdl
+ ```
+
+ The name should look like ytdl-xxxxxxxx.
+
+ Access the pod:
+
+ ```shell
+ kubectl --namespace=ytdl exec --stdin --tty ytdl-xxx -- sh
+ ```
+
+ (replace ytdl-xxx with your pod name)
+
+ Go to the folder where ytdl-pvc is mounted:
+
+ ```shell
+ cd /var/lib/vnstat/
+ ```
+
+ Create the flower database file:
+
+ ```shell
+ {} ~ python3
+ Python 3.9.9 (main, Nov 21 2021, 03:22:47)
+ [Clang 12.0.0 (clang-1200.0.32.29)] on darwin
+ Type "help", "copyright", "credits" or "license" for more information.
+ >>> import dbm;dbm.open("flower","n");exit()
+ ```
+
+ ### 6.2 Configure the Flower ingress
+
+ This step configures the ingress defined from line 51 of 06.flower.yml for your ingress service. It is only needed for
+ access from the internet.
+ The YAML should be adjusted to your load balancing, ingress and network setup.
+
+ To activate SSL:
+
+ ```yml
+ cert-manager.io/cluster-issuer: letsencrypt-prod
+ ```
+
+ Replace nginx with your ingress class:
+
+ ```yml
+ ingressClassName: nginx
+ ```
+
+ Add your domain, for example:
+
+ ```yml
+ tls:
+   - hosts:
+       - flower.benny.com
+     secretName: flower-tls
+ rules:
+   - host: flower.benny.com
+ ```
+
+ ### 6.3 Apply the Flower deployment
+
+ ```shell
+ kubectl apply -f 06.flower.yml
+ ```
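As an aside to section 6.1 above: the same Flower database file can be created non-interactively instead of through the Python REPL; this mirrors the `flower` target in the Makefile.

```shell
cd /var/lib/vnstat/
echo 'import dbm;dbm.open("flower","n");exit()' | python3
```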
k8s/01.redis.yml ADDED
@@ -0,0 +1,53 @@
+ apiVersion: v1
+ kind: Namespace
+ metadata:
+   name: ytdl
+
+ ---
+ apiVersion: apps/v1
+ kind: Deployment
+ metadata:
+   creationTimestamp: null
+   labels:
+     ytdl: redis
+   name: redis
+   namespace: ytdl
+ spec:
+   replicas: 1
+   selector:
+     matchLabels:
+       ytdl: redis
+   strategy: {}
+   template:
+     metadata:
+       creationTimestamp: null
+       labels:
+         ytdl: redis
+     spec:
+       containers:
+         - image: redis:7-alpine
+           name: redis
+           ports:
+             - containerPort: 6379
+           resources: {}
+       restartPolicy: Always
+ status: {}
+
+ ---
+ apiVersion: v1
+ kind: Service
+ metadata:
+   creationTimestamp: null
+   labels:
+     ytdl: redis
+   name: redis
+   namespace: ytdl
+ spec:
+   ports:
+     - name: "6379"
+       port: 6379
+       targetPort: 6379
+   selector:
+     ytdl: redis
+ status:
+   loadBalancer: {}
k8s/02.mariadb.yml ADDED
@@ -0,0 +1,80 @@
+ apiVersion: v1
+ kind: PersistentVolumeClaim
+ metadata:
+   creationTimestamp: null
+   labels:
+     ytdl: mariadb-pvc
+   name: mariadb-pvc
+   namespace: ytdl
+ spec:
+   accessModes:
+     - ReadWriteOnce
+   storageClassName: longhorn
+   resources:
+     requests:
+       storage: 10Gi
+ status: {}
+
+ ---
+ apiVersion: apps/v1
+ kind: Deployment
+ metadata:
+   annotations:
+   creationTimestamp: null
+   labels:
+     ytdl: mariadb
+   name: mariadb
+   namespace: ytdl
+ spec:
+   replicas: 1
+   selector:
+     matchLabels:
+       ytdl: mariadb
+   strategy:
+     type: Recreate
+   template:
+     metadata:
+       creationTimestamp: null
+       labels:
+         ytdl: mariadb
+     spec:
+       containers:
+         - env:
+             - name: MYSQL_ROOT_PASSWORD
+               value: ro0tP4sSworD
+             - name: MYSQL_DATABASE
+               value: ytdl
+           image: mariadb:latest
+           name: mariadb
+           ports:
+             - containerPort: 3306
+           resources: {}
+           volumeMounts:
+             - mountPath: /var/lib/mysql
+               name: "mariadb-persistent-storage"
+       restartPolicy: Always
+       volumes:
+         - name: mariadb-persistent-storage
+           persistentVolumeClaim:
+             claimName: mariadb-pvc
+ status: {}
+
+ ---
+ apiVersion: v1
+ kind: Service
+ metadata:
+   creationTimestamp: null
+   labels:
+     ytdl: mariadb
+   name: mariadb-svc
+   namespace: ytdl
+ spec:
+   ports:
+     - name: "3306"
+       port: 3306
+       targetPort: 3306
+   selector:
+     ytdl: mariadb
+ status:
+   loadBalancer: {}
+
k8s/03.configmap.yml ADDED
@@ -0,0 +1,17 @@
+ apiVersion: v1
+ kind: ConfigMap
+ metadata:
+   name: ytdlenv
+   namespace: ytdl
+   annotations:
+ data:
+   APP_HASH:
+   APP_ID:
+   TOKEN:
+   ARCHIVE_ID:
+   ENABLE_CELERY: 'True'
+   ENABLE_FFMPEG: 'True'
+   MYSQL_HOST: mariadb-svc
+   MYSQL_PASS: ro0tP4sSworD
+   MYSQL_USER: root
+   REDIS: redis
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ apiVersion: v1
3
+ kind: PersistentVolumeClaim
4
+ metadata:
5
+ name: ytdl-pvc
6
+ namespace: ytdl
7
+ creationTimestamp: null
8
+ labels:
9
+ ytdl: ytdl-pvc
10
+ spec:
11
+ accessModes:
12
+ - ReadWriteMany
13
+ storageClassName: longhorn
14
+ resources:
15
+ requests:
16
+ storage: 10Gi
17
+ status: {}
18
+
19
+ ---
20
+ apiVersion: apps/v1
21
+ kind: Deployment
22
+ metadata:
23
+ name: ytdl
24
+ namespace: ytdl
25
+ creationTimestamp: null
26
+ labels:
27
+ ytdl: ytdl
28
+ spec:
29
+ replicas: 1
30
+ selector:
31
+ matchLabels:
32
+ ytdl: ytdl
33
+ template:
34
+ metadata:
35
+ creationTimestamp: null
36
+ labels:
37
+ ytdl: ytdl
38
+ spec:
39
+ volumes:
40
+ - name: ytdl-pvc
41
+ persistentVolumeClaim:
42
+ claimName: ytdl-pvc
43
+ containers:
44
+ - name: ytdl
45
+ image: bennythink/ytdlbot
46
+ envFrom:
47
+ - configMapRef:
48
+ name: ytdlenv
49
+ resources: {}
50
+ volumeMounts:
51
+ - name: ytdl-pvc
52
+ mountPath: /var/lib/vnstat/
53
+ subPath: vnstat/
54
+ - name: ytdl-pvc
55
+ mountPath: /ytdlbot/ytdlbot/data/
56
+ subPath: data/
57
+ terminationMessagePath: /dev/termination-log
58
+ terminationMessagePolicy: File
59
+ imagePullPolicy: Always
60
+ restartPolicy: Always
61
+ terminationGracePeriodSeconds: 30
62
+ dnsPolicy: ClusterFirst
63
+ securityContext: {}
64
+ schedulerName: default-scheduler
65
+ status: {}
k8s/05.ytdl-worker.yml ADDED
@@ -0,0 +1,47 @@
+ apiVersion: apps/v1
+ kind: Deployment
+ metadata:
+   creationTimestamp: null
+   labels:
+     ytdl: ytdl-worker
+   name: ytdl-worker
+   namespace: ytdl
+ spec:
+   replicas: 4
+   selector:
+     matchLabels:
+       ytdl: ytdl-worker
+   template:
+     metadata:
+       creationTimestamp: null
+       labels:
+         ytdl: ytdl-worker
+     spec:
+       volumes:
+         - name: ytdl-pvc
+           persistentVolumeClaim:
+             claimName: ytdl-pvc
+       containers:
+         - name: ytdl-worker
+           image: bennythink/ytdlbot
+           args:
+             - /usr/local/bin/supervisord
+             - '-c'
+             - /ytdlbot/conf/supervisor_worker.conf
+           envFrom:
+             - configMapRef:
+                 name: ytdlenv
+           resources: {}
+           volumeMounts:
+             - name: ytdl-pvc
+               mountPath: /ytdlbot/ytdlbot/data/
+               subPath: data/
+           terminationMessagePath: /dev/termination-log
+           terminationMessagePolicy: File
+           imagePullPolicy: Always
+       restartPolicy: Always
+       terminationGracePeriodSeconds: 30
+       dnsPolicy: ClusterFirst
+       securityContext: {}
+       schedulerName: default-scheduler
+ status: {}
k8s/06.flower.yml ADDED
@@ -0,0 +1,101 @@
+ apiVersion: apps/v1
+ kind: Deployment
+ metadata:
+   creationTimestamp: null
+   labels:
+     ytdl: flower
+   name: flower
+   namespace: ytdl
+ spec:
+   replicas: 1
+   selector:
+     matchLabels:
+       ytdl: flower
+   strategy:
+     type: Recreate
+   template:
+     metadata:
+       creationTimestamp: null
+       labels:
+         ytdl: flower
+     spec:
+       containers:
+         - envFrom:
+             - configMapRef:
+                 name: ytdlenv
+           args:
+             - /usr/local/bin/celery
+             - -A
+             - flower_tasks
+             - flower
+             - --basic_auth=bennythink:123456
+             - --address=0.0.0.0
+             - --persistent
+             - --purge_offline_workers=3600
+           image: bennythink/ytdlbot
+           name: flower
+           ports:
+             - containerPort: 5555
+           resources: {}
+           volumeMounts:
+             - name: ytdl-pvc
+               mountPath: /ytdlbot/ytdlbot/flower
+               subPath: vnstat/flower
+       restartPolicy: Always
+       volumes:
+         - name: ytdl-pvc
+           persistentVolumeClaim:
+             claimName: ytdl-pvc
+ status: {}
+
+ # THE PART BELOW IS OPTIONAL: USE IT IF YOU WANT TO EXPOSE THE FLOWER PAGE TO THE INTERNET.
+ # Adjust it to match your load-balancing setup.
+ ---
+ apiVersion: v1
+ kind: Service
+ metadata:
+   creationTimestamp: null
+   labels:
+     ytdl: flower
+   name: flower-svc
+   namespace: ytdl
+ spec:
+   type: NodePort
+   ports:
+     - name: "5555"
+       protocol: TCP
+       port: 5555
+       targetPort: 5555
+   selector:
+     ytdl: flower
+ status:
+   loadBalancer: {}
+
+ ---
+ apiVersion: networking.k8s.io/v1
+ kind: Ingress
+ metadata:
+   name: nginx-flower-ingress
+   namespace: ytdl
+   annotations:
+     # cert-manager.io/cluster-issuer: letsencrypt-prod
+     nginx.ingress.kubernetes.io/rewrite-target: /
+     # nginx.ingress.kubernetes.io/whitelist-source-range: 14.161.27.151  # limit access by IP address
+
+ spec:
+   ingressClassName: nginx
+   tls:
+     - hosts:
+         - your-domain
+       secretName: flower-tls
+   rules:
+     - host: your-domain
+       http:
+         paths:
+           - path: /
+             pathType: Prefix
+             backend:
+               service:
+                 name: flower-svc
+                 port:
+                   number: 5555
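Usage note: a sketch for reaching the Flower dashboard without exposing it publicly, assuming the deployment above is running; port-forwarding sidesteps the NodePort/Ingress entirely.

    kubectl apply -f k8s/06.flower.yml
    # forward the dashboard to localhost instead of using the NodePort/Ingress
    kubectl -n ytdl port-forward deployment/flower 5555:5555
    # then browse to http://localhost:5555 and sign in with the basic_auth credentials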
main.py ADDED
@@ -0,0 +1,21 @@
+ from fastapi import FastAPI
+ import subprocess
+
+ def run_bot():
+     try:
+         # Launch the Telegram bot as a subprocess and wait for it to exit
+         subprocess.run(["python", "ytdlbot/ytdl_bot.py"], check=True)
+     except subprocess.CalledProcessError as e:
+         print(f"Error running ytdl_bot.py: {e}")
+
+
+ app = FastAPI()
+
+
+ @app.get("/")
+ async def root():
+     return {"message": "Hello World"}
+
+ @app.get("/okk")
+ async def okk():
+     run_bot()
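Usage note: a minimal sketch of serving this FastAPI wrapper with uvicorn and triggering the bot, assuming main.py sits at the repository root; the host and port here are arbitrary choices, not fixed by the project.

    uvicorn main:app --host 0.0.0.0 --port 7860
    # health check, then start the bot subprocess (this request blocks until the bot exits)
    curl http://localhost:7860/
    curl http://localhost:7860/okk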
requirements.txt ADDED
@@ -0,0 +1,31 @@
+ pyrogram
+ tgcrypto
+ yt-dlp
+ APScheduler
+ beautifultable
+ ffmpeg-python
+ PyMySQL
+ celery
+ filetype
+ flower
+ psutil
+ influxdb
+ beautifulsoup4
+ fakeredis
+ supervisor
+ tgbot-ping
+ redis
+ requests
+ tqdm
+ requests-toolbelt
+ ffpb
+ youtube-search-python
+ token-bucket
+ coloredlogs
+ tronpy
+ mnemonic
+ qrcode
+ blinker
+ flask
+ fastapi
+ uvicorn
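Installation note: a hedged sketch, assuming Python 3.8+ and that ffmpeg is installed separately on the system.

    python -m venv venv
    source venv/bin/activate
    pip install -r requirements.txt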
scripts/low_id.sh ADDED
@@ -0,0 +1,12 @@
+ #!/bin/bash
+ export PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin:/usr/local/go/bin:/opt/bin
+
+ # Check the logs for the given string
+ if docker-compose logs --tail=100 ytdl | grep -q "The msg_id is too low"; then
+     # If the string is found, recreate the ytdl service
+     echo "Recreating ytdl service: 'The msg_id is too low' found in logs."
+     docker-compose stop ytdl && docker-compose rm -f ytdl && docker-compose up -d
+
+ else
+     echo "String not found in logs."
+ fi
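Usage note: one way to run this check periodically is a cron entry; the project path and log file below are assumptions, not part of the repository.

    # /etc/cron.d/ytdl-low-id  (assumes the compose project lives in /opt/ytdlbot)
    */10 * * * * root cd /opt/ytdlbot && bash scripts/low_id.sh >> /var/log/ytdl_low_id.log 2>&1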
scripts/migrate_to_mysql.py ADDED
@@ -0,0 +1,27 @@
+ #!/usr/local/bin/python3
+ # coding: utf-8
+
+ # ytdlbot - migrate_to_mysql.py
+ # 12/29/21 15:28
+ #
+
+ __author__ = "Benny <[email protected]>"
+
+ import sqlite3
+
+ import pymysql
+
+ mysql_con = pymysql.connect(host='localhost', user='root', passwd='root', db='vip', charset='utf8mb4')
+ sqlite_con = sqlite3.connect('vip.sqlite')
+
+ vips = sqlite_con.execute('SELECT * FROM VIP').fetchall()
+
+ for vip in vips:
+     mysql_con.cursor().execute('INSERT INTO vip VALUES (%s, %s, %s, %s, %s, %s)', vip)
+
+ settings = sqlite_con.execute('SELECT * FROM settings').fetchall()
+
+ for setting in settings:
+     mysql_con.cursor().execute("INSERT INTO settings VALUES (%s,%s,%s)", setting)
+
+ mysql_con.commit()
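Usage note: a hedged run sketch, assuming vip.sqlite sits in the current directory, the hard-coded MySQL credentials match your instance, and the target tables already exist.

    pip install pymysql
    python3 scripts/migrate_to_mysql.py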
scripts/start.sh ADDED
@@ -0,0 +1,13 @@
+ docker run -d --restart unless-stopped --name ytdl \
+     --net host \
+     -e TOKEN=12345 \
+     -e APP_ID=123123 \
+     -e APP_HASH=4990 \
+     -e ENABLE_CELERY=True \
+     -e REDIS=192.168.6.1 \
+     -e MYSQL_HOST=192.168.6.1 \
+     -e WORKERS=4 \
+     -e VIP=True \
+     -e CUSTOM_TEXT=#StandWithUkraine \
+     bennythink/ytdlbot \
+     /usr/local/bin/supervisord -c "/ytdlbot/conf/supervisor_worker.conf"
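Alternative sketch: the same worker can be started with the secrets loaded from an env file instead of individual -e flags; the file path is an assumption (worker.yml uses env/ytdl.env).

    # env/ytdl.env would carry TOKEN, APP_ID, APP_HASH, REDIS, MYSQL_HOST, ...
    docker run -d --restart unless-stopped --name ytdl \
        --net host \
        --env-file env/ytdl.env \
        bennythink/ytdlbot \
        /usr/local/bin/supervisord -c "/ytdlbot/conf/supervisor_worker.conf"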
scripts/transfer.py ADDED
@@ -0,0 +1,29 @@
+ #!/usr/bin/env python3
+ # coding: utf-8
+
+ # ytdlbot - transfer.py
+ # 2023-12-07 18:21
+ from tronpy import Tron
+ from tronpy.hdwallet import seed_from_mnemonic, key_from_seed
+ from tronpy.keys import PrivateKey
+
+ mnemonic = "web horse smile ramp olive slush blue property world physical donkey pumpkin"
+
+ client = Tron(network="nile")
+
+ from_ = client.generate_address_from_mnemonic(mnemonic, account_path="m/44'/195'/0'/0/0")["base58check_address"]
+ balance = client.get_account_balance(from_)
+ print("my addr: ", from_, "balance: ", balance)
+ to = input("to: ")
+ amount = int(input("amount in TRX: "))
+
+
+ def mnemonic_to_private_key():
+     seed = seed_from_mnemonic(mnemonic, passphrase="")
+     private_key = key_from_seed(seed, account_path="m/44'/195'/0'/0/0")
+     return PrivateKey(private_key)
+
+
+ t = client.trx.transfer(from_, to, amount * 1_000_000).build().sign(mnemonic_to_private_key()).broadcast()
+
+ print(t.wait())
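Usage note: the script is interactive and hard-codes a mnemonic on the Nile testnet, so it should only be run with throwaway keys; the recipient address and amount below are placeholders.

    pip install tronpy
    python3 scripts/transfer.py
    # prompts: "to: <recipient base58 address>" and "amount in TRX: <integer>"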
worker.yml ADDED
@@ -0,0 +1,15 @@
+ version: '3.1'
+
+ services:
+   worker:
+     image: bennythink/ytdlbot
+     env_file:
+       - env/ytdl.env
+     restart: always
+     command: [ "/usr/local/bin/supervisord", "-c", "/ytdlbot/conf/supervisor_worker.conf" ]
+     # network_mode: "host"
+     # deploy:
+     #   resources:
+     #     limits:
+     #       cpus: '0.3'
+     #       memory: 1500M
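Usage note: a hedged example of launching extra workers from this compose file, assuming env/ytdl.env exists next to it and Docker Compose v2 is installed.

    docker compose -f worker.yml up -d --scale worker=2
    docker compose -f worker.yml logs -f worker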
ytdlbot/__pycache__/channel.cpython-310.pyc ADDED
Binary file (6.86 kB). View file
 
ytdlbot/__pycache__/channel.cpython-38.pyc ADDED
Binary file (6.88 kB). View file
 
ytdlbot/__pycache__/client_init.cpython-310.pyc ADDED
Binary file (575 Bytes). View file
 
ytdlbot/__pycache__/client_init.cpython-38.pyc ADDED
Binary file (571 Bytes). View file
 
ytdlbot/__pycache__/config.cpython-310.pyc ADDED
Binary file (1.84 kB). View file
 
ytdlbot/__pycache__/config.cpython-38.pyc ADDED
Binary file (1.85 kB). View file
 
ytdlbot/__pycache__/constant.cpython-310.pyc ADDED
Binary file (3.9 kB). View file
 
ytdlbot/__pycache__/constant.cpython-38.pyc ADDED
Binary file (3.89 kB). View file
 
ytdlbot/__pycache__/database.cpython-310.pyc ADDED
Binary file (13.1 kB). View file
 
ytdlbot/__pycache__/database.cpython-38.pyc ADDED
Binary file (13.2 kB). View file
 
ytdlbot/__pycache__/downloader.cpython-310.pyc ADDED
Binary file (8.38 kB). View file
 
ytdlbot/__pycache__/downloader.cpython-38.pyc ADDED
Binary file (8.35 kB). View file
 
ytdlbot/__pycache__/flower_tasks.cpython-310.pyc ADDED
Binary file (325 Bytes). View file
 
ytdlbot/__pycache__/flower_tasks.cpython-38.pyc ADDED
Binary file (323 Bytes). View file
 
ytdlbot/__pycache__/limit.cpython-310.pyc ADDED
Binary file (10 kB). View file
 
ytdlbot/__pycache__/limit.cpython-38.pyc ADDED
Binary file (10 kB). View file
 
ytdlbot/__pycache__/tasks.cpython-310.pyc ADDED
Binary file (13.1 kB). View file