File size: 4,138 Bytes
ffccd9e
 
 
03b32ac
 
ffccd9e
 
 
 
 
 
03b32ac
ffccd9e
 
 
 
 
 
 
 
 
 
 
03b32ac
 
 
 
 
 
 
 
 
 
 
 
ffccd9e
 
03b32ac
 
 
ffccd9e
03b32ac
ffccd9e
 
 
 
 
 
03b32ac
 
ffccd9e
03b32ac
ffccd9e
03b32ac
 
ffccd9e
03b32ac
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ffccd9e
 
03b32ac
 
ffccd9e
03b32ac
 
 
 
 
 
 
 
 
ffccd9e
03b32ac
ffccd9e
 
03b32ac
 
 
ffccd9e
03b32ac
 
 
ffccd9e
 
 
 
 
03b32ac
 
 
ffccd9e
 
 
03b32ac
 
 
ffccd9e
 
03b32ac
 
 
 
 
 
 
 
 
 
 
 
ffccd9e
 
03b32ac
 
 
ffccd9e
03b32ac
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
import threading
import time
import traceback
import sys

from base import VERSION, LoginException, Scraper, Udemy, scraper_dict
from colors import bw, by, fb, fg, fr

# DUCE-CLI

def create_scraping_thread(site: str):
    """Run the scraper for *site* on a daemon thread and print its progress.

    Looks up the scraper method name for ``site`` in ``scraper_dict``, starts
    it on the module-level ``scraper`` instance, then polls the scraper's
    ``<name>_length`` / ``<name>_progress`` / ``<name>_done`` attributes,
    printing a percentage line until the scrape completes.

    Never propagates an exception: any failure (including the scraper
    signalling an error by publishing a length of -1) is caught and the
    scraper's recorded ``<name>_error`` (or the local traceback) is printed.
    """
    code_name = scraper_dict[site]
    try:
        threading.Thread(target=getattr(scraper, code_name), daemon=True).start()

        # Wait until the scraper publishes how many items it found.
        while getattr(scraper, f"{code_name}_length") == 0:
            time.sleep(0.1)  # Avoid busy waiting

        total = getattr(scraper, f"{code_name}_length")
        if total == -1:
            # The scraper signals failure by setting its length to -1.
            raise Exception(f"Error in: {site}")

        while not getattr(scraper, f"{code_name}_done"):
            time.sleep(0.5)
            current_progress = getattr(scraper, f"{code_name}_progress")
            percent = (current_progress / total) * 100 if total else 0
            print(f"[{site}] Progress: {percent:.2f}%")
            sys.stdout.flush()

        print(f"[{site}] Scraping Completed βœ…")
        sys.stdout.flush()

    except Exception:
        # Prefer the scraper's own recorded error; fall back to our traceback.
        error = getattr(scraper, f"{code_name}_error", traceback.format_exc())
        print(f"[ERROR] {site}: {error}")
        sys.stdout.flush()

##########################################

# Initialise the CLI client, pull saved settings, and surface any
# available update notice before attempting to log in.
udemy = Udemy("cli")
udemy.load_settings()

login_title, main_title = udemy.check_for_update()
update_available = "Update" in login_title
if update_available:
    print(by + fr + login_title)
    sys.stdout.flush()

############## MAIN #############

# Try login methods in order of preference until one succeeds:
# browser cookies -> saved credentials -> interactive prompt.
login_successful = False
while not login_successful:
    # Assign login_method *before* any call that can raise, so the
    # except-handler can always inspect it (previously a LoginException
    # from fetch_cookies() caused a NameError in the handler).
    login_method = ""
    try:
        if udemy.settings["use_browser_cookies"]:
            login_method = "Browser Cookies"
            udemy.fetch_cookies()
        elif udemy.settings["email"] and udemy.settings["password"]:
            email, password = udemy.settings["email"], udemy.settings["password"]
            login_method = "Saved Email and Password"
        else:
            email = input("Email: ")
            password = input("Password: ")
            login_method = "Email and Password"

        print(fb + f"Trying to login using {login_method}")
        sys.stdout.flush()

        # Cookie-based login needs no credentials; the other two paths do.
        if "Email" in login_method:
            udemy.manual_login(email, password)

        udemy.get_session_info()
        if "Email" in login_method:
            # Persist working credentials for future runs.
            udemy.settings["email"], udemy.settings["password"] = email, password
        login_successful = True
    except LoginException as e:
        print(fr + f"Login Failed: {e}")
        sys.stdout.flush()
        if "Browser" in login_method:
            print("Can't login using cookies. Switching to manual login.")
            udemy.settings["use_browser_cookies"] = False
        elif "Email" in login_method:
            # Saved credentials were wrong; clear them so the next pass
            # prompts interactively.
            udemy.settings["email"], udemy.settings["password"] = "", ""

udemy.save_settings()

print(fg + f"Logged in as {udemy.display_name}")
sys.stdout.flush()

# Validate the user's configured settings before doing any work.
user_dumb = udemy.is_user_dumb()

if user_dumb:
    print(bw + fr + "Invalid user settings. Exiting...")
    sys.stdout.flush()
    # sys.exit() is reliable everywhere; the builtin exit() is injected by
    # the site module and may be absent in frozen/embedded interpreters.
    sys.exit()

scraper = Scraper(udemy.sites)

try:
    print("πŸ”Ž Starting Course Scraping...")
    sys.stdout.flush()

    # Fan out one thread per site; returns once all scrapers report done.
    udemy.scraped_data = scraper.get_scraped_courses(create_scraping_thread)
    time.sleep(0.5)

    print("\nβœ… Scraping Completed. Starting Enrollment...\n")
    sys.stdout.flush()

    udemy.start_enrolling()

    # Summary of the enrollment run.
    udemy.print(
        f"\nβœ” Successfully Enrolled: {udemy.successfully_enrolled_c}", color="green"
    )
    udemy.print(
        f"πŸ’° Amount Saved: {round(udemy.amount_saved_c,2)} {udemy.currency.upper()}",
        color="light green",
    )
    udemy.print(f"πŸ”΅ Already Enrolled: {udemy.already_enrolled_c}", color="blue")
    udemy.print(f"⚠️ Excluded Courses: {udemy.excluded_c}", color="yellow")
    udemy.print(f"❌ Expired Courses: {udemy.expired_c}", color="red")

    sys.stdout.flush()

except Exception:
    # Top-level boundary: report the traceback instead of crashing so the
    # final status line below still prints.
    error = traceback.format_exc()
    print(f"\n[ERROR] {error}\n")
    sys.stdout.flush()

# βœ… Remove `input("Press Enter to exit...")` to prevent blocking in Flask
print("βœ… Process Completed!")
sys.stdout.flush()