**Refactor CSS and update LinkedIn post publishing logic**
Browse files- Update CSS styles for better readability and consistency
- Remove background image from .presentation class
- Refactor LinkedIn post publishing function for improved clarity
- Add new function for LinkedIn post generation
- Update authentication and navigation functions
- Remove unnecessary comments and whitespace
- __pycache__/functi.cpython-311.pyc +0 -0
- __pycache__/line_db.cpython-311.pyc +0 -0
- app.css +31 -28
- app.py +1 -1
- functi.py +51 -83
- line_db.py +35 -51
__pycache__/functi.cpython-311.pyc
CHANGED
Binary files a/__pycache__/functi.cpython-311.pyc and b/__pycache__/functi.cpython-311.pyc differ
|
|
__pycache__/line_db.cpython-311.pyc
CHANGED
Binary files a/__pycache__/line_db.cpython-311.pyc and b/__pycache__/line_db.cpython-311.pyc differ
|
|
app.css
CHANGED
@@ -22,15 +22,13 @@ div#root{
|
|
22 |
|
23 |
.source_body{
|
24 |
margin: 2em;
|
25 |
-
background-color: #ECF4F7;
|
26 |
-
|
27 |
}
|
28 |
|
29 |
.layout_top {
|
30 |
padding-top: 5em;
|
31 |
}
|
32 |
|
33 |
-
|
34 |
.css-w1lhxi{
|
35 |
/* --color-background-light: #910029; */
|
36 |
background-color: #910029 ;
|
@@ -44,20 +42,25 @@ div#root{
|
|
44 |
padding-top: 1.3em;
|
45 |
}
|
46 |
|
47 |
-
.Title_Page {
|
48 |
-
|
49 |
-
|
50 |
-
|
51 |
display: block;
|
52 |
box-sizing: border-box;
|
53 |
-
position: relative;
|
54 |
-
|
55 |
-
|
56 |
-
|
57 |
-
|
58 |
-
|
59 |
-
|
60 |
-
|
|
|
|
|
|
|
|
|
|
|
61 |
|
62 |
html, body {
|
63 |
margin: 0;
|
@@ -70,19 +73,15 @@ html, body {
|
|
70 |
padding: 0;
|
71 |
}
|
72 |
|
73 |
-
/* 3. Partie .presentation sans
|
74 |
.presentation {
|
75 |
margin: 0;
|
76 |
padding: 0;
|
77 |
height: 100vh;
|
78 |
width: 100%;
|
79 |
position: relative;
|
80 |
-
background-image
|
81 |
-
|
82 |
-
url("src/image/20250715_1931_Digital Workspace Dashboard_remix_01k07nha1pfn1sas0rkdrt7sj0.webp");
|
83 |
-
background-size: cover;
|
84 |
-
background-position: center;
|
85 |
-
color: white;
|
86 |
display: flex;
|
87 |
flex-direction: column;
|
88 |
justify-content: center;
|
@@ -90,13 +89,14 @@ html, body {
|
|
90 |
font-family: sans-serif;
|
91 |
}
|
92 |
|
93 |
-
/* 4. Nom de l
|
94 |
.App_name {
|
95 |
position: absolute;
|
96 |
top: 20px;
|
97 |
left: 30px;
|
98 |
font-size: 2.5em;
|
99 |
font-family: 'Pacifico', cursive;
|
|
|
100 |
z-index: 2;
|
101 |
}
|
102 |
|
@@ -108,16 +108,19 @@ html, body {
|
|
108 |
font-size: 2em;
|
109 |
padding: 20px 30px;
|
110 |
max-width: 80%;
|
111 |
-
background: rgba(
|
|
|
112 |
border-radius: 8px;
|
|
|
113 |
transition: transform 0.3s ease, background 0.3s ease, box-shadow 0.3s ease;
|
114 |
}
|
|
|
115 |
.presentation_text:hover {
|
116 |
-
background: rgba(
|
117 |
transform: translateY(-5px) scale(1.02);
|
118 |
box-shadow:
|
119 |
-
0 10px 20px rgba(0, 0, 0, 0.
|
120 |
-
0 0 15px rgba(
|
121 |
}
|
122 |
|
123 |
/* 6. Responsive */
|
@@ -131,4 +134,4 @@ html, body {
|
|
131 |
font-size: 1.3em;
|
132 |
padding: 15px 20px;
|
133 |
}
|
134 |
-
}
|
|
|
22 |
|
23 |
.source_body{
|
24 |
margin: 2em;
|
25 |
+
background-color: #ECF4F7;
|
|
|
26 |
}
|
27 |
|
28 |
.layout_top {
|
29 |
padding-top: 5em;
|
30 |
}
|
31 |
|
|
|
32 |
.css-w1lhxi{
|
33 |
/* --color-background-light: #910029; */
|
34 |
background-color: #910029 ;
|
|
|
42 |
padding-top: 1.3em;
|
43 |
}
|
44 |
|
45 |
+
.Title_Page {
|
46 |
+
font-weight: bold;
|
47 |
+
font-size: 2em;
|
48 |
+
color: #39404B;
|
49 |
display: block;
|
50 |
box-sizing: border-box;
|
51 |
+
position: relative;
|
52 |
+
width: 100%;
|
53 |
+
}
|
54 |
+
|
55 |
+
.Title_Page::after {
|
56 |
+
content: '';
|
57 |
+
position: absolute;
|
58 |
+
bottom: -8px;
|
59 |
+
left: 0;
|
60 |
+
width: 100%;
|
61 |
+
height: 3px;
|
62 |
+
background-color: #39404B;
|
63 |
+
}
|
64 |
|
65 |
html, body {
|
66 |
margin: 0;
|
|
|
73 |
padding: 0;
|
74 |
}
|
75 |
|
76 |
+
/* 3. Partie .presentation sans background image */
|
77 |
.presentation {
|
78 |
margin: 0;
|
79 |
padding: 0;
|
80 |
height: 100vh;
|
81 |
width: 100%;
|
82 |
position: relative;
|
83 |
+
background-color: #f8f9fa; /* Light gray background instead of image */
|
84 |
+
color: #2c3e50; /* Dark blue-gray text for good contrast */
|
|
|
|
|
|
|
|
|
85 |
display: flex;
|
86 |
flex-direction: column;
|
87 |
justify-content: center;
|
|
|
89 |
font-family: sans-serif;
|
90 |
}
|
91 |
|
92 |
+
/* 4. Nom de l'app en haut à gauche, stylé Pacifico */
|
93 |
.App_name {
|
94 |
position: absolute;
|
95 |
top: 20px;
|
96 |
left: 30px;
|
97 |
font-size: 2.5em;
|
98 |
font-family: 'Pacifico', cursive;
|
99 |
+
color: #2c3e50; /* Dark color for contrast */
|
100 |
z-index: 2;
|
101 |
}
|
102 |
|
|
|
108 |
font-size: 2em;
|
109 |
padding: 20px 30px;
|
110 |
max-width: 80%;
|
111 |
+
background: rgba(44, 62, 80, 0.05); /* Light dark background */
|
112 |
+
border: 1px solid rgba(44, 62, 80, 0.1);
|
113 |
border-radius: 8px;
|
114 |
+
color: #2c3e50; /* Dark text color */
|
115 |
transition: transform 0.3s ease, background 0.3s ease, box-shadow 0.3s ease;
|
116 |
}
|
117 |
+
|
118 |
.presentation_text:hover {
|
119 |
+
background: rgba(44, 62, 80, 0.1);
|
120 |
transform: translateY(-5px) scale(1.02);
|
121 |
box-shadow:
|
122 |
+
0 10px 20px rgba(0, 0, 0, 0.15),
|
123 |
+
0 0 15px rgba(44, 62, 80, 0.2);
|
124 |
}
|
125 |
|
126 |
/* 6. Responsive */
|
|
|
134 |
font-size: 1.3em;
|
135 |
padding: 15px 20px;
|
136 |
}
|
137 |
+
}
|
app.py
CHANGED
@@ -184,7 +184,7 @@ if __name__ == "__main__":
|
|
184 |
gui = Gui(pages=pages)
|
185 |
gui.run(
|
186 |
debug=True,
|
187 |
-
port=7860,host = "0.0.0.0",
|
188 |
stylekit=stylekit,
|
189 |
title="Lin",
|
190 |
dark_mode=False,
|
|
|
184 |
gui = Gui(pages=pages)
|
185 |
gui.run(
|
186 |
debug=True,
|
187 |
+
# port=7860,host = "0.0.0.0",
|
188 |
stylekit=stylekit,
|
189 |
title="Lin",
|
190 |
dark_mode=False,
|
functi.py
CHANGED
@@ -3,7 +3,7 @@ import secrets
|
|
3 |
from http.server import BaseHTTPRequestHandler, HTTPServer
|
4 |
import threading
|
5 |
import os
|
6 |
-
import time
|
7 |
import datetime
|
8 |
from line_db import DatabaseManager
|
9 |
from urllib.parse import urlencode
|
@@ -21,8 +21,6 @@ from apscheduler.triggers.cron import CronTrigger
|
|
21 |
apsched = BackgroundScheduler()
|
22 |
apsched.start()
|
23 |
|
24 |
-
|
25 |
-
|
26 |
Linked_account_name = " "
|
27 |
Linked_social_network = " "
|
28 |
data_schedule ={}
|
@@ -34,7 +32,6 @@ time_value_minute = 00
|
|
34 |
day_value = "Monday"
|
35 |
Linked_social_network = "Linkedin"
|
36 |
|
37 |
-
|
38 |
api_key_hugging = os.environ.get("hugging_key")
|
39 |
Source_table = {}
|
40 |
data_account = {}
|
@@ -64,7 +61,6 @@ urlss = ""
|
|
64 |
states = ""
|
65 |
social_network = "Linkedin"
|
66 |
|
67 |
-
|
68 |
db_manager = DatabaseManager(url,key)
|
69 |
client = Client("Zelyanoth/Linkedin_poster_dev",hf_token = api_key_hugging)
|
70 |
|
@@ -72,14 +68,8 @@ client_id = os.environ.get("CLIENT_ID")
|
|
72 |
redirect_url = os.environ.get("RED_URL")
|
73 |
client_secret = os.environ.get("CLIENT_SECRET")
|
74 |
|
75 |
-
|
76 |
-
|
77 |
linkedin = OAuth2Session(client_id, redirect_uri=redirect_url, scope=scope)
|
78 |
|
79 |
-
|
80 |
-
|
81 |
-
|
82 |
-
|
83 |
def replanifier_toutes_les_tâches(df):
|
84 |
# Efface toutes les anciennes tâches
|
85 |
df.apply(
|
@@ -93,13 +83,11 @@ def post_generation_for_robot(id,social,idd) :
|
|
93 |
generated_post = client.predict(
|
94 |
code=id,
|
95 |
api_name="/poster_linkedin"
|
96 |
-
|
97 |
)
|
98 |
db_manager.add_post(social,generated_post,idd)
|
99 |
except Exception as e:
|
100 |
print("Erreur dans gen():", e, flush=True)
|
101 |
-
|
102 |
-
|
103 |
|
104 |
def post_publishing_for_robot(id_social,id_user,idd,ss) :
|
105 |
try :
|
@@ -114,7 +102,7 @@ def post_publishing_for_robot(id_social,id_user,idd,ss) :
|
|
114 |
post = dd["Text_content"].iloc[0]
|
115 |
|
116 |
print("⏳ Tâche planifiée pour gfjfxd",flush = True)
|
117 |
-
|
118 |
url = "https://api.linkedin.com/v2/ugcPosts"
|
119 |
headers = {
|
120 |
"Authorization": f"Bearer {token_value}",
|
@@ -141,11 +129,6 @@ def post_publishing_for_robot(id_social,id_user,idd,ss) :
|
|
141 |
print([resp.status_code, resp.text],flush = True)
|
142 |
except Exception as e:
|
143 |
print("Erreur dans post():", e, flush=True)
|
144 |
-
|
145 |
-
|
146 |
-
|
147 |
-
|
148 |
-
|
149 |
|
150 |
def planifier_ligne(id_schedule, id_social, user_id, schedule_time_str, ss, adjusted_time):
|
151 |
# Parse schedule_time_str and adjusted_time
|
@@ -187,9 +170,9 @@ def planifier_ligne(id_schedule, id_social, user_id, schedule_time_str, ss, adju
|
|
187 |
return
|
188 |
|
189 |
# Remove previous jobs for this schedule (optional, if you want to avoid duplicates)
|
190 |
-
try :
|
191 |
apsched.remove_job(f"pub-{id_schedule}-{schedule_time_str}", jobstore=None)
|
192 |
-
apsched.remove_job(f"gen-{id_schedule}-{schedule_time_str}", jobstore=None)
|
193 |
except Exception as e:
|
194 |
print(f"❌ Erreur lors de la suppression des tâches : {e}", flush=True)
|
195 |
|
@@ -209,18 +192,16 @@ def planifier_ligne(id_schedule, id_social, user_id, schedule_time_str, ss, adju
|
|
209 |
|
210 |
print(f"⏳ APScheduler: Tâche planifiée pour {id_social} ({user_id}) le {jour} à {hour:02d}:{minute:02d} et {jour_adj} à {hour_adj:02d}:{minute_adj:02d}", flush=True)
|
211 |
|
212 |
-
|
213 |
-
|
214 |
def add_scheduling(state):
|
215 |
"""Add new scheduling with thread safety"""
|
216 |
try:
|
217 |
if isinstance(state.day_value, list):
|
218 |
for day in state.day_value:
|
219 |
timesche = f"{day} {int(state.time_value_hour)}:{int(state.time_value_minute)}"
|
220 |
-
|
221 |
# Get current schedule
|
222 |
df = db_manager.fetch_schedule_table()
|
223 |
-
|
224 |
if not df.empty:
|
225 |
df, final_time = add_request(df, timesche)
|
226 |
else:
|
@@ -229,20 +210,20 @@ def add_scheduling(state):
|
|
229 |
h, m = map(int, horaire.split(':'))
|
230 |
m -= 7 # 7 minutes before for generation
|
231 |
final_time = f"{jour} {h}:{m:02d}"
|
232 |
-
|
233 |
# Add to database
|
234 |
db_manager.create_scheduling_for_user(
|
235 |
-
state.user_inf.user.id,
|
236 |
-
state.Linked_social_network,
|
237 |
-
timesche,
|
238 |
final_time
|
239 |
)
|
240 |
else:
|
241 |
timesche = f"{state.day_value} {int(state.time_value_hour)}:{int(state.time_value_minute)}"
|
242 |
-
|
243 |
# Get current schedule
|
244 |
df = db_manager.fetch_schedule_table()
|
245 |
-
|
246 |
if not df.empty:
|
247 |
df, final_time = add_request(df, timesche)
|
248 |
else:
|
@@ -251,46 +232,41 @@ def add_scheduling(state):
|
|
251 |
h, m = map(int, horaire.split(':'))
|
252 |
m -= 7 # 7 minutes before for generation
|
253 |
final_time = f"{jour} {h}:{m:02d}"
|
254 |
-
|
255 |
# Add to database
|
256 |
db_manager.create_scheduling_for_user(
|
257 |
-
state.user_inf.user.id,
|
258 |
-
state.Linked_social_network,
|
259 |
-
timesche,
|
260 |
final_time
|
261 |
)
|
262 |
-
|
263 |
# Refresh the schedule after adding
|
264 |
df = db_manager.fetch_schedule_table()
|
265 |
state.data_schedule = db_manager.fetch_schedule_table_acc(state.user_inf.user.id)
|
266 |
-
|
267 |
# Reschedule all tasks
|
268 |
replanifier_toutes_les_tâches(df)
|
269 |
-
|
270 |
print(f"✅ Scheduling added successfully", flush=True)
|
271 |
-
|
272 |
except Exception as e:
|
273 |
print(f"❌ Error in add_scheduling: {e}", flush=True)
|
274 |
|
275 |
-
|
276 |
def planning():
|
277 |
df = db_manager.fetch_schedule_table()
|
278 |
if not df.empty :
|
279 |
replanifier_toutes_les_tâches(df)
|
280 |
|
281 |
-
|
282 |
def post_publishing(state) :
|
283 |
-
|
284 |
resp = db_manager.fetching_user_identif(state.user_inf.user.id,state.social_network)
|
285 |
data = pd.DataFrame(resp.data)
|
286 |
-
|
287 |
first = data[data['social_network'] == state.social_network].iloc[0]
|
288 |
token_value = first["token"]
|
289 |
sub_value = first["sub"]
|
290 |
-
|
291 |
|
292 |
-
|
293 |
-
|
294 |
url = "https://api.linkedin.com/v2/ugcPosts"
|
295 |
headers = {
|
296 |
"Authorization": f"Bearer {token_value}",
|
@@ -316,15 +292,12 @@ def post_publishing(state) :
|
|
316 |
resp = requests.post(url, headers=headers, json=body)
|
317 |
print([resp.status_code, resp.text],flush = True)
|
318 |
|
319 |
-
|
320 |
-
|
321 |
-
|
322 |
-
|
323 |
def post_generation(state) :
|
324 |
state.generated_post = client.predict(
|
325 |
code=state.user_inf.user.id,
|
326 |
api_name="/poster_linkedin"
|
327 |
)
|
|
|
328 |
def authen(state) :
|
329 |
if state.Linked_social_network == "Linkedin" :
|
330 |
print("jhdijb",flush = True)
|
@@ -332,9 +305,6 @@ def authen(state) :
|
|
332 |
'https://www.linkedin.com/oauth/v2/authorization'
|
333 |
)
|
334 |
navigate(state, state.urlss)
|
335 |
-
|
336 |
-
|
337 |
-
|
338 |
|
339 |
def on_my_clicking(state, action, payload) :
|
340 |
print(action,flush = True)
|
@@ -342,28 +312,24 @@ def on_my_clicking(state, action, payload) :
|
|
342 |
if payload["args"][0] == "Accueil" :
|
343 |
on_logout(state)
|
344 |
navigate(state, payload["args"][0])
|
345 |
-
|
346 |
-
return " "
|
347 |
-
|
348 |
-
|
349 |
|
|
|
350 |
|
351 |
def add_source(state) :
|
352 |
-
|
353 |
result = client.predict(
|
354 |
rss_link=state.source_ + "__thi_irrh'èçs_my_id__! "+state.user_inf.user.id,
|
355 |
api_name="/ajouter_rss"
|
356 |
)
|
357 |
-
|
358 |
state.source_add_message = result
|
359 |
data = db_manager.fetch_source_table(state.user_inf.user.id)
|
360 |
state.Source_table = pd.DataFrame(data)
|
361 |
|
362 |
-
|
363 |
def delete_source(state, var_name: str, payload: dict) :
|
364 |
state.Source_table_before = state.Source_table
|
365 |
state.get_gui().table_on_delete(state, var_name, payload)
|
366 |
-
|
367 |
diff = state.Source_table_before.merge(state.Source_table, how="outer", indicator=True) \
|
368 |
.query('_merge != "both"') \
|
369 |
.drop(columns='_merge')
|
@@ -373,7 +339,7 @@ def delete_source(state, var_name: str, payload: dict) :
|
|
373 |
def delete_account(state, var_name: str, payload: dict) :
|
374 |
state.data_account_before = state.data_account
|
375 |
state.get_gui().table_on_delete(state, var_name, payload)
|
376 |
-
|
377 |
diff = state.data_account_before.merge(state.data_account, how="outer", indicator=True) \
|
378 |
.query('_merge != "both"') \
|
379 |
.drop(columns='_merge')
|
@@ -383,31 +349,36 @@ def delete_account(state, var_name: str, payload: dict) :
|
|
383 |
def delete_schedule(state, var_name: str, payload: dict) :
|
384 |
state.data_schedule_before = state.data_schedule
|
385 |
state.get_gui().table_on_delete(state, var_name, payload)
|
386 |
-
|
387 |
diff = state.data_schedule_before.merge(state.data_schedule, how="outer", indicator=True) \
|
388 |
.query('_merge != "both"') \
|
389 |
.drop(columns='_merge')
|
390 |
valeurs = diff['id'].tolist()
|
391 |
-
db_manager.delete_from_table("Scheduling",valeurs)
|
392 |
|
393 |
def on_login(state, payload):
|
394 |
"""Handle login form submission"""
|
395 |
time.sleep(0.7)
|
396 |
email = state.login_email
|
397 |
password = state.login_password
|
398 |
-
|
399 |
if not email or not password:
|
400 |
state.message = "Please enter both email and password"
|
401 |
return
|
402 |
-
|
403 |
-
success, message,
|
404 |
-
|
|
|
|
|
|
|
|
|
|
|
405 |
if success:
|
406 |
state.current_user = email
|
407 |
-
data = db_manager.fetch_source_table(
|
408 |
-
dataac = db_manager.fetch_account_table(
|
409 |
-
state.data_schedule = db_manager.fetch_schedule_table_acc(
|
410 |
-
state.data_account =pd.DataFrame(dataac)
|
411 |
state.Source_table = pd.DataFrame(data)
|
412 |
navigate(state, "Source_Management")
|
413 |
state.is_logged_in = True
|
@@ -415,8 +386,6 @@ def on_login(state, payload):
|
|
415 |
# Clear form
|
416 |
state.login_email = ""
|
417 |
state.login_password = ""
|
418 |
-
|
419 |
-
|
420 |
else:
|
421 |
if message == "Compte non confirmé":
|
422 |
state.message = "Votre compte n'est pas encore activé. Veuillez vérifier votre email pour activer votre compte."
|
@@ -431,21 +400,21 @@ def on_register(state):
|
|
431 |
email = state.register_email
|
432 |
password = state.register_password
|
433 |
confirm_password = state.confirm_password
|
434 |
-
|
435 |
if not email or not password or not confirm_password:
|
436 |
state.message = "Please fill in all fields"
|
437 |
return
|
438 |
-
|
439 |
if password != confirm_password:
|
440 |
state.message = "Passwords do not match"
|
441 |
return
|
442 |
-
|
443 |
if len(password) < 8:
|
444 |
state.message = "Password must be at least 8 characters long"
|
445 |
return
|
446 |
-
|
447 |
success, message,user_inf = db_manager.create_user(email, password) # type: ignore
|
448 |
-
|
449 |
if success:
|
450 |
state.message = "Un lien d'activation a été envoyé à votre adresse email. Veuillez vérifier votre boîte de réception pour activer votre compte."
|
451 |
state.show_register = False
|
@@ -457,9 +426,9 @@ def on_register(state):
|
|
457 |
state.message = message or "Erreur lors de l'inscription. Veuillez réessayer."
|
458 |
|
459 |
def on_logout(state):
|
460 |
-
|
461 |
"""Handle logout"""
|
462 |
-
|
463 |
state.current_user = None
|
464 |
state.is_logged_in = False
|
465 |
state.message = "Logged out successfully"
|
@@ -475,4 +444,3 @@ def toggle_register(state):
|
|
475 |
state.register_email = ""
|
476 |
state.register_password = ""
|
477 |
state.confirm_password = ""
|
478 |
-
|
|
|
3 |
from http.server import BaseHTTPRequestHandler, HTTPServer
|
4 |
import threading
|
5 |
import os
|
6 |
+
import time
|
7 |
import datetime
|
8 |
from line_db import DatabaseManager
|
9 |
from urllib.parse import urlencode
|
|
|
21 |
apsched = BackgroundScheduler()
|
22 |
apsched.start()
|
23 |
|
|
|
|
|
24 |
Linked_account_name = " "
|
25 |
Linked_social_network = " "
|
26 |
data_schedule ={}
|
|
|
32 |
day_value = "Monday"
|
33 |
Linked_social_network = "Linkedin"
|
34 |
|
|
|
35 |
api_key_hugging = os.environ.get("hugging_key")
|
36 |
Source_table = {}
|
37 |
data_account = {}
|
|
|
61 |
states = ""
|
62 |
social_network = "Linkedin"
|
63 |
|
|
|
64 |
db_manager = DatabaseManager(url,key)
|
65 |
client = Client("Zelyanoth/Linkedin_poster_dev",hf_token = api_key_hugging)
|
66 |
|
|
|
68 |
redirect_url = os.environ.get("RED_URL")
|
69 |
client_secret = os.environ.get("CLIENT_SECRET")
|
70 |
|
|
|
|
|
71 |
linkedin = OAuth2Session(client_id, redirect_uri=redirect_url, scope=scope)
|
72 |
|
|
|
|
|
|
|
|
|
73 |
def replanifier_toutes_les_tâches(df):
|
74 |
# Efface toutes les anciennes tâches
|
75 |
df.apply(
|
|
|
83 |
generated_post = client.predict(
|
84 |
code=id,
|
85 |
api_name="/poster_linkedin"
|
86 |
+
|
87 |
)
|
88 |
db_manager.add_post(social,generated_post,idd)
|
89 |
except Exception as e:
|
90 |
print("Erreur dans gen():", e, flush=True)
|
|
|
|
|
91 |
|
92 |
def post_publishing_for_robot(id_social,id_user,idd,ss) :
|
93 |
try :
|
|
|
102 |
post = dd["Text_content"].iloc[0]
|
103 |
|
104 |
print("⏳ Tâche planifiée pour gfjfxd",flush = True)
|
105 |
+
|
106 |
url = "https://api.linkedin.com/v2/ugcPosts"
|
107 |
headers = {
|
108 |
"Authorization": f"Bearer {token_value}",
|
|
|
129 |
print([resp.status_code, resp.text],flush = True)
|
130 |
except Exception as e:
|
131 |
print("Erreur dans post():", e, flush=True)
|
|
|
|
|
|
|
|
|
|
|
132 |
|
133 |
def planifier_ligne(id_schedule, id_social, user_id, schedule_time_str, ss, adjusted_time):
|
134 |
# Parse schedule_time_str and adjusted_time
|
|
|
170 |
return
|
171 |
|
172 |
# Remove previous jobs for this schedule (optional, if you want to avoid duplicates)
|
173 |
+
try :
|
174 |
apsched.remove_job(f"pub-{id_schedule}-{schedule_time_str}", jobstore=None)
|
175 |
+
apsched.remove_job(f"gen-{id_schedule}-{schedule_time_str}", jobstore=None)
|
176 |
except Exception as e:
|
177 |
print(f"❌ Erreur lors de la suppression des tâches : {e}", flush=True)
|
178 |
|
|
|
192 |
|
193 |
print(f"⏳ APScheduler: Tâche planifiée pour {id_social} ({user_id}) le {jour} à {hour:02d}:{minute:02d} et {jour_adj} à {hour_adj:02d}:{minute_adj:02d}", flush=True)
|
194 |
|
|
|
|
|
195 |
def add_scheduling(state):
|
196 |
"""Add new scheduling with thread safety"""
|
197 |
try:
|
198 |
if isinstance(state.day_value, list):
|
199 |
for day in state.day_value:
|
200 |
timesche = f"{day} {int(state.time_value_hour)}:{int(state.time_value_minute)}"
|
201 |
+
|
202 |
# Get current schedule
|
203 |
df = db_manager.fetch_schedule_table()
|
204 |
+
|
205 |
if not df.empty:
|
206 |
df, final_time = add_request(df, timesche)
|
207 |
else:
|
|
|
210 |
h, m = map(int, horaire.split(':'))
|
211 |
m -= 7 # 7 minutes before for generation
|
212 |
final_time = f"{jour} {h}:{m:02d}"
|
213 |
+
|
214 |
# Add to database
|
215 |
db_manager.create_scheduling_for_user(
|
216 |
+
state.user_inf.user.id,
|
217 |
+
state.Linked_social_network,
|
218 |
+
timesche,
|
219 |
final_time
|
220 |
)
|
221 |
else:
|
222 |
timesche = f"{state.day_value} {int(state.time_value_hour)}:{int(state.time_value_minute)}"
|
223 |
+
|
224 |
# Get current schedule
|
225 |
df = db_manager.fetch_schedule_table()
|
226 |
+
|
227 |
if not df.empty:
|
228 |
df, final_time = add_request(df, timesche)
|
229 |
else:
|
|
|
232 |
h, m = map(int, horaire.split(':'))
|
233 |
m -= 7 # 7 minutes before for generation
|
234 |
final_time = f"{jour} {h}:{m:02d}"
|
235 |
+
|
236 |
# Add to database
|
237 |
db_manager.create_scheduling_for_user(
|
238 |
+
state.user_inf.user.id,
|
239 |
+
state.Linked_social_network,
|
240 |
+
timesche,
|
241 |
final_time
|
242 |
)
|
243 |
+
|
244 |
# Refresh the schedule after adding
|
245 |
df = db_manager.fetch_schedule_table()
|
246 |
state.data_schedule = db_manager.fetch_schedule_table_acc(state.user_inf.user.id)
|
247 |
+
|
248 |
# Reschedule all tasks
|
249 |
replanifier_toutes_les_tâches(df)
|
250 |
+
|
251 |
print(f"✅ Scheduling added successfully", flush=True)
|
252 |
+
|
253 |
except Exception as e:
|
254 |
print(f"❌ Error in add_scheduling: {e}", flush=True)
|
255 |
|
|
|
256 |
def planning():
|
257 |
df = db_manager.fetch_schedule_table()
|
258 |
if not df.empty :
|
259 |
replanifier_toutes_les_tâches(df)
|
260 |
|
|
|
261 |
def post_publishing(state) :
|
262 |
+
|
263 |
resp = db_manager.fetching_user_identif(state.user_inf.user.id,state.social_network)
|
264 |
data = pd.DataFrame(resp.data)
|
265 |
+
|
266 |
first = data[data['social_network'] == state.social_network].iloc[0]
|
267 |
token_value = first["token"]
|
268 |
sub_value = first["sub"]
|
|
|
269 |
|
|
|
|
|
270 |
url = "https://api.linkedin.com/v2/ugcPosts"
|
271 |
headers = {
|
272 |
"Authorization": f"Bearer {token_value}",
|
|
|
292 |
resp = requests.post(url, headers=headers, json=body)
|
293 |
print([resp.status_code, resp.text],flush = True)
|
294 |
|
|
|
|
|
|
|
|
|
295 |
def post_generation(state) :
|
296 |
state.generated_post = client.predict(
|
297 |
code=state.user_inf.user.id,
|
298 |
api_name="/poster_linkedin"
|
299 |
)
|
300 |
+
|
301 |
def authen(state) :
|
302 |
if state.Linked_social_network == "Linkedin" :
|
303 |
print("jhdijb",flush = True)
|
|
|
305 |
'https://www.linkedin.com/oauth/v2/authorization'
|
306 |
)
|
307 |
navigate(state, state.urlss)
|
|
|
|
|
|
|
308 |
|
309 |
def on_my_clicking(state, action, payload) :
|
310 |
print(action,flush = True)
|
|
|
312 |
if payload["args"][0] == "Accueil" :
|
313 |
on_logout(state)
|
314 |
navigate(state, payload["args"][0])
|
|
|
|
|
|
|
|
|
315 |
|
316 |
+
return " "
|
317 |
|
318 |
def add_source(state) :
|
319 |
+
|
320 |
result = client.predict(
|
321 |
rss_link=state.source_ + "__thi_irrh'èçs_my_id__! "+state.user_inf.user.id,
|
322 |
api_name="/ajouter_rss"
|
323 |
)
|
324 |
+
|
325 |
state.source_add_message = result
|
326 |
data = db_manager.fetch_source_table(state.user_inf.user.id)
|
327 |
state.Source_table = pd.DataFrame(data)
|
328 |
|
|
|
329 |
def delete_source(state, var_name: str, payload: dict) :
|
330 |
state.Source_table_before = state.Source_table
|
331 |
state.get_gui().table_on_delete(state, var_name, payload)
|
332 |
+
|
333 |
diff = state.Source_table_before.merge(state.Source_table, how="outer", indicator=True) \
|
334 |
.query('_merge != "both"') \
|
335 |
.drop(columns='_merge')
|
|
|
339 |
def delete_account(state, var_name: str, payload: dict) :
|
340 |
state.data_account_before = state.data_account
|
341 |
state.get_gui().table_on_delete(state, var_name, payload)
|
342 |
+
|
343 |
diff = state.data_account_before.merge(state.data_account, how="outer", indicator=True) \
|
344 |
.query('_merge != "both"') \
|
345 |
.drop(columns='_merge')
|
|
|
349 |
def delete_schedule(state, var_name: str, payload: dict) :
|
350 |
state.data_schedule_before = state.data_schedule
|
351 |
state.get_gui().table_on_delete(state, var_name, payload)
|
352 |
+
|
353 |
diff = state.data_schedule_before.merge(state.data_schedule, how="outer", indicator=True) \
|
354 |
.query('_merge != "both"') \
|
355 |
.drop(columns='_merge')
|
356 |
valeurs = diff['id'].tolist()
|
357 |
+
db_manager.delete_from_table("Scheduling",valeurs)
|
358 |
|
359 |
def on_login(state, payload):
|
360 |
"""Handle login form submission"""
|
361 |
time.sleep(0.7)
|
362 |
email = state.login_email
|
363 |
password = state.login_password
|
364 |
+
|
365 |
if not email or not password:
|
366 |
state.message = "Please enter both email and password"
|
367 |
return
|
368 |
+
|
369 |
+
success, message, user_inf = db_manager.authenticate_user(email, password)
|
370 |
+
|
371 |
+
if user_inf is None:
|
372 |
+
# Handle the case when authentication fails
|
373 |
+
state.message = message
|
374 |
+
return
|
375 |
+
|
376 |
if success:
|
377 |
state.current_user = email
|
378 |
+
data = db_manager.fetch_source_table(user_inf.user.id)
|
379 |
+
dataac = db_manager.fetch_account_table(user_inf.user.id)
|
380 |
+
state.data_schedule = db_manager.fetch_schedule_table_acc(user_inf.user.id)
|
381 |
+
state.data_account = pd.DataFrame(dataac)
|
382 |
state.Source_table = pd.DataFrame(data)
|
383 |
navigate(state, "Source_Management")
|
384 |
state.is_logged_in = True
|
|
|
386 |
# Clear form
|
387 |
state.login_email = ""
|
388 |
state.login_password = ""
|
|
|
|
|
389 |
else:
|
390 |
if message == "Compte non confirmé":
|
391 |
state.message = "Votre compte n'est pas encore activé. Veuillez vérifier votre email pour activer votre compte."
|
|
|
400 |
email = state.register_email
|
401 |
password = state.register_password
|
402 |
confirm_password = state.confirm_password
|
403 |
+
|
404 |
if not email or not password or not confirm_password:
|
405 |
state.message = "Please fill in all fields"
|
406 |
return
|
407 |
+
|
408 |
if password != confirm_password:
|
409 |
state.message = "Passwords do not match"
|
410 |
return
|
411 |
+
|
412 |
if len(password) < 8:
|
413 |
state.message = "Password must be at least 8 characters long"
|
414 |
return
|
415 |
+
|
416 |
success, message,user_inf = db_manager.create_user(email, password) # type: ignore
|
417 |
+
|
418 |
if success:
|
419 |
state.message = "Un lien d'activation a été envoyé à votre adresse email. Veuillez vérifier votre boîte de réception pour activer votre compte."
|
420 |
state.show_register = False
|
|
|
426 |
state.message = message or "Erreur lors de l'inscription. Veuillez réessayer."
|
427 |
|
428 |
def on_logout(state):
|
429 |
+
|
430 |
"""Handle logout"""
|
431 |
+
|
432 |
state.current_user = None
|
433 |
state.is_logged_in = False
|
434 |
state.message = "Logged out successfully"
|
|
|
444 |
state.register_email = ""
|
445 |
state.register_password = ""
|
446 |
state.confirm_password = ""
|
|
line_db.py
CHANGED
@@ -3,25 +3,20 @@ from supabase import create_client, Client
|
|
3 |
from pandas import json_normalize
|
4 |
import pandas
|
5 |
|
6 |
-
|
7 |
-
|
8 |
-
|
9 |
-
|
10 |
class DatabaseManager:
|
11 |
-
|
12 |
def __init__(self,url,key):
|
13 |
# Supabase connection string format
|
14 |
# postgresql://postgres:[password]@[host]:[port]/[database]
|
15 |
self.supabase: Client = create_client(url, key)
|
16 |
-
|
17 |
def create_user(self, email, password):
|
18 |
response = self.supabase.auth.sign_up(
|
19 |
{
|
20 |
"email": email,
|
21 |
"password": password,
|
22 |
-
|
23 |
-
|
24 |
-
|
25 |
)
|
26 |
if response.user.aud == "authenticated" : # type: ignore
|
27 |
return True,"Un mail a été envoyé a votre address mail",response
|
@@ -43,7 +38,7 @@ class DatabaseManager:
|
|
43 |
.execute()
|
44 |
)
|
45 |
return response.data
|
46 |
-
|
47 |
def fetch_schedule_table_acc(self,filter) :
|
48 |
response = (
|
49 |
self.supabase
|
@@ -52,7 +47,7 @@ class DatabaseManager:
|
|
52 |
.execute()
|
53 |
)
|
54 |
print(response.data,flush=True)
|
55 |
-
|
56 |
df = json_normalize(response.data)
|
57 |
print(df,flush=True)
|
58 |
# Renomme les colonnes pour simplifier
|
@@ -65,9 +60,8 @@ class DatabaseManager:
|
|
65 |
# Filtre les lignes pour l'utilisateur donné
|
66 |
df_user = df[df["user_id"] == filter].reset_index(drop=True)
|
67 |
|
68 |
-
|
69 |
return df_user
|
70 |
-
return None
|
71 |
|
72 |
def delete_from_table(self,Source,values) :
|
73 |
response = (
|
@@ -76,23 +70,27 @@ class DatabaseManager:
|
|
76 |
.in_("id", values)
|
77 |
.execute()
|
78 |
)
|
79 |
-
|
80 |
-
|
81 |
-
|
82 |
def authenticate_user(self, email, password):
|
83 |
-
|
84 |
-
|
85 |
-
|
86 |
-
|
87 |
-
|
88 |
-
|
89 |
-
)
|
90 |
-
|
91 |
-
|
92 |
-
|
93 |
-
|
94 |
-
|
95 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
96 |
|
97 |
def add_token_network(self,token,social_network,account_name,uids,data):
|
98 |
response = (
|
@@ -100,15 +98,13 @@ class DatabaseManager:
|
|
100 |
.insert({"social_network": social_network,"account_name" :account_name, "id_utilisateur":uids,"token": token,
|
101 |
"sub" : data["sub"],"given_name" : data["given_name"],"family_name" : data["family_name"],"picture" : data["picture"] })
|
102 |
.execute()
|
103 |
-
)
|
104 |
def add_post(self,id_social,content,ids,tg : bool =False) :
|
105 |
response = (
|
106 |
self.supabase.table("Post_content")
|
107 |
.insert({"id_social": id_social,"Text_content" :content ,"is_published" : tg,"sched" : ids })
|
108 |
.execute())
|
109 |
|
110 |
-
|
111 |
-
|
112 |
def update_post(self,ids,idd):
|
113 |
response = (
|
114 |
self.supabase.table("Post_content")
|
@@ -127,31 +123,25 @@ class DatabaseManager:
|
|
127 |
.eq("is_published", active)
|
128 |
.eq("sched", idd)
|
129 |
|
130 |
-
|
131 |
.execute()
|
132 |
)
|
133 |
data = response.data # liste de dicts, chaque dict contient clé 'Social_network'
|
134 |
df = json_normalize(data)
|
135 |
return df
|
136 |
|
137 |
-
|
138 |
def fetching_user_identif(self,uids,rs) :
|
139 |
response = (
|
140 |
self.supabase.table("Social_network")
|
141 |
.select("*")
|
142 |
.eq("id_utilisateur", uids)
|
143 |
.eq("account_name", rs)
|
144 |
-
|
145 |
.execute()
|
146 |
)
|
147 |
return response
|
148 |
|
149 |
-
|
150 |
-
|
151 |
-
|
152 |
-
|
153 |
def get_id_social(self,user_id: str, reseau: str):
|
154 |
-
|
155 |
resp = (
|
156 |
self.supabase
|
157 |
.table("Social_network")
|
@@ -160,12 +150,11 @@ class DatabaseManager:
|
|
160 |
.eq("account_name", reseau)
|
161 |
.execute()
|
162 |
)
|
163 |
-
|
164 |
return resp.data[0]["id"]
|
165 |
-
|
166 |
|
167 |
def create_scheduling_for_user(self,user_id: str, reseau: str, schedule_time: str,adj):
|
168 |
-
|
169 |
id_social = self.get_id_social(user_id, reseau)
|
170 |
resp = (
|
171 |
self.supabase
|
@@ -174,13 +163,12 @@ class DatabaseManager:
|
|
174 |
"id_social": id_social,
|
175 |
"schedule_time": schedule_time,
|
176 |
"adjusted_time": adj,
|
177 |
-
|
178 |
})
|
179 |
.execute()
|
180 |
)
|
181 |
-
|
182 |
print("Scheduling inséré avec succès.")
|
183 |
-
|
184 |
|
185 |
def fetch_schedule_table(self) :
|
186 |
response = (
|
@@ -189,21 +177,17 @@ class DatabaseManager:
|
|
189 |
.select("*, Social_network(id_utilisateur, account_name)")
|
190 |
.execute()
|
191 |
)
|
192 |
-
|
193 |
# 2️⃣ On normalise/la platifie la structure JSON en DataFrame
|
194 |
data = response.data # liste de dicts, chaque dict contient clé 'Social_network'
|
195 |
df = json_normalize(data)
|
196 |
-
|
197 |
|
198 |
df = df.rename(columns={"Social_network.id_utilisateur": "user_id"})
|
199 |
df = df.rename(columns={"Social_network.account_name": "social_network"})
|
200 |
|
201 |
-
|
202 |
# 4️⃣ On peut réordonner ou filtrer les colonnes si besoin
|
203 |
# par exemple : id, id_social, user_id, schedule_time, created_at
|
204 |
cols = ["id", "id_social", "user_id", "schedule_time","social_network","adjusted_time","created_at"]
|
205 |
df = df[[c for c in cols if c in df.columns]]
|
206 |
|
207 |
return df
|
208 |
-
|
209 |
-
|
|
|
3 |
from pandas import json_normalize
|
4 |
import pandas
|
5 |
|
|
|
|
|
|
|
|
|
6 |
class DatabaseManager:
|
7 |
+
|
8 |
def __init__(self,url,key):
|
9 |
# Supabase connection string format
|
10 |
# postgresql://postgres:[password]@[host]:[port]/[database]
|
11 |
self.supabase: Client = create_client(url, key)
|
12 |
+
|
13 |
def create_user(self, email, password):
|
14 |
response = self.supabase.auth.sign_up(
|
15 |
{
|
16 |
"email": email,
|
17 |
"password": password,
|
18 |
+
}
|
19 |
+
|
|
|
20 |
)
|
21 |
if response.user.aud == "authenticated" : # type: ignore
|
22 |
return True,"Un mail a été envoyé a votre address mail",response
|
|
|
38 |
.execute()
|
39 |
)
|
40 |
return response.data
|
41 |
+
|
42 |
def fetch_schedule_table_acc(self,filter) :
|
43 |
response = (
|
44 |
self.supabase
|
|
|
47 |
.execute()
|
48 |
)
|
49 |
print(response.data,flush=True)
|
50 |
+
|
51 |
df = json_normalize(response.data)
|
52 |
print(df,flush=True)
|
53 |
# Renomme les colonnes pour simplifier
|
|
|
60 |
# Filtre les lignes pour l'utilisateur donné
|
61 |
df_user = df[df["user_id"] == filter].reset_index(drop=True)
|
62 |
|
|
|
63 |
return df_user
|
64 |
+
return None
|
65 |
|
66 |
def delete_from_table(self,Source,values) :
|
67 |
response = (
|
|
|
70 |
.in_("id", values)
|
71 |
.execute()
|
72 |
)
|
73 |
+
|
|
|
|
|
74 |
def authenticate_user(self, email, password):
    """Sign a user in with email/password via Supabase auth.

    Args:
        email: Account email address.
        password: Account password.

    Returns:
        A ``(success, message, response)`` tuple. ``success`` is True only
        for an authenticated *and* email-confirmed account; ``response`` is
        the Supabase auth response, or ``None`` when the call itself raised.
    """
    try:
        auth_result = self.supabase.auth.sign_in_with_password(
            {
                "email": email,
                "password": password,
            }
        )
        user = auth_result.user
        # Guard clauses instead of an if/elif/else chain; the outcomes
        # and messages are unchanged.
        if user.aud != "authenticated":  # type: ignore
            return False, "Compte non existant", auth_result
        if user.email_confirmed_at is None:  # type: ignore
            return False, "Compte non confirmé", auth_result
        return True, "Logged in successfully", auth_result
    except Exception as e:
        # Supabase raises on bad credentials; map that specific failure
        # to a friendly message, pass anything else through verbatim.
        if "Invalid login credentials" in str(e):
            return False, "Invalid email or password", None
        return False, f"Authentication error: {str(e)}", None
|
94 |
|
95 |
def add_token_network(self,token,social_network,account_name,uids,data):
|
96 |
response = (
|
|
|
98 |
.insert({"social_network": social_network,"account_name" :account_name, "id_utilisateur":uids,"token": token,
|
99 |
"sub" : data["sub"],"given_name" : data["given_name"],"family_name" : data["family_name"],"picture" : data["picture"] })
|
100 |
.execute()
|
101 |
+
)
|
102 |
def add_post(self,id_social,content,ids,tg : bool =False) :
|
103 |
response = (
|
104 |
self.supabase.table("Post_content")
|
105 |
.insert({"id_social": id_social,"Text_content" :content ,"is_published" : tg,"sched" : ids })
|
106 |
.execute())
|
107 |
|
|
|
|
|
108 |
def update_post(self,ids,idd):
|
109 |
response = (
|
110 |
self.supabase.table("Post_content")
|
|
|
123 |
.eq("is_published", active)
|
124 |
.eq("sched", idd)
|
125 |
|
|
|
126 |
.execute()
|
127 |
)
|
128 |
data = response.data # liste de dicts, chaque dict contient clé 'Social_network'
|
129 |
df = json_normalize(data)
|
130 |
return df
|
131 |
|
|
|
132 |
def fetching_user_identif(self,uids,rs) :
|
133 |
response = (
|
134 |
self.supabase.table("Social_network")
|
135 |
.select("*")
|
136 |
.eq("id_utilisateur", uids)
|
137 |
.eq("account_name", rs)
|
138 |
+
|
139 |
.execute()
|
140 |
)
|
141 |
return response
|
142 |
|
|
|
|
|
|
|
|
|
143 |
def get_id_social(self,user_id: str, reseau: str):
|
144 |
+
|
145 |
resp = (
|
146 |
self.supabase
|
147 |
.table("Social_network")
|
|
|
150 |
.eq("account_name", reseau)
|
151 |
.execute()
|
152 |
)
|
153 |
+
|
154 |
return resp.data[0]["id"]
|
|
|
155 |
|
156 |
def create_scheduling_for_user(self,user_id: str, reseau: str, schedule_time: str,adj):
|
157 |
+
|
158 |
id_social = self.get_id_social(user_id, reseau)
|
159 |
resp = (
|
160 |
self.supabase
|
|
|
163 |
"id_social": id_social,
|
164 |
"schedule_time": schedule_time,
|
165 |
"adjusted_time": adj,
|
166 |
+
|
167 |
})
|
168 |
.execute()
|
169 |
)
|
170 |
+
|
171 |
print("Scheduling inséré avec succès.")
|
|
|
172 |
|
173 |
def fetch_schedule_table(self) :
|
174 |
response = (
|
|
|
177 |
.select("*, Social_network(id_utilisateur, account_name)")
|
178 |
.execute()
|
179 |
)
|
180 |
+
|
181 |
# 2️⃣ On normalise/la platifie la structure JSON en DataFrame
|
182 |
data = response.data # liste de dicts, chaque dict contient clé 'Social_network'
|
183 |
df = json_normalize(data)
|
|
|
184 |
|
185 |
df = df.rename(columns={"Social_network.id_utilisateur": "user_id"})
|
186 |
df = df.rename(columns={"Social_network.account_name": "social_network"})
|
187 |
|
|
|
188 |
# 4️⃣ On peut réordonner ou filtrer les colonnes si besoin
|
189 |
# par exemple : id, id_social, user_id, schedule_time, created_at
|
190 |
cols = ["id", "id_social", "user_id", "schedule_time","social_network","adjusted_time","created_at"]
|
191 |
df = df[[c for c in cols if c in df.columns]]
|
192 |
|
193 |
return df
|
|
|
|