Spaces:
Sleeping
Sleeping
Upload 4 files
Browse filesUploading correct files
- README.md +44 -7
- app.py +187 -0
- requirements.txt +232 -0
- shortener_tool.py +42 -0
README.md
CHANGED
@@ -1,12 +1,49 @@
|
|
1 |
---
|
2 |
-
title:
|
3 |
-
|
4 |
-
colorFrom: green
|
5 |
-
colorTo: red
|
6 |
sdk: gradio
|
7 |
sdk_version: 5.38.0
|
8 |
-
app_file: app.py
|
9 |
-
pinned: false
|
10 |
---
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
11 |
|
12 |
-
|
|
|
|
|
|
|
|
|
|
1 |
---
|
2 |
+
title: nat-ad
|
3 |
+
app_file: app.py
|
|
|
|
|
4 |
sdk: gradio
|
5 |
sdk_version: 5.38.0
|
|
|
|
|
6 |
---
|
7 |
+
# Social Media Ads Creator
|
8 |
+
|
9 |
+
This project leverages AI agents to automatically generate social media ad copy for products from their URLs.
|
10 |
+
|
11 |
+
## How it Works
|
12 |
+
|
13 |
+
The system uses a Gradio interface (`app.py`) to take product URLs and other parameters as input. Behind the scenes, a "crew" of AI agents, each with a specific role, processes this information:
|
14 |
+
|
15 |
+
1. **Product Analyst:** This agent scrapes a product URL to extract key information like the product name, features, price, and any available discounts. It also uses a tool to shorten the URL.
|
16 |
+
2. **Social Media Copywriter:** This agent takes the product information and crafts a compelling social media post in Portuguese, tailored for platforms like WhatsApp. The post includes a call to action, emojis, and the shortened URL.
|
17 |
+
|
18 |
+
## Setup and Usage
|
19 |
+
|
20 |
+
1. **Prerequisites:**
|
21 |
+
* Python 3.12 or higher
|
22 |
+
* An OpenAI API key
|
23 |
+
* A Natura API token (for the URL shortener)
|
24 |
+
|
25 |
+
2. **Installation:**
|
26 |
+
* The dependencies are listed in the `requirements.txt` file (and in `pyproject.toml` for local development).
|
27 |
+
|
28 |
+
3. **Configuration:**
|
29 |
+
* Create a `.env` file in the root directory.
|
30 |
+
* Add your OpenAI API key and Natura API token to the `.env` file:
|
31 |
+
```
|
32 |
+
OPENAI_API_KEY="your_openai_api_key"
|
33 |
+
NATURA_API_TOKEN="your_natura_api_token"
|
34 |
+
```
|
35 |
+
|
36 |
+
4. **Execution:**
|
37 |
+
* Run the `app.py` script to launch the Gradio application:
|
38 |
+
```bash
|
39 |
+
uv run app.py
|
40 |
+
```
|
41 |
+
* Access the Gradio interface in your web browser at the address provided in the console (usually `http://127.0.0.1:7860`).
|
42 |
+
|
43 |
+
## Key Files
|
44 |
|
45 |
+
* `app.py`: The Gradio application that provides the user interface.
|
46 |
+
* `social_media_crew.py`: Defines the AI agents and their tasks.
|
47 |
+
* `shortener_tool.py`: A custom tool for shortening URLs.
|
48 |
+
* `.env`: The configuration file for API keys.
|
49 |
+
* `pyproject.toml`: The project's metadata and dependencies.
|
app.py
ADDED
@@ -0,0 +1,187 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import gradio as gr
|
2 |
+
import os
|
3 |
+
import requests
|
4 |
+
from crewai import Agent, Task, Crew, Process
|
5 |
+
from crewai_tools import ScrapeWebsiteTool
|
6 |
+
from crewai.tools import BaseTool
|
7 |
+
|
8 |
+
# Default model for every CrewAI agent in this app.
# NOTE(review): assumed to be picked up from the OPENAI_MODEL_NAME env var by the
# crewai/litellm stack — confirm against the installed crewai version.
os.environ["OPENAI_MODEL_NAME"] = "gpt-4o-mini"
|
9 |
+
|
10 |
+
# --- API Key Management ---
def get_api_keys():
    """Return the OpenAI and Natura credentials currently set in the environment.

    Missing variables default to the empty string, so callers can simply
    truth-test the values to detect unconfigured keys.
    """
    wanted = ("OPENAI_API_KEY", "NATURA_API_TOKEN")
    return {name: os.getenv(name, "") for name in wanted}
|
16 |
+
|
17 |
+
def set_api_key(openai_key: str, natura_token: str):
    """Store both credentials as process environment variables.

    Returns a confirmation message for display in the Settings tab.
    """
    pairs = (("OPENAI_API_KEY", openai_key), ("NATURA_API_TOKEN", natura_token))
    for name, value in pairs:
        os.environ[name] = value
    return "API keys saved successfully!"
|
21 |
+
|
22 |
+
# --- CrewAI Integration ---

class ShortenerTool(BaseTool):
    """CrewAI tool that shortens a URL via Natura's shortener API.

    Falls back to the original URL on any network or parsing failure, so the
    calling agent always receives a usable link.
    """

    name: str = "URL Shortener Tool"
    description: str = "Generates a short version of a given URL using an external API."

    def _run(self, original_url: str) -> str:
        """Return a shortened version of *original_url*, or the URL unchanged on failure."""
        api_url = "https://sales-mgmt-cb-bff-apigw.prd.naturacloud.com/cb-bff-cms/cms/shortener"
        headers = {"authorization": f"Bearer {os.getenv('NATURA_API_TOKEN')}", "content-type": "application/json"}
        payload = {"url": original_url}

        try:
            # A bounded timeout prevents a hung shortener service from
            # blocking the whole crew run indefinitely.
            response = requests.post(api_url, headers=headers, json=payload, timeout=15)
            response.raise_for_status()
            short_url_data = response.json()
            # Fall back to the original URL if the API omits the 'short' key.
            return short_url_data.get("short", original_url)
        except requests.exceptions.RequestException as e:
            print(f"Warning: Error generating short URL: {e}. Returning original URL.")
            return original_url
        except ValueError:
            print("Warning: Invalid JSON response from shortener API. Returning original URL.")
            return original_url
|
44 |
+
|
45 |
+
class CalculateDiscountedPriceTool(BaseTool):
    """CrewAI tool that applies a percentage discount to a price."""

    name: str = "Calculate Discounted Price Tool"
    description: str = "Calculates the price after applying a given discount percentage."

    def _run(self, original_price: float, discount_percentage: float) -> float:
        """Return *original_price* reduced by *discount_percentage* percent, rounded to cents.

        Raises:
            ValueError: if either argument is not numeric, or the percentage
                falls outside the 0-100 range.
        """
        numeric = (int, float)
        if not isinstance(original_price, numeric) or not isinstance(discount_percentage, numeric):
            raise ValueError("Both original_price and discount_percentage must be numbers.")
        if discount_percentage < 0 or discount_percentage > 100:
            raise ValueError("Discount percentage must be between 0 and 100.")

        # Same operation order as a manual two-step computation so the
        # floating-point result is bit-identical.
        reduction = original_price * (discount_percentage / 100)
        return round(original_price - reduction, 2)
|
58 |
+
|
59 |
+
class SocialMediaCrew:
    """Orchestrates a two-agent CrewAI pipeline that turns a product URL into a
    WhatsApp-ready ad post in Portuguese.

    Agents:
        * Product Analyst — scrapes the page, extracts name/price/discounts,
          computes the coupon price and shortens the URL.
        * Social Media Copywriter — rewrites the analysis into the final post.
    """

    def __init__(self):
        # Tools handed to the analyst agent below.
        self.scrape_tool = ScrapeWebsiteTool()
        self.shortener_tool = ShortenerTool()
        self.calculate_discounted_price_tool = CalculateDiscountedPriceTool()

        self.product_analyst = Agent(
            role='Product Analyst',
            goal='Analyze the provided URL and extract key product information',
            backstory=("You are an expert in analyzing product pages and extracting the most important information. You can identify the product name, its main features, and the target audience."),
            verbose=True,
            tools=[self.scrape_tool, self.shortener_tool, self.calculate_discounted_price_tool],
            allow_delegation=False
        )

        # The copywriter gets no tools: it only transforms the analyst's output.
        self.social_media_copywriter = Agent(
            role='Social Media Copywriter',
            goal='Create a compelling social media post in Portuguese to sell the product',
            backstory=("You are a creative copywriter specialized in the beauty and fragrance market. You know how to craft posts that are engaging, persuasive, and tailored for a Portuguese-speaking audience. You are an expert in using emojis and hashtags to increase engagement."),
            verbose=True,
            allow_delegation=False
        )

    def run_crew(self, product_url: str, main_cupom: str, main_cupom_discount_percentage: float, cupom_1: str, cupom_2: str) -> str:
        """Run the two-task crew for *product_url*.

        Returns whatever crew.kickoff() returns on success, or the plain string
        'INVALID_URL' when the page cannot be fetched or looks like a Next.js
        not-found page. Callers must therefore handle both a string and a
        kickoff result object.
        """
        # Browser-like headers so the product page does not reject the probe request.
        headers = {
            "accept": "*/*",
            "accept-language": "pt-BR,pt;q=0.9,en-US;q=0.8,en;q=0.7",
            "sec-ch-ua": '"Not)A;Brand";v="8", "Chromium";v="138", "Google Chrome";v="138"' ,
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": '"Windows"' ,
            "sec-fetch-dest": "empty",
            "sec-fetch-mode": "cors",
            "sec-fetch-site": "cross-site",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36",
        }
        # Cheap pre-flight check: fetch the page once before spending LLM calls on it.
        try:
            response = requests.get(product_url, headers=headers)
            response.raise_for_status()
            # Next.js embeds this marker on its not-found page (possibly even
            # with an HTTP 200 status), so check the body as well.
            if '<template data-dgst="NEXT_NOT_FOUND">' in response.text:
                return "INVALID_URL"
        except requests.exceptions.RequestException as e:
            print(f"Error checking URL: {e}")
            return "INVALID_URL"

        # Task 1: extract product facts, compute the coupon price, shorten the URL.
        analyze_product_task = Task(
            description=(f"1. Scrape the content of the URL: {product_url} using the 'scrape_tool'.\n2. Identify and extract the original product price and the final discounted price if existing. IGNORE any price breakdowns like 'produto' or 'consultoria'.\n3. Extract the product name, key characteristics, and any other relevant DISCOUNT available.\n4. Use the 'Calculate Discounted Price Tool' with the extracted final best price and the provided discount percentage ({main_cupom_discount_percentage}) to get the CUPOM DISCOUNTED PRICE.\n5. Use the 'URL Shortener Tool' to generate a short URL for {product_url}. If the shortener tool returns an error, use the original URL.\n6. Provide all this information, including the product name, ORIGINAL PRICE (the primary price from step 2), CUPOM DISCOUNTED PRICE, and the generated short URL (or the original if the shortener failed). If any of this information cannot be extracted, you MUST return 'MISSING_PRODUCT_INFO'."),
            agent=self.product_analyst,
            expected_output="A concise summary of the product including its name, key features, unique selling points, ORIGINAL PRICE, CUPOM DISCOUNTED PRICE, and the SHORT SHAREABLE URL (or the original if the shortener failed), OR 'MISSING_PRODUCT_INFO' if essential product details are not found."
        )

        # Task 2: render the final WhatsApp post from the analysis (runs second
        # via context=[analyze_product_task] and Process.sequential).
        create_post_task = Task(
            description=(f"""Based on the product analysis, create a CONCISE and DIRECT social media post in Portuguese, suitable for a WhatsApp group.
If the input you receive is 'INVALID_URL' or 'MISSING_PRODUCT_INFO', you MUST stop and output only that same message.
The post should strictly follow this template:
###Template:
{{Title}}

{{Description}}

De ~~{{ORIGINAL PRICE}}~~
🔥Por {{CUPOM DISCOUNTED PRICE}} 🔥

❤️ USE O CUPOM >>> {main_cupom}

🛒 Link >>> {{short_url}}

`🎟️ *Cupom válido para a primeira compra no link Minha Loja Natura, mesmo se já comprou no app ou link antigo. Demais compras ou app, use o cupom {cupom_1} ou {cupom_2} (o desconto é um pouco menor)`
###End Template

Ensure a URL is always present in the output. Include a clear call to action and a MAXIMUM of 2 relevant emojis. DO NOT include hashtags. Keep it short and impactful and does not forget to include the backticks around the last paragraph.

If the input you receive is 'INVALID_URL', you MUST stop and output only 'INVALID_URL'."""),
            agent=self.social_media_copywriter,
            expected_output="A short, direct, and impactful social media post in Portuguese for WhatsApp, strictly following the provided template, including the FINAL PRICE, any DISCOUNT, the SHORT SHAREABLE URL, a call to action, and up to 2 emojis, one in the Title and another in the Description. No hashtags should be present. A URL must always be present in the final output, OR the message 'INVALID_URL' or 'MISSING_PRODUCT_INFO' if the page was not found or product info is missing.",
            context=[analyze_product_task]
        )

        crew = Crew(
            agents=[self.product_analyst, self.social_media_copywriter],
            tasks=[analyze_product_task, create_post_task],
            process=Process.sequential
        )

        print(f"Crew is kicking off for URL: {product_url}")
        result = crew.kickoff()
        return result
|
145 |
+
|
146 |
+
# --- Gradio Interface ---
def generate_ad(product_url: str, main_cupom: str, main_cupom_discount_percentage: float, cupom_1: str, cupom_2: str):
    """Gradio callback: validate keys, run the crew, and map sentinel results
    to user-facing messages.

    Returns a markdown string for the ad output component.
    """
    api_keys = get_api_keys()
    if not api_keys["OPENAI_API_KEY"] or not api_keys["NATURA_API_TOKEN"]:
        return "Please configure your API keys in the settings section below."

    social_media_crew = SocialMediaCrew()
    result = social_media_crew.run_crew(product_url, main_cupom, main_cupom_discount_percentage, cupom_1, cupom_2)

    # BUG FIX: run_crew returns a plain string only for its own early
    # validation failures; after a successful kickoff() it returns a result
    # object, and comparing that object to a string is always False — so the
    # agents' own 'INVALID_URL' / 'MISSING_PRODUCT_INFO' sentinels were never
    # caught here. Normalize to text (via .raw when present) before comparing.
    text = result if isinstance(result, str) else getattr(result, "raw", str(result))

    if text == "INVALID_URL":
        return "❌ The provided URL is invalid or the product page could not be found."
    elif text == "MISSING_PRODUCT_INFO":
        return "⚠️ Could not extract all required product information from the URL. Please check the URL or try a different one."
    else:
        return text
|
161 |
+
|
162 |
+
with gr.Blocks() as demo:
    # Page header.
    gr.Markdown("# 🚀 Social Media Ad Generator")
    gr.Markdown("Enter a product URL to generate a social media ad.")

    with gr.Tab("Generate Ad"):
        # Inputs mirror generate_ad()'s parameters, in the same order.
        product_url_tb = gr.Textbox(label="Product URL", placeholder="Enter product URL here...")

        main_cupom_tb = gr.Textbox(label="Main Cupom (e.g., PRIMEIRACOMPRA)", value="PRIMEIRACOMPRA")
        main_discount_nb = gr.Number(label="Main Cupom Discount Percentage (e.g., 20 for 20%)", value=20, minimum=0, maximum=100)
        cupom_one_tb = gr.Textbox(label="Cupom 1 (e.g., AMIGO15)", placeholder="Enter first coupon code...")
        cupom_two_tb = gr.Textbox(label="Cupom 2 (e.g., JULHOA)", placeholder="Enter second coupon code...")
        run_btn = gr.Button("Generate Ad")
        ad_markdown = gr.Markdown(label="Your Generated Ad")

    with gr.Tab("Settings"):
        gr.Markdown("### ⚙️ API Key Settings")
        gr.Markdown("Enter your API keys below. These will be stored as environment variables for the running application.")
        # Pre-fill from the environment so already-configured keys show up (masked).
        openai_key_tb = gr.Textbox(label="OPENAI_API_KEY", type="password", value=os.getenv("OPENAI_API_KEY"))
        natura_token_tb = gr.Textbox(label="NATURA_API_TOKEN", type="password", value=os.getenv("NATURA_API_TOKEN"))
        save_btn = gr.Button("Save Keys")
        save_status = gr.Markdown()
        save_btn.click(set_api_key, inputs=[openai_key_tb, natura_token_tb], outputs=save_status)

    # Main action: run the crew and render the resulting ad.
    run_btn.click(generate_ad, inputs=[product_url_tb, main_cupom_tb, main_discount_nb, cupom_one_tb, cupom_two_tb], outputs=ad_markdown)

demo.launch()
|
requirements.txt
ADDED
@@ -0,0 +1,232 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
aiofiles==24.1.0
|
2 |
+
aiohappyeyeballs==2.6.1
|
3 |
+
aiohttp==3.12.14
|
4 |
+
aiosignal==1.4.0
|
5 |
+
alembic==1.16.4
|
6 |
+
annotated-types==0.7.0
|
7 |
+
anyio==4.9.0
|
8 |
+
appdirs==1.4.4
|
9 |
+
asgiref==3.9.1
|
10 |
+
asttokens==3.0.0
|
11 |
+
attrs==25.3.0
|
12 |
+
backoff==2.2.1
|
13 |
+
bcrypt==4.3.0
|
14 |
+
beautifulsoup4==4.13.4
|
15 |
+
blinker==1.9.0
|
16 |
+
brotli==1.1.0
|
17 |
+
build==1.2.2.post1
|
18 |
+
cachetools==5.5.2
|
19 |
+
certifi==2025.7.14
|
20 |
+
cffi==1.17.1
|
21 |
+
charset-normalizer==3.4.2
|
22 |
+
chroma-hnswlib==0.7.6
|
23 |
+
chromadb==0.5.23
|
24 |
+
click==8.2.1
|
25 |
+
cohere==5.16.1
|
26 |
+
coloredlogs==15.0.1
|
27 |
+
crewai==0.148.0
|
28 |
+
crewai-tools==0.55.0
|
29 |
+
cryptography==45.0.5
|
30 |
+
dataclasses-json==0.6.7
|
31 |
+
decorator==5.2.1
|
32 |
+
deprecation==2.1.0
|
33 |
+
diskcache==5.6.3
|
34 |
+
distro==1.9.0
|
35 |
+
docker==7.1.0
|
36 |
+
docstring-parser==0.16
|
37 |
+
durationpy==0.10
|
38 |
+
embedchain==0.1.128
|
39 |
+
et-xmlfile==2.0.0
|
40 |
+
executing==2.2.0
|
41 |
+
fastapi==0.116.1
|
42 |
+
fastavro==1.11.1
|
43 |
+
ffmpy==0.6.0
|
44 |
+
filelock==3.18.0
|
45 |
+
flatbuffers==25.2.10
|
46 |
+
frozenlist==1.7.0
|
47 |
+
fsspec==2025.7.0
|
48 |
+
google-auth==2.40.3
|
49 |
+
googleapis-common-protos==1.70.0
|
50 |
+
gptcache==0.1.44
|
51 |
+
gradio==5.38.0
|
52 |
+
gradio-client==1.11.0
|
53 |
+
greenlet==3.2.3
|
54 |
+
groovy==0.1.2
|
55 |
+
grpcio==1.73.1
|
56 |
+
h11==0.16.0
|
57 |
+
h2==4.2.0
|
58 |
+
hf-xet==1.1.5
|
59 |
+
hpack==4.1.0
|
60 |
+
httpcore==1.0.9
|
61 |
+
httptools==0.6.4
|
62 |
+
httpx==0.28.1
|
63 |
+
httpx-sse==0.4.0
|
64 |
+
huggingface-hub==0.33.4
|
65 |
+
humanfriendly==10.0
|
66 |
+
hyperframe==6.1.0
|
67 |
+
idna==3.10
|
68 |
+
importlib-metadata==8.7.0
|
69 |
+
importlib-resources==6.5.2
|
70 |
+
instructor==1.10.0
|
71 |
+
ipython==9.4.0
|
72 |
+
ipython-pygments-lexers==1.1.1
|
73 |
+
jedi==0.19.2
|
74 |
+
jinja2==3.1.6
|
75 |
+
jiter==0.10.0
|
76 |
+
json-repair==0.25.2
|
77 |
+
json5==0.12.0
|
78 |
+
jsonpatch==1.33
|
79 |
+
jsonpickle==4.1.1
|
80 |
+
jsonpointer==3.0.0
|
81 |
+
jsonref==1.1.0
|
82 |
+
jsonschema==4.25.0
|
83 |
+
jsonschema-specifications==2025.4.1
|
84 |
+
kubernetes==33.1.0
|
85 |
+
lancedb==0.24.1
|
86 |
+
langchain==0.3.26
|
87 |
+
langchain-cohere==0.3.5
|
88 |
+
langchain-community==0.3.27
|
89 |
+
langchain-core==0.3.69
|
90 |
+
langchain-experimental==0.3.4
|
91 |
+
langchain-openai==0.2.14
|
92 |
+
langchain-text-splitters==0.3.8
|
93 |
+
langsmith==0.3.45
|
94 |
+
litellm==1.72.6
|
95 |
+
mako==1.3.10
|
96 |
+
markdown-it-py==3.0.0
|
97 |
+
markupsafe==3.0.2
|
98 |
+
marshmallow==3.26.1
|
99 |
+
matplotlib-inline==0.1.7
|
100 |
+
mdurl==0.1.2
|
101 |
+
mem0ai==0.1.114
|
102 |
+
mkdocs-material==9.6.14
|
103 |
+
mkdocs-material-extensions==1.3.1
|
104 |
+
mlflow==3.1.1
|
105 |
+
mlflow-skinny==3.1.1
|
106 |
+
mmh3==5.1.0
|
107 |
+
monotonic==1.6
|
108 |
+
mpmath==1.3.0
|
109 |
+
multidict==6.6.3
|
110 |
+
mypy-extensions==1.1.0
|
111 |
+
networkx==3.5
|
112 |
+
nodeenv==1.9.1
|
113 |
+
numpy==2.3.1
|
114 |
+
oauthlib==3.3.1
|
115 |
+
onnxruntime==1.22.0
|
116 |
+
openai==1.97.0
|
117 |
+
openpyxl==3.1.5
|
118 |
+
opentelemetry-api==1.35.0
|
119 |
+
opentelemetry-exporter-otlp-proto-common==1.35.0
|
120 |
+
opentelemetry-exporter-otlp-proto-grpc==1.35.0
|
121 |
+
opentelemetry-exporter-otlp-proto-http==1.35.0
|
122 |
+
opentelemetry-instrumentation==0.56b0
|
123 |
+
opentelemetry-instrumentation-asgi==0.56b0
|
124 |
+
opentelemetry-instrumentation-fastapi==0.56b0
|
125 |
+
opentelemetry-proto==1.35.0
|
126 |
+
opentelemetry-sdk==1.35.0
|
127 |
+
opentelemetry-semantic-conventions==0.56b0
|
128 |
+
opentelemetry-util-http==0.56b0
|
129 |
+
orjson==3.11.0
|
130 |
+
overrides==7.7.0
|
131 |
+
packaging==25.0
|
132 |
+
paginate==0.5.7
|
133 |
+
pandas==2.3.1
|
134 |
+
parso==0.8.4
|
135 |
+
pathspec==0.12.1
|
136 |
+
pdfminer-six==20250506
|
137 |
+
pdfplumber==0.11.7
|
138 |
+
pexpect==4.9.0
|
139 |
+
pillow==11.3.0
|
140 |
+
platformdirs==4.3.8
|
141 |
+
portalocker==3.2.0
|
142 |
+
posthog==3.25.0
|
143 |
+
pre-commit==4.2.0
|
144 |
+
prompt-toolkit==3.0.51
|
145 |
+
propcache==0.3.2
|
146 |
+
protobuf==6.31.1
|
147 |
+
ptyprocess==0.7.0
|
148 |
+
pure-eval==0.2.3
|
149 |
+
pyarrow==21.0.0
|
150 |
+
pyasn1==0.6.1
|
151 |
+
pyasn1-modules==0.4.2
|
152 |
+
pybase64==1.4.1
|
153 |
+
pycparser==2.22
|
154 |
+
pydantic==2.11.7
|
155 |
+
pydantic-core==2.33.2
|
156 |
+
pydantic-settings==2.10.1
|
157 |
+
pydub==0.25.1
|
158 |
+
pygments==2.19.2
|
159 |
+
pyjwt==2.10.1
|
160 |
+
pymdown-extensions==10.16
|
161 |
+
pyparsing==3.2.3
|
162 |
+
pypdf==5.8.0
|
163 |
+
pypdfium2==4.30.1
|
164 |
+
pypika==0.48.9
|
165 |
+
pyproject-hooks==1.2.0
|
166 |
+
pyright==1.1.403
|
167 |
+
pysbd==0.3.4
|
168 |
+
python-dateutil==2.9.0.post0
|
169 |
+
python-dotenv==1.1.1
|
170 |
+
python-fasthtml==0.12.21
|
171 |
+
python-multipart==0.0.20
|
172 |
+
pytube==15.0.0
|
173 |
+
pytz==2025.2
|
174 |
+
pyvis==0.3.2
|
175 |
+
pyyaml==6.0.2
|
176 |
+
pyyaml-env-tag==1.1
|
177 |
+
qdrant-client==1.15.0
|
178 |
+
referencing==0.36.2
|
179 |
+
regex==2024.11.6
|
180 |
+
requests==2.32.4
|
181 |
+
requests-oauthlib==2.0.0
|
182 |
+
requests-toolbelt==1.0.0
|
183 |
+
rich==13.9.4
|
184 |
+
rpds-py==0.26.0
|
185 |
+
rsa==4.9.1
|
186 |
+
ruff==0.12.4
|
187 |
+
safehttpx==0.1.6
|
188 |
+
schema==0.7.7
|
189 |
+
scikit-learn==1.7.0
|
190 |
+
scipy==1.16.0
|
191 |
+
semantic-version==2.10.0
|
192 |
+
shellingham==1.5.4
|
193 |
+
six==1.17.0
|
194 |
+
smmap==5.0.2
|
195 |
+
sniffio==1.3.1
|
196 |
+
soupsieve==2.7
|
197 |
+
sqlalchemy==2.0.41
|
198 |
+
sqlparse==0.5.3
|
199 |
+
stack-data==0.6.3
|
200 |
+
starlette==0.47.1
|
201 |
+
sympy==1.14.0
|
202 |
+
tabulate==0.9.0
|
203 |
+
tenacity==9.1.2
|
204 |
+
threadpoolctl==3.6.0
|
205 |
+
tiktoken==0.9.0
|
206 |
+
tokenizers==0.20.3
|
207 |
+
tomli==2.2.1
|
208 |
+
tomli-w==1.2.0
|
209 |
+
tomlkit==0.13.3
|
210 |
+
tqdm==4.67.1
|
211 |
+
traitlets==5.14.3
|
212 |
+
typer==0.16.0
|
213 |
+
types-requests==2.32.4.20250611
|
214 |
+
typing-extensions==4.14.1
|
215 |
+
typing-inspect==0.9.0
|
216 |
+
typing-inspection==0.4.1
|
217 |
+
tzdata==2025.2
|
218 |
+
urllib3==2.5.0
|
219 |
+
uv==0.8.0
|
220 |
+
uvicorn==0.35.0
|
221 |
+
uvloop==0.21.0
|
222 |
+
virtualenv==20.31.2
|
223 |
+
watchdog==6.0.0
|
224 |
+
watchfiles==1.1.0
|
225 |
+
wcwidth==0.2.13
|
226 |
+
websocket-client==1.8.0
|
227 |
+
websockets==15.0.1
|
228 |
+
werkzeug==3.1.3
|
229 |
+
wrapt==1.17.2
|
230 |
+
yarl==1.20.1
|
231 |
+
zipp==3.23.0
|
232 |
+
zstandard==0.23.0
|
shortener_tool.py
ADDED
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
import os
|
3 |
+
import requests
|
4 |
+
from crewai.tools import BaseTool
|
5 |
+
|
6 |
+
class ShortenerTool(BaseTool):
    """CrewAI tool that shortens a URL through Natura's 'Minha Loja' shortener API.

    The request mimics a browser call from minhaloja.natura.com (origin/referer
    and client-hint headers) and authenticates with the NATURA_API_TOKEN
    environment variable. On any failure the original URL is returned, so the
    caller always receives a usable link.
    """

    name: str = "URL Shortener Tool"
    description: str = "Generates a short version of a given URL using an external API."

    def _run(self, original_url: str) -> str:
        """Return a shortened version of *original_url*, or the URL unchanged on failure."""
        api_url = "https://sales-mgmt-cb-bff-apigw.prd.naturacloud.com/cb-bff-cms/cms/shortener"
        headers = {
            "accept": "*/*",
            "accept-language": "pt-BR,pt;q=0.9,en-US;q=0.8,en;q=0.7",
            "authorization": f"Bearer {os.getenv('NATURA_API_TOKEN')}",  # Token from env
            "content-type": "application/json",
            "origin": "https://minhaloja.natura.com",
            "referer": "https://minhaloja.natura.com/",
            "sec-ch-ua": '"Not)A;Brand";v="8", "Chromium";v="138", "Google Chrome";v="138"',
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": '"Windows"',
            "sec-fetch-dest": "empty",
            "sec-fetch-mode": "cors",
            "sec-fetch-site": "cross-site",
            "siteid": "CBBrazil",
            "tenantid": "brazil-natura",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36",
        }
        payload = {"url": original_url}

        try:
            # BUG FIX: a bounded timeout so a hung shortener service cannot
            # block the caller indefinitely (requests has no default timeout).
            response = requests.post(api_url, headers=headers, json=payload, timeout=15)
            response.raise_for_status()  # Raise an exception for HTTP errors
            short_url_data = response.json()
            return short_url_data.get("short", original_url)  # Return original_url if short not found
        except requests.exceptions.RequestException as e:
            print(f"Warning: Error generating short URL for {original_url}: {e}. Returning original URL.")
            return original_url
        except ValueError:
            print(f"Warning: Invalid JSON response from shortener API for {original_url}. Returning original URL.")
            return original_url
|
42 |
+
|