Jangai committed
Commit 2b25af2 · verified · 1 Parent(s): 7326a19

Update app.py

Files changed (1)
  1. app.py +12 -3
app.py CHANGED
@@ -14,10 +14,19 @@ if not ZEPHYR_API_TOKEN or not SD_API_TOKEN:
 ZEPHYR_API_URL = "https://api-inference.huggingface.co/models/HuggingFaceH4/zephyr-7b-beta"
 SD_API_URL = "https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-xl-base-1.0"
 
-def query_zephyr(linkedin_text):
-    prompt = f"prepare a prompt for Stable Diffusion for the following Linkedin post: {linkedin_text}"
+def query_zephyr(linkedin_text):
+    messages = [
+        {
+            "role": "system",
+            "content": "Prepare a prompt for Stable Diffusion for the following LinkedIn post:",
+        },
+        {"role": "user", "content": linkedin_text},
+    ]
     headers = {"Authorization": f"Bearer {ZEPHYR_API_TOKEN}"}
-    response = requests.post(ZEPHYR_API_URL, headers=headers, json={"inputs": prompt})
+    # Build the payload by hand (the app does not use the transformers library),
+    # mimicking the structure needed for chat interactions.
+    payload = json.dumps({"inputs": {"past_user_inputs": [], "generated_responses": [], "text": linkedin_text, "conversation": messages}})
+    response = requests.post(ZEPHYR_API_URL, headers=headers, data=payload)
     return response.json()
 
 def generate_image_from_prompt(prompt, negative_prompt, guidance_scale, width, height, num_inference_steps):
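
For context, a minimal sketch of how the two helpers in app.py might be wired together after this change. The import of app, the response key "generated_text", and the Stable Diffusion parameter values below are assumptions for illustration only; the diff does not show how app.py actually consumes query_zephyr's output.

# Hypothetical wiring of the two helpers defined in app.py.
# Assumption: importing app has no side effects such as launching a UI at import time.
from app import query_zephyr, generate_image_from_prompt


def linkedin_post_to_image(linkedin_text):
    zephyr_json = query_zephyr(linkedin_text)
    # Assumption: the Inference API response carries the generated prompt under
    # "generated_text"; inspect zephyr_json to confirm the real structure.
    if isinstance(zephyr_json, list) and zephyr_json:
        sd_prompt = zephyr_json[0].get("generated_text", "")
    elif isinstance(zephyr_json, dict):
        sd_prompt = zephyr_json.get("generated_text", "")
    else:
        sd_prompt = str(zephyr_json)
    # Example parameter values only; app.py's real defaults are outside this hunk.
    return generate_image_from_prompt(
        sd_prompt,
        "blurry, low quality",  # negative_prompt
        7.5,                    # guidance_scale
        1024,                   # width
        1024,                   # height
        30,                     # num_inference_steps
    )

Note that the new query_zephyr calls json.dumps, so app.py also needs import json near the top if it is not already there; the import section lies outside the lines shown in this hunk.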