Jangai committed on
Commit f49b96e · verified · 1 Parent(s): b3fb41e

Update app.py

Files changed (1)
  1. app.py +21 -12
app.py CHANGED
@@ -14,26 +14,35 @@ if not ZEPHYR_API_TOKEN or not SD_API_TOKEN:
 ZEPHYR_API_URL = "https://api-inference.huggingface.co/models/HuggingFaceH4/zephyr-7b-beta"
 SD_API_URL = "https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-xl-base-1.0"
 
-def query_zephyr(linkedin_text):
-    # Ensure that all lines inside this function are indented at the same level
-    messages = [
-        {
-            "role": "system",
-            "content": "Prepare a prompt for Stable Diffusion for the following LinkedIn post:",
-        },
-        {"role": "user", "content": linkedin_text},
+def query_zephyr(linkedin_text):
+    # Construct the prompt to include the LinkedIn post with an instruction for Zephyr
+    instruction = "Prepare a prompt for Stable Diffusion for the following LinkedIn post:"
+    conversation = [
+        {"role": "system", "content": instruction},
+        {"role": "user", "content": linkedin_text}
     ]
-    headers = {"Authorization": f"Bearer {ZEPHYR_API_TOKEN}"}
+
+    headers = {
+        "Authorization": f"Bearer {ZEPHYR_API_TOKEN}",
+        "Content-Type": "application/json",
+    }
+
+    # Format the payload according to the Hugging Face Inference API documentation
     payload = {
         "inputs": {
             "past_user_inputs": [],
             "generated_responses": [],
             "text": linkedin_text,
-            "conversation": messages
-        }
+            "conversation": conversation,
+        },
     }
+
     response = requests.post(ZEPHYR_API_URL, headers=headers, json=payload)
-    return response.json()
+    if response.status_code == 200:
+        return response.json()
+    else:
+        raise Exception(f"Failed to query Zephyr model, status code: {response.status_code}")
+
 
 
 def generate_image_from_prompt(prompt, negative_prompt, guidance_scale, width, height, num_inference_steps):
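Note: the snippet below is a minimal, self-contained sketch of the request pattern the updated query_zephyr adopts (Authorization and Content-Type headers, a conversational payload, and a status-code check before using the JSON response). Reading ZEPHYR_API_TOKEN from the environment and the sample LinkedIn text in the __main__ block are illustrative assumptions, not part of app.py.

# Standalone sketch of the updated query_zephyr request/error-handling pattern.
# Assumption: ZEPHYR_API_TOKEN is read from the environment; the demo text is a placeholder.
import os

import requests

ZEPHYR_API_URL = "https://api-inference.huggingface.co/models/HuggingFaceH4/zephyr-7b-beta"
ZEPHYR_API_TOKEN = os.environ.get("ZEPHYR_API_TOKEN", "")


def query_zephyr(linkedin_text):
    # Instruction for Zephyr plus the user's LinkedIn post, as in the commit.
    instruction = "Prepare a prompt for Stable Diffusion for the following LinkedIn post:"
    conversation = [
        {"role": "system", "content": instruction},
        {"role": "user", "content": linkedin_text},
    ]
    headers = {
        "Authorization": f"Bearer {ZEPHYR_API_TOKEN}",
        "Content-Type": "application/json",
    }
    payload = {
        "inputs": {
            "past_user_inputs": [],
            "generated_responses": [],
            "text": linkedin_text,
            "conversation": conversation,
        },
    }
    response = requests.post(ZEPHYR_API_URL, headers=headers, json=payload)
    # Fail loudly on non-200 responses instead of silently returning an error body.
    if response.status_code == 200:
        return response.json()
    raise Exception(f"Failed to query Zephyr model, status code: {response.status_code}")


if __name__ == "__main__":
    # Illustrative call; the post text is made up.
    print(query_zephyr("We just launched our new product line today!"))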