Update src/services/processor.py
src/services/processor.py (CHANGED)
@@ -11,9 +11,9 @@ model = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
 def retrieve_constraints(prompt):
     request_input = {"models": ["meta-llama/llama-4-scout-17b-16e-instruct"], "messages": [{"role":"user", "content":prompt}]}
     response = r.post("https://organizedprogrammers-bettergroqinterface.hf.space/chat", json=request_input)
-    print(f"response : {response}")
     decoded_content = json.loads(response.content.decode())
     llm_response = decoded_content["content"]
+    print(f"llm response : {llm_response}")
 
     start_marker = '{'
     end_marker = '}'
@@ -23,6 +23,7 @@ def retrieve_constraints(prompt):
 
     constraints_json = json.loads("{"+json_str+"}")
 
+    print(f"Whats returned : {constraints_json}")
     return constraints_json
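In effect, the commit replaces the earlier debug print of the raw HTTP Response object with prints of the decoded LLM reply and of the parsed constraints dict, which is more useful for seeing where JSON parsing goes wrong.

For context, below is a minimal, self-contained sketch of retrieve_constraints as it stands after this commit. It is a reconstruction, not the repository's exact code: it assumes r is requests imported under that alias, that json is imported at module level, and that the lines elided between the two hunks (20-22) slice out the text between the first '{' and the last '}' of the LLM reply before re-wrapping it in braces. Only the lines visible in the diff are taken verbatim.

import json

import requests as r  # assumption: the diff calls r.post(...), so r is taken to be the requests module


def retrieve_constraints(prompt):
    # Ask the hosted Groq interface to answer the prompt with a single model.
    request_input = {
        "models": ["meta-llama/llama-4-scout-17b-16e-instruct"],
        "messages": [{"role": "user", "content": prompt}],
    }
    response = r.post("https://organizedprogrammers-bettergroqinterface.hf.space/chat", json=request_input)

    # The endpoint returns JSON whose "content" field holds the model's text reply.
    decoded_content = json.loads(response.content.decode())
    llm_response = decoded_content["content"]
    print(f"llm response : {llm_response}")  # debug print added in this commit

    # Assumed reconstruction of the elided lines 20-22: keep only the text
    # between the first '{' and the last '}' so any prose around the JSON is dropped.
    start_marker = '{'
    end_marker = '}'
    start = llm_response.find(start_marker) + 1
    end = llm_response.rfind(end_marker)
    json_str = llm_response[start:end]

    constraints_json = json.loads("{" + json_str + "}")

    print(f"Whats returned : {constraints_json}")  # debug print added in this commit
    return constraints_json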