Update app.py
app.py
CHANGED
@@ -19,7 +19,7 @@ HUGGINGFACEHUB_API_TOKEN = os.getenv('HUGGINGFACEHUB_API_TOKEN')
 # Initialize the model instances
 repo_id = "mistralai/Mistral-7B-Instruct-v0.2"
 llm_model_instance = HuggingFaceEndpoint(
-    repo_id=repo_id, max_length=128, temperature=0.
+    repo_id=repo_id, max_length=128, temperature=0.3, token=HUGGINGFACEHUB_API_TOKEN
 )

 embedder_model_instance = HuggingFaceInferenceAPIEmbeddings(
@@ -59,7 +59,7 @@ def scrape_and_summarize(prompt, source):
     else:
         raise ValueError(f"Invalid JSON output: {result}") from e

-    return result_json
+    return result_json

 # Gradio interface
 with gr.Blocks() as demo:
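For context, a minimal sketch of how the updated initialization could sit at the top of app.py. The import paths and the embedder arguments (api_key and the placeholder model_name) are assumptions, since only the diff hunks above are visible; the HuggingFaceEndpoint call mirrors the committed line.

import os

# Assumed import paths; the full app.py is not shown in this diff.
from langchain_community.llms import HuggingFaceEndpoint
from langchain_community.embeddings import HuggingFaceInferenceAPIEmbeddings

HUGGINGFACEHUB_API_TOKEN = os.getenv('HUGGINGFACEHUB_API_TOKEN')

# Initialize the model instances
repo_id = "mistralai/Mistral-7B-Instruct-v0.2"
llm_model_instance = HuggingFaceEndpoint(
    # The commit sets the temperature to 0.3 and passes the API token explicitly.
    repo_id=repo_id, max_length=128, temperature=0.3, token=HUGGINGFACEHUB_API_TOKEN
)

embedder_model_instance = HuggingFaceInferenceAPIEmbeddings(
    # Placeholder model name; the embedder's arguments are not visible in the diff.
    api_key=HUGGINGFACEHUB_API_TOKEN,
    model_name="sentence-transformers/all-MiniLM-L6-v2",
)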