Update app.py
app.py CHANGED
@@ -1,16 +1,14 @@
-from __future__ import annotations
-from typing import List, Dict, Union
-import os
-import re
-import subprocess
-import streamlit as st
-import time
 from langchain_core.prompts import PromptTemplate
+from langchain_core.runnables import RunnableSequence
+from huggingface_hub import HuggingFaceHub, InferenceApi as InferenceClient
 from langchain_community.llms import HuggingFaceEndpoint
-from
+from streamlit import StreamlitApp, write, text_input, text_area, button, session_state, write as st_write
+import os
+import time
 
 # Load LLM
-llm =
+llm = HuggingFaceEndpoint(repo_id="tiiuae/falcon-7b-instruct", model_kwargs={"temperature": 0.1, "max_new_tokens": 500})
+
 
 class Agent:
     def __init__(self, name: str, agent_type: str, complexity: int):
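Several of the added import lines reference names that do not appear to exist in those packages: streamlit exports no StreamlitApp class, and HuggingFaceHub is a LangChain LLM wrapper rather than part of huggingface_hub, so the new header would likely fail at import time. Below is a minimal sketch of a header that imports cleanly while keeping the same Falcon endpoint call as the diff; the st alias, the plain InferenceClient import, and the token comment are assumptions, not content of this commit.

# Sketch only: imports that resolve in current streamlit / huggingface_hub /
# langchain releases, keeping the endpoint configuration from the diff above.
import os
import time

import streamlit as st  # idiomatic alias; the removed header used the same pattern
from huggingface_hub import InferenceClient  # assumed stand-in for "InferenceApi as InferenceClient"
from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import RunnableSequence
from langchain_community.llms import HuggingFaceEndpoint

# Load LLM -- same repo and generation settings as in the commit.
# Assumes a Hugging Face token is available to the Space (e.g. as a
# HUGGINGFACEHUB_API_TOKEN secret); without one the endpoint will reject requests.
llm = HuggingFaceEndpoint(repo_id="tiiuae/falcon-7b-instruct", model_kwargs={"temperature": 0.1, "max_new_tokens": 500})

Restoring import streamlit as st, which the removed header already used, also avoids the long from streamlit import ... line and presumably keeps any existing st.* calls in the rest of app.py working unchanged.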