Update app.py
app.py CHANGED

@@ -1,11 +1,13 @@
 import streamlit as st
 import os
+import requests
+
 
 #from transformers import BertModel, BertTokenizer
 from transformers import HfAgent, load_tool
 
 import torch
-from transformers import AutoModelForCausalLM, AutoTokenizer, LocalAgent
+from transformers import AutoModelForCausalLM, AutoTokenizer, Agent, LocalAgent
 
 
 #checkpoint = "THUDM/agentlm-7b"
@@ -180,7 +182,7 @@ def handle_submission():
 # Initialize the agent with the selected tools
 #agent = HfAgent("https://api-inference.huggingface.co/models/bigcode/starcoder", additional_tools=tools)
 #agent = HfAgent("https://api-inference.huggingface.co/models/OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5", additional_tools=tools)
-agent = HfAgent("https://api-inference.huggingface.co/models/THUDM/agentlm-7b", additional_tools=tools)
+#agent = HfAgent("https://api-inference.huggingface.co/models/THUDM/agentlm-7b", additional_tools=tools)
 
 
 # agent.config.tokenizer = tokenizer
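The diff comments out the remote `HfAgent` endpoint call and adds `Agent`/`LocalAgent` to the imports, which suggests moving the agent loop onto a locally loaded model. Below is a minimal sketch of how those imports could be wired to the commented-out `THUDM/agentlm-7b` checkpoint; it is not the code in this commit, the empty `tools` list stands in for the tool list app.py builds from the user's selection, and the dtype/`device_map` settings are assumptions.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, LocalAgent

checkpoint = "THUDM/agentlm-7b"  # checkpoint name taken from the commented-out line in app.py
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForCausalLM.from_pretrained(
    checkpoint,
    torch_dtype=torch.bfloat16,  # assumption: reduced precision to fit on a single GPU
    device_map="auto",           # assumption: let accelerate place the weights
)

# Placeholder for the tool list app.py assembles before "Initialize the agent with the selected tools".
tools = []

# LocalAgent runs the same agent interface against the local model
# instead of the Inference API endpoint that HfAgent calls.
agent = LocalAgent(model, tokenizer, additional_tools=tools)
agent.run("Draw me a picture of rivers and lakes.")
```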