Spaces:
Space status: Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -6,15 +6,29 @@ import os
|
|
6 |
# --- Previous revision (file lines 6-20): OpenAI-backed web-page summarizer ---
from dotenv import load_dotenv

# Pull variables from a local .env file into os.environ.
load_dotenv()

# NOTE(review): ChatOpenAI reads OPENAI_API_KEY from the environment itself;
# this local binding is never passed on explicitly.
OPENAI_API_KEY = os.environ.get('OPENAI_API_KEY')

# Fetch the target page and load it as LangChain documents.
loader = WebBaseLoader("https://www.usinoip.com/")
docs = loader.load()

# Summarize with GPT-3.5 (16k context) via the simple "stuff" strategy,
# which concatenates all documents into one prompt.
llm = ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo-16k")
chain = load_summarize_chain(llm, chain_type="stuff")

result = chain.run(docs)
print(result)
|
|
|
6 |
# --- Current revision (file lines 6-34): Hugging Face Hub-backed summarizer ---
from dotenv import load_dotenv

# Pull credentials/config from a local .env file into os.environ.
load_dotenv()

# Hub credentials and model selection come from the environment.
hf_token = os.environ.get('HUGGINGFACEHUB_API_TOKEN')
repo_id = os.environ.get('repo_id')

# Fail fast with a clear message instead of letting HuggingFaceHub raise an
# opaque downstream error when the Space is misconfigured.
if not hf_token:
    raise RuntimeError(
        "HUGGINGFACEHUB_API_TOKEN is not set; add it to the environment or .env"
    )
if not repo_id:
    raise RuntimeError("repo_id is not set; add it to the environment or .env")

# Remote LLM served by the Hugging Face Inference API.
# Generation kwargs are tuned for Llama2 per the original comments.
llm = HuggingFaceHub(
    repo_id=repo_id,
    huggingfacehub_api_token=hf_token,
    model_kwargs={
        "min_length": 1024,     # for Llama2 (512 was the StarChat value)
        "max_new_tokens": 3072,
        "do_sample": True,
        "temperature": 0.1,
        "top_k": 50,
        "top_p": 0.95,
        # NOTE(review): 49155 is a StarChat end-token id carried over from the
        # earlier config — confirm it matches the configured repo's tokenizer.
        "eos_token_id": 49155,
    },
)

# Fetch the target page and summarize it with the simple "stuff" chain.
loader = WebBaseLoader("https://www.usinoip.com/")
docs = loader.load()
chain = load_summarize_chain(llm, chain_type="stuff")

result = chain.run(docs)
print(result)
|
|
|
|