fix
Files changed:
- .ipynb_checkpoints/app-checkpoint.py (+3 -4)
- app.py (+3 -4)
.ipynb_checkpoints/app-checkpoint.py
CHANGED
@@ -4,11 +4,13 @@ from llama_index.embeddings.huggingface import HuggingFaceEmbedding
 from llama_index.legacy.callbacks import CallbackManager
 from llama_index.llms.openai_like import OpenAILike
 
+
+st.set_page_config(page_title="llama_index_demo", page_icon="🦜🔗")
 # Create an instance of CallbackManager
 callback_manager = CallbackManager()
 api_base_url = "https://internlm-chat.intern-ai.org.cn/puyu/api/v1/"
 model = "internlm2.5-latest"
-api_key = 
+api_key = api_key = st.sidebar.text_input('API Key', value='', type='password')
 
 # api_base_url = "https://api.siliconflow.cn/v1"
 # model = "internlm/internlm2_5-7b-chat"
@@ -16,10 +18,7 @@ api_key = ""
 
 llm =OpenAILike(model=model, api_base=api_base_url, api_key=api_key, is_chat_model=True,callback_manager=callback_manager)
 
-
-st.set_page_config(page_title="llama_index_demo", page_icon="🦜🔗")
 st.title("llama_index_demo")
-# st.set_page_config(page_title="llamaindexdemo", page_icon=" ")
 
 # Initialize the model
 @st.cache_resource
app.py
CHANGED
@@ -4,11 +4,13 @@ from llama_index.embeddings.huggingface import HuggingFaceEmbedding
 from llama_index.legacy.callbacks import CallbackManager
 from llama_index.llms.openai_like import OpenAILike
 
+
+st.set_page_config(page_title="llama_index_demo", page_icon="🦜🔗")
 # Create an instance of CallbackManager
 callback_manager = CallbackManager()
 api_base_url = "https://internlm-chat.intern-ai.org.cn/puyu/api/v1/"
 model = "internlm2.5-latest"
-api_key = 
+api_key = api_key = st.sidebar.text_input('API Key', value='', type='password')
 
 # api_base_url = "https://api.siliconflow.cn/v1"
 # model = "internlm/internlm2_5-7b-chat"
@@ -16,10 +18,7 @@ api_key = ""
 
 llm =OpenAILike(model=model, api_base=api_base_url, api_key=api_key, is_chat_model=True,callback_manager=callback_manager)
 
-
-st.set_page_config(page_title="llama_index_demo", page_icon="🦜🔗")
 st.title("llama_index_demo")
-# st.set_page_config(page_title="llamaindexdemo", page_icon=" ")
 
 # Initialize the model
 @st.cache_resource