Tadashi committed on
Commit
e7f2f0c
·
unverified ·
1 Parent(s): 216ab65

fix: disable user management

Browse files
Files changed (2) hide show
  1. Dockerfile +2 -0
  2. flowsettings.py +299 -0
Dockerfile CHANGED
@@ -7,6 +7,8 @@ RUN --mount=type=ssh chown -R user:user /usr/local/lib/python3.10
7
  USER user
8
  WORKDIR /app
9
 
 
 
10
  ENV GRADIO_SERVER_NAME=0.0.0.0
11
  ENTRYPOINT ["python", "app.py"]
12
  EXPOSE 7860
 
7
  USER user
8
  WORKDIR /app
9
 
10
+ COPY flowsettings.py /app
11
+
12
  ENV GRADIO_SERVER_NAME=0.0.0.0
13
  ENTRYPOINT ["python", "app.py"]
14
  EXPOSE 7860
flowsettings.py ADDED
@@ -0,0 +1,299 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from importlib.metadata import version
3
+ from inspect import currentframe, getframeinfo
4
+ from pathlib import Path
5
+
6
+ from decouple import config
7
+ from theflow.settings.default import * # noqa
8
+
9
# Locate this settings file on disk so that app data can live beside it.
cur_frame = currentframe()
if cur_frame is None:
    raise ValueError("Cannot get the current frame.")
this_file = getframeinfo(cur_frame).filename
this_dir = Path(this_file).parent

# change this if your app uses a different package name
KH_PACKAGE_NAME = "kotaemon_app"


def _installed_version(package: str) -> str:
    """Best-effort lookup of *package*'s installed version; "local" on failure.

    Caution: This might produce the wrong version
    https://stackoverflow.com/a/59533071
    """
    try:
        return version(package)
    except Exception:
        return "local"


# Prefer an explicit KH_APP_VERSION env var, then the installed package
# metadata, and finally the "local" marker.
KH_APP_VERSION = config("KH_APP_VERSION", None) or _installed_version(KH_PACKAGE_NAME)
26
+
27
# The app can be run from anywhere and it's not trivial to decide where to
# store app data, so use the same directory as this flowsettings.py file.
KH_ENABLE_FIRST_SETUP = True
KH_APP_DATA_DIR = this_dir / "ktem_app_data"
# Record whether the data directory already existed *before* creating it below.
KH_APP_DATA_EXISTS = KH_APP_DATA_DIR.exists()

# Sub-directories for user data and the various on-disk caches.
KH_USER_DATA_DIR = KH_APP_DATA_DIR / "user_data"
KH_MARKDOWN_OUTPUT_DIR = KH_APP_DATA_DIR / "markdown_cache_dir"
KH_CHUNKS_OUTPUT_DIR = KH_APP_DATA_DIR / "chunks_cache_dir"
KH_ZIP_OUTPUT_DIR = KH_APP_DATA_DIR / "zip_cache_dir"
KH_ZIP_INPUT_DIR = KH_APP_DATA_DIR / "zip_cache_dir_in"

# Create every directory up front (idempotent).
for _data_dir in (
    KH_APP_DATA_DIR,
    KH_USER_DATA_DIR,
    KH_MARKDOWN_OUTPUT_DIR,
    KH_CHUNKS_OUTPUT_DIR,
    KH_ZIP_OUTPUT_DIR,
    KH_ZIP_INPUT_DIR,
):
    _data_dir.mkdir(parents=True, exist_ok=True)
53
+
54
# HF models can be big; store them in the app data directory so that it's
# easier for users to manage their storage.
# ref: https://huggingface.co/docs/huggingface_hub/en/guides/manage-cache
_hf_cache_dir = str(KH_APP_DATA_DIR / "huggingface")
os.environ["HF_HOME"] = _hf_cache_dir
os.environ["HF_HUB_CACHE"] = _hf_cache_dir

# Documentation directory shipped next to this file.
KH_DOC_DIR = this_dir / "docs"

KH_MODE = "dev"
# User management is disabled for this deployment (see commit message).
KH_FEATURE_USER_MANAGEMENT = False
KH_USER_CAN_SEE_PUBLIC = None
# Default admin credentials; override via environment variables.
KH_FEATURE_USER_MANAGEMENT_ADMIN = str(
    config("KH_FEATURE_USER_MANAGEMENT_ADMIN", default="admin")
)
KH_FEATURE_USER_MANAGEMENT_PASSWORD = str(
    config("KH_FEATURE_USER_MANAGEMENT_PASSWORD", default="admin")
)
KH_ENABLE_ALEMBIC = False
# SQLite database and file storage both live under the user data directory.
KH_DATABASE = f"sqlite:///{KH_USER_DATA_DIR / 'sql.db'}"
KH_FILESTORAGE_PATH = str(KH_USER_DATA_DIR / "files")
75
+
76
# Document store backend. Other kotaemon options:
# ElasticsearchDocumentStore, SimpleFileDocumentStore.
KH_DOCSTORE = {
    "__type__": "kotaemon.storages.LanceDBDocumentStore",
    "path": str(KH_USER_DATA_DIR / "docstore"),
}

# Vector store backend. Other kotaemon options:
# LanceDBVectorStore, MilvusVectorStore, QdrantVectorStore.
KH_VECTORSTORE = {
    "__type__": "kotaemon.storages.ChromaVectorStore",
    "path": str(KH_USER_DATA_DIR / "vectorstore"),
}

# LLM and embedding registries, populated below from environment settings.
KH_LLMS: dict = {}
KH_EMBEDDINGS: dict = {}
91
+
92
# Populate model options from the environment (python-decouple `config`).
# Azure OpenAI: registered only when both key and endpoint are configured.
_azure_api_key = config("AZURE_OPENAI_API_KEY", default="")
_azure_endpoint = config("AZURE_OPENAI_ENDPOINT", default="")
if _azure_api_key and _azure_endpoint:
    # Empty OPENAI_API_VERSION falls back to a known-good preview version.
    _azure_api_version = (
        config("OPENAI_API_VERSION", default="") or "2024-02-15-preview"
    )

    _azure_chat_deployment = config("AZURE_OPENAI_CHAT_DEPLOYMENT", default="")
    if _azure_chat_deployment:
        KH_LLMS["azure"] = {
            "spec": {
                "__type__": "kotaemon.llms.AzureChatOpenAI",
                "temperature": 0,
                "azure_endpoint": _azure_endpoint,
                "api_key": _azure_api_key,
                "api_version": _azure_api_version,
                "azure_deployment": _azure_chat_deployment,
                "timeout": 20,
            },
            "default": False,
        }

    _azure_embed_deployment = config(
        "AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT", default=""
    )
    if _azure_embed_deployment:
        KH_EMBEDDINGS["azure"] = {
            "spec": {
                "__type__": "kotaemon.embeddings.AzureOpenAIEmbeddings",
                "azure_endpoint": _azure_endpoint,
                "api_key": _azure_api_key,
                "api_version": _azure_api_version,
                "azure_deployment": _azure_embed_deployment,
                "timeout": 10,
            },
            "default": False,
        }
125
+
126
# OpenAI: registered (and made the default) when an API key is configured.
if config("OPENAI_API_KEY", default=""):
    # Fall back to the public endpoint when OPENAI_API_BASE is unset OR set to
    # an empty string. Previously the embeddings entry used
    # config("OPENAI_API_BASE", default="https://api.openai.com/v1"), which
    # yielded an empty base_url when the variable existed but was empty,
    # diverging from the chat entry below.
    _openai_base_url = (
        config("OPENAI_API_BASE", default="") or "https://api.openai.com/v1"
    )
    KH_LLMS["openai"] = {
        "spec": {
            "__type__": "kotaemon.llms.ChatOpenAI",
            "temperature": 0,
            "base_url": _openai_base_url,
            "api_key": config("OPENAI_API_KEY", default=""),
            "model": config("OPENAI_CHAT_MODEL", default="gpt-3.5-turbo"),
            "timeout": 20,
        },
        "default": True,
    }
    KH_EMBEDDINGS["openai"] = {
        "spec": {
            "__type__": "kotaemon.embeddings.OpenAIEmbeddings",
            "base_url": _openai_base_url,
            "api_key": config("OPENAI_API_KEY", default=""),
            "model": config(
                "OPENAI_EMBEDDINGS_MODEL", default="text-embedding-ada-002"
            ),
            "timeout": 10,
            "context_length": 8191,
        },
        "default": True,
    }
152
+
153
# Local models served through Ollama's OpenAI-compatible API.
if config("LOCAL_MODEL", default=""):
    _ollama_base_url = "http://localhost:11434/v1/"
    KH_LLMS["ollama"] = {
        "spec": {
            "__type__": "kotaemon.llms.ChatOpenAI",
            "base_url": _ollama_base_url,
            "model": config("LOCAL_MODEL", default="llama3.1:8b"),
            "api_key": "ollama",
        },
        "default": False,
    }
    KH_EMBEDDINGS["ollama"] = {
        "spec": {
            "__type__": "kotaemon.embeddings.OpenAIEmbeddings",
            "base_url": _ollama_base_url,
            "model": config("LOCAL_MODEL_EMBEDDINGS", default="nomic-embed-text"),
            "api_key": "ollama",
        },
        "default": False,
    }
172
+
173
# Local CPU embeddings via FastEmbed (opt-in, not the default).
KH_EMBEDDINGS["fast_embed"] = {
    "spec": {
        "__type__": "kotaemon.embeddings.FastEmbedEmbeddings",
        "model_name": "BAAI/bge-base-en-v1.5",
    },
    "default": False,
}

# Additional LLM configurations; "your-key" is a placeholder API key.
for _llm_name, _llm_spec in (
    (
        "claude",
        {
            "__type__": "kotaemon.llms.chats.LCAnthropicChat",
            "model_name": "claude-3-5-sonnet-20240620",
            "api_key": "your-key",
        },
    ),
    # Gemini could be registered the same way, with
    # "__type__": "kotaemon.llms.chats.LCGeminiChat" and
    # "model_name": "gemini-1.5-pro".
    (
        "groq",
        {
            "__type__": "kotaemon.llms.ChatOpenAI",
            "base_url": "https://api.groq.com/openai/v1",
            "model": "llama-3.1-8b-instant",
            "api_key": "your-key",
        },
    ),
    (
        "cohere",
        {
            "__type__": "kotaemon.llms.chats.LCCohereChat",
            "model_name": "command-r-plus-08-2024",
            "api_key": "your-key",
        },
    ),
):
    KH_LLMS[_llm_name] = {"spec": _llm_spec, "default": False}

# Additional embeddings configurations.
KH_EMBEDDINGS["cohere"] = {
    "spec": {
        "__type__": "kotaemon.embeddings.LCCohereEmbeddings",
        "model": "embed-multilingual-v2.0",
        "cohere_api_key": "your-key",
        "user_agent": "default",
    },
    "default": False,
}
# A local HuggingFace option could be registered the same way, with
# "__type__": "kotaemon.embeddings.LCHuggingFaceEmbeddings" and
# "model_name": "sentence-transformers/all-mpnet-base-v2".
233
+
234
# Reasoning pipelines offered to the user, in display order.
KH_REASONINGS = [
    "ktem.reasoning.simple.FullQAPipeline",
    "ktem.reasoning.simple.FullDecomposeQAPipeline",
    "ktem.reasoning.react.ReactAgentPipeline",
    "ktem.reasoning.rewoo.RewooAgentPipeline",
]
KH_REASONINGS_USE_MULTIMODAL = False

# Azure OpenAI chat-completions URL used for vision (multimodal) calls.
_vlm_endpoint = config("AZURE_OPENAI_ENDPOINT", default="")
_vlm_deployment = config("OPENAI_VISION_DEPLOYMENT_NAME", default="gpt-4o")
_vlm_api_version = config("OPENAI_API_VERSION", default="")
KH_VLM_ENDPOINT = (
    f"{_vlm_endpoint}/openai/deployments/{_vlm_deployment}"
    f"/chat/completions?api-version={_vlm_api_version}"
)
246
+
247
+
248
# Per-application settings (currently none).
SETTINGS_APP: dict[str, dict] = {}


# User-facing reasoning settings rendered on the settings page.
_reasoning_use = {
    "name": "Reasoning options",
    "value": None,
    "choices": [],
    "component": "radio",
}
_reasoning_lang = {
    "name": "Language",
    "value": "en",
    "choices": [("English", "en"), ("Japanese", "ja"), ("Vietnamese", "vi")],
    "component": "dropdown",
}
_reasoning_max_context = {
    "name": "Max context length (LLM)",
    "value": 32000,
    "component": "number",
}
SETTINGS_REASONING = {
    "use": _reasoning_use,
    "lang": _reasoning_lang,
    "max_context_length": _reasoning_max_context,
}
270
+
271
+
272
# File extensions accepted by both indices (shared between them).
_SUPPORTED_FILE_TYPES = (
    ".png, .jpeg, .jpg, .tiff, .tif, .pdf, .xls, .xlsx, .doc, .docx, "
    ".pptx, .csv, .html, .mhtml, .txt, .md, .zip"
)

# Index implementations available to the app.
KH_INDEX_TYPES = [
    "ktem.index.file.FileIndex",
    "ktem.index.file.graph.GraphRAGIndex",
]

# Concrete indices created at startup: a plain file index and a GraphRAG one.
KH_INDICES = [
    {
        "name": _index_name,
        "config": {
            "supported_file_types": _SUPPORTED_FILE_TYPES,
            "private": False,
        },
        "index_type": _index_type,
    }
    for _index_name, _index_type in (
        ("File", "ktem.index.file.FileIndex"),
        ("GraphRAG", "ktem.index.file.graph.GraphRAGIndex"),
    )
]