runtime error
…un_chainlit
    load_module(config.run.module_name)
  File "/home/user/.local/lib/python3.9/site-packages/chainlit/config.py", line 274, in load_module
    spec.loader.exec_module(module)
  File "<frozen importlib._bootstrap_external>", line 850, in exec_module
  File "<frozen importlib._bootstrap>", line 228, in _call_with_frames_removed
  File "app.py", line 45, in <module>
    split_chunks = text_splitter.split_documents(docs)
  File "/home/user/.local/lib/python3.9/site-packages/langchain_text_splitters/base.py", line 95, in split_documents
    return self.create_documents(texts, metadatas=metadatas)
  File "/home/user/.local/lib/python3.9/site-packages/langchain_text_splitters/base.py", line 78, in create_documents
    for chunk in self.split_text(text):
  File "/home/user/.local/lib/python3.9/site-packages/langchain_text_splitters/character.py", line 110, in split_text
    return self._split_text(text, self._separators)
  File "/home/user/.local/lib/python3.9/site-packages/langchain_text_splitters/character.py", line 92, in _split_text
    if self._length_function(s) < self._chunk_size:
  File "app.py", line 32, in hf_token_len
    tokenizer = AutoTokenizer.from_pretrained("solar-1-mini-chat")
  File "/home/user/.local/lib/python3.9/site-packages/transformers/models/auto/tokenization_auto.py", line 718, in from_pretrained
    tokenizer_config = get_tokenizer_config(pretrained_model_name_or_path, **kwargs)
  File "/home/user/.local/lib/python3.9/site-packages/transformers/models/auto/tokenization_auto.py", line 550, in get_tokenizer_config
    resolved_config_file = cached_file(
  File "/home/user/.local/lib/python3.9/site-packages/transformers/utils/hub.py", line 451, in cached_file
    raise EnvironmentError(
OSError: solar-1-mini-chat is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models'
If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=<your_token>`
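The failing frame is app.py line 32: `AutoTokenizer.from_pretrained("solar-1-mini-chat")` passes a bare name that is not a repo id on the Hugging Face Hub, and the call sits inside `hf_token_len`, so it is re-run for every chunk the splitter measures. Below is a minimal sketch of a possible fix; the repo id `upstage/solar-1-mini-tokenizer` and the `HF_TOKEN` environment variable are assumptions to verify against the actual model page, and the token is only needed if the repo is private or gated.

```python
import os
from transformers import AutoTokenizer

# Load the tokenizer once at module level with a full "org/name" repo id.
# NOTE: "upstage/solar-1-mini-tokenizer" is a hypothetical id; confirm the exact
# repo on huggingface.co. HF_TOKEN is only required for private/gated repos.
tokenizer = AutoTokenizer.from_pretrained(
    "upstage/solar-1-mini-tokenizer",
    token=os.environ.get("HF_TOKEN"),
)

def hf_token_len(text: str) -> int:
    # Count tokens with the tokenizer loaded above instead of re-creating it
    # on every call from the text splitter's length function.
    return len(tokenizer.encode(text))
```

Besides pointing at a valid (or authenticated) repo, hoisting the tokenizer out of the length function avoids re-initializing it for each chunk-size check the splitter performs.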