HemaMeena committed on
Commit
6baf0ef
·
verified ·
1 Parent(s): 9f6b2a1

Update texttrail.py

Browse files
Files changed (1) hide show
  1. texttrail.py +32 -66
texttrail.py CHANGED
@@ -1,75 +1,41 @@
1
  # -*- coding: utf-8 -*-
2
- """TextTrail.ipynb
 
3
 
4
- Automatically generated by Colab.
 
 
 
5
 
6
- Original file is located at
7
- https://colab.research.google.com/drive/19FMO4hPcBUvq4whuvATRRXDxJ4ONqElV
8
- """
9
 
10
- ! nvidia-smi -L
 
11
 
12
- # Commented out IPython magic to ensure Python compatibility.
13
- # %%time
14
- #
15
- # from IPython.display import clear_output
16
- #
17
- # ! pip install sentence_transformers==2.2.2
18
- #
19
- # ! pip install -qq -U langchain-community
20
- # ! pip install -U langchain-huggingface
21
- # ! pip install -qq -U tiktoken
22
- # ! pip install -qq -U pypdf
23
- # ! pip install -qq -U faiss-gpu
24
- # ! pip install -qq -U InstructorEmbedding
25
- #
26
- # ! pip install -qq -U transformers
27
- # ! pip install -qq -U accelerate
28
- # ! pip install -qq -U bitsandbytes
29
- #
30
- # clear_output()
 
 
 
 
31
 
32
- # Commented out IPython magic to ensure Python compatibility.
33
- # %%time
34
- #
35
- # import warnings
36
- # warnings.filterwarnings("ignore")
37
- #
38
- # import os
39
- # import glob
40
- # import textwrap
41
- # import time
42
- #
43
- # import langchain
44
- #
45
- # ### loaders
46
- # from langchain.document_loaders import PyPDFLoader, DirectoryLoader
47
- #
48
- # ### splits
49
- # from langchain.text_splitter import RecursiveCharacterTextSplitter
50
- #
51
- # ### prompts
52
- # from langchain import PromptTemplate, LLMChain
53
- #
54
- # ### vector stores
55
- # from langchain.vectorstores import FAISS
56
- #
57
- # ### models
58
- # from langchain.llms import HuggingFacePipeline
59
- # from langchain.embeddings import HuggingFaceInstructEmbeddings
60
- #
61
- # ### retrievers
62
- # from langchain.chains import RetrievalQA
63
- #
64
- # import torch
65
- # import transformers
66
- # from transformers import (
67
- # AutoTokenizer, AutoModelForCausalLM,
68
- # BitsAndBytesConfig,
69
- # pipeline
70
- # )
71
- #
72
- # clear_output()
73
 
74
  sorted(glob.glob('/content/anatomy_vol_*'))
75
 
 
1
  # -*- coding: utf-8 -*-
2
+ import warnings
3
+ warnings.filterwarnings("ignore")
4
 
5
+ import os
6
+ import glob
7
+ import textwrap
8
+ import time
9
 
10
+ import langchain
 
 
11
 
12
+ # Loaders
13
+ from langchain.document_loaders import PyPDFLoader, DirectoryLoader
14
 
15
+ # Splits
16
+ from langchain.text_splitter import RecursiveCharacterTextSplitter
17
+
18
+ # Prompts
19
+ from langchain import PromptTemplate, LLMChain
20
+
21
+ # Vector stores
22
+ from langchain.vectorstores import FAISS
23
+
24
+ # Models
25
+ from langchain.llms import HuggingFacePipeline
26
+ from langchain.embeddings import HuggingFaceInstructEmbeddings
27
+
28
+ # Retrievers
29
+ from langchain.chains import RetrievalQA
30
+
31
+ import torch
32
+ import transformers
33
+ from transformers import (
34
+ AutoTokenizer, AutoModelForCausalLM,
35
+ BitsAndBytesConfig,
36
+ pipeline
37
+ )
38
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
39
 
40
  sorted(glob.glob('/content/anatomy_vol_*'))
41