Upload 2 files

- app.py +5 -5
- requirements.txt +3 -5
app.py
CHANGED
@@ -1,7 +1,7 @@
 from fastapi import FastAPI
-from transformers import AutoModelForCausalLM
-from mistral_common import MistralTokenizer # Hypothetical package, adjust based on actual package name and usage
+from transformers import AutoModelForCausalLM, AutoTokenizer
 from peft import PeftModel, PeftConfig
+from mistral_common.tokenizer import AutoMistralTokenizer
 
 # Initialize FastAPI app
 app = FastAPI()
@@ -12,11 +12,11 @@ base_model = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-Instruct
 model = PeftModel.from_pretrained(base_model, "frankmorales2020/Mistral-7B-text-to-sql-flash-attention-2-dataeval")
 
 # Load recommended tokenizer
-tokenizer =
+tokenizer = AutoMistralTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.3")
 
 # Create the pipeline
 from transformers import pipeline
-pipe = pipeline("
+pipe = pipeline("text2text-generation", model=model, tokenizer=tokenizer)
 
 @app.get("/")
 def home():
@@ -25,4 +25,4 @@ def home():
 @app.get("/generate")
 def generate(text: str):
     output = pipe(text)
-    return {"output": output[0]['generated_text']}
+    return {"output": output[0]['generated_text']}
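For comparison, below is a minimal sketch of the same service written entirely against the transformers API. It is not the committed code: the old file's own comment already flagged the mistral_common import as hypothetical, so this version loads the tokenizer with AutoTokenizer and uses the "text-generation" task (the pipeline task for causal LMs such as Mistral) rather than "text2text-generation". The v0.3 base-model ID mirrors the tokenizer line in the diff (the base checkpoint ID is truncated in the hunk header), the home() body and max_new_tokens value are placeholders, and merge_and_unload() is used only so the pipeline receives a plain transformers model.

from fastapi import FastAPI
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
from peft import PeftModel

app = FastAPI()

# IDs taken from the diff above; the base checkpoint is assumed to be v0.3.
BASE_ID = "mistralai/Mistral-7B-Instruct-v0.3"
ADAPTER_ID = "frankmorales2020/Mistral-7B-text-to-sql-flash-attention-2-dataeval"

# Load the base causal LM, attach the LoRA adapter, then merge it so the
# result is a plain transformers model that the pipeline accepts.
base_model = AutoModelForCausalLM.from_pretrained(BASE_ID, torch_dtype="auto")
model = PeftModel.from_pretrained(base_model, ADAPTER_ID).merge_and_unload()

# Hugging Face tokenizer for the base model.
tokenizer = AutoTokenizer.from_pretrained(BASE_ID)

# Causal LMs use the "text-generation" pipeline task.
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)

@app.get("/")
def home():
    # Placeholder body; the real handler's body is not shown in the diff.
    return {"status": "ok"}

@app.get("/generate")
def generate(text: str):
    # max_new_tokens is an illustrative choice, not taken from the Space.
    output = pipe(text, max_new_tokens=256)
    return {"output": output[0]["generated_text"]}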
requirements.txt
CHANGED
@@ -1,11 +1,9 @@
 fastapi==0.103.0
 requests==2.27.*
 uvicorn[standard]==0.17.*
-sentencepiece==0.1.99 # Downgraded to match mistral-common's requirement
 torch>=1.13.0
-transformers==4.
+transformers==4.34.0
 numpy<2
-peft==0.
+peft==0.6.0
 huggingface-hub==0.17.0
-mistral-common
-pydantic>=2.6.1,<3.0.0
+git+https://github.com/mistralai/mistral-common.git@main
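As a quick sanity check of the updated pin set, the snippet below (a hypothetical helper script, not part of the Space) imports the stack and fetches only the adapter's PEFT config, which verifies Hub access and peft compatibility without downloading the 7B weights.

import fastapi, torch, transformers, peft
from peft import PeftConfig

# Report the versions actually installed from requirements.txt.
print("fastapi", fastapi.__version__)
print("torch", torch.__version__)
print("transformers", transformers.__version__)
print("peft", peft.__version__)

# Fetching the adapter config alone is a few KB; it confirms the repo ID is
# reachable and readable by this peft version.
cfg = PeftConfig.from_pretrained(
    "frankmorales2020/Mistral-7B-text-to-sql-flash-attention-2-dataeval"
)
print("adapter expects base model:", cfg.base_model_name_or_path)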