Creating transformers_setup module
- app.py +1 -8
- transformers_setup.py +11 -0
app.py
CHANGED
@@ -5,17 +5,10 @@ import streamlit as st
 import os
 from trainer import train
 from tester import test
-import transformers
-from transformers import TFAutoModelForCausalLM, AutoTokenizer
+from transformers_setup import pipeline  # Import the pipeline from the setup module
 
 
 def main():
-    model_name = "tiiuae/falcon-7b-instruct"
-    model = TFAutoModelForCausalLM.from_pretrained(model_name)
-    tokenizer = AutoTokenizer.from_pretrained(model_name)
-    pipeline = transformers.pipeline("text-generation", model=model, tokenizer=tokenizer, max_length=100,
-                                     temperature=0.7)
-
     st.title("Beyond the Anti-Jam: Integration of DRL with LLM")
 
     st.sidebar.header("Make Your Environment Configuration")
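For orientation, here is a minimal sketch of how the pipeline imported above might be consumed inside main(); the prompt widget and output handling are assumptions for illustration, not part of this commit.

# Hypothetical usage of the imported pipeline inside main() (illustration only).
import streamlit as st
from transformers_setup import pipeline  # built once when the module is first imported

def main():
    st.title("Beyond the Anti-Jam: Integration of DRL with LLM")
    prompt = st.text_input("Ask the LLM about the current anti-jamming run")  # assumed widget
    if prompt:
        outputs = pipeline(prompt)  # text-generation pipelines return a list of dicts
        st.write(outputs[0]["generated_text"])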
transformers_setup.py
ADDED
@@ -0,0 +1,11 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+
+import transformers
+from transformers import TFAutoModelForCausalLM, AutoTokenizer
+
+model_name = "tiiuae/falcon-7b-instruct"
+model = TFAutoModelForCausalLM.from_pretrained(model_name)
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+pipeline = transformers.pipeline("text-generation", model=model, tokenizer=tokenizer, max_length=100, temperature=0.7)
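As a sanity check, a short sketch of how this module-level pipeline could be exercised on its own; the prompt string is illustrative, and the example assumes the checkpoint loads successfully via TFAutoModelForCausalLM.

# Illustration only: calling the module-level pipeline defined above.
# Assumes the falcon-7b-instruct checkpoint loads with TFAutoModelForCausalLM.
from transformers_setup import pipeline

prompt = "Summarize the agent's anti-jamming strategy in one sentence."  # example prompt
outputs = pipeline(prompt)           # text-generation pipelines return a list of dicts
print(outputs[0]["generated_text"])  # generated continuation (includes the prompt by default)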