udayr commited on
Commit
ddf2513
·
verified ·
1 Parent(s): 012d5b6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -2
app.py CHANGED
@@ -1,14 +1,17 @@
1
  import streamlit as st
2
  from langchain.prompts import PromptTemplate
3
  from langchain_community.llms import CTransformers
 
4
 
5
-
 
 
6
 
7
  def getLLamaResponse(input_text,no_words,blog_style):
8
 
9
  # LLma Model
10
  llm = CTransformers(
11
- model="models/llama-2-7b-chat.ggmlv3.q8_0.bin",
12
  model_type="llma",
13
  config={"max_new_tokens": 256, "temperature": 0.01}
14
  )
 
1
  import streamlit as st
2
  from langchain.prompts import PromptTemplate
3
  from langchain_community.llms import CTransformers
4
+ from huggingface_hub import login
5
 
6
+ YOUR_HF_API_TOKEN = " "
7
+ login(token=YOUR_HF_API_TOKEN)
8
+ model_name = "TheBloke/llama-2-7b-chat.ggmlv3.q8_0.bin"
9
 
10
  def getLLamaResponse(input_text,no_words,blog_style):
11
 
12
  # Llama model
13
  llm = CTransformers(
14
+ model=model_name,
15
  model_type="llama",
16
  config={"max_new_tokens": 256, "temperature": 0.01}
17
  )