learningai committed on
Commit
9e13414
·
1 Parent(s): 57308e8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -2
app.py CHANGED
@@ -1,7 +1,7 @@
1
  import os
2
  from dotenv import load_dotenv
3
  from huggingface_hub import login
4
- from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
5
  import gradio as gr
6
 
7
  load_dotenv()
@@ -12,7 +12,17 @@ login(API_TOKEN)
12
 
13
  model_id = "meta-llama/Llama-2-7b-chat-hf"
14
 
15
- model = AutoModelForCausalLM.from_pretrained(model_id, load_in_4bit=True, device_map="auto")
 
 
 
 
 
 
 
 
 
 
16
  tokenizer = AutoTokenizer.from_pretrained(model_id)
17
 
18
  generate_text_pipeline = pipeline(
 
1
  import os
2
  from dotenv import load_dotenv
3
  from huggingface_hub import login
4
+ from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline, BitsAndBytesConfig
5
  import gradio as gr
6
 
7
  load_dotenv()
 
12
 
13
  model_id = "meta-llama/Llama-2-7b-chat-hf"
14
 
15
+ quantization_config = BitsAndBytesConfig(llm_int8_enable_fp32_cpu_offload=True)
16
+ device_map = {
17
+ "transformer.word_embeddings": 0,
18
+ "transformer.word_embeddings_layernorm": 0,
19
+ "lm_head": "cpu",
20
+ "transformer.h": 0,
21
+ "transformer.ln_f": 0,
22
+ }
23
+
24
+
25
+ model = AutoModelForCausalLM.from_pretrained(model_id, device_map=device_map,quantization_config=quantization_config)
26
  tokenizer = AutoTokenizer.from_pretrained(model_id)
27
 
28
  generate_text_pipeline = pipeline(