dar-tau committed on
Commit
f2d60cb
·
verified ·
1 Parent(s): 6402bfe

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -9
app.py CHANGED
@@ -12,27 +12,27 @@ MAX_PROMPT_TOKENS = 30
12
 
13
## info
# Registry of loadable models, keyed by their Hugging Face hub id.
# Each entry holds the loader keyword arguments (device placement, auth
# token, GGUF file selection, ...) together with the two prompt templates
# the app formats: one for the original generation and one for the
# interpretation pass (with the '[X]' placeholder).
model_info = {
    'meta-llama/Llama-2-7b-chat-hf': {
        'device_map': 'cpu',
        'token': os.environ['hf_token'],
        'original_prompt_template': '<s>[INST] {prompt} [/INST]',
        'interpretation_prompt_template': '<s>[INST] [X] [/INST] {prompt}',
    },  # , load_in_8bit=True

    'google/gemma-2b': {
        'device_map': 'cpu',
        'token': os.environ['hf_token'],
        'original_prompt_template': '<bos> {prompt}',
        'interpretation_prompt_template': '<bos>User: [X]\n\nAnswer: {prompt}',
    },

    'mistralai/Mistral-7B-Instruct-v0.2': {
        'device_map': 'cpu',
        'original_prompt_template': '<s>[INST] {prompt} [/INST]',
        'interpretation_prompt_template': '<s>[INST] [X] [/INST] {prompt}',
    },

    'TheBloke/Mistral-7B-Instruct-v0.2-GGUF': {
        'model_file': 'mistral-7b-instruct-v0.2.Q5_K_S.gguf',
        'tokenizer': 'mistralai/Mistral-7B-Instruct-v0.2',
        'model_type': 'llama',
        'hf': True,
        'ctransformers': True,
        'original_prompt_template': '<s>[INST] {prompt} [/INST]',
        'interpretation_prompt_template': '<s>[INST] [X] [/INST] {prompt}',
    },
}
37
 
38
 
 
12
 
13
## info
# Llama-2 / Mistral-Instruct share the same [INST] chat format; keep the
# templates in one place so the per-model entries cannot drift apart.
_INST_PROMPT = '<s>[INST] {prompt} [/INST]'
_INST_INTERPRETATION_PROMPT = '<s>[INST] [X] [/INST] {prompt}'

# Registry of loadable models, keyed by the display name shown in the UI.
# Each entry holds the loader keyword arguments (hub path, device placement,
# auth token, ...) plus two prompt templates the app formats: one for the
# original generation and one for the interpretation pass ('[X]' placeholder).
# NOTE(review): os.environ['hf_token'] raises KeyError at import time when the
# variable is unset — presumably an intentional fail-fast on Spaces; confirm
# this is acceptable for local runs.
model_info = {
    'LLAMA2-7B': dict(model_path='meta-llama/Llama-2-7b-chat-hf',
                      device_map='cpu', token=os.environ['hf_token'],
                      original_prompt_template=_INST_PROMPT,
                      interpretation_prompt_template=_INST_INTERPRETATION_PROMPT,
                      ),  # , load_in_8bit=True

    'Gemma-2B': dict(model_path='google/gemma-2b',
                     device_map='cpu', token=os.environ['hf_token'],
                     original_prompt_template='<bos> {prompt}',
                     interpretation_prompt_template='<bos>User: [X]\n\nAnswer: {prompt}',
                     ),

    'Mistral-7B Instruct': dict(model_path='mistralai/Mistral-7B-Instruct-v0.2',
                                device_map='cpu',
                                original_prompt_template=_INST_PROMPT,
                                interpretation_prompt_template=_INST_INTERPRETATION_PROMPT,
                                ),

    # GGUF/ctransformers variant, disabled in this commit; kept for reference.
    # 'TheBloke/Mistral-7B-Instruct-v0.2-GGUF': dict(model_file='mistral-7b-instruct-v0.2.Q5_K_S.gguf',
    #                                                tokenizer='mistralai/Mistral-7B-Instruct-v0.2',
    #                                                model_type='llama', hf=True, ctransformers=True,
    #                                                original_prompt_template='<s>[INST] {prompt} [/INST]',
    #                                                interpretation_prompt_template='<s>[INST] [X] [/INST] {prompt}',
    #                                                )
}
37
 
38