Update app.py
app.py CHANGED
@@ -13,7 +13,7 @@ MAX_PROMPT_TOKENS = 30
 ## info
 model_info = {
     'meta-llama/Llama-2-7b-chat-hf': dict(device_map='cpu', token=os.environ['hf_token'],
-                                          original_prompt_template='<s>[INST] {prompt}',
+                                          original_prompt_template='<s>[INST] {prompt} [/INST]',
                                           interpretation_prompt_template='<s>[INST] [X] [/INST] {prompt}',
                                           ), # , load_in_8bit=True
 
@@ -111,6 +111,11 @@ css = '''
     margin-top: 10px;
     background-color: pink;
 }
+.bubble > textarea{
+    border: none;
+    background-color: inherit;
+}
+
 '''
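For context, the one-line change appends the closing [/INST] tag so the stored template matches the Llama-2 chat prompt format, where the model begins its reply only after [/INST]. A minimal sketch of how such a template would be filled; the build_prompt helper is illustrative and not part of app.py:

# Illustrative only -- app.py's actual formatting code is not shown in this diff.
original_prompt_template = '<s>[INST] {prompt} [/INST]'  # the corrected template

def build_prompt(prompt: str) -> str:
    # Substitute the user's text into the Llama-2 chat template.
    return original_prompt_template.format(prompt=prompt)

print(build_prompt('Why is the sky blue?'))
# <s>[INST] Why is the sky blue? [/INST]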