samidh committed (verified)
Commit f355584 · 1 parent: fea5a1d

Update app.py

Files changed (1): app.py (+2 −3)
app.py CHANGED
@@ -7,7 +7,7 @@ import torch
 import torch.nn.functional as F
 from peft import PeftConfig, PeftModel
 from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig
-"""
+
 device = 'cuda' if torch.cuda.is_available() else 'cpu'
 
 base_model_name = "google/gemma-2-9b"
@@ -27,7 +27,7 @@ model.merge_and_unload()
 model = model.to(device)
 
 tokenizer = AutoTokenizer.from_pretrained(base_model_name)
-"""
+
 PROMPT = """
 INSTRUCTIONS
 ============
@@ -126,7 +126,6 @@ iface = gr.Interface(
 gr.Textbox(label="Policy", lines=8, value=DEFAULT_POLICY)],
 outputs=[gr.Textbox(label="Result"),
 gr.Markdown("A BUNCH OF STUFF")],
-article="TEST CONTENT",
 title="Zentropi CoPE Demo",
 #description="See if the given content violates your given policy.",
 api_name=False
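
In short, the commit deletes the two stray `"""` markers that had wrapped the model-loading block in a string literal (re-enabling it) and drops the placeholder `article="TEST CONTENT"` kwarg from the Gradio interface. Below is a minimal sketch of what the re-enabled block plausibly does, assuming standard PEFT adapter loading; the adapter repository id and dtype are hypothetical, since the diff only shows the lines around the two hunks.

```python
# Hedged sketch of the loading block re-enabled by this commit. Only the
# identifiers visible in the hunks (base_model_name, merge_and_unload, the
# device check, the tokenizer call) come from app.py; the adapter repo id
# and dtype are hypothetical.
import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

device = 'cuda' if torch.cuda.is_available() else 'cpu'

base_model_name = "google/gemma-2-9b"
adapter_id = "your-org/your-cope-adapter"  # hypothetical: not visible in the diff

# Load the base model, attach the fine-tuned adapter, then fold the adapter
# weights into the base modules so inference uses a single merged model.
base_model = AutoModelForCausalLM.from_pretrained(
    base_model_name,
    torch_dtype=torch.bfloat16,  # assumed; app.py also imports BitsAndBytesConfig
)
model = PeftModel.from_pretrained(base_model, adapter_id)
model.merge_and_unload()  # as in the hunk context at line 27 of app.py
model = model.to(device)

tokenizer = AutoTokenizer.from_pretrained(base_model_name)
```

Calling `merge_and_unload()` folds the adapter weights into the base model, so the Gradio handler can generate without the PEFT wrapper's indirection.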