karthik18AI committed on
Commit c37a02b · verified · 1 Parent(s): e61910a

Update app.py

Files changed (1)
  1. app.py +8 -48
app.py CHANGED
@@ -1,51 +1,11 @@
- from huggingface_hub import InferenceClient

- client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.3")

- def format_prompt(message, history, system_prompt=None):
-     prompt = "<s>"
-     for user_prompt, bot_response in history:
-         prompt += f"[INST] {user_prompt} [/INST]"
-         prompt += f" {bot_response}</s> "
-     if system_prompt:
-         prompt += f"[SYS] {system_prompt} [/SYS]"
-     prompt += f"[INST] {message} [/INST]"
-     return prompt

- def generate(
-     prompt, history, system_prompt=None, temperature=0.2, max_new_tokens=1024, top_p=0.95, repetition_penalty=1.0,
- ):
-     temperature = float(temperature)
-     if temperature < 1e-2:
-         temperature = 1e-2
-     top_p = float(top_p)
-
-     generate_kwargs = dict(
-         temperature=temperature,
-         max_new_tokens=max_new_tokens,
-         top_p=top_p,
-         repetition_penalty=repetition_penalty,
-         do_sample=True,
-         seed=42,
-     )
-
-     formatted_prompt = format_prompt(prompt, history, system_prompt)
-
-     stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
-     output = ""
-
-     for response in stream:
-         output += response.token.text
-         print(output, end='', flush=True)
-     return output
-
- if __name__ == "__main__":
-     history = []
-     system_prompt = None
-     while True:
-         user_input = input("You: ")
-         if user_input.lower() in ["exit", "quit"]:
-             break
-         response = generate(user_input, history, system_prompt)
-         history.append((user_input, response))
-         print(f"\nBot: {response}\n")

+ huggingface-cli login

+ # Use a pipeline as a high-level helper
+ from transformers import pipeline

+ messages = [
+     {"role": "user", "content": "Who are you?"},
+ ]
+ pipe = pipeline("text-generation", model="mistralai/Mistral-7B-Instruct-v0.3")
+ pipe(messages)
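
Note that the new app.py pastes the model-card quickstart snippet verbatim: `huggingface-cli login` is a shell command, not Python, so the committed file would raise a SyntaxError when run. Below is a minimal runnable sketch of the same pipeline-based approach, assuming authentication is handled outside the script (for example via an HF_TOKEN environment variable or a prior `huggingface-cli login` in a terminal, since the model repository may require an access token); `max_new_tokens=256` is an illustrative choice, not part of the commit.

# Minimal runnable sketch; assumes the Hugging Face token is already configured
# outside this file (HF_TOKEN env var or a prior `huggingface-cli login`).
from transformers import pipeline

# Use a pipeline as a high-level helper
pipe = pipeline("text-generation", model="mistralai/Mistral-7B-Instruct-v0.3")

messages = [
    {"role": "user", "content": "Who are you?"},
]

# The text-generation pipeline accepts chat-style messages and returns the
# conversation with the model's reply included under "generated_text".
outputs = pipe(messages, max_new_tokens=256)
print(outputs[0]["generated_text"])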