prateekbh committed on
Commit
c434d8b
1 Parent(s): f617eac

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -43,7 +43,7 @@ def getProductDetails(history, image):
43
  clients = InferenceClient("google/gemma-7b")
44
  rand_val = random.randint(1, 1111111111111111)
45
  if not history:
46
- history = [[{"role": "system", "content": "You are a helpful assistant."}], [{"role": "user", "content": "hello"}]]
47
  generate_kwargs = dict(
48
  temperature=0.67,
49
  max_new_tokens=1024,
@@ -54,8 +54,8 @@ def getProductDetails(history, image):
54
  )
55
  system_prompt="you're a helpful e-commerce marketting assitant"
56
  prompt="Write me a poem"
57
- # formatted_prompt = self.format_prompt(f"{system_prompt}, {prompt}", history)
58
- # stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=stream_output, details=True, return_full_text=False)
59
  # output = ""
60
 
61
  # for response in stream:
 
43
  clients = InferenceClient("google/gemma-7b")
44
  rand_val = random.randint(1, 1111111111111111)
45
  if not history:
46
+ history = [[{"role": "system", "content": "You are a helpful assistant."}, {"role": "user", "content": "hello"}]]
47
  generate_kwargs = dict(
48
  temperature=0.67,
49
  max_new_tokens=1024,
 
54
  )
55
  system_prompt="you're a helpful e-commerce marketting assitant"
56
  prompt="Write me a poem"
57
+ formatted_prompt = self.format_prompt(f"{system_prompt}, {prompt}", history)
58
+ stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=stream_output, details=True, return_full_text=False)
59
  # output = ""
60
 
61
  # for response in stream: