prateekbh committed
Commit f7085f0
1 Parent(s): 734cb58

Update app.py

Files changed (1)
  1. app.py +1 -5
app.py CHANGED
@@ -51,8 +51,7 @@ def getProductDetails(history, image):
     product_description=getImageDescription(image)
     system_prompt="You're a helpful e-commerce marketing assitant working on art products."
     prompt="Our product description is as follows: " + product_description + ". Please write a product title options for it."
-    history = interactWithModel(history, system_prompt, prompt)
-    yield history
+    interactWithModel(history, system_prompt, prompt)
 
 def interactWithModel(history, system_prompt, prompt):
     client = InferenceClient("google/gemma-7b-it")
@@ -70,13 +69,10 @@ def interactWithModel(history, system_prompt, prompt):
     formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
     stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
     output = ""
-
     for response in stream:
         output += response.token.text
         yield [(prompt, output)]
-    gr.Info('Gemma:' + product_description)
     history.append((prompt, output))
-    return history
 
 @torch.no_grad()
 def getImageDescription(image):
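
For reference, a minimal sketch (not part of this commit) of the streaming pattern that interactWithModel relies on, using huggingface_hub's InferenceClient. The format_prompt helper and the Gemma chat-turn tags below are assumptions inferred from the names in the diff, not the Space's actual implementation:

from huggingface_hub import InferenceClient

client = InferenceClient("google/gemma-7b-it")

def format_prompt(message, history):
    # Hypothetical helper matching the name used in the diff: wraps prior turns
    # and the new message in Gemma-style chat-turn tags.
    prompt = ""
    for user_turn, bot_turn in history:
        prompt += f"<start_of_turn>user\n{user_turn}<end_of_turn>\n"
        prompt += f"<start_of_turn>model\n{bot_turn}<end_of_turn>\n"
    prompt += f"<start_of_turn>user\n{message}<end_of_turn>\n<start_of_turn>model\n"
    return prompt

def interact_with_model(history, system_prompt, prompt):
    formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
    # stream=True with details=True yields token objects exposing .token.text
    stream = client.text_generation(
        formatted_prompt,
        max_new_tokens=256,
        stream=True,
        details=True,
        return_full_text=False,
    )
    output = ""
    for response in stream:
        output += response.token.text
        yield [(prompt, output)]  # partial (prompt, reply) pair for a Gradio Chatbot
    history.append((prompt, output))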