tien314 committed
Commit 307dfae · verified · 1 parent: 945ec60

Update app.py

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -34,8 +34,8 @@ def load_model():
     #device = "cuda" if torch.cuda.is_available() else "cpu"

     #llm = OllamaLLM(model="gemma2", temperature=0, device=device)
-    api_key = "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"
-
+    #api_key = "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"
+    api_key = "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"
     llm = ChatGroq(model = "llama-3.1-70b-versatile", temperature = 0, api_key = api_key)
     chain = prompt|llm
     return chain
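
For context, here is a minimal sketch (not part of the commit) of the same ChatGroq setup with the key read from an environment variable rather than hardcoded in app.py. The GROQ_API_KEY variable name and the load_model(prompt) signature are assumptions for illustration; prompt stands for the prompt template built earlier in load_model.

import os
from langchain_groq import ChatGroq

def load_model(prompt):
    # Assumed environment variable; the actual commit hardcodes the key in app.py.
    api_key = os.environ["GROQ_API_KEY"]
    llm = ChatGroq(model="llama-3.1-70b-versatile", temperature=0, api_key=api_key)
    chain = prompt | llm
    return chain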