datacipen committed
Commit b85cd0d · verified · 1 Parent(s): ad741ab

Update main.py

Files changed (1)
  1. main.py +6 -2
main.py CHANGED
@@ -122,11 +122,15 @@ def Connexion_Mistral():
     #endpoint = "https://models.inference.ai.azure.com"
     #return Mistral(api_key=os.environ["GITHUB_TOKEN"], server_url=endpoint)
     #repo_id = "mistralai/Mistral-7B-Instruct-v0.2"
-    repo_id = "Qwen/Qwen2.5-3B-Instruct"
+    #repo_id = "Qwen/Qwen2.5-3B-Instruct"
+    repo_id = "HuggingFaceH4/zephyr-7b-beta"
+    #repo_id = "HuggingFaceH4/zephyr-7b-alpha"
+    #repo_id = "tiiuae/falcon-7b-instruct"
+    #repo_id = "HuggingFaceTB/SmolLM2-1.7B-Instruct"
     #repo_id = "microsoft/Phi-3.5-mini-instruct"
     #repo_id = "meta-llama/Llama-3.2-3B-Instruct"
     llm = HuggingFaceEndpoint(
-        repo_id=repo_id, max_new_tokens=1024, temperature=0.1, task="text2text-generation", streaming=True
+        repo_id=repo_id, max_new_tokens=3000, temperature=0.1, task="text2text-generation", streaming=True
     )
     return llm

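For context, below is a minimal sketch of the function after this commit, together with one way to consume the streaming endpoint. Only the lines shown in the diff come from main.py; the import path (langchain_huggingface), the __main__ usage block, and the example prompt are assumptions for illustration, and the endpoint still needs a Hugging Face API token (HUGGINGFACEHUB_API_TOKEN) in the environment.

# Minimal sketch, not the repository's actual code beyond the diffed lines.
from langchain_huggingface import HuggingFaceEndpoint  # assumed import; not visible in this diff


def Connexion_Mistral():
    # Model selected by this commit; the other candidates remain commented out upstream.
    repo_id = "HuggingFaceH4/zephyr-7b-beta"
    llm = HuggingFaceEndpoint(
        repo_id=repo_id,
        max_new_tokens=3000,          # raised from 1024 in this commit
        temperature=0.1,
        task="text2text-generation",  # kept as committed
        streaming=True,
    )
    return llm


if __name__ == "__main__":
    llm = Connexion_Mistral()
    # Stream chunks as they arrive from the inference endpoint (hypothetical prompt).
    for chunk in llm.stream("Summarize the benefits of streaming responses."):
        print(chunk, end="", flush=True)

Raising max_new_tokens to 3000 allows noticeably longer completions; with streaming=True the chunks are emitted incrementally, so the larger limit does not delay the first token.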