sarim committed on
Commit f3762f9 · 1 Parent(s): d58ef6e

use llama model

Files changed (1)
  1. app.py +11 -0
app.py CHANGED
@@ -9,6 +9,8 @@ import planet_model as customModel
 from typing import List, Dict
 import constants
 import os
+from ollama import chat
+from ollama import ChatResponse

 api_key = os.getenv("api_key")

@@ -86,6 +88,15 @@ async def main():
     duration = ""
     purpose = ""
     interests = ""
+    response: ChatResponse = chat(model='llama3.2', messages=[
+        {
+            'role': 'user',
+            'content': 'Why is the sky blue?',
+        },
+    ])
+    print(response['message']['content'])
+    # or access fields directly from the response object
+    print(response.message.content)


 if __name__ == '__main__':
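
Note: the added call sends a hard-coded test prompt ("Why is the sky blue?") rather than the trip-planning fields (duration, purpose, interests) collected just above it. A minimal sketch of how those fields could feed the same ollama chat API follows; the build_prompt helper and the ask_llama wrapper are illustrative assumptions, not part of this commit.

# Illustrative sketch only, not part of this commit. It reuses the ollama
# chat API added above; the prompt construction is an assumption about how
# the trip fields might be wired in.
from ollama import chat, ChatResponse


def build_prompt(duration: str, purpose: str, interests: str) -> str:
    # Hypothetical helper: fold the collected trip fields into one prompt.
    return (
        f"Plan a trip lasting {duration}. "
        f"The purpose of the trip is {purpose}, "
        f"and the traveller is interested in {interests}."
    )


def ask_llama(duration: str, purpose: str, interests: str) -> str:
    response: ChatResponse = chat(
        model='llama3.2',
        messages=[
            {'role': 'user', 'content': build_prompt(duration, purpose, interests)},
        ],
    )
    # Either access style from the diff works; attribute access is used here.
    return response.message.content


if __name__ == '__main__':
    print(ask_llama("5 days", "a family vacation", "hiking and local food"))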