accforus committed on
Commit
49e0a75
·
verified ·
1 Parent(s): c6808a2

Upload 5 files

Browse files
Files changed (5) hide show
  1. Dockerfile +2 -2
  2. README.md +3 -3
  3. entrypoint.sh +20 -20
  4. gitattributes +35 -0
  5. ollama-api-demo.ipynb +220 -220
Dockerfile CHANGED
@@ -26,8 +26,8 @@ RUN mkdir -p /.ollama && chmod 777 /.ollama
26
  WORKDIR /.ollama
27
 
28
  # Copy the entry point script
29
- COPY entrypoint.sh /.ollama/entrypoint.sh
30
- RUN chmod +x /.ollama/entrypoint.sh
31
 
32
  # Set the entry point script as the default command
33
  ENTRYPOINT ["/entrypoint.sh"]
 
26
  WORKDIR /.ollama
27
 
28
  # Copy the entry point script
29
+ COPY entrypoint.sh /entrypoint.sh
30
+ RUN chmod +x /entrypoint.sh
31
 
32
  # Set the entry point script as the default command
33
  ENTRYPOINT ["/entrypoint.sh"]
README.md CHANGED
@@ -1,7 +1,7 @@
1
  ---
2
- title: Ollama API 3
3
- emoji: 📈
4
- colorFrom: red
5
  colorTo: purple
6
  sdk: docker
7
  pinned: false
 
1
  ---
2
+ title: Ollama Server API
3
+ emoji:
4
+ colorFrom: purple
5
  colorTo: purple
6
  sdk: docker
7
  pinned: false
entrypoint.sh CHANGED
@@ -1,20 +1,20 @@
1
- #!/bin/bash
2
-
3
- # Starting server
4
- echo "Starting server"
5
- ollama serve &
6
- sleep 1
7
-
8
- # Splitting the models by comma and pulling each
9
- IFS=',' read -ra MODELS <<< "$model"
10
- for m in "${MODELS[@]}"; do
11
- echo "Pulling $m"
12
- ollama pull "$m"
13
- sleep 5
14
- # echo "Running $m"
15
- # ollama run "$m"
16
- # No need to sleep here unless you want to give some delay between each pull for some reason
17
- done
18
-
19
- # Keep the script running to prevent the container from exiting
20
- wait
 
1
+ #!/bin/bash
2
+
3
+ # Starting server
4
+ echo "Starting server"
5
+ ollama serve &
6
+ sleep 1
7
+
8
+ # Splitting the models by comma and pulling each
9
+ IFS=',' read -ra MODELS <<< "$model"
10
+ for m in "${MODELS[@]}"; do
11
+ echo "Pulling $m"
12
+ ollama pull "$m"
13
+ sleep 5
14
+ # echo "Running $m"
15
+ # ollama run "$m"
16
+ # No need to sleep here unless you want to give some delay between each pull for some reason
17
+ done
18
+
19
+ # Keep the script running to prevent the container from exiting
20
+ wait
gitattributes ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ *.7z filter=lfs diff=lfs merge=lfs -text
2
+ *.arrow filter=lfs diff=lfs merge=lfs -text
3
+ *.bin filter=lfs diff=lfs merge=lfs -text
4
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
5
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
6
+ *.ftz filter=lfs diff=lfs merge=lfs -text
7
+ *.gz filter=lfs diff=lfs merge=lfs -text
8
+ *.h5 filter=lfs diff=lfs merge=lfs -text
9
+ *.joblib filter=lfs diff=lfs merge=lfs -text
10
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
+ *.model filter=lfs diff=lfs merge=lfs -text
13
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
14
+ *.npy filter=lfs diff=lfs merge=lfs -text
15
+ *.npz filter=lfs diff=lfs merge=lfs -text
16
+ *.onnx filter=lfs diff=lfs merge=lfs -text
17
+ *.ot filter=lfs diff=lfs merge=lfs -text
18
+ *.parquet filter=lfs diff=lfs merge=lfs -text
19
+ *.pb filter=lfs diff=lfs merge=lfs -text
20
+ *.pickle filter=lfs diff=lfs merge=lfs -text
21
+ *.pkl filter=lfs diff=lfs merge=lfs -text
22
+ *.pt filter=lfs diff=lfs merge=lfs -text
23
+ *.pth filter=lfs diff=lfs merge=lfs -text
24
+ *.rar filter=lfs diff=lfs merge=lfs -text
25
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
26
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
28
+ *.tar filter=lfs diff=lfs merge=lfs -text
29
+ *.tflite filter=lfs diff=lfs merge=lfs -text
30
+ *.tgz filter=lfs diff=lfs merge=lfs -text
31
+ *.wasm filter=lfs diff=lfs merge=lfs -text
32
+ *.xz filter=lfs diff=lfs merge=lfs -text
33
+ *.zip filter=lfs diff=lfs merge=lfs -text
34
+ *.zst filter=lfs diff=lfs merge=lfs -text
35
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
ollama-api-demo.ipynb CHANGED
@@ -1,220 +1,220 @@
1
- {
2
- "cells": [
3
- {
4
- "attachments": {},
5
- "cell_type": "markdown",
6
- "metadata": {},
7
- "source": [
8
- "### Dependencies"
9
- ]
10
- },
11
- {
12
- "cell_type": "code",
13
- "execution_count": null,
14
- "metadata": {},
15
- "outputs": [],
16
- "source": [
17
- "%pip install openai --upgrade"
18
- ]
19
- },
20
- {
21
- "attachments": {},
22
- "cell_type": "markdown",
23
- "metadata": {},
24
- "source": [
25
- "## API Response"
26
- ]
27
- },
28
- {
29
- "cell_type": "code",
30
- "execution_count": 68,
31
- "metadata": {},
32
- "outputs": [
33
- {
34
- "name": "stdout",
35
- "output_type": "stream",
36
- "text": [
37
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:42:59.839736985Z\",\"response\":\"```\",\"done\":false}\n",
38
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:42:59.859007873Z\",\"response\":\"\\n\",\"done\":false}\n",
39
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:42:59.878431213Z\",\"response\":\"def\",\"done\":false}\n",
40
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:42:59.897784641Z\",\"response\":\" add\",\"done\":false}\n",
41
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:42:59.91718876Z\",\"response\":\"(\",\"done\":false}\n",
42
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:42:59.936866527Z\",\"response\":\"a\",\"done\":false}\n",
43
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:42:59.95776024Z\",\"response\":\",\",\"done\":false}\n",
44
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:42:59.979133947Z\",\"response\":\" b\",\"done\":false}\n",
45
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.000494731Z\",\"response\":\"):\",\"done\":false}\n",
46
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.021318934Z\",\"response\":\"\\n\",\"done\":false}\n",
47
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.041779731Z\",\"response\":\" \",\"done\":false}\n",
48
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.062190588Z\",\"response\":\" return\",\"done\":false}\n",
49
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.082505875Z\",\"response\":\" a\",\"done\":false}\n",
50
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.102662719Z\",\"response\":\" +\",\"done\":false}\n",
51
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.122760355Z\",\"response\":\" b\",\"done\":false}\n",
52
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.142907745Z\",\"response\":\"\\n\",\"done\":false}\n",
53
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.163285108Z\",\"response\":\"```\",\"done\":false}\n",
54
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.18370624Z\",\"response\":\"\\n\",\"done\":false}\n",
55
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.203963933Z\",\"response\":\"Example\",\"done\":false}\n",
56
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.224025854Z\",\"response\":\" usage\",\"done\":false}\n",
57
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.244386112Z\",\"response\":\":\",\"done\":false}\n",
58
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.264846213Z\",\"response\":\"\\n\",\"done\":false}\n",
59
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.285448321Z\",\"response\":\"```\",\"done\":false}\n",
60
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.305657169Z\",\"response\":\"\\n\",\"done\":false}\n",
61
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.325782131Z\",\"response\":\"print\",\"done\":false}\n",
62
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.346353022Z\",\"response\":\"(\",\"done\":false}\n",
63
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.366430166Z\",\"response\":\"add\",\"done\":false}\n",
64
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.386881006Z\",\"response\":\"(\",\"done\":false}\n",
65
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.406680624Z\",\"response\":\"3\",\"done\":false}\n",
66
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.426827031Z\",\"response\":\",\",\"done\":false}\n",
67
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.447157302Z\",\"response\":\" \",\"done\":false}\n",
68
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.467234406Z\",\"response\":\"5\",\"done\":false}\n",
69
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.487442969Z\",\"response\":\"))\",\"done\":false}\n",
70
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.50753674Z\",\"response\":\" #\",\"done\":false}\n",
71
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.527739408Z\",\"response\":\" Output\",\"done\":false}\n",
72
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.54789446Z\",\"response\":\":\",\"done\":false}\n",
73
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.568672362Z\",\"response\":\" \",\"done\":false}\n",
74
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.591076535Z\",\"response\":\"8\",\"done\":false}\n",
75
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.614757129Z\",\"response\":\"\\n\",\"done\":false}\n",
76
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.637841098Z\",\"response\":\"```\",\"done\":false}\n",
77
- "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.660407109Z\",\"response\":\"\",\"done\":true,\"context\":[518,25580,29962,3532,14816,29903,29958,5299,829,14816,29903,6778,13,13,6113,5132,775,304,788,29871,29906,3694,518,29914,25580,29962,13,28956,13,1753,788,29898,29874,29892,289,1125,13,1678,736,263,718,289,13,28956,13,14023,8744,29901,13,28956,13,2158,29898,1202,29898,29941,29892,29871,29945,876,396,10604,29901,29871,29947,13,28956],\"total_duration\":10037918982,\"load_duration\":9097178085,\"prompt_eval_count\":28,\"prompt_eval_duration\":119308000,\"eval_count\":41,\"eval_duration\":820449000}\n"
78
- ]
79
- }
80
- ],
81
- "source": [
82
- "!curl https://thewise-ollama-server.hf.space/api/generate -d '''{\"model\": \"codellama\",\"prompt\":\"Write Python code to add 2 numbers\"}'''"
83
- ]
84
- },
85
- {
86
- "attachments": {},
87
- "cell_type": "markdown",
88
- "metadata": {},
89
- "source": [
90
- "## Langchain Demo"
91
- ]
92
- },
93
- {
94
- "cell_type": "code",
95
- "execution_count": 69,
96
- "metadata": {},
97
- "outputs": [],
98
- "source": [
99
- "from langchain.llms import Ollama\n",
100
- "from langchain.callbacks.manager import CallbackManager\n",
101
- "from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler"
102
- ]
103
- },
104
- {
105
- "attachments": {},
106
- "cell_type": "markdown",
107
- "metadata": {},
108
- "source": [
109
- "##### CODELLAMA"
110
- ]
111
- },
112
- {
113
- "cell_type": "code",
114
- "execution_count": 70,
115
- "metadata": {},
116
- "outputs": [
117
- {
118
- "name": "stdout",
119
- "output_type": "stream",
120
- "text": [
121
- "```\n",
122
- "def add(a, b):\n",
123
- " return a + b\n",
124
- "```\n",
125
- "This function takes two arguments `a` and `b`, adds them together, and returns the result. You can call this function by passing in two numbers, like this:\n",
126
- "```\n",
127
- "print(add(3, 5)) # prints 8\n",
128
- "```"
129
- ]
130
- },
131
- {
132
- "data": {
133
- "text/plain": [
134
- "'```\\ndef add(a, b):\\n return a + b\\n```\\nThis function takes two arguments `a` and `b`, adds them together, and returns the result. You can call this function by passing in two numbers, like this:\\n```\\nprint(add(3, 5)) # prints 8\\n```'"
135
- ]
136
- },
137
- "execution_count": 70,
138
- "metadata": {},
139
- "output_type": "execute_result"
140
- }
141
- ],
142
- "source": [
143
- "llm = Ollama(\n",
144
- " model=\"codellama\",\n",
145
- " base_url=\"https://thewise-ollama-server.hf.space\",\n",
146
- " callback_manager=CallbackManager([StreamingStdOutCallbackHandler()]))\n",
147
- "\n",
148
- "llm('Write Python code to add 2 numbers')"
149
- ]
150
- },
151
- {
152
- "attachments": {},
153
- "cell_type": "markdown",
154
- "metadata": {},
155
- "source": [
156
- "##### LLAMA2"
157
- ]
158
- },
159
- {
160
- "cell_type": "code",
161
- "execution_count": 71,
162
- "metadata": {},
163
- "outputs": [
164
- {
165
- "name": "stdout",
166
- "output_type": "stream",
167
- "text": [
168
- "```\n",
169
- "# Adding two numbers\n",
170
- "a = 5\n",
171
- "b = 3\n",
172
- "result = a + b\n",
173
- "print(result) # Output: 8\n",
174
- "```"
175
- ]
176
- },
177
- {
178
- "data": {
179
- "text/plain": [
180
- "'```\\n# Adding two numbers\\na = 5\\nb = 3\\nresult = a + b\\nprint(result) # Output: 8\\n```'"
181
- ]
182
- },
183
- "execution_count": 71,
184
- "metadata": {},
185
- "output_type": "execute_result"
186
- }
187
- ],
188
- "source": [
189
- "llm = Ollama(\n",
190
- " model=\"llama2\",\n",
191
- " base_url=\"https://thewise-ollama-server.hf.space\",\n",
192
- " callback_manager=CallbackManager([StreamingStdOutCallbackHandler()]))\n",
193
- "\n",
194
- "llm('Write Python code to add 2 numbers')"
195
- ]
196
- }
197
- ],
198
- "metadata": {
199
- "kernelspec": {
200
- "display_name": "langchain",
201
- "language": "python",
202
- "name": "python3"
203
- },
204
- "language_info": {
205
- "codemirror_mode": {
206
- "name": "ipython",
207
- "version": 3
208
- },
209
- "file_extension": ".py",
210
- "mimetype": "text/x-python",
211
- "name": "python",
212
- "nbconvert_exporter": "python",
213
- "pygments_lexer": "ipython3",
214
- "version": "3.11.4"
215
- },
216
- "orig_nbformat": 4
217
- },
218
- "nbformat": 4,
219
- "nbformat_minor": 2
220
- }
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "attachments": {},
5
+ "cell_type": "markdown",
6
+ "metadata": {},
7
+ "source": [
8
+ "### Dependencies"
9
+ ]
10
+ },
11
+ {
12
+ "cell_type": "code",
13
+ "execution_count": null,
14
+ "metadata": {},
15
+ "outputs": [],
16
+ "source": [
17
+ "%pip install openai --upgrade"
18
+ ]
19
+ },
20
+ {
21
+ "attachments": {},
22
+ "cell_type": "markdown",
23
+ "metadata": {},
24
+ "source": [
25
+ "## API Response"
26
+ ]
27
+ },
28
+ {
29
+ "cell_type": "code",
30
+ "execution_count": 68,
31
+ "metadata": {},
32
+ "outputs": [
33
+ {
34
+ "name": "stdout",
35
+ "output_type": "stream",
36
+ "text": [
37
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:42:59.839736985Z\",\"response\":\"```\",\"done\":false}\n",
38
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:42:59.859007873Z\",\"response\":\"\\n\",\"done\":false}\n",
39
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:42:59.878431213Z\",\"response\":\"def\",\"done\":false}\n",
40
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:42:59.897784641Z\",\"response\":\" add\",\"done\":false}\n",
41
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:42:59.91718876Z\",\"response\":\"(\",\"done\":false}\n",
42
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:42:59.936866527Z\",\"response\":\"a\",\"done\":false}\n",
43
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:42:59.95776024Z\",\"response\":\",\",\"done\":false}\n",
44
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:42:59.979133947Z\",\"response\":\" b\",\"done\":false}\n",
45
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.000494731Z\",\"response\":\"):\",\"done\":false}\n",
46
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.021318934Z\",\"response\":\"\\n\",\"done\":false}\n",
47
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.041779731Z\",\"response\":\" \",\"done\":false}\n",
48
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.062190588Z\",\"response\":\" return\",\"done\":false}\n",
49
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.082505875Z\",\"response\":\" a\",\"done\":false}\n",
50
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.102662719Z\",\"response\":\" +\",\"done\":false}\n",
51
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.122760355Z\",\"response\":\" b\",\"done\":false}\n",
52
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.142907745Z\",\"response\":\"\\n\",\"done\":false}\n",
53
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.163285108Z\",\"response\":\"```\",\"done\":false}\n",
54
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.18370624Z\",\"response\":\"\\n\",\"done\":false}\n",
55
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.203963933Z\",\"response\":\"Example\",\"done\":false}\n",
56
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.224025854Z\",\"response\":\" usage\",\"done\":false}\n",
57
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.244386112Z\",\"response\":\":\",\"done\":false}\n",
58
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.264846213Z\",\"response\":\"\\n\",\"done\":false}\n",
59
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.285448321Z\",\"response\":\"```\",\"done\":false}\n",
60
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.305657169Z\",\"response\":\"\\n\",\"done\":false}\n",
61
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.325782131Z\",\"response\":\"print\",\"done\":false}\n",
62
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.346353022Z\",\"response\":\"(\",\"done\":false}\n",
63
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.366430166Z\",\"response\":\"add\",\"done\":false}\n",
64
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.386881006Z\",\"response\":\"(\",\"done\":false}\n",
65
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.406680624Z\",\"response\":\"3\",\"done\":false}\n",
66
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.426827031Z\",\"response\":\",\",\"done\":false}\n",
67
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.447157302Z\",\"response\":\" \",\"done\":false}\n",
68
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.467234406Z\",\"response\":\"5\",\"done\":false}\n",
69
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.487442969Z\",\"response\":\"))\",\"done\":false}\n",
70
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.50753674Z\",\"response\":\" #\",\"done\":false}\n",
71
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.527739408Z\",\"response\":\" Output\",\"done\":false}\n",
72
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.54789446Z\",\"response\":\":\",\"done\":false}\n",
73
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.568672362Z\",\"response\":\" \",\"done\":false}\n",
74
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.591076535Z\",\"response\":\"8\",\"done\":false}\n",
75
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.614757129Z\",\"response\":\"\\n\",\"done\":false}\n",
76
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.637841098Z\",\"response\":\"```\",\"done\":false}\n",
77
+ "{\"model\":\"codellama\",\"created_at\":\"2024-03-20T17:43:00.660407109Z\",\"response\":\"\",\"done\":true,\"context\":[518,25580,29962,3532,14816,29903,29958,5299,829,14816,29903,6778,13,13,6113,5132,775,304,788,29871,29906,3694,518,29914,25580,29962,13,28956,13,1753,788,29898,29874,29892,289,1125,13,1678,736,263,718,289,13,28956,13,14023,8744,29901,13,28956,13,2158,29898,1202,29898,29941,29892,29871,29945,876,396,10604,29901,29871,29947,13,28956],\"total_duration\":10037918982,\"load_duration\":9097178085,\"prompt_eval_count\":28,\"prompt_eval_duration\":119308000,\"eval_count\":41,\"eval_duration\":820449000}\n"
78
+ ]
79
+ }
80
+ ],
81
+ "source": [
82
+ "!curl https://thewise-ollama-server.hf.space/api/generate -d '''{\"model\": \"codellama\",\"prompt\":\"Write Python code to add 2 numbers\"}'''"
83
+ ]
84
+ },
85
+ {
86
+ "attachments": {},
87
+ "cell_type": "markdown",
88
+ "metadata": {},
89
+ "source": [
90
+ "## Langchain Demo"
91
+ ]
92
+ },
93
+ {
94
+ "cell_type": "code",
95
+ "execution_count": 69,
96
+ "metadata": {},
97
+ "outputs": [],
98
+ "source": [
99
+ "from langchain.llms import Ollama\n",
100
+ "from langchain.callbacks.manager import CallbackManager\n",
101
+ "from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler"
102
+ ]
103
+ },
104
+ {
105
+ "attachments": {},
106
+ "cell_type": "markdown",
107
+ "metadata": {},
108
+ "source": [
109
+ "##### CODELLAMA"
110
+ ]
111
+ },
112
+ {
113
+ "cell_type": "code",
114
+ "execution_count": 70,
115
+ "metadata": {},
116
+ "outputs": [
117
+ {
118
+ "name": "stdout",
119
+ "output_type": "stream",
120
+ "text": [
121
+ "```\n",
122
+ "def add(a, b):\n",
123
+ " return a + b\n",
124
+ "```\n",
125
+ "This function takes two arguments `a` and `b`, adds them together, and returns the result. You can call this function by passing in two numbers, like this:\n",
126
+ "```\n",
127
+ "print(add(3, 5)) # prints 8\n",
128
+ "```"
129
+ ]
130
+ },
131
+ {
132
+ "data": {
133
+ "text/plain": [
134
+ "'```\\ndef add(a, b):\\n return a + b\\n```\\nThis function takes two arguments `a` and `b`, adds them together, and returns the result. You can call this function by passing in two numbers, like this:\\n```\\nprint(add(3, 5)) # prints 8\\n```'"
135
+ ]
136
+ },
137
+ "execution_count": 70,
138
+ "metadata": {},
139
+ "output_type": "execute_result"
140
+ }
141
+ ],
142
+ "source": [
143
+ "llm = Ollama(\n",
144
+ " model=\"codellama\",\n",
145
+ " base_url=\"https://thewise-ollama-server.hf.space\",\n",
146
+ " callback_manager=CallbackManager([StreamingStdOutCallbackHandler()]))\n",
147
+ "\n",
148
+ "llm('Write Python code to add 2 numbers')"
149
+ ]
150
+ },
151
+ {
152
+ "attachments": {},
153
+ "cell_type": "markdown",
154
+ "metadata": {},
155
+ "source": [
156
+ "##### LLAMA2"
157
+ ]
158
+ },
159
+ {
160
+ "cell_type": "code",
161
+ "execution_count": 71,
162
+ "metadata": {},
163
+ "outputs": [
164
+ {
165
+ "name": "stdout",
166
+ "output_type": "stream",
167
+ "text": [
168
+ "```\n",
169
+ "# Adding two numbers\n",
170
+ "a = 5\n",
171
+ "b = 3\n",
172
+ "result = a + b\n",
173
+ "print(result) # Output: 8\n",
174
+ "```"
175
+ ]
176
+ },
177
+ {
178
+ "data": {
179
+ "text/plain": [
180
+ "'```\\n# Adding two numbers\\na = 5\\nb = 3\\nresult = a + b\\nprint(result) # Output: 8\\n```'"
181
+ ]
182
+ },
183
+ "execution_count": 71,
184
+ "metadata": {},
185
+ "output_type": "execute_result"
186
+ }
187
+ ],
188
+ "source": [
189
+ "llm = Ollama(\n",
190
+ " model=\"llama2\",\n",
191
+ " base_url=\"https://thewise-ollama-server.hf.space\",\n",
192
+ " callback_manager=CallbackManager([StreamingStdOutCallbackHandler()]))\n",
193
+ "\n",
194
+ "llm('Write Python code to add 2 numbers')"
195
+ ]
196
+ }
197
+ ],
198
+ "metadata": {
199
+ "kernelspec": {
200
+ "display_name": "langchain",
201
+ "language": "python",
202
+ "name": "python3"
203
+ },
204
+ "language_info": {
205
+ "codemirror_mode": {
206
+ "name": "ipython",
207
+ "version": 3
208
+ },
209
+ "file_extension": ".py",
210
+ "mimetype": "text/x-python",
211
+ "name": "python",
212
+ "nbconvert_exporter": "python",
213
+ "pygments_lexer": "ipython3",
214
+ "version": "3.11.4"
215
+ },
216
+ "orig_nbformat": 4
217
+ },
218
+ "nbformat": 4,
219
+ "nbformat_minor": 2
220
+ }