Germano Cavalcante committed on
Commit
5a9e15b
·
1 Parent(s): 2cb6a82

Update Modelfile - llama3.2-blender-assistant

Browse files
Files changed (3) hide show
  1. Dockerfile +1 -0
  2. Modelfile +47 -0
  3. start.sh +2 -1
Dockerfile CHANGED
@@ -13,6 +13,7 @@ RUN curl -fsSL https://ollama.com/install.sh | sh
13
  RUN mkdir /app && chmod 777 /app
14
 
15
  # Copy only the necessary files into the container
 
16
  COPY start.sh /app/start.sh
17
 
18
  # Set the working directory
 
13
  RUN mkdir /app && chmod 777 /app
14
 
15
  # Copy only the necessary files into the container
16
+ COPY Modelfile /app/Modelfile
17
  COPY start.sh /app/start.sh
18
 
19
  # Set the working directory
Modelfile ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ FROM hf.co/mano-wii/llama3.2-1b-tunned-for-blender
2
+ TEMPLATE """<|start_header_id|>system<|end_header_id|>
3
+
4
+ Cutting Knowledge Date: December 2023
5
+
6
+ {{ if .System }}{{ .System }}
7
+ {{- end }}
8
+ {{- if .Tools }}When you receive a tool call response, use the output to format an answer to the orginal user question.
9
+
10
+ You are a helpful assistant with tool calling capabilities.
11
+ {{- end }}<|eot_id|>
12
+ {{- range $i, $_ := .Messages }}
13
+ {{- $last := eq (len (slice $.Messages $i)) 1 }}
14
+ {{- if eq .Role "user" }}<|start_header_id|>user<|end_header_id|>
15
+ {{- if and $.Tools $last }}
16
+
17
+ If your answer does not require any information provided by any of the following functions, ignore the functions, otherwise respond with a JSON for a function call with its proper arguments that best answers the given prompt.
18
+ And respond in the format {"name": function name, "parameters": dictionary of argument name and its value}. Do not use variables.
19
+
20
+ {{ range $.Tools }}
21
+ {{- . }}
22
+ {{ end }}
23
+ {{ .Content }}<|eot_id|>
24
+ {{- else }}
25
+
26
+ {{ .Content }}<|eot_id|>
27
+ {{- end }}{{ if $last }}<|start_header_id|>assistant<|end_header_id|>
28
+
29
+ {{ end }}
30
+ {{- else if eq .Role "assistant" }}<|start_header_id|>assistant<|end_header_id|>
31
+ {{- if .ToolCalls }}
32
+ {{ range .ToolCalls }}
33
+ {"name": "{{ .Function.Name }}", "parameters": {{ .Function.Arguments }}}{{ end }}
34
+ {{- else }}
35
+
36
+ {{ .Content }}
37
+ {{- end }}{{ if not $last }}<|eot_id|>{{ end }}
38
+ {{- else if eq .Role "tool" }}<|start_header_id|>ipython<|end_header_id|>
39
+
40
+ {{ .Content }}<|eot_id|>{{ if $last }}<|start_header_id|>assistant<|end_header_id|>
41
+
42
+ {{ end }}
43
+ {{- end }}
44
+ {{- end }}"""
45
+ PARAMETER stop <|start_header_id|>
46
+ PARAMETER stop <|end_header_id|>
47
+ PARAMETER stop <|eot_id|>
start.sh CHANGED
@@ -11,7 +11,8 @@ ollama serve &
11
  sleep 10
12
 
13
  # Create the model using Ollama
14
- ollama run hf.co/mano-wii/llama3.2-1B-tunned-for-blender:Q4_K_M
 
15
 
16
  # Keep the container running indefinitely
17
  tail -f /dev/null
 
11
  sleep 10
12
 
13
  # Create the model using Ollama
14
+ ollama create llama3.2-blender-assistant -f Modelfile
15
+ ollama run llama3.2-blender-assistant
16
 
17
  # Keep the container running indefinitely
18
  tail -f /dev/null