Germano Cavalcante committed on
Commit
2cb6a82
·
1 Parent(s): 196cfa8

Run the 1B model for the sake of user's patience

Browse files
Files changed (3) hide show
  1. Dockerfile +0 -1
  2. Modelfile +0 -53
  3. start.sh +1 -1
Dockerfile CHANGED
@@ -13,7 +13,6 @@ RUN curl -fsSL https://ollama.com/install.sh | sh
13
  RUN mkdir /app && chmod 777 /app
14
 
15
  # Copy only the necessary files into the container
16
- COPY Modelfile /app/Modelfile
17
  COPY start.sh /app/start.sh
18
 
19
  # Set the working directory
 
13
  RUN mkdir /app && chmod 777 /app
14
 
15
  # Copy only the necessary files into the container
 
16
  COPY start.sh /app/start.sh
17
 
18
  # Set the working directory
Modelfile DELETED
@@ -1,53 +0,0 @@
1
- FROM ./llama.gguf
2
-
3
- SYSTEM """You are a helpful AI assistant. Respond to users accurately."""
4
-
5
- TEMPLATE """<|start_header_id|>system<|end_header_id|>
6
-
7
- Cutting Knowledge Date: December 2023
8
-
9
- {{ if .System }}{{ .System }}
10
- {{- end }}
11
- {{- if .Tools }}When you receive a tool call response, use the output to format an answer to the orginal user question.
12
-
13
- You are a helpful assistant with tool calling capabilities.
14
- {{- end }}<|eot_id|>
15
- {{- range $i, $_ := .Messages }}
16
- {{- $last := eq (len (slice $.Messages $i)) 1 }}
17
- {{- if eq .Role "user" }}<|start_header_id|>user<|end_header_id|>
18
- {{- if and $.Tools $last }}
19
-
20
- Given the following functions, please respond with a JSON for a function call with its proper arguments that best answers the given prompt.
21
-
22
- Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}. Do not use variables.
23
-
24
- {{ range $.Tools }}
25
- {{- . }}
26
- {{ end }}
27
- {{ .Content }}<|eot_id|>
28
- {{- else }}
29
-
30
- {{ .Content }}<|eot_id|>
31
- {{- end }}{{ if $last }}<|start_header_id|>assistant<|end_header_id|>
32
-
33
- {{ end }}
34
- {{- else if eq .Role "assistant" }}<|start_header_id|>assistant<|end_header_id|>
35
- {{- if .ToolCalls }}
36
- {{ range .ToolCalls }}
37
- {"name": "{{ .Function.Name }}", "parameters": {{ .Function.Arguments }}}{{ end }}
38
- {{- else }}
39
-
40
- {{ .Content }}
41
- {{- end }}{{ if not $last }}<|eot_id|>{{ end }}
42
- {{- else if eq .Role "tool" }}<|start_header_id|>ipython<|end_header_id|>
43
-
44
- {{ .Content }}<|eot_id|>{{ if $last }}<|start_header_id|>assistant<|end_header_id|>
45
-
46
- {{ end }}
47
- {{- end }}
48
- {{- end }}
49
- """
50
-
51
- PARAMETER stop "<|start_header_id|>"
52
- PARAMETER stop "<|end_header_id|>"
53
- PARAMETER stop "<|eot_id|>"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
start.sh CHANGED
@@ -11,7 +11,7 @@ ollama serve &
11
  sleep 10
12
 
13
  # Create the model using Ollama
14
- ollama run hf.co/mano-wii/llama3.2-3B-tunned-for-blender:Q5_K_M
15
 
16
  # Keep the container running indefinitely
17
  tail -f /dev/null
 
11
  sleep 10
12
 
13
  # Create the model using Ollama
14
+ ollama run hf.co/mano-wii/llama3.2-1B-tunned-for-blender:Q4_K_M
15
 
16
  # Keep the container running indefinitely
17
  tail -f /dev/null