File size: 290 Bytes
37673ad
 
 
 
 
 
 
 
 
 
 
 
 
 
2fb3a13
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
#!/bin/bash
# Container entrypoint: launch the Ollama server, pre-pull the models it
# will serve, then block on the server process so the container stays up
# exactly as long as the server does.
set -euo pipefail

# Start the Ollama server in the background and remember its PID.
ollama serve &
server_pid=$!

# Wait for the server to be ready. Poll instead of a fixed sleep: a fixed
# delay is both slower than needed on fast hosts and racy on slow ones.
# Give up after ~30s so a broken server fails the container visibly.
ready=0
for ((i = 0; i < 30; i++)); do
  if ollama list >/dev/null 2>&1; then
    ready=1
    break
  fi
  sleep 1
done
if (( ready == 0 )); then
  echo "error: Ollama server did not become ready within 30s" >&2
  exit 1
fi

# Pull the necessary models; under 'set -e' a failed pull aborts the
# container instead of silently starting without its models.
models=(
  llama3.2
  llama3.2:1b
  granite3-moe
  granite3-moe:1b
)
for model in "${models[@]}"; do
  ollama pull "$model"
done

# Keep the container running by waiting on the server process (rather
# than 'tail -f /dev/null'), so the container exits if the server dies
# and the orchestrator can restart it.
wait "$server_pid"