This repository was archived by the owner on Mar 20, 2026. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 19
Expand file tree
/
Copy pathrun.sh
More file actions
executable file
·50 lines (43 loc) · 1.36 KB
/
run.sh
File metadata and controls
executable file
·50 lines (43 loc) · 1.36 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
#!/bin/bash
# Detect which Docker Compose flavor is installed: the v2 plugin
# ("docker compose") or the legacy standalone "docker-compose" binary.
# Sets DOCKER_COMPOSE (base command) and COMPOSE_UP (full startup command)
# for use by the rest of the script; exits 1 if neither is available.
if docker compose version &> /dev/null; then
  DOCKER_COMPOSE="docker compose"
elif command -v docker-compose &> /dev/null; then
  DOCKER_COMPOSE="docker-compose"
else
  # Diagnostics belong on stderr.
  echo "Error: Neither 'docker compose' nor 'docker-compose' is available." >&2
  exit 1
fi
# Build the startup command once instead of duplicating it per branch.
COMPOSE_UP="$DOCKER_COMPOSE up -d --build"
# Start services in detached mode and bail out if startup fails —
# otherwise the readiness loop below would wait forever on nothing.
echo "Starting services..."
# Intentionally unquoted: $COMPOSE_UP must word-split into command + args
# ("docker compose" is two words). Plain expansion is enough here; eval
# would add a second, riskier parsing pass for no benefit.
if ! $COMPOSE_UP; then
  echo "Error: failed to start services." >&2
  exit 1
fi
# Wait for the Ollama API to respond before attempting to pull models.
# NOTE(review): port 11436 is assumed to be the host-mapped Ollama port
# from the compose file — confirm if the port mapping ever changes.
echo "Waiting for Ollama to start..."
max_attempts=150   # 150 polls * 2s = 5 minute ceiling instead of spinning forever
attempt=0
until curl -s http://localhost:11436/api/tags > /dev/null 2>&1; do
  attempt=$((attempt + 1))
  if [ "$attempt" -ge "$max_attempts" ]; then
    echo ""
    echo "Error: Ollama did not become ready within $((max_attempts * 2)) seconds." >&2
    exit 1
  fi
  echo -n "."
  sleep 2
done
echo " Ready!"
# Pull the chat model unless a previous run already downloaded it.
# Hoisted into a variable so the model name lives in exactly one place.
MODEL="llama3.2:3b"
# -F: match the name literally — the dots would otherwise be regex wildcards.
if ! docker exec ollama ollama list | grep -qF -- "$MODEL"; then
  echo "Downloading $MODEL model (this may take several minutes)..."
  echo "This is a one-time download - subsequent starts will be much faster"
  # Test the command directly rather than inspecting $? afterwards.
  if docker exec ollama ollama pull "$MODEL"; then
    echo "Model $MODEL downloaded successfully!"
  else
    echo "Failed to download model $MODEL" >&2
    exit 1
  fi
else
  echo "Model $MODEL already available"
fi
echo "All services ready!"
echo "- Frontend: http://localhost:7860"
echo "- Ollama API: http://localhost:11436"
echo ""
echo "Press Ctrl+C to stop all services"
# Follow logs in the foreground: without -f, `logs` prints a snapshot and
# exits immediately, contradicting the "Press Ctrl+C" prompt above.
# NOTE(review): Ctrl+C only stops the log stream — the containers keep
# running because they were started detached; consider trapping INT and
# running "$DOCKER_COMPOSE down" if stopping services is the intent.
$DOCKER_COMPOSE logs -f