|
| 1 | +--- |
| 2 | +title: run-ollama-in-docker |
| 3 | +date: 2025-01-15 15:06:31 |
| 4 | +tags: |
| 5 | +--- |
| 6 | + |
| 7 | +# Run Ollama in docker |
| 8 | + |
| 9 | +## install nvidia container toolkit |
| 10 | + |
| 11 | +https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html#installation |
| 12 | + |
| 13 | +### Installing with Apt |
| 14 | + |
| 15 | +1. Configure the production repository: |
| 16 | + |
| 17 | + ``` |
| 18 | + $ curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | sudo gpg --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg \ |
| 19 | + && curl -s -L https://nvidia.github.io/libnvidia-container/stable/deb/nvidia-container-toolkit.list | \ |
| 20 | + sed 's#deb https://#deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://#g' | \ |
| 21 | + sudo tee /etc/apt/sources.list.d/nvidia-container-toolkit.list |
| 22 | + ``` |
| 23 | + |
| 24 | + Optionally, configure the repository to use experimental packages: |
| 25 | + |
| 26 | + ``` |
| 27 | + $ sed -i -e '/experimental/ s/^#//g' /etc/apt/sources.list.d/nvidia-container-toolkit.list |
| 28 | + ``` |
| 29 | + |
| 30 | +2. Update the packages list from the repository: |
| 31 | + |
| 32 | + ``` |
| 33 | + $ sudo apt-get update |
| 34 | + ``` |
| 35 | + |
| 36 | +3. Install the NVIDIA Container Toolkit packages: |
| 37 | + |
| 38 | + ``` |
| 39 | + $ sudo apt-get install -y nvidia-container-toolkit |
| 40 | + ``` |
| 41 | + |
| 42 | +## Start the container |
| 43 | + |
| 44 | +```bash |
| 45 | +docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama |
| 46 | +``` |
| 47 | + |
| 48 | +## exec the container |
| 49 | + |
| 50 | +```shell |
| 51 | +docker exec -it ollama /bin/bash |
| 52 | +``` |
| 53 | + |
| 54 | +## webui |
| 55 | + |
| 56 | +可以编写一个flask应用来实现简单的webui: |
| 57 | + |
| 58 | +```python |
| 59 | +    from flask import Flask, request, jsonify, render_template_string |
| 59 | +    import requests |
| 60 | + |
| 61 | + app = Flask(__name__) |
| 62 | + |
| 63 | +    # Ollama API的基本URL（容器通过 -p 11434:11434 映射到本机端口） |
| 64 | +    OLLAMA_API_URL = "http://localhost:11434/api/generate" |
| 65 | + |
| 66 | + @app.route('/') |
| 67 | + def index(): |
| 68 | + return render_template_string(''' |
| 69 | + <!DOCTYPE html> |
| 70 | + <html lang="en"> |
| 71 | + <head> |
| 72 | + <meta charset="UTF-8"> |
| 73 | + <title>Chat with Ollama</title> |
| 74 | + </head> |
| 75 | + <body> |
| 76 | + <h1>Welcome to the Chat Interface!</h1> |
| 77 | + <form id="chatForm" method="post" action="/chat"> |
| 78 | + <input type="text" name="message" placeholder="Type your message here..."> |
| 79 | + <button type="submit">Send</button> |
| 80 | + </form> |
| 81 | + |
| 82 | + <div id="response"></div> |
| 83 | + |
| 84 | + <script> |
| 85 | + document.getElementById('chatForm').addEventListener('submit', function(e) { |
| 86 | + e.preventDefault(); |
| 87 | + const input = document.querySelector('input[name=message]'); |
| 88 | + const xhr = new XMLHttpRequest(); |
| 89 | + xhr.open("POST", "/chat"); |
| 90 | + xhr.setRequestHeader("Content-Type", "application/x-www-form-urlencoded"); |
| 91 | + xhr.onload = function() { |
| 92 | + if (xhr.status === 200) { |
| 93 | + document.getElementById('response').innerText = xhr.responseText; |
| 94 | + } else { |
| 95 | + console.error("Request failed. Returned status of ", xhr.status); |
| 96 | + } |
| 97 | + }; |
| 98 | +                        // 注意：请求头已设为 x-www-form-urlencoded，须发送urlencoded字符串而非FormData |
| 99 | +                        xhr.send("message=" + encodeURIComponent(input.value)); |
| 101 | + }); |
| 102 | + </script> |
| 103 | + </body> |
| 104 | + </html> |
| 105 | + ''') |
| 106 | + |
| 107 | + @app.route('/chat', methods=['POST']) |
| 108 | + def chat(): |
| 109 | + user_input = request.form.get('message') |
| 110 | + |
| 111 | + # 构建API请求参数 |
| 112 | +        payload = { |
| 113 | +            'prompt': f"User: {user_input}\nAssistant:", |
| 114 | +            'model': "your_model_name_here",  # 替换为你使用的模型名称 |
| 115 | +            'stream': False,  # 关闭流式输出，一次性返回完整结果 |
| 116 | +            'options': {'num_predict': 100}  # 生成的最大新令牌数量 |
| 117 | +        } |
| 117 | + |
| 118 | + try: |
| 119 | + response = requests.post(OLLAMA_API_URL, json=payload) |
| 120 | + response.raise_for_status() |
| 121 | +            generated_text = response.json()['response'] |
| 122 | + return jsonify({'response': generated_text}) |
| 123 | + except Exception as e: |
| 124 | + return jsonify({'error': str(e)}), 500 |
| 125 | + |
| 126 | + if __name__ == '__main__': |
| 127 | + app.run(debug=True) |
| 128 | +``` |
| 129 | + |
| 130 | +注意：上面的应用通过 `render_template_string` 已内嵌了页面，无需单独的html文件。如果希望改用 `render_template` 将模板拆分出来，可以使用如下html文件： |
| 131 | + |
| 132 | +```html |
| 133 | +<!DOCTYPE html> |
| 134 | +<html lang="en"> |
| 135 | +<head> |
| 136 | + <meta charset="UTF-8"> |
| 137 | + <title>Chat with Ollama</title> |
| 138 | +</head> |
| 139 | +<body> |
| 140 | + <h1>Welcome to the Chat Interface!</h1> |
| 141 | + <form id="chatForm" method="post" action="/chat"> |
| 142 | + <input type="text" name="message" placeholder="Type your message here..."> |
| 143 | + <button type="submit">Send</button> |
| 144 | + </form> |
| 145 | + |
| 146 | + <div id="response"></div> |
| 147 | + |
| 148 | + <script> |
| 149 | + document.getElementById('chatForm').addEventListener('submit', function(e) { |
| 150 | + e.preventDefault(); |
| 151 | + const input = document.querySelector('input[name=message]'); |
| 152 | + const xhr = new XMLHttpRequest(); |
| 153 | + xhr.open("POST", "/chat"); |
| 154 | + xhr.setRequestHeader("Content-Type", "application/x-www-form-urlencoded"); |
| 155 | + xhr.onload = function() { |
| 156 | + if (xhr.status === 200) { |
| 157 | + document.getElementById('response').innerText = xhr.responseText; |
| 158 | + } else { |
| 159 | + console.error("Request failed. Returned status of ", xhr.status); |
| 160 | + } |
| 161 | + }; |
| 162 | +            // 注意：请求头已设为 x-www-form-urlencoded，须发送urlencoded字符串而非FormData |
| 163 | +            xhr.send("message=" + encodeURIComponent(input.value)); |
| 165 | + }); |
| 166 | + </script> |
| 167 | +</body> |
| 168 | +</html> |
| 169 | +``` |
0 commit comments