Building a Personal Knowledge Base with an LLM

Installing Ollama and LobeChat with Docker

The docker-compose.yml below runs both services: Ollama is exposed on port 1434 and LobeChat on port 3210, with LobeChat routed to Ollama through its OpenAI-compatible /v1 endpoint.

version: '3.8'
services:
  ollama:
    image: ollama/ollama:latest
    container_name: ollama
    ports:
      - 1434:1434            # host port mapped to the non-default port set below
    volumes:
      - D:\\dockerwork\\ollama:/root/.ollama   # persist downloaded models on the Windows host
    environment:
      - 'OLLAMA_HOST=0.0.0.0:1434'   # listen on all interfaces, port 1434 instead of the default 11434
      - 'OLLAMA_ORIGINS=*'           # allow cross-origin requests (e.g. from LobeChat)
    restart: unless-stopped

  lobe-chat:
    image: lobehub/lobe-chat:latest
    container_name: lobe-chat
    depends_on:
      - ollama
    ports:
      - 3210:3210
    environment:
      OPENAI_API_KEY: sk-xxxx                                 # placeholder; requests go to Ollama via the proxy URL below
      OPENAI_PROXY_URL: http://host.docker.internal:1434/v1   # Ollama's OpenAI-compatible endpoint
      ACCESS_CODE: lobe66                                     # login code for the LobeChat web UI
    extra_hosts:
      - host.docker.internal:host-gateway
    restart: always
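
With the file saved as docker-compose.yml, the stack can be started and checked roughly as follows (run from the directory containing the file; the commands assume Docker Compose v2):

# start both containers in the background
docker compose up -d

# confirm ollama and lobe-chat are running
docker compose ps

# the Ollama API should answer on the mapped host port
curl http://localhost:1434/api/version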

Configuring Ollama
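
The compose file already handles the Ollama-side configuration: OLLAMA_HOST makes it listen on 0.0.0.0:1434, OLLAMA_ORIGINS=* allows cross-origin requests, and the volume keeps downloaded models under the mapped host directory. A quick sketch of checking the service from the host (on a fresh install the model list is empty):

# list the models Ollama currently has locally
curl http://localhost:1434/api/tags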

The Ollama model library
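
Models are chosen from the Ollama model library (https://ollama.com/library) and pulled by name into the container. As a sketch, qwen2.5:7b below is only an example tag, not one prescribed by this setup; substitute whichever model you want LobeChat to use:

# pull an example model and list what is installed
docker exec -it ollama ollama pull qwen2.5:7b
docker exec -it ollama ollama list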

Configuring LobeChat
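
LobeChat itself is reached at http://localhost:3210 and, with the compose file above, asks for the access code lobe66; its requests are forwarded to Ollama through the OPENAI_PROXY_URL setting. A rough sanity check of that same /v1 endpoint from the host, assuming the example model pulled above:

# call Ollama's OpenAI-compatible endpoint directly, as LobeChat does
curl http://localhost:1434/v1/chat/completions \
  -H "Content-Type: application/json" \
  -d '{"model": "qwen2.5:7b", "messages": [{"role": "user", "content": "hello"}]}'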