环境说明
操作系统:ubuntu 20.04
硬件配置:6核CPU / 12GB内存 / 200GB磁盘
大模型版本:huihui_ai/deepseek-r1-abliterated:7b
ollama版本:0.5.7
Dify版本:0.15.3
搭建部署
1. 安装docker+docker-compose
# Install Docker Engine from Aliyun's mirror of the official apt repository.
apt-get update
apt-get install -y apt-transport-https ca-certificates curl software-properties-common
# Import the repository signing key.
sudo install -m 0755 -d /etc/apt/keyrings
curl -fsSL https://mirrors.aliyun.com/docker-ce/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg
sudo chmod a+r /etc/apt/keyrings/docker.gpg
# Register the apt source for the current Ubuntu release codename.
echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://mirrors.aliyun.com/docker-ce/linux/ubuntu \
"$(. /etc/os-release && echo "$VERSION_CODENAME")" stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
# FIX: refresh the package index AFTER adding the repo — without this apt
# cannot find docker-ce (the first `apt-get update` ran before the repo existed).
sudo apt-get update
sudo apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
# Configure the Docker daemon (cgroup driver, registry mirror, logging, storage).
mkdir -p /etc/docker
# Quoted delimiter: nothing inside the JSON is shell-expanded.
cat > /etc/docker/daemon.json << 'EOF'
{
  "bip": "172.17.0.1/16",
  "exec-opts": [
    "native.cgroupdriver=systemd"
  ],
  "max-concurrent-downloads": 10,
  "experimental": true,
  "registry-mirrors": [
    "https://docker.1ms.run"
  ],
  "live-restore": true,
  "log-driver": "json-file",
  "log-level": "warn",
  "log-opts": {
    "max-size": "50m",
    "max-file": "1"
  },
  "storage-driver": "overlay2"
}
EOF
# FIX: restart the daemon so daemon.json actually takes effect (the original
# wrote the file but never applied it).
sudo systemctl restart docker
# Auto-configure a registry accelerator.
docker run --rm --name=dkturbo -v /etc/docker:/etc/docker -v /opt:/opt -e MODE=registry -e REGISTRY=auto --network=bridge --pid=host --privileged registry.cn-shenzhen.aliyuncs.com/cp0204/dkturbo:main
# Install the standalone docker-compose binary (the docker-compose-plugin
# installed above already provides the `docker compose` subcommand).
curl -L "https://github.com/docker/compose/releases/download/v2.26.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
chmod +x /usr/local/bin/docker-compose
docker-compose -v
2. 安装ollama
# Download the ollama v0.5.7 release tarball.
wget https://github.com/ollama/ollama/releases/download/v0.5.7/ollama-linux-amd64.tgz
# Create the install directory (-p: idempotent, no error if it exists).
mkdir -p /usr/local/ollama/
# Unpack the binaries into it (produces /usr/local/ollama/bin/ollama).
tar -zxvf ollama-linux-amd64.tgz -C /usr/local/ollama/
# Create a dedicated system user/group to run the service.
useradd -r -s /bin/false -U -m -d /usr/share/ollama ollama
usermod -a -G ollama root
# FIX: chown recursively — a plain chown only changed the top-level directory
# and left the extracted files owned by root.
chown -R ollama:ollama /usr/local/ollama
# FIX: mark the binary itself executable (the original chmod targeted the
# directory, which already has the execute bit).
chmod +x /usr/local/ollama/bin/ollama
# Create the systemd unit. Quoted delimiter: the unit file is written
# literally, with no shell expansion.
cat > /etc/systemd/system/ollama.service << 'EOF'
[Unit]
Description=Ollama Service
After=network-online.target

[Service]
ExecStart=/usr/local/ollama/bin/ollama serve
User=ollama
Group=ollama
Restart=always
RestartSec=3
Environment="PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin"
# Listen on all interfaces for remote access.
Environment="OLLAMA_HOST=0.0.0.0"
# Allow cross-origin requests (needed by web frontends such as Dify).
Environment="OLLAMA_ORIGINS=*"
# Model storage path; the default is /usr/share/ollama/.ollama/models/.
Environment="OLLAMA_MODELS=/data/ollama/.ollama/models"

[Install]
WantedBy=default.target
EOF
systemctl daemon-reload
# Create the model directory and hand it to the service user.
mkdir -p /data/ollama/.ollama/models
chown -R ollama:ollama /data/ollama/.ollama
# Start the service now and enable it at boot.
systemctl enable ollama --now
# FIX: append to /etc/profile non-interactively (the original opened vim,
# which cannot run unattended). Single quotes keep $PATH literal in the file.
echo 'export PATH=/usr/local/ollama/bin:$PATH' >> /etc/profile
source /etc/profile
# Verify the installation.
ollama --version
3. 拉取deepseek-R1大模型
# Pull the model used throughout this guide.
ollama pull huihui_ai/deepseek-r1-abliterated:7b
# Run it interactively (pulls automatically on first use).
ollama run huihui_ai/deepseek-r1-abliterated:7b
# Frequently used ollama commands
# (FIX: examples now use this guide's model instead of the unrelated
# deepseek-r1:1.5b, and the duplicate `ollama list` entry is removed):
ollama serve                                       # start the ollama server
ollama list                                        # list all downloaded models
ollama show huihui_ai/deepseek-r1-abliterated:7b   # show details of a model
ollama run huihui_ai/deepseek-r1-abliterated:7b    # run a model
ollama ps                                          # list currently running models
4. 安装Dify
# FIX: pin the clone to the 0.15.3 release documented by this guide —
# cloning the default branch gives an unpinned, possibly incompatible version.
git clone -b 0.15.3 --depth 1 https://github.com/langgenius/dify.git
cd dify/docker
# Create the environment file from the shipped template.
cp .env.example .env
docker compose up -d
# Finish setup in the browser: http://<server-ip>/install
配置Dify
1. 添加模型
登录Dify后,打开用户菜单中的"设置",在"模型供应商"里找到Ollama,填写模型信息并保存
2. 配置知识库
在知识库中上传文件后,保存并进入下一步完成索引设置
3. 创建知识库检索应用
关联知识库并进行问答测试
至此,一个简单的本地智能专家助手已经搭建完成,后续更多玩法逐渐探索。