feat: Add a ujust action to install ollama quadlet #1072

Merged · 3 commits · Apr 7, 2024
just/bluefin-tools.just: 72 additions, 0 deletions
@@ -28,3 +28,75 @@ tensorflow:
# Run the yafti setup tool
yafti:
yafti /etc/yafti.yml --force

# Set up a local Ollama instance in a container.
# Detect hardware, offer a choice if needed.
ollama:
#!/usr/bin/env bash
echo 'Follow the prompts and check the tutorial: '
echo
GPU_CHOICES=()
# Detect nvidia drivers
if which nvidia-smi > /dev/null 2>&1; then
GPU_CHOICES+=("Nvidia (CUDA)")
fi
# Detect radeon hardware
if lspci | grep ' VGA ' | grep -sq Radeon; then
GPU_CHOICES+=("AMD (ROCm)")
fi
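# Let the user pick a backend; gum's --select-if-one skips the prompt when only one GPU type was detected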
GPU_SELECTION=$(printf '%s\n' "${GPU_CHOICES[@]}" | gum choose --select-if-one --header "Select the type of graphics card you have")
echo "Selected ${GPU_SELECTION}!"
case "$GPU_SELECTION" in
"Nvidia (CUDA)")
IMAGE=latest
CUSTOM_ARGS="AddDevice=nvidia.com/gpu=all"
;;

"AMD (ROCm)")
IMAGE=rocm
read -r -d '' CUSTOM_ARGS <<-'EOF'
AddDevice=/dev/dri
AddDevice=/dev/kfd
EOF
;;
esac
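# Assemble the Podman quadlet (.container) unit; systemd's quadlet generator turns it into ollama.service at daemon-reload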

read -r -d '' QUADLET <<-EOF
[Unit]
Description=The Ollama container
After=local-fs.target

[Service]
Restart=always
TimeoutStartSec=60
# Ensure there's a userland podman.sock
ExecStartPre=/bin/systemctl --user enable podman.socket

[Container]
ContainerName=ollama
PublishPort=11434:11434
RemapUsers=keep-id
RunInit=yes
NoNewPrivileges=no
Volume=%h/.ollama:/.ollama
PodmanArgs=--userns=keep-id
PodmanArgs=--group-add=keep-groups
PodmanArgs=--ulimit=host
PodmanArgs=--security-opt=label=disable
PodmanArgs=--cgroupns=host

Image=docker.io/ollama/ollama:${IMAGE}
${CUSTOM_ARGS}

[Install]
RequiredBy=default.target
EOF
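# User quadlets are read from ~/.config/containers/systemd; only write the unit if it is not already there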
if [ ! -f ~/.config/containers/systemd/ollama.container ]; then
mkdir -p ~/.config/containers/systemd
echo "${QUADLET}" > ~/.config/containers/systemd/ollama.container
else
echo "Ollama container already exists, skipping..."
fi
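# Reload so the quadlet generator creates ollama.service, then start it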
systemctl --user daemon-reload
systemctl --user start ollama.service
echo "Please install the ollama cli via \`brew install ollama\`"