AI/Ollama: Difference between revisions
Jump to navigation
Jump to search
| Line 661: | Line 661: | ||
ollama ps | ollama ps | ||
</syntaxhighlight> | </syntaxhighlight> | ||
|} | |||
|} | |||
{|class='wikitable mw-collapsible mw-collapsed' | |||
!scope='col' style='text-align:left'| | |||
Legion 5 Pro (Gen6 + R7 5800H + 32GB + 1TB) | |||
|- | |||
|valign='top'| | |||
{|class='wikitable mw-collapsible mw-collapsed' | |||
!scope='col' style='text-align:left'| Ollama » Install | |||
|- | |||
|valign='top'| | |||
<syntaxhighlight lang='bash' highlight='4,16'> | |||
curl -fsSL https://ollama.com/install.sh | sh | |||
: ' | |||
>>> Installing ollama to /usr/local | |||
[sudo] password for shahed: | |||
>>> Downloading ollama-linux-amd64.tar.zst | |||
######################################################################## 100.0% | |||
>>> Creating ollama user... | |||
>>> Adding ollama user to render group... | |||
>>> Adding ollama user to video group... | |||
>>> Adding current user to ollama group... | |||
>>> Creating ollama systemd service... | |||
>>> Enabling and starting ollama service... | |||
Created symlink /etc/systemd/system/default.target.wants/ollama.service → /etc/systemd/system/ollama.service. | |||
>>> NVIDIA GPU installed. | |||
' | |||
</syntaxhighlight> | |||
<syntaxhighlight lang='bash'> | |||
cat <<'EXE' | sudo bash | |||
systemctl daemon-reload | |||
systemctl enable ollama.service | |||
systemctl restart ollama.service | |||
systemctl status ollama.service | |||
EXE | |||
cat /etc/systemd/system/ollama.service | |||
setsid open http://127.0.0.1:11434 >/dev/null 2>&1 & | |||
</syntaxhighlight> | |||
|} | |||
{|class='wikitable mw-collapsible mw-collapsed' | |||
!scope='col' style='text-align:left' colspan='2'| Ollama » ArcGPU | |||
|- | |||
|valign='top' style='width:50%'| | |||
<syntaxhighlight lang='bash'> | |||
sudo journalctl -u ollama --no-pager | grep -i "sycl\|intel\|gpu" | |||
</syntaxhighlight> | |||
|valign='top' style='width:50%'| | |||
|- | |||
|valign='top' colspan='2'| | |||
<syntaxhighlight lang='bash'> | |||
cat <<'EXE' | sudo bash | |||
apt-get update;echo | |||
apt-get install -y apt-transport-https ca-certificates gnupg build-essential | |||
apt-get install -y software-properties-common git curl file procps libfuse2t64 | |||
apt-get clean;sleep 5 | |||
EXE | |||
</syntaxhighlight> | |||
|} | |||
{|class='wikitable mw-collapsible mw-collapsed' | |||
!scope='col' style='text-align:left'| Ollama » Config | |||
|- | |||
|valign='top'| | |||
<syntaxhighlight lang='ini'> | |||
cat <<'INI' | sudo tee /etc/systemd/system/ollama.service >/dev/null | |||
[Unit] | |||
Description=Ollama Service | |||
After=network-online.target | |||
[Service] | |||
User=ollama | |||
Group=ollama | |||
# Ollama listens on any host | |||
Environment="OLLAMA_HOST=0.0.0.0" | |||
Environment="PATH=/home/linuxbrew/.linuxbrew/Cellar/pyenv-virtualenv/1.2.3/shims:/home/shahed/.pyenv/shims:/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin:/home/shahed/.rbenv/shims:/home/shahed/.rbenv/bin:/home/shahed/.cargo/bin:/home/shahed/.nvm/versions/node/v18.12.1/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin:/opt/cli/sencha/cmd:/opt/cli/sencha/cmd:/home/shahed/.local/share/JetBrains/Toolbox/scripts:/home/shahed/.local/share/JetBrains/Toolbox/scripts" | |||
ExecStart=/usr/local/bin/ollama serve | |||
Restart=always | |||
RestartSec=3 | |||
[Install] | |||
WantedBy=default.target | |||
INI | |||
</syntaxhighlight> | |||
<syntaxhighlight lang='bash'> | |||
cat <<'EXE' | sudo bash | |||
systemctl daemon-reload | |||
systemctl restart ollama.service | |||
systemctl status ollama.service | |||
EXE | |||
</syntaxhighlight> | |||
<syntaxhighlight lang='bash'> | |||
cat <<'EXE' | sudo bash | |||
ufw allow from 10.19.80.0/22 to any port 11434 proto tcp | |||
ufw allow from 10.20.0.0/24 to any port 11434 proto tcp | |||
ufw allow from 10.20.13.0/24 to any port 11434 proto tcp | |||
ufw allow from 10.20.40.0/24 to any port 11434 proto tcp | |||
ufw allow from 192.168.49.2/32 to any port 11434 proto tcp | |||
EXE | |||
telnet 10.20.40.16 11434 | |||
telnet 192.168.49.2 11434 | |||
telnet shahed-ac.local 11434 | |||
</syntaxhighlight> | |||
|} | |||
{|class='wikitable mw-collapsible mw-collapsed' | |||
!scope='col' style='text-align:left'| Ollama » Model | |||
|- | |||
|valign='top'| | |||
<syntaxhighlight lang='bash'> | |||
ollama pull gpt-oss:20b | |||
: ' | |||
pulling manifest | |||
pulling e7b273f96360: 100% ▕███████████████████████████████████████████▏ 13 GB | |||
pulling fa6710a93d78: 100% ▕███████████████████████████████████████████▏ 7.2 KB | |||
pulling f60356777647: 100% ▕███████████████████████████████████████████▏ 11 KB | |||
pulling d8ba2f9a17b3: 100% ▕███████████████████████████████████████████▏ 18 B | |||
pulling 776beb3adb23: 100% ▕███████████████████████████████████████████▏ 489 B | |||
verifying sha256 digest | |||
writing manifest | |||
success | |||
' | |||
ollama ls | |||
</syntaxhighlight> | |||
|} | |||
{|class='wikitable mw-collapsible mw-collapsed' | |||
!scope='col' style='text-align:left'| Claude » Install | |||
|- | |||
|valign='top'| | |||
<syntaxhighlight lang='bash'> | |||
curl -fsSL https://claude.ai/install.sh | bash | |||
: ' | |||
Setting up Claude Code... | |||
✔ Claude Code successfully installed! | |||
Version: 2.1.71 | |||
Location: ~/.local/bin/claude | |||
Next: Run claude --help to get started | |||
⚠ Setup notes: | |||
• Native installation exists but ~/.local/bin is not in your PATH. Run: | |||
echo 'export PATH="$HOME/.local/bin:$PATH"' >> ~/.bashrc && source ~/.bashrc | |||
✅ Installation complete! | |||
' | |||
</syntaxhighlight> | |||
<syntaxhighlight lang='bash'> | |||
cat <<'ENV' | tee -a ${HOME}/.bashrc >/dev/null | |||
# claude config | |||
export PATH="$HOME/.local/bin:$PATH" | |||
ENV | |||
source ~/.bashrc && sleep 1 | |||
claude --version | |||
</syntaxhighlight> | |||
|} | |||
{|class='wikitable mw-collapsible' | |||
!scope='col' style='text-align:left' colspan='2'| Claude » Config | |||
|- | |||
|valign='top' style='width:50%'| | |||
<syntaxhighlight lang='bash'> | |||
export ANTHROPIC_BASE_URL=http://localhost:11434 | |||
export ANTHROPIC_AUTH_TOKEN=ollama | |||
export ANTHROPIC_API_KEY='' | |||
</syntaxhighlight> | |||
|valign='top' style='width:50%'| | |||
<syntaxhighlight lang='bash'> | |||
export ANTHROPIC_BASE_URL=http://shahed-ac.local:11434 | |||
export ANTHROPIC_AUTH_TOKEN=ollama | |||
export ANTHROPIC_API_KEY='' | |||
</syntaxhighlight> | |||
|- | |||
|valign='top'| | |||
<syntaxhighlight lang='bash'> | |||
claude --model gpt-oss:20b | |||
ollama stop gpt-oss:20b | |||
ollama ps | |||
</syntaxhighlight> | |||
|valign='top'| | |||
|} | |} | ||
|} | |} | ||
Revision as of 09:11, 8 March 2026
curl -fsSL https://ollama.com/install.sh | sh
ollama pull gpt-oss:20b
ollama --version
ollama ls
curl -fsSL https://claude.ai/install.sh | bash
ollama launch claude --model gpt-oss:20b
|
export ANTHROPIC_BASE_URL=http://localhost:11434
export ANTHROPIC_AUTH_TOKEN=ollama
export ANTHROPIC_API_KEY=''
export OLLAMA_NUM_CTX=32768
export OLLAMA_KEEP_ALIVE=5m
claude --model gpt-oss:20b
| ||||
| |||||
Optimization
|
Optimization | |||||||||||||||||||||||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
|
Installation
|
Installation | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| ||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
References
|
References | ||
|---|---|---|