AI/Ollama: Difference between revisions
Jump to navigation
Jump to search
No edit summary |
|||
| (13 intermediate revisions by the same user not shown) | |||
| Line 21: | Line 21: | ||
claude --model gpt-oss:20b | claude --model gpt-oss:20b | ||
</syntaxhighlight> | </syntaxhighlight> | ||
|- | |||
|valign='top'| | |||
<syntaxhighlight lang='bash'> | |||
sudo usermod -aG render,video ${USER} | |||
echo 'id -nG'|sudo -i -u ${USER} bash | |||
newgrp render | |||
</syntaxhighlight> | |||
|valign='top'| | |||
|- | |- | ||
|valign='top' colspan='2'| | |valign='top' colspan='2'| | ||
| Line 69: | Line 78: | ||
{|class='wikitable mw-collapsible mw-collapsed' | {|class='wikitable mw-collapsible mw-collapsed' | ||
!scope='col' style='text-align:left'| | !scope='col' style='text-align:left'| | ||
|- | |- | ||
|valign='top'| | |valign='top'| | ||
| Line 81: | Line 90: | ||
++**/usr/local/bin/** | Executable Binaries | ++**/usr/local/bin/** | Executable Binaries | ||
+++ollama | Ollama Server (Standalone) | +++ollama | Ollama Server (Standalone) | ||
++**/usr/share/ollama/** | Ollama Shares (Resources) | |||
+++**.ollama/** | Ollama Shares | |||
++++id_ed25519 | Private Key | |||
++++id_ed25519.pub | Public Key | |||
++++**models/** | Saved Models | |||
+++++blobs/ | Weights **(gpt-oss:20b)** | |||
+++++manifests/ | Model metadata | |||
++**/etc/systemd/system/** | Systemd Services | ++**/etc/systemd/system/** | Systemd Services | ||
+++ollama.service | Systemd service file | +++ollama.service | Systemd service file | ||
| Line 87: | Line 103: | ||
++++claude | Claude Code CLI | ++++claude | Claude Code CLI | ||
+++**.ollama/** | Ollama Data Directory | +++**.ollama/** | Ollama Data Directory | ||
++++ | ++++id_ed25519 | Private Key | ||
++++ | ++++id_ed25519.pub | Public Key | ||
+++**.claude/** | Claude Code Data Directory | +++**.claude/** | Claude Code Data Directory | ||
++++config.json | API URL, keys, project context | ++++config.json | API URL, keys, project context | ||
| Line 153: | Line 167: | ||
|- | |- | ||
|valign='top'| | |valign='top'| | ||
{|class='wikitable mw-collapsible' | {|class='wikitable mw-collapsible mw-collapsed' | ||
!scope='col' style='text-align:left'| | !scope='col' style='text-align:left'| | ||
Yoga Pro 7i (G9 + U7 155H + 32GB + 1TB) | Yoga Pro 7i (G9 + U7 155H + 32GB + 1TB) | ||
| Line 402: | Line 416: | ||
|} | |} | ||
{|class='wikitable mw-collapsible mw-collapsed' | {|class='wikitable mw-collapsible mw-collapsed' | ||
!scope='col' style='text-align:left'| | |||
Legion 5 Pro (G6 + R7 5800H + 32GB + 1TB) | |||
|- | |||
|valign='top'| | |||
{|class='wikitable mw-collapsible mw-collapsed' | |||
!scope='col' style='text-align:left'| Ollama » Install | |||
|- | |||
|valign='top'| | |||
<syntaxhighlight lang='bash' highlight='4,16'> | |||
curl -fsSL https://ollama.com/install.sh | sh | |||
: ' | |||
>>> Installing ollama to /usr/local | |||
[sudo] password for shahed: | |||
>>> Downloading ollama-linux-amd64.tar.zst | |||
######################################################################## 100.0% | |||
>>> Creating ollama user... | |||
>>> Adding ollama user to render group... | |||
>>> Adding ollama user to video group... | |||
>>> Adding current user to ollama group... | |||
>>> Creating ollama systemd service... | |||
>>> Enabling and starting ollama service... | |||
Created symlink /etc/systemd/system/default.target.wants/ollama.service → /etc/systemd/system/ollama.service. | |||
>>> NVIDIA GPU installed. | |||
' | |||
</syntaxhighlight> | |||
<syntaxhighlight lang='bash'> | |||
cat <<'EXE' | sudo bash | |||
systemctl daemon-reload | |||
systemctl enable ollama.service | |||
systemctl restart ollama.service | |||
systemctl status ollama.service | |||
EXE | |||
cat /etc/systemd/system/ollama.service | |||
setsid open http://127.0.0.1:11434 >/dev/null 2>&1 & | |||
</syntaxhighlight> | |||
|} | |||
{|class='wikitable mw-collapsible mw-collapsed' | |||
!scope='col' style='text-align:left' colspan='2'| Ollama » Utility | |||
|- | |||
|valign='top' style='width:50%'| | |||
<syntaxhighlight lang='bash'> | |||
sudo journalctl -u ollama --no-pager | grep -i "sycl\|intel\|gpu" | |||
</syntaxhighlight> | |||
|valign='top' style='width:50%'| | |||
|- | |||
|valign='top' colspan='2'| | |||
<syntaxhighlight lang='bash'> | |||
cat <<'EXE' | sudo bash | |||
apt-get update;echo | |||
apt-get install -y apt-transport-https ca-certificates gnupg build-essential | |||
apt-get install -y software-properties-common git curl file procps libfuse2t64 | |||
apt-get clean;sleep 5 | |||
EXE | |||
</syntaxhighlight> | |||
|} | |||
{|class='wikitable mw-collapsible mw-collapsed' | |||
!scope='col' style='text-align:left'| Ollama » Config | |||
|- | |||
|valign='top'| | |||
<syntaxhighlight lang='ini'> | |||
cat <<'INI' | sudo tee /etc/systemd/system/ollama.service >/dev/null | |||
[Unit] | |||
Description=Ollama Service | |||
After=network-online.target | |||
[Service] | |||
User=ollama | |||
Group=ollama | |||
# Ollama listens on any host | |||
Environment="OLLAMA_HOST=0.0.0.0" | |||
Environment="PATH=/home/linuxbrew/.linuxbrew/Cellar/pyenv-virtualenv/1.2.3/shims:/home/shahed/.pyenv/shims:/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin:/home/shahed/.rbenv/shims:/home/shahed/.rbenv/bin:/home/shahed/.cargo/bin:/home/shahed/.nvm/versions/node/v18.12.1/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin:/opt/cli/sencha/cmd:/opt/cli/sencha/cmd:/home/shahed/.local/share/JetBrains/Toolbox/scripts:/home/shahed/.local/share/JetBrains/Toolbox/scripts" | |||
ExecStart=/usr/local/bin/ollama serve | |||
Restart=always | |||
RestartSec=3 | |||
[Install] | |||
WantedBy=default.target | |||
INI | |||
</syntaxhighlight> | |||
<syntaxhighlight lang='bash'> | |||
cat <<'EXE' | sudo bash | |||
systemctl daemon-reload | |||
systemctl restart ollama.service | |||
systemctl status ollama.service | |||
EXE | |||
</syntaxhighlight> | |||
<syntaxhighlight lang='bash'> | |||
cat <<'EXE' | sudo bash | |||
ufw allow from 10.19.80.0/22 to any port 11434 proto tcp | |||
ufw allow from 10.20.0.0/24 to any port 11434 proto tcp | |||
ufw allow from 10.20.13.0/24 to any port 11434 proto tcp | |||
ufw allow from 10.20.40.0/24 to any port 11434 proto tcp | |||
ufw allow from 192.168.49.2/32 to any port 11434 proto tcp | |||
EXE | |||
telnet 10.20.40.16 11434 | |||
telnet 192.168.49.2 11434 | |||
telnet shahed-ac.local 11434 | |||
</syntaxhighlight> | |||
|} | |||
{|class='wikitable mw-collapsible mw-collapsed' | |||
!scope='col' style='text-align:left'| Ollama » Model | |||
|- | |||
|valign='top'| | |||
<syntaxhighlight lang='bash'> | |||
ollama pull gpt-oss:20b | |||
: ' | |||
pulling manifest | |||
pulling e7b273f96360: 100% ▕███████████████████████████████████████████▏ 13 GB | |||
pulling fa6710a93d78: 100% ▕███████████████████████████████████████████▏ 7.2 KB | |||
pulling f60356777647: 100% ▕███████████████████████████████████████████▏ 11 KB | |||
pulling d8ba2f9a17b3: 100% ▕███████████████████████████████████████████▏ 18 B | |||
pulling 776beb3adb23: 100% ▕███████████████████████████████████████████▏ 489 B | |||
verifying sha256 digest | |||
writing manifest | |||
success | |||
' | |||
ollama ls | |||
</syntaxhighlight> | |||
|} | |||
{|class='wikitable mw-collapsible mw-collapsed' | |||
!scope='col' style='text-align:left'| Claude » Install | |||
|- | |||
|valign='top'| | |||
<syntaxhighlight lang='bash'> | |||
curl -fsSL https://claude.ai/install.sh | bash | |||
: ' | |||
Setting up Claude Code... | |||
✔ Claude Code successfully installed! | |||
Version: 2.1.71 | |||
Location: ~/.local/bin/claude | |||
Next: Run claude --help to get started | |||
⚠ Setup notes: | |||
• Native installation exists but ~/.local/bin is not in your PATH. Run: | |||
echo 'export PATH="$HOME/.local/bin:$PATH"' >> ~/.bashrc && source ~/.bashrc | |||
✅ Installation complete! | |||
' | |||
</syntaxhighlight> | |||
<syntaxhighlight lang='bash'> | |||
cat <<'ENV' | tee -a ${HOME}/.bashrc >/dev/null | |||
# claude config | |||
export PATH="$HOME/.local/bin:$PATH" | |||
ENV | |||
source ~/.bashrc && sleep 1 | |||
claude --version | |||
</syntaxhighlight> | |||
|} | |||
{|class='wikitable mw-collapsible' | |||
!scope='col' style='text-align:left' colspan='2'| Claude » Config | |||
|- | |||
|valign='top' style='width:50%'| | |||
<syntaxhighlight lang='bash'> | |||
export ANTHROPIC_BASE_URL=http://localhost:11434 | |||
export ANTHROPIC_AUTH_TOKEN=ollama | |||
export ANTHROPIC_API_KEY='' | |||
</syntaxhighlight> | |||
|valign='top' style='width:50%'| | |||
<syntaxhighlight lang='bash'> | |||
export ANTHROPIC_BASE_URL=http://shahed-ac.local:11434 | |||
export ANTHROPIC_AUTH_TOKEN=ollama | |||
export ANTHROPIC_API_KEY='' | |||
</syntaxhighlight> | |||
|- | |||
|valign='top'| | |||
<syntaxhighlight lang='bash'> | |||
claude --model gpt-oss:20b | |||
ollama stop gpt-oss:20b | |||
ollama ps | |||
</syntaxhighlight> | |||
|valign='top'| | |||
|} | |||
|} | |||
{|class='wikitable mw-collapsible' | |||
!scope='col' style='text-align:left'| | !scope='col' style='text-align:left'| | ||
ASUS NUC 15 Pro (L10 + U7 255H + 64GB + 2TB) | ASUS NUC 15 Pro (L10 + U7 255H + 64GB + 2TB) | ||
| Line 544: | Line 741: | ||
User=ollama | User=ollama | ||
Group=ollama | Group=ollama | ||
# Optimizations for ASUS NUC 15 Pro (32GB Target) | |||
Environment="OLLAMA_NUM_CTX=32768" | |||
Environment="OLLAMA_NUM_GPU=999" | |||
Environment="OLLAMA_VULKAN=1" | |||
# Intel optimizations if available | # Intel optimizations if available | ||
| Line 584: | Line 786: | ||
|} | |} | ||
{|class='wikitable mw-collapsible mw-collapsed' | {|class='wikitable mw-collapsible mw-collapsed' | ||
!scope='col' style='text-align:left'| Ollama » Model | !scope='col' style='text-align:left' colspan='2'| Ollama » Model | ||
|- | |- | ||
|valign='top'| | |valign='top' style='width:50%'| | ||
<syntaxhighlight lang='bash'> | <syntaxhighlight lang='bash'> | ||
ollama pull gpt-oss:20b | ollama pull gpt-oss:20b | ||
| Line 600: | Line 802: | ||
success | success | ||
' | ' | ||
ollama ls | |||
</syntaxhighlight> | |||
|valign='top' style='width:50%'| | |||
<syntaxhighlight lang='bash'> | |||
ollama pull qwen3-coder:30b | |||
: ' | |||
pulling manifest | |||
pulling 1194192cf2a1: 100% ▕███████████████████████████████████████████▏ 18 GB | |||
pulling d18a5cc71b84: 100% ▕███████████████████████████████████████████▏ 11 KB | |||
pulling 69aa441ea44f: 100% ▕███████████████████████████████████████████▏ 148 B | |||
pulling 24a94682582c: 100% ▕███████████████████████████████████████████▏ 542 B | |||
verifying sha256 digest | |||
writing manifest | |||
success | |||
' | |||
ollama ls | ollama ls | ||
</syntaxhighlight> | </syntaxhighlight> | ||
| Line 655: | Line 873: | ||
</syntaxhighlight> | </syntaxhighlight> | ||
|valign='top'| | |||
|- | |||
|valign='top'| | |valign='top'| | ||
<syntaxhighlight lang='bash'> | <syntaxhighlight lang='bash'> | ||
claude --model qwen3-coder:30b | |||
claude --model gpt-oss:20b | claude --model gpt-oss:20b | ||
ollama stop | </syntaxhighlight> | ||
|valign='top'| | |||
<syntaxhighlight lang='bash'> | |||
ollama stop qwen3-coder:30b | |||
ollama stop gpt-oss:20b | |||
</syntaxhighlight> | </syntaxhighlight> | ||
|} | |} | ||
| Line 674: | Line 899: | ||
* [https://ollama.com/search AI » Ollama » Search] | * [https://ollama.com/search AI » Ollama » Search] | ||
* [https://github.com/exo-explore/exo AI » Cluster » Exo] | * [https://github.com/exo-explore/exo AI » Cluster » Exo] | ||
* [https://developers.openai.com/codex/cli/ AI » Codex CLI] | |||
* [https://ollama.com/ AI » Ollama] | * [https://ollama.com/ AI » Ollama] | ||
Latest revision as of 04:14, 9 March 2026
curl -fsSL https://ollama.com/install.sh | sh
ollama pull gpt-oss:20b
ollama --version
ollama ls
curl -fsSL https://claude.ai/install.sh | bash
ollama launch claude --model gpt-oss:20b
|
export ANTHROPIC_BASE_URL=http://localhost:11434
export ANTHROPIC_AUTH_TOKEN=ollama
export ANTHROPIC_API_KEY=''
export OLLAMA_NUM_CTX=32768
export OLLAMA_KEEP_ALIVE=5m
claude --model gpt-oss:20b
| ||||
sudo usermod -aG render,video ${USER}
echo 'id -nG'|sudo -i -u ${USER} bash
newgrp render
|
|||||
| |||||
Optimization
|
Optimization | |||||||||||||||||||||||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
|
Installation
|
Installation | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| ||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
References
|
References | ||
|---|---|---|