Ollama icon

Ollama 0.11.0

Get up and running with large language models locally

Installation

System-wide:

{
  # Install system-wide via the NixOS configuration; the package comes from
  # the nix-casks flake output for the current system.
  environment.systemPackages = with nix-casks.packages.${system}; [
    ollama-app # Ollama desktop app (macOS .app bundle plus `ollama` CLI symlink)
  ];
}

Home Manager:

{
  # Install per-user via Home Manager; the package comes from the
  # nix-casks flake output for the current system.
  home.packages = with nix-casks.packages.${system}; [
    ollama-app # Ollama desktop app (macOS .app bundle plus `ollama` CLI symlink)
  ];
}

Package Definition

{
  "pname": "ollama-app",
  "version": "0.11.0",
  "src": {
    "url": "https://github.com/ollama/ollama/releases/download/v0.11.0/Ollama-darwin.zip",
    "sha256": "7070ea9e45f6f867f6e2678f02d0c41540eb74ad76bb41e582b8a6e8f89853c1"
  },
  "installPhase": [
    "mkdir -p \"$out/Applications/Ollama.app\" && cp -r \"Ollama.app\" \"$out/Applications\"",
    "mkdir -p \"$out/bin\" && ln -s \"$out/Applications/Ollama.app/Contents/Resources/ollama\" \"$out/bin/ollama\""
  ],
  "meta": {
    "description": "Get up and running with large language models locally",
    "homepage": "https://ollama.com/"
  }
}