Created
April 2, 2026 20:20
-
-
Save Kaligraphy247/d86662492b16c2638e28b6496da785c0 to your computer and use it in GitHub Desktop.
Ollama model backup/restore tool (improved from https://gist.github.com/nahushrk/5d980e676c4f2762ca385bd6fb9498a9)
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| #!/usr/bin/env python3 | |
| # Based on: https://gist.github.com/nahushrk/5d980e676c4f2762ca385bd6fb9498a9 | |
| # | |
| # The original script broke with newer Ollama versions due to hardcoded `library` | |
| # namespace paths and a malformed tar archive structure. This rewrite fixes those | |
| # issues and adds: namespaced/custom-registry model support, a `list` command, | |
| # `--as` (import under a different name), `--force`, `--output`, tag defaulting | |
| # to `latest`, auto-detection of the Ollama base path, and safe tarfile extraction | |
| # for Python 3.12+. | |
| """ | |
| Ollama Model Manager v2 | |
| Backup and restore Ollama models as tarballs for migration between systems. | |
| Handles standard library models, namespaced models, and custom registries. | |
| Usage: | |
| List available models: | |
| python ollama_manager.py list | |
| Export a model: | |
| python ollama_manager.py export llama3.2:3b | |
| python ollama_manager.py export llama3.2 # defaults to :latest | |
| python ollama_manager.py export namespace/model:tag | |
| python ollama_manager.py export model --output /path/to/backup.tar | |
| Import a model: | |
| python ollama_manager.py import ollama_export_llama3.2_3b.tar | |
| python ollama_manager.py import backup.tar --force | |
| """ | |
| import os | |
| import sys | |
| import json | |
| import argparse | |
| import shutil | |
| import tarfile | |
| import tempfile | |
| from pathlib import Path | |
| # --------------------------------------------------------------------------- | |
| # Utilities | |
| # --------------------------------------------------------------------------- | |
def get_default_base_path():
    """Return the Ollama models base path.

    Resolution order:
      1. The ``OLLAMA_MODELS`` environment variable, which is the official
         way to relocate Ollama's model store — the previous version
         ignored it and could back up the wrong directory.
      2. Common install locations (per-user, then system-level Linux).
      3. The per-user default, even if it does not exist yet.
    """
    env_path = os.environ.get("OLLAMA_MODELS")
    if env_path:
        return env_path
    candidates = [
        Path.home() / ".ollama" / "models",
        Path("/usr/share/ollama/.ollama/models"),  # system-level Linux install
        Path("/var/lib/ollama/.ollama/models"),
    ]
    for p in candidates:
        if p.is_dir():
            return str(p)
    return str(candidates[0])  # fallback even if it doesn't exist yet
def format_size(n_bytes):
    """Render a byte count as a human-readable string (e.g. '1.5 GB')."""
    size = float(n_bytes)
    units = ("B", "KB", "MB", "GB", "TB")
    idx = 0
    # Scale down through the units until the value fits under 1024.
    while size >= 1024 and idx < len(units) - 1:
        size /= 1024
        idx += 1
    if size >= 1024:
        # Larger than TB: one final division and report petabytes.
        return f"{size / 1024:.1f} PB"
    return f"{size:.1f} {units[idx]}"
def copy_with_progress(src: Path, dst: Path, label: str = ""):
    """Copy *src* to *dst* (metadata preserved), logging name and size."""
    display = label if label else src.name
    n_bytes = src.stat().st_size
    print(f"  {display} ({format_size(n_bytes)}) ... ", end="", flush=True)
    shutil.copy2(src, dst)
    print("done")
def safe_extractall(tar: tarfile.TarFile, path: Path):
    """Extract *tar* into *path*, safely on Python 3.12+ and older versions.

    Python 3.12 added the ``filter`` keyword (PEP 706); ``filter="data"``
    blocks path traversal and other hostile tar members. Older interpreters
    raise TypeError on the unknown keyword, so fall back to the legacy call.
    """
    try:
        tar.extractall(path, filter="data")
    except TypeError:
        # Pre-3.12 interpreter without extraction filters.
        tar.extractall(path)
| # --------------------------------------------------------------------------- | |
| # Model spec parsing | |
| # --------------------------------------------------------------------------- | |
def parse_model_spec(spec: str):
    """
    Split a model spec string into (registry, namespace, name, tag).

    Accepted forms:
        model                →  registry.ollama.ai / library / model / latest
        model:tag            →  registry.ollama.ai / library / model / tag
        ns/model:tag         →  registry.ollama.ai / ns / model / tag
        reg.io/ns/model:tag  →  reg.io / ns / model / tag
    """
    # A colon only counts as a tag separator when it appears in the final
    # path segment — this keeps registry ports (e.g. "host:5000/ns/m") intact.
    _, _, final_segment = spec.rpartition("/")
    if ":" in final_segment:
        name_part, _, tag = spec.rpartition(":")
    else:
        name_part, tag = spec, "latest"
    segments = name_part.split("/")
    if len(segments) == 1:
        return "registry.ollama.ai", "library", segments[0], tag
    if len(segments) == 2:
        return "registry.ollama.ai", segments[0], segments[1], tag
    # Three or more segments: the first is the registry host; everything
    # past the namespace is treated as the (possibly nested) model name.
    return segments[0], segments[1], "/".join(segments[2:]), tag
| # --------------------------------------------------------------------------- | |
| # List | |
| # --------------------------------------------------------------------------- | |
def list_models(base_path: str):
    """Print every model found under *base_path* along with its total size."""
    manifests_root = Path(base_path) / "manifests"
    if not manifests_root.is_dir():
        print(f"No models found — manifests directory does not exist: {manifests_root}")
        return
    entries = []
    for manifest in sorted(manifests_root.rglob("*")):
        if not manifest.is_file():
            continue
        parts = manifest.relative_to(manifests_root).parts
        if len(parts) < 3:
            continue  # unexpected layout, skip
        # Path layout is manifests/<registry>/<namespace>/<name...>/<tag>.
        registry, namespace = parts[0], parts[1]
        tag = parts[-1]
        name = "/".join(parts[2:-1]) if len(parts) > 3 else parts[2]
        # Shorten the display name for the default registry/namespace.
        if registry == "registry.ollama.ai" and namespace == "library":
            display = f"{name}:{tag}"
        elif registry == "registry.ollama.ai":
            display = f"{namespace}/{name}:{tag}"
        else:
            display = f"{registry}/{namespace}/{name}:{tag}"
        # Total size = all layer blobs plus the config blob.
        try:
            manifest_json = json.loads(manifest.read_text())
            total = sum(l.get("size", 0) for l in manifest_json.get("layers", []))
            total += manifest_json.get("config", {}).get("size", 0)
            pretty_size = format_size(total)
        except Exception:
            pretty_size = "?"  # unreadable/corrupt manifest: still list the model
        entries.append((display, pretty_size))
    if not entries:
        print("No models found.")
        return
    print(f"Models in {base_path}:\n")
    col = max(len(e[0]) for e in entries) + 4
    for display, pretty_size in entries:
        print(f"  {display:<{col}}{pretty_size}")
| # --------------------------------------------------------------------------- | |
| # Export | |
| # --------------------------------------------------------------------------- | |
def export_model(spec: str, base_path: str, output: str = None, force: bool = False):
    """Export an Ollama model to a tarball.

    Args:
        spec: Model spec, e.g. "llama3.2:3b" or "ns/model:tag".
        base_path: Ollama models directory containing manifests/ and blobs/.
        output: Optional output tarball path; defaults to
            ollama_export_<name>_<tag>.tar in the current directory.
        force: If True, overwrite an existing output file without prompting.

    Exits the process with status 1 when the manifest or any blob is
    missing, and with status 0 when the user declines to overwrite.
    """
    base = Path(base_path).resolve()
    print(f"Base path: {base}")
    registry, namespace, name, tag = parse_model_spec(spec)
    # Manifest lives at manifests/<registry>/<namespace>/<name>/<tag>.
    manifest_path = base / "manifests" / registry / namespace / name / tag
    if not manifest_path.is_file():
        # Try to give a helpful hint
        _suggest_models(base / "manifests", name, tag)
        print(f"Error: Manifest not found: {manifest_path}")
        sys.exit(1)
    print(f"Manifest: {manifest_path.relative_to(base)}")
    manifest_json = json.loads(manifest_path.read_text())
    # Collect digests from config + all layers
    digests = set()
    config = manifest_json.get("config", {})
    if "digest" in config:
        digests.add(config["digest"])
    for layer in manifest_json.get("layers", []):
        if "digest" in layer:
            digests.add(layer["digest"])
    print(f"Blobs to export: {len(digests)}")
    blob_dir = base / "blobs"
    blob_files: list[tuple[str, Path]] = []
    missing: list[str] = []
    for digest in digests:
        # On disk Ollama stores "sha256:abcd..." as "sha256-abcd...".
        fname = digest.replace(":", "-", 1)
        blob_path = blob_dir / fname
        if blob_path.is_file():
            blob_files.append((fname, blob_path))
        else:
            missing.append(fname)
    # Fail before writing anything if any blob is absent, so we never
    # produce a tarball that cannot be restored.
    if missing:
        print("Error: Missing blob files:")
        for m in missing:
            print(f"  {blob_dir / m}")
        sys.exit(1)
    # Build output filename
    safe_name = name.replace("/", "_")
    safe_tag = tag.replace("/", "_")
    tarball_name = output or f"ollama_export_{safe_name}_{safe_tag}.tar"
    if os.path.exists(tarball_name) and not force:
        resp = input(f"'{tarball_name}' already exists. Overwrite? (y/N): ").strip().lower()
        if resp != "y":
            print("Aborted.")
            sys.exit(0)
    # Stage everything in a temp dir so the tarball gets clean top-level
    # manifests/ and blobs/ entries (the original gist's structure was wrong).
    tmpdir = Path(tempfile.mkdtemp(prefix="ollama_export_"))
    try:
        # Manifest
        manifest_dest_dir = tmpdir / "manifests" / registry / namespace / name
        manifest_dest_dir.mkdir(parents=True, exist_ok=True)
        shutil.copy2(manifest_path, manifest_dest_dir / tag)
        print(f"Copied manifest → manifests/{registry}/{namespace}/{name}/{tag}")
        # Blobs
        blobs_dest_dir = tmpdir / "blobs"
        blobs_dest_dir.mkdir()
        for fname, blob_path in blob_files:
            copy_with_progress(blob_path, blobs_dest_dir / fname, fname)
        # Create tarball — add manifests/ and blobs/ as top-level entries
        print(f"\nCreating {tarball_name} ...")
        with tarfile.open(tarball_name, "w") as tar:
            for item in tmpdir.iterdir():
                tar.add(str(item), arcname=item.name)
        tar_size = os.path.getsize(tarball_name)
        model_size = sum(p.stat().st_size for _, p in blob_files)
        print(f"Done. {tarball_name} ({format_size(tar_size)} / {format_size(model_size)} uncompressed)")
    finally:
        # Always remove the staging directory, even on failure.
        shutil.rmtree(tmpdir)
def _suggest_models(manifests_root: Path, name: str, tag: str):
    """Print manifests whose path contains *name* or *tag*, as a lookup hint."""
    if not manifests_root.is_dir():
        return
    candidates = []
    for entry in manifests_root.rglob("*"):
        if entry.is_file() and (name in entry.parts or tag in entry.parts):
            candidates.append(entry)
    if not candidates:
        return
    print("Possible matches:")
    for candidate in candidates:
        print(f"  {candidate.relative_to(manifests_root)}")
| # --------------------------------------------------------------------------- | |
| # Import | |
| # --------------------------------------------------------------------------- | |
def import_model(tarball_path: str, base_path: str, force: bool = False, rename_as: str = None):
    """Import a model from a tarball into the Ollama models directory.

    Args:
        tarball_path: Path to a tarball previously produced by export_model
            (must contain top-level manifests/ and blobs/ directories).
        base_path: Destination Ollama models directory.
        force: If True, overwrite existing manifests/blobs without prompting.
        rename_as: Optional model spec (e.g. "my-model:latest") to install
            the model under instead of its original name.

    Exits with status 1 on a malformed tarball, and 0 when the user
    declines to overwrite an existing manifest.
    """
    base = Path(base_path).resolve()
    tarball = Path(tarball_path)
    if not tarball.is_file():
        print(f"Error: Tarball not found: {tarball_path}")
        sys.exit(1)
    print(f"Base path: {base}")
    print(f"Tarball: {tarball}")
    if rename_as:
        print(f"Importing as: {rename_as}")
    # Pre-parse the rename target so we fail early before extracting
    rename_parts = parse_model_spec(rename_as) if rename_as else None
    tmpdir = Path(tempfile.mkdtemp(prefix="ollama_import_"))
    try:
        print("Extracting ...")
        # safe_extractall guards against path traversal on Python 3.12+.
        with tarfile.open(tarball_path, "r") as tar:
            safe_extractall(tar, tmpdir)
        # ---- Manifests ----
        manifests_root = tmpdir / "manifests"
        if not manifests_root.is_dir():
            print("Error: Tarball is missing a 'manifests' directory.")
            sys.exit(1)
        manifest_files = [f for f in manifests_root.rglob("*") if f.is_file()]
        if not manifest_files:
            print("Error: No manifest files found inside 'manifests/'.")
            sys.exit(1)
        # A rename target is a single (registry, namespace, name, tag), so it
        # cannot be applied unambiguously to a multi-model archive.
        if rename_parts and len(manifest_files) > 1:
            print("Error: --as cannot be used with tarballs that contain multiple models.")
            sys.exit(1)
        for src in manifest_files:
            if rename_parts:
                reg, ns, nm, tg = rename_parts
                dest = base / "manifests" / reg / ns / nm / tg
                display_rel = Path(reg) / ns / nm / tg
            else:
                # Preserve the manifest's original registry/namespace layout.
                rel = src.relative_to(manifests_root)
                dest = base / "manifests" / rel
                display_rel = rel
            dest.parent.mkdir(parents=True, exist_ok=True)
            if dest.exists() and not force:
                resp = input(f"Manifest '{display_rel}' already exists. Overwrite? (y/N): ").strip().lower()
                if resp != "y":
                    print("Aborted.")
                    sys.exit(0)
            shutil.copy2(src, dest)
            print(f"Manifest → {display_rel}")
        # ---- Blobs ----
        blobs_src = tmpdir / "blobs"
        if not blobs_src.is_dir():
            print("Error: Tarball is missing a 'blobs' directory.")
            sys.exit(1)
        blobs_dest = base / "blobs"
        blobs_dest.mkdir(parents=True, exist_ok=True)
        for blob_file in sorted(blobs_src.iterdir()):
            if not blob_file.is_file():
                continue
            dest = blobs_dest / blob_file.name
            if dest.exists():
                if force:
                    copy_with_progress(blob_file, dest, blob_file.name)
                else:
                    # Blobs are content-addressed; if the file exists it's identical.
                    print(f"  Skipping existing blob: {blob_file.name}")
            else:
                copy_with_progress(blob_file, dest, blob_file.name)
        print("\nImport complete.")
    finally:
        # Always clean up the extraction staging directory.
        shutil.rmtree(tmpdir)
| # --------------------------------------------------------------------------- | |
| # CLI | |
| # --------------------------------------------------------------------------- | |
def main():
    """Parse command-line arguments and dispatch to list/export/import."""
    parser = argparse.ArgumentParser(
        description="Ollama Model Manager — backup and restore models as tarballs.",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        # Reuse the module docstring (usage examples) as the extended help text.
        epilog=__doc__,
    )
    parser.add_argument(
        "--base-path",
        default=get_default_base_path(),
        metavar="PATH",
        help="Ollama models directory (auto-detected by default)",
    )
    parser.add_argument(
        "--force", "-f",
        action="store_true",
        help="Overwrite existing files without prompting",
    )
    sub = parser.add_subparsers(dest="command", required=True)
    # list
    sub.add_parser("list", help="List available models and their sizes.")
    # export
    ep = sub.add_parser("export", help="Export a model to a tarball.")
    ep.add_argument(
        "model_spec",
        metavar="MODEL",
        help="Model spec: model, model:tag, namespace/model:tag",
    )
    ep.add_argument(
        "--output", "-o",
        metavar="FILE",
        help="Output tarball path (default: ollama_export_<name>_<tag>.tar)",
    )
    # import
    ip = sub.add_parser("import", help="Import a model from a tarball.")
    ip.add_argument(
        "tarball",
        metavar="FILE",
        help="Path to the tarball to import",
    )
    ip.add_argument(
        "--as",
        metavar="MODEL",
        # "as" is a Python keyword, so store the value under rename_as.
        dest="rename_as",
        help="Install under a different name (e.g. my-model:latest)",
    )
    args = parser.parse_args()
    if args.command == "list":
        list_models(args.base_path)
    elif args.command == "export":
        # getattr guards: each attribute only exists on its own subcommand.
        export_model(args.model_spec, args.base_path, getattr(args, "output", None), args.force)
    elif args.command == "import":
        import_model(args.tarball, args.base_path, args.force, getattr(args, "rename_as", None))


if __name__ == "__main__":
    main()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment