import os
import json
import numpy as np
import coremltools as ct
from transformers import CLIPTokenizer
# 1. Load labels (one category per line)
with open("stats.txt", "r") as f:
    categories = [line.strip() for line in f if line.strip()]

# 2. Load CLIP-compatible tokenizer
tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-base-patch32")

# 3. Process embeddings for each MobileCLIP text-encoder variant.
# The original snippet references an undefined `variant` and a bare `continue`,
# so it presumably ran inside a loop like this one; the variant-to-package
# mapping below is an assumption (only the "blt" package appears in the gist).
packages = {"blt": "mobileclip_blt_text.mlpackage"}
for variant, package in packages.items():
    model_path = os.path.join(
        package,
        "Data",
        "com.apple.CoreML",
        "model.mlmodel",
    )
    if not os.path.isfile(model_path):
        print(f"❌ model.mlmodel not found for {variant}")
        continue

    print(f"📦 Loading model for {variant}: {model_path}")
    model = ct.models.MLModel(model_path)

    entries = []
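    # --- Hedged continuation sketch (not part of the original gist) ---------
    # Shows one plausible way to fill `entries`: tokenize each category with
    # the CLIP tokenizer, run it through the Core ML text encoder, and store
    # the normalised embedding. The Core ML input feature name ("input_ids"),
    # the single-output assumption, and the output filename are assumptions;
    # inspect model.get_spec() for the real feature names.
    for category in categories:
        tokens = tokenizer(
            category,
            padding="max_length",
            max_length=77,  # CLIP's standard text context length
            truncation=True,
            return_tensors="np",
        )
        input_ids = tokens["input_ids"].astype(np.int32)
        outputs = model.predict({"input_ids": input_ids})
        embedding = np.asarray(list(outputs.values())[0]).flatten()
        embedding = embedding / np.linalg.norm(embedding)  # unit length, so dot products act as cosine similarity
        entries.append({"label": category, "embedding": embedding.tolist()})

    # Persist one JSON file per variant (assumed filename)
    with open(f"{variant}_text_embeddings.json", "w") as out:
        json.dump(entries, out)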