Skip to content

Instantly share code, notes, and snippets.

View asomoza's full-sized avatar

Álvaro Somoza asomoza

View GitHub Profile
@asomoza
asomoza / hidream_24_128_simple.py
Created April 17, 2025 13:12
HiDream with 24GB GPU and 128GB of RAM, simple
# Fragment of a minimal HiDream-I1 inference script (gist: hidream_24_128_simple.py).
# Only the setup section is visible here; the pipeline construction and
# generation call presumably follow below this excerpt.
import torch
# NOTE(review): FlowMatchLCMScheduler is imported but not used in the visible
# lines — presumably swapped into the pipeline further down; confirm in the full gist.
from diffusers import FlowMatchLCMScheduler, HiDreamImagePipeline
# Llama model/tokenizer imports — presumably loaded as HiDream's LLM text
# encoder later in the script (not shown in this fragment).
from transformers import LlamaForCausalLM, PreTrainedTokenizerFast
# Run on the first CUDA device.
device = torch.device("cuda:0")
# Hub repo for the HiDream-I1 "Dev" checkpoint.
repo_id = "HiDream-ai/HiDream-I1-Dev"
# Hub repo for the Llama 3.1 8B Instruct model (gated; requires HF access approval).
llama_repo = "meta-llama/Llama-3.1-8B-Instruct"
# bfloat16 keeps memory within the 24 GB GPU budget mentioned in the gist title.
torch_dtype = torch.bfloat16
prompt = "Ultra-realistic, high-quality photo of an anthropomorphic capybara with a tough, streetwise attitude, wearing a worn black leather jacket, dark sunglasses, and ripped jeans. The capybara is leaning casually against a gritty urban wall covered in vibrant graffiti. Behind it, in bold, dripping yellow spray paint, the word “HuggingFace” is scrawled in large street-art style letters. The scene is set in a dimly lit alleyway with moody lighting, scattered trash, and an edgy, rebellious vibe — like a character straight out of an underground comic book."
@asomoza
asomoza / test_24_64_advanced.py
Last active April 17, 2025 01:16
Run HiDream on 24GB GPU and 128GB of RAM
import gc
import threading
import time
import psutil
import torch
from diffusers import HiDreamImagePipeline
from transformers import (
CLIPTextModelWithProjection,
CLIPTokenizer,
import gc
import torch
from transformers import (
BitsAndBytesConfig as BitsAndBytesConfig,
)
from transformers import (
CLIPTextModelWithProjection,
CLIPTokenizer,
LlamaForCausalLM,
import torch
from optimum.quanto import QuantizedDiffusersModel, freeze, qfloat8, quantize
from diffusers import FluxPipeline, FluxTransformer2DModel
class QuantizedFluxTransformer2DModel(QuantizedDiffusersModel):
    """Quantized wrapper for the Flux transformer via optimum-quanto.

    Subclassing ``QuantizedDiffusersModel`` and setting ``base_class`` is the
    optimum-quanto convention for declaring which diffusers model class the
    quantized weights belong to, so the wrapper can save/reload the quantized
    model. No extra behavior is added here.
    """

    # Underlying diffusers model class that this quantized wrapper represents.
    base_class = FluxTransformer2DModel
@asomoza
asomoza / gist:2a7514caceffdbc28f11da5e7f74561c
Created December 31, 2023 05:31
dreambooth lora training
#!/usr/bin/env python
# coding=utf-8
# Copyright 2023 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#