PyTorch ROCm nightly for Windows
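# Installs the ROCm nightly build of PyTorch for the gfx1151 target into a local uv-managed
# Python 3.12 virtual environment (.venv-nightly) and runs a short GPU smoke test.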
# Stop on any error
$ErrorActionPreference = "Stop"
Write-Host "=== Installing ROCm Nightly PyTorch (gfx1151) in current directory ==="
# Locate uv
$uvExe = (Get-Command uv -ErrorAction SilentlyContinue | Select-Object -ExpandProperty Source)
if (-not $uvExe) { throw "uv not found. Please install with: winget install astral-sh.uv" }
# Helper that forwards all remaining arguments to the uv executable
function Run-UV { param([Parameter(ValueFromRemainingArguments = $true)] $UvArgs); & $uvExe @UvArgs }
# Ensure Python 3.12 + venv
Run-UV python install 3.12
if (-not (Test-Path ".venv-nightly")) {
    Write-Host "Creating uv-managed Python 3.12 virtual environment (.venv-nightly)..."
    Run-UV venv --python=3.12 .venv-nightly
}
& .\.venv-nightly\Scripts\Activate.ps1
# ROCm nightly index
$indexUrl = "https://rocm.nightlies.amd.com/v2/gfx1151/"
Write-Host "Installing ROCm runtime + PyTorch packages from nightly index..."
Run-UV pip install --upgrade pip   # optional: installs pip into the venv; the installs below go through uv itself
Run-UV pip install --index-url $indexUrl "rocm[libraries,devel]" --prerelease=allow
Run-UV pip install --index-url $indexUrl torch torchvision torchaudio --prerelease=allow
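# Optional sanity check (a minimal sketch, not part of the original gist): confirm which torch
# wheel uv resolved from the nightly index; the exact version string varies from nightly to nightly.
Run-UV pip show torch | Select-String -Pattern "^(Name|Version)"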
# GPU test
$testScript = @"
import torch, numpy
print("Torch version:", torch.__version__)
print("NumPy version:", numpy.__version__)
print("CUDA available:", torch.cuda.is_available())
if torch.cuda.is_available():
print("Device count:", torch.cuda.device_count())
for i in range(torch.cuda.device_count()):
print(f"Device {i}:", torch.cuda.get_device_name(i))
x = torch.rand((3,3), device="cuda")
y = torch.mm(x, x)
print("Matrix multiply result on GPU:\n", y)
else:
print("No ROCm-compatible GPU detected.")
"@
$testFile = (Join-Path (Get-Location) "test_torch_gpu_nightly.py")
$testScript | Out-File -Encoding UTF8 $testFile
python $testFile
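# Note (assumed workflow, not part of the original gist): to reuse this environment in a new
# shell session, re-activate it before running Python:
#   .\.venv-nightly\Scripts\Activate.ps1
Write-Host "=== Done. Re-activate .venv-nightly in future sessions to use this PyTorch build. ==="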