Aleksandr Dremov (alexdremov)

@alexdremov
alexdremov / snippet.py
Created January 12, 2025 22:24
Code snippet uploaded via Python script (py)
def self_attn_fwd(...):
    # load the sample length
    seq_len = ...
    # running max of qk^T (initialized to -inf)
    m_i = tl.zeros([TILE_Q_SIZE], dtype=tl.float32) - float("inf")
    # current softmax denominator
    l_i = tl.zeros([TILE_Q_SIZE], dtype=tl.float32)
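
For context, the accumulators above implement the online-softmax bookkeeping used in flash-attention-style kernels: m_i holds the running row maximum of qk^T and l_i the running softmax denominator while keys/values are processed tile by tile. A plain PyTorch sketch of the same bookkeeping (the function name and tile size are illustrative assumptions, not part of the gist):

import torch

def online_softmax_attention(q, k, v, tile_size=128):
    # Running statistics, one per query row (mirrors m_i / l_i in the kernel)
    m_i = torch.full((q.shape[0],), float("-inf"))   # running max of qk^T rows
    l_i = torch.zeros(q.shape[0])                    # running softmax denominator
    acc = torch.zeros(q.shape[0], v.shape[1])        # unnormalized output accumulator

    for start in range(0, k.shape[0], tile_size):
        k_tile = k[start:start + tile_size]
        v_tile = v[start:start + tile_size]
        scores = q @ k_tile.T                        # qk^T for this tile

        m_new = torch.maximum(m_i, scores.max(dim=1).values)
        alpha = torch.exp(m_i - m_new)               # rescale previous partial sums
        p = torch.exp(scores - m_new[:, None])

        l_i = l_i * alpha + p.sum(dim=1)
        acc = acc * alpha[:, None] + p @ v_tile
        m_i = m_new

    return acc / l_i[:, None]

# Sanity check against the naive computation
q, k, v = (torch.randn(8, 16) for _ in range(3))
expected = torch.softmax(q @ k.T, dim=-1) @ v
torch.testing.assert_close(online_softmax_attention(q, k, v, tile_size=4), expected)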
@alexdremov
alexdremov / snippet.py
Created January 5, 2025 15:39
Code snippet uploaded via Python script (py)
row_minus_max = row - tl.max(row, axis=1, keep_dims=True)
row_minus_max = tl.where(cols_mask, row_minus_max, -float('inf'))
numerator = tl.exp(row_minus_max)
denominator = tl.sum(numerator, axis=1, keep_dims=True)
softmax_output = numerator / denominator
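
A quick NumPy illustration of the masking trick in this fragment (toy values of my own, not from the gist): out-of-range columns are forced to -inf before the exponent, so they contribute exactly zero to the denominator and the valid columns receive a proper softmax.

import numpy as np

row = np.array([1.0, 3.0, 2.0, 0.0])                 # last element is padding
cols_mask = np.array([True, True, True, False])

row_minus_max = row - row.max()
row_minus_max = np.where(cols_mask, row_minus_max, -np.inf)
numerator = np.exp(row_minus_max)
denominator = numerator.sum()
softmax_output = numerator / denominator

# Matches softmax computed on the valid columns only; padding gets probability 0
reference = np.exp(row[:3] - row[:3].max())
reference = reference / reference.sum()
assert np.allclose(softmax_output[:3], reference) and softmax_output[3] == 0.0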
@alexdremov
alexdremov / snippet.py
Created January 5, 2025 15:39
Code snippet uploaded via Python script (py)
import torch
import triton
import triton.language as tl
@triton.autotune(
    configs=[
        triton.Config(
            kwargs=dict(
                BLOCK_SIZE_ROWS=BLOCK_SIZE_ROWS,
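
The preview is cut off inside the autotune configuration. For orientation, here is a minimal, self-contained sketch of how such an autotuned row-softmax kernel could be completed; everything past the truncation point (the config values, the kernel body, the launch wrapper, and names like in_row_stride or softmax_triton) is my assumption, not the original gist.

import torch
import triton
import triton.language as tl

@triton.autotune(
    configs=[
        triton.Config(kwargs=dict(BLOCK_SIZE_ROWS=block_rows), num_warps=num_warps)
        for block_rows in (16, 32, 64)
        for num_warps in (4, 8)
    ],
    key=["n_cols"],
)
@triton.jit
def softmax_kernel(
    out_ptr, in_ptr,
    in_row_stride, out_row_stride,
    n_rows, n_cols,
    BLOCK_SIZE_ROWS: tl.constexpr,
    BLOCK_SIZE_COLS: tl.constexpr,
):
    # Each program instance handles a tile of BLOCK_SIZE_ROWS full rows
    rows = tl.program_id(axis=0) * BLOCK_SIZE_ROWS + tl.arange(0, BLOCK_SIZE_ROWS)
    cols = tl.arange(0, BLOCK_SIZE_COLS)
    rows_mask = rows < n_rows
    cols_mask = cols < n_cols
    mask = rows_mask[:, None] & cols_mask[None, :]

    row = tl.load(
        in_ptr + rows[:, None] * in_row_stride + cols[None, :],
        mask=mask, other=-float("inf"),
    )

    row_minus_max = row - tl.max(row, axis=1, keep_dims=True)
    row_minus_max = tl.where(cols_mask[None, :], row_minus_max, -float("inf"))
    numerator = tl.exp(row_minus_max)
    denominator = tl.sum(numerator, axis=1, keep_dims=True)
    softmax_output = numerator / denominator

    tl.store(
        out_ptr + rows[:, None] * out_row_stride + cols[None, :],
        softmax_output, mask=mask,
    )

def softmax_triton(x: torch.Tensor) -> torch.Tensor:
    x = x.contiguous()
    n_rows, n_cols = x.shape
    out = torch.empty_like(x)
    grid = lambda meta: (triton.cdiv(n_rows, meta["BLOCK_SIZE_ROWS"]),)
    softmax_kernel[grid](
        out, x,
        x.stride(0), out.stride(0),
        n_rows, n_cols,
        BLOCK_SIZE_COLS=triton.next_power_of_2(n_cols),
    )
    return out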
@alexdremov
alexdremov / snippet.py
Created January 5, 2025 15:39
Code snippet uploaded via Python script (py)
import torch
# Our softmax function in PyTorch land
def softmax_pytorch(x):
    # Avoid numerical instability by subtracting max
    x_max = torch.max(x, dim=-1, keepdim=True).values
    x_exp = torch.exp(x - x_max)
    return x_exp / torch.sum(x_exp, dim=-1, keepdim=True)

# Let's compile it with torch.compile
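
The gist preview ends right after the torch.compile comment. A plausible continuation (my sketch, not the original) simply wraps the function above and checks it against torch.softmax:

softmax_compiled = torch.compile(softmax_pytorch)

x = torch.randn(64, 1024, device="cuda" if torch.cuda.is_available() else "cpu")
torch.testing.assert_close(softmax_compiled(x), torch.softmax(x, dim=-1))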
@alexdremov
alexdremov / dice_metrics.py
Created October 10, 2023 18:59
SymmetricBestDICE
import numpy as np
import scipy
def ravel_image(img):
    """
    Flattens the image into a one-dimensional array, taking the batch dimension into account
    """
    assert 1 < len(img.shape) < 4
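
The preview stops before the metric itself. As a reference for what the gist title points at, here is a hedged sketch of a Symmetric Best Dice computation (pairwise Dice, best match in each direction, then the minimum); this is my illustration of the common definition, not the gist's implementation.

import numpy as np

def dice(a: np.ndarray, b: np.ndarray) -> float:
    # Dice coefficient between two boolean masks
    inter = np.logical_and(a, b).sum()
    total = a.sum() + b.sum()
    return 2.0 * inter / total if total > 0 else 1.0

def best_dice(src_masks, dst_masks) -> float:
    # Average, over source masks, of the best Dice against any destination mask
    return float(np.mean([max(dice(s, d) for d in dst_masks) for s in src_masks]))

def symmetric_best_dice(pred_masks, gt_masks) -> float:
    # Symmetric Best Dice: the lower of the two directional best-Dice scores
    return min(best_dice(pred_masks, gt_masks), best_dice(gt_masks, pred_masks))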
name: Task0 tests
on: [push]
jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [3.9]
@alexdremov
alexdremov / code_16.swift
Created January 9, 2023 09:40
Swift uses ARC to track and deallocate unused objects. Learn about the three types of reference counts and how ARC works — in this detailed post. Post: https://alexdremov.me/dive-into-swifts-memory-management/
class Person {
    let name: String
    init(name: String) { self.name = name }
    var apartment: Apartment?
    deinit { print("\(name) is being deinitialized") }
}

class Apartment {
    let unit: String
    init(unit: String) { self.unit = unit }