Created January 17, 2025 18:01
sub 3 minutes
import os
import sys
with open(sys.argv[0]) as f:
    code = f.read() # read the code of this file ASAP, for logging
os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True"
import time
import uuid
from dataclasses import dataclass
from functools import lru_cache, partial
from itertools import cycle, islice
from pathlib import Path

import torch
import torch._inductor.config as config
import torch.distributed as dist
import torch.nn.functional as F
from torch import Tensor, nn
# Use of FlexAttention contributed by @KoszarskyB
from torch.nn.attention.flex_attention import BlockMask, flex_attention

config.coordinate_descent_tuning = True

# -----------------------------------------------------------------------------
# Custom operators

@torch.library.custom_op("nanogpt::mm", mutates_args=())
def mm_op(x: Tensor, w: Tensor, x_s: float, w_s: float, grad_s: float) -> tuple[Tensor, Tensor, Tensor]:
    @torch.compile
    def impl(x: Tensor, w: Tensor):
        assert x.is_contiguous() and w.is_contiguous()
        x_f8 = x.mul(x_s).to(torch.float8_e4m3fn)
        w_f8 = w.mul(w_s).to(torch.float8_e4m3fn)
        out = torch._scaled_mm(
            x_f8,
            w_f8.t(),
            out_dtype=torch.bfloat16,
            scale_a=x.new_tensor(1 / x_s, dtype=torch.float32),
            scale_b=x.new_tensor(1 / w_s, dtype=torch.float32),
            use_fast_accum=True,
        )
        return out, x_f8, w_f8
    return impl(x, w)

@mm_op.register_fake
def _(x: Tensor, w: Tensor, *_):
    assert x.ndim == w.ndim == 2
    assert x.shape[1] == w.shape[1]
    assert x.device == w.device
    assert x.is_contiguous() and w.is_contiguous()
    return x @ w.t(), x.to(torch.float8_e4m3fn), w.to(torch.float8_e4m3fn)
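
# A note on the scales above (reader's sketch, not executed during training):
# float8_e4m3fn spans roughly [-448, 448], so inputs are pre-multiplied by
# x_s/w_s to land in that range, and _scaled_mm divides the scales back out
# via scale_a/scale_b. A minimal standalone example of the same pattern,
# assuming a GPU with FP8 support (e.g. H100) -- hypothetical shapes, not
# taken from this file:
#   x = torch.randn(16, 64, device="cuda")
#   w = torch.randn(32, 64, device="cuda")
#   out = torch._scaled_mm(
#       (x * 2.0).to(torch.float8_e4m3fn),
#       (w * 32.0).to(torch.float8_e4m3fn).t(),
#       scale_a=torch.tensor(0.5, device="cuda"),
#       scale_b=torch.tensor(1 / 32.0, device="cuda"),
#       out_dtype=torch.bfloat16,
#   )  # ~= x @ w.t(), accumulated on the FP8 tensor cores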
@torch.library.custom_op("nanogpt::mm_backward", mutates_args=()) | |
def mm_backward_op(g: Tensor, x_f8: Tensor, w_f8: Tensor, x_s: float, w_s: float, grad_s: float) -> tuple[Tensor, Tensor]: | |
@torch.compile | |
def impl(grad: Tensor, x_f8: Tensor, w_f8: Tensor): | |
assert grad.is_contiguous() | |
x_inv_s = grad.new_tensor(1 / x_s, dtype=torch.float32) | |
w_inv_s = grad.new_tensor(1 / w_s, dtype=torch.float32) | |
grad_inv_s = grad.new_tensor(1 / grad_s, dtype=torch.float32) | |
grad_f8 = grad.mul(grad_s).to(torch.float8_e5m2) | |
grad_x = torch._scaled_mm( | |
grad_f8, | |
w_f8.t().contiguous().t(), | |
out_dtype=torch.bfloat16, | |
scale_a=grad_inv_s, | |
scale_b=w_inv_s, | |
use_fast_accum=False, | |
) | |
# faster than grad_f8_t @ x_f8, for (d_out, d_in) == (50304, 768) | |
grad_w = torch._scaled_mm( | |
x_f8.t().contiguous(), | |
grad_f8.t().contiguous().t(), | |
out_dtype=torch.float32, | |
scale_a=x_inv_s, | |
scale_b=grad_inv_s, | |
use_fast_accum=False, | |
).t() | |
return grad_x, grad_w | |
return impl(g, x_f8, w_f8) | |
@mm_backward_op.register_fake | |
def _(g: Tensor, x_f8: Tensor, w_f8: Tensor, *_): | |
return x_f8.to(torch.bfloat16), w_f8.to(torch.float32) | |
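
# Why e5m2 for gradients while activations/weights use e4m3 (a reading of the
# code, not original commentary): e5m2 trades one mantissa bit for a much
# wider exponent range (max finite value 57344 vs. 448), and gradient
# magnitudes vary far more than activation or weight magnitudes. The
# .t().contiguous().t() dances above only change memory layout, picking the
# row-/column-major combinations torch._scaled_mm accepts without extra
# copies inside the matmul itself.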
def backward(ctx, grad_out: Tensor, *_):
    x_f8, w_f8 = ctx.saved_tensors
    x_s, w_s, grad_s = ctx.scales
    grad_x, grad_w = torch.ops.nanogpt.mm_backward(
        grad_out, x_f8, w_f8, x_s, w_s, grad_s
    )
    return grad_x, grad_w, None, None, None

def setup_context(ctx: torch.autograd.function.FunctionCtx, inputs, output):
    *_, x_s, w_s, grad_s = inputs
    _, x_f8, w_f8 = output
    ctx.save_for_backward(x_f8, w_f8)
    ctx.scales = x_s, w_s, grad_s
    ctx.set_materialize_grads(False)

mm_op.register_autograd(backward, setup_context=setup_context)

def lm_head(x: Tensor, w: Tensor) -> Tensor:
    _x = x.flatten(0, -2)
    out: Tensor = torch.ops.nanogpt.mm(_x, w, x_s=2.0, w_s=32.0, grad_s=2.0**29)[0]
    return out.reshape(*x.shape[:-1], -1)
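
# The constants here look tuned rather than derived: x_s=2.0 and w_s=32.0 fit
# the bfloat16 activations and small lm_head weights into e4m3's range, while
# grad_s=2**29 lifts the tiny per-token head gradients into e5m2's range
# before the cast in mm_backward_op. All three are powers of two, so the
# scale/unscale round trip is exact and only the FP8 cast itself rounds.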
# -----------------------------------------------------------------------------
# Muon optimizer

@torch.compile
def zeropower_via_newtonschulz5(G: Tensor, steps: int) -> Tensor:
    """
    Newton-Schulz iteration to compute the zeroth power / orthogonalization of G. We opt to use a
    quintic iteration whose coefficients are selected to maximize the slope at zero. For the purpose
    of minimizing steps, it turns out to be empirically effective to keep increasing the slope at
    zero even beyond the point where the iteration no longer converges all the way to one everywhere
    on the interval. This iteration therefore does not produce UV^T but rather something like US'V^T
    where S' is diagonal with S'_{ii} ~ Uniform(0.5, 1.5), which turns out not to hurt model
    performance at all relative to UV^T, where USV^T = G is the SVD.
    """
    a, b, c = (3.4445, -4.7750, 2.0315)
    X = G.bfloat16()
    if G.size(-2) > G.size(-1):
        X = X.mT
    # Ensure spectral norm is at most 1
    X = X / (X.norm(dim=(-2, -1), keepdim=True) + 1e-7)
    # Perform the NS iterations
    for _ in range(steps):
        A = X @ X.mT
        B = b * A + c * A @ A # adapted from suggestion by @jxbz, @leloykun, and @YouJiacheng
        X = a * X + B @ X
    if G.size(-2) > G.size(-1):
        X = X.mT
    return X
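
# Sanity sketch (illustrative only; never runs during training): per the
# docstring, the output should be close to orthogonal, with singular values
# scattered around 1 rather than exactly 1, e.g.
#   G = torch.randn(768, 768, device="cuda")
#   S = torch.linalg.svdvals(zeropower_via_newtonschulz5(G, steps=5).float())
#   # expect all of S to lie roughly within the (0.5, 1.5) band noted above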
class Muon(torch.optim.Optimizer):
    """
    Muon - MomentUm Orthogonalized by Newton-schulz

    Muon internally runs standard SGD-momentum, and then performs an orthogonalization post-
    processing step, in which each 2D parameter's update is replaced with the nearest orthogonal
    matrix. To efficiently orthogonalize each update, we use a Newton-Schulz iteration, which has
    the advantage that it can be stably run in bfloat16 on the GPU.

    Some warnings:
    - This optimizer assumes that all parameters passed in are 2D.
    - It should not be used for the embedding layer, the final fully connected layer, or any {0,1}-D
      parameters; those should all be optimized by a standard method (e.g., AdamW).
    - To use it with 4D convolutional filters, it works well to just flatten their last 3 dimensions.
    - We believe it is unlikely to work well for training with small batch size.
    - We believe it may not work well for finetuning pretrained models, but we haven't tested this.
    - We have not yet tried this optimizer for training scenarios larger than NanoGPT (124M).

    Arguments:
        lr: The learning rate used by the internal SGD.
        momentum: The momentum used by the internal SGD.
        nesterov: Whether to use Nesterov-style momentum in the internal SGD. (recommended)
        ns_steps: The number of Newton-Schulz iteration steps to use.
    """
    def __init__(self, params, lr=0.02, momentum=0.95, nesterov=True, ns_steps=5):
        defaults = dict(lr=lr, momentum=momentum, nesterov=nesterov, ns_steps=ns_steps)
        params: "list[Tensor]" = [*params]
        assert all(isinstance(p, Tensor) for p in params)
        sizes = {p.numel() for p in params}
        def create_update_buffer(size: int):
            b = torch.empty(world_size, size, dtype=torch.bfloat16, device="cuda")
            return dict(update_buffer=b, update_buffer_views=[b[i] for i in range(world_size)])
        param_groups = [
            dict(params=[p for p in params if p.numel() == size], **create_update_buffer(size)) for size in sizes]
        super().__init__(param_groups, defaults)

    @torch.no_grad()
    def step(self):
        for group in self.param_groups:
            lr = group['lr']
            momentum = group['momentum']
            nesterov = group['nesterov']
            ns_steps = group['ns_steps']
            update_buffer = group['update_buffer']
            update_buffer_views: "list[Tensor]" = group['update_buffer_views']
            # generate weight updates in distributed fashion
            params: "list[Tensor]" = group['params']
            handle = None
            params_world = None
            def update_prev():
                if params_world is None:
                    return
                assert handle is not None
                handle.wait()
                for p_world, g_world in zip(params_world, update_buffer_views):
                    p_world.add_(
                        g_world.view_as(p_world),
                        alpha=-lr * max(1, p_world.size(-2) / p_world.size(-1)) ** 0.5,
                    )
            for base_i in range(len(params))[::world_size]:
                if base_i + rank < len(params):
                    p = params[base_i + rank]
                    g = p.grad
                    assert g is not None
                    state = self.state[p]
                    if 'momentum_buffer' not in state:
                        state['momentum_buffer'] = torch.zeros_like(g)
                    buf: Tensor = state['momentum_buffer']
                    buf.lerp_(g, 1 - momentum)
                    g = g.lerp_(buf, momentum) if nesterov else buf
                    g = zeropower_via_newtonschulz5(g, steps=ns_steps).flatten()
                else:
                    g = update_buffer_views[rank]
                update_prev() # async all_gather instead of sync all_reduce by @YouJiacheng
                handle = dist.all_gather_into_tensor(update_buffer, g, async_op=True)
                params_world = params[base_i : base_i + world_size]
            update_prev()
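            # Summary of the communication pattern above (reader's note): in
            # each window of world_size parameters, rank r orthogonalizes the
            # update for params[base_i + r] only, then one async
            # all_gather_into_tensor shares all world_size updates at once.
            # update_prev() waits on the *previous* window's gather and applies
            # it, so NCCL traffic overlaps with the next Newton-Schulz run.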
# -----------------------------------------------------------------------------
# PyTorch nn.Module definitions for the GPT-2 model

def norm(x: Tensor, size: int | None = None):
    if size is None:
        size = x.size(-1)
    return F.rms_norm(x.unflatten(-1, (-1, size)), (size,)).flatten(-2)
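
# With the default size this is a plain RMSNorm over the last dimension; the
# unflatten would allow normalizing groups of `size` channels independently,
# but only the default is exercised below.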
class CastedLinear(nn.Linear):
    def __init__(self, in_features: int, out_features: int):
        super().__init__(in_features, out_features, bias=False)

    def reset_parameters(self) -> None:
        std = 0.5 * (self.in_features ** -0.5) # 0.5 is a bit better than the default 1/sqrt(3)
        bound = (3 ** 0.5) * std
        with torch.no_grad():
            self.weight.uniform_(-bound, bound)

    def forward(self, x):
        return F.linear(x, self.weight.type_as(x))

class Rotary(nn.Module):
    def __init__(self, dim: int, max_seq_len=65536):
        super().__init__()
        # half-truncate RoPE by @YouJiacheng (w/ base freq tuning)
        angular_freq = (1 / 1024) ** torch.linspace(0, 1, steps=dim//4, dtype=torch.float32)
        angular_freq = torch.cat([angular_freq, angular_freq.new_zeros(dim//4)])
        t = torch.arange(max_seq_len, dtype=torch.float32)
        theta = torch.einsum('i,j -> ij', t, angular_freq)
        self.cos = nn.Buffer(theta.cos(), persistent=False)
        self.sin = nn.Buffer(theta.sin(), persistent=False)

    def forward(self, x_BTHD: Tensor):
        assert self.cos.size(0) >= x_BTHD.size(-3)
        cos, sin = self.cos[None, :x_BTHD.size(-3), None, :], self.sin[None, :x_BTHD.size(-3), None, :]
        x1, x2 = x_BTHD.to(dtype=torch.float32).chunk(2, dim=-1)
        y1 = x1 * cos + x2 * sin
        y2 = x1 * (-sin) + x2 * cos
        return torch.cat((y1, y2), 3).type_as(x_BTHD)
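
# Net effect of the half-truncation in __init__ (reader's note): only the
# first dim//4 frequency pairs rotate with position; the remaining pairs have
# zero frequency (cos=1, sin=0) and pass through unrotated, i.e. those
# channels carry no positional signal.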
class CausalSelfAttention(nn.Module):
    def __init__(self, dim: int, num_heads: int):
        super().__init__()
        assert dim % num_heads == 0
        self.num_heads = num_heads
        std = 0.5 * (dim ** -0.5)
        bound = (3 ** 0.5) * std
        self.qkv_w = nn.Parameter(torch.empty(3, dim, dim).uniform_(-bound, bound))
        self.lambdas = nn.Parameter(torch.tensor([0.5, 0.5]))
        self.rotary = Rotary(dim // num_heads) # dim // num_heads = head_dim
        self.c_proj = CastedLinear(dim, dim)
        self.c_proj.weight.detach().zero_() # zero init suggested by @Grad62304977

    def forward(self, x: Tensor, ve: Tensor | None, block_mask: BlockMask):
        B, T = x.size(0), x.size(1) # batch size, sequence length
        assert B == 1, 'Must use batch size = 1 for FlexAttention'
        q, k, v = F.linear(x, self.qkv_w.flatten(end_dim=1).type_as(x)).view(B, T, 3 * self.num_heads, -1).chunk(3, dim=-2)
        if ve is not None:
            v = self.lambdas[0] * v + self.lambdas[1] * ve.view_as(v) # @KoszarskyB & @Grad62304977
        else: # skip mid-layers token value embeddings by @YouJiacheng
            v = self.lambdas[0] * v
        q, k = norm(q), norm(k) # QK norm @Grad62304977
        q, k = self.rotary(q), self.rotary(k)
        y = flex_attention(q.transpose(1, 2), k.transpose(1, 2), v.transpose(1, 2), block_mask=block_mask, scale=0.12)
        y = y.transpose(1, 2).contiguous().view_as(x) # re-assemble all head outputs side by side
        y = self.c_proj(y)
        return y
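
# Note on scale=0.12: with model_dim=768 and 6 heads, head_dim is 128, so the
# conventional attention scale would be 128**-0.5 ~= 0.088; 0.12 appears to
# be an empirically tuned constant for this config rather than a derived one.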
class MLP(nn.Module):
    def __init__(self, dim):
        super().__init__()
        self.c_fc = CastedLinear(dim, 4 * dim)
        self.c_proj = CastedLinear(4 * dim, dim)
        self.c_proj.weight.detach().zero_() # zero init suggested by @Grad62304977

    def forward(self, x):
        x = self.c_fc(x)
        x = F.relu(x).square() # https://arxiv.org/abs/2109.08668v2; ~1-2% better than GELU; suggested by @SKYLINEZ007 and @Grad62304977
        x = self.c_proj(x)
        return x

class Block(nn.Module):
    def __init__(self, model_dim: int, num_heads: int, layer_idx: int):
        super().__init__()
        # skip attention of blocks.7 (the 8th layer) by @YouJiacheng
        self.attn = CausalSelfAttention(model_dim, num_heads) if layer_idx != 7 else None
        self.mlp = MLP(model_dim)
        self.lambdas = nn.Parameter(torch.tensor([1., 0.]))

    def forward(self, x, ve, x0, block_mask):
        x = self.lambdas[0] * x + self.lambdas[1] * x0
        if self.attn is not None:
            x = x + self.attn(norm(x), ve, block_mask)
        x = x + self.mlp(norm(x))
        return x

class ValueEmbedding(nn.Module):
    def __init__(self, num_embeddings: int, embedding_dim: int):
        super().__init__()
        self.embed = nn.ModuleList([nn.Embedding(num_embeddings, embedding_dim) for _ in range(3)])

    def forward(self, input_seq) -> "list[Tensor | None]":
        ve = [emb(input_seq) for emb in self.embed]
        # 012 ... 012 structure on token value embeddings by @YouJiacheng, improved on @leloykun's U-net structure
        ve = [ve[0], ve[1], ve[2], None, None, None, None, None, None, ve[0], ve[1], ve[2]]
        return ve
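
# i.e. the three value embeddings feed layers 0-2 and, mirrored, layers 9-11;
# the middle six layers receive ve=None and fall back to the lambdas[0]-only
# branch in CausalSelfAttention.forward. The list length is tied to the
# 12-layer config used below.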
# -----------------------------------------------------------------------------
# The main GPT-2 model

def next_multiple_of_n(v: float | int, *, n: int):
    return next(x for x in range(n, int(v) + 1 + n, n) if x >= v)
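
# e.g. next_multiple_of_n(50257, n=128) == 50304, the padded vocab size used
# for the lm_head below; it never rounds below n (next_multiple_of_n(0, n=128)
# is 128).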
class GPT(nn.Module):
    def __init__(self, vocab_size: int, num_layers: int, num_heads: int, model_dim: int):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, model_dim)
        # token value embeddings by @KoszarskyB - inspired by @Grad62304977's value residual learning
        self.value_embeds = ValueEmbedding(vocab_size, model_dim)
        self.blocks = nn.ModuleList([Block(model_dim, num_heads, layer_idx) for layer_idx in range(num_layers)])
        # U-net design by @brendanh0gan
        self.num_encoder_layers = num_layers // 2 # Half of the layers for encoder
        self.num_decoder_layers = num_layers - self.num_encoder_layers # Remaining for decoder
        # Add learnable skip connection weights for decoder layers
        self.skip_weights = nn.Parameter(torch.ones(self.num_decoder_layers))
        # there are only 50257 unique GPT-2 tokens; we extend to the nearest multiple of 128 for efficiency.
        # suggested to me by @Grad62304977. this originates from Karpathy's experiments.
        self.lm_head = CastedLinear(model_dim, next_multiple_of_n(vocab_size, n=128))
        self.lm_head.weight.detach().zero_() # @Grad62304977

    def forward(self, input_seq: Tensor, target_seq: Tensor, sliding_window_num_blocks: Tensor):
        BLOCK_SIZE = 128
        assert input_seq.ndim == 1
        assert len(input_seq) % BLOCK_SIZE == 0
        NUM_BLOCKS = len(input_seq) // BLOCK_SIZE
        docs = (input_seq == 50256).cumsum(0)
        docs_low = docs.view(-1, BLOCK_SIZE)[:, 0].contiguous()
        docs_high = docs.view(-1, BLOCK_SIZE)[:, -1].contiguous()
        def document_causal(b, h, q_idx, kv_idx):
            causal_mask = q_idx >= kv_idx
            document_mask = docs[q_idx] == docs[kv_idx]
            return causal_mask & document_mask
        def dense_to_ordered(dense_mask: Tensor):
            num_blocks = dense_mask.sum(dim=-1, dtype=torch.int32)
            indices = dense_mask.argsort(dim=-1, descending=False, stable=True).to(torch.int32).flip(-1)
            return num_blocks[None, None].contiguous(), indices[None, None].contiguous()
        # manual block mask creation by @YouJiacheng
        def create_doc_swc_block_mask(sw_num_blks: Tensor):
            kv_idx = block_idx = torch.arange(NUM_BLOCKS, dtype=torch.int32, device="cuda")
            q_idx = block_idx[:, None]
            causal_bm = q_idx >= kv_idx
            causal_full_bm = q_idx > kv_idx
            # document_bm = (docs_low[q_idx] <= docs_high[kv_idx]) & (docs_low[kv_idx] <= docs_high[q_idx])
            document_bm = (docs_low[:, None] <= docs_high) & (docs_low <= docs_high[:, None])
            document_full_bm = (docs_low[:, None] == docs_high) & (docs_low == docs_high[:, None])
            nonzero_bm = causal_bm & document_bm
            full_bm = causal_full_bm & document_full_bm
            kv_num_blocks, kv_indices = dense_to_ordered(nonzero_bm & ~full_bm)
            full_kv_num_blocks, full_kv_indices = dense_to_ordered(full_bm)
            def build_bm(sw_num_blks):
                return BlockMask.from_kv_blocks(
                    torch.clamp_max(kv_num_blocks, torch.clamp_min(sw_num_blks - full_kv_num_blocks, 1)),
                    kv_indices,
                    torch.clamp_max(full_kv_num_blocks, sw_num_blks - 1),
                    full_kv_indices,
                    BLOCK_SIZE=BLOCK_SIZE,
                    mask_mod=document_causal,
                )
            return build_bm(sw_num_blks), build_bm(sw_num_blks // 2)
        long_bm, short_bm = create_doc_swc_block_mask(sliding_window_num_blocks)
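        # Reader's note: long_bm and short_bm share the same block indices and
        # differ only in how many kv blocks are kept (the full sliding window
        # vs. half of it); the per-layer schedule below gives most layers the
        # short mask and a couple of layers (mirrored across the U-net) the
        # long one.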
        x = x0 = norm(self.embed(input_seq)[None]) # use of norm here by @Grad62304977
        ve = self.value_embeds(input_seq)
        ve_enc, ve_dec = ve[:self.num_encoder_layers], ve[self.num_encoder_layers:]
        assert len(ve_enc) == self.num_encoder_layers and len(ve_dec) == self.num_decoder_layers
        # Store outputs for U-Net skip connections
        skip_connections = []
        # Encoder pass - process only the first half of the blocks
        block_masks = [long_bm, short_bm, short_bm, short_bm, long_bm, short_bm]
        for i in range(self.num_encoder_layers):
            x = self.blocks[i](x, ve_enc[i], x0, block_masks[i])
            skip_connections.append(x)
        # Decoder pass - process the remaining blocks with weighted skip connections
        block_masks.reverse()
        for i in range(self.num_decoder_layers):
            x = x + self.skip_weights[i] * skip_connections.pop()
            x = self.blocks[self.num_encoder_layers + i](x, ve_dec[i], x0, block_masks[i])
        x = norm(x)
        logits = lm_head(x, self.lm_head.weight) if self.training else self.lm_head(x)
        # @Grad62304977 added tanh softcapping, @KoszarskyB reduced it from 30 to 15, @YouJiacheng shifted it by +15 (2*sigmoid(2*x) = tanh(x) + 1)
        logits = 30 * torch.sigmoid(logits.float() / 7.5)
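        # Equivalent form: 15 * (tanh(logits / 15) + 1), via the identity
        # 2*sigmoid(2x) = tanh(x) + 1 with x = logits / 15 -- a smooth cap
        # into (0, 30) rather than the symmetric (-15, 15).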
        loss = F.cross_entropy(logits.view(-1, logits.size(-1)), target_seq)
        return loss
# -----------------------------------------------------------------------------
# Our own simple Distributed Data Loader

def _load_data_shard(file: Path):
    header = torch.from_file(f"{file}", False, 256, dtype=torch.int32) # header is 256 int32
    assert header[0] == 20240520, 'magic number mismatch in the data .bin file'
    assert header[1] == 1, 'unsupported version'
    num_tokens = int(header[2]) # number of tokens (claimed)
    with file.open('rb', buffering=0) as f:
        tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) # avoid pin_memory copy by @YouJiacheng
        f.seek(256 * 4)
        nbytes = f.readinto(tokens.numpy()) # avoid bytes->array copy by @YouJiacheng
        assert nbytes == 2 * num_tokens, 'number of tokens read does not match header'
    return tokens
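
# Shard layout assumed by this loader: a 256-int32 header (magic number,
# version, token count; the rest unused) followed by num_tokens uint16 GPT-2
# token ids. readinto() fills the pinned buffer directly, so the bytes are
# read exactly once on the host before the async H2D copy downstream.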
def distributed_data(filename_pattern: str, batch_size: int):
    files = sorted(Path.cwd().glob(filename_pattern))
    assert batch_size % world_size == 0
    local_batch_size = batch_size // world_size
    file_iter = cycle(files)
    tokens, pos = _load_data_shard(next(file_iter)), 0
    while True:
        if pos + batch_size + 1 >= len(tokens):
            tokens, pos = _load_data_shard(next(file_iter)), 0
        buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1]
        inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side;
        targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful.
        pos += batch_size
        yield inputs, targets
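
# Sharding scheme (reader's note): every rank scans the same shard sequence
# and the same global window of batch_size tokens, but slices out only its
# rank-th local_batch_size chunk (plus one token of lookahead for targets),
# so the ranks stay in lockstep without any cross-rank coordination.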
# -----------------------------------------------------------------------------
# int main

@dataclass
class Hyperparameters:
    # data
    train_files = 'data/fineweb10B/fineweb_train_*.bin' # input .bin to train on
    val_files = 'data/fineweb10B/fineweb_val_*.bin' # input .bin to eval validation loss on
    val_tokens = 10485760 # how many tokens of validation data? it's important to keep this fixed for consistent comparisons
    # optimization
    batch_size = 8*64*1024 # batch size in tokens
    num_iterations = 1395 # number of iterations to run
    cooldown_frac = 0.4 # fraction of training spent cooling down the learning rate
    # evaluation and logging
    val_loss_every = 125 # every how many steps to evaluate val loss? 0 for only at the end
    # implementation
    seq_len = 64*1024 # FlexAttention sequence length
    save_checkpoint = False
args = Hyperparameters()

# torchrun sets these env variables
rank = int(os.environ['RANK'])
world_size = int(os.environ['WORLD_SIZE'])
assert torch.cuda.is_available()
device = torch.device('cuda', int(os.environ['LOCAL_RANK']))
torch.cuda.set_device(device)
dist.init_process_group(backend='nccl', device_id=device)
dist.barrier()
master_process = (rank == 0) # this process will do logging, checkpointing etc.

# begin logging
def print0(s, console=False): ...
if master_process:
    run_id = uuid.uuid4()
    (logs_dir := Path("logs")).mkdir(exist_ok=True)
    logfile = logs_dir / f"{run_id}.txt"
    print(logfile.stem)
    def print0(s, console=False):
        with logfile.open("a") as f:
            # if console:
            #     print(s)
            print(s, file=f)

# begin by printing this file (the Python code)
print0(code)
print0('='*100)
# log information about the hardware/software environment this is running on
print0(f'Running Python {sys.version}')
print0(f'Running PyTorch {torch.version.__version__} compiled for CUDA {torch.version.cuda}')
def nvidia_smi():
    import subprocess # avoid top level import
    return subprocess.run(['nvidia-smi'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True).stdout
print0(nvidia_smi())
print0('='*100)

# load data
train_loader = distributed_data(args.train_files, args.batch_size)
val_loader = partial(distributed_data, args.val_files)
model = GPT(vocab_size=50257, num_layers=12, num_heads=6, model_dim=768).cuda()
for m in model.modules():
    if isinstance(m, nn.Embedding):
        m.bfloat16()
for param in model.parameters():
    dist.broadcast(param.detach(), 0)

# collect the parameters to optimize
hidden_matrix_params = [p for p in model.blocks.parameters() if p.ndim in (2, 3)]
embed_params = [model.embed.weight, *model.value_embeds.parameters()]
scalar_params = [p for p in model.parameters() if p.ndim < 2]
head_params = [model.lm_head.weight]

# init the optimizer(s)
adam_params = [dict(params=head_params, lr=0.008), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)]
optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True)
optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95)
optimizers = [optimizer1, optimizer2]

# learning rate schedule: stable then decay
def get_lr(it: int):
    t = 1 - it / args.num_iterations # time remaining in training
    assert 1 >= t >= 0
    w = min(t / args.cooldown_frac, 1.0) # 1 -> 0
    return w * 1.0 + (1 - w) * 0.1
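
# Shape of this schedule: a flat multiplier of 1.0 for the first
# (1 - cooldown_frac) of training, then a linear ramp from 1.0 down to 0.1
# over the final cooldown_frac -- with the settings above, the last ~558 of
# 1395 steps.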
schedulers = [torch.optim.lr_scheduler.LambdaLR(opt, get_lr) for opt in optimizers]

@lru_cache(1)
def sw_num_blks(window_size: int):
    return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True)
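
# lru_cache(1) makes the pinned-memory alloc + H2D copy happen only when
# window_size actually changes (the schedule below moves it across 14 sizes
# over 1395 steps, i.e. roughly every 100 steps), not on every call; a cache
# of size 1 suffices because the window only grows.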
model: nn.Module = torch.compile(model)
training_time_ms = 0
# start the clock
torch.cuda.synchronize()
t0 = time.perf_counter()
# begin training
train_steps = args.num_iterations
for step in range(train_steps + 1):
    last_step = (step == train_steps)
    # This effectively ignores the timing of the first 10 steps, which are slower for weird reasons.
    # Alternately, and slightly more correctly in terms of benchmarking, we could do 10
    # steps with dummy data first, and then re-initialize the model and reset the loader.
    if step == 10:
        training_time_ms = 0
        t0 = time.perf_counter()
    timed_steps = float('nan') if step <= 11 else (step - 10) + 1 # <= 11 to avoid bug in val

    # Linearly increase the block-wise sliding window size over training 128 -> 1792:
    # increase by @fernbear.bsky.social; block-wise by @YouJiacheng
    window_size = next_multiple_of_n(1728 * step / train_steps, n=128)

    # --------------- VALIDATION SECTION -----------------
    if (last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0)):
        # stop the clock
        torch.cuda.synchronize()
        training_time_ms += 1000 * (time.perf_counter() - t0)
        model.eval()
        val_bs = world_size * args.seq_len
        assert args.val_tokens % val_bs == 0
        val_steps = args.val_tokens // val_bs
        with torch.no_grad():
            val_loss = sum(model(x, y, sw_num_blks(window_size)) for x, y in islice(val_loader(val_bs), val_steps)) / val_steps
        dist.all_reduce(val_loss, op=dist.ReduceOp.AVG)
        print0(f'step:{step}/{train_steps} val_loss:{val_loss:.4f} train_time:{training_time_ms:.0f}ms step_avg:{training_time_ms/(timed_steps-1):.2f}ms', console=True)
        model.train()
        # start the clock again
        torch.cuda.synchronize()
        t0 = time.perf_counter()

    if last_step:
        if master_process and args.save_checkpoint:
            log = dict(step=step, code=code, model=model.state_dict(), optimizers=[opt.state_dict() for opt in optimizers])
            os.makedirs(f'logs/{run_id}', exist_ok=True)
            torch.save(log, f'logs/{run_id}/state_step{step:06d}.pt')
        # the last step only has the validation loop, so break to avoid training
        break

    # --------------- TRAINING SECTION BEGIN -----------------
    inputs, targets = next(train_loader)
    for input_seq, target_seq in zip(inputs.split(args.seq_len), targets.split(args.seq_len)):
        model(input_seq, target_seq, sw_num_blks(window_size)).backward()
    for param in model.parameters():
        dist.all_reduce(param.grad, op=dist.ReduceOp.AVG)
    # momentum warmup for Muon
    frac = min(step / 300, 1)
    for group in optimizer2.param_groups:
        group['momentum'] = (1 - frac) * 0.85 + frac * 0.95
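    # i.e. Muon's momentum ramps linearly from 0.85 to 0.95 over the first 300
    # steps -- the usual motivation being that a high momentum from step 0
    # would weight the noisy earliest gradients too heavily.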
    # step the optimizers and schedulers
    for opt, sched in zip(optimizers, schedulers):
        opt.step()
        sched.step()
    # null the gradients
    model.zero_grad(set_to_none=True)
    # logging
    approx_time = training_time_ms + 1000 * (time.perf_counter() - t0)
    print0(f'step:{step+1}/{train_steps} train_time:{approx_time:.0f}ms step_avg:{approx_time/timed_steps:.2f}ms', console=True)

print0(
    f"peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB "
    f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB"
)
dist.destroy_process_group()
====================================================================================================
Running Python 3.12.8 (main, Dec 19 2024, 14:33:20) [Clang 18.1.8 ]
Running PyTorch 2.7.0.dev20250110+cu126 compiled for CUDA 12.6
Fri Jan 17 16:54:50 2025
+---------------------------------------------------------------------------------------+
| NVIDIA-SMI 535.129.03             Driver Version: 535.129.03   CUDA Version: 12.6     |
|-----------------------------------------+----------------------+----------------------+
| GPU  Name                 Persistence-M | Bus-Id        Disp.A | Volatile Uncorr. ECC |
| Fan  Temp   Perf          Pwr:Usage/Cap |         Memory-Usage | GPU-Util  Compute M. |
|                                         |                      |               MIG M. |
|=========================================+======================+======================|
|   0  NVIDIA H100 80GB HBM3          On  | 00000000:65:02.0 Off |                    0 |
| N/A   37C    P0             119W / 700W |   7092MiB / 81559MiB |      0%      Default |
|                                         |                      |             Disabled |
+-----------------------------------------+----------------------+----------------------+
|   1  NVIDIA H100 80GB HBM3          On  | 00000000:67:02.0 Off |                    0 |
| N/A   45C    P0             129W / 700W |   3459MiB / 81559MiB |      0%      Default |
|                                         |                      |             Disabled |
+-----------------------------------------+----------------------+----------------------+
|   2  NVIDIA H100 80GB HBM3          On  | 00000000:69:02.0 Off |                    0 |
| N/A   45C    P0             123W / 700W |   3459MiB / 81559MiB |      0%      Default |
|                                         |                      |             Disabled |
+-----------------------------------------+----------------------+----------------------+
|   3  NVIDIA H100 80GB HBM3          On  | 00000000:6B:02.0 Off |                    0 |
| N/A   39C    P0             118W / 700W |   3459MiB / 81559MiB |      0%      Default |
|                                         |                      |             Disabled |
+-----------------------------------------+----------------------+----------------------+
|   4  NVIDIA H100 80GB HBM3          On  | 00000000:6F:02.0 Off |                    0 |
| N/A   38C    P0             117W / 700W |   3459MiB / 81559MiB |      0%      Default |
|                                         |                      |             Disabled |
+-----------------------------------------+----------------------+----------------------+
|   5  NVIDIA H100 80GB HBM3          On  | 00000000:71:02.0 Off |                    0 |
| N/A   45C    P0             122W / 700W |   3459MiB / 81559MiB |      0%      Default |
|                                         |                      |             Disabled |
+-----------------------------------------+----------------------+----------------------+
|   6  NVIDIA H100 80GB HBM3          On  | 00000000:73:02.0 Off |                    0 |
| N/A   45C    P0             127W / 700W |   3459MiB / 81559MiB |      0%      Default |
|                                         |                      |             Disabled |
+-----------------------------------------+----------------------+----------------------+
|   7  NVIDIA H100 80GB HBM3          On  | 00000000:75:02.0 Off |                    0 |
| N/A   38C    P0             123W / 700W |   3219MiB / 81559MiB |      0%      Default |
|                                         |                      |             Disabled |
+-----------------------------------------+----------------------+----------------------+

+---------------------------------------------------------------------------------------+
| Processes:                                                                            |
|  GPU   GI   CI        PID   Type   Process name                            GPU Memory |
|        ID   ID                                                             Usage      |
|=======================================================================================|
+---------------------------------------------------------------------------------------+
====================================================================================================
step:0/1395 val_loss:10.8258 train_time:0ms step_avg:nanms
step:1/1395 train_time:19774ms step_avg:nanms
step:2/1395 train_time:20103ms step_avg:nanms
step:3/1395 train_time:20223ms step_avg:nanms
step:4/1395 train_time:20343ms step_avg:nanms
step:5/1395 train_time:20464ms step_avg:nanms
step:6/1395 train_time:20586ms step_avg:nanms
step:7/1395 train_time:20709ms step_avg:nanms
step:8/1395 train_time:20831ms step_avg:nanms
step:9/1395 train_time:20952ms step_avg:nanms
step:10/1395 train_time:21080ms step_avg:nanms
step:11/1395 train_time:126ms step_avg:nanms
step:12/1395 train_time:249ms step_avg:nanms
step:13/1395 train_time:371ms step_avg:123.64ms
step:14/1395 train_time:493ms step_avg:123.20ms
step:15/1395 train_time:614ms step_avg:122.81ms
step:16/1395 train_time:738ms step_avg:122.95ms
step:17/1395 train_time:859ms step_avg:122.76ms
step:18/1395 train_time:981ms step_avg:122.65ms
step:19/1395 train_time:1106ms step_avg:122.86ms
step:20/1395 train_time:1230ms step_avg:122.96ms
step:21/1395 train_time:1352ms step_avg:122.94ms
step:22/1395 train_time:1474ms step_avg:122.80ms
step:23/1395 train_time:1595ms step_avg:122.68ms
step:24/1395 train_time:1718ms step_avg:122.72ms
step:25/1395 train_time:1842ms step_avg:122.79ms
step:26/1395 train_time:1964ms step_avg:122.73ms
step:27/1395 train_time:2085ms step_avg:122.65ms
step:28/1395 train_time:2209ms step_avg:122.74ms
step:29/1395 train_time:2334ms step_avg:122.83ms
step:30/1395 train_time:2458ms step_avg:122.89ms
step:31/1395 train_time:2581ms step_avg:122.88ms
step:32/1395 train_time:2704ms step_avg:122.90ms
step:33/1395 train_time:2828ms step_avg:122.97ms
step:34/1395 train_time:2950ms step_avg:122.93ms
step:35/1395 train_time:3074ms step_avg:122.98ms
step:36/1395 train_time:3196ms step_avg:122.94ms
step:37/1395 train_time:3319ms step_avg:122.94ms
step:38/1395 train_time:3444ms step_avg:123.00ms
step:39/1395 train_time:3571ms step_avg:123.14ms
step:40/1395 train_time:3689ms step_avg:122.98ms
step:41/1395 train_time:3812ms step_avg:122.97ms
step:42/1395 train_time:3934ms step_avg:122.94ms
step:43/1395 train_time:4056ms step_avg:122.91ms
step:44/1395 train_time:4179ms step_avg:122.91ms
step:45/1395 train_time:4304ms step_avg:122.98ms
step:46/1395 train_time:4430ms step_avg:123.05ms
step:47/1395 train_time:4552ms step_avg:123.03ms
step:48/1395 train_time:4675ms step_avg:123.04ms
step:49/1395 train_time:4798ms step_avg:123.03ms
step:50/1395 train_time:4923ms step_avg:123.07ms
step:51/1395 train_time:5045ms step_avg:123.05ms
step:52/1395 train_time:5168ms step_avg:123.04ms
step:53/1395 train_time:5290ms step_avg:123.03ms
step:54/1395 train_time:5413ms step_avg:123.02ms
step:55/1395 train_time:5535ms step_avg:123.00ms
step:56/1395 train_time:5657ms step_avg:122.98ms
step:57/1395 train_time:5780ms step_avg:122.98ms
step:58/1395 train_time:5904ms step_avg:123.01ms
step:59/1395 train_time:6028ms step_avg:123.02ms
step:60/1395 train_time:6151ms step_avg:123.01ms
step:61/1395 train_time:6272ms step_avg:122.98ms
step:62/1395 train_time:6394ms step_avg:122.96ms
step:63/1395 train_time:6517ms step_avg:122.95ms
step:64/1395 train_time:6639ms step_avg:122.95ms
step:65/1395 train_time:6764ms step_avg:122.98ms
step:66/1395 train_time:6887ms step_avg:122.98ms
step:67/1395 train_time:7009ms step_avg:122.97ms
step:68/1395 train_time:7131ms step_avg:122.94ms
step:69/1395 train_time:7253ms step_avg:122.93ms
step:70/1395 train_time:7374ms step_avg:122.91ms
step:71/1395 train_time:7498ms step_avg:122.92ms
step:72/1395 train_time:7620ms step_avg:122.90ms
step:73/1395 train_time:7743ms step_avg:122.91ms
step:74/1395 train_time:7865ms step_avg:122.90ms
step:75/1395 train_time:7988ms step_avg:122.89ms
step:76/1395 train_time:8109ms step_avg:122.87ms
step:77/1395 train_time:8231ms step_avg:122.86ms
step:78/1395 train_time:8355ms step_avg:122.86ms
step:79/1395 train_time:8478ms step_avg:122.87ms
step:80/1395 train_time:8600ms step_avg:122.86ms
step:81/1395 train_time:8724ms step_avg:122.87ms
step:82/1395 train_time:8849ms step_avg:122.91ms
step:83/1395 train_time:8971ms step_avg:122.89ms
step:84/1395 train_time:9094ms step_avg:122.89ms
step:85/1395 train_time:9216ms step_avg:122.88ms
step:86/1395 train_time:9338ms step_avg:122.87ms
step:87/1395 train_time:9461ms step_avg:122.86ms
step:88/1395 train_time:9583ms step_avg:122.86ms
step:89/1395 train_time:9705ms step_avg:122.85ms
step:90/1395 train_time:9830ms step_avg:122.88ms
step:91/1395 train_time:9953ms step_avg:122.88ms
step:92/1395 train_time:10074ms step_avg:122.86ms
step:93/1395 train_time:10198ms step_avg:122.86ms
step:94/1395 train_time:10321ms step_avg:122.86ms
step:95/1395 train_time:10444ms step_avg:122.87ms
step:96/1395 train_time:10566ms step_avg:122.86ms
step:97/1395 train_time:10689ms step_avg:122.86ms
step:98/1395 train_time:10810ms step_avg:122.85ms
step:99/1395 train_time:10934ms step_avg:122.86ms
step:100/1395 train_time:11058ms step_avg:122.87ms
step:101/1395 train_time:11181ms step_avg:122.87ms
step:102/1395 train_time:11305ms step_avg:122.88ms
step:103/1395 train_time:11431ms step_avg:122.91ms
step:104/1395 train_time:11553ms step_avg:122.91ms
step:105/1395 train_time:11677ms step_avg:122.91ms
step:106/1395 train_time:11800ms step_avg:122.92ms
step:107/1395 train_time:11924ms step_avg:122.93ms
step:108/1395 train_time:12048ms step_avg:122.94ms
step:109/1395 train_time:12171ms step_avg:122.94ms
step:110/1395 train_time:12295ms step_avg:122.95ms
step:111/1395 train_time:12418ms step_avg:122.95ms
step:112/1395 train_time:12542ms step_avg:122.96ms
step:113/1395 train_time:12666ms step_avg:122.97ms
step:114/1395 train_time:12788ms step_avg:122.96ms
step:115/1395 train_time:12910ms step_avg:122.96ms
step:116/1395 train_time:13035ms step_avg:122.97ms
step:117/1395 train_time:13160ms step_avg:122.99ms
step:118/1395 train_time:13283ms step_avg:122.99ms
step:119/1395 train_time:13405ms step_avg:122.98ms
step:120/1395 train_time:13529ms step_avg:122.99ms
step:121/1395 train_time:13651ms step_avg:122.98ms
step:122/1395 train_time:13773ms step_avg:122.98ms
step:123/1395 train_time:13896ms step_avg:122.97ms
step:124/1395 train_time:14019ms step_avg:122.97ms
step:125/1395 train_time:14143ms step_avg:122.98ms
step:125/1395 val_loss:4.3886 train_time:14265ms step_avg:124.04ms
step:126/1395 train_time:14285ms step_avg:123.14ms
step:127/1395 train_time:14407ms step_avg:123.14ms
step:128/1395 train_time:14533ms step_avg:123.16ms
step:129/1395 train_time:14655ms step_avg:123.15ms
step:130/1395 train_time:14777ms step_avg:123.14ms
step:131/1395 train_time:14899ms step_avg:123.14ms
step:132/1395 train_time:15021ms step_avg:123.13ms
step:133/1395 train_time:15145ms step_avg:123.13ms
step:134/1395 train_time:15268ms step_avg:123.13ms
step:135/1395 train_time:15392ms step_avg:123.13ms
step:136/1395 train_time:15519ms step_avg:123.17ms
step:137/1395 train_time:15643ms step_avg:123.17ms
step:138/1395 train_time:15767ms step_avg:123.18ms
step:139/1395 train_time:15890ms step_avg:123.18ms
step:140/1395 train_time:16014ms step_avg:123.18ms
step:141/1395 train_time:16137ms step_avg:123.19ms
step:142/1395 train_time:16260ms step_avg:123.18ms
step:143/1395 train_time:16383ms step_avg:123.18ms
step:144/1395 train_time:16509ms step_avg:123.20ms
step:145/1395 train_time:16633ms step_avg:123.21ms
step:146/1395 train_time:16758ms step_avg:123.22ms
step:147/1395 train_time:16880ms step_avg:123.21ms
step:148/1395 train_time:17003ms step_avg:123.21ms
step:149/1395 train_time:17125ms step_avg:123.20ms
step:150/1395 train_time:17247ms step_avg:123.19ms
step:151/1395 train_time:17370ms step_avg:123.19ms
step:152/1395 train_time:17493ms step_avg:123.19ms
step:153/1395 train_time:17618ms step_avg:123.20ms
step:154/1395 train_time:17743ms step_avg:123.21ms
step:155/1395 train_time:17867ms step_avg:123.22ms
step:156/1395 train_time:17991ms step_avg:123.23ms
step:157/1395 train_time:18114ms step_avg:123.23ms
step:158/1395 train_time:18238ms step_avg:123.23ms
step:159/1395 train_time:18363ms step_avg:123.24ms
step:160/1395 train_time:18487ms step_avg:123.25ms
step:161/1395 train_time:18611ms step_avg:123.25ms
step:162/1395 train_time:18734ms step_avg:123.25ms
step:163/1395 train_time:18858ms step_avg:123.26ms
step:164/1395 train_time:18981ms step_avg:123.25ms
step:165/1395 train_time:19105ms step_avg:123.26ms
step:166/1395 train_time:19230ms step_avg:123.27ms
step:167/1395 train_time:19353ms step_avg:123.27ms
step:168/1395 train_time:19476ms step_avg:123.27ms
step:169/1395 train_time:19601ms step_avg:123.27ms
step:170/1395 train_time:19725ms step_avg:123.28ms
step:171/1395 train_time:19848ms step_avg:123.28ms
step:172/1395 train_time:19971ms step_avg:123.28ms
step:173/1395 train_time:20093ms step_avg:123.27ms
step:174/1395 train_time:20217ms step_avg:123.28ms
step:175/1395 train_time:20340ms step_avg:123.27ms
step:176/1395 train_time:20462ms step_avg:123.27ms
step:177/1395 train_time:20587ms step_avg:123.27ms
step:178/1395 train_time:20711ms step_avg:123.28ms
step:179/1395 train_time:20836ms step_avg:123.29ms
step:180/1395 train_time:20960ms step_avg:123.29ms
step:181/1395 train_time:21081ms step_avg:123.28ms
step:182/1395 train_time:21205ms step_avg:123.29ms
step:183/1395 train_time:21330ms step_avg:123.30ms
step:184/1395 train_time:21453ms step_avg:123.29ms
step:185/1395 train_time:21576ms step_avg:123.29ms
step:186/1395 train_time:21699ms step_avg:123.29ms
step:187/1395 train_time:21821ms step_avg:123.28ms
step:188/1395 train_time:21945ms step_avg:123.28ms
step:189/1395 train_time:22068ms step_avg:123.28ms
step:190/1395 train_time:22192ms step_avg:123.29ms
step:191/1395 train_time:22316ms step_avg:123.29ms
step:192/1395 train_time:22441ms step_avg:123.30ms
step:193/1395 train_time:22564ms step_avg:123.30ms
step:194/1395 train_time:22688ms step_avg:123.30ms
step:195/1395 train_time:22810ms step_avg:123.30ms
step:196/1395 train_time:22933ms step_avg:123.29ms
step:197/1395 train_time:23056ms step_avg:123.29ms
step:198/1395 train_time:23179ms step_avg:123.29ms
step:199/1395 train_time:23301ms step_avg:123.29ms
step:200/1395 train_time:23425ms step_avg:123.29ms
step:201/1395 train_time:23549ms step_avg:123.29ms
step:202/1395 train_time:23672ms step_avg:123.29ms
step:203/1395 train_time:23794ms step_avg:123.29ms
step:204/1395 train_time:23919ms step_avg:123.29ms
step:205/1395 train_time:24041ms step_avg:123.29ms
step:206/1395 train_time:24165ms step_avg:123.29ms
step:207/1395 train_time:24291ms step_avg:123.30ms
step:208/1395 train_time:24414ms step_avg:123.30ms
step:209/1395 train_time:24539ms step_avg:123.31ms
step:210/1395 train_time:24662ms step_avg:123.31ms
step:211/1395 train_time:24785ms step_avg:123.31ms
step:212/1395 train_time:24909ms step_avg:123.31ms
step:213/1395 train_time:25034ms step_avg:123.32ms
step:214/1395 train_time:25159ms step_avg:123.33ms
step:215/1395 train_time:25283ms step_avg:123.33ms
step:216/1395 train_time:25407ms step_avg:123.34ms
step:217/1395 train_time:25532ms step_avg:123.34ms
step:218/1395 train_time:25660ms step_avg:123.36ms
step:219/1395 train_time:25780ms step_avg:123.35ms
step:220/1395 train_time:25904ms step_avg:123.35ms
step:221/1395 train_time:26028ms step_avg:123.36ms
step:222/1395 train_time:26151ms step_avg:123.36ms
step:223/1395 train_time:26274ms step_avg:123.35ms
step:224/1395 train_time:26398ms step_avg:123.36ms
step:225/1395 train_time:26524ms step_avg:123.37ms
step:226/1395 train_time:26648ms step_avg:123.37ms
step:227/1395 train_time:26772ms step_avg:123.37ms
step:228/1395 train_time:26897ms step_avg:123.38ms
step:229/1395 train_time:27021ms step_avg:123.38ms
step:230/1395 train_time:27147ms step_avg:123.40ms
step:231/1395 train_time:27271ms step_avg:123.40ms
step:232/1395 train_time:27394ms step_avg:123.40ms
step:233/1395 train_time:27520ms step_avg:123.41ms
step:234/1395 train_time:27642ms step_avg:123.40ms
step:235/1395 train_time:27765ms step_avg:123.40ms
step:236/1395 train_time:27889ms step_avg:123.40ms
step:237/1395 train_time:28012ms step_avg:123.40ms
step:238/1395 train_time:28137ms step_avg:123.41ms
step:239/1395 train_time:28261ms step_avg:123.41ms
step:240/1395 train_time:28383ms step_avg:123.40ms
step:241/1395 train_time:28506ms step_avg:123.40ms
step:242/1395 train_time:28630ms step_avg:123.40ms
step:243/1395 train_time:28754ms step_avg:123.41ms
step:244/1395 train_time:28878ms step_avg:123.41ms
step:245/1395 train_time:29000ms step_avg:123.41ms
step:246/1395 train_time:29124ms step_avg:123.41ms
step:247/1395 train_time:29247ms step_avg:123.41ms
step:248/1395 train_time:29370ms step_avg:123.40ms
step:249/1395 train_time:29493ms step_avg:123.40ms
step:250/1395 train_time:29616ms step_avg:123.40ms
step:250/1395 val_loss:3.9853 train_time:29739ms step_avg:123.91ms
step:251/1395 train_time:29759ms step_avg:123.48ms
step:252/1395 train_time:29878ms step_avg:123.46ms
step:253/1395 train_time:30003ms step_avg:123.47ms
step:254/1395 train_time:30127ms step_avg:123.47ms
step:255/1395 train_time:30249ms step_avg:123.47ms
step:256/1395 train_time:30372ms step_avg:123.46ms
step:257/1395 train_time:30495ms step_avg:123.46ms
step:258/1395 train_time:30617ms step_avg:123.46ms
step:259/1395 train_time:30741ms step_avg:123.46ms
step:260/1395 train_time:30868ms step_avg:123.47ms
step:261/1395 train_time:30994ms step_avg:123.48ms
step:262/1395 train_time:31120ms step_avg:123.49ms
step:263/1395 train_time:31245ms step_avg:123.50ms
step:264/1395 train_time:31368ms step_avg:123.49ms
step:265/1395 train_time:31491ms step_avg:123.49ms
step:266/1395 train_time:31614ms step_avg:123.49ms
step:267/1395 train_time:31737ms step_avg:123.49ms
step:268/1395 train_time:31859ms step_avg:123.49ms
step:269/1395 train_time:31983ms step_avg:123.49ms
step:270/1395 train_time:32107ms step_avg:123.49ms
step:271/1395 train_time:32232ms step_avg:123.49ms
step:272/1395 train_time:32355ms step_avg:123.49ms
step:273/1395 train_time:32478ms step_avg:123.49ms
step:274/1395 train_time:32600ms step_avg:123.49ms
step:275/1395 train_time:32725ms step_avg:123.49ms
step:276/1395 train_time:32848ms step_avg:123.49ms
step:277/1395 train_time:32973ms step_avg:123.49ms
step:278/1395 train_time:33096ms step_avg:123.49ms
step:279/1395 train_time:33221ms step_avg:123.50ms
step:280/1395 train_time:33345ms step_avg:123.50ms
step:281/1395 train_time:33468ms step_avg:123.50ms
step:282/1395 train_time:33592ms step_avg:123.50ms
step:283/1395 train_time:33718ms step_avg:123.51ms
step:284/1395 train_time:33842ms step_avg:123.51ms
step:285/1395 train_time:33966ms step_avg:123.51ms
step:286/1395 train_time:34089ms step_avg:123.51ms
step:287/1395 train_time:34215ms step_avg:123.52ms
step:288/1395 train_time:34340ms step_avg:123.53ms
step:289/1395 train_time:34463ms step_avg:123.52ms
step:290/1395 train_time:34586ms step_avg:123.52ms
step:291/1395 train_time:34711ms step_avg:123.53ms
step:292/1395 train_time:34835ms step_avg:123.53ms
step:293/1395 train_time:34958ms step_avg:123.53ms
step:294/1395 train_time:35082ms step_avg:123.53ms
step:295/1395 train_time:35205ms step_avg:123.53ms
step:296/1395 train_time:35330ms step_avg:123.53ms
step:297/1395 train_time:35454ms step_avg:123.53ms
step:298/1395 train_time:35577ms step_avg:123.53ms
step:299/1395 train_time:35700ms step_avg:123.53ms
step:300/1395 train_time:35825ms step_avg:123.53ms
step:301/1395 train_time:35949ms step_avg:123.54ms
step:302/1395 train_time:36073ms step_avg:123.54ms
step:303/1395 train_time:36195ms step_avg:123.53ms
step:304/1395 train_time:36318ms step_avg:123.53ms
step:305/1395 train_time:36446ms step_avg:123.55ms
step:306/1395 train_time:36565ms step_avg:123.53ms
step:307/1395 train_time:36688ms step_avg:123.53ms
step:308/1395 train_time:36812ms step_avg:123.53ms
step:309/1395 train_time:36937ms step_avg:123.54ms
step:310/1395 train_time:37059ms step_avg:123.53ms
step:311/1395 train_time:37183ms step_avg:123.53ms
step:312/1395 train_time:37306ms step_avg:123.53ms
step:313/1395 train_time:37433ms step_avg:123.54ms
step:314/1395 train_time:37560ms step_avg:123.55ms
step:315/1395 train_time:37685ms step_avg:123.56ms
step:316/1395 train_time:37811ms step_avg:123.56ms
step:317/1395 train_time:37939ms step_avg:123.58ms
step:318/1395 train_time:38065ms step_avg:123.59ms
step:319/1395 train_time:38191ms step_avg:123.60ms
step:320/1395 train_time:38317ms step_avg:123.60ms
step:321/1395 train_time:38444ms step_avg:123.61ms
step:322/1395 train_time:38569ms step_avg:123.62ms
step:323/1395 train_time:38696ms step_avg:123.63ms
step:324/1395 train_time:38821ms step_avg:123.63ms
step:325/1395 train_time:38948ms step_avg:123.64ms
step:326/1395 train_time:39074ms step_avg:123.65ms
step:327/1395 train_time:39199ms step_avg:123.66ms
step:328/1395 train_time:39326ms step_avg:123.67ms
step:329/1395 train_time:39452ms step_avg:123.67ms
step:330/1395 train_time:39579ms step_avg:123.68ms
step:331/1395 train_time:39705ms step_avg:123.69ms
step:332/1395 train_time:39833ms step_avg:123.70ms
step:333/1395 train_time:39959ms step_avg:123.71ms
step:334/1395 train_time:40084ms step_avg:123.72ms
step:335/1395 train_time:40211ms step_avg:123.73ms
step:336/1395 train_time:40337ms step_avg:123.73ms
step:337/1395 train_time:40464ms step_avg:123.74ms
step:338/1395 train_time:40591ms step_avg:123.75ms
step:339/1395 train_time:40717ms step_avg:123.76ms
step:340/1395 train_time:40843ms step_avg:123.77ms
step:341/1395 train_time:40968ms step_avg:123.77ms
step:342/1395 train_time:41094ms step_avg:123.78ms
step:343/1395 train_time:41220ms step_avg:123.78ms
step:344/1395 train_time:41347ms step_avg:123.79ms
step:345/1395 train_time:41473ms step_avg:123.80ms
step:346/1395 train_time:41599ms step_avg:123.81ms
step:347/1395 train_time:41724ms step_avg:123.81ms
step:348/1395 train_time:41850ms step_avg:123.82ms
step:349/1395 train_time:41976ms step_avg:123.82ms
step:350/1395 train_time:42102ms step_avg:123.83ms
step:351/1395 train_time:42227ms step_avg:123.83ms
step:352/1395 train_time:42352ms step_avg:123.84ms
step:353/1395 train_time:42479ms step_avg:123.84ms
step:354/1395 train_time:42604ms step_avg:123.85ms
step:355/1395 train_time:42733ms step_avg:123.86ms
step:356/1395 train_time:42859ms step_avg:123.87ms
step:357/1395 train_time:42984ms step_avg:123.87ms
step:358/1395 train_time:43110ms step_avg:123.88ms
step:359/1395 train_time:43237ms step_avg:123.89ms
step:360/1395 train_time:43363ms step_avg:123.89ms
step:361/1395 train_time:43490ms step_avg:123.90ms
step:362/1395 train_time:43617ms step_avg:123.91ms
step:363/1395 train_time:43743ms step_avg:123.92ms
step:364/1395 train_time:43867ms step_avg:123.92ms
step:365/1395 train_time:43994ms step_avg:123.93ms
step:366/1395 train_time:44120ms step_avg:123.93ms
step:367/1395 train_time:44247ms step_avg:123.94ms
step:368/1395 train_time:44374ms step_avg:123.95ms
step:369/1395 train_time:44499ms step_avg:123.95ms
step:370/1395 train_time:44625ms step_avg:123.96ms
step:371/1395 train_time:44753ms step_avg:123.97ms
step:372/1395 train_time:44879ms step_avg:123.97ms
step:373/1395 train_time:45005ms step_avg:123.98ms
step:374/1395 train_time:45132ms step_avg:123.99ms
step:375/1395 train_time:45258ms step_avg:123.99ms
step:375/1395 val_loss:3.7834 train_time:45383ms step_avg:124.34ms
step:376/1395 train_time:45403ms step_avg:124.05ms
step:377/1395 train_time:45521ms step_avg:124.04ms
step:378/1395 train_time:45649ms step_avg:124.05ms
step:379/1395 train_time:45775ms step_avg:124.05ms
step:380/1395 train_time:45901ms step_avg:124.06ms
step:381/1395 train_time:46028ms step_avg:124.06ms
step:382/1395 train_time:46152ms step_avg:124.06ms
step:383/1395 train_time:46278ms step_avg:124.07ms
step:384/1395 train_time:46406ms step_avg:124.08ms
step:385/1395 train_time:46535ms step_avg:124.09ms
step:386/1395 train_time:46660ms step_avg:124.10ms
step:387/1395 train_time:46786ms step_avg:124.10ms
step:388/1395 train_time:46913ms step_avg:124.11ms
step:389/1395 train_time:47042ms step_avg:124.12ms
step:390/1395 train_time:47165ms step_avg:124.12ms
step:391/1395 train_time:47291ms step_avg:124.12ms
step:392/1395 train_time:47418ms step_avg:124.13ms
step:393/1395 train_time:47546ms step_avg:124.14ms
step:394/1395 train_time:47673ms step_avg:124.15ms
step:395/1395 train_time:47799ms step_avg:124.15ms
step:396/1395 train_time:47925ms step_avg:124.16ms
step:397/1395 train_time:48050ms step_avg:124.16ms
step:398/1395 train_time:48175ms step_avg:124.16ms
step:399/1395 train_time:48303ms step_avg:124.17ms
step:400/1395 train_time:48430ms step_avg:124.18ms
step:401/1395 train_time:48555ms step_avg:124.18ms
step:402/1395 train_time:48683ms step_avg:124.19ms
step:403/1395 train_time:48809ms step_avg:124.20ms
step:404/1395 train_time:48936ms step_avg:124.20ms
step:405/1395 train_time:49062ms step_avg:124.21ms
step:406/1395 train_time:49187ms step_avg:124.21ms
step:407/1395 train_time:49313ms step_avg:124.21ms
step:408/1395 train_time:49442ms step_avg:124.23ms
step:409/1395 train_time:49565ms step_avg:124.22ms
step:410/1395 train_time:49691ms step_avg:124.23ms
step:411/1395 train_time:49817ms step_avg:124.23ms
step:412/1395 train_time:49945ms step_avg:124.24ms
step:413/1395 train_time:50071ms step_avg:124.25ms
step:414/1395 train_time:50199ms step_avg:124.25ms
step:415/1395 train_time:50326ms step_avg:124.26ms
step:416/1395 train_time:50451ms step_avg:124.26ms
step:417/1395 train_time:50578ms step_avg:124.27ms
step:418/1395 train_time:50705ms step_avg:124.28ms
step:419/1395 train_time:50835ms step_avg:124.29ms
step:420/1395 train_time:50957ms step_avg:124.29ms
step:421/1395 train_time:51084ms step_avg:124.29ms
step:422/1395 train_time:51211ms step_avg:124.30ms
step:423/1395 train_time:51338ms step_avg:124.31ms
step:424/1395 train_time:51465ms step_avg:124.31ms
step:425/1395 train_time:51592ms step_avg:124.32ms
step:426/1395 train_time:51718ms step_avg:124.32ms
step:427/1395 train_time:51845ms step_avg:124.33ms
step:428/1395 train_time:51972ms step_avg:124.34ms
step:429/1395 train_time:52099ms step_avg:124.34ms
step:430/1395 train_time:52226ms step_avg:124.35ms
step:431/1395 train_time:52353ms step_avg:124.35ms
step:432/1395 train_time:52480ms step_avg:124.36ms
step:433/1395 train_time:52607ms step_avg:124.37ms
step:434/1395 train_time:52734ms step_avg:124.37ms
step:435/1395 train_time:52862ms step_avg:124.38ms
step:436/1395 train_time:52988ms step_avg:124.39ms
step:437/1395 train_time:53115ms step_avg:124.39ms
step:438/1395 train_time:53243ms step_avg:124.40ms
step:439/1395 train_time:53370ms step_avg:124.40ms
step:440/1395 train_time:53496ms step_avg:124.41ms
step:441/1395 train_time:53624ms step_avg:124.42ms
step:442/1395 train_time:53751ms step_avg:124.42ms
step:443/1395 train_time:53878ms step_avg:124.43ms
step:444/1395 train_time:54006ms step_avg:124.44ms
step:445/1395 train_time:54132ms step_avg:124.44ms
step:446/1395 train_time:54258ms step_avg:124.44ms
step:447/1395 train_time:54385ms step_avg:124.45ms
step:448/1395 train_time:54512ms step_avg:124.46ms
step:449/1395 train_time:54639ms step_avg:124.46ms
step:450/1395 train_time:54766ms step_avg:124.47ms
step:451/1395 train_time:54892ms step_avg:124.47ms
step:452/1395 train_time:55019ms step_avg:124.48ms
step:453/1395 train_time:55147ms step_avg:124.49ms
step:454/1395 train_time:55274ms step_avg:124.49ms
step:455/1395 train_time:55402ms step_avg:124.50ms
step:456/1395 train_time:55529ms step_avg:124.50ms
step:457/1395 train_time:55655ms step_avg:124.51ms
step:458/1395 train_time:55782ms step_avg:124.51ms
step:459/1395 train_time:55908ms step_avg:124.52ms
step:460/1395 train_time:56036ms step_avg:124.52ms
step:461/1395 train_time:56163ms step_avg:124.53ms
step:462/1395 train_time:56290ms step_avg:124.53ms
step:463/1395 train_time:56417ms step_avg:124.54ms
step:464/1395 train_time:56544ms step_avg:124.55ms
step:465/1395 train_time:56669ms step_avg:124.55ms
step:466/1395 train_time:56796ms step_avg:124.55ms
step:467/1395 train_time:56923ms step_avg:124.56ms
step:468/1395 train_time:57050ms step_avg:124.56ms
step:469/1395 train_time:57176ms step_avg:124.57ms
step:470/1395 train_time:57303ms step_avg:124.57ms
step:471/1395 train_time:57429ms step_avg:124.58ms
step:472/1395 train_time:57556ms step_avg:124.58ms
step:473/1395 train_time:57683ms step_avg:124.59ms
step:474/1395 train_time:57810ms step_avg:124.59ms
step:475/1395 train_time:57936ms step_avg:124.59ms
step:476/1395 train_time:58063ms step_avg:124.60ms
step:477/1395 train_time:58190ms step_avg:124.60ms
step:478/1395 train_time:58317ms step_avg:124.61ms
step:479/1395 train_time:58445ms step_avg:124.62ms
step:480/1395 train_time:58572ms step_avg:124.62ms
step:481/1395 train_time:58699ms step_avg:124.63ms
step:482/1395 train_time:58826ms step_avg:124.63ms
step:483/1395 train_time:58952ms step_avg:124.64ms
step:484/1395 train_time:59079ms step_avg:124.64ms
step:485/1395 train_time:59206ms step_avg:124.64ms
step:486/1395 train_time:59332ms step_avg:124.65ms
step:487/1395 train_time:59459ms step_avg:124.65ms
step:488/1395 train_time:59586ms step_avg:124.66ms
step:489/1395 train_time:59713ms step_avg:124.66ms
step:490/1395 train_time:59840ms step_avg:124.67ms
step:491/1395 train_time:59966ms step_avg:124.67ms
step:492/1395 train_time:60094ms step_avg:124.68ms
step:493/1395 train_time:60221ms step_avg:124.68ms
step:494/1395 train_time:60347ms step_avg:124.68ms
step:495/1395 train_time:60474ms step_avg:124.69ms
step:496/1395 train_time:60602ms step_avg:124.69ms
step:497/1395 train_time:60727ms step_avg:124.70ms
step:498/1395 train_time:60854ms step_avg:124.70ms
step:499/1395 train_time:60981ms step_avg:124.70ms
step:500/1395 train_time:61106ms step_avg:124.71ms
step:500/1395 val_loss:3.6638 train_time:61231ms step_avg:124.96ms
step:501/1395 train_time:61252ms step_avg:124.75ms | |
step:502/1395 train_time:61374ms step_avg:124.74ms | |
step:503/1395 train_time:61501ms step_avg:124.75ms | |
step:504/1395 train_time:61627ms step_avg:124.75ms | |
step:505/1395 train_time:61753ms step_avg:124.75ms | |
step:506/1395 train_time:61878ms step_avg:124.75ms | |
step:507/1395 train_time:62004ms step_avg:124.76ms | |
step:508/1395 train_time:62130ms step_avg:124.76ms | |
step:509/1395 train_time:62256ms step_avg:124.76ms | |
step:510/1395 train_time:62384ms step_avg:124.77ms | |
step:511/1395 train_time:62513ms step_avg:124.78ms | |
step:512/1395 train_time:62638ms step_avg:124.78ms | |
step:513/1395 train_time:62765ms step_avg:124.78ms | |
step:514/1395 train_time:62893ms step_avg:124.79ms | |
step:515/1395 train_time:63019ms step_avg:124.79ms | |
step:516/1395 train_time:63145ms step_avg:124.79ms | |
step:517/1395 train_time:63273ms step_avg:124.80ms | |
step:518/1395 train_time:63400ms step_avg:124.80ms | |
step:519/1395 train_time:63535ms step_avg:124.82ms | |
step:520/1395 train_time:63661ms step_avg:124.83ms | |
step:521/1395 train_time:63790ms step_avg:124.83ms | |
step:522/1395 train_time:63919ms step_avg:124.84ms | |
step:523/1395 train_time:64046ms step_avg:124.85ms | |
step:524/1395 train_time:64175ms step_avg:124.86ms | |
step:525/1395 train_time:64304ms step_avg:124.86ms | |
step:526/1395 train_time:64433ms step_avg:124.87ms | |
step:527/1395 train_time:64561ms step_avg:124.88ms | |
step:528/1395 train_time:64689ms step_avg:124.88ms | |
step:529/1395 train_time:64819ms step_avg:124.89ms | |
step:530/1395 train_time:64947ms step_avg:124.90ms | |
step:531/1395 train_time:65077ms step_avg:124.91ms | |
step:532/1395 train_time:65206ms step_avg:124.92ms | |
step:533/1395 train_time:65336ms step_avg:124.93ms | |
step:534/1395 train_time:65465ms step_avg:124.93ms | |
step:535/1395 train_time:65596ms step_avg:124.94ms | |
step:536/1395 train_time:65724ms step_avg:124.95ms | |
step:537/1395 train_time:65851ms step_avg:124.96ms | |
step:538/1395 train_time:65979ms step_avg:124.96ms | |
step:539/1395 train_time:66107ms step_avg:124.97ms | |
step:540/1395 train_time:66236ms step_avg:124.97ms | |
step:541/1395 train_time:66365ms step_avg:124.98ms | |
step:542/1395 train_time:66493ms step_avg:124.99ms | |
step:543/1395 train_time:66621ms step_avg:124.99ms | |
step:544/1395 train_time:66751ms step_avg:125.00ms | |
step:545/1395 train_time:66880ms step_avg:125.01ms | |
step:546/1395 train_time:67008ms step_avg:125.02ms | |
step:547/1395 train_time:67139ms step_avg:125.03ms | |
step:548/1395 train_time:67269ms step_avg:125.04ms | |
step:549/1395 train_time:67398ms step_avg:125.04ms | |
step:550/1395 train_time:67526ms step_avg:125.05ms | |
step:551/1395 train_time:67655ms step_avg:125.05ms | |
step:552/1395 train_time:67783ms step_avg:125.06ms | |
step:553/1395 train_time:67911ms step_avg:125.07ms | |
step:554/1395 train_time:68039ms step_avg:125.07ms | |
step:555/1395 train_time:68169ms step_avg:125.08ms | |
step:556/1395 train_time:68299ms step_avg:125.09ms | |
step:557/1395 train_time:68428ms step_avg:125.10ms | |
step:558/1395 train_time:68557ms step_avg:125.10ms | |
step:559/1395 train_time:68688ms step_avg:125.12ms | |
step:560/1395 train_time:68818ms step_avg:125.12ms | |
step:561/1395 train_time:68946ms step_avg:125.13ms | |
step:562/1395 train_time:69075ms step_avg:125.14ms | |
step:563/1395 train_time:69205ms step_avg:125.15ms | |
step:564/1395 train_time:69335ms step_avg:125.15ms | |
step:565/1395 train_time:69462ms step_avg:125.16ms | |
step:566/1395 train_time:69591ms step_avg:125.16ms | |
step:567/1395 train_time:69720ms step_avg:125.17ms | |
step:568/1395 train_time:69850ms step_avg:125.18ms | |
step:569/1395 train_time:69978ms step_avg:125.18ms | |
step:570/1395 train_time:70106ms step_avg:125.19ms | |
step:571/1395 train_time:70238ms step_avg:125.20ms | |
step:572/1395 train_time:70365ms step_avg:125.20ms | |
step:573/1395 train_time:70495ms step_avg:125.21ms | |
step:574/1395 train_time:70622ms step_avg:125.22ms | |
step:575/1395 train_time:70751ms step_avg:125.22ms | |
step:576/1395 train_time:70878ms step_avg:125.23ms | |
step:577/1395 train_time:71007ms step_avg:125.23ms | |
step:578/1395 train_time:71137ms step_avg:125.24ms | |
step:579/1395 train_time:71266ms step_avg:125.25ms | |
step:580/1395 train_time:71396ms step_avg:125.26ms | |
step:581/1395 train_time:71525ms step_avg:125.26ms | |
step:582/1395 train_time:71655ms step_avg:125.27ms | |
step:583/1395 train_time:71783ms step_avg:125.28ms | |
step:584/1395 train_time:71912ms step_avg:125.28ms | |
step:585/1395 train_time:72040ms step_avg:125.29ms | |
step:586/1395 train_time:72169ms step_avg:125.29ms | |
step:587/1395 train_time:72298ms step_avg:125.30ms | |
step:588/1395 train_time:72427ms step_avg:125.31ms | |
step:589/1395 train_time:72556ms step_avg:125.31ms | |
step:590/1395 train_time:72684ms step_avg:125.32ms | |
step:591/1395 train_time:72814ms step_avg:125.32ms | |
step:592/1395 train_time:72942ms step_avg:125.33ms | |
step:593/1395 train_time:73070ms step_avg:125.33ms | |
step:594/1395 train_time:73199ms step_avg:125.34ms | |
step:595/1395 train_time:73328ms step_avg:125.35ms | |
step:596/1395 train_time:73458ms step_avg:125.35ms | |
step:597/1395 train_time:73587ms step_avg:125.36ms | |
step:598/1395 train_time:73717ms step_avg:125.37ms | |
step:599/1395 train_time:73845ms step_avg:125.37ms | |
step:600/1395 train_time:73975ms step_avg:125.38ms | |
step:601/1395 train_time:74103ms step_avg:125.39ms | |
step:602/1395 train_time:74231ms step_avg:125.39ms | |
step:603/1395 train_time:74360ms step_avg:125.40ms | |
step:604/1395 train_time:74488ms step_avg:125.40ms | |
step:605/1395 train_time:74618ms step_avg:125.41ms | |
step:606/1395 train_time:74747ms step_avg:125.41ms | |
step:607/1395 train_time:74877ms step_avg:125.42ms | |
step:608/1395 train_time:75007ms step_avg:125.43ms | |
step:609/1395 train_time:75137ms step_avg:125.44ms | |
step:610/1395 train_time:75266ms step_avg:125.44ms | |
step:611/1395 train_time:75396ms step_avg:125.45ms | |
step:612/1395 train_time:75525ms step_avg:125.46ms | |
step:613/1395 train_time:75655ms step_avg:125.47ms | |
step:614/1395 train_time:75784ms step_avg:125.47ms | |
step:615/1395 train_time:75914ms step_avg:125.48ms | |
step:616/1395 train_time:76041ms step_avg:125.48ms | |
step:617/1395 train_time:76170ms step_avg:125.49ms | |
step:618/1395 train_time:76299ms step_avg:125.49ms | |
step:619/1395 train_time:76428ms step_avg:125.50ms | |
step:620/1395 train_time:76558ms step_avg:125.50ms | |
step:621/1395 train_time:76687ms step_avg:125.51ms | |
step:622/1395 train_time:76818ms step_avg:125.52ms | |
step:623/1395 train_time:76947ms step_avg:125.52ms | |
step:624/1395 train_time:77076ms step_avg:125.53ms | |
step:625/1395 train_time:77205ms step_avg:125.54ms | |
step:625/1395 val_loss:3.5810 train_time:77333ms step_avg:125.75ms | |
step:626/1395 train_time:77353ms step_avg:125.57ms | |
step:627/1395 train_time:77475ms step_avg:125.57ms | |
step:628/1395 train_time:77605ms step_avg:125.57ms | |
step:629/1395 train_time:77733ms step_avg:125.58ms | |
step:630/1395 train_time:77862ms step_avg:125.58ms | |
step:631/1395 train_time:77991ms step_avg:125.59ms | |
step:632/1395 train_time:78119ms step_avg:125.59ms | |
step:633/1395 train_time:78246ms step_avg:125.60ms | |
step:634/1395 train_time:78378ms step_avg:125.61ms | |
step:635/1395 train_time:78508ms step_avg:125.61ms | |
step:636/1395 train_time:78638ms step_avg:125.62ms | |
step:637/1395 train_time:78765ms step_avg:125.62ms | |
step:638/1395 train_time:78895ms step_avg:125.63ms | |
step:639/1395 train_time:79023ms step_avg:125.63ms | |
step:640/1395 train_time:79152ms step_avg:125.64ms | |
step:641/1395 train_time:79282ms step_avg:125.64ms | |
step:642/1395 train_time:79411ms step_avg:125.65ms | |
step:643/1395 train_time:79542ms step_avg:125.66ms | |
step:644/1395 train_time:79671ms step_avg:125.66ms | |
step:645/1395 train_time:79800ms step_avg:125.67ms | |
step:646/1395 train_time:79929ms step_avg:125.68ms | |
step:647/1395 train_time:80059ms step_avg:125.68ms | |
step:648/1395 train_time:80188ms step_avg:125.69ms | |
step:649/1395 train_time:80316ms step_avg:125.69ms | |
step:650/1395 train_time:80444ms step_avg:125.69ms | |
step:651/1395 train_time:80575ms step_avg:125.70ms | |
step:652/1395 train_time:80705ms step_avg:125.71ms | |
step:653/1395 train_time:80835ms step_avg:125.71ms | |
step:654/1395 train_time:80964ms step_avg:125.72ms | |
step:655/1395 train_time:81093ms step_avg:125.72ms | |
step:656/1395 train_time:81222ms step_avg:125.73ms | |
step:657/1395 train_time:81350ms step_avg:125.73ms | |
step:658/1395 train_time:81480ms step_avg:125.74ms | |
step:659/1395 train_time:81609ms step_avg:125.75ms | |
step:660/1395 train_time:81738ms step_avg:125.75ms | |
step:661/1395 train_time:81867ms step_avg:125.76ms | |
step:662/1395 train_time:81996ms step_avg:125.76ms | |
step:663/1395 train_time:82125ms step_avg:125.77ms | |
step:664/1395 train_time:82253ms step_avg:125.77ms | |
step:665/1395 train_time:82383ms step_avg:125.78ms | |
step:666/1395 train_time:82512ms step_avg:125.78ms | |
step:667/1395 train_time:82642ms step_avg:125.79ms | |
step:668/1395 train_time:82772ms step_avg:125.79ms | |
step:669/1395 train_time:82903ms step_avg:125.80ms | |
step:670/1395 train_time:83032ms step_avg:125.81ms | |
step:671/1395 train_time:83163ms step_avg:125.81ms | |
step:672/1395 train_time:83291ms step_avg:125.82ms | |
step:673/1395 train_time:83420ms step_avg:125.82ms | |
step:674/1395 train_time:83548ms step_avg:125.83ms | |
step:675/1395 train_time:83678ms step_avg:125.83ms | |
step:676/1395 train_time:83807ms step_avg:125.84ms | |
step:677/1395 train_time:83936ms step_avg:125.84ms | |
step:678/1395 train_time:84065ms step_avg:125.85ms | |
step:679/1395 train_time:84194ms step_avg:125.85ms | |
step:680/1395 train_time:84323ms step_avg:125.86ms | |
step:681/1395 train_time:84453ms step_avg:125.86ms | |
step:682/1395 train_time:84583ms step_avg:125.87ms | |
step:683/1395 train_time:84712ms step_avg:125.87ms | |
step:684/1395 train_time:84841ms step_avg:125.88ms | |
step:685/1395 train_time:84970ms step_avg:125.88ms | |
step:686/1395 train_time:85100ms step_avg:125.89ms | |
step:687/1395 train_time:85228ms step_avg:125.89ms | |
step:688/1395 train_time:85356ms step_avg:125.89ms | |
step:689/1395 train_time:85486ms step_avg:125.90ms | |
step:690/1395 train_time:85614ms step_avg:125.90ms | |
step:691/1395 train_time:85743ms step_avg:125.91ms | |
step:692/1395 train_time:85873ms step_avg:125.91ms | |
step:693/1395 train_time:86004ms step_avg:125.92ms | |
step:694/1395 train_time:86133ms step_avg:125.92ms | |
step:695/1395 train_time:86262ms step_avg:125.93ms | |
step:696/1395 train_time:86392ms step_avg:125.94ms | |
step:697/1395 train_time:86522ms step_avg:125.94ms | |
step:698/1395 train_time:86651ms step_avg:125.95ms | |
step:699/1395 train_time:86782ms step_avg:125.95ms | |
step:700/1395 train_time:86911ms step_avg:125.96ms | |
step:701/1395 train_time:87041ms step_avg:125.96ms | |
step:702/1395 train_time:87170ms step_avg:125.97ms | |
step:703/1395 train_time:87299ms step_avg:125.97ms | |
step:704/1395 train_time:87428ms step_avg:125.98ms | |
step:705/1395 train_time:87557ms step_avg:125.98ms | |
step:706/1395 train_time:87686ms step_avg:125.99ms | |
step:707/1395 train_time:87814ms step_avg:125.99ms | |
step:708/1395 train_time:87943ms step_avg:125.99ms | |
step:709/1395 train_time:88073ms step_avg:126.00ms | |
step:710/1395 train_time:88204ms step_avg:126.01ms | |
step:711/1395 train_time:88332ms step_avg:126.01ms | |
step:712/1395 train_time:88463ms step_avg:126.02ms | |
step:713/1395 train_time:88593ms step_avg:126.02ms | |
step:714/1395 train_time:88722ms step_avg:126.03ms | |
step:715/1395 train_time:88851ms step_avg:126.03ms | |
step:716/1395 train_time:88981ms step_avg:126.04ms | |
step:717/1395 train_time:89110ms step_avg:126.04ms | |
step:718/1395 train_time:89239ms step_avg:126.04ms | |
step:719/1395 train_time:89367ms step_avg:126.05ms | |
step:720/1395 train_time:89497ms step_avg:126.05ms | |
step:721/1395 train_time:89625ms step_avg:126.05ms | |
step:722/1395 train_time:89754ms step_avg:126.06ms | |
step:723/1395 train_time:89884ms step_avg:126.06ms | |
step:724/1395 train_time:90013ms step_avg:126.07ms | |
step:725/1395 train_time:90143ms step_avg:126.07ms | |
step:726/1395 train_time:90276ms step_avg:126.08ms | |
step:727/1395 train_time:90408ms step_avg:126.09ms | |
step:728/1395 train_time:90538ms step_avg:126.10ms | |
step:729/1395 train_time:90669ms step_avg:126.10ms | |
step:730/1395 train_time:90802ms step_avg:126.11ms | |
step:731/1395 train_time:90932ms step_avg:126.12ms | |
step:732/1395 train_time:91063ms step_avg:126.13ms | |
step:733/1395 train_time:91193ms step_avg:126.13ms | |
step:734/1395 train_time:91324ms step_avg:126.14ms | |
step:735/1395 train_time:91455ms step_avg:126.14ms | |
step:736/1395 train_time:91586ms step_avg:126.15ms | |
step:737/1395 train_time:91718ms step_avg:126.16ms | |
step:738/1395 train_time:91848ms step_avg:126.17ms | |
step:739/1395 train_time:91979ms step_avg:126.17ms | |
step:740/1395 train_time:92110ms step_avg:126.18ms | |
step:741/1395 train_time:92243ms step_avg:126.19ms | |
step:742/1395 train_time:92375ms step_avg:126.20ms | |
step:743/1395 train_time:92507ms step_avg:126.20ms | |
step:744/1395 train_time:92639ms step_avg:126.21ms | |
step:745/1395 train_time:92772ms step_avg:126.22ms | |
step:746/1395 train_time:92904ms step_avg:126.23ms | |
step:747/1395 train_time:93034ms step_avg:126.23ms | |
step:748/1395 train_time:93165ms step_avg:126.24ms | |
step:749/1395 train_time:93299ms step_avg:126.25ms | |
step:750/1395 train_time:93429ms step_avg:126.26ms | |
step:750/1395 val_loss:3.5273 train_time:93559ms step_avg:126.43ms | |
step:751/1395 train_time:93579ms step_avg:126.29ms | |
step:752/1395 train_time:93702ms step_avg:126.28ms | |
step:753/1395 train_time:93833ms step_avg:126.29ms | |
step:754/1395 train_time:93963ms step_avg:126.29ms | |
step:755/1395 train_time:94093ms step_avg:126.30ms | |
step:756/1395 train_time:94224ms step_avg:126.31ms | |
step:757/1395 train_time:94354ms step_avg:126.31ms | |
step:758/1395 train_time:94484ms step_avg:126.32ms | |
step:759/1395 train_time:94618ms step_avg:126.33ms | |
step:760/1395 train_time:94751ms step_avg:126.34ms | |
step:761/1395 train_time:94883ms step_avg:126.34ms | |
step:762/1395 train_time:95013ms step_avg:126.35ms | |
step:763/1395 train_time:95143ms step_avg:126.35ms | |
step:764/1395 train_time:95274ms step_avg:126.36ms | |
step:765/1395 train_time:95403ms step_avg:126.36ms | |
step:766/1395 train_time:95534ms step_avg:126.37ms | |
step:767/1395 train_time:95667ms step_avg:126.38ms | |
step:768/1395 train_time:95800ms step_avg:126.39ms | |
step:769/1395 train_time:95933ms step_avg:126.39ms | |
step:770/1395 train_time:96064ms step_avg:126.40ms | |
step:771/1395 train_time:96194ms step_avg:126.41ms | |
step:772/1395 train_time:96324ms step_avg:126.41ms | |
step:773/1395 train_time:96455ms step_avg:126.42ms | |
step:774/1395 train_time:96586ms step_avg:126.42ms | |
step:775/1395 train_time:96716ms step_avg:126.43ms | |
step:776/1395 train_time:96847ms step_avg:126.43ms | |
step:777/1395 train_time:96979ms step_avg:126.44ms | |
step:778/1395 train_time:97109ms step_avg:126.44ms | |
step:779/1395 train_time:97240ms step_avg:126.45ms | |
step:780/1395 train_time:97371ms step_avg:126.46ms | |
step:781/1395 train_time:97502ms step_avg:126.46ms | |
step:782/1395 train_time:97632ms step_avg:126.47ms | |
step:783/1395 train_time:97763ms step_avg:126.47ms | |
step:784/1395 train_time:97893ms step_avg:126.48ms | |
step:785/1395 train_time:98025ms step_avg:126.48ms | |
step:786/1395 train_time:98157ms step_avg:126.49ms | |
step:787/1395 train_time:98287ms step_avg:126.50ms | |
step:788/1395 train_time:98417ms step_avg:126.50ms | |
step:789/1395 train_time:98548ms step_avg:126.51ms | |
step:790/1395 train_time:98679ms step_avg:126.51ms | |
step:791/1395 train_time:98810ms step_avg:126.52ms | |
step:792/1395 train_time:98942ms step_avg:126.52ms | |
step:793/1395 train_time:99071ms step_avg:126.53ms | |
step:794/1395 train_time:99202ms step_avg:126.53ms | |
step:795/1395 train_time:99333ms step_avg:126.54ms | |
step:796/1395 train_time:99463ms step_avg:126.54ms | |
step:797/1395 train_time:99594ms step_avg:126.55ms | |
step:798/1395 train_time:99725ms step_avg:126.56ms | |
step:799/1395 train_time:99860ms step_avg:126.57ms | |
step:800/1395 train_time:99988ms step_avg:126.57ms | |
step:801/1395 train_time:100121ms step_avg:126.57ms | |
step:802/1395 train_time:100254ms step_avg:126.58ms | |
step:803/1395 train_time:100385ms step_avg:126.59ms | |
step:804/1395 train_time:100516ms step_avg:126.59ms | |
step:805/1395 train_time:100647ms step_avg:126.60ms | |
step:806/1395 train_time:100777ms step_avg:126.60ms | |
step:807/1395 train_time:100907ms step_avg:126.61ms | |
step:808/1395 train_time:101040ms step_avg:126.62ms | |
step:809/1395 train_time:101170ms step_avg:126.62ms | |
step:810/1395 train_time:101301ms step_avg:126.63ms | |
step:811/1395 train_time:101432ms step_avg:126.63ms | |
step:812/1395 train_time:101563ms step_avg:126.64ms | |
step:813/1395 train_time:101693ms step_avg:126.64ms | |
step:814/1395 train_time:101824ms step_avg:126.65ms | |
step:815/1395 train_time:101959ms step_avg:126.66ms | |
step:816/1395 train_time:102086ms step_avg:126.66ms | |
step:817/1395 train_time:102216ms step_avg:126.66ms | |
step:818/1395 train_time:102347ms step_avg:126.67ms | |
step:819/1395 train_time:102477ms step_avg:126.67ms | |
step:820/1395 train_time:102607ms step_avg:126.68ms | |
step:821/1395 train_time:102739ms step_avg:126.68ms | |
step:822/1395 train_time:102868ms step_avg:126.69ms | |
step:823/1395 train_time:103000ms step_avg:126.69ms | |
step:824/1395 train_time:103131ms step_avg:126.70ms | |
step:825/1395 train_time:103264ms step_avg:126.70ms | |
step:826/1395 train_time:103396ms step_avg:126.71ms | |
step:827/1395 train_time:103527ms step_avg:126.72ms | |
step:828/1395 train_time:103660ms step_avg:126.72ms | |
step:829/1395 train_time:103791ms step_avg:126.73ms | |
step:830/1395 train_time:103923ms step_avg:126.73ms | |
step:831/1395 train_time:104055ms step_avg:126.74ms | |
step:832/1395 train_time:104186ms step_avg:126.75ms | |
step:833/1395 train_time:104317ms step_avg:126.75ms | |
step:834/1395 train_time:104448ms step_avg:126.76ms | |
step:835/1395 train_time:104580ms step_avg:126.76ms | |
step:836/1395 train_time:104712ms step_avg:126.77ms | |
step:837/1395 train_time:104844ms step_avg:126.78ms | |
step:838/1395 train_time:104976ms step_avg:126.78ms | |
step:839/1395 train_time:105106ms step_avg:126.79ms | |
step:840/1395 train_time:105238ms step_avg:126.79ms | |
step:841/1395 train_time:105368ms step_avg:126.80ms | |
step:842/1395 train_time:105500ms step_avg:126.80ms | |
step:843/1395 train_time:105631ms step_avg:126.81ms | |
step:844/1395 train_time:105762ms step_avg:126.81ms | |
step:845/1395 train_time:105894ms step_avg:126.82ms | |
step:846/1395 train_time:106025ms step_avg:126.82ms | |
step:847/1395 train_time:106157ms step_avg:126.83ms | |
step:848/1395 train_time:106288ms step_avg:126.84ms | |
step:849/1395 train_time:106421ms step_avg:126.84ms | |
step:850/1395 train_time:106556ms step_avg:126.85ms | |
step:851/1395 train_time:106684ms step_avg:126.85ms | |
step:852/1395 train_time:106813ms step_avg:126.86ms | |
step:853/1395 train_time:106944ms step_avg:126.86ms | |
step:854/1395 train_time:107076ms step_avg:126.87ms | |
step:855/1395 train_time:107207ms step_avg:126.87ms | |
step:856/1395 train_time:107338ms step_avg:126.88ms | |
step:857/1395 train_time:107469ms step_avg:126.88ms | |
step:858/1395 train_time:107601ms step_avg:126.89ms | |
step:859/1395 train_time:107733ms step_avg:126.89ms | |
step:860/1395 train_time:107865ms step_avg:126.90ms | |
step:861/1395 train_time:107995ms step_avg:126.90ms | |
step:862/1395 train_time:108127ms step_avg:126.91ms | |
step:863/1395 train_time:108261ms step_avg:126.92ms | |
step:864/1395 train_time:108393ms step_avg:126.92ms | |
step:865/1395 train_time:108524ms step_avg:126.93ms | |
step:866/1395 train_time:108658ms step_avg:126.94ms | |
step:867/1395 train_time:108788ms step_avg:126.94ms | |
step:868/1395 train_time:108919ms step_avg:126.95ms | |
step:869/1395 train_time:109051ms step_avg:126.95ms | |
step:870/1395 train_time:109182ms step_avg:126.96ms | |
step:871/1395 train_time:109312ms step_avg:126.96ms | |
step:872/1395 train_time:109450ms step_avg:126.97ms | |
step:873/1395 train_time:109576ms step_avg:126.97ms | |
step:874/1395 train_time:109706ms step_avg:126.97ms | |
step:875/1395 train_time:109840ms step_avg:126.98ms | |
step:875/1395 val_loss:3.4783 train_time:109970ms step_avg:127.13ms | |
step:876/1395 train_time:109990ms step_avg:127.01ms | |
step:877/1395 train_time:110113ms step_avg:127.00ms | |
step:878/1395 train_time:110246ms step_avg:127.01ms | |
step:879/1395 train_time:110378ms step_avg:127.02ms | |
step:880/1395 train_time:110508ms step_avg:127.02ms | |
step:881/1395 train_time:110637ms step_avg:127.02ms | |
step:882/1395 train_time:110766ms step_avg:127.03ms | |
step:883/1395 train_time:110899ms step_avg:127.03ms | |
step:884/1395 train_time:111030ms step_avg:127.04ms | |
step:885/1395 train_time:111164ms step_avg:127.04ms | |
step:886/1395 train_time:111297ms step_avg:127.05ms | |
step:887/1395 train_time:111429ms step_avg:127.06ms | |
step:888/1395 train_time:111562ms step_avg:127.06ms | |
step:889/1395 train_time:111695ms step_avg:127.07ms | |
step:890/1395 train_time:111825ms step_avg:127.07ms | |
step:891/1395 train_time:111956ms step_avg:127.08ms | |
step:892/1395 train_time:112088ms step_avg:127.08ms | |
step:893/1395 train_time:112219ms step_avg:127.09ms | |
step:894/1395 train_time:112352ms step_avg:127.09ms | |
step:895/1395 train_time:112483ms step_avg:127.10ms | |
step:896/1395 train_time:112616ms step_avg:127.11ms | |
step:897/1395 train_time:112747ms step_avg:127.11ms | |
step:898/1395 train_time:112878ms step_avg:127.12ms | |
step:899/1395 train_time:113010ms step_avg:127.12ms | |
step:900/1395 train_time:113141ms step_avg:127.12ms | |
step:901/1395 train_time:113273ms step_avg:127.13ms | |
step:902/1395 train_time:113404ms step_avg:127.13ms | |
step:903/1395 train_time:113539ms step_avg:127.14ms | |
step:904/1395 train_time:113670ms step_avg:127.15ms | |
step:905/1395 train_time:113803ms step_avg:127.15ms | |
step:906/1395 train_time:113933ms step_avg:127.16ms | |
step:907/1395 train_time:114065ms step_avg:127.16ms | |
step:908/1395 train_time:114197ms step_avg:127.17ms | |
step:909/1395 train_time:114327ms step_avg:127.17ms | |
step:910/1395 train_time:114459ms step_avg:127.18ms | |
step:911/1395 train_time:114589ms step_avg:127.18ms | |
step:912/1395 train_time:114723ms step_avg:127.19ms | |
step:913/1395 train_time:114854ms step_avg:127.19ms | |
step:914/1395 train_time:114985ms step_avg:127.20ms | |
step:915/1395 train_time:115116ms step_avg:127.20ms | |
step:916/1395 train_time:115246ms step_avg:127.20ms | |
step:917/1395 train_time:115377ms step_avg:127.21ms | |
step:918/1395 train_time:115507ms step_avg:127.21ms | |
step:919/1395 train_time:115638ms step_avg:127.21ms | |
step:920/1395 train_time:115768ms step_avg:127.22ms | |
step:921/1395 train_time:115901ms step_avg:127.22ms | |
step:922/1395 train_time:116033ms step_avg:127.23ms | |
step:923/1395 train_time:116164ms step_avg:127.23ms | |
step:924/1395 train_time:116295ms step_avg:127.24ms | |
step:925/1395 train_time:116426ms step_avg:127.24ms | |
step:926/1395 train_time:116559ms step_avg:127.25ms | |
step:927/1395 train_time:116690ms step_avg:127.25ms | |
step:928/1395 train_time:116821ms step_avg:127.26ms | |
step:929/1395 train_time:116951ms step_avg:127.26ms | |
step:930/1395 train_time:117084ms step_avg:127.27ms | |
step:931/1395 train_time:117214ms step_avg:127.27ms | |
step:932/1395 train_time:117345ms step_avg:127.27ms | |
step:933/1395 train_time:117480ms step_avg:127.28ms | |
step:934/1395 train_time:117612ms step_avg:127.29ms | |
step:935/1395 train_time:117748ms step_avg:127.30ms | |
step:936/1395 train_time:117882ms step_avg:127.30ms | |
step:937/1395 train_time:118015ms step_avg:127.31ms | |
step:938/1395 train_time:118147ms step_avg:127.31ms | |
step:939/1395 train_time:118280ms step_avg:127.32ms | |
step:940/1395 train_time:118415ms step_avg:127.33ms | |
step:941/1395 train_time:118546ms step_avg:127.33ms | |
step:942/1395 train_time:118680ms step_avg:127.34ms | |
step:943/1395 train_time:118819ms step_avg:127.35ms | |
step:944/1395 train_time:118948ms step_avg:127.35ms | |
step:945/1395 train_time:119080ms step_avg:127.36ms | |
step:946/1395 train_time:119213ms step_avg:127.36ms | |
step:947/1395 train_time:119346ms step_avg:127.37ms | |
step:948/1395 train_time:119480ms step_avg:127.38ms | |
step:949/1395 train_time:119613ms step_avg:127.38ms | |
step:950/1395 train_time:119745ms step_avg:127.39ms | |
step:951/1395 train_time:119878ms step_avg:127.39ms | |
step:952/1395 train_time:120010ms step_avg:127.40ms | |
step:953/1395 train_time:120145ms step_avg:127.41ms | |
step:954/1395 train_time:120278ms step_avg:127.41ms | |
step:955/1395 train_time:120411ms step_avg:127.42ms | |
step:956/1395 train_time:120545ms step_avg:127.43ms | |
step:957/1395 train_time:120677ms step_avg:127.43ms | |
step:958/1395 train_time:120811ms step_avg:127.44ms | |
step:959/1395 train_time:120942ms step_avg:127.44ms | |
step:960/1395 train_time:121074ms step_avg:127.45ms | |
step:961/1395 train_time:121207ms step_avg:127.45ms | |
step:962/1395 train_time:121341ms step_avg:127.46ms | |
step:963/1395 train_time:121473ms step_avg:127.46ms | |
step:964/1395 train_time:121607ms step_avg:127.47ms | |
step:965/1395 train_time:121741ms step_avg:127.48ms | |
step:966/1395 train_time:121875ms step_avg:127.48ms | |
step:967/1395 train_time:122007ms step_avg:127.49ms | |
step:968/1395 train_time:122140ms step_avg:127.49ms | |
step:969/1395 train_time:122273ms step_avg:127.50ms | |
step:970/1395 train_time:122406ms step_avg:127.51ms | |
step:971/1395 train_time:122539ms step_avg:127.51ms | |
step:972/1395 train_time:122671ms step_avg:127.52ms | |
step:973/1395 train_time:122804ms step_avg:127.52ms | |
step:974/1395 train_time:122938ms step_avg:127.53ms | |
step:975/1395 train_time:123070ms step_avg:127.53ms | |
step:976/1395 train_time:123206ms step_avg:127.54ms | |
step:977/1395 train_time:123339ms step_avg:127.55ms | |
step:978/1395 train_time:123472ms step_avg:127.55ms | |
step:979/1395 train_time:123605ms step_avg:127.56ms | |
step:980/1395 train_time:123739ms step_avg:127.57ms | |
step:981/1395 train_time:123872ms step_avg:127.57ms | |
step:982/1395 train_time:124006ms step_avg:127.58ms | |
step:983/1395 train_time:124138ms step_avg:127.58ms | |
step:984/1395 train_time:124270ms step_avg:127.59ms | |
step:985/1395 train_time:124404ms step_avg:127.59ms | |
step:986/1395 train_time:124537ms step_avg:127.60ms | |
step:987/1395 train_time:124670ms step_avg:127.60ms | |
step:988/1395 train_time:124805ms step_avg:127.61ms | |
step:989/1395 train_time:124937ms step_avg:127.62ms | |
step:990/1395 train_time:125069ms step_avg:127.62ms | |
step:991/1395 train_time:125203ms step_avg:127.63ms | |
step:992/1395 train_time:125335ms step_avg:127.63ms | |
step:993/1395 train_time:125469ms step_avg:127.64ms | |
step:994/1395 train_time:125602ms step_avg:127.64ms | |
step:995/1395 train_time:125734ms step_avg:127.65ms | |
step:996/1395 train_time:125867ms step_avg:127.65ms | |
step:997/1395 train_time:125999ms step_avg:127.66ms | |
step:998/1395 train_time:126130ms step_avg:127.66ms | |
step:999/1395 train_time:126264ms step_avg:127.67ms | |
step:1000/1395 train_time:126400ms step_avg:127.68ms | |
step:1000/1395 val_loss:3.4145 train_time:126530ms step_avg:127.81ms | |
step:1001/1395 train_time:126550ms step_avg:127.70ms | |
step:1002/1395 train_time:126674ms step_avg:127.70ms | |
step:1003/1395 train_time:126808ms step_avg:127.70ms | |
step:1004/1395 train_time:126939ms step_avg:127.71ms | |
step:1005/1395 train_time:127074ms step_avg:127.71ms | |
step:1006/1395 train_time:127206ms step_avg:127.72ms | |
step:1007/1395 train_time:127337ms step_avg:127.72ms | |
step:1008/1395 train_time:127472ms step_avg:127.73ms | |
step:1009/1395 train_time:127607ms step_avg:127.73ms | |
step:1010/1395 train_time:127740ms step_avg:127.74ms | |
step:1011/1395 train_time:127875ms step_avg:127.75ms | |
step:1012/1395 train_time:128007ms step_avg:127.75ms | |
step:1013/1395 train_time:128141ms step_avg:127.76ms | |
step:1014/1395 train_time:128274ms step_avg:127.76ms | |
step:1015/1395 train_time:128406ms step_avg:127.77ms | |
step:1016/1395 train_time:128538ms step_avg:127.77ms | |
step:1017/1395 train_time:128676ms step_avg:127.78ms | |
step:1018/1395 train_time:128811ms step_avg:127.79ms | |
step:1019/1395 train_time:128944ms step_avg:127.79ms | |
step:1020/1395 train_time:129076ms step_avg:127.80ms | |
step:1021/1395 train_time:129207ms step_avg:127.80ms | |
step:1022/1395 train_time:129339ms step_avg:127.81ms | |
step:1023/1395 train_time:129473ms step_avg:127.81ms | |
step:1024/1395 train_time:129604ms step_avg:127.81ms | |
step:1025/1395 train_time:129738ms step_avg:127.82ms | |
step:1026/1395 train_time:129873ms step_avg:127.83ms | |
step:1027/1395 train_time:130009ms step_avg:127.84ms | |
step:1028/1395 train_time:130141ms step_avg:127.84ms | |
step:1029/1395 train_time:130277ms step_avg:127.85ms | |
step:1030/1395 train_time:130411ms step_avg:127.85ms | |
step:1031/1395 train_time:130543ms step_avg:127.86ms | |
step:1032/1395 train_time:130674ms step_avg:127.86ms | |
step:1033/1395 train_time:130807ms step_avg:127.87ms | |
step:1034/1395 train_time:130941ms step_avg:127.87ms | |
step:1035/1395 train_time:131076ms step_avg:127.88ms | |
step:1036/1395 train_time:131209ms step_avg:127.88ms | |
step:1037/1395 train_time:131343ms step_avg:127.89ms | |
step:1038/1395 train_time:131476ms step_avg:127.90ms | |
step:1039/1395 train_time:131610ms step_avg:127.90ms | |
step:1040/1395 train_time:131743ms step_avg:127.91ms | |
step:1041/1395 train_time:131876ms step_avg:127.91ms | |
step:1042/1395 train_time:132008ms step_avg:127.91ms | |
step:1043/1395 train_time:132142ms step_avg:127.92ms | |
step:1044/1395 train_time:132275ms step_avg:127.93ms | |
step:1045/1395 train_time:132409ms step_avg:127.93ms | |
step:1046/1395 train_time:132544ms step_avg:127.94ms | |
step:1047/1395 train_time:132677ms step_avg:127.94ms | |
step:1048/1395 train_time:132810ms step_avg:127.95ms | |
step:1049/1395 train_time:132944ms step_avg:127.95ms | |
step:1050/1395 train_time:133076ms step_avg:127.96ms | |
step:1051/1395 train_time:133211ms step_avg:127.96ms | |
step:1052/1395 train_time:133345ms step_avg:127.97ms | |
step:1053/1395 train_time:133477ms step_avg:127.97ms | |
step:1054/1395 train_time:133613ms step_avg:127.98ms | |
step:1055/1395 train_time:133745ms step_avg:127.99ms | |
step:1056/1395 train_time:133878ms step_avg:127.99ms | |
step:1057/1395 train_time:134011ms step_avg:128.00ms | |
step:1058/1395 train_time:134144ms step_avg:128.00ms | |
step:1059/1395 train_time:134276ms step_avg:128.00ms | |
step:1060/1395 train_time:134409ms step_avg:128.01ms | |
step:1061/1395 train_time:134541ms step_avg:128.01ms | |
step:1062/1395 train_time:134675ms step_avg:128.02ms | |
step:1063/1395 train_time:134810ms step_avg:128.02ms | |
step:1064/1395 train_time:134941ms step_avg:128.03ms | |
step:1065/1395 train_time:135076ms step_avg:128.03ms | |
step:1066/1395 train_time:135208ms step_avg:128.04ms | |
step:1067/1395 train_time:135341ms step_avg:128.04ms | |
step:1068/1395 train_time:135476ms step_avg:128.05ms | |
step:1069/1395 train_time:135610ms step_avg:128.05ms | |
step:1070/1395 train_time:135742ms step_avg:128.06ms | |
step:1071/1395 train_time:135875ms step_avg:128.06ms | |
step:1072/1395 train_time:136008ms step_avg:128.07ms | |
step:1073/1395 train_time:136141ms step_avg:128.07ms | |
step:1074/1395 train_time:136274ms step_avg:128.08ms | |
step:1075/1395 train_time:136408ms step_avg:128.08ms | |
step:1076/1395 train_time:136542ms step_avg:128.09ms | |
step:1077/1395 train_time:136677ms step_avg:128.09ms | |
step:1078/1395 train_time:136810ms step_avg:128.10ms | |
step:1079/1395 train_time:136945ms step_avg:128.11ms | |
step:1080/1395 train_time:137078ms step_avg:128.11ms | |
step:1081/1395 train_time:137213ms step_avg:128.12ms | |
step:1082/1395 train_time:137345ms step_avg:128.12ms | |
step:1083/1395 train_time:137478ms step_avg:128.13ms | |
step:1084/1395 train_time:137612ms step_avg:128.13ms | |
step:1085/1395 train_time:137746ms step_avg:128.14ms | |
step:1086/1395 train_time:137879ms step_avg:128.14ms | |
step:1087/1395 train_time:138013ms step_avg:128.15ms | |
step:1088/1395 train_time:138147ms step_avg:128.15ms | |
step:1089/1395 train_time:138281ms step_avg:128.16ms | |
step:1090/1395 train_time:138415ms step_avg:128.16ms | |
step:1091/1395 train_time:138551ms step_avg:128.17ms | |
step:1092/1395 train_time:138683ms step_avg:128.17ms | |
step:1093/1395 train_time:138815ms step_avg:128.18ms | |
step:1094/1395 train_time:138951ms step_avg:128.18ms | |
step:1095/1395 train_time:139084ms step_avg:128.19ms | |
step:1096/1395 train_time:139217ms step_avg:128.19ms | |
step:1097/1395 train_time:139350ms step_avg:128.20ms | |
step:1098/1395 train_time:139484ms step_avg:128.20ms | |
step:1099/1395 train_time:139617ms step_avg:128.21ms | |
step:1100/1395 train_time:139751ms step_avg:128.21ms | |
step:1101/1395 train_time:139883ms step_avg:128.22ms | |
step:1102/1395 train_time:140016ms step_avg:128.22ms | |
step:1103/1395 train_time:140149ms step_avg:128.22ms | |
step:1104/1395 train_time:140283ms step_avg:128.23ms | |
step:1105/1395 train_time:140415ms step_avg:128.23ms | |
step:1106/1395 train_time:140548ms step_avg:128.24ms | |
step:1107/1395 train_time:140682ms step_avg:128.24ms | |
step:1108/1395 train_time:140814ms step_avg:128.25ms | |
step:1109/1395 train_time:140948ms step_avg:128.25ms | |
step:1110/1395 train_time:141079ms step_avg:128.25ms | |
step:1111/1395 train_time:141213ms step_avg:128.26ms | |
step:1112/1395 train_time:141347ms step_avg:128.26ms | |
step:1113/1395 train_time:141479ms step_avg:128.27ms | |
step:1114/1395 train_time:141613ms step_avg:128.27ms | |
step:1115/1395 train_time:141748ms step_avg:128.28ms | |
step:1116/1395 train_time:141880ms step_avg:128.28ms | |
step:1117/1395 train_time:142014ms step_avg:128.29ms | |
step:1118/1395 train_time:142150ms step_avg:128.29ms | |
step:1119/1395 train_time:142281ms step_avg:128.30ms | |
step:1120/1395 train_time:142415ms step_avg:128.30ms | |
step:1121/1395 train_time:142550ms step_avg:128.31ms | |
step:1122/1395 train_time:142682ms step_avg:128.31ms | |
step:1123/1395 train_time:142813ms step_avg:128.31ms | |
step:1124/1395 train_time:142948ms step_avg:128.32ms | |
step:1125/1395 train_time:143080ms step_avg:128.32ms | |
step:1125/1395 val_loss:3.3641 train_time:143212ms step_avg:128.44ms | |
step:1126/1395 train_time:143232ms step_avg:128.34ms | |
step:1127/1395 train_time:143355ms step_avg:128.34ms | |
step:1128/1395 train_time:143490ms step_avg:128.34ms | |
step:1129/1395 train_time:143621ms step_avg:128.35ms | |
step:1130/1395 train_time:143753ms step_avg:128.35ms | |
step:1131/1395 train_time:143887ms step_avg:128.36ms | |
step:1132/1395 train_time:144018ms step_avg:128.36ms | |
step:1133/1395 train_time:144151ms step_avg:128.36ms | |
step:1134/1395 train_time:144289ms step_avg:128.37ms | |
step:1135/1395 train_time:144424ms step_avg:128.38ms | |
step:1136/1395 train_time:144557ms step_avg:128.38ms | |
step:1137/1395 train_time:144690ms step_avg:128.39ms | |
step:1138/1395 train_time:144823ms step_avg:128.39ms | |
step:1139/1395 train_time:144957ms step_avg:128.39ms | |
step:1140/1395 train_time:145091ms step_avg:128.40ms | |
step:1141/1395 train_time:145226ms step_avg:128.40ms | |
step:1142/1395 train_time:145362ms step_avg:128.41ms | |
step:1143/1395 train_time:145496ms step_avg:128.42ms | |
step:1144/1395 train_time:145631ms step_avg:128.42ms | |
step:1145/1395 train_time:145766ms step_avg:128.43ms | |
step:1146/1395 train_time:145899ms step_avg:128.43ms | |
step:1147/1395 train_time:146033ms step_avg:128.44ms | |
step:1148/1395 train_time:146167ms step_avg:128.44ms | |
step:1149/1395 train_time:146302ms step_avg:128.45ms | |
step:1150/1395 train_time:146439ms step_avg:128.45ms | |
step:1151/1395 train_time:146575ms step_avg:128.46ms | |
step:1152/1395 train_time:146708ms step_avg:128.47ms | |
step:1153/1395 train_time:146845ms step_avg:128.47ms | |
step:1154/1395 train_time:146980ms step_avg:128.48ms | |
step:1155/1395 train_time:147113ms step_avg:128.48ms | |
step:1156/1395 train_time:147252ms step_avg:128.49ms | |
step:1157/1395 train_time:147389ms step_avg:128.50ms | |
step:1158/1395 train_time:147524ms step_avg:128.50ms | |
step:1159/1395 train_time:147656ms step_avg:128.51ms | |
step:1160/1395 train_time:147790ms step_avg:128.51ms | |
step:1161/1395 train_time:147924ms step_avg:128.52ms | |
step:1162/1395 train_time:148059ms step_avg:128.52ms | |
step:1163/1395 train_time:148192ms step_avg:128.53ms | |
step:1164/1395 train_time:148329ms step_avg:128.53ms | |
step:1165/1395 train_time:148462ms step_avg:128.54ms | |
step:1166/1395 train_time:148598ms step_avg:128.54ms | |
step:1167/1395 train_time:148734ms step_avg:128.55ms | |
step:1168/1395 train_time:148869ms step_avg:128.56ms | |
step:1169/1395 train_time:149002ms step_avg:128.56ms | |
step:1170/1395 train_time:149137ms step_avg:128.57ms | |
step:1171/1395 train_time:149271ms step_avg:128.57ms | |
step:1172/1395 train_time:149407ms step_avg:128.58ms | |
step:1173/1395 train_time:149541ms step_avg:128.58ms | |
step:1174/1395 train_time:149680ms step_avg:128.59ms | |
step:1175/1395 train_time:149814ms step_avg:128.60ms | |
step:1176/1395 train_time:149948ms step_avg:128.60ms | |
step:1177/1395 train_time:150085ms step_avg:128.61ms | |
step:1178/1395 train_time:150220ms step_avg:128.61ms | |
step:1179/1395 train_time:150353ms step_avg:128.62ms | |
step:1180/1395 train_time:150489ms step_avg:128.62ms | |
step:1181/1395 train_time:150625ms step_avg:128.63ms | |
step:1182/1395 train_time:150759ms step_avg:128.63ms | |
step:1183/1395 train_time:150894ms step_avg:128.64ms | |
step:1184/1395 train_time:151028ms step_avg:128.64ms | |
step:1185/1395 train_time:151163ms step_avg:128.65ms | |
step:1186/1395 train_time:151298ms step_avg:128.65ms | |
step:1187/1395 train_time:151436ms step_avg:128.66ms | |
step:1188/1395 train_time:151571ms step_avg:128.67ms | |
step:1189/1395 train_time:151706ms step_avg:128.67ms | |
step:1190/1395 train_time:151840ms step_avg:128.68ms | |
step:1191/1395 train_time:151974ms step_avg:128.68ms | |
step:1192/1395 train_time:152108ms step_avg:128.69ms | |
step:1193/1395 train_time:152241ms step_avg:128.69ms | |
step:1194/1395 train_time:152375ms step_avg:128.70ms | |
step:1195/1395 train_time:152512ms step_avg:128.70ms | |
step:1196/1395 train_time:152649ms step_avg:128.71ms | |
step:1197/1395 train_time:152784ms step_avg:128.71ms | |
step:1198/1395 train_time:152923ms step_avg:128.72ms | |
step:1199/1395 train_time:153056ms step_avg:128.73ms | |
step:1200/1395 train_time:153191ms step_avg:128.73ms | |
step:1201/1395 train_time:153325ms step_avg:128.74ms | |
step:1202/1395 train_time:153461ms step_avg:128.74ms | |
step:1203/1395 train_time:153600ms step_avg:128.75ms | |
step:1204/1395 train_time:153734ms step_avg:128.76ms | |
step:1205/1395 train_time:153868ms step_avg:128.76ms | |
step:1206/1395 train_time:154002ms step_avg:128.76ms | |
step:1207/1395 train_time:154136ms step_avg:128.77ms | |
step:1208/1395 train_time:154270ms step_avg:128.77ms | |
step:1209/1395 train_time:154407ms step_avg:128.78ms | |
step:1210/1395 train_time:154542ms step_avg:128.78ms | |
step:1211/1395 train_time:154679ms step_avg:128.79ms | |
step:1212/1395 train_time:154812ms step_avg:128.80ms | |
step:1213/1395 train_time:154947ms step_avg:128.80ms | |
step:1214/1395 train_time:155084ms step_avg:128.81ms | |
step:1215/1395 train_time:155223ms step_avg:128.82ms | |
step:1216/1395 train_time:155357ms step_avg:128.82ms | |
step:1217/1395 train_time:155495ms step_avg:128.83ms | |
step:1218/1395 train_time:155630ms step_avg:128.83ms | |
step:1219/1395 train_time:155765ms step_avg:128.84ms | |
step:1220/1395 train_time:155899ms step_avg:128.84ms | |
step:1221/1395 train_time:156034ms step_avg:128.85ms | |
step:1222/1395 train_time:156168ms step_avg:128.85ms | |
step:1223/1395 train_time:156301ms step_avg:128.85ms | |
step:1224/1395 train_time:156437ms step_avg:128.86ms | |
step:1225/1395 train_time:156571ms step_avg:128.87ms | |
step:1226/1395 train_time:156705ms step_avg:128.87ms | |
step:1227/1395 train_time:156839ms step_avg:128.87ms | |
step:1228/1395 train_time:156974ms step_avg:128.88ms | |
step:1229/1395 train_time:157109ms step_avg:128.88ms | |
step:1230/1395 train_time:157245ms step_avg:128.89ms | |
step:1231/1395 train_time:157380ms step_avg:128.89ms | |
step:1232/1395 train_time:157515ms step_avg:128.90ms | |
step:1233/1395 train_time:157648ms step_avg:128.90ms | |
step:1234/1395 train_time:157782ms step_avg:128.91ms | |
step:1235/1395 train_time:157915ms step_avg:128.91ms | |
step:1236/1395 train_time:158048ms step_avg:128.91ms | |
step:1237/1395 train_time:158185ms step_avg:128.92ms | |
step:1238/1395 train_time:158323ms step_avg:128.93ms | |
step:1239/1395 train_time:158456ms step_avg:128.93ms | |
step:1240/1395 train_time:158589ms step_avg:128.93ms | |
step:1241/1395 train_time:158725ms step_avg:128.94ms | |
step:1242/1395 train_time:158859ms step_avg:128.94ms | |
step:1243/1395 train_time:158993ms step_avg:128.95ms | |
step:1244/1395 train_time:159128ms step_avg:128.95ms | |
step:1245/1395 train_time:159263ms step_avg:128.96ms | |
step:1246/1395 train_time:159397ms step_avg:128.96ms | |
step:1247/1395 train_time:159532ms step_avg:128.97ms | |
step:1248/1395 train_time:159667ms step_avg:128.97ms | |
step:1249/1395 train_time:159800ms step_avg:128.97ms | |
step:1250/1395 train_time:159934ms step_avg:128.98ms | |
step:1250/1395 val_loss:3.3178 train_time:160068ms step_avg:129.09ms | |
step:1251/1395 train_time:160088ms step_avg:129.00ms | |
step:1252/1395 train_time:160211ms step_avg:128.99ms | |
step:1253/1395 train_time:160347ms step_avg:129.00ms | |
step:1254/1395 train_time:160482ms step_avg:129.00ms | |
step:1255/1395 train_time:160620ms step_avg:129.01ms | |
step:1256/1395 train_time:160753ms step_avg:129.01ms | |
step:1257/1395 train_time:160887ms step_avg:129.02ms | |
step:1258/1395 train_time:161022ms step_avg:129.02ms | |
step:1259/1395 train_time:161159ms step_avg:129.03ms | |
step:1260/1395 train_time:161294ms step_avg:129.04ms | |
step:1261/1395 train_time:161429ms step_avg:129.04ms | |
step:1262/1395 train_time:161567ms step_avg:129.05ms | |
step:1263/1395 train_time:161704ms step_avg:129.05ms | |
step:1264/1395 train_time:161837ms step_avg:129.06ms | |
step:1265/1395 train_time:161971ms step_avg:129.06ms | |
step:1266/1395 train_time:162106ms step_avg:129.07ms | |
step:1267/1395 train_time:162240ms step_avg:129.07ms | |
step:1268/1395 train_time:162374ms step_avg:129.07ms | |
step:1269/1395 train_time:162510ms step_avg:129.08ms | |
step:1270/1395 train_time:162644ms step_avg:129.08ms | |
step:1271/1395 train_time:162778ms step_avg:129.09ms | |
step:1272/1395 train_time:162911ms step_avg:129.09ms | |
step:1273/1395 train_time:163045ms step_avg:129.09ms | |
step:1274/1395 train_time:163179ms step_avg:129.10ms | |
step:1275/1395 train_time:163313ms step_avg:129.10ms | |
step:1276/1395 train_time:163448ms step_avg:129.11ms | |
step:1277/1395 train_time:163583ms step_avg:129.11ms | |
step:1278/1395 train_time:163720ms step_avg:129.12ms | |
step:1279/1395 train_time:163856ms step_avg:129.12ms | |
step:1280/1395 train_time:163992ms step_avg:129.13ms | |
step:1281/1395 train_time:164127ms step_avg:129.13ms | |
step:1282/1395 train_time:164262ms step_avg:129.14ms | |
step:1283/1395 train_time:164397ms step_avg:129.14ms | |
step:1284/1395 train_time:164531ms step_avg:129.15ms | |
step:1285/1395 train_time:164666ms step_avg:129.15ms | |
step:1286/1395 train_time:164800ms step_avg:129.15ms | |
step:1287/1395 train_time:164937ms step_avg:129.16ms | |
step:1288/1395 train_time:165070ms step_avg:129.16ms | |
step:1289/1395 train_time:165210ms step_avg:129.17ms | |
step:1290/1395 train_time:165345ms step_avg:129.18ms | |
step:1291/1395 train_time:165480ms step_avg:129.18ms | |
step:1292/1395 train_time:165614ms step_avg:129.18ms | |
step:1293/1395 train_time:165749ms step_avg:129.19ms | |
step:1294/1395 train_time:165884ms step_avg:129.19ms | |
step:1295/1395 train_time:166018ms step_avg:129.20ms | |
step:1296/1395 train_time:166152ms step_avg:129.20ms | |
step:1297/1395 train_time:166288ms step_avg:129.21ms | |
step:1298/1395 train_time:166423ms step_avg:129.21ms | |
step:1299/1395 train_time:166559ms step_avg:129.22ms | |
step:1300/1395 train_time:166693ms step_avg:129.22ms | |
step:1301/1395 train_time:166828ms step_avg:129.22ms | |
step:1302/1395 train_time:166963ms step_avg:129.23ms | |
step:1303/1395 train_time:167099ms step_avg:129.23ms | |
step:1304/1395 train_time:167235ms step_avg:129.24ms | |
step:1305/1395 train_time:167368ms step_avg:129.24ms | |
step:1306/1395 train_time:167505ms step_avg:129.25ms | |
step:1307/1395 train_time:167640ms step_avg:129.25ms | |
step:1308/1395 train_time:167775ms step_avg:129.26ms | |
step:1309/1395 train_time:167910ms step_avg:129.26ms | |
step:1310/1395 train_time:168044ms step_avg:129.26ms | |
step:1311/1395 train_time:168178ms step_avg:129.27ms | |
step:1312/1395 train_time:168313ms step_avg:129.27ms | |
step:1313/1395 train_time:168447ms step_avg:129.28ms | |
step:1314/1395 train_time:168581ms step_avg:129.28ms | |
step:1315/1395 train_time:168716ms step_avg:129.28ms | |
step:1316/1395 train_time:168849ms step_avg:129.29ms | |
step:1317/1395 train_time:168987ms step_avg:129.29ms | |
step:1318/1395 train_time:169122ms step_avg:129.30ms | |
step:1319/1395 train_time:169258ms step_avg:129.30ms | |
step:1320/1395 train_time:169391ms step_avg:129.31ms | |
step:1321/1395 train_time:169526ms step_avg:129.31ms | |
step:1322/1395 train_time:169663ms step_avg:129.32ms | |
step:1323/1395 train_time:169797ms step_avg:129.32ms | |
step:1324/1395 train_time:169932ms step_avg:129.32ms | |
step:1325/1395 train_time:170067ms step_avg:129.33ms | |
step:1326/1395 train_time:170201ms step_avg:129.33ms | |
step:1327/1395 train_time:170336ms step_avg:129.34ms | |
step:1328/1395 train_time:170469ms step_avg:129.34ms | |
step:1329/1395 train_time:170606ms step_avg:129.35ms | |
step:1330/1395 train_time:170742ms step_avg:129.35ms | |
step:1331/1395 train_time:170880ms step_avg:129.36ms | |
step:1332/1395 train_time:171015ms step_avg:129.36ms | |
step:1333/1395 train_time:171152ms step_avg:129.37ms | |
step:1334/1395 train_time:171286ms step_avg:129.37ms | |
step:1335/1395 train_time:171421ms step_avg:129.37ms | |
step:1336/1395 train_time:171557ms step_avg:129.38ms | |
step:1337/1395 train_time:171691ms step_avg:129.38ms | |
step:1338/1395 train_time:171825ms step_avg:129.39ms | |
step:1339/1395 train_time:171959ms step_avg:129.39ms | |
step:1340/1395 train_time:172095ms step_avg:129.39ms | |
step:1341/1395 train_time:172228ms step_avg:129.40ms | |
step:1342/1395 train_time:172363ms step_avg:129.40ms | |
step:1343/1395 train_time:172497ms step_avg:129.40ms | |
step:1344/1395 train_time:172630ms step_avg:129.41ms | |
step:1345/1395 train_time:172765ms step_avg:129.41ms | |
step:1346/1395 train_time:172900ms step_avg:129.42ms | |
step:1347/1395 train_time:173037ms step_avg:129.42ms | |
step:1348/1395 train_time:173172ms step_avg:129.43ms | |
step:1349/1395 train_time:173309ms step_avg:129.43ms | |
step:1350/1395 train_time:173445ms step_avg:129.44ms | |
step:1351/1395 train_time:173581ms step_avg:129.44ms | |
step:1352/1395 train_time:173718ms step_avg:129.45ms | |
step:1353/1395 train_time:173856ms step_avg:129.45ms | |
step:1354/1395 train_time:173991ms step_avg:129.46ms | |
step:1355/1395 train_time:174127ms step_avg:129.46ms | |
step:1356/1395 train_time:174261ms step_avg:129.47ms | |
step:1357/1395 train_time:174398ms step_avg:129.47ms | |
step:1358/1395 train_time:174535ms step_avg:129.48ms | |
step:1359/1395 train_time:174671ms step_avg:129.48ms | |
step:1360/1395 train_time:174810ms step_avg:129.49ms | |
step:1361/1395 train_time:174946ms step_avg:129.49ms | |
step:1362/1395 train_time:175083ms step_avg:129.50ms | |
step:1363/1395 train_time:175221ms step_avg:129.51ms | |
step:1364/1395 train_time:175356ms step_avg:129.51ms | |
step:1365/1395 train_time:175489ms step_avg:129.51ms | |
step:1366/1395 train_time:175625ms step_avg:129.52ms | |
step:1367/1395 train_time:175761ms step_avg:129.52ms | |
step:1368/1395 train_time:175897ms step_avg:129.53ms | |
step:1369/1395 train_time:176034ms step_avg:129.53ms | |
step:1370/1395 train_time:176174ms step_avg:129.54ms | |
step:1371/1395 train_time:176309ms step_avg:129.54ms | |
step:1372/1395 train_time:176444ms step_avg:129.55ms | |
step:1373/1395 train_time:176578ms step_avg:129.55ms | |
step:1374/1395 train_time:176717ms step_avg:129.56ms | |
step:1375/1395 train_time:176851ms step_avg:129.56ms | |
step:1375/1395 val_loss:3.2825 train_time:176984ms step_avg:129.66ms | |
step:1376/1395 train_time:177003ms step_avg:129.58ms | |
step:1377/1395 train_time:177129ms step_avg:129.58ms | |
step:1378/1395 train_time:177265ms step_avg:129.58ms | |
step:1379/1395 train_time:177399ms step_avg:129.58ms | |
step:1380/1395 train_time:177534ms step_avg:129.59ms | |
step:1381/1395 train_time:177671ms step_avg:129.59ms | |
step:1382/1395 train_time:177808ms step_avg:129.60ms | |
step:1383/1395 train_time:177942ms step_avg:129.60ms | |
step:1384/1395 train_time:178079ms step_avg:129.61ms | |
step:1385/1395 train_time:178215ms step_avg:129.61ms | |
step:1386/1395 train_time:178349ms step_avg:129.61ms | |
step:1387/1395 train_time:178487ms step_avg:129.62ms | |
step:1388/1395 train_time:178623ms step_avg:129.62ms | |
step:1389/1395 train_time:178757ms step_avg:129.63ms | |
step:1390/1395 train_time:178890ms step_avg:129.63ms | |
step:1391/1395 train_time:179027ms step_avg:129.64ms | |
step:1392/1395 train_time:179164ms step_avg:129.64ms | |
step:1393/1395 train_time:179299ms step_avg:129.65ms | |
step:1394/1395 train_time:179435ms step_avg:129.65ms | |
step:1395/1395 train_time:179570ms step_avg:129.65ms | |
step:1395/1395 val_loss:3.2786 train_time:179704ms step_avg:129.75ms | |
peak memory allocated: 37652 MiB reserved: 39236 MiB |
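For convenience, a minimal sketch (assuming the log format shown above, where each evaluation emits a "step:N/total val_loss:V train_time:Tms ..." line) of how the val_loss trajectory could be recovered from a saved copy of this log. The script name and log-file argument are hypothetical; only the Python standard library is used.

import re
import sys

# Matches the validation lines in the log above, e.g.
#   step:500/1395 val_loss:3.6638 train_time:61231ms step_avg:124.96ms
VAL_RE = re.compile(r"step:(\d+)/\d+ val_loss:([\d.]+) train_time:(\d+)ms")

with open(sys.argv[1]) as f:  # e.g. python parse_log.py train.log
    for line in f:
        m = VAL_RE.search(line)
        if m:
            step, loss, ms = int(m[1]), float(m[2]), int(m[3])
            print(f"step {step:>4}  val_loss {loss:.4f}  elapsed {ms / 1000:.1f}s")

Run against this log, it would print the eight checkpoints from val_loss 3.6638 at step 500 down to 3.2786 at step 1395, each with its cumulative wall-clock time.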