start a new session:
tmux
start a new session with a name:
tmux new -s myname
import torch
# Create a tensor with values between 0 and 1
tensor = torch.rand(3, 3)
# Set a threshold value
threshold = 0.5
# Use torch.where to threshold the tensor into 0/1 values
binary_tensor = torch.where(tensor > threshold, torch.tensor(1), torch.tensor(0))
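As a follow-up sketch reusing the names above, the same binarization can also be written by casting the boolean mask directly, which makes the output dtype explicit:

# Equivalent: cast the comparison mask to integers (use .float() for a float result)
binary_tensor = (tensor > threshold).int()
print(binary_tensor)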
import torch
from torch.utils.data import DataLoader, TensorDataset
# Define the dataset
X = torch.Tensor([[1, 2], [3, 4], [5, 6], [7, 8]])
y = torch.Tensor([0, 1, 0, 1])
dataset = TensorDataset(X, y)
# Create the dataloader
dataloader = DataLoader(dataset, batch_size=2, shuffle=True)
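A short usage sketch for the dataloader defined above; it iterates one epoch and prints the batch shapes:

for batch_X, batch_y in dataloader:
    # batch_size=2, so each batch holds 2 samples
    print(batch_X.shape, batch_y.shape)  # torch.Size([2, 2]) torch.Size([2])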
coverage: ## Run tests with coverage
	coverage erase
	coverage run --include=podsearch/* -m pytest -ra
	coverage report -m

deps: ## Install dependencies
	pip install black coverage flake8 mypy pylint pytest tox

lint: ## Lint and static-check
	flake8 podsearch
## Check disk usage of a specific directory
du -h --max-depth=0 /u/iali    # total for the directory only
du -h --max-depth=1 /u/iali    # also show first-level subdirectories
# Create translation table.
trans = str.maketrans("abcdefghijklmnopqrstuvwxyz",
                      "nopqrstuvwxyzabcdefghijklm")
# Apply rot13 translation.
print("gandalf".translate(trans))
# Applying it twice returns the original string.
print("gandalf".translate(trans).translate(trans))
py.test test_sample.py --collect-only    # collect tests without running them
py.test test_sample.py -v                # verbose output
py.test -q test_sample.py                # quiet output, less verbose reporting
python -m pytest -q test_sample.py       # invoke pytest through the Python interpreter
py.test --markers                        # show available markers
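The commands above assume a file named test_sample.py; its contents are not shown here, but a minimal sketch of such a module could be:

# test_sample.py -- minimal module that pytest will collect
def inc(x):
    return x + 1

def test_inc():
    assert inc(3) == 4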
# copy from local machine to remote machine
scp localfile user@host:/path/to/whereyouwant/thefile
# copy from remote machine to local machine
scp user@host:/path/to/remotefile localfile
# Encode: unicode code points to bytes
>>> s = u'Café'
>>> type(s.encode('utf-8'))
<class 'bytes'>
# Decode: bytes to unicode code points
>>> s = bytes('Café', encoding='utf-8')
>>> s.decode('utf-8')
'Café'
# Get unicode code point
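One common way to get a character's code point, sketched with the built-in ord():
>>> ord('A')
65
>>> hex(ord('é'))  # code point as hex
'0xe9'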
# -*- coding: utf-8 -*-
import json
from codecs import open
o = {'text': 'پنجاب'}
with open('foo.json', 'w', encoding='utf-8') as fp:
    # ensure_ascii=False writes the non-ASCII text as-is instead of \uXXXX escapes
    json.dump(o, fp, ensure_ascii=False)
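To check the round trip, a minimal sketch that reads the file back (assuming Python 3, where open accepts an encoding argument):

import json
with open('foo.json', encoding='utf-8') as fp:
    data = json.load(fp)
print(data['text'])  # پنجاب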