git branch -a --contains 9f12d89998fbd38a3fd3a1d21e7b028b9659c107
gh pr list --search "${SHA}" --state merged gh pr list --search 9f12d89998fbd38a3fd3a1d21e7b028b9659c107
- https://www.youtube.com/watch?v=Uv7I75pLmb4
- https://aws.amazon.com/solutions/implementations/distributed-load-testing-on-aws/
import { | |
SyntheticEvent, | |
useCallback, | |
useEffect, | |
useRef, | |
useState, | |
} from "react"; | |
import RadioList from "@/ui/radio-list"; |
Set out in our Royal Charter, the BBC’s Mission is:
To act in the public interest, serving all audiences through the provision of impartial, high-quality and distinctive output and services which inform, educate and entertain.
The BBC's Public Purposes, which describe what the BBC must do for its audiences, are also laid out in the BBC's Royal Charter. Our use of AI must be consistent with them.
Notes taken from https://www.youtube.com/watch?v=TQQPAU21ZUw
- Good user experience
- Cheap maintenance
- Fast performance
import http from "k6/http"; | |
import { check } from "k6"; | |
const URL = "https://example.com"; // Target URL | |
export let options = { | |
// If a cert is required to access the endpoint | |
// | |
// tlsAuth: | |
// [ |
Useful when two components are not under the same line of inheritance (they sit on two separate branches), use the same shared state, and need to keep that state in sync.
Atom
from sklearn.metrics import confusion_matrix | |
import seaborn as sns | |
import matplotlib.pyplot as plt | |
cm = confusion_matrix(y_test, y_pred) | |
plt.figure(figsize=(12,12)) | |
sns.heatmap(data=cm, # Data to plot | |
square=True, # Make the cells square, looks nicer. |
# https://www.tensorflow.org/tutorials/distribute/keras | |
logical_device_names = [logical_device.name for logical_device in tf.config.list_logical_devices()] | |
if 'GPU' in ''.join(logical_device_names): | |
distribution_strategy = tf.distribute.MirroredStrategy() | |
elif 'TPU' in ''.join(logical_device_names): | |
tf.tpu.experimental.initialize_tpu_system() | |
tpu = tf.distribute.cluster_resolver.TPUClusterResolver(tpu='/device:TPU_SYSTEM:0') | |
distribution_strategy = tf.distribute.experimental.TPUStrategy(tpu) |