import pandas as pd
import numpy as np
from sentence_transformers import SentenceTransformer
import chromadb
import os
# Load the TMDB 5000 dataset (movie metadata and credits)
movies = pd.read_csv('tmdb_5000_movies.csv')
credits = pd.read_csv('tmdb_5000_credits.csv')
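The sentence_transformers and chromadb imports above suggest the snippet goes on to embed the movie overviews and index them in a vector store. A minimal sketch under that assumption, continuing from the dataframes just loaded (the model name and collection name are illustrative, not from the original gist):

# Sketch (assumed continuation): embed overviews and index them in Chroma
encoder = SentenceTransformer('all-MiniLM-L6-v2')  # model choice is an assumption
overviews = movies['overview'].fillna('').tolist()
embeddings = encoder.encode(overviews, show_progress_bar=True)

client = chromadb.Client()
collection = client.create_collection(name='movies')  # collection name is illustrative
collection.add(
    ids=[str(i) for i in movies['id']],
    documents=overviews,
    embeddings=embeddings.tolist(),
)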
MLWhiz / plot_times.py (last active August 11, 2021)
import pandas as pd
import plotly.express as px

# movies_list is defined earlier in the original gist
new_movies_list = movies_list * 8
times_taken = []
for i in range(50, len(new_movies_list), 50):
    print(i)
    movies_to_process = new_movies_list[:i]
    # the rest of the timing loop is truncated in this excerpt
# Multiprocess:
from multiprocessing import Pool
import time
import plotly.express as px
import plotly
import pandas as pd
from joblib import Parallel, delayed

def f(x):
    time.sleep(2)
    return x**2
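The fragment above imports both multiprocessing.Pool and joblib, which suggests the gist goes on to time the two against each other on the toy function f. A minimal, self-contained sketch of such a comparison (worker count and input size are assumptions):

from multiprocessing import Pool
from joblib import Parallel, delayed
import time

def f(x):
    time.sleep(2)
    return x**2

if __name__ == "__main__":
    inputs = list(range(8))

    # multiprocessing.Pool: 8 two-second tasks spread over 4 workers
    start = time.time()
    with Pool(4) as pool:
        pool_results = pool.map(f, inputs)
    print("Pool:", round(time.time() - start, 2), "s")

    # joblib: same workload expressed with Parallel/delayed
    start = time.time()
    joblib_results = Parallel(n_jobs=4)(delayed(f)(x) for x in inputs)
    print("joblib:", round(time.time() - start, 2), "s")

With four workers and eight two-second tasks, both runs should finish in roughly four seconds instead of the sixteen a serial loop would need.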
from multiprocessing import Pool
import time
import plotly.express as px
import plotly
import pandas as pd

def f(x):
    time.sleep(2)
    return x**2
from multiprocessing import Pool
import time
import plotly.express as px
import plotly
import pandas as pd

def f(x):
    return x**2

def runner(list_length):
    # Body truncated in the original gist; a minimal completion that maps f
    # over the inputs with a process pool (pool size of 4 is an assumption)
    with Pool(4) as pool:
        return pool.map(f, range(list_length))
from typing import List

class Solution:
    def minEatingSpeed(self, piles: List[int], h: int) -> int:
        # Can Koko finish all the piles within h hours at eating speed k?
        def check(k):
            hours_taken = 0
            for n in piles:
                if n % k == 0:
                    hours_taken += n // k
                else:
                    hours_taken += n // k + 1
            return hours_taken <= h
        # Binary-search the smallest speed for which check is True
        left, right = 1, max(piles)
        while left < right:
            mid = left + (right - left) // 2
            if check(mid):
                right = mid
            else:
                left = mid + 1
        return left
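A quick sanity check of the completed solution on the classic example (piles of 3, 6, 7, 11 bananas and 8 hours):

print(Solution().minEatingSpeed([3, 6, 7, 11], 8))  # prints 4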
def binary_search(array) -> int:
    def condition(value) -> bool:
        pass
    # Search space could be [0, n], [1, n] etc. Depends on problem
    left, right = min(search_space), max(search_space)
    while left < right:
        mid = left + (right - left) // 2
        if condition(mid):
            right = mid
        else:
            left = mid + 1
    return left
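To show how the template is meant to be filled in, here is one concrete instantiation (the function name and search space are made up for the example): find the smallest non-negative integer x with x * x >= target.

def smallest_square_root_ceiling(target: int) -> int:
    def condition(value: int) -> bool:
        return value * value >= target

    left, right = 0, target          # search space [0, target]
    while left < right:
        mid = left + (right - left) // 2
        if condition(mid):
            right = mid              # mid works, try something smaller
        else:
            left = mid + 1           # mid fails, the answer must be larger
    return left

print(smallest_square_root_ceiling(10))  # 4, since 3*3 = 9 < 10 <= 16 = 4*4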
from transformers import AutoTokenizer, AutoModelForQuestionAnswering

# The tokenizer is not shown in the original snippet; it is assumed to come
# from the same fine-tuned SQuAD checkpoint as the model loaded below
tokenizer = AutoTokenizer.from_pretrained("test-squad-trained")

text = r"""
George Washington (February 22, 1732[b] – December 14, 1799) was an American political leader, military general, statesman, and Founding Father who served as the first president of the United States from 1789 to 1797. Previously, he led Patriot forces to victory in the nation's War for Independence. He presided at the Constitutional Convention of 1787, which established the U.S. Constitution and a federal government. Washington has been called the "Father of His Country" for his manifold leadership in the formative days of the new nation.
"""
question = "Who was the first president?"

inputs = tokenizer.encode_plus(question, text, add_special_tokens=True, return_tensors="pt")
input_ids = inputs["input_ids"].tolist()[0]
text_tokens = tokenizer.convert_ids_to_tokens(input_ids)
model = AutoModelForQuestionAnswering.from_pretrained("test-squad-trained")
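The snippet stops after loading the fine-tuned checkpoint; the usual next step in this question-answering recipe is to run the model and decode the highest-scoring span. A minimal sketch, assuming a transformers version whose models return outputs with start_logits and end_logits:

import torch

with torch.no_grad():
    outputs = model(**inputs)

answer_start = int(torch.argmax(outputs.start_logits))   # most likely start token
answer_end = int(torch.argmax(outputs.end_logits)) + 1   # most likely end token (exclusive)
answer = tokenizer.convert_tokens_to_string(text_tokens[answer_start:answer_end])
print(answer)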
text = r"""
🤗 Transformers (formerly known as pytorch-transformers and pytorch-pretrained-bert) provides general-purpose
architectures (BERT, GPT-2, RoBERTa, XLM, DistilBert, XLNet…) for Natural Language Understanding (NLU) and Natural
Language Generation (NLG) with over 32+ pretrained models in 100+ languages and deep interoperability between
TensorFlow 2.0 and PyTorch
"""
questions = [
"How many pretrained models are available in Transformers?",
"What does Transformers provide?",