Reddit posts per second calculator
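The gist contains two scripts: a collector that polls r/all once per second and logs the base36 submission-ID delta to analyze.csv, and an analyzer that reads that CSV and plots the posting rate together with the estimated time to reach a target submission ID (small illustrative sketches follow each script below).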
import csv
from datetime import datetime

import praw
import base36
import plotly  # plotly==2.0.8, latest version without the bullshit
import plotly.graph_objs as go
import plotly.plotly as py

from config import PLOTLY_USERNAME, PLOTLY_APIKEY, CLIENT_ID, CLIENT_SECRET, TARGET

plotly.tools.set_credentials_file(username=PLOTLY_USERNAME, api_key=PLOTLY_APIKEY)

reddit = praw.Reddit(
    client_id=CLIENT_ID,
    client_secret=CLIENT_SECRET,
    user_agent="PRAW/1.0"
)


def _get_post():
    # Return the newest submission on r/all.
    gen = reddit.subreddit("all").new(limit=5)
    return next(gen)


def _elements_since_ts(since, current):
    # Yield the recorded rows newer than `since`, stopping at the current row.
    for e in rows:
        if e == current:
            break
        if float(e[0]) > since:
            yield e


ok = False
while not ok:
    try:
        submission = _get_post()
        latest_n = base36.loads(submission.id)
        target_n = base36.loads(TARGET)
        ok = True
    except StopIteration:
        continue


def _calculate_eta(cur_rate):
    # Hours until the target submission id is reached at the current posting rate.
    delta = target_n - latest_n
    return delta / cur_rate / 60 / 60


def avg(elements):
    return sum(elements) / len(elements)


rows = []
with open("analyze.csv") as f:
    reader = csv.reader(f)
    for row in reader:
        rows.append(row)

x_axis = [datetime.fromtimestamp(float(row[0])) for row in rows]
y1_axis = []
y2_axis = []
seen = []
for row in rows:
    # Replace bogus samples (zero, or the huge delta from the collector's sentinel id) with a sane default.
    if int(row[1]) == 0 or int(row[1]) > 1000000:
        row[1] = 25
    seen.append(row)
    previous_values = []
    for row2 in _elements_since_ts(int(float(seen[0][0])) - (60 * 60), row):
        previous_values.append(int(row2[1]))
    if len(previous_values) == 0:
        continue
    rate = avg(previous_values)
    y1_axis.append(rate)
    y2_axis.append(_calculate_eta(rate))

line1 = go.Scatter(
    x=x_axis,
    y=y1_axis,
    name='Reddit posts per second',
    line=dict(
        color='rgb(205, 12, 24)',
        width=2,
    )
)
fig1 = dict(data=[line1], layout=dict(title='Reddit posts per second',
                                      xaxis=dict(title='Datetime'),
                                      yaxis=dict(title='Posts per second (cumulative average of last hour)')))
py.image.save_as(fig1, filename='rate.png')

line2 = go.Scatter(
    x=x_axis,
    y=y2_axis,
    name='Time to reach target',
    line=dict(
        color='rgb(205, 12, 24)',
        width=2,
    )
)
fig2 = dict(data=[line2], layout=dict(title='Time to reach target',
                                      xaxis=dict(title='Datetime'),
                                      yaxis=dict(title='Time to reach target in hours (cumulative average of last hour)')))
py.image.save_as(fig2, filename='target.png')
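For reference, the ETA computed above works directly on Reddit's base36 submission IDs: the numeric gap between the target ID and the newest ID, divided by the observed posting rate, gives the remaining time in seconds, then hours. A minimal sketch of that arithmetic, using made-up placeholder IDs (not real submissions) and the script's fallback rate of 25 posts per second:

import base36

latest_n = base36.loads("hq1abc")  # hypothetical newest submission id
target_n = base36.loads("hzzzzz")  # hypothetical target id
rate = 25.0                        # posts per second (the script's fallback value)

remaining_posts = target_n - latest_n
eta_hours = remaining_posts / rate / 60 / 60
print(f"{remaining_posts} posts to go, ~{eta_hours:.1f} hours at {rate} posts/s")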
import csv
import time

import base36
import praw

from config import CLIENT_ID, CLIENT_SECRET

reddit = praw.Reddit(
    client_id=CLIENT_ID,
    client_secret=CLIENT_SECRET,
    user_agent="PRAW/1.0"
)


def _calculate_posts_delta(id_1, id_2):
    # Difference between two base36 submission ids = posts created in between.
    print(id_1, id_2)
    return base36.loads(id_1) - base36.loads(id_2)


def _get_post():
    # Return the newest submission on r/all.
    gen = reddit.subreddit("all").new(limit=5)
    return next(gen)


def _write_to_csv(d):
    with open("analyze.csv", "a+") as f:
        writer = csv.writer(f)
        writer.writerow([time.time(), d])


old_id = "000000"  # sentinel: the first recorded delta is bogus and gets filtered by the analyzer
old_delta = 0
while True:
    _write_to_csv(old_delta)
    time.sleep(1)
    try:
        submission = _get_post()
    except StopIteration:
        continue
    s_id = submission.id
    delta = _calculate_posts_delta(s_id, old_id)
    old_id = s_id
    old_delta = delta
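Both scripts import their credentials from a config module that is not part of the gist. A minimal config.py sketch with placeholder values only (every value here is an assumption; substitute your own Reddit API credentials, Plotly credentials, and target submission ID):

# config.py -- placeholder values, not real credentials
CLIENT_ID = "your-reddit-client-id"
CLIENT_SECRET = "your-reddit-client-secret"
PLOTLY_USERNAME = "your-plotly-username"
PLOTLY_APIKEY = "your-plotly-api-key"
TARGET = "gxxxxx"  # base36 id of the submission to reach (hypothetical)

Run the collector first so it appends [timestamp, delta] rows to analyze.csv once per second; then run the plotting script, which reads analyze.csv and writes rate.png and target.png through the Plotly image export.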