Last active
March 16, 2025 02:25
-
-
Save duplaja/8fcdfb2b043e14c8637da96fcdc9514b to your computer and use it in GitHub Desktop.
Rough Discord Kavita New / Update Notification Script
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import requests | |
import json | |
from datetime import datetime | |
import re | |
import io | |
from PIL import Image | |
import pickle | |
import time | |
########################################################
#
# User configuration -- edit the values below as needed.
#
########################################################

# Kavita ODPS url (required)
odps_url = 'https://xxxxxxxxxx.com/api/opds/dxxxxxxx-xxxx-xxx-bxxxx-2xxxxxxx'

# STRONGLY suggest changing this to a date within the last week, for first run.
starting_reference_time = "2025-03-01T16:13:02.6280232"

# Libraries you want updates for
allowed_libraries = [2, 5, 9]

# One webhook per discord channel / library, in the same order as
# allowed_libraries.  Reusing a single webhook URL everywhere puts all
# updates into the same channel.
webhook_urls = [
    'https://discord.com/api/webhooks/xxxxxxxxxxxxx/xxxxxxxxxxxxxxxxxxxx',
    'https://discord.com/api/webhooks/xxxxxxxxxxxxx/xxxxxxxxxxxxxxxxxxxx',
    'https://discord.com/api/webhooks/xxxxxxxxxxxxx/xxxxxxxxxxxxxxxxxxxx'
]

# Discord embed color: uses a decimal color value, 65280 is bright green (#00FF00)
embed_color = 65280

########################################################
#
# Stop changing here
#
########################################################

# Derived from odps_url: everything before '/api' is the server root,
# everything after '/opds/' is the API key.
base_url = odps_url.partition('/api')[0]
api_key = odps_url.partition('/opds/')[2]
def kauth():
    """Authenticate against the Kavita server using the ODPS api key.

    Returns:
        str: a JWT bearer token for subsequent API calls.

    Raises:
        requests.HTTPError: if the authentication endpoint returns an error.
    """
    auth_url = base_url + '/api/Plugin/authenticate/?apiKey=' + api_key + '&pluginName=Kavita_List'
    # Bounded timeout so a hung server cannot stall the whole script.
    response = requests.post(auth_url, timeout=30)
    # Surface HTTP failures directly instead of an opaque KeyError below.
    response.raise_for_status()
    return response.json()['token']
def krecentlyadded(library_id, kavita_token):
    """Fetch the most recently created series (up to 100) in one library.

    Args:
        library_id (int): Kavita library id to filter on.
        kavita_token (str): bearer token from kauth().

    Returns:
        list[dict]: series objects sorted newest-first by creation date.

    Raises:
        requests.HTTPError: if the API call fails.
    """
    headers = {
        'Authorization': f'Bearer {kavita_token}',
        'accept': "text/plain",
        'Content-Type': "application/json"
    }
    new_series_url = base_url + '/api/Series/recently-added-v2/?PageNumber=1&PageSize=0'
    # Kavita filter DTO: field 19 filters by library, comparison 0 is
    # equality, sortField 4 with isAscending=False sorts newest-first.
    # NOTE(review): enum values carried over from the original script --
    # confirm against the Kavita API docs if behavior looks off.
    data = {
        "id": 0,
        "name": None,
        "statements": [
            {
                "comparison": 0,
                "field": 19,
                "value": str(library_id),
            },
        ],
        "combination": 0,
        "sortOptions": {
            "sortField": 4,
            "isAscending": False
        },
        "limitTo": 100
    }
    # json= serializes the payload for us (no manual json.dumps needed).
    response = requests.post(new_series_url, headers=headers, json=data, timeout=30)
    response.raise_for_status()
    return response.json()
def krecentlychanged(library_id, kavita_token):
    """Fetch all series (up to 500) in one library, newest-first.

    Same filter/sort shape as krecentlyadded(), but against the all-v2
    endpoint with a larger limit, so recently *updated* series are included.

    Args:
        library_id (int): Kavita library id to filter on.
        kavita_token (str): bearer token from kauth().

    Returns:
        list[dict]: series objects sorted newest-first.

    Raises:
        requests.HTTPError: if the API call fails.
    """
    headers = {
        'Authorization': f'Bearer {kavita_token}',
        'accept': "text/plain",
        'Content-Type': "application/json"
    }
    library_series_url = base_url + '/api/Series/all-v2/?PageNumber=1&PageSize=0'
    # Filter DTO mirrors krecentlyadded(): field 19 = library equality,
    # sortField 4 descending.  NOTE(review): enum values carried over from
    # the original script -- confirm against the Kavita API docs.
    data = {
        "id": 0,
        "name": None,
        "statements": [
            {
                "comparison": 0,
                "field": 19,
                "value": str(library_id),
            },
        ],
        "combination": 0,
        "sortOptions": {
            "sortField": 4,
            "isAscending": False,
        },
        "limitTo": 500
    }
    # json= serializes the payload for us (no manual json.dumps needed).
    response = requests.post(library_series_url, headers=headers, json=data, timeout=30)
    response.raise_for_status()
    return response.json()
def kserieschapters(series_id, kavita_token):
    """Return the volume/chapter breakdown for one series.

    Args:
        series_id (int): Kavita series id.
        kavita_token (str): bearer token from kauth().

    Returns:
        list[dict]: volume objects, each carrying a 'chapters' list.

    Raises:
        requests.HTTPError: if the API call fails.
    """
    headers = {'Authorization': f'Bearer {kavita_token}'}
    series_url = base_url + '/api/Series/volumes/?seriesId=' + str(series_id)
    # Bounded timeout + explicit status check, consistent with the other
    # API helpers in this script.
    response = requests.get(series_url, headers=headers, timeout=30)
    response.raise_for_status()
    return response.json()
def parse_iso_datetime(date_str):
    """Parse a Kavita ISO-8601 timestamp into a datetime.

    Kavita (.NET) emits 1-7 fractional-second digits (ticks), while
    datetime.fromisoformat historically accepts only exactly 3 or 6, so the
    fraction is right-padded/truncated to exactly 6 digits first.

    Args:
        date_str (str): e.g. "2025-03-01T16:13:02.6280232".

    Returns:
        datetime: the parsed timestamp, truncated to microsecond precision.

    Raises:
        ValueError: if date_str does not start with an ISO datetime.
    """
    match = re.match(r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})(?:\.(\d{1,7}))?", date_str)
    if match is None:
        # BUG FIX: previously fell through and returned None implicitly,
        # which made callers fail later with an opaque TypeError when
        # comparing None against a datetime.
        raise ValueError(f"Unrecognized ISO datetime: {date_str!r}")
    date_part, fraction = match.groups()
    if fraction:
        microseconds = (fraction + "000000")[:6]  # right-pad, keep 6 digits
        return datetime.fromisoformat(f"{date_part}.{microseconds}")
    return datetime.fromisoformat(date_part)  # no fractional seconds present
def send_discord_notification_with_url_image(webhook_url, message, image_url, title=None, footer=None):
    """
    Send a notification to a Discord webhook with an image fetched from a URL.

    Args:
        webhook_url (str): The Discord webhook URL
        message (str): The message content to send
        image_url (str): URL of the image to download and attach
        title (str, optional): Embed title
        footer (str, optional): Embed footer text

    Returns:
        bool: True if the webhook call succeeded, False on any failure.
    """
    try:
        # Download the cover image (bounded timeout so one slow fetch
        # cannot stall the whole run).
        image_response = requests.get(image_url, stream=True, timeout=30)
        image_response.raise_for_status()

        # Re-encode whatever the server returned (often webp) as PNG,
        # which Discord renders reliably.
        image = Image.open(io.BytesIO(image_response.content))
        image_bytesio = io.BytesIO()
        image.save(image_bytesio, format='PNG')
        image_bytesio.seek(0)

        filename = "image.png"
        embed = {
            "color": embed_color,
            "image": {
                # BUG FIX: the embed must reference the uploaded attachment
                # by its filename; the previous literal "attachment://(unknown)"
                # never matched the uploaded file, so no image was shown.
                "url": f"attachment://{filename}"
            }
        }
        if title:
            embed["title"] = title
        if footer:
            embed["footer"] = {"text": footer}

        payload = {
            "content": message,
            "embeds": [embed]
        }
        files = {
            'file': (filename, image_bytesio.getvalue())
        }

        # Discord multipart webhooks carry the JSON body in payload_json.
        response = requests.post(
            webhook_url,
            data={"payload_json": json.dumps(payload)},
            files=files
        )
        response.raise_for_status()
        print("Notification with image sent successfully!")
        return True
    except Exception as e:
        # Deliberately best-effort: log and continue so one bad cover or
        # webhook hiccup does not abort the entire notification run.
        print(f"Error sending notification with image: {e}")
        return False
def getreferencetime():
    """Load the last-processed timestamp from the local pickle state file.

    On first run -- or when the state file is missing, corrupt, or lacks the
    expected key -- the configured starting_reference_time is persisted to
    the state file and returned instead.

    Returns:
        str: an ISO timestamp string marking where the last run left off.
    """
    file_name = "kupdate-reference.pickle"
    try:
        with open(file_name, "rb") as file:
            data = pickle.load(file)
        reference_time = data["reference_time"]
    # BUG FIX: the original caught only IOError, so a corrupt/truncated
    # pickle (UnpicklingError/EOFError) or a missing key (KeyError)
    # crashed the script instead of falling back to the configured start.
    except (OSError, EOFError, KeyError, pickle.UnpicklingError):
        reference_time = ''
    if not reference_time:
        # Seed the state file so the next run has a baseline.
        with open(file_name, "wb") as file:
            pickle.dump({"reference_time": starting_reference_time}, file)
        reference_time = starting_reference_time
    return reference_time
def main():
    """Poll each allowed Kavita library and post Discord notifications for
    series created or updated since the last recorded reference time, then
    persist the newest timestamp seen for the next run.
    """
    kavita_token = kauth()
    reference_time = getreferencetime()
    print(reference_time)
    reference_dt = parse_iso_datetime(reference_time)
    # future_* track the newest timestamp observed this run; saved at the end
    # so the next run only reports items newer than this one.
    future_reference_time = reference_time
    future_dt = parse_iso_datetime(future_reference_time)
    # allowed_libraries and webhook_urls are parallel lists (same index ->
    # same channel), hence the index-based loop.
    for x in range(len(allowed_libraries)):
        library_id = allowed_libraries[x]
        webhook_url = webhook_urls[x]
        # --- Pass 1: brand-new series in this library ---
        recently_added = krecentlyadded(library_id, kavita_token)
        used_series = []  # series already announced; skipped in pass 2
        for series in recently_added:
            series_id = series['id']
            series_name = series['name']
            library_name = series['libraryName']
            cover_url = base_url+"/api/Image/series-cover?seriesId="+str(series_id)+"&apiKey="+api_key
            story_url = base_url+"/library/"+str(library_id)+"/series/"+str(series_id)
            created = series['created']
            series_dt = parse_iso_datetime(created)
            if series_dt > reference_dt:
                # Advance the high-water mark if this is the newest seen so far.
                if(series_dt > future_dt):
                    future_dt = series_dt
                    future_reference_time = created
                title = series_name
                to_print = library_name+": "+series_name+" - "+created
                print(to_print)
                body_text = "## [New Series - Click to Read]("+story_url+")"
                footer = "Still deciding what to put here."
                used_series.append(series_id)
                send_discord_notification_with_url_image(webhook_url, body_text, cover_url,title,footer)
                time.sleep(2)  # gentle pacing to avoid Discord rate limits
            else:
                # Feed is newest-first, so the first old entry ends the pass.
                break
        # --- Pass 2: existing series with newly added chapters ---
        recently_changed = krecentlychanged(library_id, kavita_token)
        for series in recently_changed:
            series_id = series['id']
            if series_id in used_series:
                continue  # already announced as a new series above
            used_series.append(series_id)
            series_name = series['name']
            library_name = series['libraryName']
            cover_url = base_url+"/api/Image/series-cover?seriesId="+str(series_id)+"&apiKey="+api_key
            story_url = base_url+"/library/"+str(library_id)+"/series/"+str(series_id)
            chapter_added = series['lastChapterAdded']
            series_dt = parse_iso_datetime(chapter_added)
            if series_dt > reference_dt:
                if(series_dt > future_dt):
                    future_dt = series_dt
                    future_reference_time = chapter_added
                title = series_name
                to_print = 'Changed - '+library_name+": "+series_name+" - "+chapter_added
                print(to_print)
                body_text = "## [Updated Series - Click to Read]("+story_url+")"
                series_info = kserieschapters(series_id,kavita_token)
                # NOTE(review): only the first volume's chapters are listed --
                # presumably the relevant one for this feed; confirm for
                # multi-volume series.
                chapters = series_info[0]['chapters']
                chapters.reverse()  # newest chapters first
                footer = "Newly Added Chapters: "
                # Collect every chapter newer than the reference into the footer.
                # NOTE: chapter_added is deliberately reused here; the newest
                # chapter timestamp becomes the saved reference time.
                for chapter in chapters:
                    chapter_added = chapter['created']
                    chapter_dt = parse_iso_datetime(chapter_added)
                    if chapter_dt > reference_dt:
                        if(chapter_dt > future_dt):
                            future_dt = chapter_dt
                            future_reference_time = chapter_added
                        footer += "\n* Chapter "+str(chapter['number'])
                # One notification per series, listing all new chapters.
                send_discord_notification_with_url_image(webhook_url, body_text, cover_url,title,footer)
                time.sleep(2)  # gentle pacing to avoid Discord rate limits
            else:
                # Feed is newest-first, so the first old entry ends the pass.
                break
    # Persist the newest timestamp seen so the next run starts from here.
    data = {"reference_time":future_reference_time}
    file_name = "kupdate-reference.pickle"
    with open(file_name,"wb") as file:
        pickle.dump(data, file)
    file.close()  # redundant: the with-block already closed the file

if __name__ == "__main__":
    main()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment