Alteroxy is a proxy API service that fetches web content through intermediary proxy servers. It masks your real IP address and provides access to content that may be blocked in your region.
https://alterarchive.web.id/api
| <!DOCTYPE html> | |
| <html lang="en"> | |
| <head> | |
| <meta charset="UTF-8"> | |
| <meta name="viewport" content="width=device-width, initial-scale=1.0"> | |
| <title>For Someone Who Makes My Heart Skip ❤️</title> | |
| <style> | |
| body { | |
| background: linear-gradient(135deg, #ffe6f0, #fff0f5, #ffeaf7); | |
| font-family: 'Segoe Script', cursive; |
| Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36 | |
| Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36 | |
| Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36 | |
| Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36 | |
| Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36 | |
| Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36 | |
| Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36 | |
| Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36 | |
| Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36 | |
| Mozilla/5.0 (Win |
What is AlterArchive? It combines two services in one:
| init = function() | |
| player = object | |
| x = 0 | |
| y = 0 | |
| speed = 2 | |
| width = 50 | |
| height = 50 | |
| sprite = "ghost" | |
| end | |
| import requests | |
| from bs4 import BeautifulSoup | |
| def crawl_website(url): | |
| try: | |
| response = requests.get(url) | |
| response.raise_for_status() | |
| soup = BeautifulSoup(response.text, 'html.parser') | |
| urls = [a['href'] for a in soup.find_all('a', href=True)] | |
| for link in urls: |
| import axios from "axios"; | |
| import * as cheerio from "cheerio"; | |
| async function scrapeModyolo() { | |
| try { | |
| const { data: html } = await axios.get("https://modyolo.com/", { | |
| headers: { | |
| "User-Agent": | |
| "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36", | |
| }, |
| import axios from "axios"; | |
| import * as cheerio from "cheerio"; | |
| async function scrapeFuDomainsWhoIs(url = "example.com") { | |
| try { | |
| let rawUrl = url.trim().replace(/^https?:\/\//, '').replace(/\/.*$/, ''); | |
| const { data: html } = await axios.get('https://fudomains.com/whois/' + rawUrl, { | |
| headers: { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36' } | |
| }); | |
| import axios from "axios"; | |
| import * as cheerio from "cheerio"; | |
| async function example() { | |
| try { | |
| const { data: html } = await axios.get('https://www.example.com/', { | |
| headers: { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36' } | |
| }); | |
| const $ = cheerio.load(html); |