What is AlterArchive? It combines two tools in one:
- File uploader & archive manager: upload, store, and organize your files
| init = function() | |
| player = object | |
| x = 0 | |
| y = 0 | |
| speed = 2 | |
| width = 50 | |
| height = 50 | |
| sprite = "ghost" | |
| end | |
| import requests | |
| from bs4 import BeautifulSoup | |
| def crawl_website(url): | |
| try: | |
| response = requests.get(url) | |
| response.raise_for_status() | |
| soup = BeautifulSoup(response.text, 'html.parser') | |
| urls = [a['href'] for a in soup.find_all('a', href=True)] | |
| for link in urls: |
| import axios from "axios"; | |
| import * as cheerio from "cheerio"; | |
| async function scrapeModyolo() { | |
| try { | |
| const { data: html } = await axios.get("https://modyolo.com/", { | |
| headers: { | |
| "User-Agent": | |
| "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36", | |
| }, |
| import axios from "axios"; | |
| import * as cheerio from "cheerio"; | |
| async function scrapeFuDomainsWhoIs(url = "example.com") { | |
| try { | |
| let rawUrl = url.trim().replace(/^https?:\/\//, '').replace(/\/.*$/, ''); | |
| const { data: html } = await axios.get('https://fudomains.com/whois/' + rawUrl, { | |
| headers: { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36' } | |
| }); | |
| import axios from "axios"; | |
| import * as cheerio from "cheerio"; | |
| async function example() { | |
| try { | |
| const { data: html } = await axios.get('https://www.example.com/', { | |
| headers: { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36' } | |
| }); | |
| const $ = cheerio.load(html); |
| import axios from "axios"; | |
| import * as cheerio from "cheerio"; | |
| async function scrapeGistGithubUserProfile(profile) { | |
| try { | |
| let username = profile || "Frenzycore"; | |
| const { data: html } = await axios.get( | |
| "https://gist.github.com/" + encodeURIComponent(username), | |
| { | |
| headers: { |
| import axios from "axios"; | |
| import * as cheerio from "cheerio"; | |
| async function scrapeWikipediaRandom() { | |
| const url = 'https://wikipedia.org/wiki/Special:Random'; | |
| try { | |
| const response = await axios.get(url, { | |
| headers: { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36' } | |
| }); |
| import axios from "axios"; | |
| import * as cheerio from "cheerio"; | |
| async function scrapeQuotesToScrape() { | |
| let currentUrl = "https://quotes.toscrape.com/"; | |
| const allQuotes = []; | |
| let page = 1; | |
| try { | |
| while (currentUrl) { |