#!/bin/bash

# A list of top domains to ensure we aren't just hitting a single cached record
DOMAINS=(
    "google.com" "youtube.com" "facebook.com" "baidu.com" "yahoo.com" "amazon.com" "wikipedia.org" "google.co.in" "twitter.com" "qq.com"
    "live.com" "taobao.com" "bing.com" "instagram.com" "weibo.com" "vk.com" "yandex.ru" "google.ru" "ebay.com" "reddit.com"
    "google.co.uk" "google.com.br" "mail.ru" "t.co" "pinterest.com" "amazon.co.jp" "google.fr" "netflix.com" "tmall.com" "360.cn"
    "google.it" "microsoft.com" "google.es" "paypal.com" "sohu.com" "wordpress.com" "tumblr.com" "blogspot.com" "imgur.com" "xvideos.com"
    "apple.com" "stackoverflow.com" "github.com" "imdb.com" "google.ca" "google.co.kr" "ok.ru" "pornhub.com" "whatsapp.com" "office.com"
    "rakuten.co.jp" "google.co.id" "diply.com" "amazon.de" "craigslist.org" "google.com.tr" "t.me" "uol.com.br" "google.pl" "nicovideo.jp"
    "outbrain.com" "google.com.au" "fandom.com" "bitly.com" "google.com.hk" "booking.com" "quora.com" "chase.com" "adobe.com" "163.com"
    "bbc.co.uk" "nytimes.com" "foxnews.com" "espn.com" "cnn.com" "dailymail.co.uk" "forbes.com" "theguardian.com" "huffpost.com" "wsj.com"
    "ebay.co.uk" "aliexpress.com" "walmart.com" "etsy.com" "target.com" "bestbuy.com" "homedepot.com" "ikea.com" "macys.com" "costco.com"
    "dropbox.com" "vimeo.com" "soundcloud.com" "twitch.tv" "dailymotion.com" "spotify.com" "flickr.com" "deviantart.com" "disney.com" "hulu.com"
    "discord.com" "zoom.us" "slack.com" "canva.com" "medium.com" "notion.so" "trello.com" "asana.com" "monday.com" "zendesk.com"
    "cloudflare.com" "digitalocean.com" "heroku.com" "aws.amazon.com" "azure.com" "godaddy.com" "namecheap.com" "bluehost.com" "siteground.com" "wix.com"
    "espn.go.com" "bleacherreport.com" "nfl.com" "nba.com" "mlb.com" "nhl.com" "goal.com" "skysports.com" "eurosport.com" "sportingnews.com"
    "bloomberg.com" "reuters.com" "cnbc.com" "ft.com" "economist.com" "businessinsider.com" "hbr.org" "fastcompany.com" "inc.com" "entrepreneur.com"
    "wired.com" "theverge.com" "techcrunch.com" "engadget.com" "gizmodo.com" "arstechnica.com" "cnet.com" "zdnet.com" "mashable.com" "pcgamer.com"
    "nationalgeographic.com" "history.com" "discovery.com" "smithsonianmag.com" "scientificamerican.com" "nature.com" "nasa.gov" "ted.com" "khanacademy.org" "coursera.org"
| "nike.com" "adidas.com" "zara.com" "hm.com" "uniqlo.com" " Sephora.com" "ulta.com" "nordstrom.com" "gap.com" "oldnavy.com" | |
| "tripadvisor.com" "expedia.com" "kayak.com" "airbnb.com" "hotels.com" "skyscanner.net" "lonelyplanet.com" "yelp.com" "zillow.com" "realtor.com" | |
| "allrecipes.com" "foodnetwork.com" "epicurious.com" "seriouseats.com" "bonappetit.com" "tasty.co" "healthline.com" "webmd.com" "mayoclinic.org" "nih.gov" | |
| "weather.com" "accuweather.com" "wunderground.com" "bbc.com/weather" "noaa.gov" "timeanddate.com" "time.com" "rollingstone.com" "variety.com" "hollywoodreporter.com" | |
)
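# (Optional sketch: to test your own list instead, load it from a file,
#  one domain per line; "domains.txt" is an assumed filename:
#    mapfile -t DOMAINS < domains.txt )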
# Configuration
CONCURRENCY=10   # Number of parallel requests
TIMEOUT=2        # Seconds to wait for each response
LOG_FILE="/tmp/dns_results.log"

# Clear previous logs
> "$LOG_FILE"
| echo "--- Starting Parallel DNS Stress Test (100 Domains) ---" | |
| echo "Concurrency: $CONCURRENCY | Timeout: $TIMEOUTs" | |
# Lookup worker; exported below so the bash processes spawned by xargs can call it
do_lookup() {
    local domain=$1
    local log=$2
    local timeout=$3

    # Generate a random prefix (e.g., 4291.google.com)
    # This forces the DNS server to actually do a lookup instead of using cache.
    local random_prefix=$(head /dev/urandom | LC_ALL=C tr -dc 'a-z0-9' | head -c 4)
    local target="${random_prefix}.${domain}"
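    # (Sketch: bash's $RANDOM builtin is a cheaper, lower-entropy alternative
    #  if reading /dev/urandom per lookup ever becomes a bottleneck:
    #    local random_prefix="r${RANDOM}${RANDOM}"
    #  Still varied enough per query to defeat the cache.)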
    # dig below queries the system's default resolver. To stress a specific
    # server instead (e.g. your router), add an @server argument to the dig call.
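    # A minimal sketch of that variant (192.168.1.1 is an assumed router IP):
    #   dig @192.168.1.1 +short +time="$timeout" +tries=1 "$target"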
    # Note: dig exits 0 whenever it receives any reply (even NXDOMAIN), so
    # SUCCESS means "the resolver answered", not "the random name resolved".
    if dig +short +time="$timeout" +tries=1 "$target" > /dev/null 2>&1; then
| echo "SUCCESS: $target" >> "$log" | |
| else | |
| echo "FAILED: $target" >> "$log" | |
| fi | |
| } | |
| export -f do_lookup | |
# Use xargs to run lookups in parallel; arguments are passed positionally so
# a domain is never interpolated into shell source.
printf "%s\n" "${DOMAINS[@]}" | xargs -P "$CONCURRENCY" -I {} bash -c 'do_lookup "$1" "$2" "$3"' _ {} "$LOG_FILE" "$TIMEOUT"
# Calculate results
TOTAL=${#DOMAINS[@]}
SUCCESSES=$(grep -c "SUCCESS" "$LOG_FILE")
FAILURES=$(grep -c "FAILED" "$LOG_FILE")

echo "--- Stress Test Complete ---"
echo "Total Domains Tested: $TOTAL"
echo "Successes: $SUCCESSES"
echo "Failures: $FAILURES"
echo "Success Rate: $(( (SUCCESSES * 100) / TOTAL ))%"

if [ "$FAILURES" -gt 0 ]; then
    echo -e "\nFailed Domains:"
    grep "FAILED" "$LOG_FILE" | cut -d' ' -f2
fi
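# Usage sketch (the script name below is an assumption; the gist doesn't set one):
#   chmod +x dns-stress.sh && ./dns-stress.sh
#   tail -f /tmp/dns_results.log   # watch lookups complete from another terminal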