# respodns/respodns/checks.py
# (header metadata from original listing: 2020-09-04 15:49:21 +02:00)
from .top1m import retrieve_top1m_entries
from .util import concat_nonsense, rot13, head
from .structs import Check
def order_by_failures(checks):
    """Return *checks* sorted by their ``failures`` attribute, highest first.

    The sort is stable: checks with equal failure counts keep their
    relative order from the input.
    """
    return sorted(checks, key=lambda check: check.failures, reverse=True)
# Checks probed before all others.  The third Check argument is the
# historical failure count (see order_by_failures above).
first = [
    # Checking this first avoids issues with censorship in China. See:
    # https://www.bortzmeyer.org/sichuan-pepper.html
    Check("common", "baidu.com", 491),
]
# NOTE(review): this looks like the WannaCry kill-switch domain, now run
# as a research sinkhole — confirm before relying on that description.
_sinkhole = "iuqerfsodp9ifjaposdfjhgosurijfaewrwergwea.com"
# A deliberately-nonexistent subdomain of a real host; presumably
# concat_nonsense prepends a garbage label — TODO confirm in .util.
_weirdsub = concat_nonsense("javarevisited.blogspot.com")
# Checks ordered by descending historical failure count (third argument),
# so the most discriminating probes run first.  rot13() obfuscates a few
# domain names in this source file; concat_nonsense() builds a
# deliberately-nonexistent name under the given suffix.
likely = order_by_failures([
    # these checks are, in practice, the most likely
    # to weed out unwanted DNS servers.
    Check("adtrack", "ad.doubleclick.net", 81),
    Check("adtrack", "google-analytics.com", 75),
    Check("adtrack", "media.fastclick.net", 116),
    Check("adware", rot13("nqf789.pbz"), 168),
    Check("bad", concat_nonsense("com"), 153),
    Check("badsub", concat_nonsense("google.com"), 63),
    Check("common", "archive.org", 98),
    Check("common", "duckduckgo.com", 78),
    Check("common", "en.wikipedia.org", 75),
    Check("common", "facebook.com", 94),
    Check("common", "google.com", 69),
    # Check("common", "naver.com", 57),
    Check("common", "paypal.com", 74),
    Check("common", "wikileaks.com", 86),
    # IDN check (xn-- punycode label under the Russian .рф TLD).
    Check("common", "xn--b1aew.xn--p1ai", 85),
    Check("gambling", "bet365.com", 157),
    Check("gambling", "betonline.ag", 168),
    Check("gambling", "unibet.com", 137),
    Check("infosec", _sinkhole, 98),
    Check("infosec", "scan.shadowserver.org", 73),
    Check("news", "huanqiu.com", 435),
    Check("news", "telegram.com", 71),
    Check("parking", "scmp.org", 132),
    Check("piracy", "thehiddenbay.org", 77),
    Check("porn", "pornhub.com", 151),
    Check("shock", rot13("tbng.pk"), 209),
    Check("uncommon", "cybre.space", 88),
    Check("uncommon", "react.uni-saarland.de", 74),
    Check("usercontent", "4chan.org", 116),
    # Check("weird", "archive.is", 0),
    Check("weirdsub", _weirdsub, 126),
])
# Checks that rarely fail in practice (note the low failure counts);
# also sorted by descending historical failure count.
unlikely = order_by_failures([
    Check("common", "aliexpress.com", 2),
    Check("common", "ebay.com", 4),
    Check("common", "qq.com", 15),
    Check("common", "stackoverflow.com", 1),
    Check("common", "tmall.com", 8),
    Check("news", "nytimes.com", 6),
    Check("news", "scmp.com", 3),
    Check("piracy", "thepiratebay.org", 24),
    Check("porn", "chaturbate.com", 18),
    Check("porn", "xvideos.com", 23),
    Check("usercontent", "facebook.com", 12),
    Check("usercontent", "flickr.com", 5),
    Check("usercontent", "github.com", 19),
    Check("usercontent", "imgur.com", 22),
    Check("usercontent", "instagram.com", 7),
    Check("usercontent", "reddit.com", 13),
    Check("usercontent", "tumblr.com", 10),
    Check("usercontent", "twitter.com", 21),
    Check("usercontent", "weibo.com", 20),
    Check("usercontent", "wordpress.com", 9),
    Check("video", "bilibili.com", 17),
    Check("video", "netflix.com", 14),
    Check("video", "twitch.tv", 16),
    Check("video", "youtube.com", 11),
])
# Domains excluded from the Top-1M-derived checks because they no longer
# resolve reliably; the trailing comment credits the report source.
defunct = [
    "panda.tv",  # imochen.github.io
]
def _top1m_gen():
    """Yield a ``Check("top", domain, 0)`` for each Top-1M entry.

    ``retrieve_top1m_entries`` yields two-element items — presumably
    ``(rank, domain)`` pairs (TODO confirm in .top1m); the first element
    is unused here, hence the ``_rank`` name.  Domains listed in
    ``defunct`` are skipped.  The failure count is 0 because these
    entries have no recorded history.
    """
    # Lazy generator: callers (head() below) take only a prefix, so the
    # full million-entry list is never materialized here.
    return (Check("top", entry, 0)
            for _rank, entry in retrieve_top1m_entries()
            if entry not in defunct)
# Prefixes of the Top-1M check stream; head(n, it) presumably takes the
# first n items of the iterator — TODO confirm in .util.  Each call gets
# a fresh generator so the two prefixes are independent.
top100 = head(100, _top1m_gen())
top1000 = head(1000, _top1m_gen())