IP = 173.212.231.191
robots.txt
# To allow only googlebot, uncomment the two lines below:
#User-agent: googlebot
#Disallow:
# if you want to add own robot rules, do it BEFORE the final rule matching *
User-agent: *
Crawl-delay: 20
# This has to match script url + cfg.url_prefix_action - it
# saves lots of search engine load and traffic by disallowing crawlers
# to request action related URLs.
#
# NOTE - in order to make this have any effect, you have to set
# url_prefix_action to "action", cf. HelpOnConfiguration
Disallow: /action/
Look up these URLs with the URL lookup tool (status code and Content-Type recorded for each):
https://wiki.wxpython.org/.well-known/acme-challenge: 200 text/html;charset=UTF-8
https://wiki.wxpython.org/.well-known/csvm: 404 text/html; charset=iso-8859-1
https://wiki.wxpython.org/.well-known/nostr.json: 404 text/html; charset=iso-8859-1
https://wiki.wxpython.org/.well-known/security.txt: 404 text/html; charset=iso-8859-1
https://wiki.wxpython.org/.well-known/traffic-advice: 404 text/html; charset=iso-8859-1