📄 robots.txt
# Allow crawling of everything by default
User-agent: *
Disallow: