# Allow all robots access to public areas, but keep them out of
# directories that are irrelevant or sensitive
User-agent: *
Allow: /
Disallow: /tmp/
Disallow: /hidden/
Disallow: /nopublic/
Disallow: /cgi-bin/
Disallow: /private/
Disallow: /admin/
# Delay in seconds between successive requests to prevent server overload
# (non-standard directive; not all crawlers honor it)
Crawl-delay: 10

# Additional rules for specific crawlers
User-agent: Googlebot
Disallow: /no-google/

# Sitemap location
Sitemap: https://www.laetor.co/sitemap.xml