# robots.txt for https://codewithgarry.com/
# Global robots.txt for CodeWithGarry - Updated September 8, 2025

# Allow all major search engines to crawl
User-agent: *
Allow: /
# Disallow backup directories and temporary files
Disallow: /backup/
Disallow: /dist/
Disallow: /*.backup
Disallow: /*.js_backup
Disallow: /*.scroll_backup
Disallow: /node_modules/
Disallow: /.git/
Disallow: /.vscode/
Disallow: /logs/
Disallow: /tmp/
# Crawl-delay for general bots (in seconds)
# (placed inside the "User-agent: *" group so it applies to general bots,
# not to the last named bot group; note Googlebot ignores Crawl-delay)
Crawl-delay: 1

# Allow specific bots full access
User-agent: Googlebot
Allow: /

User-agent: Bingbot
Allow: /

User-agent: facebookexternalhit
Allow: /

User-agent: Twitterbot
Allow: /

# Sitemap locations
Sitemap: https://codewithgarry.com/sitemap.xml

# Host directive (preferred domain)
# NOTE: non-standard directive (Yandex-specific, deprecated); most crawlers
# ignore it — prefer canonical links / redirects for domain preference
Host: https://codewithgarry.com