# Robots.txt file for lovableproduct.agency

# Allow all user agents
User-agent: *
Allow: /

# Allow search engines to index important content
Allow: /index.html
Allow: /*.html$
Allow: /*.png$
Allow: /*.jpg$
Allow: /*.svg$
Allow: /*.webp$
Allow: /*.avif$

# Explicitly allow favicon files
Allow: /favicon.ico
Allow: /favicon-16x16.png
Allow: /favicon-32x32.png
Allow: /apple-touch-icon.png
Allow: /site.webmanifest

# Disallow access to certain files and directories
Disallow: /private/
Disallow: /*.json$
Disallow: /api/
Disallow: /tmp/
Disallow: /admin/
Disallow: /dev/

# Special instructions for specific bots
User-agent: Googlebot
Allow: /
Allow: /*.css$
Allow: /*.js$
Allow: /favicon.ico
Allow: /favicon-*.png
Allow: /apple-touch-icon.png
Allow: /site.webmanifest

User-agent: Googlebot-Image
Allow: /*.png$
Allow: /*.jpg$
Allow: /*.svg$
Allow: /*.webp$
Allow: /*.avif$
Allow: /favicon.ico
Allow: /favicon-*.png
Allow: /apple-touch-icon.png

User-agent: Bingbot
Allow: /
Allow: /favicon.ico
Allow: /favicon-*.png
Allow: /apple-touch-icon.png
Allow: /site.webmanifest
# Bing respects the Crawl-delay directive
Crawl-delay: 5

# Sitemap location
Sitemap: https://lovableproduct.agency/sitemap.xml

# Note: Googlebot ignores the Crawl-delay directive.
# Use Google Search Console to manage crawl rate instead.

# Note: HTTP headers such as Cache-Control and Strict-Transport-Security
# must be set via server configuration; they cannot be set in robots.txt.