# Robots.txt for DaxDuckDex - Mak Dulac's Website
# https://www.makdulac.com

User-agent: *
Allow: /

# === SECURITY: Block sensitive directories and files ===
Disallow: /backend/
Disallow: /node_modules/
Disallow: /.git/
Disallow: /logs/
Disallow: /snap/
Disallow: /admin/
Disallow: /.env
Disallow: /.env.*
Disallow: /package.json
Disallow: /package-lock.json
Disallow: /submissions.json
Disallow: /database/
Disallow: /*.sqlite
Disallow: /*.sqlite3
Disallow: /*.db
Disallow: /backup/
Disallow: /temp/
Disallow: /tmp/

# === ALLOW: Public assets and content ===
Allow: /css/
Allow: /js/
Allow: /img/
Allow: /logos_archives/
Allow: /data/daxdex.json
Allow: /favicon*.ico
Allow: /daxduckdex.html
Allow: /index.html

# Crawl delay for respectful crawling (non-standard directive, value in seconds;
# honored by crawlers such as Bing and Yandex, ignored by Googlebot)
Crawl-delay: 1

# === SEO OPTIMIZATION ===
# Sitemap location
Sitemap: https://www.makdulac.com/sitemap.xml

# === SPECIFIC USER-AGENTS ===
# Googlebot: no separate group. A crawler obeys only the most specific matching
# group, so a "User-agent: Googlebot" group containing only "Allow: /" would
# override every Disallow above for Googlebot. Googlebot also ignores
# Crawl-delay; its crawl rate is managed automatically and via Search Console.

# Block aggressive or unnecessary bots
User-agent: AhrefsBot
Disallow: /

User-agent: MJ12bot
Disallow: /

User-agent: SemrushBot
Disallow: /

# Allow social media crawlers for OpenGraph previews / SEO
User-agent: facebookexternalhit
Allow: /

User-agent: Twitterbot
Allow: /

User-agent: LinkedInBot
Allow: /