# This robots.txt file controls crawling of URLs under https://firstsing.com.
# All crawlers are disallowed from crawling files in the "includes" directory,
# such as .css and .js, but Google needs them for rendering, so Googlebot is
# allowed to crawl them.

User-agent: *
Disallow: /includes/

User-agent: baiduspider
Allow: /includes/

User-agent: baiduspider-image
Allow: /includes/

User-agent: baiduspider-mobile
Allow: /includes/

User-agent: baiduspider-news
Allow: /includes/

User-agent: baiduspider-video
Allow: /includes/

User-agent: bingbot
Allow: /includes/

User-agent: msnbot-media
Allow: /includes/

User-agent: adidxbot
Allow: /includes/

User-agent: Googlebot
Allow: /includes/

User-agent: Googlebot-Image
Allow: /includes/

User-agent: Googlebot-Mobile
Allow: /includes/

User-agent: Googlebot-News
Allow: /includes/

User-agent: Googlebot-Video
Allow: /includes/

User-agent: Mediapartners-Google
Allow: /includes/

User-agent: AdsBot-Google
Allow: /includes/

User-agent: slurp
Allow: /includes/

User-agent: yandex
Allow: /includes/

Sitemap: https://firstsing.com/sitemap.xml