# robots.txt for http://www.crucial.com

User-agent: Morning Paper 1.0 (robots.txt compliant!)
Disallow: /

# User-agent: ia_archiver
# Disallow: /

User-agent: *
Disallow: /bin/
Disallow: /cache/
Disallow: /sandbox/
Disallow: /tools/
Disallow: /webservices/
Disallow: /websitedown/
Disallow: /w3c/
Disallow: /ajax/
Disallow: /common/
Disallow: /LexarEnterprise/
Disallow: /Master/
Disallow: /store/checkout/
Disallow: /uk/store/checkout/
Disallow: /eu/mysite/
Disallow: /uk/mysite/
Disallow: /us/mysite/
Disallow: /support/cookie.aspx
Disallow: /uk/support/cookie.aspx
Disallow: /eu/support/cookie.aspx

User-agent: Yandex
User-agent: moget
User-agent: ichiro
User-agent: NaverBot
User-agent: Yeti
User-agent: Baiduspider
User-agent: Baiduspider-video
User-agent: Baiduspider-image
User-agent: sogou spider
User-agent: YoudaoBot
Crawl-Delay: 10

# This file can be used to affect how search engines and other web site crawlers see your site.
# For more information, please see http://www.w3.org/TR/html4/appendix/notes.html#h-B.4.1.1
# WebMatrix 2.0
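
A crawler that honors these rules can evaluate them with Python's standard urllib.robotparser module. The sketch below is illustrative only and is not part of the robots.txt itself: it parses a small subset of the directives above, and the "/products/" path in the second check is a made-up example, not a path taken from this file.

from urllib.robotparser import RobotFileParser

# A subset of the rules above; parse() accepts an iterable of lines,
# so no network request is needed.
RULES = """\
User-agent: *
Disallow: /bin/
Disallow: /store/checkout/

User-agent: Yandex
Crawl-Delay: 10
"""

parser = RobotFileParser()
parser.parse(RULES.splitlines())

# Generic crawlers are kept out of the checkout flow...
print(parser.can_fetch("*", "http://www.crucial.com/store/checkout/"))  # False
# ...but may fetch other pages (hypothetical path, not from the file).
print(parser.can_fetch("*", "http://www.crucial.com/products/"))        # True

# Yandex is asked to wait 10 seconds between requests; no delay is set for other agents.
print(parser.crawl_delay("Yandex"))  # 10
print(parser.crawl_delay("*"))       # None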