#
# robots.txt
#
# This file is read *ONLY* when the web-bot(s) needs to read it
# there is NOTHING you can do to force a webbot to read this file
# and obey it.
#

User-agent: AhrefsBot/5.0
Disallow: /

User-agent: AhrefsBot/5.1
Disallow: /

User-agent: AhrefsBot/6.1
Disallow: /

User-agent: Amazonbot/0.1
Disallow: /

User-agent: Applebot/0.1
Disallow: /

User-agent: archive.org_bot
Disallow: /

User-agent: Baiduspider
Disallow: /

User-agent: Baiduspider/2.0
Disallow: /

User-agent: Barkrowler
Disallow: /

User-agent: Barkrowler/0.9
Disallow: /

#Allowed 1/31/2023
User-agent: bingbot
#Disallow: /
Allow: /

#Allowed 1/31/2023
User-agent: bingbot/2.0
#Disallow: /
Allow: /

User-agent: BLEXBot/1.0
Disallow: /

User-agent: Bumble Bee/1.0
Disallow: /

User-agent: CCBot/2.0
Disallow: /

User-agent: Cliqzbot/1.0
Disallow: /

User-agent: Crawler/1.0
Disallow: /

User-agent: crawler4j
Disallow: /

User-agent: Daum/4.1
Disallow: /

User-agent: DeuSu/5.0.2
Disallow: /

User-agent: DoCoMo/2.0
Disallow: /

User-agent: DoCoMo
Disallow: /

User-agent: DotBot/1.1
Disallow: /

User-agent: Drupal
Disallow: /

User-agent: Exabot/3.0
Disallow: /

User-agent: Feedfetcher
Disallow: /

User-agent: gocrawl v0.4
Disallow: /

User-agent: Go-http-client/1.1
Disallow: /

User-agent: Googlebot-Image
Disallow: /

User-agent: Googlebot-Video/1.0
Disallow: /

User-agent: Googlebot/Nutch-1.7
Disallow: /

User-agent: Googlebot/2.1
Disallow: /

User-agent: Googlebot
Disallow: /

User-agent: IABot/2.0
Disallow: /

User-agent: ia_archiver
Disallow: /

User-agent: ICC-Crawler/2.0
Disallow: /

User-agent: ichiro/4.0
Disallow: /

User-agent: ichiro
Disallow: /

User-agent: istellabot/t.1
Disallow: /

User-agent: istellabot/Nutch-1.10
Disallow: /

User-agent: istellabot/Nutch-1.11
Disallow: /

User-agent: jpg-newsbot/2.0
Disallow: /

User-agent: Jyxobot
Disallow: /

User-agent: Jyxobot/1
Disallow: /

User-agent: ia_archiver
Disallow: /

User-agent: KomodiaBot/1.0
Disallow: /

User-agent: linkdexbot/2.0
Disallow: /

User-agent: MauiBot
Disallow: /
User-agent: MegaIndex.ru/2.0
Disallow: /

User-agent: MJ12bot/v1.4.5
Disallow: /

User-agent: mfibot/1.1
Disallow: /

User-agent: MLBot
Disallow: /

User-agent: MSNbot
Disallow: /

User-agent: msnbot
Disallow: /

User-agent: msnbot/2.0b
Disallow: /

User-agent: msnbot-media/1.1
Disallow: /

User-agent: msnbot-media
Disallow: /

User-agent: msnbot-images
Disallow: /

User-agent: Nutch-1.10
Disallow: /

User-agent: Bot/1.0
Disallow: /

User-agent: OER Commons Bot/1.0
Disallow: /

User-agent: Commons Bot/1.0
Disallow: /

User-agent: OrangeBot/2.0
Disallow: /

User-agent: Qwantify/2.2w
Disallow: /

User-agent: PiplBot
Disallow: /

User-agent: PrivateSearch/0.1.0
Disallow: /

User-agent: Scirus
Disallow: /

User-agent: SemanticScholarBot
Disallow: /

User-agent: SearchmetricsBot
Disallow: /

User-agent: SemrushBot
Disallow: /

User-agent: SemrushBot/3
Disallow: /

User-agent: SemrushBot/6~bl
Disallow: /

User-agent: SemrushBot/3~bl
Disallow: /

User-agent: SeznamBot/3.2
Disallow: /

User-agent: Slurp
Disallow: /

User-agent: Sogou web spider/4.0
Disallow: /

User-agent: Sogou web spider
Disallow: /

User-agent: Sogou
Disallow: /

User-agent: Sosospider
Disallow: /

User-agent: spbot/5.0.1
Disallow: /

User-agent: SputnikBot/2.3
Disallow: /

User-agent: SurdotlyBot/1.0
Disallow: /

User-agent: tbot-nutch
Disallow: /

User-agent: tbot-nutch/Nutch-1.10
Disallow: /

User-agent: tigerbot
Disallow: /

User-agent: Twiceler
Disallow: /

User-agent: Twitterbot/1.0
Disallow: /

User-agent: UT-Dorkbot/1.0
Disallow: /

User-agent: vebidoobot
Disallow: /

User-agent: voltron
Disallow: /

User-agent: WikiDo/1.1
Disallow: /

User-agent: YaCy
Disallow: /

User-agent: Yahoo! Slurp
Disallow: /

User-agent: Yandex
Disallow: /

User-agent: YandexBot
Disallow: /

User-agent: YandexBot/3.0
Disallow: /

User-agent: Yandex/2.01.000
Disallow: /

User-agent: YandexImages/3.0
Disallow: /

User-agent: Yeti
Disallow: /

User-agent: Yeti/1.1
Disallow: /

User-agent: YisouSpider
Disallow: /

User-agent: yoozBot-2.2
Disallow: /

User-agent: yrspider
Disallow: /

User-agent: *
Disallow: /cgi-bin/
Disallow: /tmp7/
Disallow: /graphics/
Disallow: /images/
Disallow: /rhessiws_book/
Disallow: /data/
Disallow: /wind3dp/

# Sitemap: https://research.ssl.berkeley.edu/sitemap.xml
# Crawl-Delay: 20