# The FULL URL to the DSpace sitemaps
# The base URL http://ntgspublic.intersearch.com.au/ntgspublicjspui is auto-filled with the value in dspace.cfg
# XML sitemap is listed first as it is preferred by most search engines
Sitemap: https://www.geoscience.nt.gov.au/gemis/ntgsjspui/sitemap

##########################
# Default Access Group
# (NOTE: blank lines are not allowable in a group record)
##########################
User-agent: *
# Disable access to Discovery search and filters
Disallow: /discover
Disallow: /simple-search
Disallow: /gemis/ntgsjspui/discover
Disallow: /gemis/ntgsjspui/simple-search
Disallow: /gemis/devjspui/discover
Disallow: /gemis/devjspui/simple-search
Disallow: /contact
Disallow: /feedback
Disallow: /forgot
Disallow: /login
Disallow: /register
Disallow: /gemis/ntgsjspui/browse
Disallow: /gemis/ntgsjspui/statistics
Disallow: /gemis/ntgsjspui/contact
Disallow: /gemis/ntgsjspui/feedback
Disallow: /gemis/ntgsjspui/forgot
Disallow: /gemis/ntgsjspui/login
Disallow: /gemis/ntgsjspui/register
Disallow: /gemis/devjspui/browse
Disallow: /gemis/devjspui/statistics
Disallow: /gemis/devjspui/contact
Disallow: /gemis/devjspui/feedback
Disallow: /gemis/devjspui/forgot
Disallow: /gemis/devjspui/login
Disallow: /gemis/devjspui/register

##############################
# Section for misbehaving bots
# The following directives to block specific robots were borrowed from Wikipedia's robots.txt
##############################

# advertising-related bots:
User-agent: Mediapartners-Google*
Disallow: /

# Crawlers that are kind enough to obey, but which we'd rather not have
# unless they're feeding search engines.
User-agent: UbiCrawler
Disallow: /

User-agent: DOC
Disallow: /

User-agent: Zao
Disallow: /

# Some bots are known to be trouble, particularly those designed to copy
# entire sites. Please obey robots.txt.
User-agent: sitecheck.internetseer.com
Disallow: /

User-agent: Zealbot
Disallow: /

User-agent: MSIECrawler
Disallow: /

User-agent: SiteSnagger
Disallow: /

User-agent: WebStripper
Disallow: /

User-agent: WebCopier
Disallow: /

User-agent: Fetch
Disallow: /

User-agent: Offline Explorer
Disallow: /

User-agent: Teleport
Disallow: /

User-agent: TeleportPro
Disallow: /

User-agent: WebZIP
Disallow: /

User-agent: linko
Disallow: /

User-agent: HTTrack
Disallow: /

User-agent: Microsoft.URL.Control
Disallow: /

User-agent: Xenu
Disallow: /

User-agent: larbin
Disallow: /

User-agent: libwww
Disallow: /

User-agent: ZyBORG
Disallow: /

User-agent: Download Ninja
Disallow: /

# Misbehaving: requests much too fast:
User-agent: fast
Disallow: /

#
# If your DSpace is going down because of someone using recursive wget,
# you can activate the following rule.
#
# If your own faculty is bringing down your DSpace with recursive wget,
# you can advise them to use the --wait option to set the delay between
# hits (see the illustrative example at the end of this file).
#
#User-agent: wget
#Disallow: /

#
# The 'grub' distributed client has been *very* poorly behaved.
#
User-agent: grub-client
Disallow: /

#
# Doesn't follow robots.txt anyway, but...
#
User-agent: k2spider
Disallow: /

#
# Hits many times per second, not acceptable
# http://www.nameprotect.com/botinfo.html
User-agent: NPBot
Disallow: /

# A capture bot, downloads gazillions of pages with no public benefit
# http://www.webreaper.net/
User-agent: WebReaper
Disallow: /
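
##############################
# Illustrative note (a comment only, not a crawl directive): a minimal sketch
# of the polite recursive wget invocation suggested above, assuming GNU wget
# and the GEMIS front page as the starting URL. --wait sets the delay in
# seconds between successive requests; the 2-second value is an example.
#
#   wget --recursive --wait=2 https://www.geoscience.nt.gov.au/gemis/
##############################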