# robots.txt for http://ssd.jpl.nasa.gov/
# Instruct all robots to ignore the following directories and files ...

User-agent: *
Disallow: /cgi-bin/
Disallow: /lost+found/
Disallow: /css/
Disallow: /dat/
Disallow: /images/
Disallow: /include/
Disallow: /lib/
Disallow: /bin/
Disallow: /page/
Disallow: /portal_images/
Disallow: /redirect/
Disallow: /SOET/
Disallow: /txt/
Disallow: /test
Disallow: /eph_spans.cgi
Disallow: /_
# Some robots understand wildcards (so prevent CGI execution):
Disallow: /*?
# Wildcard rules like the one above are honored by major crawlers (e.g.
# Googlebot, Bingbot) and are standardized in RFC 9309, but they are not
# universal; the rule below is a plain prefix match that most parsers also
# apply to query-string URLs:
Disallow: /sbdb.cgi?
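# To check these rules programmatically, one can use Python's standard-library
# parser (a sketch, kept in comments so this file remains valid robots.txt;
# note that urllib.robotparser does plain prefix matching and does not expand
# "*" wildcards, and "sstr=433" is just an illustrative query string):
#
#   from urllib.robotparser import RobotFileParser
#
#   rp = RobotFileParser("http://ssd.jpl.nasa.gov/robots.txt")
#   rp.read()
#   rp.can_fetch("*", "http://ssd.jpl.nasa.gov/sbdb.cgi?sstr=433")
#   # -> False: blocked by the "Disallow: /sbdb.cgi?" prefix rule
#   rp.can_fetch("*", "http://ssd.jpl.nasa.gov/sbdb.cgi")
#   # -> True: no query string, so no Disallow prefix matches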