# See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# The next two lines ban all spiders from the entire site:
User-Agent: *
Disallow: /
# We allow crawling of the following pages so that Google can de-index them
Allow: /store/catering/sd/jefferson$
Allow: /catering/subway-groton$
Allow: /catering/ct/bethel$
Allow: /catering/sd/leola$
Allow: /$