# Robots.txt last edited by Jos Jonkeren 2022-12-05 15:02.
# Set filters to not be indexed. Bot for Merchant center should be able to crawl and index.

# Crawlers Setup
User-agent: *
# Google doesn't support the Crawl-delay directive, so its crawlers will just ignore it.
# Bing: set crawl to maximum one page per 5 seconds.
Crawl-delay: 5

# Sitemap location
Sitemap: https://www.tommyteleshopping.com/sitemap/tommynl/sitemap.xml

# Allowable Index
Allow: /*?p=
Allow: /*

# Disallow different URL parameters (layered-navigation filters) to be indexed.
# Paths start with "/" so strict parsers apply them; a trailing "*" is
# redundant because robots.txt patterns already match any suffix.
Disallow: /*brand=
Disallow: /*cat=
Disallow: /*price=
Disallow: /*size=

# Directories
Disallow: /app/
Disallow: /bin/
Disallow: /dev/
Disallow: /lib/
Disallow: /phpserver/
Disallow: /pkginfo/
Disallow: /report/
Disallow: /setup/
Disallow: /update/
Disallow: /var/
Disallow: /vendor/

# Paths (clean URLs)
Disallow: /index.php/
Disallow: /catalog/product_compare/
Disallow: /catalog/category/view/
# Disallow: /catalog/product/view/
Disallow: /catalogsearch/
Disallow: /checkout/
Disallow: /control/
Disallow: /contacts/
Disallow: /customer/
Disallow: /customize/
Disallow: /newsletter/
Disallow: /review/
Disallow: /sendfriend/
Disallow: /wishlist/

# Files
Disallow: /composer.json
Disallow: /composer.lock
Disallow: /CONTRIBUTING.md
Disallow: /CONTRIBUTOR_LICENSE_AGREEMENT.html
Disallow: /COPYING.txt
Disallow: /Gruntfile.js
Disallow: /LICENSE.txt
Disallow: /LICENSE_AFL.txt
Disallow: /nginx.conf.sample
Disallow: /package.json
Disallow: /php.ini.sample
Disallow: /RELEASE_NOTES.txt

# Do not index pages that are sorted or filtered.
#Disallow: /*?*product_list_mode=
#Disallow: /*?*product_list_order=
#Disallow: /*?*product_list_limit=
#Disallow: /*?*product_list_dir=

# CVS, SVN directory and dump files.
# Robots.txt path matching is case-sensitive, so extensions are lowercase
# to match the actual files (.zip, .sql, .tgz) and hidden directories
# (.svn, .idea); the previous capitalised forms matched nothing.
# NOTE(review): CVS directories are literally uppercase "CVS" — pattern
# kept as authored; confirm whether /CVS/ was intended instead.
Disallow: /*.CVS
Disallow: /*.zip$
Disallow: /*.svn$
Disallow: /*.idea$
Disallow: /*.sql$
Disallow: /*.tgz$