Sitemap: https://www.yesterdaystractors.com/sitemap.xml
#
# Last Modified 08/17/2020 KP
# Google has been sending us warnings that there are too many URLs
# for googlebot to follow. Telling it now to ignore redundant links
# to printer-friendly versions of a content page, plus admin links
# and user option links. All of these provide either duplicate content
# or content unremarkable to a visitor.
#
User-agent: * # Match all bots
Disallow: /testrewrite/ # Chris test directory
Disallow: /simon/ # Disallow access to Simon admin stuff
Disallow: /*simon # Disallow access to Simon admin stuff
Disallow: /ttales/ # Disallow access to any of the ttales/messages/9999.html files
Disallow: /*ttales # Disallow access to any of the ttales viewits
Disallow: /dbtest/ # Disallow access to any of the dbtest/messages/9999.html files
Disallow: /*dbtest # Disallow access to any of the dbtest viewits
Disallow: /*safemail # Block any URL that calls Safemail
Disallow: /*printerfriendly # Block redundant printer-friendly pages
Disallow: /*catalog-print.html # Block redundant printer-friendly pages
Disallow: /print_catalog.cgi # Block redundant printer-friendly pages
Disallow: /*showdetail # Block redundant visitor detail links
Disallow: /*ytcpanel # Block Options links meant for user config only
Disallow: /*ytforums # Block links meant for user pw retrieval, etc.
Disallow: /*reportcv # Block non-content links
Disallow: /*reportscam # Block non-content links
Disallow: /*boardshim # Block non-content links

# Block bots that are CPU hogs
#User-agent: The Knowledge AI
#Disallow: /
#User-agent: SemrushBot
#Disallow: /
#User-agent: AhrefsBot
#Disallow: /
#User-agent: MJ12Bot
#Disallow: /
#User-agent: DotBot
#Disallow: /
#User-agent: MauiBot
#Disallow: /
#User-agent: YandexBot
#Disallow: /
#User-agent: BLEXBot
#Disallow: /
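
# Note (illustrative, added for clarity): the /* rules above rely on
# wildcard matching, which Googlebot and most major crawlers honor.
# A pattern such as:
#   Disallow: /*printerfriendly
# blocks any URL whose path contains "printerfriendly" anywhere,
# e.g. a hypothetical /forums/printerfriendly.cgi?msg=1234, whereas a
# plain-prefix rule like Disallow: /simon/ only blocks URLs that begin
# with that path. Crawlers that ignore wildcards will skip the /* rules
# but still obey the prefix rules.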