# Rev 411
# robots.txt
#
# Some directories should not be searched because they are dynamically generated
# and they create link loops.
#
# Robots Exclusion Standard:
# http://www.robotstxt.org/wc/exclusion.html#robotstxt
#
# User-agent: <agent-string>
# Disallow: <path>
# ---------------------------------------------------------------------------------

User-agent: *
Disallow: /WebSVN/
Disallow: /Web/Maintenance/