@@ -415,23 +415,19 @@ def setup_switchers(
                 ofile.write(line)
 
 
-def build_robots_txt(
-    versions: Iterable[Version],
-    languages: Iterable[Language],
+def copy_robots_txt(
     www_root: Path,
     group,
     skip_cache_invalidation,
     http: urllib3.PoolManager,
 ) -> None:
-    """Disallow crawl of EOL versions in robots.txt."""
+    """Copy robots.txt to www_root."""
     if not www_root.exists():
-        logging.info("Skipping robots.txt generation (www root does not even exist).")
+        logging.info("Skipping copying robots.txt (www root does not even exist).")
         return
     template_path = HERE / "templates" / "robots.txt"
-    template = jinja2.Template(template_path.read_text(encoding="UTF-8"))
-    rendered_template = template.render(languages=languages, versions=versions)
     robots_path = www_root / "robots.txt"
-    robots_path.write_text(rendered_template + "\n", encoding="UTF-8")
+    shutil.copyfile(template_path, robots_path)
     robots_path.chmod(0o775)
     run(["chgrp", group, robots_path])
     if not skip_cache_invalidation:
@@ -1204,9 +1200,7 @@ def build_docs(args) -> bool:
 
     build_sitemap(versions, languages, args.www_root, args.group)
     build_404(args.www_root, args.group)
-    build_robots_txt(
-        versions,
-        languages,
+    copy_robots_txt(
         args.www_root,
         args.group,
         args.skip_cache_invalidation,
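The change replaces per-build Jinja2 rendering of robots.txt (which interpolated versions and languages into the template) with a verbatim copy of the now-static template file. A minimal sketch of the new copy step, with hypothetical paths standing in for the values the real script derives from HERE and www_root, and omitting the chgrp and cache-invalidation steps:

import shutil
from pathlib import Path

# Hypothetical paths; build_docs.py uses HERE / "templates" / "robots.txt"
# and www_root / "robots.txt".
template_path = Path("templates") / "robots.txt"
robots_path = Path("/srv/docs") / "robots.txt"

shutil.copyfile(template_path, robots_path)  # byte-for-byte copy, no template rendering
robots_path.chmod(0o775)  # copyfile does not carry over permissions

Note that shutil.copyfile copies file contents only, not mode or ownership, which is why the function still chmods the result and runs chgrp on it afterwards.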