@@ -14,7 +14,7 @@ let HTTPError: typeof HTTPErrorClass;
1414 * **Example usage:**
1515 * ```javascript
1616 * // Load the robots.txt file
17- * const robots = await RobotsFile .find('https://crawlee.dev/js/docs/introduction/first-crawler');
17+ * const robots = await RobotsTxtFile .find('https://crawlee.dev/js/docs/introduction/first-crawler');
1818 *
1919 * // Check if a URL should be crawled according to robots.txt
2020 * const url = 'https://crawlee.dev/api/puppeteer-crawler/class/PuppeteerCrawler';
@@ -26,7 +26,7 @@ let HTTPError: typeof HTTPErrorClass;
2626 * await crawler.addRequests(await robots.parseUrlsFromSitemaps());
2727 * ```
2828 */
29- export class RobotsFile {
29+ export class RobotsTxtFile {
3030 private constructor (
3131 private robots : Pick < Robot , 'isAllowed' | 'getSitemaps' > ,
3232 private proxyUrl ?: string ,
@@ -37,12 +37,12 @@ export class RobotsFile {
3737 * @param url the URL to fetch robots.txt for
3838 * @param [proxyUrl] a proxy to be used for fetching the robots.txt file
3939 */
40- static async find ( url : string , proxyUrl ?: string ) : Promise < RobotsFile > {
40+ static async find ( url : string , proxyUrl ?: string ) : Promise < RobotsTxtFile > {
4141 const robotsTxtFileUrl = new URL ( url ) ;
4242 robotsTxtFileUrl . pathname = '/robots.txt' ;
4343 robotsTxtFileUrl . search = '' ;
4444
45- return RobotsFile . load ( robotsTxtFileUrl . toString ( ) , proxyUrl ) ;
45+ return RobotsTxtFile . load ( robotsTxtFileUrl . toString ( ) , proxyUrl ) ;
4646 }
4747
4848 /**
@@ -51,11 +51,11 @@ export class RobotsFile {
5151 * @param content contents of robots.txt
5252 * @param [proxyUrl] a proxy to be used for fetching the robots.txt file
5353 */
54- static from ( url : string , content : string , proxyUrl ?: string ) : RobotsFile {
55- return new RobotsFile ( robotsParser ( url , content ) , proxyUrl ) ;
54+ static from ( url : string , content : string , proxyUrl ?: string ) : RobotsTxtFile {
55+ return new RobotsTxtFile ( robotsParser ( url , content ) , proxyUrl ) ;
5656 }
5757
58- protected static async load ( url : string , proxyUrl ?: string ) : Promise < RobotsFile > {
58+ protected static async load ( url : string , proxyUrl ?: string ) : Promise < RobotsTxtFile > {
5959 if ( ! HTTPError ) {
6060 HTTPError = ( await import ( 'got-scraping' ) ) . HTTPError ;
6161 }
@@ -68,10 +68,10 @@ export class RobotsFile {
6868 responseType : 'text' ,
6969 } ) ;
7070
71- return new RobotsFile ( robotsParser ( url . toString ( ) , response . body ) , proxyUrl ) ;
71+ return new RobotsTxtFile ( robotsParser ( url . toString ( ) , response . body ) , proxyUrl ) ;
7272 } catch ( e ) {
7373 if ( e instanceof HTTPError && e . response . statusCode === 404 ) {
74- return new RobotsFile (
74+ return new RobotsTxtFile (
7575 {
7676 isAllowed ( ) {
7777 return true ;
@@ -117,3 +117,6 @@ export class RobotsFile {
117117 return ( await this . parseSitemaps ( ) ) . urls ;
118118 }
119119}
120+
121+ // to stay backwards compatible
122+ export { RobotsTxtFile as RobotsFile } ;