@@ -105,7 +105,7 @@ def manual():
     user_org = current_user.get_org()
     user_id = current_user.get_user_id()
     l_cookiejar = crawlers.api_get_cookiejars_selector(user_org, user_id)
-    crawlers_types = crawlers.get_crawler_all_types()
+    crawlers_types = ['onion', 'web']
     proxies = []  # TODO HANDLE PROXIES
     return render_template("crawler_manual.html",
                            is_manager_connected=crawlers.get_lacus_connection_metadata(),
@@ -219,6 +219,7 @@ def send_to_spider():
         return create_json_response(res[0], res[1])
     return redirect(url_for('crawler_splash.manual'))

+# Send Unknown onion to crawler
 @crawler_splash.route("/crawlers/domain_discovery", methods=['GET'])
 @login_required
 @login_user_no_api
@@ -482,6 +483,7 @@ def crawlers_domain_download():
 @login_read_only
 def domains_explorer_post_filter():
     domain_onion = request.form.get('domain_onion_switch')
+    domain_i2p = request.form.get('domain_i2p_switch')
     domain_regular = request.form.get('domain_regular_switch')
     date_from = request.form.get('date_from')
     date_to = request.form.get('date_to')
@@ -493,7 +495,7 @@ def domains_explorer_post_filter():
         date_from = None
         date_to = None

-    if domain_onion and domain_regular:
+    if domain_onion and domain_regular and domain_i2p:
         if date_from and date_to:
             return redirect(url_for('crawler_splash.domains_explorer_all', date_from=date_from, date_to=date_to))
         else:
@@ -503,6 +505,11 @@ def domains_explorer_post_filter():
             return redirect(url_for('crawler_splash.domains_explorer_web', date_from=date_from, date_to=date_to))
         else:
             return redirect(url_for('crawler_splash.domains_explorer_web'))
+    elif domain_i2p:
+        if date_from and date_to:
+            return redirect(url_for('crawler_splash.domains_explorer_i2p', date_from=date_from, date_to=date_to))
+        else:
+            return redirect(url_for('crawler_splash.domains_explorer_i2p'))
     else:
         if date_from and date_to:
             return redirect(url_for('crawler_splash.domains_explorer_onion', date_from=date_from, date_to=date_to))
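For readability, the redirect priority that domains_explorer_post_filter implements after this change can be summarised as a small sketch. This is not part of the diff; it assumes the branch elided between the two hunks above is the pre-existing `elif domain_regular:` test.

# Sketch of the dispatch order after this change (assumption: the unshown
# branch between the two hunks is `elif domain_regular:`).
def explorer_endpoint(domain_onion, domain_regular, domain_i2p):
    if domain_onion and domain_regular and domain_i2p:
        return 'crawler_splash.domains_explorer_all'
    elif domain_regular:
        return 'crawler_splash.domains_explorer_web'
    elif domain_i2p:
        return 'crawler_splash.domains_explorer_i2p'
    else:
        return 'crawler_splash.domains_explorer_onion'

With this ordering, the combined "all" view is reached only when all three switches are set; any other combination that includes the regular switch falls through to the web-only view.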
@@ -522,7 +529,7 @@ def domains_explorer_all():
     except:
         page = 1

-    dict_data = Domains.get_domains_up_by_filers(['onion', 'web'], page=page, date_from=date_from, date_to=date_to)
+    dict_data = Domains.get_domains_up_by_filers(Domains.get_all_domains_types(), page=page, date_from=date_from, date_to=date_to)
     return render_template("domain_explorer.html", dict_data=dict_data, bootstrap_label=bootstrap_label, domain_type='all')


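The "all" explorer now derives its type list from Domains.get_all_domains_types() instead of the hardcoded ['onion', 'web'], presumably so that i2p (and any future type) is picked up automatically. The helper's implementation is not shown in this diff; the sketch below is only the assumed behaviour.

# Assumed behaviour only; the real Domains.get_all_domains_types() is not part of this diff.
def get_all_domains_types():
    return ['onion', 'i2p', 'web']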
@@ -542,6 +549,21 @@ def domains_explorer_onion():
     return render_template("domain_explorer.html", dict_data=dict_data, bootstrap_label=bootstrap_label,
                            domain_type='onion')

+@crawler_splash.route('/domains/explorer/i2p', methods=['GET'])
+@login_required
+@login_read_only
+def domains_explorer_i2p():
+    page = request.args.get('page')
+    date_from = request.args.get('date_from')
+    date_to = request.args.get('date_to')
+    try:
+        page = int(page)
+    except:
+        page = 1
+
+    dict_data = Domains.get_domains_up_by_filers(['i2p'], page=page, date_from=date_from, date_to=date_to)
+    return render_template("domain_explorer.html", dict_data=dict_data, bootstrap_label=bootstrap_label,
+                           domain_type='i2p')

 @crawler_splash.route('/domains/explorer/web', methods=['GET'])
 @login_required
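A quick way to smoke-test the new i2p explorer route once the branch is running. The base URL, the session cookie, and the YYYYMMDD date format are assumptions; none of them are defined by this diff.

# Hypothetical smoke test for /domains/explorer/i2p (not part of the diff).
# Assumes a local AIL instance, an authenticated session cookie, and YYYYMMDD dates.
import requests

BASE_URL = 'https://127.0.0.1:7000'                    # assumed local instance
COOKIES = {'session': '<cookie of a logged-in user>'}  # assumed auth mechanism

resp = requests.get(f'{BASE_URL}/domains/explorer/i2p',
                    params={'page': 1, 'date_from': '20240101', 'date_to': '20240131'},
                    cookies=COOKIES,
                    verify=False)                      # self-signed certificate assumed
print(resp.status_code)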