1515// specific language governing permissions and limitations
1616// under the License.
1717
18+ use crate :: execution:: operators:: ExecutionError ;
1819use arrow:: {
1920 array:: {
2021 cast:: AsArray ,
@@ -35,13 +36,12 @@ use arrow_array::builder::StringBuilder;
3536use arrow_array:: { DictionaryArray , StringArray , StructArray } ;
3637use arrow_schema:: DataType ;
3738use chrono:: { NaiveDate , NaiveDateTime , TimeZone , Timelike } ;
39+ use datafusion:: execution:: object_store:: ObjectStoreUrl ;
40+ use datafusion:: prelude:: SessionContext ;
3841use datafusion_comet_spark_expr:: utils:: array_with_timezone;
3942use datafusion_comet_spark_expr:: { timezone, EvalMode , SparkError , SparkResult } ;
4043use datafusion_common:: { cast:: as_generic_string_array, Result as DataFusionResult , ScalarValue } ;
4144use datafusion_expr:: ColumnarValue ;
42- // use datafusion_physical_expr::PhysicalExpr;
43- use crate :: execution:: operators:: ExecutionError ;
44- use datafusion:: prelude:: SessionContext ;
4545use num:: {
4646 cast:: AsPrimitive , integer:: div_floor, traits:: CheckedNeg , CheckedSub , Integer , Num ,
4747 ToPrimitive ,
@@ -50,7 +50,6 @@ use regex::Regex;
5050use std:: collections:: HashMap ;
5151use std:: str:: FromStr ;
5252use std:: { fmt:: Debug , hash:: Hash , num:: Wrapping , sync:: Arc } ;
53- use url:: Url ;
5453
// Default chrono-style format for rendering timestamps as strings:
// date, 24-hour time, and fractional seconds (e.g. "2023-01-15 12:34:56.789").
5655static TIMESTAMP_FORMAT : Option < & str > = Some ( "%Y-%m-%d %H:%M:%S%.f" ) ;
5655
@@ -1867,29 +1866,29 @@ fn trim_end(s: &str) -> &str {
18671866#[ cfg( not( feature = "hdfs" ) ) ]
18681867pub ( crate ) fn register_object_store (
18691868 session_context : Arc < SessionContext > ,
1870- ) -> Result < ( ) , ExecutionError > {
1869+ ) -> Result < ObjectStoreUrl , ExecutionError > {
18711870 let object_store = object_store:: local:: LocalFileSystem :: new ( ) ;
1872- let url = Url :: try_from ( "file://" ) . unwrap ( ) ;
1871+ let url = ObjectStoreUrl :: parse ( "file://" ) . unwrap ( ) ;
18731872 session_context
18741873 . runtime_env ( )
1875- . register_object_store ( & url, Arc :: new ( object_store) ) ;
1876- Ok ( ( ) )
1874+ . register_object_store ( ( & url) . as_ref ( ) , Arc :: new ( object_store) ) ;
1875+ Ok ( url )
18771876}
18781877
18791878#[ cfg( feature = "hdfs" ) ]
18801879pub ( crate ) fn register_object_store (
18811880 session_context : Arc < SessionContext > ,
1882- ) -> Result < ( ) , ExecutionError > {
1881+ ) -> Result < ObjectStoreUrl , ExecutionError > {
18831882 // TODO: read the namenode configuration from file schema or from spark.defaultFS
1884- let url = Url :: try_from ( "hdfs://namenode:9000" ) . unwrap ( ) ;
1883+ let url = ObjectStoreUrl :: parse ( "hdfs://namenode:9000" ) ? ;
18851884 if let Some ( object_store) =
18861885 datafusion_objectstore_hdfs:: object_store:: hdfs:: HadoopFileSystem :: new ( ( & url) . as_ref ( ) )
18871886 {
18881887 session_context
18891888 . runtime_env ( )
1890- . register_object_store ( & url, Arc :: new ( object_store) ) ;
1889+ . register_object_store ( ( & url) . as_ref ( ) , Arc :: new ( object_store) ) ;
18911890
1892- return Ok ( ( ) ) ;
1891+ return Ok ( url ) ;
18931892 }
18941893
18951894 Err ( ExecutionError :: GeneralError ( format ! (
0 commit comments