diff --git a/CHANGELOG.md b/CHANGELOG.md index 3b38495c48dd..125a2266289f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -40,6 +40,7 @@ - [New `Profile` API for timing code execution.][14827] - [Expanded S3 API with versions and signed uri][14831] - [Support for reading JSON data from database connections.][14872] +- [JSON functions on Table and Column. XML and Hashset fixes.][14884] [14522]: https://github.com/enso-org/enso/pull/14522 [14476]: https://github.com/enso-org/enso/pull/14476 @@ -56,6 +57,7 @@ [14827]: https://github.com/enso-org/enso/pull/14827 [14831]: https://github.com/enso-org/enso/pull/14831 [14872]: https://github.com/enso-org/enso/pull/14872 +[14884]: https://github.com/enso-org/enso/pull/14884 #### Enso Language & Runtime diff --git a/distribution/lib/Standard/Base/0.0.0-dev/docs/api/Data/Dictionary.md b/distribution/lib/Standard/Base/0.0.0-dev/docs/api/Data/Dictionary.md index 699b329f46d9..8e1e61f04a0f 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/docs/api/Data/Dictionary.md +++ b/distribution/lib/Standard/Base/0.0.0-dev/docs/api/Data/Dictionary.md @@ -20,13 +20,16 @@ - map_keys self function:Standard.Base.Any.Any -> Standard.Base.Any.Any - map_with_key self function:Standard.Base.Any.Any -> Standard.Base.Any.Any - not_empty self -> Standard.Base.Any.Any + - pretty self -> Standard.Base.Any.Any - remove self key:Standard.Base.Any.Any= -> Standard.Base.Any.Any - singleton key:Standard.Base.Any.Any value:Standard.Base.Any.Any -> Standard.Base.Any.Any - size self -> Standard.Base.Any.Any + - to_display_text self -> Standard.Base.Any.Any - to_text self -> Standard.Base.Any.Any - to_vector self -> Standard.Base.Any.Any - transform self function:Standard.Base.Any.Any -> Standard.Base.Any.Any - values self -> Standard.Base.Any.Any - key_value_widget -> Standard.Base.Metadata.Widget - key_widget dict:Standard.Base.Data.Dictionary.Dictionary -> Standard.Base.Metadata.Widget -- Standard.Base.Data.Dictionary.Dictionary.from 
that:Standard.Base.Data.Vector.Vector -> Standard.Base.Data.Dictionary.Dictionary +- Standard.Base.Data.Dictionary.Dictionary.from that:Standard.Base.Data.Vector.Vector error_on_duplicates:Standard.Base.Data.Boolean.Boolean= -> Standard.Base.Data.Dictionary.Dictionary +- Standard.Base.Data.Vector.Vector.from that:Standard.Base.Data.Dictionary.Dictionary -> Standard.Base.Data.Vector.Vector diff --git a/distribution/lib/Standard/Base/0.0.0-dev/docs/api/Data/Hashset.md b/distribution/lib/Standard/Base/0.0.0-dev/docs/api/Data/Hashset.md index f71f972f73da..8d4a2c4b8dec 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/docs/api/Data/Hashset.md +++ b/distribution/lib/Standard/Base/0.0.0-dev/docs/api/Data/Hashset.md @@ -1,21 +1,21 @@ ## Enso Signatures 1.0 ## module Standard.Base.Data.Hashset - type Hashset - - Value underlying_dictionary:(Standard.Base.Data.Dictionary.Dictionary Standard.Base.Any.Any Standard.Base.Nothing.Nothing) - contains self value:Standard.Base.Any.Any -> Standard.Base.Any.Any - contains_relational self value:Standard.Base.Any.Any -> Standard.Base.Any.Any - difference self other:Standard.Base.Data.Hashset.Hashset -> Standard.Base.Any.Any - empty -> Standard.Base.Any.Any - - from_vector vector:Standard.Base.Data.Vector.Vector error_on_duplicates:Standard.Base.Data.Boolean.Boolean= -> Standard.Base.Any.Any - - insert self value:Standard.Base.Any.Any -> Standard.Base.Any.Any + - from_vector values:Standard.Base.Data.Vector.Vector error_on_duplicates:Standard.Base.Data.Boolean.Boolean= -> Standard.Base.Data.Hashset.Hashset!Standard.Base.Errors.Illegal_Argument.Illegal_Argument + - insert self value:Standard.Base.Any.Any -> Standard.Base.Data.Hashset.Hashset - intersection self other:Standard.Base.Data.Hashset.Hashset -> Standard.Base.Any.Any - is_empty self -> Standard.Base.Any.Any - length self -> Standard.Base.Any.Any - not_empty self -> Standard.Base.Any.Any + - pretty self -> Standard.Base.Any.Any - size self -> Standard.Base.Any.Any + - 
to_display_text self -> Standard.Base.Any.Any - to_text self -> Standard.Base.Any.Any - to_vector self -> Standard.Base.Any.Any - union self other:Standard.Base.Data.Hashset.Hashset -> Standard.Base.Any.Any -- type Hashset_Comparator - - compare x:Standard.Base.Any.Any y:Standard.Base.Any.Any -> Standard.Base.Any.Any - - hash x:Standard.Base.Any.Any -> Standard.Base.Any.Any +- Standard.Base.Data.Hashset.Hashset.from that:Standard.Base.Data.Vector.Vector error_on_duplicates:Standard.Base.Data.Boolean.Boolean= -> Standard.Base.Data.Hashset.Hashset +- Standard.Base.Data.Vector.Vector.from that:Standard.Base.Data.Hashset.Hashset -> Standard.Base.Data.Vector.Vector diff --git a/distribution/lib/Standard/Base/0.0.0-dev/docs/api/Data/Json/Extensions.md b/distribution/lib/Standard/Base/0.0.0-dev/docs/api/Data/Json/Extensions.md index c5691618ea5b..9cd70d308f0a 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/docs/api/Data/Json/Extensions.md +++ b/distribution/lib/Standard/Base/0.0.0-dev/docs/api/Data/Json/Extensions.md @@ -1,11 +1,13 @@ ## Enso Signatures 1.0 ## module Standard.Base.Data.Json.Extensions +- Standard.Base.Any.Any.json_stringify self -> Standard.Base.Data.Text.Text - Standard.Base.Any.Any.to_js_object self -> Standard.Base.Any.Any - Standard.Base.Any.Any.to_json self -> Standard.Base.Any.Any - Standard.Base.Data.Array.Array.to_js_object self -> Standard.Base.Any.Any - Standard.Base.Data.Boolean.Boolean.to_js_object self -> Standard.Base.Any.Any - Standard.Base.Data.Decimal.Decimal.to_js_object self -> Standard.Base.Any.Any - Standard.Base.Data.Dictionary.Dictionary.to_js_object self -> Standard.Base.Any.Any +- Standard.Base.Error.Error.json_stringify self -> Standard.Base.Data.Text.Text - Standard.Base.Error.Error.to_js_object self -> Standard.Base.Any.Any - Standard.Base.Error.Error.to_json self -> Standard.Base.Any.Any - Standard.Base.Data.Locale.Locale.to_js_object self -> Standard.Base.Any.Any diff --git 
a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Dictionary.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Dictionary.enso index 05657d3f3bf7..35fdab9b925b 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Dictionary.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Dictionary.enso @@ -515,6 +515,22 @@ type Dictionary key value to_text : Text to_text self = @Builtin_Method "Dictionary.to_text" + ## --- + private: true + --- + to_display_text : Text + to_display_text self = + texts = self.keys.take (..First 40) . map k-> k.to_display_text + "=" + (self.get k . to_display_text) + suffix = if self.length > 40 then " and " + (self.length - 40).to_text + " more elements}" else "}" + "{" + (texts.join ", ") + suffix + + ## --- + private: true + --- + pretty : Text + pretty self = + "Dictionary.from_vector " + self.to_vector.pretty + ## --- private: true --- @@ -538,7 +554,9 @@ key_value_widget -> Widget = item_editor = Single_Choice display=..Always values=[pair] Vector_Editor item_editor=item_editor display=..Always item_default=default -## --- - private: true - --- -Dictionary.from (that:Vector) = Dictionary.from_vector that +## Convert a Vector of key-value pairs to a Dictionary. +Dictionary.from (that:Vector) (error_on_duplicates:Boolean=True) = + Dictionary.from_vector that error_on_duplicates + +## Convert Dictionary to a Vector. 
+Vector.from (that:Dictionary) = that.to_vector \ No newline at end of file diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Hashset.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Hashset.enso index 0c9d2480ccc9..8ab004c96362 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Hashset.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Hashset.enso @@ -2,23 +2,25 @@ import project.Any.Any import project.Data.Array_Proxy.Array_Proxy import project.Data.Dictionary.Dictionary import project.Data.Numbers.Integer -import project.Data.Ordering.Ordering import project.Data.Text.Text import project.Data.Vector.Vector import project.Errors.Illegal_Argument.Illegal_Argument import project.Nothing.Nothing from project.Data.Boolean import Boolean, False, True from project.Data.Text.Extensions import all +from project.Metadata.Widget import Vector_Editor +from project.Widget_Helpers import make_all_selector ## An unordered collection of unique values. type Hashset ## --- private: true --- - Value (underlying_dictionary : Dictionary Any Nothing) + private Value (underlying_dictionary : Dictionary Any Nothing) ## --- - private: true + aliases: [set] + icon: array_new2 advanced: true --- Constructs a new set from a vector. @@ -30,9 +32,9 @@ type Hashset occurrence of each duplicated element is retained in the set. If set to `True` it will raise an `Illegal_Argument` if duplicate elements are encountered. - from_vector : Vector Any -> Boolean -> Hashset ! Illegal_Argument - from_vector (vector : Vector) (error_on_duplicates : Boolean = False) = - pairs_array = Array_Proxy.new vector.length (i-> [vector.at i, Nothing]) + @values (Vector_Editor item_editor=make_all_selector display=..Always item_default="Nothing") + from_vector (values : Vector) (error_on_duplicates : Boolean = False) -> Hashset ! 
Illegal_Argument = + pairs_array = Array_Proxy.new values.length (i-> [values.at i, Nothing]) pairs = Vector.from_polyglot_array pairs_array dictionary = Dictionary.from_vector pairs error_on_duplicates=error_on_duplicates Hashset.Value dictionary @@ -120,8 +122,8 @@ type Hashset icon: row_add --- Adds a value to this set. - insert : Any -> Hashset - insert self value = + @value (make_all_selector ..Always) + insert self value:Any -> Hashset = dictionary = self.underlying_dictionary.insert value Nothing Hashset.Value dictionary @@ -165,22 +167,30 @@ type Hashset private: true --- to_text : Text - to_text self = self.to_vector.map .pretty . join ", " "Hashset{" "}" + to_text self = + inner = self.to_vector.to_text + "{" + (inner.drop (..First 1) . drop (..Last 1)) + "}" -## --- - private: true - --- -type Hashset_Comparator ## --- private: true --- - compare x y = - if x.size != y.size then Nothing else - if (x.difference y).is_empty then Ordering.Equal else Nothing + to_display_text : Text + to_display_text self = + texts = self.underlying_dictionary.keys.take (..First 40) . map _.to_display_text + suffix = if self.length > 40 then " and " + (self.length - 40).to_text + " more elements}" else "}" + "{" + (texts.join ", ") + suffix ## --- private: true --- - hash x = - vec = x.to_vector.sort . 
remove_warnings - Ordering.hash vec + pretty : Text + pretty self = + "Hashset.from_vector " + self.to_vector.pretty + +## Convert from a Vector to a Hashset +Hashset.from (that:Vector) (error_on_duplicates:Boolean=False) = + Hashset.from_vector that error_on_duplicates + +## Convert from a Hashset to a Vector +Vector.from (that:Hashset) = + that.to_vector diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso index 65dc02701136..c3ce25534609 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso @@ -32,6 +32,14 @@ from project.Data.Text.Extensions import all Any.to_json : Text Any.to_json self = Json.stringify self +## --- + group: Conversions + icon: convert + --- + Converts the given value to a JSON serialized value. +Any.json_stringify : Text +Any.json_stringify self = self.to_json + ## --- private: true --- @@ -39,6 +47,13 @@ Any.to_json self = Json.stringify self Error.to_json : Text Error.to_json self = self.to_js_object.to_text +## --- + group: Conversions + icon: convert + --- + Converts the given value to a JSON serialized value. 
+Error.json_stringify self -> Text = self + ## --- private: true --- diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/DB_Column_Implementation.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/DB_Column_Implementation.enso index 270b034e4a09..a625870d18d8 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/DB_Column_Implementation.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/DB_Column_Implementation.enso @@ -7,15 +7,12 @@ import Standard.Base.Errors.Common.Index_Out_Of_Bounds import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Illegal_State.Illegal_State import Standard.Base.Internal.Rounding_Helpers -from Standard.Base.Metadata.Widget import Text_Input -from Standard.Base.Widget_Helpers import make_format_chooser import Standard.Table.Fill_With.Fill_With import Standard.Table.Internal.Column_Naming_Helper.Column_Naming_Helper import Standard.Table.Internal.Date_Time_Helpers import Standard.Table.Internal.Problem_Builder.Problem_Builder import Standard.Table.Internal.Value_Type_Helpers -import Standard.Table.Internal.Widget_Helpers import Standard.Table.Refined_Types.Text_Column.Text_Column import Standard.Table.Rows_To_Read.Rows_To_Read from Standard.Table import Auto, Column, Data_Formatter, Previous_Value, Table, Value_Type @@ -394,7 +391,6 @@ type DB_Column_Implementation if this_column.connection.dialect.is_operation_supported operator then make_binary_op this_column operator what new_name else Error.throw (Unsupported_Database_Operation.Error ("`trim "+where.to_text+"`")) - @new_text (Text_Input display=..Always) text_replace (this_column : Column & DB_Column) (term : Text | Regex | Column) (new_text : Text | Column) case_sensitivity:Case_Sensitivity only_first:Boolean = Value_Type.expect_text this_column <| case_sensitivity.disallow_non_default_locale <| input_type = if term.is_error then term else Meta.type_of term @@ -483,7 +479,6 @@ type 
DB_Column_Implementation Error.throw (Illegal_State.Error "The dialect "+this_column.connection.dialect.name+" does not support a boolean type. The implementation of `is_in` should be revised to account for this. This is an internal issue with the Database library.") DB_Column.new new_name this_column.connection new_type_ref new_expr this_column.context - @format (make_format_chooser include_number=False) parse (this_column : Column & DB_Column) (type : Value_Type | Auto) (format : Text | Data_Formatter) on_problems:Problem_Behavior = if type == Auto then Error.throw (Unsupported_Database_Operation.Error "`Auto` parse type") else if format != "" then Error.throw (Unsupported_Database_Operation.Error "Custom formatting") else @@ -494,12 +489,14 @@ type DB_Column_Implementation Database parse just boils down to a simple CAST. internal_do_cast this_column type on_problems - - @format (this_column-> Widget_Helpers.make_format_chooser_for_type this_column.value_type) format (this_column : Column & DB_Column) (format : Text | Date_Time_Formatter | Column) locale:Locale = if format != "" || locale != Locale.default then Error.throw (Unsupported_Database_Operation.Error "Custom formatting") DB_Column_Implementation.cast this_column Value_Type.Char on_problems=Problem_Behavior.Ignore + json_stringify (this_column : Column & DB_Column) = + _ = [this_column] + Error.throw <| Unsupported_Database_Operation.Error "json_stringify" + cast (this_column : Column & DB_Column) value_type:Value_Type on_problems:Problem_Behavior = check_cast_compatibility this_column.value_type value_type allow_unsupported=True internal_do_cast this_column value_type on_problems diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Type_Refinements/Typed_DB_Column_Implementations.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Type_Refinements/Typed_DB_Column_Implementations.enso index cf3e1a0d795c..bc42874acba2 100644 --- 
a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Type_Refinements/Typed_DB_Column_Implementations.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Type_Refinements/Typed_DB_Column_Implementations.enso @@ -157,6 +157,10 @@ type DB_Text_Column_Implementation new_name = (naming_helper column).function_name "index_of" [column, other] make_text_case_op column "INDEX_OF" other case_sensitivity new_name + parse_json column = + _ = [column] + Unimplemented.throw "parse_json is not implemented for database backends." + type DB_Date_Column_Implementation first_of_month column = Value_Type.expect_date column <| diff --git a/distribution/lib/Standard/Table/0.0.0-dev/docs/api/Column.md b/distribution/lib/Standard/Table/0.0.0-dev/docs/api/Column.md index 5e4fa49ab0da..43166d7133df 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/docs/api/Column.md +++ b/distribution/lib/Standard/Table/0.0.0-dev/docs/api/Column.md @@ -59,6 +59,7 @@ - is_nan self -> (Standard.Table.Column.Column&Standard.Base.Any.Any) - is_nothing self -> (Standard.Table.Column.Column&Standard.Base.Any.Any) - is_present self -> (Standard.Table.Column.Column&Standard.Base.Any.Any) + - json_stringify self -> (Standard.Table.Column.Column&Standard.Base.Any.Any) - last self -> Standard.Base.Any.Any!Standard.Base.Errors.Common.Index_Out_Of_Bounds - like self pattern:(Standard.Table.Column.Column|Standard.Base.Data.Text.Text|Standard.Base.Any.Any) -> (Standard.Table.Column.Column&Standard.Base.Any.Any) - map self function:(Standard.Base.Any.Any -> Standard.Base.Any.Any) skip_nothing:Standard.Base.Data.Boolean.Boolean= expected_value_type:(Standard.Table.Value_Type.Value_Type|Standard.Table.Value_Type.Auto)= -> (Standard.Table.Column.Column&Standard.Base.Any.Any)!Standard.Table.Errors.Invalid_Value_Type @@ -102,10 +103,10 @@ - year self -> (Standard.Table.Column.Column&Standard.Base.Any.Any)!Standard.Table.Errors.Invalid_Value_Type - zip self 
right:(Standard.Table.Column.Column|Standard.Table.Table.Table)= keep_unmatched:(Standard.Base.Data.Boolean.Boolean|Standard.Base.Data.Vector.Report_Unmatched)= right_prefix:Standard.Base.Data.Text.Text= on_problems:Standard.Base.Errors.Problem_Behavior.Problem_Behavior= -> Standard.Table.Table.Table - || self other:(Standard.Table.Column.Column|Standard.Base.Any.Any) -> (Standard.Table.Column.Column&Standard.Base.Any.Any) +- default_row_limit_for_read column:Standard.Base.Any.Any -> Standard.Base.Any.Any - Standard.Table.Column.Column.from that:Standard.Base.Data.Vector.Vector name:Standard.Base.Data.Text.Text= -> Standard.Table.Column.Column - type Rest_Of_String - default_date_period column:Standard.Base.Any.Any -> Standard.Base.Any.Any -- default_row_limit_for_read column:Standard.Base.Any.Any -> Standard.Base.Any.Any - Standard.Base.Data.Vector.Vector.from that:Standard.Table.Column.Column -> Standard.Base.Data.Vector.Vector - Standard.Base.Data.Vector.Vector.from that:Standard.Table.Table.Table -> Standard.Base.Data.Vector.Vector - Standard.Table.Column.Column.from that:Standard.Base.Data.Range.Range name:Standard.Base.Data.Text.Text= -> Standard.Table.Column.Column diff --git a/distribution/lib/Standard/Table/0.0.0-dev/docs/api/Row.md b/distribution/lib/Standard/Table/0.0.0-dev/docs/api/Row.md index cd98567ef363..13796fd85db7 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/docs/api/Row.md +++ b/distribution/lib/Standard/Table/0.0.0-dev/docs/api/Row.md @@ -8,7 +8,7 @@ - length self -> Standard.Base.Any.Any - to_dictionary self -> Standard.Base.Any.Any - to_js_object self -> Standard.Base.Any.Any - - to_json_data self -> Standard.Base.Any.Any + - to_json_data self -> Standard.Base.Data.Text.Text - to_vector self -> Standard.Base.Any.Any - type Row_Comparator - compare obj1:Standard.Base.Any.Any obj2:Standard.Base.Any.Any -> Standard.Base.Any.Any diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso 
b/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso index cc3ef52a3e74..eabe676419dd 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso @@ -1858,6 +1858,15 @@ type Column format self (format : Text | Date_Time_Formatter | Column = "") locale:Locale=Locale.default -> Column & Any ! Illegal_Argument = self.implementation.format self format locale + ## --- + aliases: [to_json] + group: Standard.Base.Conversions + icon: conversion + --- + Converts a column into a JSON string representation of its values. + json_stringify self -> Column & Any = + self.implementation.json_stringify self + ## --- group: Standard.Base.Conversions icon: convert diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/In_Memory_Column.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/In_Memory_Column.enso index 8aa1471315a2..f741bb2d6069 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/In_Memory_Column.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/In_Memory_Column.enso @@ -17,7 +17,7 @@ from project.Internal.Storage import enso_to_java polyglot java import org.enso.table.data.column.DataQualityMetrics polyglot java import org.enso.table.data.column.operation.cast.CastOperation -polyglot java import org.enso.table.data.column.operation.JsonOperation +polyglot java import org.enso.table.data.column.operation.TableVizOperation polyglot java import org.enso.table.data.table.Column as Java_Column polyglot java import org.enso.table.error.ValueTypeMismatchException @@ -82,9 +82,9 @@ type In_Memory_Column ## --- private: true --- - Specialised implementation of to_json for a Column. + Specialised implementation of to_json for a Column for use in visualization. 
to_json_data self start:Integer=0 (row_count:Integer=(self:Column).row_count) -> Text = - JsonOperation.apply self.java_column start row_count + TableVizOperation.makeJSONForColumn self.java_column start row_count ## --- private: true diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/In_Memory_Column_Implementation.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/In_Memory_Column_Implementation.enso index 4a0b72740f28..9d7d201577e1 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/In_Memory_Column_Implementation.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/In_Memory_Column_Implementation.enso @@ -60,6 +60,7 @@ polyglot java import org.enso.table.data.column.operation.unary.DuplicateCountOp polyglot java import org.enso.table.data.column.operation.unary.FillFromPreviousOperation polyglot java import org.enso.table.data.column.operation.unary.IsEmptyOperation polyglot java import org.enso.table.data.column.operation.unary.IsNothingOperation +polyglot java import org.enso.table.data.column.operation.unary.JsonOperation polyglot java import org.enso.table.data.column.operation.unary.NotOperation polyglot java import org.enso.table.data.column.operation.unary.RoundOperation polyglot java import org.enso.table.data.column.operation.unary.TextLengthOperation @@ -579,6 +580,9 @@ type In_Memory_Column_Implementation formatter = make_value_formatter_for_value_type this_column.value_type locale format apply_unary_map this_column this_column.name formatter Value_Type.Char on_problems=..Report_Error + json_stringify (this_column : Column & In_Memory_Column) = + apply_unary_operation this_column JsonOperation.INSTANCE + cast (this_column : Column & In_Memory_Column) value_type:Value_Type on_problems:Problem_Behavior = Cast_Helpers.check_cast_compatibility this_column.value_type value_type target_storage_type = Storage.from_value_type value_type on_problems diff --git 
a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Typed_Column_Implementations.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Typed_Column_Implementations.enso index f33aee592720..629712465313 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Typed_Column_Implementations.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Typed_Column_Implementations.enso @@ -18,6 +18,7 @@ polyglot java import org.enso.table.data.column.operation.text.TextPartOperation polyglot java import org.enso.table.data.column.operation.unary.AbsOperation polyglot java import org.enso.table.data.column.operation.unary.CaseOperation polyglot java import org.enso.table.data.column.operation.unary.DoubleUnaryOperation +polyglot java import org.enso.table.data.column.operation.unary.JsonParseOperation polyglot java import org.enso.table.data.column.operation.unary.SignumOperation polyglot java import org.enso.table.data.column.operation.unary.TextCharLengthOperation polyglot java import org.enso.table.data.column.operation.unary.TextLengthOperation @@ -34,7 +35,7 @@ type In_Memory_Numeric_Column_Implementation new_name = naming_helper.function_name "signum" [column] apply_unary_operation column SignumOperation.INSTANCE new_name - randbetween (column : Column & In_Memory_Column) limit seed:Integer=0 = + randbetween (column : Column & In_Memory_Column) limit seed:Integer = Value_Type.expect_numeric column <| Value_Type.expect_numeric limit <| operation = RandBetweenOperation.create (column:In_Memory_Column).java_column (_java_other limit) seed if operation.is_nothing then Error.throw (Illegal_State.Error "Unsupported randbetween operation.") @@ -107,7 +108,7 @@ type In_Memory_Numeric_Column_Implementation apply_unary_operation column DoubleUnaryOperation.LOG10_INSTANCE new_name type In_Memory_Text_Column_Implementation - to_case (column : Column & In_Memory_Column) 
case_option:Case=..Lower locale:Locale=Locale.default = + to_case (column : Column & In_Memory_Column) case_option:Case locale:Locale = Value_Type.expect_text column <| new_name = naming_helper.function_name "to_case" [column, case_option.to_text] operation = CaseOperation.new case_option.to_java locale.java_locale @@ -202,6 +203,12 @@ type In_Memory_Text_Column_Implementation new_name = naming_helper.function_name "index_of" [column, other] apply_case_sensitive_text_operation column other case_sensitivity TextIndexOf.INSTANCE (a -> b -> a.index_of b 0 case_sensitivity) new_name Value_Type.Integer + parse_json (column : Column & In_Memory_Column) = + Value_Type.expect_text column <| + new_name = naming_helper.function_name "parse_json" [column] + operation = JsonParseOperation.new + apply_unary_operation column operation new_name + type In_Memory_Date_Column_Implementation first_of_month (column : Column & In_Memory_Column) = Value_Type.expect_date column <| diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Refined_Types/Text_Column.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Refined_Types/Text_Column.enso index f6dc222bfefb..f748ea72500b 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Refined_Types/Text_Column.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Refined_Types/Text_Column.enso @@ -203,3 +203,13 @@ type Text_Column ``` index_of self (other : Column | Text | Any) case_sensitivity:Case_Sensitivity=..Default -> Column & Numeric_Column = self.operations_implementation.index_of self.column other case_sensitivity + + ## --- + group: Standard.Base.Conversions + icon: convert + --- + Parses the column assuming it contains JSON strings, and returns a column of the resulting values. + + ## ToDo: Add problem handling for invalid JSON strings. 
+ parse_json self -> Column = + self.operations_implementation.parse_json self.column diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Row.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Row.enso index 3e9f113aff68..166a228bca14 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Row.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Row.enso @@ -6,6 +6,7 @@ import project.Errors.No_Such_Column import project.Internal.Widget_Helpers from project.Internal.Storage import java_to_enso +polyglot java import org.enso.table.data.column.operation.TableVizOperation polyglot java import org.enso.table.data.table.Row as Java_Row # necessary for reflection metadata ## Represents a single row of some in-memory Table. @@ -99,9 +100,8 @@ type Row private: true --- Converts this row into a JSON representation. - to_json_data : Text - to_json_data self = - self.java_row.toJsonData + to_json_data self -> Text = + TableVizOperation.makeJSONForRow self.java_row ## --- private: true --- diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso index 29b1ad169fb5..3ee366ffa5a7 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso @@ -1780,6 +1780,19 @@ type Table columns.act_if_any self error_on_missing_columns selected-> self.implementation.format self.underlying selected format locale error_on_missing_columns=True on_problems + ## --- + aliases: [to_json] + group: Standard.Base.Conversions + icon: conversion + --- + Converts the selected columns of the table into JSON string representations of their values. + @columns (Widget_Helpers.make_column_name_multi_selector add_regex=True add_by_type=True) + json_stringify self (columns : Table_Selector = Missing_Argument.throw "columns") error_on_missing_columns:Boolean=True -> Table & Any ! 
Date_Time_Format_Parse_Error | Illegal_Argument = Profile.time_execution _profile_level "Table.json_stringify" <| + columns.act_if_any self error_on_missing_columns selected-> + selected.fold self current->column_name-> + json_column = current.get column_name . json_stringify + current.set json_column column_name + ## --- group: Standard.Base.Conversions icon: convert diff --git a/distribution/lib/Standard/Visualization/0.0.0-dev/docs/api/Table/Visualization.md b/distribution/lib/Standard/Visualization/0.0.0-dev/docs/api/Table/Visualization.md index b036b0717351..4c5212524291 100644 --- a/distribution/lib/Standard/Visualization/0.0.0-dev/docs/api/Table/Visualization.md +++ b/distribution/lib/Standard/Visualization/0.0.0-dev/docs/api/Table/Visualization.md @@ -2,8 +2,6 @@ ## module Standard.Visualization.Table.Visualization - type Helper - make_json value:Standard.Base.Any.Any -> Standard.Base.Any.Any -- _make_json_for_other x:Standard.Base.Any.Any -> Standard.Base.Any.Any -- _make_json_for_value val:Standard.Base.Any.Any level:Standard.Base.Any.Any= -> Standard.Base.Any.Any - apply_filter_to_table table:Standard.Base.Any.Any i:Standard.Base.Any.Any filter_cols:Standard.Base.Any.Any filter_conditions:Standard.Base.Any.Any has_index_col:Standard.Base.Any.Any -> Standard.Base.Any.Any - apply_sort_to_table table:Standard.Base.Any.Any sort_col_index_list:Standard.Base.Any.Any sort_direction_list:Standard.Base.Any.Any -> Standard.Base.Any.Any - get_distinct_values_for_column table:Standard.Base.Any.Any column_index:Standard.Base.Any.Any filter_col:Standard.Base.Any.Any= filter_condition:Standard.Base.Any.Any= -> Standard.Base.Any.Any @@ -15,7 +13,6 @@ - make_json_for_object_matrix current:Standard.Base.Any.Any vector:Standard.Base.Any.Any idx:Standard.Base.Any.Any= -> Standard.Base.Any.Any - make_json_for_row row:Standard.Base.Any.Any -> Standard.Base.Any.Any - make_json_for_vector vector:Standard.Base.Any.Any max_rows:Standard.Base.Any.Any -> Standard.Base.Any.Any -- 
make_json_for_xml_element xml_element:Standard.Base.Any.Any max_items:Standard.Base.Any.Any type:Standard.Base.Data.Text.Text= -> Standard.Base.Any.Any - max_columns -> Standard.Base.Any.Any - prepare_visualization y:Standard.Base.Any.Any max_rows:Standard.Base.Any.Any= -> Standard.Base.Any.Any - Standard.Base.Visualization.Table_Viz_Data.Table_Viz_Data.from that:Standard.Base.Any.Any -> Standard.Base.Visualization.Table_Viz_Data.Table_Viz_Data diff --git a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Table/Visualization.enso b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Table/Visualization.enso index 92c6aaefe863..b44351b106bc 100644 --- a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Table/Visualization.enso +++ b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Table/Visualization.enso @@ -16,7 +16,7 @@ from project.Helpers import time_visualization, short_display ## --- private: true --- - Helper for JsonOperation + Provides the callback for TableVizOperation which converts tables, columns and rows to JSON type Helper ## --- private: true @@ -41,6 +41,7 @@ prepare_visualization y max_rows=1000 = time_visualization Visualization ("prepa v : Vector -> make_json_for_vector v max_rows . to_json v : Array -> prepare_visualization v.to_vector max_rows v : Dictionary -> make_json_for_dictionary v max_rows + v : Hashset -> _make_json_for_hashset v max_rows v : JS_Object -> make_json_for_js_object v max_rows v : Row -> make_json_for_row v t : In_Memory_Table -> t.to_table_viz_json @@ -53,8 +54,8 @@ prepare_visualization y max_rows=1000 = time_visualization Visualization ("prepa JS_Object.from_pairs [["json", value]] . to_json v : Number -> JS_Object.from_pairs [["json", _make_json_for_value v]] . 
to_json - v : XML_Document -> make_json_for_xml_element v.root_element max_rows "XML_Document" - v : XML_Element -> make_json_for_xml_element v max_rows + v : XML_Document -> _make_json_for_xml_element v.root_element max_rows "XML_Document" + v : XML_Element -> _make_json_for_xml_element v max_rows _ -> (Table_Viz_Data.from x).get_js_object.to_json ## --- @@ -164,6 +165,18 @@ make_json_for_dictionary dict max_items = get_child_node_action_link_name = ["get_child_node_link_name", "key"] JS_Object.from_pairs [header, data, all_rows, links, get_child_node_action_link_name, ["type", "Map"], ["child_label", "value"]] . to_json +## --- + private: true + --- + Render Hashset to JSON +private _make_json_for_hashset hashset:Hashset max_items:Integer -> Text = + header = ["header", ["key"]] + all_rows = ["all_rows_count", hashset.size] + as_vector = Warning.clear (hashset.to_vector.take max_items) + mapped = as_vector . map _make_json_for_value + data = ["data", [mapped]] + JS_Object.from_pairs [header, data, all_rows, ["type", "Map"]] . to_json + ## --- private: true --- @@ -182,8 +195,7 @@ make_json_for_js_object js_object max_items = private: true --- Render XML_Element to JSON -make_json_for_xml_element : XML_Element -> Integer -> Text -> Text -make_json_for_xml_element xml_element max_items type:Text="XML_Element" = +private _make_json_for_xml_element xml_element:XML_Element max_items:Integer type:Text="XML_Element" -> Text = header = ["header", ["key", "type", "value"]] all_rows = ["all_rows_count", xml_element.attributes.length + xml_element.children.length] @@ -308,7 +320,7 @@ Table_Viz_Data.from (that:Any) = Table_Viz_Data.Value (_make_json_for_other that --- Create JSON serialization of values. 
_make_json_for_other : Any -> JS_Object -_make_json_for_other x = +private _make_json_for_other x = js_value = Panic.recover Any x.to_js_object supports_js_object = js_value.is_error.not case supports_js_object of @@ -336,7 +348,7 @@ _make_json_for_other x = --- Create JSON serialization of values for the table. _make_json_for_value : Any -> Integer -> JS_Object -_make_json_for_value val level=0 = case val of +private _make_json_for_value val level:Integer=0 = case val of Nothing -> Nothing txt : Text -> txt num : Number -> @@ -352,10 +364,15 @@ _make_json_for_value val level=0 = case val of "[" + (prepared.join ", ") + "]" array : Array -> _make_json_for_value array.to_vector level dict : Dictionary -> - if level != 0 then "{… "+dict.size.to_text+" items}" else + if level != 0 then "{… "+dict.length.to_text+" items}" else truncated = dict.keys.take 5 . map k-> k.to_text + ": " + (_make_json_for_value (val.get k) level+1).to_text prepared = if dict.length > 5 then truncated + ["… " + (dict.length - 5).to_text+ " items"] else truncated "{" + (prepared.join ", ") + "}" + set : Hashset -> + if level != 0 then "{… "+set.length.to_text+" items}" else + truncated = set.to_vector.take 5 . map k-> (_make_json_for_value k level+1).to_text + prepared = if set.length > 5 then truncated + ["… " + (set.length - 5).to_text+ " items"] else truncated + "{" + (prepared.join ", ") + "}" row : Row -> if level != 0 then "Row{" + row.length + " columns}" else truncated = row.column_names.take 5 . 
map _.to_text diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/DataQualityMetrics.java b/std-bits/table/src/main/java/org/enso/table/data/column/DataQualityMetrics.java index e43eee54613f..ab6cdb5528e1 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/DataQualityMetrics.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/DataQualityMetrics.java @@ -17,7 +17,7 @@ import java.util.stream.Collectors; import org.enso.base.Text_Utils; import org.enso.base.polyglot.NumericConverter; -import org.enso.table.data.column.operation.JsonOperation; +import org.enso.table.data.column.operation.unary.JsonOperation; import org.enso.table.data.column.storage.ColumnStorage; import org.enso.table.data.column.storage.ColumnStorageWithInferredStorage; import org.enso.table.data.column.storage.type.AnyObjectType; @@ -211,8 +211,9 @@ public Result getResult() { distinctJson = "[" + distinct.stream() - .map(o -> JsonOperation.objectToJson(o, null)) + .filter(JsonOperation::nativeSupport) .filter(Objects::nonNull) + .map(o -> JsonOperation.INSTANCE.objectToJson(o)) .sorted() .collect(Collectors.joining()) + "]"; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/JsonOperation.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/TableVizOperation.java similarity index 63% rename from std-bits/table/src/main/java/org/enso/table/data/column/operation/JsonOperation.java rename to std-bits/table/src/main/java/org/enso/table/data/column/operation/TableVizOperation.java index ea98fe28473a..702ac63b878c 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/operation/JsonOperation.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/operation/TableVizOperation.java @@ -1,13 +1,13 @@ package org.enso.table.data.column.operation; -import static java.time.temporal.ChronoField.*; +import static java.time.temporal.ChronoField.DAY_OF_MONTH; +import static 
java.time.temporal.ChronoField.HOUR_OF_DAY; +import static java.time.temporal.ChronoField.MINUTE_OF_HOUR; +import static java.time.temporal.ChronoField.MONTH_OF_YEAR; import static java.time.temporal.ChronoField.NANO_OF_SECOND; +import static java.time.temporal.ChronoField.SECOND_OF_MINUTE; +import static java.time.temporal.ChronoField.YEAR; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import java.math.BigDecimal; -import java.math.BigInteger; -import java.time.LocalDate; import java.time.LocalTime; import java.time.ZoneId; import java.time.ZonedDateTime; @@ -18,61 +18,24 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.function.Function; import java.util.stream.Collectors; -import org.enso.base.polyglot.EnsoMeta; import org.enso.table.data.column.DataQualityMetrics; import org.enso.table.data.column.builder.Builder; -import org.enso.table.data.column.storage.ColumnBooleanStorage; -import org.enso.table.data.column.storage.ColumnDoubleStorage; -import org.enso.table.data.column.storage.ColumnLongStorage; -import org.enso.table.data.column.storage.ColumnStorage; -import org.enso.table.data.column.storage.ColumnStorageWithInferredStorage; -import org.enso.table.data.column.storage.type.BooleanType; -import org.enso.table.data.column.storage.type.FloatType; -import org.enso.table.data.column.storage.type.IntegerType; -import org.enso.table.data.column.storage.type.NullType; -import org.enso.table.data.column.storage.type.StorageType; +import org.enso.table.data.column.operation.unary.JsonOperation; +import org.enso.table.data.column.storage.*; +import org.enso.table.data.column.storage.type.*; import org.enso.table.data.table.Column; +import org.enso.table.data.table.Row; import org.enso.table.util.LeastRecentlyUsedCache; import org.graalvm.polyglot.Context; import org.slf4j.Logger; -/** - * A utility class for converting column data to JSON format. 
This is used for visualization - * purposes. - */ -public class JsonOperation { - private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(JsonOperation.class); +/** Extension to JsonOperation for TableViz JSON code. */ +public class TableVizOperation { + private static JsonOperation JSON_OPERATION = + new JsonOperation("Standard.Visualization.Table.Visualization", "Helper", "make_json", true); - private static Function _ensoJsonCallback; - - private static Function ensoJsonCallback() { - if (_ensoJsonCallback != null) { - return _ensoJsonCallback; - } - - try { - var jsonType = EnsoMeta.getType("Standard.Visualization.Table.Visualization", "Helper"); - var method = jsonType.getMember("make_json"); - LOGGER.info("Resolved Enso JSON callback: {}", method); - _ensoJsonCallback = - value -> { - LOGGER.info( - "Calling Enso JSON callback for value: {} (class {})", - value, - value == null ? "null" : value.getClass()); - var result = method.execute(jsonType, value); - return result == null || result.isNull() ? 
"null" : result.asString(); - }; - return _ensoJsonCallback; - } catch (Exception ex) { - LOGGER.warn("Failed to resolve Enso JSON callback.", ex); - return null; - } - } - - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(TableVizOperation.class); private record CacheKey(long storageKey, long start, long length) {} @@ -85,8 +48,38 @@ private static LeastRecentlyUsedCache jsonCache() { return _jsonCache; } - public static String apply(Column source, long start, long maxLength) { - var fullStorage = ColumnStorageWithInferredStorage.resolveStorage(source); + private static LeastRecentlyUsedCache _tableVizCache; + + private static LeastRecentlyUsedCache tableVizCache() { + if (_tableVizCache == null) { + _tableVizCache = new LeastRecentlyUsedCache<>(1000); + } + return _tableVizCache; + } + + private static final int MAX_CELLS_FOR_INLINE = 2500; + + /** Creates a JSON string representing a single Row */ + public static String makeJSONForRow(Row row) { + StringBuilder sb = new StringBuilder(); + sb.append("{"); + for (int i = 0; i < row.column_count(); i++) { + if (i > 0) { + sb.append(","); + } + String name = row.get_name(i); + Object value = row.get_value(i, null); + sb.append(JSON_OPERATION.objectToJson(name)) + .append(":") + .append(JSON_OPERATION.objectToJson(value)); + } + sb.append("}"); + return sb.toString(); + } + + /** Creates a JSON string representing a single Row */ + public static String makeJSONForColumn(Column column, long start, long maxLength) { + var fullStorage = ColumnStorageWithInferredStorage.resolveStorage(column); var cacheKey = new CacheKey(fullStorage.uniqueKey(), start, maxLength); final long finalLength = maxLength; return jsonCache().computeIfAbsent(cacheKey, _ -> applyImpl(start, fullStorage, finalLength)); @@ -97,6 +90,7 @@ private static String applyImpl(long start, ColumnStorage fullStorage, long f // If the start is beyond the size of the 
storage, return an empty array. return "[]"; } + long length = finalLength; if (start + length > fullStorage.getSize()) { // If the requested length goes beyond the size of the storage, adjust it. @@ -125,7 +119,9 @@ private static String createFloatJson( builder.append(","); } builder.append( - doubleStorage.isNothing(i) ? "null" : toJson(doubleStorage.getItemAsDouble(i))); + doubleStorage.isNothing(i) + ? "null" + : JsonOperation.toJson(doubleStorage.getItemAsDouble(i))); context.safepoint(); } builder.append("]"); @@ -140,7 +136,8 @@ private static String createIntegerJson(ColumnLongStorage longStorage, long star if (i > start) { builder.append(","); } - builder.append(longStorage.isNothing(i) ? "null" : toJson(longStorage.getItemAsLong(i))); + builder.append( + longStorage.isNothing(i) ? "null" : JsonOperation.toJson(longStorage.getItemAsLong(i))); context.safepoint(); } builder.append("]"); @@ -157,7 +154,9 @@ private static String createBooleanJson( builder.append(","); } builder.append( - booleanStorage.isNothing(i) ? "null" : toJson(booleanStorage.getItemAsBoolean(i))); + booleanStorage.isNothing(i) + ? 
"null" + : JsonOperation.toJson(booleanStorage.getItemAsBoolean(i))); context.safepoint(); } builder.append("]"); @@ -174,7 +173,7 @@ private static String createObjectJson(ColumnStorage storage, long start, lon } Object value = storage.getItemBoxed(i); - String jsonValue = objectToJson(value); + String jsonValue = JSON_OPERATION.objectToJson(value); builder.append(jsonValue); context.safepoint(); } @@ -189,175 +188,11 @@ private static String createNullJson(long size) { : "[" + String.join(",", Collections.nCopies(checkedSize, "null")) + "]"; } - public static String objectToJson(Object value) { - return objectToJson(value, ensoJsonCallback()); - } - - public static String objectToJson(Object value, Function ensoJsonCallback) { - return switch (value) { - case null -> "null"; - case Boolean b -> toJson(b); - case Long l -> toJson(l); - case Integer i -> toJson(i); - case Short s -> toJson(s); - case Byte b -> toJson(b & 0xFF); - case Double d -> toJson(d); - case Float f -> toJson(f); - case String s -> toJson(s); - case BigInteger bi -> toJson(bi); - case BigDecimal bd -> toJson(bd); - case LocalDate date -> toJson(date); - case LocalTime time -> toJson(time); - case ZonedDateTime zdt -> toJson(zdt); - default -> { - if (ensoJsonCallback == null) { - LOGGER.debug("Could not serialize value of type {}.", value.getClass()); - yield "null"; - } else { - yield ensoJsonCallback.apply(value); - } - } - }; - } - - private static String toJson(boolean value) { - return value ? 
"true" : "false"; - } - - private static final long MAX_JSON_LONG = 9007199254740991L; - private static final BigInteger MAX_JSON_LONG_BIGINT = BigInteger.valueOf(MAX_JSON_LONG); - - private static final DateTimeFormatter TIME_SHORT_FORMAT = - DateTimeFormatter.ofPattern("HH:mm:ss"); - private static final DateTimeFormatter TIME_LONG_FORMAT = - DateTimeFormatter.ofPattern("HH:mm:ss.SSSSSS"); - private static final DateTimeFormatter DATE_TIME_SHORT_FORMAT = - DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); - private static final DateTimeFormatter DATE_TIME_LONG_FORMAT = - DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS"); - private static final DateTimeFormatter ZONE_FORMAT = DateTimeFormatter.ofPattern("'['zz']'"); - - private static String toJson(long value) { - if (value < -MAX_JSON_LONG || value > MAX_JSON_LONG) { - return "{\"type\":\"Integer\",\"value\":\"" + value + "\"}"; - } - return String.valueOf(value); - } - - private static String toJson(double value) { - if (Double.isNaN(value)) { - return "{\"_display_text_\":\"NaN\",\"type\":\"Float\",\"value\":\"NaN\"}"; - } - if (Double.isInfinite(value)) { - var txtValue = value > 0 ? 
"Infinity" : "-Infinity"; - return "{\"_display_text_\":\"" - + txtValue - + "\",\"type\":\"Float\",\"value\":\"" - + txtValue - + "\"}"; - } - return String.valueOf(value); - } - - private static String toJson(BigInteger value) { - if (value.abs().compareTo(MAX_JSON_LONG_BIGINT) > 0) { - return "{\"type\":\"Integer\",\"value\":\"" + value + "\"}"; - } - return value.toString(); - } - - private static String toJson(BigDecimal value) { - return "{\"type\":\"Decimal\",\"value\":\"" - + value - + "\",\"scale\":" - + value.scale() - + ",\"precision\":" - + value.precision() - + "}"; - } - - private static String toJson(String value) { - try { - return OBJECT_MAPPER.writeValueAsString(value); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - } - - private static String toJson(LocalDate date) { - return "{\"type\":\"Date\",\"constructor\":\"new\",\"_display_text_\":\"" - + date.toString() - + "\",\"day\":" - + date.getDayOfMonth() - + ",\"month\":" - + date.getMonthValue() - + ",\"year\":" - + date.getYear() - + "}"; - } - - private static String toJson(LocalTime time) { - var timeString = time.format(time.getNano() == 0 ? TIME_SHORT_FORMAT : TIME_LONG_FORMAT); - return "{\"type\":\"Time_Of_Day\",\"constructor\":\"new\",\"_display_text_\":\"" - + timeString - + "\",\"hour\":" - + time.getHour() - + ",\"minute\":" - + time.getMinute() - + ",\"second\":" - + time.getSecond() - + ",\"nanosecond\":" - + time.getNano() - + "}"; - } - - private static String toJson(ZonedDateTime datetime) { - var datetimeString = - datetime.format(datetime.getNano() == 0 ? DATE_TIME_SHORT_FORMAT : DATE_TIME_LONG_FORMAT); - var zoneString = - datetime.getZone() == ZoneId.systemDefault() ? 
"" : datetime.format(ZONE_FORMAT); - var zone_json = - "{\"type\":\"Time_Zone\",\"constructor\":\"parse\",\"id\":\"" - + datetime.getZone().getId() - + "\"}"; - return "{\"type\":\"Date_Time\",\"constructor\":\"new\",\"_display_text_\":\"" - + datetimeString - + zoneString - + "\",\"year\":" - + datetime.getYear() - + ",\"month\":" - + datetime.getMonthValue() - + ",\"day\":" - + datetime.getDayOfMonth() - + ",\"hour\":" - + datetime.getHour() - + ",\"minute\":" - + datetime.getMinute() - + ",\"second\":" - + datetime.getSecond() - + ",\"nanosecond\":" - + datetime.getNano() - + ",\"zone\":" - + zone_json - + "}"; - } - - private static LeastRecentlyUsedCache _tableVizCache; - - private static LeastRecentlyUsedCache tableVizCache() { - if (_tableVizCache == null) { - _tableVizCache = new LeastRecentlyUsedCache<>(1000); - } - return _tableVizCache; - } - - private static final int MAX_CELLS_FOR_INLINE = 2500; - /** * Creates a JSON string representing the table visualization metadata, including column headers, * value types, and various properties related to the table's structure and behavior. 
*/ - public static String makeTableVizJSON( + public static String makeJSON( String versionId, Column[] columns, long allRowsCount, @@ -384,7 +219,7 @@ public static String makeTableVizJSON( .computeIfAbsent( versionId, _ -> - makeTableVizJSON( + makeJSON( versionId, columns, columns[0].getSize(), @@ -409,7 +244,7 @@ public static String makeTableVizJSON( headers.append(","); valueTypes.append(","); } - headers.append(toJson(columns[i].getName())); + headers.append(JSON_OPERATION.objectToJson(columns[i].getName())); var columnType = columns[i].getStorageType().ensoConstructorName(); valueTypes @@ -452,7 +287,7 @@ private static void appendProperty(StringBuilder builder, String name, Object va if (builder.length() > 1) { builder.append(","); } - builder.append("\"").append(name).append("\":").append(objectToJson(value)); + builder.append("\"").append(name).append("\":").append(JSON_OPERATION.objectToJson(value)); } private static void makeDataQualityMetrics(StringBuilder json, List> dqs) { @@ -514,7 +349,7 @@ private static boolean addMetric( return false; } - public static boolean addRange( + private static boolean addRange( StringBuilder builder, List> metrics, boolean first) { boolean hasRange = false; List ranges = new ArrayList<>(); @@ -531,7 +366,7 @@ public static boolean addRange( Boolean.TRUE.equals(metric.get(DataQualityMetrics.SINGLE_VALUE)) ? 
toDisplayText(min) : toDisplayText(min) + " - " + toDisplayText(metric.get(DataQualityMetrics.MAXIMUM)); - ranges.add(objectToJson(rangeValue)); + ranges.add(JSON_OPERATION.objectToJson(rangeValue)); } if (!hasRange) { @@ -610,7 +445,8 @@ private static void appendMetric( if (i != 0) { builder.append(","); } - builder.append(objectToJson(metrics.get(i).getOrDefault(metric, defaultValue))); + builder.append( + JSON_OPERATION.objectToJson(metrics.get(i).getOrDefault(metric, defaultValue))); } builder.append("]"); } @@ -618,7 +454,7 @@ private static void appendMetric( private static String dataToJson(Column[] columns) { var output = new ArrayList(); for (Column column : columns) { - output.add(apply(column, 0, column.getSize())); + output.add(makeJSONForColumn(column, 0, column.getSize())); } return output.stream().collect(Collectors.joining(",", "[", "]")); } diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/unary/JsonOperation.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/unary/JsonOperation.java new file mode 100644 index 000000000000..fe72c3aa939f --- /dev/null +++ b/std-bits/table/src/main/java/org/enso/table/data/column/operation/unary/JsonOperation.java @@ -0,0 +1,287 @@ +package org.enso.table.data.column.operation.unary; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.LocalDate; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.util.function.Function; +import org.enso.base.polyglot.EnsoMeta; +import org.enso.table.data.column.builder.Builder; +import org.enso.table.data.column.operation.StorageIterators; +import org.enso.table.data.column.operation.UnaryOperation; +import org.enso.table.data.column.storage.ColumnStorage; +import 
org.enso.table.data.column.storage.ColumnStorageWithInferredStorage; +import org.enso.table.data.column.storage.type.TextType; +import org.enso.table.data.table.problems.MapOperationProblemAggregator; +import org.slf4j.Logger; + +/** + * Create a JSON serialized column from an input Column. The resulting column will contain JSON + * strings representing the values in the source column. The method will attempt to use native JSON + * serialization for supported types, and will fall back to Enso `Json.stringify` when needed. + */ +public class JsonOperation implements UnaryOperation { + private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(JsonOperation.class); + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + public static JsonOperation INSTANCE = + new JsonOperation("Standard.Base.Data.Json", "Json", "stringify", false); + + private final String ensoCallbackModule; + private final String ensoCallbackType; + private final String ensoCallbackMethod; + private final boolean includeDisplayText; + private Function _ensoCallback; + + public JsonOperation( + String ensoCallbackModule, + String ensoCallbackType, + String ensoCallbackMethod, + boolean includeDisplayText) { + this.ensoCallbackModule = ensoCallbackModule; + this.ensoCallbackType = ensoCallbackType; + this.ensoCallbackMethod = ensoCallbackMethod; + this.includeDisplayText = includeDisplayText; + } + + private Function ensoCallback() { + if (_ensoCallback != null) { + return _ensoCallback; + } + + try { + var jsonType = EnsoMeta.getType(ensoCallbackModule, ensoCallbackType); + var method = jsonType.getMember(ensoCallbackMethod); + LOGGER.info("Resolved Enso JSON callback: {}", method); + _ensoCallback = + value -> { + LOGGER.debug( + "Calling Enso JSON callback for value: {} (class {})", + value, + value == null ? "null" : value.getClass()); + var result = method.execute(jsonType, value); + return result == null || result.isNull() ? 
"null" : result.asString(); + }; + return _ensoCallback; + } catch (Exception ex) { + LOGGER.warn("Failed to resolve Enso JSON callback.", ex); + return null; + } + } + + @Override + public String getName() { + return "json_stringify"; + } + + @Override + public boolean canApply(ColumnStorage storage) { + return true; + } + + @Override + public ColumnStorage apply( + ColumnStorage storage, MapOperationProblemAggregator problemAggregator) { + var fullStorage = ColumnStorageWithInferredStorage.resolveStorage(storage); + return StorageIterators.buildObjectOverStorage( + fullStorage, + false, + Builder.getForText(TextType.VARIABLE_LENGTH, storage.getSize()), + (builder, _, value) -> builder.append(objectToJson(value))); + } + + public String objectToJson(Object value) { + return switch (value) { + case null -> "null"; + case Boolean b -> toJson(b); + case Long l -> toJson(l); + case Integer i -> toJson(i); + case Short s -> toJson(s); + case Byte b -> toJson(b & 0xFF); + case Double d -> toJson(d); + case Float f -> toJson(f); + case String s -> toJson(s); + case BigInteger bi -> toJson(bi); + case BigDecimal bd -> toJson(bd); + case LocalDate date -> toJson(date, includeDisplayText); + case LocalTime time -> toJson(time, includeDisplayText); + case ZonedDateTime zdt -> toJson(zdt, includeDisplayText); + default -> { + var callback = ensoCallback(); + if (callback == null) { + LOGGER.info("Could not serialize value of type {}.", value.getClass()); + yield "null"; + } else { + yield callback.apply(value); + } + } + }; + } + + /** + * Check if a value is natively supported by the JSON Operation + * + * @param value to check + * @return true if the value is natively supported, false otherwise + */ + public static boolean nativeSupport(Object value) { + return switch (value) { + case null -> true; + case Boolean _, + Long _, + Integer _, + Short _, + Byte _, + Double _, + Float _, + String _, + BigInteger _, + BigDecimal _, + LocalDate _, + LocalTime _, + ZonedDateTime _ 
-> + true; + default -> false; + }; + } + + public static String toJson(boolean value) { + return value ? "true" : "false"; + } + + private static final long MAX_JSON_LONG = 9007199254740991L; + private static final BigInteger MAX_JSON_LONG_BIGINT = BigInteger.valueOf(MAX_JSON_LONG); + + public static String toJson(long value) { + if (value < -MAX_JSON_LONG || value > MAX_JSON_LONG) { + return "{\"type\":\"Integer\",\"value\":\"" + value + "\"}"; + } + return String.valueOf(value); + } + + public static String toJson(double value) { + if (Double.isNaN(value)) { + return "{\"_display_text_\":\"NaN\",\"type\":\"Float\",\"value\":\"NaN\"}"; + } + if (Double.isInfinite(value)) { + var txtValue = value > 0 ? "Infinity" : "-Infinity"; + return "{\"_display_text_\":\"" + + txtValue + + "\",\"type\":\"Float\",\"value\":\"" + + txtValue + + "\"}"; + } + return String.valueOf(value); + } + + private static String toJson(BigInteger value) { + if (value.abs().compareTo(MAX_JSON_LONG_BIGINT) > 0) { + return "{\"type\":\"Integer\",\"value\":\"" + value + "\"}"; + } + return value.toString(); + } + + private static String toJson(BigDecimal value) { + return "{\"type\":\"Decimal\",\"value\":\"" + + value + + "\",\"scale\":" + + value.scale() + + ",\"precision\":" + + value.precision() + + "}"; + } + + private static String toJson(String value) { + try { + return OBJECT_MAPPER.writeValueAsString(value); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } + + private static String toJson(LocalDate date, boolean includeDisplayText) { + String displayText = + includeDisplayText ? 
"\"_display_text_\":\"" + date.toString() + "\"," : ""; + return "{\"type\":\"Date\",\"constructor\":\"new\"," + + displayText + + "\"day\":" + + date.getDayOfMonth() + + ",\"month\":" + + date.getMonthValue() + + ",\"year\":" + + date.getYear() + + "}"; + } + + private static final DateTimeFormatter TIME_SHORT_FORMAT = + DateTimeFormatter.ofPattern("HH:mm:ss"); + private static final DateTimeFormatter TIME_LONG_FORMAT = + DateTimeFormatter.ofPattern("HH:mm:ss.SSSSSS"); + + private static String toJson(LocalTime time, boolean includeDisplayText) { + var timeString = + includeDisplayText + ? time.format(time.getNano() == 0 ? TIME_SHORT_FORMAT : TIME_LONG_FORMAT) + : ""; + String displayText = includeDisplayText ? "\"_display_text_\":\"" + timeString + "\"," : ""; + return "{\"type\":\"Time_Of_Day\",\"constructor\":\"new\"," + + displayText + + "\"hour\":" + + time.getHour() + + ",\"minute\":" + + time.getMinute() + + ",\"second\":" + + time.getSecond() + + ",\"nanosecond\":" + + time.getNano() + + "}"; + } + + private static final DateTimeFormatter DATE_TIME_SHORT_FORMAT = + DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); + private static final DateTimeFormatter DATE_TIME_LONG_FORMAT = + DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS"); + private static final DateTimeFormatter ZONE_FORMAT = DateTimeFormatter.ofPattern("'['zz']'"); + + private static String toJson(ZonedDateTime datetime, boolean includeDisplayText) { + var datetimeString = + includeDisplayText + ? datetime.format( + datetime.getNano() == 0 ? DATE_TIME_SHORT_FORMAT : DATE_TIME_LONG_FORMAT) + : ""; + var zoneString = + includeDisplayText && datetime.getZone() != ZoneId.systemDefault() + ? datetime.format(ZONE_FORMAT) + : ""; + String displayText = + includeDisplayText ? 
"\"_display_text_\":\"" + datetimeString + zoneString + "\"," : ""; + var zone_json = + "{\"type\":\"Time_Zone\",\"constructor\":\"parse\",\"id\":\"" + + datetime.getZone().getId() + + "\"}"; + return "{\"type\":\"Date_Time\",\"constructor\":\"new\"," + + displayText + + "\"year\":" + + datetime.getYear() + + ",\"month\":" + + datetime.getMonthValue() + + ",\"day\":" + + datetime.getDayOfMonth() + + ",\"hour\":" + + datetime.getHour() + + ",\"minute\":" + + datetime.getMinute() + + ",\"second\":" + + datetime.getSecond() + + ",\"nanosecond\":" + + datetime.getNano() + + ",\"zone\":" + + zone_json + + "}"; + } +} diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/unary/JsonParseOperation.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/unary/JsonParseOperation.java new file mode 100644 index 000000000000..ce80532bcbac --- /dev/null +++ b/std-bits/table/src/main/java/org/enso/table/data/column/operation/unary/JsonParseOperation.java @@ -0,0 +1,89 @@ +package org.enso.table.data.column.operation.unary; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.function.Function; +import org.enso.base.polyglot.EnsoMeta; +import org.enso.table.data.column.builder.Builder; +import org.enso.table.data.column.operation.StorageIterators; +import org.enso.table.data.column.operation.UnaryOperation; +import org.enso.table.data.column.storage.ColumnStorage; +import org.enso.table.data.column.storage.type.StorageType; +import org.enso.table.data.column.storage.type.TextType; +import org.enso.table.data.table.problems.MapOperationProblemAggregator; +import org.graalvm.polyglot.Value; + +/** Deserializes JSON to Objects in the Table. 
*/ +public class JsonParseOperation implements UnaryOperation { + @Override + public String getName() { + return "parse_json"; + } + + @Override + public boolean canApply(ColumnStorage storage) { + return StorageType.ofStorage(storage) instanceof TextType; + } + + @Override + public ColumnStorage apply( + ColumnStorage storage, MapOperationProblemAggregator problemAggregator) { + var mapper = new ObjectMapper(); + var inferredBuilder = Builder.getInferredBuilder(storage.getSize(), problemAggregator); + + return StorageIterators.buildObjectOverStorage( + TextType.VARIABLE_LENGTH.asTypedStorage(storage), + true, + inferredBuilder, + (builder, index, value) -> { + try { + builder.append(parseJson(mapper, value)); + } catch (JsonProcessingException e) { + problemAggregator.reportIllegalArgumentError( + "Failed to parse JSON: " + e.getMessage(), index); + builder.appendNulls(1); + } catch (IllegalArgumentException e) { + problemAggregator.reportIllegalArgumentError( + "Unsupported JSON node type: " + e.getMessage() + " when parsing " + value, index); + builder.appendNulls(1); + } + }); + } + + private static Object parseJson(ObjectMapper mapper, String json) throws JsonProcessingException { + // TODO: JSON OBJECT nodes are not yet supported; parseJsonNode throws + // IllegalArgumentException for them, which is reported via the problem aggregator above. + var node = mapper.readTree(json); + return parseJsonNode(node); + } + + private static Object parseJsonNode(JsonNode node) throws JsonProcessingException { + return switch (node.getNodeType()) { + case NULL -> null; + case BOOLEAN -> node.asBoolean(); + case STRING -> node.asText(); + case NUMBER -> node.isIntegralNumber() ? 
node.asLong() : node.asDouble(); + case ARRAY -> parseJsonArray(node); + default -> throw new IllegalArgumentException( + "Unsupported JSON node type: " + node.getNodeType() + " when parsing " + node.asText()); + }; + } + + private static Function vectorConstructor; + + private static Value parseJsonArray(JsonNode node) + throws JsonProcessingException { + var array = new Object[node.size()]; + for (int i = 0; i < array.length; i++) { + array[i] = parseJsonNode(node.get(i)); + } + + if (vectorConstructor == null) { + var vectorType = EnsoMeta.getType("Standard.Base.Data.Vector", "Vector"); + var method = vectorType.getMember("from_polyglot_array"); + vectorConstructor = arr -> method.execute(vectorType, arr); + } + + return vectorConstructor.apply(array); + } +} diff --git a/std-bits/table/src/main/java/org/enso/table/data/table/Column.java b/std-bits/table/src/main/java/org/enso/table/data/table/Column.java index 98a387f276bf..e87ba44d0108 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/table/Column.java +++ b/std-bits/table/src/main/java/org/enso/table/data/table/Column.java @@ -3,7 +3,7 @@ import java.util.List; import org.enso.base.polyglot.Polyglot_Utils; import org.enso.table.data.column.builder.Builder; -import org.enso.table.data.column.operation.JsonOperation; +import org.enso.table.data.column.operation.TableVizOperation; import org.enso.table.data.column.operation.masks.IndexMapper; import org.enso.table.data.column.operation.masks.MaskOperation; import org.enso.table.data.column.storage.ColumnStorage; @@ -229,7 +229,7 @@ public List asList() { public String tableVizJSON( List valueTypeDisplay, long allRowsCount, boolean useServerMode) { - return JsonOperation.makeTableVizJSON( + return TableVizOperation.makeJSON( "Column-" + storage.uniqueKey(), new Column[] {this}, allRowsCount, diff --git a/std-bits/table/src/main/java/org/enso/table/data/table/Row.java b/std-bits/table/src/main/java/org/enso/table/data/table/Row.java index 
3db800eab3e6..9a251fe46195 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/table/Row.java +++ b/std-bits/table/src/main/java/org/enso/table/data/table/Row.java @@ -2,7 +2,6 @@ import java.util.Arrays; import java.util.function.Function; -import org.enso.table.data.column.operation.JsonOperation; public class Row { private final Table table; @@ -33,6 +32,10 @@ public int column_count() { return table.getColumns().length; } + public String get_name(int index) { + return table.getColumns()[index].getName(); + } + public Object get_value(int index, Function ifMissing) { var count = column_count(); if (index < -count || index >= count) { @@ -48,21 +51,4 @@ public Object get_value(String name, Function ifMissing) { var column = table.getColumnByName(name); return column == null ? ifMissing.apply(name) : column.getItem(rowIndex); } - - public String toJsonData() { - StringBuilder sb = new StringBuilder(); - sb.append("{"); - for (int i = 0; i < column_count(); i++) { - if (i > 0) { - sb.append(","); - } - String name = table.getColumns()[i].getName(); - Object value = get_value(i, null); - sb.append(JsonOperation.objectToJson(name)) - .append(":") - .append(JsonOperation.objectToJson(value)); - } - sb.append("}"); - return sb.toString(); - } } diff --git a/std-bits/table/src/main/java/org/enso/table/data/table/Table.java b/std-bits/table/src/main/java/org/enso/table/data/table/Table.java index dd9b4bbb4a12..7bccc56f2f27 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/table/Table.java +++ b/std-bits/table/src/main/java/org/enso/table/data/table/Table.java @@ -17,8 +17,8 @@ import org.enso.base.text.TextFoldingStrategy; import org.enso.table.aggregations.Aggregator; import org.enso.table.data.column.builder.Builder; -import org.enso.table.data.column.operation.JsonOperation; import org.enso.table.data.column.operation.StorageIterators; +import org.enso.table.data.column.operation.TableVizOperation; import 
org.enso.table.data.column.operation.masks.IndexMapper; import org.enso.table.data.column.storage.ColumnBooleanStorage; import org.enso.table.data.column.storage.ColumnStorage; @@ -670,7 +670,7 @@ public Table reverse() { public String tableVizJSON( List valueTypeDisplay, long allRowsCount, boolean useServerMode) { - return JsonOperation.makeTableVizJSON( + return TableVizOperation.makeJSON( versionId, columns, allRowsCount, useServerMode, valueTypeDisplay, "get_row"); } } diff --git a/test/Base_Tests/src/Data/Hashset_Spec.enso b/test/Base_Tests/src/Data/Hashset_Spec.enso index d333bc6fcd05..782929b0352f 100644 --- a/test/Base_Tests/src/Data/Hashset_Spec.enso +++ b/test/Base_Tests/src/Data/Hashset_Spec.enso @@ -66,8 +66,22 @@ add_specs suite_builder = group_builder.specify "should be able to convert to text" <| s1 = Hashset.from_vector ["1", "2", "3"] s2 = Hashset.from_vector [1, 2, 3] - s1.to_text.should_equal "Hashset{'1', '2', '3'}" - s2.to_text.should_equal "Hashset{1, 2, 3}" + s1.to_text.should_equal "{1, 2, 3}" + s2.to_text.should_equal "{1, 2, 3}" + + group_builder.specify "should be able to convert to display text" <| + s1 = Hashset.from_vector ["1", "2", "3"] + s2 = Hashset.from_vector [1, 2, 3] + s3 = Hashset.from_vector (0.up_to 100) + s1.to_display_text.should_equal "{1, 2, 3}" + s2.to_display_text.should_equal "{1, 2, 3}" + s3.to_display_text.should_contain "and 60 more elements" + + group_builder.specify "should be able to convert to Enso code" <| + s1 = Hashset.from_vector ["1", "2", "3"] + s2 = Hashset.from_vector [1, 2, 3] + s1.pretty.should_equal "Hashset.from_vector ['1', '2', '3']" + s2.pretty.should_equal "Hashset.from_vector [1, 2, 3]" main filter=Nothing = suite = Test.build suite_builder-> diff --git a/test/Base_Tests/src/Data/Json_Spec.enso b/test/Base_Tests/src/Data/Json_Spec.enso index ba3546e74aa2..eec228528e65 100644 --- a/test/Base_Tests/src/Data/Json_Spec.enso +++ b/test/Base_Tests/src/Data/Json_Spec.enso @@ -148,6 +148,13 @@ 
add_specs suite_builder = Author.Value "Tolkien" 1892 . to_json . should_equal <| '{"type":"Author","constructor":"Value","name":"Tolkien","year_of_birth":1892}' + group_builder.specify "should convert arbitrary types to JSON via json_stringify" <| + 1.json_stringify.should_equal "1" + 1.54.json_stringify.should_equal "1.54" + ["foo", "bar", "baz"].json_stringify.should_equal '["foo","bar","baz"]' + Author.Value "Tolkien" 1892 . json_stringify . should_equal <| + '{"type":"Author","constructor":"Value","name":"Tolkien","year_of_birth":1892}' + group_builder.specify "should render NaN and Infinity to null" <| Number.nan.to_json . should_equal "null" Number.positive_infinity.to_json . should_equal "null" diff --git a/test/Table_Tests/src/In_Memory/Json_Function_Spec.enso b/test/Table_Tests/src/In_Memory/Json_Function_Spec.enso new file mode 100644 index 000000000000..3e4b35056942 --- /dev/null +++ b/test/Table_Tests/src/In_Memory/Json_Function_Spec.enso @@ -0,0 +1,55 @@ +from Standard.Base import all + +from Standard.Table import all + +from Standard.Test import all + +from project.Util import all + +type Data + Value ~table raw_data + + setup = + raw_data = [1,3.14,True, False, "Hello World", 'This is "Some" Fun Text With \'Quotes\' and \t and \n', Json.parse '[1,2,3,4]', Json.parse '{"A":true,"B":null}', Date.today, Time_Of_Day.now, Date_Time.now, Date_Time.now.to_utc, 2^53-2, 2^58-5] + column0 = Column.from_vector "mixed" raw_data + table = Table.new [column0] + with_row_number = table.add_row_number + Data.Value with_row_number raw_data + +add_specs suite_builder = + data = Data.setup + + suite_builder.group "Column.json_stringify" group_builder-> + group_builder.specify "should be able to stringify a column of mixed data" <| + json_column = data.table.at "mixed" . 
json_stringify + json_column.name.should_equal "mixed" + json_column.row_count.should_equal data.raw_data.length + expected = data.raw_data.map .to_json + json_column.to_vector.should_equal expected + + group_builder.specify "should be able to stringify a column of row numbers" <| + json_column = data.table.at "Row" . json_stringify + json_column.name.should_equal "Row" + json_column.row_count.should_equal data.raw_data.length + expected = 0.up_to data.table.row_count . map .to_json + json_column.to_vector.should_equal expected + + group_builder.specify "should be able to stringify a filtered column" <| + json_column = data.table.filter (expr "[Row]%2==0") . at "mixed" . json_stringify + json_column.name.should_equal "mixed" + json_column.row_count.should_equal (data.raw_data.length / 2).floor + expected = 0.up_to data.table.row_count step=2 . map i-> data.raw_data.at i . to_json + json_column.to_vector.should_equal expected + + suite_builder.group "Table.json_stringify" group_builder-> + group_builder.specify "should be able to stringify a column of mixed data" <| + json_column = data.table.at "mixed" . json_stringify + json_column.name.should_equal "mixed" + json_column.row_count.should_equal data.raw_data.length + expected = data.raw_data.map .to_json + json_column.to_vector.should_equal expected + +main filter=Nothing = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter filter