|
74 | 74 |
|
75 | 75 | @sect{Getting Started} |
76 | 76 | @hl.scala |
77 | | - "com.lihaoyi" %% "upickle" % "4.0.2" // SBT |
78 | | - ivy"com.lihaoyi::upickle:4.0.2" // Mill |
| 77 | + "com.lihaoyi" %% "upickle" % "4.1.0" // SBT |
| 78 | + ivy"com.lihaoyi::upickle:4.1.0" // Mill |
79 | 79 |
|
80 | 80 | @p |
81 | 81 | And then you can immediately start writing and reading common Scala |
|
93 | 93 | @p |
94 | 94 | For ScalaJS applications, use these dependencies instead:
95 | 95 | @hl.scala |
96 | | - "com.lihaoyi" %%% "upickle" % "4.0.2" // SBT |
97 | | - ivy"com.lihaoyi::upickle::4.0.2" // Mill |
| 96 | + "com.lihaoyi" %%% "upickle" % "4.1.0" // SBT |
| 97 | + ivy"com.lihaoyi::upickle::4.1.0" // Mill |
98 | 98 |
|
99 | 99 | @sect{Scala Versions} |
100 | 100 | @p |
|
512 | 512 | follows: |
513 | 513 |
|
514 | 514 | @hl.ref(exampleTests, Seq("stringLongs", "")) |
| 515 | + @sect{@@flatten} |
| 516 | + @p |
| 517 | + The @hl.scala{@@flatten} annotation can only be applied to: |
| 518 | + |
| 519 | + @ul |
| 520 | + @li |
| 521 | + @hl.scala{case class}es: Flattens the fields of a nested case class into the parent structure.
| 522 | + |
| 523 | + @hl.scala |
| 524 | + case class A(i: Int, @@flatten b: B) |
| 525 | + case class B(msg: String) |
| 526 | + implicit val bRw: ReadWriter[B] = macroRW
| 527 | + implicit val aRw: ReadWriter[A] = macroRW
| 528 | + write(A(1, B("Hello"))) // {"i":1, "msg": "Hello"} |
515 | 529 |
|
516 | 530 |
|
| 531 | + @li |
| 532 | + @hl.scala{Iterable}: Flattens the key-value pairs of an @hl.scala{Iterable[(String, _)]} into the parent structure.
| 533 | + |
| 534 | + |
| 535 | + @hl.scala |
| 536 | + case class A(i: Int, @@flatten map: Map[String, String]) |
| 537 | + implicit val rw: ReadWriter[A] = macroRW |
| 538 | + val map = Map("a" -> "1", "b" -> "2") |
| 539 | + write(A(1, map)) // {"i":1, "a":"1", "b": "2"} |
| 540 | + |
| 541 | + @li
| 542 | + Nested flattening: the @hl.scala{@@flatten} annotation can also be
| 543 | + applied recursively to fields within nested case classes, as in the
| 544 | + following example.
| 545 | + @hl.scala |
| 546 | + case class Outer(msg: String, @@flatten inner: Inner) |
| 547 | + case class Inner(@@flatten inner2: Inner2) |
| 548 | + case class Inner2(i: Int) |
| 549 | + |
| 550 | + implicit val inner2Rw: ReadWriter[Inner2] = macroRW
| 551 | + implicit val innerRw: ReadWriter[Inner] = macroRW
| 552 | + implicit val outerRw: ReadWriter[Outer] = macroRW
| 553 | + |
| 554 | + write(Outer("abc", Inner(Inner2(7)))) // {"msg": "abc", "i": 7} |
| 555 | + |
| 556 | + |
| 557 | + @p |
| 558 | + The Reader also recognizes the @hl.scala{@@flatten} annotation. |
| 559 | + @hl.scala |
| 560 | + case class A(i: Int, @@flatten b: B) |
| 561 | + case class B(msg: String) |
| 562 | + implicit val bRw: ReadWriter[B] = macroRW
| 563 | + implicit val aRw: ReadWriter[A] = macroRW
| 564 | + read[A]("""{"i": 1, "msg": "Hello"}""") // A(1, B("Hello"))
| 565 | + // The top-level field "msg": "Hello" is correctly mapped to the field in B. |
| 566 | + |
| 567 | + @p |
| 568 | + For a flattened collection, during deserialization all key-value pairs in the JSON that do not directly map to a
| 569 | + specific field of the case class are stored in the @hl.scala{Map}.
| 570 | + |
| 571 | + @p |
| 572 | + If a key in the JSON does not correspond to any field in the case class, it is stored in the collection. |
| 573 | + |
| 574 | + @hl.scala |
| 575 | + case class A(i: Int, @@flatten map: Map[String, String])
| 576 | + implicit val rw: ReadWriter[A] = macroRW
| 577 | + read[A]("""{"i":1, "a": "1", "b": "2"}""") // Output: A(1, Map("a" -> "1", "b" -> "2"))
| 578 | + |
| 579 | + @p |
| 580 | + If the JSON contains no leftover keys to store in the collection, the field is deserialized as an empty collection.
| 581 | + |
| 582 | + @hl.scala |
| 583 | + read[A]("""{"i":1}""")
| 584 | + // Output: A(1, Map.empty) |
| 585 | + |
| 586 | + @p |
| 587 | + If a key’s value in the JSON cannot be converted to the Map’s value type (e.g., @hl.scala{String}), the deserialization fails. |
| 588 | + @hl.scala |
| 589 | + read[A]("""{"i":1, "a":{"name":"foo"}}""")
| 590 | + // Error: Failed to deserialize because the value for "a" is not a String, as required by Map[String, String]. |
| 591 | + |
| 592 | + |
| 593 | + @sect{Flatten Limitations} |
| 594 | + @ol |
| 595 | + @li |
| 596 | + Flattening more than one collection into the same level is not supported,
| 597 | + because when deriving a @hl.scala{Reader} it would be unclear which collection
| 598 | + a leftover key-value pair should be stored in (see the sketch after this list).
| 599 | + @li |
| 600 | + Type parameters do not seem to be properly resolved in the following scenario: |
| 601 | + |
| 602 | + @hl.scala |
| 603 | + case class Param[T](@@flatten t: T) |
| 604 | + object Param { |
| 605 | + // compile error when this def is called to derive an instance
| 606 | + implicit def rw[T: RW]: RW[Param[T]] = upickle.default.macroRW
| 607 | + // works when written for a concrete type such as SomeClass
| 608 | + implicit val rwSomeClass: RW[Param[SomeClass]] = upickle.default.macroRW
| 609 | + } |
| 610 | + @li |
| 611 | + When using the @hl.scala{@@flatten} annotation on an @hl.scala{Iterable}, the key
| 612 | + type must be @hl.scala{String} (see the sketch after this list).
| 613 | + |
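| | + @p
| | + A hypothetical sketch of the first limitation (the names @hl.scala{Ambiguous},
| | + @hl.scala{a} and @hl.scala{b} are made up for illustration): with two flattened
| | + collections at the same level, the derived @hl.scala{Reader} has no way to decide
| | + which collection a leftover key-value pair belongs to, so this shape is not supported.
| | + @hl.scala
| | + // not supported: two @@flatten collections at the same level
| | + case class Ambiguous(@@flatten a: Map[String, String],
| | +                      @@flatten b: Map[String, Int])
| | + // implicit val rw: ReadWriter[Ambiguous] = macroRW // derivation fails
| | +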
| 614 | + |
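| | + @p
| | + Similarly, a sketch of the key-type restriction (class names are made up): flattened
| | + key-value pairs become keys of the parent JSON dictionary, and JSON object keys are
| | + always strings, so only @hl.scala{String}-keyed collections can be flattened.
| | + @hl.scala
| | + case class Ok(@@flatten m: Map[String, Int])    // String keys: can be flattened
| | + case class NotOk(@@flatten m: Map[Int, String]) // non-String keys: not supported
| | +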
517 | 615 | @sect{Limitations} |
518 | 616 |
|
519 | 617 | @p |
|
915 | 1013 | JSON library, and inherits a lot of its performance from Erik's work.
916 | 1014 |
|
917 | 1015 | @sect{Version History} |
| 1016 | + @sect{4.1.0} |
| 1017 | + @ul |
| 1018 | + @li |
| 1019 | + Introduction of the @sect.ref{@@flatten} annotation, to allow nested |
| 1020 | + @hl.scala{case class}es to be serialized into non-nested JSON dictionaries |
| 1021 | + @lnk("#642", "https://github.com/com-lihaoyi/upickle/pull/642") |
918 | 1022 | @sect{4.0.2} |
919 | 1023 | @ul |
920 | 1024 | @li |
|