From 3d2212c1ce05748d841f5efe1b0344108cf5a51b Mon Sep 17 00:00:00 2001 From: Adrien Vannson Date: Fri, 20 Dec 2024 11:03:22 +0100 Subject: [PATCH 01/13] Remove non-compiler files --- README.md | 559 +---------- benchmarks/__init__.py | 0 benchmarks/benchmarks.py | 117 --- docs/api.md | 18 - docs/migrating.md | 123 --- docs/quick-start.md | 218 ----- pyproject.toml | 43 +- tests/README.md | 91 -- tests/__init__.py | 0 tests/conftest.py | 11 - tests/generate.py | 172 ---- tests/grpc/__init__.py | 0 tests/grpc/test_grpclib_client.py | 271 ------ tests/grpc/test_stream_stream.py | 89 -- tests/grpc/thing_service.py | 73 -- tests/inputs/bool/bool.json | 3 - tests/inputs/bool/bool.proto | 7 - tests/inputs/bool/test_bool.py | 24 - tests/inputs/bytes/bytes.json | 3 - tests/inputs/bytes/bytes.proto | 7 - tests/inputs/casing/casing.json | 4 - tests/inputs/casing/casing.proto | 20 - tests/inputs/casing/test_casing.py | 17 - .../casing_inner_class.proto | 11 - .../test_casing_inner_class.py | 10 - .../casing_message_field_uppercase.proto | 9 - .../casing_message_field_uppercase.py | 8 - tests/inputs/config.py | 29 - tests/inputs/deprecated/deprecated.json | 6 - tests/inputs/deprecated/deprecated.proto | 21 - .../inputs/documentation/documentation.proto | 44 - tests/inputs/double/double-negative.json | 3 - tests/inputs/double/double.json | 3 - tests/inputs/double/double.proto | 7 - .../inputs/empty_repeated/empty_repeated.json | 3 - .../empty_repeated/empty_repeated.proto | 11 - .../inputs/empty_service/empty_service.proto | 7 - tests/inputs/entry/entry.proto | 20 - tests/inputs/enum/enum.json | 9 - tests/inputs/enum/enum.proto | 25 - tests/inputs/enum/test_enum.py | 107 -- tests/inputs/example/example.proto | 911 ------------------ .../example_service/example_service.proto | 20 - .../example_service/test_example_service.py | 81 -- .../field_name_identical_to_type.json | 7 - .../field_name_identical_to_type.proto | 13 - tests/inputs/fixed/fixed.json | 6 - tests/inputs/fixed/fixed.proto | 10 - tests/inputs/float/float.json | 9 - tests/inputs/float/float.proto | 14 - .../google_impl_behavior_equivalence.proto | 22 - .../test_google_impl_behavior_equivalence.py | 84 -- .../googletypes/googletypes-missing.json | 1 - tests/inputs/googletypes/googletypes.json | 7 - tests/inputs/googletypes/googletypes.proto | 16 - .../googletypes_request.proto | 29 - .../test_googletypes_request.py | 46 - .../googletypes_response.proto | 23 - .../test_googletypes_response.py | 63 -- .../googletypes_response_embedded.proto | 26 - .../test_googletypes_response_embedded.py | 40 - .../googletypes_service_returns_empty.proto | 13 - ...ogletypes_service_returns_googletype.proto | 18 - .../googletypes_struct.json | 5 - .../googletypes_struct.proto | 9 - .../googletypes_value/googletypes_value.json | 11 - .../googletypes_value/googletypes_value.proto | 15 - .../capitalized.proto | 8 - .../import_capitalized_package/test.proto | 11 - .../child.proto | 7 - .../import_child_package_from_package.proto | 11 - .../package_message.proto | 9 - .../child.proto | 7 - .../import_child_package_from_root.proto | 11 - .../import_child_scoping_rules/child.proto | 7 - .../import_child_scoping_rules.proto | 9 - .../import_child_scoping_rules/package.proto | 13 - .../import_circular_dependency.proto | 30 - .../import_circular_dependency/other.proto | 8 - .../import_circular_dependency/root.proto | 7 - .../inputs/import_cousin_package/cousin.proto | 6 - tests/inputs/import_cousin_package/test.proto | 11 - .../cousin.proto | 6 - .../test.proto | 11 - 
.../child.proto | 7 - ...mport_nested_child_package_from_root.proto | 9 - .../import_packages_same_name.proto | 13 - .../import_packages_same_name/posts_v1.proto | 7 - .../import_packages_same_name/users_v1.proto | 7 - .../import_parent_package_from_child.proto | 12 - .../parent_package_message.proto | 6 - .../child.proto | 11 - .../import_root_package_from_child/root.proto | 7 - .../import_root_sibling.proto | 11 - .../inputs/import_root_sibling/sibling.proto | 7 - .../child_package_request_message.proto | 7 - .../import_service_input_message.proto | 25 - .../request_message.proto | 7 - .../test_import_service_input_message.py | 36 - tests/inputs/int32/int32.json | 4 - tests/inputs/int32/int32.proto | 10 - .../inputs/invalid_field/invalid_field.proto | 7 - .../invalid_field/test_invalid_field.py | 17 - tests/inputs/map/map.json | 7 - tests/inputs/map/map.proto | 7 - tests/inputs/mapmessage/mapmessage.json | 10 - tests/inputs/mapmessage/mapmessage.proto | 11 - .../namespace_builtin_types.json | 16 - .../namespace_builtin_types.proto | 40 - .../namespace_keywords.json | 37 - .../namespace_keywords.proto | 46 - tests/inputs/nested/nested.json | 7 - tests/inputs/nested/nested.proto | 26 - tests/inputs/nested2/nested2.proto | 21 - tests/inputs/nested2/package.proto | 7 - tests/inputs/nestedtwice/nestedtwice.json | 11 - tests/inputs/nestedtwice/nestedtwice.proto | 40 - tests/inputs/nestedtwice/test_nestedtwice.py | 25 - tests/inputs/oneof/oneof-name.json | 3 - tests/inputs/oneof/oneof.json | 3 - tests/inputs/oneof/oneof.proto | 23 - tests/inputs/oneof/oneof_name.json | 3 - tests/inputs/oneof/test_oneof.py | 43 - .../oneof_default_value_serialization.proto | 30 - .../test_oneof_default_value_serialization.py | 69 -- tests/inputs/oneof_empty/oneof_empty.json | 3 - tests/inputs/oneof_empty/oneof_empty.proto | 17 - .../oneof_empty/oneof_empty_maybe1.json | 3 - .../oneof_empty/oneof_empty_maybe2.json | 5 - tests/inputs/oneof_empty/test_oneof_empty.py | 0 .../inputs/oneof_enum/oneof_enum-enum-0.json | 3 - .../inputs/oneof_enum/oneof_enum-enum-1.json | 3 - tests/inputs/oneof_enum/oneof_enum.json | 6 - tests/inputs/oneof_enum/oneof_enum.proto | 20 - tests/inputs/oneof_enum/test_oneof_enum.py | 39 - .../proto3_field_presence.json | 13 - .../proto3_field_presence.proto | 26 - .../proto3_field_presence_default.json | 1 - .../proto3_field_presence_missing.json | 9 - .../test_proto3_field_presence.py | 46 - .../proto3_field_presence_oneof.json | 3 - .../proto3_field_presence_oneof.proto | 22 - .../test_proto3_field_presence_oneof.py | 27 - .../recursivemessage/recursivemessage.json | 12 - .../recursivemessage/recursivemessage.proto | 15 - tests/inputs/ref/ref.json | 5 - tests/inputs/ref/ref.proto | 9 - tests/inputs/ref/repeatedmessage.proto | 11 - .../regression_387/regression_387.proto | 12 - .../regression_387/test_regression_387.py | 12 - .../regression_414/regression_414.proto | 9 - .../regression_414/test_regression_414.py | 15 - tests/inputs/repeated/repeated.json | 3 - tests/inputs/repeated/repeated.proto | 7 - .../repeated_duration_timestamp.json | 4 - .../repeated_duration_timestamp.proto | 12 - .../test_repeated_duration_timestamp.py | 12 - .../repeatedmessage/repeatedmessage.json | 10 - .../repeatedmessage/repeatedmessage.proto | 11 - .../inputs/repeatedpacked/repeatedpacked.json | 5 - .../repeatedpacked/repeatedpacked.proto | 9 - .../rpc_empty_input_message.proto | 13 - .../test_rpc_empty_input_message.py | 24 - tests/inputs/service/service.proto | 35 - 
.../service_separate_packages/messages.proto | 31 - .../service_separate_packages/service.proto | 12 - tests/inputs/service_uppercase/service.proto | 16 - .../inputs/service_uppercase/test_service.py | 8 - tests/inputs/signed/signed.json | 6 - tests/inputs/signed/signed.proto | 11 - .../test_timestamp_dict_encode.py | 78 -- .../timestamp_dict_encode.json | 3 - .../timestamp_dict_encode.proto | 9 - tests/mocks.py | 40 - tests/oneof_pattern_matching.py | 47 - tests/streams/delimited_messages.in | 2 - tests/streams/dump_varint_negative.expected | 1 - tests/streams/dump_varint_positive.expected | 1 - tests/streams/java/.gitignore | 38 - tests/streams/java/pom.xml | 94 -- .../java/betterproto/CompatibilityTest.java | 41 - .../java/src/main/java/betterproto/Tests.java | 115 --- .../src/main/proto/betterproto/nested.proto | 27 - .../src/main/proto/betterproto/oneof.proto | 19 - tests/streams/load_varint_cutoff.in | 1 - .../message_dump_file_multiple.expected | 2 - .../streams/message_dump_file_single.expected | 1 - tests/test_all_definition.py | 19 - tests/test_casing.py | 129 --- tests/test_deprecated.py | 61 -- tests/test_documentation.py | 37 - tests/test_enum.py | 79 -- tests/test_features.py | 569 ----------- tests/test_inputs.py | 208 ---- tests/test_mapmessage.py | 18 - tests/test_module_validation.py | 111 --- tests/test_oneof_pattern_matching.py | 13 - tests/test_pickling.py | 161 ---- tests/test_streams.py | 362 ------- tests/test_struct.py | 36 - tests/test_timestamp.py | 27 - tests/test_typing_compiler.py | 70 -- tests/test_version.py | 15 - tests/util.py | 158 --- 204 files changed, 4 insertions(+), 7732 deletions(-) delete mode 100644 benchmarks/__init__.py delete mode 100644 benchmarks/benchmarks.py delete mode 100644 docs/api.md delete mode 100644 docs/migrating.md delete mode 100644 docs/quick-start.md delete mode 100644 tests/README.md delete mode 100644 tests/__init__.py delete mode 100644 tests/conftest.py delete mode 100755 tests/generate.py delete mode 100644 tests/grpc/__init__.py delete mode 100644 tests/grpc/test_grpclib_client.py delete mode 100644 tests/grpc/test_stream_stream.py delete mode 100644 tests/grpc/thing_service.py delete mode 100644 tests/inputs/bool/bool.json delete mode 100644 tests/inputs/bool/bool.proto delete mode 100644 tests/inputs/bool/test_bool.py delete mode 100644 tests/inputs/bytes/bytes.json delete mode 100644 tests/inputs/bytes/bytes.proto delete mode 100644 tests/inputs/casing/casing.json delete mode 100644 tests/inputs/casing/casing.proto delete mode 100644 tests/inputs/casing/test_casing.py delete mode 100644 tests/inputs/casing_inner_class/casing_inner_class.proto delete mode 100644 tests/inputs/casing_inner_class/test_casing_inner_class.py delete mode 100644 tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.proto delete mode 100644 tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.py delete mode 100644 tests/inputs/config.py delete mode 100644 tests/inputs/deprecated/deprecated.json delete mode 100644 tests/inputs/deprecated/deprecated.proto delete mode 100644 tests/inputs/documentation/documentation.proto delete mode 100644 tests/inputs/double/double-negative.json delete mode 100644 tests/inputs/double/double.json delete mode 100644 tests/inputs/double/double.proto delete mode 100644 tests/inputs/empty_repeated/empty_repeated.json delete mode 100644 tests/inputs/empty_repeated/empty_repeated.proto delete mode 100644 tests/inputs/empty_service/empty_service.proto delete mode 100644 
tests/inputs/entry/entry.proto delete mode 100644 tests/inputs/enum/enum.json delete mode 100644 tests/inputs/enum/enum.proto delete mode 100644 tests/inputs/enum/test_enum.py delete mode 100644 tests/inputs/example/example.proto delete mode 100644 tests/inputs/example_service/example_service.proto delete mode 100644 tests/inputs/example_service/test_example_service.py delete mode 100644 tests/inputs/field_name_identical_to_type/field_name_identical_to_type.json delete mode 100644 tests/inputs/field_name_identical_to_type/field_name_identical_to_type.proto delete mode 100644 tests/inputs/fixed/fixed.json delete mode 100644 tests/inputs/fixed/fixed.proto delete mode 100644 tests/inputs/float/float.json delete mode 100644 tests/inputs/float/float.proto delete mode 100644 tests/inputs/google_impl_behavior_equivalence/google_impl_behavior_equivalence.proto delete mode 100644 tests/inputs/google_impl_behavior_equivalence/test_google_impl_behavior_equivalence.py delete mode 100644 tests/inputs/googletypes/googletypes-missing.json delete mode 100644 tests/inputs/googletypes/googletypes.json delete mode 100644 tests/inputs/googletypes/googletypes.proto delete mode 100644 tests/inputs/googletypes_request/googletypes_request.proto delete mode 100644 tests/inputs/googletypes_request/test_googletypes_request.py delete mode 100644 tests/inputs/googletypes_response/googletypes_response.proto delete mode 100644 tests/inputs/googletypes_response/test_googletypes_response.py delete mode 100644 tests/inputs/googletypes_response_embedded/googletypes_response_embedded.proto delete mode 100644 tests/inputs/googletypes_response_embedded/test_googletypes_response_embedded.py delete mode 100644 tests/inputs/googletypes_service_returns_empty/googletypes_service_returns_empty.proto delete mode 100644 tests/inputs/googletypes_service_returns_googletype/googletypes_service_returns_googletype.proto delete mode 100644 tests/inputs/googletypes_struct/googletypes_struct.json delete mode 100644 tests/inputs/googletypes_struct/googletypes_struct.proto delete mode 100644 tests/inputs/googletypes_value/googletypes_value.json delete mode 100644 tests/inputs/googletypes_value/googletypes_value.proto delete mode 100644 tests/inputs/import_capitalized_package/capitalized.proto delete mode 100644 tests/inputs/import_capitalized_package/test.proto delete mode 100644 tests/inputs/import_child_package_from_package/child.proto delete mode 100644 tests/inputs/import_child_package_from_package/import_child_package_from_package.proto delete mode 100644 tests/inputs/import_child_package_from_package/package_message.proto delete mode 100644 tests/inputs/import_child_package_from_root/child.proto delete mode 100644 tests/inputs/import_child_package_from_root/import_child_package_from_root.proto delete mode 100644 tests/inputs/import_child_scoping_rules/child.proto delete mode 100644 tests/inputs/import_child_scoping_rules/import_child_scoping_rules.proto delete mode 100644 tests/inputs/import_child_scoping_rules/package.proto delete mode 100644 tests/inputs/import_circular_dependency/import_circular_dependency.proto delete mode 100644 tests/inputs/import_circular_dependency/other.proto delete mode 100644 tests/inputs/import_circular_dependency/root.proto delete mode 100644 tests/inputs/import_cousin_package/cousin.proto delete mode 100644 tests/inputs/import_cousin_package/test.proto delete mode 100644 tests/inputs/import_cousin_package_same_name/cousin.proto delete mode 100644 tests/inputs/import_cousin_package_same_name/test.proto delete 
mode 100644 tests/inputs/import_nested_child_package_from_root/child.proto delete mode 100644 tests/inputs/import_nested_child_package_from_root/import_nested_child_package_from_root.proto delete mode 100644 tests/inputs/import_packages_same_name/import_packages_same_name.proto delete mode 100644 tests/inputs/import_packages_same_name/posts_v1.proto delete mode 100644 tests/inputs/import_packages_same_name/users_v1.proto delete mode 100644 tests/inputs/import_parent_package_from_child/import_parent_package_from_child.proto delete mode 100644 tests/inputs/import_parent_package_from_child/parent_package_message.proto delete mode 100644 tests/inputs/import_root_package_from_child/child.proto delete mode 100644 tests/inputs/import_root_package_from_child/root.proto delete mode 100644 tests/inputs/import_root_sibling/import_root_sibling.proto delete mode 100644 tests/inputs/import_root_sibling/sibling.proto delete mode 100644 tests/inputs/import_service_input_message/child_package_request_message.proto delete mode 100644 tests/inputs/import_service_input_message/import_service_input_message.proto delete mode 100644 tests/inputs/import_service_input_message/request_message.proto delete mode 100644 tests/inputs/import_service_input_message/test_import_service_input_message.py delete mode 100644 tests/inputs/int32/int32.json delete mode 100644 tests/inputs/int32/int32.proto delete mode 100644 tests/inputs/invalid_field/invalid_field.proto delete mode 100644 tests/inputs/invalid_field/test_invalid_field.py delete mode 100644 tests/inputs/map/map.json delete mode 100644 tests/inputs/map/map.proto delete mode 100644 tests/inputs/mapmessage/mapmessage.json delete mode 100644 tests/inputs/mapmessage/mapmessage.proto delete mode 100644 tests/inputs/namespace_builtin_types/namespace_builtin_types.json delete mode 100644 tests/inputs/namespace_builtin_types/namespace_builtin_types.proto delete mode 100644 tests/inputs/namespace_keywords/namespace_keywords.json delete mode 100644 tests/inputs/namespace_keywords/namespace_keywords.proto delete mode 100644 tests/inputs/nested/nested.json delete mode 100644 tests/inputs/nested/nested.proto delete mode 100644 tests/inputs/nested2/nested2.proto delete mode 100644 tests/inputs/nested2/package.proto delete mode 100644 tests/inputs/nestedtwice/nestedtwice.json delete mode 100644 tests/inputs/nestedtwice/nestedtwice.proto delete mode 100644 tests/inputs/nestedtwice/test_nestedtwice.py delete mode 100644 tests/inputs/oneof/oneof-name.json delete mode 100644 tests/inputs/oneof/oneof.json delete mode 100644 tests/inputs/oneof/oneof.proto delete mode 100644 tests/inputs/oneof/oneof_name.json delete mode 100644 tests/inputs/oneof/test_oneof.py delete mode 100644 tests/inputs/oneof_default_value_serialization/oneof_default_value_serialization.proto delete mode 100644 tests/inputs/oneof_default_value_serialization/test_oneof_default_value_serialization.py delete mode 100644 tests/inputs/oneof_empty/oneof_empty.json delete mode 100644 tests/inputs/oneof_empty/oneof_empty.proto delete mode 100644 tests/inputs/oneof_empty/oneof_empty_maybe1.json delete mode 100644 tests/inputs/oneof_empty/oneof_empty_maybe2.json delete mode 100644 tests/inputs/oneof_empty/test_oneof_empty.py delete mode 100644 tests/inputs/oneof_enum/oneof_enum-enum-0.json delete mode 100644 tests/inputs/oneof_enum/oneof_enum-enum-1.json delete mode 100644 tests/inputs/oneof_enum/oneof_enum.json delete mode 100644 tests/inputs/oneof_enum/oneof_enum.proto delete mode 100644 
tests/inputs/oneof_enum/test_oneof_enum.py delete mode 100644 tests/inputs/proto3_field_presence/proto3_field_presence.json delete mode 100644 tests/inputs/proto3_field_presence/proto3_field_presence.proto delete mode 100644 tests/inputs/proto3_field_presence/proto3_field_presence_default.json delete mode 100644 tests/inputs/proto3_field_presence/proto3_field_presence_missing.json delete mode 100644 tests/inputs/proto3_field_presence/test_proto3_field_presence.py delete mode 100644 tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.json delete mode 100644 tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.proto delete mode 100644 tests/inputs/proto3_field_presence_oneof/test_proto3_field_presence_oneof.py delete mode 100644 tests/inputs/recursivemessage/recursivemessage.json delete mode 100644 tests/inputs/recursivemessage/recursivemessage.proto delete mode 100644 tests/inputs/ref/ref.json delete mode 100644 tests/inputs/ref/ref.proto delete mode 100644 tests/inputs/ref/repeatedmessage.proto delete mode 100644 tests/inputs/regression_387/regression_387.proto delete mode 100644 tests/inputs/regression_387/test_regression_387.py delete mode 100644 tests/inputs/regression_414/regression_414.proto delete mode 100644 tests/inputs/regression_414/test_regression_414.py delete mode 100644 tests/inputs/repeated/repeated.json delete mode 100644 tests/inputs/repeated/repeated.proto delete mode 100644 tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.json delete mode 100644 tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.proto delete mode 100644 tests/inputs/repeated_duration_timestamp/test_repeated_duration_timestamp.py delete mode 100644 tests/inputs/repeatedmessage/repeatedmessage.json delete mode 100644 tests/inputs/repeatedmessage/repeatedmessage.proto delete mode 100644 tests/inputs/repeatedpacked/repeatedpacked.json delete mode 100644 tests/inputs/repeatedpacked/repeatedpacked.proto delete mode 100644 tests/inputs/rpc_empty_input_message/rpc_empty_input_message.proto delete mode 100644 tests/inputs/rpc_empty_input_message/test_rpc_empty_input_message.py delete mode 100644 tests/inputs/service/service.proto delete mode 100644 tests/inputs/service_separate_packages/messages.proto delete mode 100644 tests/inputs/service_separate_packages/service.proto delete mode 100644 tests/inputs/service_uppercase/service.proto delete mode 100644 tests/inputs/service_uppercase/test_service.py delete mode 100644 tests/inputs/signed/signed.json delete mode 100644 tests/inputs/signed/signed.proto delete mode 100644 tests/inputs/timestamp_dict_encode/test_timestamp_dict_encode.py delete mode 100644 tests/inputs/timestamp_dict_encode/timestamp_dict_encode.json delete mode 100644 tests/inputs/timestamp_dict_encode/timestamp_dict_encode.proto delete mode 100644 tests/mocks.py delete mode 100644 tests/oneof_pattern_matching.py delete mode 100644 tests/streams/delimited_messages.in delete mode 100644 tests/streams/dump_varint_negative.expected delete mode 100644 tests/streams/dump_varint_positive.expected delete mode 100644 tests/streams/java/.gitignore delete mode 100644 tests/streams/java/pom.xml delete mode 100644 tests/streams/java/src/main/java/betterproto/CompatibilityTest.java delete mode 100644 tests/streams/java/src/main/java/betterproto/Tests.java delete mode 100644 tests/streams/java/src/main/proto/betterproto/nested.proto delete mode 100644 tests/streams/java/src/main/proto/betterproto/oneof.proto delete mode 100644 
tests/streams/load_varint_cutoff.in delete mode 100644 tests/streams/message_dump_file_multiple.expected delete mode 100644 tests/streams/message_dump_file_single.expected delete mode 100644 tests/test_all_definition.py delete mode 100644 tests/test_casing.py delete mode 100644 tests/test_deprecated.py delete mode 100644 tests/test_documentation.py delete mode 100644 tests/test_enum.py delete mode 100644 tests/test_features.py delete mode 100644 tests/test_inputs.py delete mode 100644 tests/test_mapmessage.py delete mode 100644 tests/test_module_validation.py delete mode 100644 tests/test_oneof_pattern_matching.py delete mode 100644 tests/test_pickling.py delete mode 100644 tests/test_streams.py delete mode 100644 tests/test_struct.py delete mode 100644 tests/test_timestamp.py delete mode 100644 tests/test_typing_compiler.py delete mode 100644 tests/test_version.py delete mode 100644 tests/util.py diff --git a/README.md b/README.md index 71f6d768..38de5884 100644 --- a/README.md +++ b/README.md @@ -1,563 +1,8 @@ -# Better Protobuf / gRPC Support for Python +# Betterproto2 compiler -![](https://github.com/betterproto/python-betterproto2/actions/workflows/ci.yml/badge.svg) +![](https://github.com/betterproto/python-betterproto2-compiler/actions/workflows/ci.yml/badge.svg) -> :octocat: If you're reading this on github, please be aware that it might mention unreleased features! See the latest released README on [pypi](https://pypi.org/project/betterproto/). -This project aims to provide an improved experience when using Protobuf / gRPC in a modern Python environment by making use of modern language features and generating readable, understandable, idiomatic Python code. It will not support legacy features or environments (e.g. Protobuf 2). The following are supported: - -- Protobuf 3 & gRPC code generation - - Both binary & JSON serialization is built-in -- Python 3.7+ making use of: - - Enums - - Dataclasses - - `async`/`await` - - Timezone-aware `datetime` and `timedelta` objects - - Relative imports - - Mypy type checking -- [Pydantic Models](https://docs.pydantic.dev/) generation (see #generating-pydantic-models) - -This project is heavily inspired by, and borrows functionality from: - -- https://github.com/protocolbuffers/protobuf/tree/master/python -- https://github.com/eigenein/protobuf/ -- https://github.com/vmagamedov/grpclib - -## Motivation - -This project exists because I am unhappy with the state of the official Google protoc plugin for Python. - -- No `async` support (requires additional `grpclib` plugin) -- No typing support or code completion/intelligence (requires additional `mypy` plugin) -- No `__init__.py` module files get generated -- Output is not importable - - Import paths break in Python 3 unless you mess with `sys.path` -- Bugs when names clash (e.g. `codecs` package) -- Generated code is not idiomatic - - Completely unreadable runtime code-generation - - Much code looks like C++ or Java ported 1:1 to Python - - Capitalized function names like `HasField()` and `SerializeToString()` - - Uses `SerializeToString()` rather than the built-in `__bytes__()` - - Special wrapped types don't use Python's `None` - - Timestamp/duration types don't use Python's built-in `datetime` module - - -This project is a reimplementation from the ground up focused on idiomatic modern Python to help fix some of the above. While it may not be a 1:1 drop-in replacement due to changed method names and call patterns, the wire format is identical. - -## Installation - -First, install the package. 
Note that the `[compiler]` feature flag tells it to install extra dependencies only needed by the `protoc` plugin: - -```sh -# Install both the library and compiler -pip install "betterproto[compiler]" - -# Install just the library (to use the generated code output) -pip install betterproto -``` - -*Betterproto* is under active development. To install the latest beta version, use `pip install --pre betterproto`. - -## Getting Started - -### Compiling proto files - -Given you installed the compiler and have a proto file, e.g `example.proto`: - -```protobuf -syntax = "proto3"; - -package hello; - -// Greeting represents a message you can tell a user. -message Greeting { - string message = 1; -} -``` - -You can run the following to invoke protoc directly: - -```sh -mkdir lib -protoc -I . --python_betterproto_out=lib example.proto -``` - -or run the following to invoke protoc via grpcio-tools: - -```sh -pip install grpcio-tools -python -m grpc_tools.protoc -I . --python_betterproto_out=lib example.proto -``` - -This will generate `lib/hello/__init__.py` which looks like: - -```python -# Generated by the protocol buffer compiler. DO NOT EDIT! -# sources: example.proto -# plugin: python-betterproto -from dataclasses import dataclass - -import betterproto - - -@dataclass -class Greeting(betterproto.Message): - """Greeting represents a message you can tell a user.""" - - message: str = betterproto.string_field(1) -``` - -Now you can use it! - -```python ->>> from lib.hello import Greeting ->>> test = Greeting() ->>> test -Greeting(message='') - ->>> test.message = "Hey!" ->>> test -Greeting(message="Hey!") - ->>> serialized = bytes(test) ->>> serialized -b'\n\x04Hey!' - ->>> another = Greeting().parse(serialized) ->>> another -Greeting(message="Hey!") - ->>> another.to_dict() -{"message": "Hey!"} ->>> another.to_json(indent=2) -'{\n "message": "Hey!"\n}' -``` - -### Async gRPC Support - -The generated Protobuf `Message` classes are compatible with [grpclib](https://github.com/vmagamedov/grpclib) so you are free to use it if you like. That said, this project also includes support for async gRPC stub generation with better static type checking and code completion support. It is enabled by default. - -Given an example service definition: - -```protobuf -syntax = "proto3"; - -package echo; - -message EchoRequest { - string value = 1; - // Number of extra times to echo - uint32 extra_times = 2; -} - -message EchoResponse { - repeated string values = 1; -} - -message EchoStreamResponse { - string value = 1; -} - -service Echo { - rpc Echo(EchoRequest) returns (EchoResponse); - rpc EchoStream(EchoRequest) returns (stream EchoStreamResponse); -} -``` - -Generate echo proto file: - -``` -python -m grpc_tools.protoc -I . --python_betterproto_out=. echo.proto -``` - -A client can be implemented as follows: -```python -import asyncio -import echo - -from grpclib.client import Channel - - -async def main(): - channel = Channel(host="127.0.0.1", port=50051) - service = echo.EchoStub(channel) - response = await service.echo(echo.EchoRequest(value="hello", extra_times=1)) - print(response) - - async for response in service.echo_stream(echo.EchoRequest(value="hello", extra_times=1)): - print(response) - - # don't forget to close the channel when done! 
- channel.close() - - -if __name__ == "__main__": - loop = asyncio.get_event_loop() - loop.run_until_complete(main()) - -``` - -which would output -```python -EchoResponse(values=['hello', 'hello']) -EchoStreamResponse(value='hello') -EchoStreamResponse(value='hello') -``` - -This project also produces server-facing stubs that can be used to implement a Python -gRPC server. -To use them, simply subclass the base class in the generated files and override the -service methods: - -```python -import asyncio -from echo import EchoBase, EchoRequest, EchoResponse, EchoStreamResponse -from grpclib.server import Server -from typing import AsyncIterator - - -class EchoService(EchoBase): - async def echo(self, echo_request: "EchoRequest") -> "EchoResponse": - return EchoResponse([echo_request.value for _ in range(echo_request.extra_times)]) - - async def echo_stream(self, echo_request: "EchoRequest") -> AsyncIterator["EchoStreamResponse"]: - for _ in range(echo_request.extra_times): - yield EchoStreamResponse(echo_request.value) - - -async def main(): - server = Server([EchoService()]) - await server.start("127.0.0.1", 50051) - await server.wait_closed() - -if __name__ == '__main__': - loop = asyncio.get_event_loop() - loop.run_until_complete(main()) -``` - -### JSON - -Both serializing and parsing are supported to/from JSON and Python dictionaries using the following methods: - -- Dicts: `Message().to_dict()`, `Message().from_dict(...)` -- JSON: `Message().to_json()`, `Message().from_json(...)` - -For compatibility the default is to convert field names to `camelCase`. You can control this behavior by passing a casing value, e.g: - -```python -MyMessage().to_dict(casing=betterproto.Casing.SNAKE) -``` - -### One-of Support - -Protobuf supports grouping fields in a `oneof` clause. Only one of the fields in the group may be set at a given time. For example, given the proto: - -```protobuf -syntax = "proto3"; - -message Test { - oneof foo { - bool on = 1; - int32 count = 2; - string name = 3; - } -} -``` - -On Python 3.10 and later, you can use a `match` statement to access the provided one-of field, which supports type-checking: - -```py -test = Test() -match test: - case Test(on=bool(value)): - print(value) # value: bool - case Test(count=int(value)): - print(value) # value: int - case Test(name=str(value)): - print(value) # value: str - case _: - print("No value provided") -``` - -You can also use `betterproto.which_one_of(message, group_name)` to determine which of the fields was set. It returns a tuple of the field name and value, or a blank string and `None` if unset. - -```py ->>> test = Test() ->>> betterproto.which_one_of(test, "foo") -["", None] - ->>> test.on = True ->>> betterproto.which_one_of(test, "foo") -["on", True] - -# Setting one member of the group resets the others. ->>> test.count = 57 ->>> betterproto.which_one_of(test, "foo") -["count", 57] - -# Default (zero) values also work. ->>> test.name = "" ->>> betterproto.which_one_of(test, "foo") -["name", ""] -``` - -Again this is a little different than the official Google code generator: - -```py -# Old way (official Google protobuf package) ->>> message.WhichOneof("group") -"foo" - -# New way (this project) ->>> betterproto.which_one_of(message, "group") -["foo", "foo's value"] -``` - -### Well-Known Google Types - -Google provides several well-known message types like a timestamp, duration, and several wrappers used to provide optional zero value support. 
Each of these has a special JSON representation and is handled a little differently from normal messages. The Python mapping for these is as follows: - -| Google Message | Python Type | Default | -| --------------------------- | ---------------------------------------- | ---------------------- | -| `google.protobuf.duration` | [`datetime.timedelta`][td] | `0` | -| `google.protobuf.timestamp` | Timezone-aware [`datetime.datetime`][dt] | `1970-01-01T00:00:00Z` | -| `google.protobuf.*Value` | `Optional[...]` | `None` | -| `google.protobuf.*` | `betterproto.lib.google.protobuf.*` | `None` | - -[td]: https://docs.python.org/3/library/datetime.html#timedelta-objects -[dt]: https://docs.python.org/3/library/datetime.html#datetime.datetime - -For the wrapper types, the Python type corresponds to the wrapped type, e.g. `google.protobuf.BoolValue` becomes `Optional[bool]` while `google.protobuf.Int32Value` becomes `Optional[int]`. All of the optional values default to `None`, so don't forget to check for that possible state. Given: - -```protobuf -syntax = "proto3"; - -import "google/protobuf/duration.proto"; -import "google/protobuf/timestamp.proto"; -import "google/protobuf/wrappers.proto"; - -message Test { - google.protobuf.BoolValue maybe = 1; - google.protobuf.Timestamp ts = 2; - google.protobuf.Duration duration = 3; -} -``` - -You can do stuff like: - -```py ->>> t = Test().from_dict({"maybe": True, "ts": "2019-01-01T12:00:00Z", "duration": "1.200s"}) ->>> t -Test(maybe=True, ts=datetime.datetime(2019, 1, 1, 12, 0, tzinfo=datetime.timezone.utc), duration=datetime.timedelta(seconds=1, microseconds=200000)) - ->>> t.ts - t.duration -datetime.datetime(2019, 1, 1, 11, 59, 58, 800000, tzinfo=datetime.timezone.utc) - ->>> t.ts.isoformat() -'2019-01-01T12:00:00+00:00' - ->>> t.maybe = None ->>> t.to_dict() -{'ts': '2019-01-01T12:00:00Z', 'duration': '1.200s'} -``` - -## Generating Pydantic Models - -You can use python-betterproto to generate pydantic based models, using -pydantic dataclasses. This means the results of the protobuf unmarshalling will -be typed checked. The usage is the same, but you need to add a custom option -when calling the protobuf compiler: - - -``` -protoc -I . --python_betterproto_opt=pydantic_dataclasses --python_betterproto_out=lib example.proto -``` - -With the important change being `--python_betterproto_opt=pydantic_dataclasses`. This will -swap the dataclass implementation from the builtin python dataclass to the -pydantic dataclass. You must have pydantic as a dependency in your project for -this to work. - -## Configuration typing imports - -By default typing types will be imported directly from typing. This sometimes can lead to issues in generation if types that are being generated conflict with the name. In this case you can configure the way types are imported from 3 different options: - -### Direct -``` -protoc -I . --python_betterproto_opt=typing.direct --python_betterproto_out=lib example.proto -``` -this configuration is the default, and will import types as follows: -``` -from typing import ( - List, - Optional, - Union -) -... -value: List[str] = [] -value2: Optional[str] = None -value3: Union[str, int] = 1 -``` -### Root -``` -protoc -I . --python_betterproto_opt=typing.root --python_betterproto_out=lib example.proto -``` -this configuration loads the root typing module, and then access the types off of it directly: -``` -import typing -... 
-value: typing.List[str] = [] -value2: typing.Optional[str] = None -value3: typing.Union[str, int] = 1 -``` - -### 310 -``` -protoc -I . --python_betterproto_opt=typing.310 --python_betterproto_out=lib example.proto -``` -this configuration avoid loading typing all together if possible and uses the python 3.10 pattern: -``` -... -value: list[str] = [] -value2: str | None = None -value3: str | int = 1 -``` - -## Development - -- _Join us on [Slack](https://join.slack.com/t/betterproto/shared_invite/zt-f0n0uolx-iN8gBNrkPxtKHTLpG3o1OQ)!_ -- _See how you can help → [Contributing](.github/CONTRIBUTING.md)_ - -### Requirements - -- Python (3.8 or higher) - -- [poetry](https://python-poetry.org/docs/#installation) - *Needed to install dependencies in a virtual environment* - -- [poethepoet](https://github.com/nat-n/poethepoet) for running development tasks as defined in pyproject.toml - - Can be installed to your host environment via `pip install poethepoet` then executed as simple `poe` - - or run from the poetry venv as `poetry run poe` - -### Setup - -```sh -# Get set up with the virtual env & dependencies -poetry install -E compiler - -# Activate the poetry environment -poetry shell -``` - -### Code style - -This project enforces [black](https://github.com/psf/black) python code formatting. - -Before committing changes run: - -```sh -poe format -``` - -To avoid merge conflicts later, non-black formatted python code will fail in CI. - -### Tests - -There are two types of tests: - -1. Standard tests -2. Custom tests - -#### Standard tests - -Adding a standard test case is easy. - -- Create a new directory `betterproto/tests/inputs/` - - add `.proto` with a message called `Test` - - add `.json` with some test data (optional) - -It will be picked up automatically when you run the tests. - -- See also: [Standard Tests Development Guide](tests/README.md) - -#### Custom tests - -Custom tests are found in `tests/test_*.py` and are run with pytest. - -#### Running - -Here's how to run the tests. - -```sh -# Generate assets from sample .proto files required by the tests -poe generate -# Run the tests -poe test -``` - -To run tests as they are run in CI (with tox) run: - -```sh -poe full-test -``` - -### (Re)compiling Google Well-known Types - -Betterproto includes compiled versions for Google's well-known types at [src/betterproto/lib/google](src/betterproto/lib/google). -Be sure to regenerate these files when modifying the plugin output format, and validate by running the tests. - -Normally, the plugin does not compile any references to `google.protobuf`, since they are pre-compiled. To force compilation of `google.protobuf`, use the option `--custom_opt=INCLUDE_GOOGLE`. 
- -Assuming your `google.protobuf` source files (included with all releases of `protoc`) are located in `/usr/local/include`, you can regenerate them as follows: - -```sh -protoc \ - --plugin=protoc-gen-custom=src/betterproto/plugin/main.py \ - --custom_opt=INCLUDE_GOOGLE \ - --custom_out=src/betterproto/lib \ - -I /usr/local/include/ \ - /usr/local/include/google/protobuf/*.proto -``` - -### TODO - -- [x] Fixed length fields - - [x] Packed fixed-length -- [x] Zig-zag signed fields (sint32, sint64) -- [x] Don't encode zero values for nested types -- [x] Enums -- [x] Repeated message fields -- [x] Maps - - [x] Maps of message fields -- [x] Support passthrough of unknown fields -- [x] Refs to nested types -- [x] Imports in proto files -- [x] Well-known Google types - - [ ] Support as request input - - [ ] Support as response output - - [ ] Automatically wrap/unwrap responses -- [x] OneOf support - - [x] Basic support on the wire - - [x] Check which was set from the group - - [x] Setting one unsets the others -- [ ] JSON that isn't completely naive. - - [x] 64-bit ints as strings - - [x] Maps - - [x] Lists - - [x] Bytes as base64 - - [ ] Any support - - [x] Enum strings - - [x] Well known types support (timestamp, duration, wrappers) - - [x] Support different casing (orig vs. camel vs. others?) -- [x] Async service stubs - - [x] Unary-unary - - [x] Server streaming response - - [x] Client streaming request -- [x] Renaming messages and fields to conform to Python name standards -- [x] Renaming clashes with language keywords -- [x] Python package -- [x] Automate running tests -- [ ] Cleanup! ## License diff --git a/benchmarks/__init__.py b/benchmarks/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/benchmarks/benchmarks.py b/benchmarks/benchmarks.py deleted file mode 100644 index 5768cbf0..00000000 --- a/benchmarks/benchmarks.py +++ /dev/null @@ -1,117 +0,0 @@ -from dataclasses import dataclass -from typing import List - -import betterproto - - -@dataclass -class TestMessage(betterproto.Message): - foo: int = betterproto.uint32_field(1) - bar: str = betterproto.string_field(2) - baz: float = betterproto.float_field(3) - - -@dataclass -class TestNestedChildMessage(betterproto.Message): - str_key: str = betterproto.string_field(1) - bytes_key: bytes = betterproto.bytes_field(2) - bool_key: bool = betterproto.bool_field(3) - float_key: float = betterproto.float_field(4) - int_key: int = betterproto.uint64_field(5) - - -@dataclass -class TestNestedMessage(betterproto.Message): - foo: TestNestedChildMessage = betterproto.message_field(1) - bar: TestNestedChildMessage = betterproto.message_field(2) - baz: TestNestedChildMessage = betterproto.message_field(3) - - -@dataclass -class TestRepeatedMessage(betterproto.Message): - foo_repeat: List[str] = betterproto.string_field(1) - bar_repeat: List[int] = betterproto.int64_field(2) - baz_repeat: List[bool] = betterproto.bool_field(3) - - -class BenchMessage: - """Test creation and usage a proto message.""" - - def setup(self): - self.cls = TestMessage - self.instance = TestMessage() - self.instance_filled = TestMessage(0, "test", 0.0) - self.instance_filled_bytes = bytes(self.instance_filled) - self.instance_filled_nested = TestNestedMessage( - TestNestedChildMessage("foo", bytearray(b"test1"), True, 0.1234, 500), - TestNestedChildMessage("bar", bytearray(b"test2"), True, 3.1415, 302), - TestNestedChildMessage("baz", bytearray(b"test3"), False, 1e5, 300), - ) - self.instance_filled_nested_bytes = 
bytes(self.instance_filled_nested) - self.instance_filled_repeated = TestRepeatedMessage( - [f"test{i}" for i in range(1_000)], - [(i - 500) ** 3 for i in range(1_000)], - [i % 2 == 0 for i in range(1_000)], - ) - self.instance_filled_repeated_bytes = bytes(self.instance_filled_repeated) - - def time_overhead(self): - """Overhead in class definition.""" - - @dataclass - class Message(betterproto.Message): - foo: int = betterproto.uint32_field(1) - bar: str = betterproto.string_field(2) - baz: float = betterproto.float_field(3) - - def time_instantiation(self): - """Time instantiation""" - self.cls() - - def time_attribute_access(self): - """Time to access an attribute""" - self.instance.foo - self.instance.bar - self.instance.baz - - def time_init_with_values(self): - """Time to set an attribute""" - self.cls(0, "test", 0.0) - - def time_attribute_setting(self): - """Time to set attributes""" - self.instance.foo = 0 - self.instance.bar = "test" - self.instance.baz = 0.0 - - def time_serialize(self): - """Time serializing a message to wire.""" - bytes(self.instance_filled) - - def time_deserialize(self): - """Time deserialize a message.""" - TestMessage().parse(self.instance_filled_bytes) - - def time_serialize_nested(self): - """Time serializing a nested message to wire.""" - bytes(self.instance_filled_nested) - - def time_deserialize_nested(self): - """Time deserialize a nested message.""" - TestNestedMessage().parse(self.instance_filled_nested_bytes) - - def time_serialize_repeated(self): - """Time serializing a repeated message to wire.""" - bytes(self.instance_filled_repeated) - - def time_deserialize_repeated(self): - """Time deserialize a repeated message.""" - TestRepeatedMessage().parse(self.instance_filled_repeated_bytes) - - -class MemSuite: - def setup(self): - self.cls = TestMessage - - def mem_instance(self): - return self.cls() diff --git a/docs/api.md b/docs/api.md deleted file mode 100644 index 581836aa..00000000 --- a/docs/api.md +++ /dev/null @@ -1,18 +0,0 @@ -API reference -============= - -The following document outlines betterproto's api. These classes should not be extended manually. - - -## Message - -::: betterproto.Message - -::: betterproto.which_one_of - - -## Enumerations - -::: betterproto.Enum - -::: betterproto.Casing diff --git a/docs/migrating.md b/docs/migrating.md deleted file mode 100644 index c119fcac..00000000 --- a/docs/migrating.md +++ /dev/null @@ -1,123 +0,0 @@ -Migration Guide -=============== - -## Google's protocolbuffers - -betterproto has a mostly 1 to 1 drop in replacement for Google's protocolbuffers (after -regenerating your protobufs of course) although there are some minor differences. - -!!! note - - betterproto implements the same basic methods including: - - - `betterproto.Message.FromString` - - `betterproto.Message.SerializeToString` - - for compatibility purposes, however it is important to note that these are - effectively aliases for `betterproto.Message.parse` and - `betterproto.Message.__bytes__` respectively. - - -## One-of Support - -Protobuf supports grouping fields in a oneof clause. Only one of the fields in the group -may be set at a given time. For example, given the proto: - -```proto -syntax = "proto3"; - -message Test { - oneof foo { - bool on = 1; - int32 count = 2; - string name = 3; - } -} -``` - -You can use `betterproto.which_one_of(message, group_name)` to determine which of the -fields was set. It returns a tuple of the field name and value, or a blank string and -`None` if unset. 
Again this is a little different than the official Google code -generator: - -```python -# Old way (official Google protobuf package) ->>> message.WhichOneof("group") -"foo" - -# New way (this project) ->>> betterproto.which_one_of(message, "group") -("foo", "foo's value") -``` - - -## Well-Known Google Types - -Google provides several well-known message types like a timestamp, duration, and several -wrappers used to provide optional zero value support. Each of these has a special JSON -representation and is handled a little differently from normal messages. The Python -mapping for these is as follows: - -| Google Message | Python Type | Default | -|-------------------------------|------------------------------------------------|--------------------------| -| `google.protobuf.duration` | `datetime.timedelta` | `0` | -| `google.protobuf.timestamp` | Timezone-aware `datetime.datetime` | `1970-01-01T00:00:00Z` | -| `google.protobuf.*Value` | `Optional[...]` / `None` | `None` | -| `google.protobuf.*` | `betterproto.lib.std.google.protobuf.*` | `None` | -| `google.protobuf.*` | `betterproto.lib.pydantic.google.protobuf.*` | `None` | - -For the wrapper types, the Python type corresponds to the wrapped type, e.g. -``google.protobuf.BoolValue`` becomes ``Optional[bool]`` while -``google.protobuf.Int32Value`` becomes ``Optional[int]``. All of the optional values -default to None, so don't forget to check for that possible state. - -Given: - -```proto -syntax = "proto3"; - -import "google/protobuf/duration.proto"; -import "google/protobuf/timestamp.proto"; -import "google/protobuf/wrappers.proto"; - -message Test { - google.protobuf.BoolValue maybe = 1; - google.protobuf.Timestamp ts = 2; - google.protobuf.Duration duration = 3; -} -``` - -You can use it as such: - -```python ->>> t = Test().from_dict({"maybe": True, "ts": "2019-01-01T12:00:00Z", "duration": "1.200s"}) ->>> t -Test(maybe=True, ts=datetime.datetime(2019, 1, 1, 12, 0, tzinfo=datetime.timezone.utc), duration=datetime.timedelta(seconds=1, microseconds=200000)) - ->>> t.ts - t.duration -datetime.datetime(2019, 1, 1, 11, 59, 58, 800000, tzinfo=datetime.timezone.utc) - ->>> t.ts.isoformat() -'2019-01-01T12:00:00+00:00' - ->>> t.maybe = None ->>> t.to_dict() -{'ts': '2019-01-01T12:00:00Z', 'duration': '1.200s'} -``` - - -## [1.2.5] to [2.0.0b1] - -### Updated package structures - -Generated code now strictly follows the *package structure* of the `.proto` files. -Consequently `.proto` files without a package will be combined in a single -`__init__.py` file. To avoid overwriting existing `__init__.py` files, its best -to compile into a dedicated subdirectory. - -Upgrading: - -- Remove your previously compiled `.py` files. -- Create a new *empty* directory, e.g. `generated` or `lib/generated/proto` etc. -- Regenerate your python files into this directory -- Update import statements, e.g. `import ExampleMessage from generated` diff --git a/docs/quick-start.md b/docs/quick-start.md deleted file mode 100644 index c7f71908..00000000 --- a/docs/quick-start.md +++ /dev/null @@ -1,218 +0,0 @@ -Getting Started -=============== - -## Installation - -Installation from PyPI is as simple as running: - -```sh -python3 -m pip install -U betterproto -``` - -If you are using Windows, then the following should be used instead: - -```sh -py -3 -m pip install -U betterproto -``` - -To include the protoc plugin, install `betterproto[compiler]` instead of betterproto, -e.g. 
- -```sh -python3 -m pip install -U "betterproto[compiler]" -``` - -## Compiling proto files - - -Given you installed the compiler and have a proto file, e.g `example.proto`: - -```proto -syntax = "proto3"; - -package hello; - -// Greeting represents a message you can tell a user. -message Greeting { - string message = 1; -} -``` - -To compile the proto you would run the following: - -You can run the following to invoke protoc directly: - -```sh -mkdir hello -protoc -I . --python_betterproto_out=lib example.proto -``` - -or run the following to invoke protoc via grpcio-tools: - -```sh -pip install grpcio-tools -python -m grpc_tools.protoc -I . --python_betterproto_out=lib example.proto -``` - - -This will generate `lib/__init__.py` which looks like: - -```python -# Generated by the protocol buffer compiler. DO NOT EDIT! -# sources: example.proto -# plugin: python-betterproto -from dataclasses import dataclass - -import betterproto - - -@dataclass -class Greeting(betterproto.Message): - """Greeting represents a message you can tell a user.""" - - message: str = betterproto.string_field(1) -``` - -Then to use it: - -```python ->>> from lib import Greeting - ->>> test = Greeting() ->>> test -Greeting(message='') - ->>> test.message = "Hey!" ->>> test -Greeting(message="Hey!") - ->>> bytes(test) -b'\n\x04Hey!' ->>> Greeting().parse(serialized) -Greeting(message="Hey!") -``` - - -## Async gRPC Support - -The generated code includes [grpclib](https://grpclib.readthedocs.io/en/latest) based -stub (client and server) classes for rpc services declared in the input proto files. -It is enabled by default. - - -Given a service definition similar to the one below: - -```proto -syntax = "proto3"; - -package echo; - -message EchoRequest { - string value = 1; - // Number of extra times to echo - uint32 extra_times = 2; -} - -message EchoResponse { - repeated string values = 1; -} - -message EchoStreamResponse { - string value = 1; -} - -service Echo { - rpc Echo(EchoRequest) returns (EchoResponse); - rpc EchoStream(EchoRequest) returns (stream EchoStreamResponse); -} -``` - -The generated client can be used like so: - -```python -import asyncio -from grpclib.client import Channel -import echo - - -async def main(): - channel = Channel(host="127.0.0.1", port=50051) - service = echo.EchoStub(channel) - response = await service.echo(value="hello", extra_times=1) - print(response) - - async for response in service.echo_stream(value="hello", extra_times=1): - print(response) - - # don't forget to close the channel when you're done! - channel.close() - -asyncio.run(main()) - -# outputs -EchoResponse(values=['hello', 'hello']) -EchoStreamResponse(value='hello') -EchoStreamResponse(value='hello') -``` - - -The server-facing stubs can be used to implement a Python -gRPC server. 
-To use them, simply subclass the base class in the generated files and override the -service methods: - -```python -from echo import EchoBase -from grpclib.server import Server -from typing import AsyncIterator - - -class EchoService(EchoBase): - async def echo(self, value: str, extra_times: int) -> "EchoResponse": - return value - - async def echo_stream( - self, value: str, extra_times: int - ) -> AsyncIterator["EchoStreamResponse"]: - for _ in range(extra_times): - yield value - - -async def start_server(): - HOST = "127.0.0.1" - PORT = 1337 - server = Server([EchoService()]) - await server.start(HOST, PORT) - await server.serve_forever() -``` - -## JSON - -Message objects include `betterproto.Message.to_json` and -`betterproto.Message.from_json` methods for JSON (de)serialisation, and -`betterproto.Message.to_dict`, `betterproto.Message.from_dict` for -converting back and forth from JSON serializable dicts. - -For compatibility the default is to convert field names to -`betterproto.Casing.CAMEL`. You can control this behavior by passing a -different casing value, e.g: - -```python -@dataclass -class MyMessage(betterproto.Message): - a_long_field_name: str = betterproto.string_field(1) - - ->>> test = MyMessage(a_long_field_name="Hello World!") ->>> test.to_dict(betterproto.Casing.SNAKE) -{"a_long_field_name": "Hello World!"} ->>> test.to_dict(betterproto.Casing.CAMEL) -{"aLongFieldName": "Hello World!"} - ->>> test.to_json(indent=2) -'{\n "aLongFieldName": "Hello World!"\n}' - ->>> test.from_dict({"aLongFieldName": "Goodbye World!"}) ->>> test.a_long_field_name -"Goodbye World!" -``` diff --git a/pyproject.toml b/pyproject.toml index 70aa7e66..90b55ef5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,12 +14,11 @@ packages = [ [tool.poetry.dependencies] python = "^3.8" # The Ruff version is pinned. To update it, also update it in .pre-commit-config.yaml -ruff = { version = "~0.7.4", optional = true } +ruff = "~0.7.4" grpclib = "^0.4.1" -jinja2 = { version = ">=3.0.3", optional = true } +jinja2 = ">=3.0.3" python-dateutil = "^2.8" typing-extensions = "^4.7.1" -betterproto-rust-codec = { version = "0.1.1", optional = true } [tool.poetry.group.dev.dependencies] asv = "^0.4.2" @@ -138,44 +137,6 @@ protoc """ help = "Regenerate the types in betterproto.lib.std.google" -# CI tasks - -[tool.poe.tasks.full-test] -shell = "poe generate && tox" -help = "Run tests with multiple pythons" - -[tool.doc8] -paths = ["docs"] -max_line_length = 88 - -[tool.doc8.ignore_path_errors] -"docs/migrating.rst" = [ - "D001", # contains table which is longer than 88 characters long -] - -[tool.coverage.run] -omit = ["betterproto/tests/*"] - -[tool.tox] -legacy_tox_ini = """ -[tox] -requires = - tox>=4.2 - tox-poetry-installer[poetry]==1.0.0b1 -env_list = - py311 - py38 - py37 - -[testenv] -commands = - pytest {posargs: --cov betterproto} -poetry_dep_groups = - test -require_locked_deps = true -require_poetry = true -""" - [build-system] requires = ["poetry-core>=1.0.0,<2"] build-backend = "poetry.core.masonry.api" diff --git a/tests/README.md b/tests/README.md deleted file mode 100644 index f1ee609c..00000000 --- a/tests/README.md +++ /dev/null @@ -1,91 +0,0 @@ -# Standard Tests Development Guide - -Standard test cases are found in [betterproto/tests/inputs](inputs), where each subdirectory represents a testcase, that is verified in isolation. - -``` -inputs/ - bool/ - double/ - int32/ - ... 
-``` - -## Test case directory structure - -Each testcase has a `.proto` file with a message called `Test`, and optionally a matching `.json` file and a custom test called `test_*.py`. - -```bash -bool/ - bool.proto - bool.json # optional - test_bool.py # optional -``` - -### proto - -`.proto` — *The protobuf message to test* - -```protobuf -syntax = "proto3"; - -message Test { - bool value = 1; -} -``` - -You can add multiple `.proto` files to the test case, as long as one file matches the directory name. - -### json - -`.json` — *Test-data to validate the message with* - -```json -{ - "value": true -} -``` - -### pytest - -`test_.py` — *Custom test to validate specific aspects of the generated class* - -```python -from tests.output_betterproto.bool.bool import Test - -def test_value(): - message = Test() - assert not message.value, "Boolean is False by default" -``` - -## Standard tests - -The following tests are automatically executed for all cases: - -- [x] Can the generated python code be imported? -- [x] Can the generated message class be instantiated? -- [x] Is the generated code compatible with the Google's `grpc_tools.protoc` implementation? - - _when `.json` is present_ - -## Running the tests - -- `pipenv run generate` - This generates: - - `betterproto/tests/output_betterproto` — *the plugin generated python classes* - - `betterproto/tests/output_reference` — *reference implementation classes* -- `pipenv run test` - -## Intentionally Failing tests - -The standard test suite includes tests that fail by intention. These tests document known bugs and missing features that are intended to be corrected in the future. - -When running `pytest`, they show up as `x` or `X` in the test results. - -``` -betterproto/tests/test_inputs.py ..x...x..x...x.X........xx........x.....x.......x.xx....x...................... [ 84%] -``` - -- `.` — PASSED -- `x` — XFAIL: expected failure -- `X` — XPASS: expected failure, but still passed - -Test cases marked for expected failure are declared in [inputs/config.py](inputs/config.py) \ No newline at end of file diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/conftest.py b/tests/conftest.py deleted file mode 100644 index 34c044e0..00000000 --- a/tests/conftest.py +++ /dev/null @@ -1,11 +0,0 @@ -import copy -import sys - -import pytest - - -@pytest.fixture -def reset_sys_path(): - original = copy.deepcopy(sys.path) - yield - sys.path = original diff --git a/tests/generate.py b/tests/generate.py deleted file mode 100755 index 67dad859..00000000 --- a/tests/generate.py +++ /dev/null @@ -1,172 +0,0 @@ -#!/usr/bin/env python -import asyncio -import os -import shutil -import sys -from pathlib import Path -from typing import Set - -from tests.util import ( - get_directories, - inputs_path, - output_path_betterproto, - output_path_betterproto_pydantic, - output_path_reference, - protoc, -) - -# Force pure-python implementation instead of C++, otherwise imports -# break things because we can't properly reset the symbol database. 
-os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python" - - -def clear_directory(dir_path: Path): - for file_or_directory in dir_path.glob("*"): - if file_or_directory.is_dir(): - shutil.rmtree(file_or_directory) - else: - file_or_directory.unlink() - - -async def generate(whitelist: Set[str], verbose: bool): - test_case_names = set(get_directories(inputs_path)) - {"__pycache__"} - - path_whitelist = set() - name_whitelist = set() - for item in whitelist: - if item in test_case_names: - name_whitelist.add(item) - continue - path_whitelist.add(item) - - generation_tasks = [] - for test_case_name in sorted(test_case_names): - test_case_input_path = inputs_path.joinpath(test_case_name).resolve() - if whitelist and str(test_case_input_path) not in path_whitelist and test_case_name not in name_whitelist: - continue - generation_tasks.append(generate_test_case_output(test_case_input_path, test_case_name, verbose)) - - failed_test_cases = [] - # Wait for all subprocs and match any failures to names to report - for test_case_name, result in zip(sorted(test_case_names), await asyncio.gather(*generation_tasks)): - if result != 0: - failed_test_cases.append(test_case_name) - - if len(failed_test_cases) > 0: - sys.stderr.write("\n\033[31;1;4mFailed to generate the following test cases:\033[0m\n") - for failed_test_case in failed_test_cases: - sys.stderr.write(f"- {failed_test_case}\n") - - sys.exit(1) - - -async def generate_test_case_output(test_case_input_path: Path, test_case_name: str, verbose: bool) -> int: - """ - Returns the max of the subprocess return values - """ - - test_case_output_path_reference = output_path_reference.joinpath(test_case_name) - test_case_output_path_betterproto = output_path_betterproto - test_case_output_path_betterproto_pyd = output_path_betterproto_pydantic - - os.makedirs(test_case_output_path_reference, exist_ok=True) - os.makedirs(test_case_output_path_betterproto, exist_ok=True) - os.makedirs(test_case_output_path_betterproto_pyd, exist_ok=True) - - clear_directory(test_case_output_path_reference) - clear_directory(test_case_output_path_betterproto) - - ( - (ref_out, ref_err, ref_code), - (plg_out, plg_err, plg_code), - (plg_out_pyd, plg_err_pyd, plg_code_pyd), - ) = await asyncio.gather( - protoc(test_case_input_path, test_case_output_path_reference, True), - protoc(test_case_input_path, test_case_output_path_betterproto, False), - protoc(test_case_input_path, test_case_output_path_betterproto_pyd, False, True), - ) - - if ref_code == 0: - print(f"\033[31;1;4mGenerated reference output for {test_case_name!r}\033[0m") - else: - print(f"\033[31;1;4mFailed to generate reference output for {test_case_name!r}\033[0m") - print(ref_err.decode()) - - if verbose: - if ref_out: - print("Reference stdout:") - sys.stdout.buffer.write(ref_out) - sys.stdout.buffer.flush() - - if ref_err: - print("Reference stderr:") - sys.stderr.buffer.write(ref_err) - sys.stderr.buffer.flush() - - if plg_code == 0: - print(f"\033[31;1;4mGenerated plugin output for {test_case_name!r}\033[0m") - else: - print(f"\033[31;1;4mFailed to generate plugin output for {test_case_name!r}\033[0m") - print(plg_err.decode()) - - if verbose: - if plg_out: - print("Plugin stdout:") - sys.stdout.buffer.write(plg_out) - sys.stdout.buffer.flush() - - if plg_err: - print("Plugin stderr:") - sys.stderr.buffer.write(plg_err) - sys.stderr.buffer.flush() - - if plg_code_pyd == 0: - print(f"\033[31;1;4mGenerated plugin (pydantic compatible) output for {test_case_name!r}\033[0m") - else: - 
print(f"\033[31;1;4mFailed to generate plugin (pydantic compatible) output for {test_case_name!r}\033[0m") - print(plg_err_pyd.decode()) - - if verbose: - if plg_out_pyd: - print("Plugin stdout:") - sys.stdout.buffer.write(plg_out_pyd) - sys.stdout.buffer.flush() - - if plg_err_pyd: - print("Plugin stderr:") - sys.stderr.buffer.write(plg_err_pyd) - sys.stderr.buffer.flush() - - return max(ref_code, plg_code, plg_code_pyd) - - -HELP = "\n".join( - ( - "Usage: python generate.py [-h] [-v] [DIRECTORIES or NAMES]", - "Generate python classes for standard tests.", - "", - "DIRECTORIES One or more relative or absolute directories of test-cases to generate" "classes for.", - " python generate.py inputs/bool inputs/double inputs/enum", - "", - "NAMES One or more test-case names to generate classes for.", - " python generate.py bool double enums", - ) -) - - -def main(): - if set(sys.argv).intersection({"-h", "--help"}): - print(HELP) - return - if sys.argv[1:2] == ["-v"]: - verbose = True - whitelist = set(sys.argv[2:]) - else: - verbose = False - whitelist = set(sys.argv[1:]) - - asyncio.run(generate(whitelist, verbose)) - - -if __name__ == "__main__": - main() diff --git a/tests/grpc/__init__.py b/tests/grpc/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/grpc/test_grpclib_client.py b/tests/grpc/test_grpclib_client.py deleted file mode 100644 index 2b6a7ad5..00000000 --- a/tests/grpc/test_grpclib_client.py +++ /dev/null @@ -1,271 +0,0 @@ -import asyncio -import uuid - -import grpclib -import grpclib.client -import grpclib.metadata -import grpclib.server -import pytest -from grpclib.testing import ChannelFor - -from betterproto.grpc.util.async_channel import AsyncChannel -from tests.output_betterproto.service import ( - DoThingRequest, - DoThingResponse, - GetThingRequest, - TestStub as ThingServiceClient, -) - -from .thing_service import ThingService - - -async def _test_client(client: ThingServiceClient, name="clean room", **kwargs): - response = await client.do_thing(DoThingRequest(name=name), **kwargs) - assert response.names == [name] - - -def _assert_request_meta_received(deadline, metadata): - def server_side_test(stream): - assert stream.deadline._timestamp == pytest.approx( - deadline._timestamp, 1 - ), "The provided deadline should be received serverside" - assert ( - stream.metadata["authorization"] == metadata["authorization"] - ), "The provided authorization metadata should be received serverside" - - return server_side_test - - -@pytest.fixture -def handler_trailer_only_unauthenticated(): - async def handler(stream: grpclib.server.Stream): - await stream.recv_message() - await stream.send_initial_metadata() - await stream.send_trailing_metadata(status=grpclib.Status.UNAUTHENTICATED) - - return handler - - -@pytest.mark.asyncio -async def test_simple_service_call(): - async with ChannelFor([ThingService()]) as channel: - await _test_client(ThingServiceClient(channel)) - - -@pytest.mark.asyncio -async def test_trailer_only_error_unary_unary(mocker, handler_trailer_only_unauthenticated): - service = ThingService() - mocker.patch.object( - service, - "do_thing", - side_effect=handler_trailer_only_unauthenticated, - autospec=True, - ) - async with ChannelFor([service]) as channel: - with pytest.raises(grpclib.exceptions.GRPCError) as e: - await ThingServiceClient(channel).do_thing(DoThingRequest(name="something")) - assert e.value.status == grpclib.Status.UNAUTHENTICATED - - -@pytest.mark.asyncio -async def test_trailer_only_error_stream_unary(mocker, 
handler_trailer_only_unauthenticated): - service = ThingService() - mocker.patch.object( - service, - "do_many_things", - side_effect=handler_trailer_only_unauthenticated, - autospec=True, - ) - async with ChannelFor([service]) as channel: - with pytest.raises(grpclib.exceptions.GRPCError) as e: - await ThingServiceClient(channel).do_many_things( - do_thing_request_iterator=[DoThingRequest(name="something")] - ) - await _test_client(ThingServiceClient(channel)) - assert e.value.status == grpclib.Status.UNAUTHENTICATED - - -@pytest.mark.asyncio -async def test_service_call_mutable_defaults(mocker): - async with ChannelFor([ThingService()]) as channel: - client = ThingServiceClient(channel) - spy = mocker.spy(client, "_unary_unary") - await _test_client(client) - comments = spy.call_args_list[-1].args[1].comments - await _test_client(client) - assert spy.call_args_list[-1].args[1].comments is not comments - - -@pytest.mark.asyncio -async def test_service_call_with_upfront_request_params(): - # Setting deadline - deadline = grpclib.metadata.Deadline.from_timeout(22) - metadata = {"authorization": "12345"} - async with ChannelFor([ThingService(test_hook=_assert_request_meta_received(deadline, metadata))]) as channel: - await _test_client(ThingServiceClient(channel, deadline=deadline, metadata=metadata)) - - # Setting timeout - timeout = 99 - deadline = grpclib.metadata.Deadline.from_timeout(timeout) - metadata = {"authorization": "12345"} - async with ChannelFor([ThingService(test_hook=_assert_request_meta_received(deadline, metadata))]) as channel: - await _test_client(ThingServiceClient(channel, timeout=timeout, metadata=metadata)) - - -@pytest.mark.asyncio -async def test_service_call_lower_level_with_overrides(): - THING_TO_DO = "get milk" - - # Setting deadline - deadline = grpclib.metadata.Deadline.from_timeout(22) - metadata = {"authorization": "12345"} - kwarg_deadline = grpclib.metadata.Deadline.from_timeout(28) - kwarg_metadata = {"authorization": "12345"} - async with ChannelFor([ThingService(test_hook=_assert_request_meta_received(deadline, metadata))]) as channel: - client = ThingServiceClient(channel, deadline=deadline, metadata=metadata) - response = await client._unary_unary( - "/service.Test/DoThing", - DoThingRequest(THING_TO_DO), - DoThingResponse, - deadline=kwarg_deadline, - metadata=kwarg_metadata, - ) - assert response.names == [THING_TO_DO] - - # Setting timeout - timeout = 99 - deadline = grpclib.metadata.Deadline.from_timeout(timeout) - metadata = {"authorization": "12345"} - kwarg_timeout = 9000 - kwarg_deadline = grpclib.metadata.Deadline.from_timeout(kwarg_timeout) - kwarg_metadata = {"authorization": "09876"} - async with ChannelFor( - [ - ThingService( - test_hook=_assert_request_meta_received(kwarg_deadline, kwarg_metadata), - ) - ] - ) as channel: - client = ThingServiceClient(channel, deadline=deadline, metadata=metadata) - response = await client._unary_unary( - "/service.Test/DoThing", - DoThingRequest(THING_TO_DO), - DoThingResponse, - timeout=kwarg_timeout, - metadata=kwarg_metadata, - ) - assert response.names == [THING_TO_DO] - - -@pytest.mark.asyncio -@pytest.mark.parametrize( - ("overrides_gen",), - [ - (lambda: dict(timeout=10),), - (lambda: dict(deadline=grpclib.metadata.Deadline.from_timeout(10)),), - (lambda: dict(metadata={"authorization": str(uuid.uuid4())}),), - (lambda: dict(timeout=20, metadata={"authorization": str(uuid.uuid4())}),), - ], -) -async def test_service_call_high_level_with_overrides(mocker, overrides_gen): - overrides = 
overrides_gen() - request_spy = mocker.spy(grpclib.client.Channel, "request") - name = str(uuid.uuid4()) - defaults = dict( - timeout=99, - deadline=grpclib.metadata.Deadline.from_timeout(99), - metadata={"authorization": name}, - ) - - async with ChannelFor( - [ - ThingService( - test_hook=_assert_request_meta_received( - deadline=grpclib.metadata.Deadline.from_timeout(overrides.get("timeout", 99)), - metadata=overrides.get("metadata", defaults.get("metadata")), - ) - ) - ] - ) as channel: - client = ThingServiceClient(channel, **defaults) - await _test_client(client, name=name, **overrides) - assert request_spy.call_count == 1 - - request_spy_call_kwargs = request_spy.call_args.kwargs - - # ensure all overrides were successful - for key, value in overrides.items(): - assert key in request_spy_call_kwargs - assert request_spy_call_kwargs[key] == value - - # ensure default values were retained - for key in set(defaults.keys()) - set(overrides.keys()): - assert key in request_spy_call_kwargs - assert request_spy_call_kwargs[key] == defaults[key] - - -@pytest.mark.asyncio -async def test_async_gen_for_unary_stream_request(): - thing_name = "my milkshakes" - - async with ChannelFor([ThingService()]) as channel: - client = ThingServiceClient(channel) - expected_versions = [5, 4, 3, 2, 1] - async for response in client.get_thing_versions(GetThingRequest(name=thing_name)): - assert response.name == thing_name - assert response.version == expected_versions.pop() - - -@pytest.mark.asyncio -async def test_async_gen_for_stream_stream_request(): - some_things = ["cake", "cricket", "coral reef"] - more_things = ["ball", "that", "56kmodem", "liberal humanism", "cheesesticks"] - expected_things = (*some_things, *more_things) - - async with ChannelFor([ThingService()]) as channel: - client = ThingServiceClient(channel) - # Use an AsyncChannel to decouple sending and recieving, it'll send some_things - # immediately and we'll use it to send more_things later, after recieving some - # results - request_chan = AsyncChannel() - send_initial_requests = asyncio.ensure_future( - request_chan.send_from(GetThingRequest(name) for name in some_things) - ) - response_index = 0 - async for response in client.get_different_things(request_chan): - assert response.name == expected_things[response_index] - assert response.version == response_index + 1 - response_index += 1 - if more_things: - # Send some more requests as we receive responses to be sure coordination of - # send/receive events doesn't matter - await request_chan.send(GetThingRequest(more_things.pop(0))) - elif not send_initial_requests.done(): - # Make sure the sending task it completed - await send_initial_requests - else: - # No more things to send make sure channel is closed - request_chan.close() - assert response_index == len(expected_things), "Didn't receive all expected responses" - - -@pytest.mark.asyncio -async def test_stream_unary_with_empty_iterable(): - things = [] # empty - - async with ChannelFor([ThingService()]) as channel: - client = ThingServiceClient(channel) - requests = [DoThingRequest(name) for name in things] - response = await client.do_many_things(requests) - assert len(response.names) == 0 - - -@pytest.mark.asyncio -async def test_stream_stream_with_empty_iterable(): - things = [] # empty - - async with ChannelFor([ThingService()]) as channel: - client = ThingServiceClient(channel) - requests = [GetThingRequest(name) for name in things] - responses = [response async for response in client.get_different_things(requests)] - assert 
len(responses) == 0 diff --git a/tests/grpc/test_stream_stream.py b/tests/grpc/test_stream_stream.py deleted file mode 100644 index 9ce95b5f..00000000 --- a/tests/grpc/test_stream_stream.py +++ /dev/null @@ -1,89 +0,0 @@ -import asyncio -from dataclasses import dataclass -from typing import AsyncIterator - -import pytest - -import betterproto -from betterproto.grpc.util.async_channel import AsyncChannel - - -@dataclass -class Message(betterproto.Message): - body: str = betterproto.string_field(1) - - -@pytest.fixture -def expected_responses(): - return [Message("Hello world 1"), Message("Hello world 2"), Message("Done")] - - -class ClientStub: - async def connect(self, requests: AsyncIterator): - await asyncio.sleep(0.1) - async for request in requests: - await asyncio.sleep(0.1) - yield request - await asyncio.sleep(0.1) - yield Message("Done") - - -async def to_list(generator: AsyncIterator): - return [value async for value in generator] - - -@pytest.fixture -def client(): - # channel = Channel(host='127.0.0.1', port=50051) - # return ClientStub(channel) - return ClientStub() - - -@pytest.mark.asyncio -async def test_send_from_before_connect_and_close_automatically(client, expected_responses): - requests = AsyncChannel() - await requests.send_from([Message(body="Hello world 1"), Message(body="Hello world 2")], close=True) - responses = client.connect(requests) - - assert await to_list(responses) == expected_responses - - -@pytest.mark.asyncio -async def test_send_from_after_connect_and_close_automatically(client, expected_responses): - requests = AsyncChannel() - responses = client.connect(requests) - await requests.send_from([Message(body="Hello world 1"), Message(body="Hello world 2")], close=True) - - assert await to_list(responses) == expected_responses - - -@pytest.mark.asyncio -async def test_send_from_close_manually_immediately(client, expected_responses): - requests = AsyncChannel() - responses = client.connect(requests) - await requests.send_from([Message(body="Hello world 1"), Message(body="Hello world 2")], close=False) - requests.close() - - assert await to_list(responses) == expected_responses - - -@pytest.mark.asyncio -async def test_send_individually_and_close_before_connect(client, expected_responses): - requests = AsyncChannel() - await requests.send(Message(body="Hello world 1")) - await requests.send(Message(body="Hello world 2")) - requests.close() - responses = client.connect(requests) - - assert await to_list(responses) == expected_responses - - -@pytest.mark.asyncio -async def test_send_individually_and_close_after_connect(client, expected_responses): - requests = AsyncChannel() - await requests.send(Message(body="Hello world 1")) - await requests.send(Message(body="Hello world 2")) - responses = client.connect(requests) - requests.close() - - assert await to_list(responses) == expected_responses diff --git a/tests/grpc/thing_service.py b/tests/grpc/thing_service.py deleted file mode 100644 index 8693e628..00000000 --- a/tests/grpc/thing_service.py +++ /dev/null @@ -1,73 +0,0 @@ -from typing import Dict - -import grpclib -import grpclib.server - -from tests.output_betterproto.service import ( - DoThingRequest, - DoThingResponse, - GetThingRequest, - GetThingResponse, -) - - -class ThingService: - def __init__(self, test_hook=None): - # This lets us pass assertions to the servicer ;) - self.test_hook = test_hook - - async def do_thing(self, stream: "grpclib.server.Stream[DoThingRequest, DoThingResponse]"): - request = await stream.recv_message() - if self.test_hook 
is not None: - self.test_hook(stream) - await stream.send_message(DoThingResponse([request.name])) - - async def do_many_things(self, stream: "grpclib.server.Stream[DoThingRequest, DoThingResponse]"): - thing_names = [request.name async for request in stream] - if self.test_hook is not None: - self.test_hook(stream) - await stream.send_message(DoThingResponse(thing_names)) - - async def get_thing_versions(self, stream: "grpclib.server.Stream[GetThingRequest, GetThingResponse]"): - request = await stream.recv_message() - if self.test_hook is not None: - self.test_hook(stream) - for version_num in range(1, 6): - await stream.send_message(GetThingResponse(name=request.name, version=version_num)) - - async def get_different_things(self, stream: "grpclib.server.Stream[GetThingRequest, GetThingResponse]"): - if self.test_hook is not None: - self.test_hook(stream) - # Respond to each input item immediately - response_num = 0 - async for request in stream: - response_num += 1 - await stream.send_message(GetThingResponse(name=request.name, version=response_num)) - - def __mapping__(self) -> Dict[str, "grpclib.const.Handler"]: - return { - "/service.Test/DoThing": grpclib.const.Handler( - self.do_thing, - grpclib.const.Cardinality.UNARY_UNARY, - DoThingRequest, - DoThingResponse, - ), - "/service.Test/DoManyThings": grpclib.const.Handler( - self.do_many_things, - grpclib.const.Cardinality.STREAM_UNARY, - DoThingRequest, - DoThingResponse, - ), - "/service.Test/GetThingVersions": grpclib.const.Handler( - self.get_thing_versions, - grpclib.const.Cardinality.UNARY_STREAM, - GetThingRequest, - GetThingResponse, - ), - "/service.Test/GetDifferentThings": grpclib.const.Handler( - self.get_different_things, - grpclib.const.Cardinality.STREAM_STREAM, - GetThingRequest, - GetThingResponse, - ), - } diff --git a/tests/inputs/bool/bool.json b/tests/inputs/bool/bool.json deleted file mode 100644 index 348e0319..00000000 --- a/tests/inputs/bool/bool.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "value": true -} diff --git a/tests/inputs/bool/bool.proto b/tests/inputs/bool/bool.proto deleted file mode 100644 index 77836b8e..00000000 --- a/tests/inputs/bool/bool.proto +++ /dev/null @@ -1,7 +0,0 @@ -syntax = "proto3"; - -package bool; - -message Test { - bool value = 1; -} diff --git a/tests/inputs/bool/test_bool.py b/tests/inputs/bool/test_bool.py deleted file mode 100644 index 6b0ad0be..00000000 --- a/tests/inputs/bool/test_bool.py +++ /dev/null @@ -1,24 +0,0 @@ -import pytest - -from tests.output_betterproto.bool import Test -from tests.output_betterproto_pydantic.bool import Test as TestPyd - - -def test_value(): - message = Test() - assert not message.value, "Boolean is False by default" - - -def test_pydantic_no_value(): - message = TestPyd() - assert not message.value, "Boolean is False by default" - - -def test_pydantic_value(): - message = TestPyd(value=False) - assert not message.value - - -def test_pydantic_bad_value(): - with pytest.raises(ValueError): - TestPyd(value=123) diff --git a/tests/inputs/bytes/bytes.json b/tests/inputs/bytes/bytes.json deleted file mode 100644 index 34c4554c..00000000 --- a/tests/inputs/bytes/bytes.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "data": "SGVsbG8sIFdvcmxkIQ==" -} diff --git a/tests/inputs/bytes/bytes.proto b/tests/inputs/bytes/bytes.proto deleted file mode 100644 index 98954685..00000000 --- a/tests/inputs/bytes/bytes.proto +++ /dev/null @@ -1,7 +0,0 @@ -syntax = "proto3"; - -package bytes; - -message Test { - bytes data = 1; -} diff --git a/tests/inputs/casing/casing.json 
b/tests/inputs/casing/casing.json deleted file mode 100644 index 559104b1..00000000 --- a/tests/inputs/casing/casing.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "camelCase": 1, - "snakeCase": "ONE" -} diff --git a/tests/inputs/casing/casing.proto b/tests/inputs/casing/casing.proto deleted file mode 100644 index 2023d934..00000000 --- a/tests/inputs/casing/casing.proto +++ /dev/null @@ -1,20 +0,0 @@ -syntax = "proto3"; - -package casing; - -enum my_enum { - ZERO = 0; - ONE = 1; - TWO = 2; -} - -message Test { - int32 camelCase = 1; - my_enum snake_case = 2; - snake_case_message snake_case_message = 3; - int32 UPPERCASE = 4; -} - -message snake_case_message { - -} \ No newline at end of file diff --git a/tests/inputs/casing/test_casing.py b/tests/inputs/casing/test_casing.py deleted file mode 100644 index feee009a..00000000 --- a/tests/inputs/casing/test_casing.py +++ /dev/null @@ -1,17 +0,0 @@ -import tests.output_betterproto.casing as casing -from tests.output_betterproto.casing import Test - - -def test_message_attributes(): - message = Test() - assert hasattr(message, "snake_case_message"), "snake_case field name is same in python" - assert hasattr(message, "camel_case"), "CamelCase field is snake_case in python" - assert hasattr(message, "uppercase"), "UPPERCASE field is lowercase in python" - - -def test_message_casing(): - assert hasattr(casing, "SnakeCaseMessage"), "snake_case Message name is converted to CamelCase in python" - - -def test_enum_casing(): - assert hasattr(casing, "MyEnum"), "snake_case Enum name is converted to CamelCase in python" diff --git a/tests/inputs/casing_inner_class/casing_inner_class.proto b/tests/inputs/casing_inner_class/casing_inner_class.proto deleted file mode 100644 index 7d231beb..00000000 --- a/tests/inputs/casing_inner_class/casing_inner_class.proto +++ /dev/null @@ -1,11 +0,0 @@ -// https://github.com/danielgtaylor/python-betterproto/issues/344 -syntax = "proto3"; - -package casing_inner_class; - -message Test { - message inner_class { - sint32 old_exp = 1; - } - inner_class inner = 2; -} \ No newline at end of file diff --git a/tests/inputs/casing_inner_class/test_casing_inner_class.py b/tests/inputs/casing_inner_class/test_casing_inner_class.py deleted file mode 100644 index 2560b6c2..00000000 --- a/tests/inputs/casing_inner_class/test_casing_inner_class.py +++ /dev/null @@ -1,10 +0,0 @@ -import tests.output_betterproto.casing_inner_class as casing_inner_class - - -def test_message_casing_inner_class_name(): - assert hasattr(casing_inner_class, "TestInnerClass"), "Inline defined Message is correctly converted to CamelCase" - - -def test_message_casing_inner_class_attributes(): - message = casing_inner_class.Test(inner=casing_inner_class.TestInnerClass()) - assert hasattr(message.inner, "old_exp"), "Inline defined Message attribute is snake_case" diff --git a/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.proto b/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.proto deleted file mode 100644 index c6d42c31..00000000 --- a/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.proto +++ /dev/null @@ -1,9 +0,0 @@ -syntax = "proto3"; - -package casing_message_field_uppercase; - -message Test { - int32 UPPERCASE = 1; - int32 UPPERCASE_V2 = 2; - int32 UPPER_CAMEL_CASE = 3; -} \ No newline at end of file diff --git a/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.py b/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.py deleted 
file mode 100644 index 6dc69256..00000000 --- a/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.py +++ /dev/null @@ -1,8 +0,0 @@ -from tests.output_betterproto.casing_message_field_uppercase import Test - - -def test_message_casing(): - message = Test() - assert hasattr(message, "uppercase"), "UPPERCASE attribute is converted to 'uppercase' in python" - assert hasattr(message, "uppercase_v2"), "UPPERCASE_V2 attribute is converted to 'uppercase_v2' in python" - assert hasattr(message, "upper_camel_case"), "UPPER_CAMEL_CASE attribute is converted to upper_camel_case in python" diff --git a/tests/inputs/config.py b/tests/inputs/config.py deleted file mode 100644 index 4fb1565f..00000000 --- a/tests/inputs/config.py +++ /dev/null @@ -1,29 +0,0 @@ -# Test cases that are expected to fail, e.g. unimplemented features or bug-fixes. -# Remove from list when fixed. -xfail = { - "namespace_keywords", # 70 - "googletypes_struct", # 9 - "googletypes_value", # 9 - "example", # This is the example in the readme. Not a test. -} - -services = { - "googletypes_request", - "googletypes_response", - "googletypes_response_embedded", - "service", - "service_separate_packages", - "import_service_input_message", - "googletypes_service_returns_empty", - "googletypes_service_returns_googletype", - "example_service", - "empty_service", - "service_uppercase", -} - - -# Indicate json sample messages to skip when testing that json (de)serialization -# is symmetrical becuase some cases legitimately are not symmetrical. -# Each key references the name of the test scenario and the values in the tuple -# Are the names of the json files. -non_symmetrical_json = {"empty_repeated": ("empty_repeated",)} diff --git a/tests/inputs/deprecated/deprecated.json b/tests/inputs/deprecated/deprecated.json deleted file mode 100644 index 43b2b65a..00000000 --- a/tests/inputs/deprecated/deprecated.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "message": { - "value": "hello" - }, - "value": 10 -} diff --git a/tests/inputs/deprecated/deprecated.proto b/tests/inputs/deprecated/deprecated.proto deleted file mode 100644 index f504d03a..00000000 --- a/tests/inputs/deprecated/deprecated.proto +++ /dev/null @@ -1,21 +0,0 @@ -syntax = "proto3"; - -package deprecated; - -// Some documentation about the Test message. 
-message Test { - Message message = 1 [deprecated=true]; - int32 value = 2; -} - -message Message { - option deprecated = true; - string value = 1; -} - -message Empty {} - -service TestService { - rpc func(Empty) returns (Empty); - rpc deprecated_func(Empty) returns (Empty) { option deprecated = true; }; -} diff --git a/tests/inputs/documentation/documentation.proto b/tests/inputs/documentation/documentation.proto deleted file mode 100644 index 7fc6c83b..00000000 --- a/tests/inputs/documentation/documentation.proto +++ /dev/null @@ -1,44 +0,0 @@ -syntax = "proto3"; -package documentation; - -// Documentation of message 1 -// other line 1 - -// Documentation of message 2 -// other line 2 -message Test { // Documentation of message 3 - // Documentation of field 1 - // other line 1 - - // Documentation of field 2 - // other line 2 - uint32 x = 1; // Documentation of field 3 -} - -// Documentation of enum 1 -// other line 1 - -// Documentation of enum 2 -// other line 2 -enum Enum { // Documentation of enum 3 - // Documentation of variant 1 - // other line 1 - - // Documentation of variant 2 - // other line 2 - Enum_Variant = 0; // Documentation of variant 3 -} - -// Documentation of service 1 -// other line 1 - -// Documentation of service 2 -// other line 2 -service Service { // Documentation of service 3 - // Documentation of method 1 - // other line 1 - - // Documentation of method 2 - // other line 2 - rpc get(Test) returns (Test); // Documentation of method 3 -} diff --git a/tests/inputs/double/double-negative.json b/tests/inputs/double/double-negative.json deleted file mode 100644 index e0776c73..00000000 --- a/tests/inputs/double/double-negative.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "count": -123.45 -} diff --git a/tests/inputs/double/double.json b/tests/inputs/double/double.json deleted file mode 100644 index 321412e5..00000000 --- a/tests/inputs/double/double.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "count": 123.45 -} diff --git a/tests/inputs/double/double.proto b/tests/inputs/double/double.proto deleted file mode 100644 index 66aea95d..00000000 --- a/tests/inputs/double/double.proto +++ /dev/null @@ -1,7 +0,0 @@ -syntax = "proto3"; - -package double; - -message Test { - double count = 1; -} diff --git a/tests/inputs/empty_repeated/empty_repeated.json b/tests/inputs/empty_repeated/empty_repeated.json deleted file mode 100644 index 12a801c6..00000000 --- a/tests/inputs/empty_repeated/empty_repeated.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "msg": [{"values":[]}] -} diff --git a/tests/inputs/empty_repeated/empty_repeated.proto b/tests/inputs/empty_repeated/empty_repeated.proto deleted file mode 100644 index f787301f..00000000 --- a/tests/inputs/empty_repeated/empty_repeated.proto +++ /dev/null @@ -1,11 +0,0 @@ -syntax = "proto3"; - -package empty_repeated; - -message MessageA { - repeated float values = 1; -} - -message Test { - repeated MessageA msg = 1; -} diff --git a/tests/inputs/empty_service/empty_service.proto b/tests/inputs/empty_service/empty_service.proto deleted file mode 100644 index e96ff649..00000000 --- a/tests/inputs/empty_service/empty_service.proto +++ /dev/null @@ -1,7 +0,0 @@ -/* Empty service without comments */ -syntax = "proto3"; - -package empty_service; - -service Test { -} diff --git a/tests/inputs/entry/entry.proto b/tests/inputs/entry/entry.proto deleted file mode 100644 index 3f2af4d1..00000000 --- a/tests/inputs/entry/entry.proto +++ /dev/null @@ -1,20 +0,0 @@ -syntax = "proto3"; - -package entry; - -// This is a minimal example of a repeated message 
field that caused issues when -// checking whether a message is a map. -// -// During the check wheter a field is a "map", the string "entry" is added to -// the field name, checked against the type name and then further checks are -// made against the nested type of a parent message. In this edge-case, the -// first check would pass even though it shouldn't and that would cause an -// error because the parent type does not have a "nested_type" attribute. - -message Test { - repeated ExportEntry export = 1; -} - -message ExportEntry { - string name = 1; -} diff --git a/tests/inputs/enum/enum.json b/tests/inputs/enum/enum.json deleted file mode 100644 index d68f1c50..00000000 --- a/tests/inputs/enum/enum.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "choice": "FOUR", - "choices": [ - "ZERO", - "ONE", - "THREE", - "FOUR" - ] -} diff --git a/tests/inputs/enum/enum.proto b/tests/inputs/enum/enum.proto deleted file mode 100644 index 5e2e80c1..00000000 --- a/tests/inputs/enum/enum.proto +++ /dev/null @@ -1,25 +0,0 @@ -syntax = "proto3"; - -package enum; - -// Tests that enums are correctly serialized and that it correctly handles skipped and out-of-order enum values -message Test { - Choice choice = 1; - repeated Choice choices = 2; -} - -enum Choice { - ZERO = 0; - ONE = 1; - // TWO = 2; - FOUR = 4; - THREE = 3; -} - -// A "C" like enum with the enum name prefixed onto members, these should be stripped -enum ArithmeticOperator { - ARITHMETIC_OPERATOR_NONE = 0; - ARITHMETIC_OPERATOR_PLUS = 1; - ARITHMETIC_OPERATOR_MINUS = 2; - ARITHMETIC_OPERATOR_0_PREFIXED = 3; -} diff --git a/tests/inputs/enum/test_enum.py b/tests/inputs/enum/test_enum.py deleted file mode 100644 index 20c9a4d5..00000000 --- a/tests/inputs/enum/test_enum.py +++ /dev/null @@ -1,107 +0,0 @@ -from tests.output_betterproto.enum import ( - ArithmeticOperator, - Choice, - Test, -) - - -def test_enum_set_and_get(): - assert Test(choice=Choice.ZERO).choice == Choice.ZERO - assert Test(choice=Choice.ONE).choice == Choice.ONE - assert Test(choice=Choice.THREE).choice == Choice.THREE - assert Test(choice=Choice.FOUR).choice == Choice.FOUR - - -def test_enum_set_with_int(): - assert Test(choice=0).choice == Choice.ZERO - assert Test(choice=1).choice == Choice.ONE - assert Test(choice=3).choice == Choice.THREE - assert Test(choice=4).choice == Choice.FOUR - - -def test_enum_is_comparable_with_int(): - assert Test(choice=Choice.ZERO).choice == 0 - assert Test(choice=Choice.ONE).choice == 1 - assert Test(choice=Choice.THREE).choice == 3 - assert Test(choice=Choice.FOUR).choice == 4 - - -def test_enum_to_dict(): - assert "choice" not in Test(choice=Choice.ZERO).to_dict(), "Default enum value is not serialized" - assert Test(choice=Choice.ZERO).to_dict(include_default_values=True)["choice"] == "ZERO" - assert Test(choice=Choice.ONE).to_dict()["choice"] == "ONE" - assert Test(choice=Choice.THREE).to_dict()["choice"] == "THREE" - assert Test(choice=Choice.FOUR).to_dict()["choice"] == "FOUR" - - -def test_repeated_enum_is_comparable_with_int(): - assert Test(choices=[Choice.ZERO]).choices == [0] - assert Test(choices=[Choice.ONE]).choices == [1] - assert Test(choices=[Choice.THREE]).choices == [3] - assert Test(choices=[Choice.FOUR]).choices == [4] - - -def test_repeated_enum_set_and_get(): - assert Test(choices=[Choice.ZERO]).choices == [Choice.ZERO] - assert Test(choices=[Choice.ONE]).choices == [Choice.ONE] - assert Test(choices=[Choice.THREE]).choices == [Choice.THREE] - assert Test(choices=[Choice.FOUR]).choices == [Choice.FOUR] - - -def 
test_repeated_enum_to_dict(): - assert Test(choices=[Choice.ZERO]).to_dict()["choices"] == ["ZERO"] - assert Test(choices=[Choice.ONE]).to_dict()["choices"] == ["ONE"] - assert Test(choices=[Choice.THREE]).to_dict()["choices"] == ["THREE"] - assert Test(choices=[Choice.FOUR]).to_dict()["choices"] == ["FOUR"] - - all_enums_dict = Test(choices=[Choice.ZERO, Choice.ONE, Choice.THREE, Choice.FOUR]).to_dict() - assert (all_enums_dict["choices"]) == ["ZERO", "ONE", "THREE", "FOUR"] - - -def test_repeated_enum_with_single_value_to_dict(): - assert Test(choices=Choice.ONE).to_dict()["choices"] == ["ONE"] - assert Test(choices=1).to_dict()["choices"] == ["ONE"] - - -def test_repeated_enum_with_non_list_iterables_to_dict(): - assert Test(choices=(1, 3)).to_dict()["choices"] == ["ONE", "THREE"] - assert Test(choices=(1, 3)).to_dict()["choices"] == ["ONE", "THREE"] - assert Test(choices=(Choice.ONE, Choice.THREE)).to_dict()["choices"] == [ - "ONE", - "THREE", - ] - - def enum_generator(): - yield Choice.ONE - yield Choice.THREE - - assert Test(choices=enum_generator()).to_dict()["choices"] == ["ONE", "THREE"] - - -def test_enum_mapped_on_parse(): - # test default value - b = Test().parse(bytes(Test())) - assert b.choice.name == Choice.ZERO.name - assert b.choices == [] - - # test non default value - a = Test().parse(bytes(Test(choice=Choice.ONE))) - assert a.choice.name == Choice.ONE.name - assert b.choices == [] - - # test repeated - c = Test().parse(bytes(Test(choices=[Choice.THREE, Choice.FOUR]))) - assert c.choices[0].name == Choice.THREE.name - assert c.choices[1].name == Choice.FOUR.name - - # bonus: defaults after empty init are also mapped - assert Test().choice.name == Choice.ZERO.name - - -def test_renamed_enum_members(): - assert set(ArithmeticOperator.__members__) == { - "NONE", - "PLUS", - "MINUS", - "_0_PREFIXED", - } diff --git a/tests/inputs/example/example.proto b/tests/inputs/example/example.proto deleted file mode 100644 index 56bd3647..00000000 --- a/tests/inputs/example/example.proto +++ /dev/null @@ -1,911 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Author: kenton@google.com (Kenton Varda) -// Based on original Protocol Buffers design by -// Sanjay Ghemawat, Jeff Dean, and others. -// -// The messages in this file describe the definitions found in .proto files. -// A valid .proto file can be translated directly to a FileDescriptorProto -// without any other information (e.g. without reading its imports). - - -syntax = "proto2"; - -package example; - -// package google.protobuf; - -option go_package = "google.golang.org/protobuf/types/descriptorpb"; -option java_package = "com.google.protobuf"; -option java_outer_classname = "DescriptorProtos"; -option csharp_namespace = "Google.Protobuf.Reflection"; -option objc_class_prefix = "GPB"; -option cc_enable_arenas = true; - -// descriptor.proto must be optimized for speed because reflection-based -// algorithms don't work during bootstrapping. -option optimize_for = SPEED; - -// The protocol compiler can output a FileDescriptorSet containing the .proto -// files it parses. -message FileDescriptorSet { - repeated FileDescriptorProto file = 1; -} - -// Describes a complete .proto file. -message FileDescriptorProto { - optional string name = 1; // file name, relative to root of source tree - optional string package = 2; // e.g. "foo", "foo.bar", etc. - - // Names of files imported by this file. - repeated string dependency = 3; - // Indexes of the public imported files in the dependency list above. - repeated int32 public_dependency = 10; - // Indexes of the weak imported files in the dependency list. - // For Google-internal migration only. Do not use. - repeated int32 weak_dependency = 11; - - // All top-level definitions in this file. - repeated DescriptorProto message_type = 4; - repeated EnumDescriptorProto enum_type = 5; - repeated ServiceDescriptorProto service = 6; - repeated FieldDescriptorProto extension = 7; - - optional FileOptions options = 8; - - // This field contains optional information about the original source code. - // You may safely remove this entire field without harming runtime - // functionality of the descriptors -- the information is needed only by - // development tools. - optional SourceCodeInfo source_code_info = 9; - - // The syntax of the proto file. - // The supported values are "proto2" and "proto3". - optional string syntax = 12; -} - -// Describes a message type. -message DescriptorProto { - optional string name = 1; - - repeated FieldDescriptorProto field = 2; - repeated FieldDescriptorProto extension = 6; - - repeated DescriptorProto nested_type = 3; - repeated EnumDescriptorProto enum_type = 4; - - message ExtensionRange { - optional int32 start = 1; // Inclusive. - optional int32 end = 2; // Exclusive. - - optional ExtensionRangeOptions options = 3; - } - repeated ExtensionRange extension_range = 5; - - repeated OneofDescriptorProto oneof_decl = 8; - - optional MessageOptions options = 7; - - // Range of reserved tag numbers. 
Reserved tag numbers may not be used by - // fields or extension ranges in the same message. Reserved ranges may - // not overlap. - message ReservedRange { - optional int32 start = 1; // Inclusive. - optional int32 end = 2; // Exclusive. - } - repeated ReservedRange reserved_range = 9; - // Reserved field names, which may not be used by fields in the same message. - // A given name may only be reserved once. - repeated string reserved_name = 10; -} - -message ExtensionRangeOptions { - // The parser stores options it doesn't recognize here. See above. - repeated UninterpretedOption uninterpreted_option = 999; - - - // Clients can define custom options in extensions of this message. See above. - extensions 1000 to max; -} - -// Describes a field within a message. -message FieldDescriptorProto { - enum Type { - // 0 is reserved for errors. - // Order is weird for historical reasons. - TYPE_DOUBLE = 1; - TYPE_FLOAT = 2; - // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if - // negative values are likely. - TYPE_INT64 = 3; - TYPE_UINT64 = 4; - // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if - // negative values are likely. - TYPE_INT32 = 5; - TYPE_FIXED64 = 6; - TYPE_FIXED32 = 7; - TYPE_BOOL = 8; - TYPE_STRING = 9; - // Tag-delimited aggregate. - // Group type is deprecated and not supported in proto3. However, Proto3 - // implementations should still be able to parse the group wire format and - // treat group fields as unknown fields. - TYPE_GROUP = 10; - TYPE_MESSAGE = 11; // Length-delimited aggregate. - - // New in version 2. - TYPE_BYTES = 12; - TYPE_UINT32 = 13; - TYPE_ENUM = 14; - TYPE_SFIXED32 = 15; - TYPE_SFIXED64 = 16; - TYPE_SINT32 = 17; // Uses ZigZag encoding. - TYPE_SINT64 = 18; // Uses ZigZag encoding. - } - - enum Label { - // 0 is reserved for errors - LABEL_OPTIONAL = 1; - LABEL_REQUIRED = 2; - LABEL_REPEATED = 3; - } - - optional string name = 1; - optional int32 number = 3; - optional Label label = 4; - - // If type_name is set, this need not be set. If both this and type_name - // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. - optional Type type = 5; - - // For message and enum types, this is the name of the type. If the name - // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping - // rules are used to find the type (i.e. first the nested types within this - // message are searched, then within the parent, on up to the root - // namespace). - optional string type_name = 6; - - // For extensions, this is the name of the type being extended. It is - // resolved in the same manner as type_name. - optional string extendee = 2; - - // For numeric types, contains the original text representation of the value. - // For booleans, "true" or "false". - // For strings, contains the default text contents (not escaped in any way). - // For bytes, contains the C escaped value. All bytes >= 128 are escaped. - // TODO(kenton): Base-64 encode? - optional string default_value = 7; - - // If set, gives the index of a oneof in the containing type's oneof_decl - // list. This field is a member of that oneof. - optional int32 oneof_index = 9; - - // JSON name of this field. The value is set by protocol compiler. If the - // user has set a "json_name" option on this field, that option's value - // will be used. Otherwise, it's deduced from the field's name by converting - // it to camelCase. 
- optional string json_name = 10; - - optional FieldOptions options = 8; - - // If true, this is a proto3 "optional". When a proto3 field is optional, it - // tracks presence regardless of field type. - // - // When proto3_optional is true, this field must be belong to a oneof to - // signal to old proto3 clients that presence is tracked for this field. This - // oneof is known as a "synthetic" oneof, and this field must be its sole - // member (each proto3 optional field gets its own synthetic oneof). Synthetic - // oneofs exist in the descriptor only, and do not generate any API. Synthetic - // oneofs must be ordered after all "real" oneofs. - // - // For message fields, proto3_optional doesn't create any semantic change, - // since non-repeated message fields always track presence. However it still - // indicates the semantic detail of whether the user wrote "optional" or not. - // This can be useful for round-tripping the .proto file. For consistency we - // give message fields a synthetic oneof also, even though it is not required - // to track presence. This is especially important because the parser can't - // tell if a field is a message or an enum, so it must always create a - // synthetic oneof. - // - // Proto2 optional fields do not set this flag, because they already indicate - // optional with `LABEL_OPTIONAL`. - optional bool proto3_optional = 17; -} - -// Describes a oneof. -message OneofDescriptorProto { - optional string name = 1; - optional OneofOptions options = 2; -} - -// Describes an enum type. -message EnumDescriptorProto { - optional string name = 1; - - repeated EnumValueDescriptorProto value = 2; - - optional EnumOptions options = 3; - - // Range of reserved numeric values. Reserved values may not be used by - // entries in the same enum. Reserved ranges may not overlap. - // - // Note that this is distinct from DescriptorProto.ReservedRange in that it - // is inclusive such that it can appropriately represent the entire int32 - // domain. - message EnumReservedRange { - optional int32 start = 1; // Inclusive. - optional int32 end = 2; // Inclusive. - } - - // Range of reserved numeric values. Reserved numeric values may not be used - // by enum values in the same enum declaration. Reserved ranges may not - // overlap. - repeated EnumReservedRange reserved_range = 4; - - // Reserved enum value names, which may not be reused. A given name may only - // be reserved once. - repeated string reserved_name = 5; -} - -// Describes a value within an enum. -message EnumValueDescriptorProto { - optional string name = 1; - optional int32 number = 2; - - optional EnumValueOptions options = 3; -} - -// Describes a service. -message ServiceDescriptorProto { - optional string name = 1; - repeated MethodDescriptorProto method = 2; - - optional ServiceOptions options = 3; -} - -// Describes a method of a service. -message MethodDescriptorProto { - optional string name = 1; - - // Input and output type names. These are resolved in the same way as - // FieldDescriptorProto.type_name, but must refer to a message type. 
- optional string input_type = 2; - optional string output_type = 3; - - optional MethodOptions options = 4; - - // Identifies if client streams multiple client messages - optional bool client_streaming = 5 [default = false]; - // Identifies if server streams multiple server messages - optional bool server_streaming = 6 [default = false]; -} - - -// =================================================================== -// Options - -// Each of the definitions above may have "options" attached. These are -// just annotations which may cause code to be generated slightly differently -// or may contain hints for code that manipulates protocol messages. -// -// Clients may define custom options as extensions of the *Options messages. -// These extensions may not yet be known at parsing time, so the parser cannot -// store the values in them. Instead it stores them in a field in the *Options -// message called uninterpreted_option. This field must have the same name -// across all *Options messages. We then use this field to populate the -// extensions when we build a descriptor, at which point all protos have been -// parsed and so all extensions are known. -// -// Extension numbers for custom options may be chosen as follows: -// * For options which will only be used within a single application or -// organization, or for experimental options, use field numbers 50000 -// through 99999. It is up to you to ensure that you do not use the -// same number for multiple options. -// * For options which will be published and used publicly by multiple -// independent entities, e-mail protobuf-global-extension-registry@google.com -// to reserve extension numbers. Simply provide your project name (e.g. -// Objective-C plugin) and your project website (if available) -- there's no -// need to explain how you intend to use them. Usually you only need one -// extension number. You can declare multiple options with only one extension -// number by putting them in a sub-message. See the Custom Options section of -// the docs for examples: -// https://developers.google.com/protocol-buffers/docs/proto#options -// If this turns out to be popular, a web service will be set up -// to automatically assign option numbers. - -message FileOptions { - - // Sets the Java package where classes generated from this .proto will be - // placed. By default, the proto package is used, but this is often - // inappropriate because proto packages do not normally start with backwards - // domain names. - optional string java_package = 1; - - - // If set, all the classes from the .proto file are wrapped in a single - // outer class with the given name. This applies to both Proto1 - // (equivalent to the old "--one_java_file" option) and Proto2 (where - // a .proto always translates to a single class, but you may want to - // explicitly choose the class name). - optional string java_outer_classname = 8; - - // If set true, then the Java code generator will generate a separate .java - // file for each top-level message, enum, and service defined in the .proto - // file. Thus, these types will *not* be nested inside the outer class - // named by java_outer_classname. However, the outer class will still be - // generated to contain the file's getDescriptor() method as well as any - // top-level extensions defined in the file. - optional bool java_multiple_files = 10 [default = false]; - - // This option does nothing. 
- optional bool java_generate_equals_and_hash = 20 [deprecated=true]; - - // If set true, then the Java2 code generator will generate code that - // throws an exception whenever an attempt is made to assign a non-UTF-8 - // byte sequence to a string field. - // Message reflection will do the same. - // However, an extension field still accepts non-UTF-8 byte sequences. - // This option has no effect on when used with the lite runtime. - optional bool java_string_check_utf8 = 27 [default = false]; - - - // Generated classes can be optimized for speed or code size. - enum OptimizeMode { - SPEED = 1; // Generate complete code for parsing, serialization, - // etc. - CODE_SIZE = 2; // Use ReflectionOps to implement these methods. - LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime. - } - optional OptimizeMode optimize_for = 9 [default = SPEED]; - - // Sets the Go package where structs generated from this .proto will be - // placed. If omitted, the Go package will be derived from the following: - // - The basename of the package import path, if provided. - // - Otherwise, the package statement in the .proto file, if present. - // - Otherwise, the basename of the .proto file, without extension. - optional string go_package = 11; - - - - - // Should generic services be generated in each language? "Generic" services - // are not specific to any particular RPC system. They are generated by the - // main code generators in each language (without additional plugins). - // Generic services were the only kind of service generation supported by - // early versions of google.protobuf. - // - // Generic services are now considered deprecated in favor of using plugins - // that generate code specific to your particular RPC system. Therefore, - // these default to false. Old code which depends on generic services should - // explicitly set them to true. - optional bool cc_generic_services = 16 [default = false]; - optional bool java_generic_services = 17 [default = false]; - optional bool py_generic_services = 18 [default = false]; - optional bool php_generic_services = 42 [default = false]; - - // Is this file deprecated? - // Depending on the target platform, this can emit Deprecated annotations - // for everything in the file, or it will be completely ignored; in the very - // least, this is a formalization for deprecating files. - optional bool deprecated = 23 [default = false]; - - // Enables the use of arenas for the proto messages in this file. This applies - // only to generated classes for C++. - optional bool cc_enable_arenas = 31 [default = true]; - - - // Sets the objective c class prefix which is prepended to all objective c - // generated classes from this .proto. There is no default. - optional string objc_class_prefix = 36; - - // Namespace for generated classes; defaults to the package. - optional string csharp_namespace = 37; - - // By default Swift generators will take the proto package and CamelCase it - // replacing '.' with underscore and use that to prefix the types/symbols - // defined. When this options is provided, they will use this value instead - // to prefix the types/symbols defined. - optional string swift_prefix = 39; - - // Sets the php class prefix which is prepended to all php generated classes - // from this .proto. Default is empty. - optional string php_class_prefix = 40; - - // Use this option to change the namespace of php generated classes. Default - // is empty. 
When this option is empty, the package name will be used for - // determining the namespace. - optional string php_namespace = 41; - - // Use this option to change the namespace of php generated metadata classes. - // Default is empty. When this option is empty, the proto file name will be - // used for determining the namespace. - optional string php_metadata_namespace = 44; - - // Use this option to change the package of ruby generated classes. Default - // is empty. When this option is not set, the package name will be used for - // determining the ruby package. - optional string ruby_package = 45; - - - // The parser stores options it doesn't recognize here. - // See the documentation for the "Options" section above. - repeated UninterpretedOption uninterpreted_option = 999; - - // Clients can define custom options in extensions of this message. - // See the documentation for the "Options" section above. - extensions 1000 to max; - - reserved 38; -} - -message MessageOptions { - // Set true to use the old proto1 MessageSet wire format for extensions. - // This is provided for backwards-compatibility with the MessageSet wire - // format. You should not use this for any other reason: It's less - // efficient, has fewer features, and is more complicated. - // - // The message must be defined exactly as follows: - // message Foo { - // option message_set_wire_format = true; - // extensions 4 to max; - // } - // Note that the message cannot have any defined fields; MessageSets only - // have extensions. - // - // All extensions of your type must be singular messages; e.g. they cannot - // be int32s, enums, or repeated messages. - // - // Because this is an option, the above two restrictions are not enforced by - // the protocol compiler. - optional bool message_set_wire_format = 1 [default = false]; - - // Disables the generation of the standard "descriptor()" accessor, which can - // conflict with a field of the same name. This is meant to make migration - // from proto1 easier; new code should avoid fields named "descriptor". - optional bool no_standard_descriptor_accessor = 2 [default = false]; - - // Is this message deprecated? - // Depending on the target platform, this can emit Deprecated annotations - // for the message, or it will be completely ignored; in the very least, - // this is a formalization for deprecating messages. - optional bool deprecated = 3 [default = false]; - - // Whether the message is an automatically generated map entry type for the - // maps field. - // - // For maps fields: - // map map_field = 1; - // The parsed descriptor looks like: - // message MapFieldEntry { - // option map_entry = true; - // optional KeyType key = 1; - // optional ValueType value = 2; - // } - // repeated MapFieldEntry map_field = 1; - // - // Implementations may choose not to generate the map_entry=true message, but - // use a native map in the target language to hold the keys and values. - // The reflection APIs in such implementations still need to work as - // if the field is a repeated message field. - // - // NOTE: Do not set the option in .proto files. Always use the maps syntax - // instead. The option should only be implicitly set by the proto compiler - // parser. - optional bool map_entry = 7; - - reserved 8; // javalite_serializable - reserved 9; // javanano_as_lite - - - // The parser stores options it doesn't recognize here. See above. - repeated UninterpretedOption uninterpreted_option = 999; - - // Clients can define custom options in extensions of this message. 
See above. - extensions 1000 to max; -} - -message FieldOptions { - // The ctype option instructs the C++ code generator to use a different - // representation of the field than it normally would. See the specific - // options below. This option is not yet implemented in the open source - // release -- sorry, we'll try to include it in a future version! - optional CType ctype = 1 [default = STRING]; - enum CType { - // Default mode. - STRING = 0; - - CORD = 1; - - STRING_PIECE = 2; - } - // The packed option can be enabled for repeated primitive fields to enable - // a more efficient representation on the wire. Rather than repeatedly - // writing the tag and type for each element, the entire array is encoded as - // a single length-delimited blob. In proto3, only explicit setting it to - // false will avoid using packed encoding. - optional bool packed = 2; - - // The jstype option determines the JavaScript type used for values of the - // field. The option is permitted only for 64 bit integral and fixed types - // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING - // is represented as JavaScript string, which avoids loss of precision that - // can happen when a large value is converted to a floating point JavaScript. - // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to - // use the JavaScript "number" type. The behavior of the default option - // JS_NORMAL is implementation dependent. - // - // This option is an enum to permit additional types to be added, e.g. - // goog.math.Integer. - optional JSType jstype = 6 [default = JS_NORMAL]; - enum JSType { - // Use the default type. - JS_NORMAL = 0; - - // Use JavaScript strings. - JS_STRING = 1; - - // Use JavaScript numbers. - JS_NUMBER = 2; - } - - // Should this field be parsed lazily? Lazy applies only to message-type - // fields. It means that when the outer message is initially parsed, the - // inner message's contents will not be parsed but instead stored in encoded - // form. The inner message will actually be parsed when it is first accessed. - // - // This is only a hint. Implementations are free to choose whether to use - // eager or lazy parsing regardless of the value of this option. However, - // setting this option true suggests that the protocol author believes that - // using lazy parsing on this field is worth the additional bookkeeping - // overhead typically needed to implement it. - // - // This option does not affect the public interface of any generated code; - // all method signatures remain the same. Furthermore, thread-safety of the - // interface is not affected by this option; const methods remain safe to - // call from multiple threads concurrently, while non-const methods continue - // to require exclusive access. - // - // - // Note that implementations may choose not to check required fields within - // a lazy sub-message. That is, calling IsInitialized() on the outer message - // may return true even if the inner message has missing required fields. - // This is necessary because otherwise the inner message would have to be - // parsed in order to perform the check, defeating the purpose of lazy - // parsing. An implementation which chooses not to check required fields - // must be consistent about it. That is, for any particular sub-message, the - // implementation must either *always* check its required fields, or *never* - // check its required fields, regardless of whether or not the message has - // been parsed. 
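
The packed encoding described for FieldOptions above is easy to observe on the wire with a field declared [packed = true], for example SourceCodeInfo.Location.path further down in this file. A minimal sketch using the upstream descriptor_pb2 module:

    from google.protobuf import descriptor_pb2

    loc = descriptor_pb2.SourceCodeInfo.Location(path=[4, 3, 2, 7, 1])
    # One tag byte (field 1, wire type 2), one length byte (5), then the five
    # varints, instead of repeating the tag before every element.
    assert loc.SerializeToString() == b"\x0a\x05\x04\x03\x02\x07\x01"
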
- optional bool lazy = 5 [default = false]; - - // Is this field deprecated? - // Depending on the target platform, this can emit Deprecated annotations - // for accessors, or it will be completely ignored; in the very least, this - // is a formalization for deprecating fields. - optional bool deprecated = 3 [default = false]; - - // For Google-internal migration only. Do not use. - optional bool weak = 10 [default = false]; - - - // The parser stores options it doesn't recognize here. See above. - repeated UninterpretedOption uninterpreted_option = 999; - - // Clients can define custom options in extensions of this message. See above. - extensions 1000 to max; - - reserved 4; // removed jtype -} - -message OneofOptions { - // The parser stores options it doesn't recognize here. See above. - repeated UninterpretedOption uninterpreted_option = 999; - - // Clients can define custom options in extensions of this message. See above. - extensions 1000 to max; -} - -message EnumOptions { - - // Set this option to true to allow mapping different tag names to the same - // value. - optional bool allow_alias = 2; - - // Is this enum deprecated? - // Depending on the target platform, this can emit Deprecated annotations - // for the enum, or it will be completely ignored; in the very least, this - // is a formalization for deprecating enums. - optional bool deprecated = 3 [default = false]; - - reserved 5; // javanano_as_lite - - // The parser stores options it doesn't recognize here. See above. - repeated UninterpretedOption uninterpreted_option = 999; - - // Clients can define custom options in extensions of this message. See above. - extensions 1000 to max; -} - -message EnumValueOptions { - // Is this enum value deprecated? - // Depending on the target platform, this can emit Deprecated annotations - // for the enum value, or it will be completely ignored; in the very least, - // this is a formalization for deprecating enum values. - optional bool deprecated = 1 [default = false]; - - // The parser stores options it doesn't recognize here. See above. - repeated UninterpretedOption uninterpreted_option = 999; - - // Clients can define custom options in extensions of this message. See above. - extensions 1000 to max; -} - -message ServiceOptions { - - // Note: Field numbers 1 through 32 are reserved for Google's internal RPC - // framework. We apologize for hoarding these numbers to ourselves, but - // we were already using them long before we decided to release Protocol - // Buffers. - - // Is this service deprecated? - // Depending on the target platform, this can emit Deprecated annotations - // for the service, or it will be completely ignored; in the very least, - // this is a formalization for deprecating services. - optional bool deprecated = 33 [default = false]; - - // The parser stores options it doesn't recognize here. See above. - repeated UninterpretedOption uninterpreted_option = 999; - - // Clients can define custom options in extensions of this message. See above. - extensions 1000 to max; -} - -message MethodOptions { - - // Note: Field numbers 1 through 32 are reserved for Google's internal RPC - // framework. We apologize for hoarding these numbers to ourselves, but - // we were already using them long before we decided to release Protocol - // Buffers. - - // Is this method deprecated? 
- // Depending on the target platform, this can emit Deprecated annotations - // for the method, or it will be completely ignored; in the very least, - // this is a formalization for deprecating methods. - optional bool deprecated = 33 [default = false]; - - // Is this method side-effect-free (or safe in HTTP parlance), or idempotent, - // or neither? HTTP based RPC implementation may choose GET verb for safe - // methods, and PUT verb for idempotent methods instead of the default POST. - enum IdempotencyLevel { - IDEMPOTENCY_UNKNOWN = 0; - NO_SIDE_EFFECTS = 1; // implies idempotent - IDEMPOTENT = 2; // idempotent, but may have side effects - } - optional IdempotencyLevel idempotency_level = 34 - [default = IDEMPOTENCY_UNKNOWN]; - - // The parser stores options it doesn't recognize here. See above. - repeated UninterpretedOption uninterpreted_option = 999; - - // Clients can define custom options in extensions of this message. See above. - extensions 1000 to max; -} - - -// A message representing a option the parser does not recognize. This only -// appears in options protos created by the compiler::Parser class. -// DescriptorPool resolves these when building Descriptor objects. Therefore, -// options protos in descriptor objects (e.g. returned by Descriptor::options(), -// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions -// in them. -message UninterpretedOption { - // The name of the uninterpreted option. Each string represents a segment in - // a dot-separated name. is_extension is true iff a segment represents an - // extension (denoted with parentheses in options specs in .proto files). - // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents - // "foo.(bar.baz).qux". - message NamePart { - required string name_part = 1; - required bool is_extension = 2; - } - repeated NamePart name = 2; - - // The value of the uninterpreted option, in whatever type the tokenizer - // identified it as during parsing. Exactly one of these should be set. - optional string identifier_value = 3; - optional uint64 positive_int_value = 4; - optional int64 negative_int_value = 5; - optional double double_value = 6; - optional bytes string_value = 7; - optional string aggregate_value = 8; -} - -// =================================================================== -// Optional source code info - -// Encapsulates information about the original source file from which a -// FileDescriptorProto was generated. -message SourceCodeInfo { - // A Location identifies a piece of source code in a .proto file which - // corresponds to a particular definition. This information is intended - // to be useful to IDEs, code indexers, documentation generators, and similar - // tools. - // - // For example, say we have a file like: - // message Foo { - // optional string foo = 1; - // } - // Let's look at just the field definition: - // optional string foo = 1; - // ^ ^^ ^^ ^ ^^^ - // a bc de f ghi - // We have the following locations: - // span path represents - // [a,i) [ 4, 0, 2, 0 ] The whole field definition. - // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). - // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). - // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). - // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). - // - // Notes: - // - A location may refer to a repeated field itself (i.e. not to any - // particular index within it). This is used whenever a set of elements are - // logically enclosed in a single code segment. 
For example, an entire - // extend block (possibly containing multiple extension definitions) will - // have an outer location whose path refers to the "extensions" repeated - // field without an index. - // - Multiple locations may have the same path. This happens when a single - // logical declaration is spread out across multiple places. The most - // obvious example is the "extend" block again -- there may be multiple - // extend blocks in the same scope, each of which will have the same path. - // - A location's span is not always a subset of its parent's span. For - // example, the "extendee" of an extension declaration appears at the - // beginning of the "extend" block and is shared by all extensions within - // the block. - // - Just because a location's span is a subset of some other location's span - // does not mean that it is a descendant. For example, a "group" defines - // both a type and a field in a single declaration. Thus, the locations - // corresponding to the type and field and their components will overlap. - // - Code which tries to interpret locations should probably be designed to - // ignore those that it doesn't understand, as more types of locations could - // be recorded in the future. - repeated Location location = 1; - message Location { - // Identifies which part of the FileDescriptorProto was defined at this - // location. - // - // Each element is a field number or an index. They form a path from - // the root FileDescriptorProto to the place where the definition. For - // example, this path: - // [ 4, 3, 2, 7, 1 ] - // refers to: - // file.message_type(3) // 4, 3 - // .field(7) // 2, 7 - // .name() // 1 - // This is because FileDescriptorProto.message_type has field number 4: - // repeated DescriptorProto message_type = 4; - // and DescriptorProto.field has field number 2: - // repeated FieldDescriptorProto field = 2; - // and FieldDescriptorProto.name has field number 1: - // optional string name = 1; - // - // Thus, the above path gives the location of a field name. If we removed - // the last element: - // [ 4, 3, 2, 7 ] - // this path refers to the whole field declaration (from the beginning - // of the label to the terminating semicolon). - repeated int32 path = 1 [packed = true]; - - // Always has exactly three or four elements: start line, start column, - // end line (optional, otherwise assumed same as start line), end column. - // These are packed into a single field for efficiency. Note that line - // and column numbers are zero-based -- typically you will want to add - // 1 to each before displaying to a user. - repeated int32 span = 2 [packed = true]; - - // If this SourceCodeInfo represents a complete declaration, these are any - // comments appearing before and after the declaration which appear to be - // attached to the declaration. - // - // A series of line comments appearing on consecutive lines, with no other - // tokens appearing on those lines, will be treated as a single comment. - // - // leading_detached_comments will keep paragraphs of comments that appear - // before (but not connected to) the current element. Each paragraph, - // separated by empty lines, will be one comment element in the repeated - // field. - // - // Only the comment content is provided; comment markers (e.g. //) are - // stripped out. For block comments, leading whitespace and an asterisk - // will be stripped from the beginning of each line other than the first. - // Newlines are included in the output. 
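
The worked path example above ([ 4, 3, 2, 7, 1 ] meaning file.message_type(3).field(7).name()) can be cross-checked programmatically, since the numbers are ordinary protobuf field numbers. A small sketch against the upstream descriptor_pb2 definitions:

    from google.protobuf import descriptor_pb2

    FD = descriptor_pb2.FileDescriptorProto.DESCRIPTOR
    DP = descriptor_pb2.DescriptorProto.DESCRIPTOR
    FP = descriptor_pb2.FieldDescriptorProto.DESCRIPTOR

    # [4, 3, 2, 7, 1] == message_type[3] . field[7] . name
    assert FD.fields_by_name["message_type"].number == 4
    assert DP.fields_by_name["field"].number == 2
    assert FP.fields_by_name["name"].number == 1
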
- // - // Examples: - // - // optional int32 foo = 1; // Comment attached to foo. - // // Comment attached to bar. - // optional int32 bar = 2; - // - // optional string baz = 3; - // // Comment attached to baz. - // // Another line attached to baz. - // - // // Comment attached to qux. - // // - // // Another line attached to qux. - // optional double qux = 4; - // - // // Detached comment for corge. This is not leading or trailing comments - // // to qux or corge because there are blank lines separating it from - // // both. - // - // // Detached comment for corge paragraph 2. - // - // optional string corge = 5; - // /* Block comment attached - // * to corge. Leading asterisks - // * will be removed. */ - // /* Block comment attached to - // * grault. */ - // optional int32 grault = 6; - // - // // ignored detached comments. - optional string leading_comments = 3; - optional string trailing_comments = 4; - repeated string leading_detached_comments = 6; - } -} - -// Describes the relationship between generated code and its original source -// file. A GeneratedCodeInfo message is associated with only one generated -// source file, but may contain references to different source .proto files. -message GeneratedCodeInfo { - // An Annotation connects some span of text in generated code to an element - // of its generating .proto file. - repeated Annotation annotation = 1; - message Annotation { - // Identifies the element in the original source .proto file. This field - // is formatted the same as SourceCodeInfo.Location.path. - repeated int32 path = 1 [packed = true]; - - // Identifies the filesystem path to the original source .proto. - optional string source_file = 2; - - // Identifies the starting offset in bytes in the generated code - // that relates to the identified object. - optional int32 begin = 3; - - // Identifies the ending offset in bytes in the generated code that - // relates to the identified offset. The end offset should be one past - // the last relevant byte (so the length of the text = end - begin). 
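
Because the begin/end offsets above are half-open ("one past the last relevant byte"), they map directly onto Python slicing. A tiny illustration with made-up values (the generated source and the annotation offsets below are hypothetical):

    generated = b"class Example:\n    pass\n"  # hypothetical generated source
    begin, end = 6, 13                          # hypothetical Annotation offsets

    annotated = generated[begin:end]            # half-open, like a Python slice
    assert annotated == b"Example"
    assert len(annotated) == end - begin
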
- optional int32 end = 4; - } -} diff --git a/tests/inputs/example_service/example_service.proto b/tests/inputs/example_service/example_service.proto deleted file mode 100644 index 96455cc3..00000000 --- a/tests/inputs/example_service/example_service.proto +++ /dev/null @@ -1,20 +0,0 @@ -syntax = "proto3"; - -package example_service; - -service Test { - rpc ExampleUnaryUnary(ExampleRequest) returns (ExampleResponse); - rpc ExampleUnaryStream(ExampleRequest) returns (stream ExampleResponse); - rpc ExampleStreamUnary(stream ExampleRequest) returns (ExampleResponse); - rpc ExampleStreamStream(stream ExampleRequest) returns (stream ExampleResponse); -} - -message ExampleRequest { - string example_string = 1; - int64 example_integer = 2; -} - -message ExampleResponse { - string example_string = 1; - int64 example_integer = 2; -} diff --git a/tests/inputs/example_service/test_example_service.py b/tests/inputs/example_service/test_example_service.py deleted file mode 100644 index cd2cc40f..00000000 --- a/tests/inputs/example_service/test_example_service.py +++ /dev/null @@ -1,81 +0,0 @@ -from typing import ( - AsyncIterator, -) - -import pytest -from grpclib.testing import ChannelFor - -from tests.output_betterproto.example_service import ( - ExampleRequest, - ExampleResponse, - TestBase, - TestStub, -) - - -class ExampleService(TestBase): - async def example_unary_unary(self, example_request: ExampleRequest) -> "ExampleResponse": - return ExampleResponse( - example_string=example_request.example_string, - example_integer=example_request.example_integer, - ) - - async def example_unary_stream(self, example_request: ExampleRequest) -> AsyncIterator["ExampleResponse"]: - response = ExampleResponse( - example_string=example_request.example_string, - example_integer=example_request.example_integer, - ) - yield response - yield response - yield response - - async def example_stream_unary( - self, example_request_iterator: AsyncIterator["ExampleRequest"] - ) -> "ExampleResponse": - async for example_request in example_request_iterator: - return ExampleResponse( - example_string=example_request.example_string, - example_integer=example_request.example_integer, - ) - - async def example_stream_stream( - self, example_request_iterator: AsyncIterator["ExampleRequest"] - ) -> AsyncIterator["ExampleResponse"]: - async for example_request in example_request_iterator: - yield ExampleResponse( - example_string=example_request.example_string, - example_integer=example_request.example_integer, - ) - - -@pytest.mark.asyncio -async def test_calls_with_different_cardinalities(): - example_request = ExampleRequest("test string", 42) - - async with ChannelFor([ExampleService()]) as channel: - stub = TestStub(channel) - - # unary unary - response = await stub.example_unary_unary(example_request) - assert response.example_string == example_request.example_string - assert response.example_integer == example_request.example_integer - - # unary stream - async for response in stub.example_unary_stream(example_request): - assert response.example_string == example_request.example_string - assert response.example_integer == example_request.example_integer - - # stream unary - async def request_iterator(): - yield example_request - yield example_request - yield example_request - - response = await stub.example_stream_unary(request_iterator()) - assert response.example_string == example_request.example_string - assert response.example_integer == example_request.example_integer - - # stream stream - async for response in 
stub.example_stream_stream(request_iterator()): - assert response.example_string == example_request.example_string - assert response.example_integer == example_request.example_integer diff --git a/tests/inputs/field_name_identical_to_type/field_name_identical_to_type.json b/tests/inputs/field_name_identical_to_type/field_name_identical_to_type.json deleted file mode 100644 index 7a6e7ae8..00000000 --- a/tests/inputs/field_name_identical_to_type/field_name_identical_to_type.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "int": 26, - "float": 26.0, - "str": "value-for-str", - "bytes": "001a", - "bool": true -} \ No newline at end of file diff --git a/tests/inputs/field_name_identical_to_type/field_name_identical_to_type.proto b/tests/inputs/field_name_identical_to_type/field_name_identical_to_type.proto deleted file mode 100644 index 81a0fc43..00000000 --- a/tests/inputs/field_name_identical_to_type/field_name_identical_to_type.proto +++ /dev/null @@ -1,13 +0,0 @@ -syntax = "proto3"; - -package field_name_identical_to_type; - -// Tests that messages may contain fields with names that are identical to their python types (PR #294) - -message Test { - int32 int = 1; - float float = 2; - string str = 3; - bytes bytes = 4; - bool bool = 5; -} \ No newline at end of file diff --git a/tests/inputs/fixed/fixed.json b/tests/inputs/fixed/fixed.json deleted file mode 100644 index 88587806..00000000 --- a/tests/inputs/fixed/fixed.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "foo": 4294967295, - "bar": -2147483648, - "baz": "18446744073709551615", - "qux": "-9223372036854775808" -} diff --git a/tests/inputs/fixed/fixed.proto b/tests/inputs/fixed/fixed.proto deleted file mode 100644 index 0f0ffb4e..00000000 --- a/tests/inputs/fixed/fixed.proto +++ /dev/null @@ -1,10 +0,0 @@ -syntax = "proto3"; - -package fixed; - -message Test { - fixed32 foo = 1; - sfixed32 bar = 2; - fixed64 baz = 3; - sfixed64 qux = 4; -} diff --git a/tests/inputs/float/float.json b/tests/inputs/float/float.json deleted file mode 100644 index 3adac974..00000000 --- a/tests/inputs/float/float.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "positive": "Infinity", - "negative": "-Infinity", - "nan": "NaN", - "three": 3.0, - "threePointOneFour": 3.14, - "negThree": -3.0, - "negThreePointOneFour": -3.14 - } diff --git a/tests/inputs/float/float.proto b/tests/inputs/float/float.proto deleted file mode 100644 index fea12b3d..00000000 --- a/tests/inputs/float/float.proto +++ /dev/null @@ -1,14 +0,0 @@ -syntax = "proto3"; - -package float; - -// Some documentation about the Test message. 
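
The quoting in the fixed.json and float.json fixtures above follows the proto3 JSON mapping: 64-bit integers are emitted as decimal strings, and non-finite doubles as the strings "Infinity", "-Infinity" and "NaN". A quick check with the upstream json_format module and the well-known wrapper types:

    from google.protobuf import json_format
    from google.protobuf.wrappers_pb2 import DoubleValue, Int64Value

    # Wrapper well-known types serialize to their bare JSON value, which makes
    # the mapping easy to see.
    print(json_format.MessageToJson(DoubleValue(value=float("inf"))))  # "Infinity"
    print(json_format.MessageToJson(Int64Value(value=2**63 - 1)))      # "9223372036854775807"
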
-message Test { - double positive = 1; - double negative = 2; - double nan = 3; - double three = 4; - double three_point_one_four = 5; - double neg_three = 6; - double neg_three_point_one_four = 7; -} diff --git a/tests/inputs/google_impl_behavior_equivalence/google_impl_behavior_equivalence.proto b/tests/inputs/google_impl_behavior_equivalence/google_impl_behavior_equivalence.proto deleted file mode 100644 index 66ef8a64..00000000 --- a/tests/inputs/google_impl_behavior_equivalence/google_impl_behavior_equivalence.proto +++ /dev/null @@ -1,22 +0,0 @@ -syntax = "proto3"; - -import "google/protobuf/timestamp.proto"; -package google_impl_behavior_equivalence; - -message Foo { int64 bar = 1; } - -message Test { - oneof group { - string string = 1; - int64 integer = 2; - Foo foo = 3; - } -} - -message Spam { - google.protobuf.Timestamp ts = 1; -} - -message Request { Empty foo = 1; } - -message Empty {} diff --git a/tests/inputs/google_impl_behavior_equivalence/test_google_impl_behavior_equivalence.py b/tests/inputs/google_impl_behavior_equivalence/test_google_impl_behavior_equivalence.py deleted file mode 100644 index b6ed5e0f..00000000 --- a/tests/inputs/google_impl_behavior_equivalence/test_google_impl_behavior_equivalence.py +++ /dev/null @@ -1,84 +0,0 @@ -from datetime import ( - datetime, - timezone, -) - -import pytest -from google.protobuf import json_format -from google.protobuf.timestamp_pb2 import Timestamp - -from tests.output_betterproto.google_impl_behavior_equivalence import ( - Empty, - Foo, - Request, - Spam, - Test, -) -from tests.output_reference.google_impl_behavior_equivalence.google_impl_behavior_equivalence_pb2 import ( - Empty as ReferenceEmpty, - Foo as ReferenceFoo, - Request as ReferenceRequest, - Spam as ReferenceSpam, - Test as ReferenceTest, -) - - -def test_oneof_serializes_similar_to_google_oneof(): - tests = [ - (Test(string="abc"), ReferenceTest(string="abc")), - (Test(integer=2), ReferenceTest(integer=2)), - (Test(foo=Foo(bar=1)), ReferenceTest(foo=ReferenceFoo(bar=1))), - # Default values should also behave the same within oneofs - (Test(string=""), ReferenceTest(string="")), - (Test(integer=0), ReferenceTest(integer=0)), - (Test(foo=Foo(bar=0)), ReferenceTest(foo=ReferenceFoo(bar=0))), - ] - for message, message_reference in tests: - # NOTE: As of July 2020, MessageToJson inserts newlines in the output string so, - # just compare dicts - assert message.to_dict() == json_format.MessageToDict(message_reference) - - -def test_bytes_are_the_same_for_oneof(): - message = Test(string="") - message_reference = ReferenceTest(string="") - - message_bytes = bytes(message) - message_reference_bytes = message_reference.SerializeToString() - - assert message_bytes == message_reference_bytes - - message2 = Test().parse(message_reference_bytes) - message_reference2 = ReferenceTest() - message_reference2.ParseFromString(message_reference_bytes) - - assert message == message2 - assert message_reference == message_reference2 - - # None of these fields were explicitly set BUT they should not actually be null - # themselves - assert message.foo is None - assert message2.foo is None - - assert isinstance(message_reference.foo, ReferenceFoo) - assert isinstance(message_reference2.foo, ReferenceFoo) - - -@pytest.mark.parametrize("dt", (datetime.min.replace(tzinfo=timezone.utc),)) -def test_datetime_clamping(dt): # see #407 - ts = Timestamp() - ts.FromDatetime(dt) - assert bytes(Spam(dt)) == ReferenceSpam(ts=ts).SerializeToString() - message_bytes = bytes(Spam(dt)) - - assert 
Spam().parse(message_bytes).ts.timestamp() == ReferenceSpam.FromString(message_bytes).ts.seconds - - -def test_empty_message_field(): - message = Request() - reference_message = ReferenceRequest() - - message.foo = Empty() - reference_message.foo.CopyFrom(ReferenceEmpty()) - - assert bytes(message) == reference_message.SerializeToString() diff --git a/tests/inputs/googletypes/googletypes-missing.json b/tests/inputs/googletypes/googletypes-missing.json deleted file mode 100644 index 0967ef42..00000000 --- a/tests/inputs/googletypes/googletypes-missing.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tests/inputs/googletypes/googletypes.json b/tests/inputs/googletypes/googletypes.json deleted file mode 100644 index 0a002e9b..00000000 --- a/tests/inputs/googletypes/googletypes.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "maybe": false, - "ts": "1972-01-01T10:00:20.021Z", - "duration": "1.200s", - "important": 10, - "empty": {} -} diff --git a/tests/inputs/googletypes/googletypes.proto b/tests/inputs/googletypes/googletypes.proto deleted file mode 100644 index ef8cb4a1..00000000 --- a/tests/inputs/googletypes/googletypes.proto +++ /dev/null @@ -1,16 +0,0 @@ -syntax = "proto3"; - -package googletypes; - -import "google/protobuf/duration.proto"; -import "google/protobuf/timestamp.proto"; -import "google/protobuf/wrappers.proto"; -import "google/protobuf/empty.proto"; - -message Test { - google.protobuf.BoolValue maybe = 1; - google.protobuf.Timestamp ts = 2; - google.protobuf.Duration duration = 3; - google.protobuf.Int32Value important = 4; - google.protobuf.Empty empty = 5; -} diff --git a/tests/inputs/googletypes_request/googletypes_request.proto b/tests/inputs/googletypes_request/googletypes_request.proto deleted file mode 100644 index 1cedcaaf..00000000 --- a/tests/inputs/googletypes_request/googletypes_request.proto +++ /dev/null @@ -1,29 +0,0 @@ -syntax = "proto3"; - -package googletypes_request; - -import "google/protobuf/duration.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/timestamp.proto"; -import "google/protobuf/wrappers.proto"; - -// Tests that google types can be used as params - -service Test { - rpc SendDouble (google.protobuf.DoubleValue) returns (Input); - rpc SendFloat (google.protobuf.FloatValue) returns (Input); - rpc SendInt64 (google.protobuf.Int64Value) returns (Input); - rpc SendUInt64 (google.protobuf.UInt64Value) returns (Input); - rpc SendInt32 (google.protobuf.Int32Value) returns (Input); - rpc SendUInt32 (google.protobuf.UInt32Value) returns (Input); - rpc SendBool (google.protobuf.BoolValue) returns (Input); - rpc SendString (google.protobuf.StringValue) returns (Input); - rpc SendBytes (google.protobuf.BytesValue) returns (Input); - rpc SendDatetime (google.protobuf.Timestamp) returns (Input); - rpc SendTimedelta (google.protobuf.Duration) returns (Input); - rpc SendEmpty (google.protobuf.Empty) returns (Input); -} - -message Input { - -} diff --git a/tests/inputs/googletypes_request/test_googletypes_request.py b/tests/inputs/googletypes_request/test_googletypes_request.py deleted file mode 100644 index f1cd4f0b..00000000 --- a/tests/inputs/googletypes_request/test_googletypes_request.py +++ /dev/null @@ -1,46 +0,0 @@ -from datetime import ( - datetime, - timedelta, -) -from typing import ( - Any, - Callable, -) - -import pytest - -import betterproto.lib.google.protobuf as protobuf -from tests.mocks import MockChannel -from tests.output_betterproto.googletypes_request import ( - Input, - TestStub, -) - -test_cases = [ - 
(TestStub.send_double, protobuf.DoubleValue, 2.5), - (TestStub.send_float, protobuf.FloatValue, 2.5), - (TestStub.send_int64, protobuf.Int64Value, -64), - (TestStub.send_u_int64, protobuf.UInt64Value, 64), - (TestStub.send_int32, protobuf.Int32Value, -32), - (TestStub.send_u_int32, protobuf.UInt32Value, 32), - (TestStub.send_bool, protobuf.BoolValue, True), - (TestStub.send_string, protobuf.StringValue, "string"), - (TestStub.send_bytes, protobuf.BytesValue, bytes(0xFF)[0:4]), - (TestStub.send_datetime, protobuf.Timestamp, datetime(2038, 1, 19, 3, 14, 8)), - (TestStub.send_timedelta, protobuf.Duration, timedelta(seconds=123456)), -] - - -@pytest.mark.asyncio -@pytest.mark.parametrize(["service_method", "wrapper_class", "value"], test_cases) -async def test_channel_receives_wrapped_type( - service_method: Callable[[TestStub, Input], Any], wrapper_class: Callable, value -): - wrapped_value = wrapper_class() - wrapped_value.value = value - channel = MockChannel(responses=[Input()]) - service = TestStub(channel) - - await service_method(service, wrapped_value) - - assert channel.requests[0]["request"] == type(wrapped_value) diff --git a/tests/inputs/googletypes_response/googletypes_response.proto b/tests/inputs/googletypes_response/googletypes_response.proto deleted file mode 100644 index 8917d1c7..00000000 --- a/tests/inputs/googletypes_response/googletypes_response.proto +++ /dev/null @@ -1,23 +0,0 @@ -syntax = "proto3"; - -package googletypes_response; - -import "google/protobuf/wrappers.proto"; - -// Tests that wrapped values can be used directly as return values - -service Test { - rpc GetDouble (Input) returns (google.protobuf.DoubleValue); - rpc GetFloat (Input) returns (google.protobuf.FloatValue); - rpc GetInt64 (Input) returns (google.protobuf.Int64Value); - rpc GetUInt64 (Input) returns (google.protobuf.UInt64Value); - rpc GetInt32 (Input) returns (google.protobuf.Int32Value); - rpc GetUInt32 (Input) returns (google.protobuf.UInt32Value); - rpc GetBool (Input) returns (google.protobuf.BoolValue); - rpc GetString (Input) returns (google.protobuf.StringValue); - rpc GetBytes (Input) returns (google.protobuf.BytesValue); -} - -message Input { - -} diff --git a/tests/inputs/googletypes_response/test_googletypes_response.py b/tests/inputs/googletypes_response/test_googletypes_response.py deleted file mode 100644 index e1aebc6d..00000000 --- a/tests/inputs/googletypes_response/test_googletypes_response.py +++ /dev/null @@ -1,63 +0,0 @@ -from typing import ( - Any, - Callable, - Optional, -) - -import pytest - -import betterproto.lib.google.protobuf as protobuf -from tests.mocks import MockChannel -from tests.output_betterproto.googletypes_response import ( - Input, - TestStub, -) - -test_cases = [ - (TestStub.get_double, protobuf.DoubleValue, 2.5), - (TestStub.get_float, protobuf.FloatValue, 2.5), - (TestStub.get_int64, protobuf.Int64Value, -64), - (TestStub.get_u_int64, protobuf.UInt64Value, 64), - (TestStub.get_int32, protobuf.Int32Value, -32), - (TestStub.get_u_int32, protobuf.UInt32Value, 32), - (TestStub.get_bool, protobuf.BoolValue, True), - (TestStub.get_string, protobuf.StringValue, "string"), - (TestStub.get_bytes, protobuf.BytesValue, bytes(0xFF)[0:4]), -] - - -@pytest.mark.asyncio -@pytest.mark.parametrize(["service_method", "wrapper_class", "value"], test_cases) -async def test_channel_receives_wrapped_type( - service_method: Callable[[TestStub, Input], Any], wrapper_class: Callable, value -): - wrapped_value = wrapper_class() - wrapped_value.value = value - channel = 
MockChannel(responses=[wrapped_value]) - service = TestStub(channel) - method_param = Input() - - await service_method(service, method_param) - - assert channel.requests[0]["response_type"] != Optional[type(value)] - assert channel.requests[0]["response_type"] == type(wrapped_value) - - -@pytest.mark.asyncio -@pytest.mark.xfail -@pytest.mark.parametrize(["service_method", "wrapper_class", "value"], test_cases) -async def test_service_unwraps_response( - service_method: Callable[[TestStub, Input], Any], wrapper_class: Callable, value -): - """ - grpclib does not unwrap wrapper values returned by services - """ - wrapped_value = wrapper_class() - wrapped_value.value = value - service = TestStub(MockChannel(responses=[wrapped_value])) - method_param = Input() - - response_value = await service_method(service, method_param) - - assert response_value == value - assert type(response_value) == type(value) diff --git a/tests/inputs/googletypes_response_embedded/googletypes_response_embedded.proto b/tests/inputs/googletypes_response_embedded/googletypes_response_embedded.proto deleted file mode 100644 index 47284e3a..00000000 --- a/tests/inputs/googletypes_response_embedded/googletypes_response_embedded.proto +++ /dev/null @@ -1,26 +0,0 @@ -syntax = "proto3"; - -package googletypes_response_embedded; - -import "google/protobuf/wrappers.proto"; - -// Tests that wrapped values are supported as part of output message -service Test { - rpc getOutput (Input) returns (Output); -} - -message Input { - -} - -message Output { - google.protobuf.DoubleValue double_value = 1; - google.protobuf.FloatValue float_value = 2; - google.protobuf.Int64Value int64_value = 3; - google.protobuf.UInt64Value uint64_value = 4; - google.protobuf.Int32Value int32_value = 5; - google.protobuf.UInt32Value uint32_value = 6; - google.protobuf.BoolValue bool_value = 7; - google.protobuf.StringValue string_value = 8; - google.protobuf.BytesValue bytes_value = 9; -} diff --git a/tests/inputs/googletypes_response_embedded/test_googletypes_response_embedded.py b/tests/inputs/googletypes_response_embedded/test_googletypes_response_embedded.py deleted file mode 100644 index 57ebce1b..00000000 --- a/tests/inputs/googletypes_response_embedded/test_googletypes_response_embedded.py +++ /dev/null @@ -1,40 +0,0 @@ -import pytest - -from tests.mocks import MockChannel -from tests.output_betterproto.googletypes_response_embedded import ( - Input, - Output, - TestStub, -) - - -@pytest.mark.asyncio -async def test_service_passes_through_unwrapped_values_embedded_in_response(): - """ - We do not not need to implement value unwrapping for embedded well-known types, - as this is already handled by grpclib. This test merely shows that this is the case. 
- """ - output = Output( - double_value=10.0, - float_value=12.0, - int64_value=-13, - uint64_value=14, - int32_value=-15, - uint32_value=16, - bool_value=True, - string_value="string", - bytes_value=bytes(0xFF)[0:4], - ) - - service = TestStub(MockChannel(responses=[output])) - response = await service.get_output(Input()) - - assert response.double_value == 10.0 - assert response.float_value == 12.0 - assert response.int64_value == -13 - assert response.uint64_value == 14 - assert response.int32_value == -15 - assert response.uint32_value == 16 - assert response.bool_value - assert response.string_value == "string" - assert response.bytes_value == bytes(0xFF)[0:4] diff --git a/tests/inputs/googletypes_service_returns_empty/googletypes_service_returns_empty.proto b/tests/inputs/googletypes_service_returns_empty/googletypes_service_returns_empty.proto deleted file mode 100644 index 2153ad5e..00000000 --- a/tests/inputs/googletypes_service_returns_empty/googletypes_service_returns_empty.proto +++ /dev/null @@ -1,13 +0,0 @@ -syntax = "proto3"; - -package googletypes_service_returns_empty; - -import "google/protobuf/empty.proto"; - -service Test { - rpc Send (RequestMessage) returns (google.protobuf.Empty) { - } -} - -message RequestMessage { -} \ No newline at end of file diff --git a/tests/inputs/googletypes_service_returns_googletype/googletypes_service_returns_googletype.proto b/tests/inputs/googletypes_service_returns_googletype/googletypes_service_returns_googletype.proto deleted file mode 100644 index 457707b7..00000000 --- a/tests/inputs/googletypes_service_returns_googletype/googletypes_service_returns_googletype.proto +++ /dev/null @@ -1,18 +0,0 @@ -syntax = "proto3"; - -package googletypes_service_returns_googletype; - -import "google/protobuf/empty.proto"; -import "google/protobuf/struct.proto"; - -// Tests that imports are generated correctly when returning Google well-known types - -service Test { - rpc GetEmpty (RequestMessage) returns (google.protobuf.Empty); - rpc GetStruct (RequestMessage) returns (google.protobuf.Struct); - rpc GetListValue (RequestMessage) returns (google.protobuf.ListValue); - rpc GetValue (RequestMessage) returns (google.protobuf.Value); -} - -message RequestMessage { -} \ No newline at end of file diff --git a/tests/inputs/googletypes_struct/googletypes_struct.json b/tests/inputs/googletypes_struct/googletypes_struct.json deleted file mode 100644 index ecc175e0..00000000 --- a/tests/inputs/googletypes_struct/googletypes_struct.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "struct": { - "key": true - } -} diff --git a/tests/inputs/googletypes_struct/googletypes_struct.proto b/tests/inputs/googletypes_struct/googletypes_struct.proto deleted file mode 100644 index 2b8b5c55..00000000 --- a/tests/inputs/googletypes_struct/googletypes_struct.proto +++ /dev/null @@ -1,9 +0,0 @@ -syntax = "proto3"; - -package googletypes_struct; - -import "google/protobuf/struct.proto"; - -message Test { - google.protobuf.Struct struct = 1; -} diff --git a/tests/inputs/googletypes_value/googletypes_value.json b/tests/inputs/googletypes_value/googletypes_value.json deleted file mode 100644 index db52d5c0..00000000 --- a/tests/inputs/googletypes_value/googletypes_value.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "value1": "hello world", - "value2": true, - "value3": 1, - "value4": null, - "value5": [ - 1, - 2, - 3 - ] -} diff --git a/tests/inputs/googletypes_value/googletypes_value.proto b/tests/inputs/googletypes_value/googletypes_value.proto deleted file mode 100644 index 
d5089d5e..00000000 --- a/tests/inputs/googletypes_value/googletypes_value.proto +++ /dev/null @@ -1,15 +0,0 @@ -syntax = "proto3"; - -package googletypes_value; - -import "google/protobuf/struct.proto"; - -// Tests that fields of type google.protobuf.Value can contain arbitrary JSON-values. - -message Test { - google.protobuf.Value value1 = 1; - google.protobuf.Value value2 = 2; - google.protobuf.Value value3 = 3; - google.protobuf.Value value4 = 4; - google.protobuf.Value value5 = 5; -} diff --git a/tests/inputs/import_capitalized_package/capitalized.proto b/tests/inputs/import_capitalized_package/capitalized.proto deleted file mode 100644 index e80c95cd..00000000 --- a/tests/inputs/import_capitalized_package/capitalized.proto +++ /dev/null @@ -1,8 +0,0 @@ -syntax = "proto3"; - - -package import_capitalized_package.Capitalized; - -message Message { - -} diff --git a/tests/inputs/import_capitalized_package/test.proto b/tests/inputs/import_capitalized_package/test.proto deleted file mode 100644 index 38c9b2d7..00000000 --- a/tests/inputs/import_capitalized_package/test.proto +++ /dev/null @@ -1,11 +0,0 @@ -syntax = "proto3"; - -package import_capitalized_package; - -import "capitalized.proto"; - -// Tests that we can import from a package with a capital name, that looks like a nested type, but isn't. - -message Test { - Capitalized.Message message = 1; -} diff --git a/tests/inputs/import_child_package_from_package/child.proto b/tests/inputs/import_child_package_from_package/child.proto deleted file mode 100644 index d99c7c31..00000000 --- a/tests/inputs/import_child_package_from_package/child.proto +++ /dev/null @@ -1,7 +0,0 @@ -syntax = "proto3"; - -package import_child_package_from_package.package.childpackage; - -message ChildMessage { - -} diff --git a/tests/inputs/import_child_package_from_package/import_child_package_from_package.proto b/tests/inputs/import_child_package_from_package/import_child_package_from_package.proto deleted file mode 100644 index 66e0aa81..00000000 --- a/tests/inputs/import_child_package_from_package/import_child_package_from_package.proto +++ /dev/null @@ -1,11 +0,0 @@ -syntax = "proto3"; - -package import_child_package_from_package; - -import "package_message.proto"; - -// Tests generated imports when a message in a package refers to a message in a nested child package. 
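
The googletypes_value fixture above relies on google.protobuf.Value being the well-known wrapper for an arbitrary JSON value (null, bool, number, string, list or object). A minimal sketch with the upstream struct_pb2 and json_format modules:

    from google.protobuf import json_format, struct_pb2

    value = struct_pb2.Value()
    json_format.Parse('{"a": [1, 2, 3], "b": null}', value)

    # The JSON object lands in the struct_value branch of the 'kind' oneof.
    assert value.struct_value.fields["a"].list_value.values[0].number_value == 1
    assert value.struct_value.fields["b"].WhichOneof("kind") == "null_value"
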
- -message Test { - package.PackageMessage message = 1; -} diff --git a/tests/inputs/import_child_package_from_package/package_message.proto b/tests/inputs/import_child_package_from_package/package_message.proto deleted file mode 100644 index 79d66f37..00000000 --- a/tests/inputs/import_child_package_from_package/package_message.proto +++ /dev/null @@ -1,9 +0,0 @@ -syntax = "proto3"; - -import "child.proto"; - -package import_child_package_from_package.package; - -message PackageMessage { - package.childpackage.ChildMessage c = 1; -} diff --git a/tests/inputs/import_child_package_from_root/child.proto b/tests/inputs/import_child_package_from_root/child.proto deleted file mode 100644 index 2a46d5f5..00000000 --- a/tests/inputs/import_child_package_from_root/child.proto +++ /dev/null @@ -1,7 +0,0 @@ -syntax = "proto3"; - -package import_child_package_from_root.childpackage; - -message Message { - -} diff --git a/tests/inputs/import_child_package_from_root/import_child_package_from_root.proto b/tests/inputs/import_child_package_from_root/import_child_package_from_root.proto deleted file mode 100644 index 62998310..00000000 --- a/tests/inputs/import_child_package_from_root/import_child_package_from_root.proto +++ /dev/null @@ -1,11 +0,0 @@ -syntax = "proto3"; - -package import_child_package_from_root; - -import "child.proto"; - -// Tests generated imports when a message in root refers to a message in a child package. - -message Test { - childpackage.Message child = 1; -} diff --git a/tests/inputs/import_child_scoping_rules/child.proto b/tests/inputs/import_child_scoping_rules/child.proto deleted file mode 100644 index f491e0da..00000000 --- a/tests/inputs/import_child_scoping_rules/child.proto +++ /dev/null @@ -1,7 +0,0 @@ -syntax = "proto3"; - -package import_child_scoping_rules.aaa.bbb.ccc.ddd; - -message ChildMessage { - -} diff --git a/tests/inputs/import_child_scoping_rules/import_child_scoping_rules.proto b/tests/inputs/import_child_scoping_rules/import_child_scoping_rules.proto deleted file mode 100644 index 272852cc..00000000 --- a/tests/inputs/import_child_scoping_rules/import_child_scoping_rules.proto +++ /dev/null @@ -1,9 +0,0 @@ -syntax = "proto3"; - -package import_child_scoping_rules; - -import "package.proto"; - -message Test { - aaa.bbb.Msg msg = 1; -} diff --git a/tests/inputs/import_child_scoping_rules/package.proto b/tests/inputs/import_child_scoping_rules/package.proto deleted file mode 100644 index 6b51fe56..00000000 --- a/tests/inputs/import_child_scoping_rules/package.proto +++ /dev/null @@ -1,13 +0,0 @@ -syntax = "proto3"; - -package import_child_scoping_rules.aaa.bbb; - -import "child.proto"; - -message Msg { - .import_child_scoping_rules.aaa.bbb.ccc.ddd.ChildMessage a = 1; - import_child_scoping_rules.aaa.bbb.ccc.ddd.ChildMessage b = 2; - aaa.bbb.ccc.ddd.ChildMessage c = 3; - bbb.ccc.ddd.ChildMessage d = 4; - ccc.ddd.ChildMessage e = 5; -} diff --git a/tests/inputs/import_circular_dependency/import_circular_dependency.proto b/tests/inputs/import_circular_dependency/import_circular_dependency.proto deleted file mode 100644 index 4441be9f..00000000 --- a/tests/inputs/import_circular_dependency/import_circular_dependency.proto +++ /dev/null @@ -1,30 +0,0 @@ -syntax = "proto3"; - -package import_circular_dependency; - -import "root.proto"; -import "other.proto"; - -// This test-case verifies support for circular dependencies in the generated python files. -// -// This is important because we generate 1 python file/module per package, rather than 1 file per proto file. 
-// -// Scenario: -// -// The proto messages depend on each other in a non-circular way: -// -// Test -------> RootPackageMessage <--------------. -// `------------------------------------> OtherPackageMessage -// -// Test and RootPackageMessage are in different files, but belong to the same package (root): -// -// (Test -------> RootPackageMessage) <------------. -// `------------------------------------> OtherPackageMessage -// -// After grouping the packages into single files or modules, a circular dependency is created: -// -// (root: Test & RootPackageMessage) <-------> (other: OtherPackageMessage) -message Test { - RootPackageMessage message = 1; - other.OtherPackageMessage other_value = 2; -} diff --git a/tests/inputs/import_circular_dependency/other.proto b/tests/inputs/import_circular_dependency/other.proto deleted file mode 100644 index 833b8699..00000000 --- a/tests/inputs/import_circular_dependency/other.proto +++ /dev/null @@ -1,8 +0,0 @@ -syntax = "proto3"; - -import "root.proto"; -package import_circular_dependency.other; - -message OtherPackageMessage { - RootPackageMessage rootPackageMessage = 1; -} diff --git a/tests/inputs/import_circular_dependency/root.proto b/tests/inputs/import_circular_dependency/root.proto deleted file mode 100644 index 73839477..00000000 --- a/tests/inputs/import_circular_dependency/root.proto +++ /dev/null @@ -1,7 +0,0 @@ -syntax = "proto3"; - -package import_circular_dependency; - -message RootPackageMessage { - -} diff --git a/tests/inputs/import_cousin_package/cousin.proto b/tests/inputs/import_cousin_package/cousin.proto deleted file mode 100644 index 2870dfe9..00000000 --- a/tests/inputs/import_cousin_package/cousin.proto +++ /dev/null @@ -1,6 +0,0 @@ -syntax = "proto3"; - -package import_cousin_package.cousin.cousin_subpackage; - -message CousinMessage { -} diff --git a/tests/inputs/import_cousin_package/test.proto b/tests/inputs/import_cousin_package/test.proto deleted file mode 100644 index 89ec3d84..00000000 --- a/tests/inputs/import_cousin_package/test.proto +++ /dev/null @@ -1,11 +0,0 @@ -syntax = "proto3"; - -package import_cousin_package.test.subpackage; - -import "cousin.proto"; - -// Verify that we can import message unrelated to us - -message Test { - cousin.cousin_subpackage.CousinMessage message = 1; -} diff --git a/tests/inputs/import_cousin_package_same_name/cousin.proto b/tests/inputs/import_cousin_package_same_name/cousin.proto deleted file mode 100644 index 84b6a407..00000000 --- a/tests/inputs/import_cousin_package_same_name/cousin.proto +++ /dev/null @@ -1,6 +0,0 @@ -syntax = "proto3"; - -package import_cousin_package_same_name.cousin.subpackage; - -message CousinMessage { -} diff --git a/tests/inputs/import_cousin_package_same_name/test.proto b/tests/inputs/import_cousin_package_same_name/test.proto deleted file mode 100644 index 7b420d30..00000000 --- a/tests/inputs/import_cousin_package_same_name/test.proto +++ /dev/null @@ -1,11 +0,0 @@ -syntax = "proto3"; - -package import_cousin_package_same_name.test.subpackage; - -import "cousin.proto"; - -// Verify that we can import a message unrelated to us, in a subpackage with the same name as us. 
- -message Test { - cousin.subpackage.CousinMessage message = 1; -} diff --git a/tests/inputs/import_nested_child_package_from_root/child.proto b/tests/inputs/import_nested_child_package_from_root/child.proto deleted file mode 100644 index fcd7e2f6..00000000 --- a/tests/inputs/import_nested_child_package_from_root/child.proto +++ /dev/null @@ -1,7 +0,0 @@ -syntax = "proto3"; - -package import_nested_child_package_from_root.package.child.otherchild; - -message ChildMessage { - -} diff --git a/tests/inputs/import_nested_child_package_from_root/import_nested_child_package_from_root.proto b/tests/inputs/import_nested_child_package_from_root/import_nested_child_package_from_root.proto deleted file mode 100644 index 96da1ace..00000000 --- a/tests/inputs/import_nested_child_package_from_root/import_nested_child_package_from_root.proto +++ /dev/null @@ -1,9 +0,0 @@ -syntax = "proto3"; - -package import_nested_child_package_from_root; - -import "child.proto"; - -message Test { - package.child.otherchild.ChildMessage child = 1; -} diff --git a/tests/inputs/import_packages_same_name/import_packages_same_name.proto b/tests/inputs/import_packages_same_name/import_packages_same_name.proto deleted file mode 100644 index dff7efed..00000000 --- a/tests/inputs/import_packages_same_name/import_packages_same_name.proto +++ /dev/null @@ -1,13 +0,0 @@ -syntax = "proto3"; - -package import_packages_same_name; - -import "users_v1.proto"; -import "posts_v1.proto"; - -// Tests generated message can correctly reference two packages with the same leaf-name - -message Test { - users.v1.User user = 1; - posts.v1.Post post = 2; -} diff --git a/tests/inputs/import_packages_same_name/posts_v1.proto b/tests/inputs/import_packages_same_name/posts_v1.proto deleted file mode 100644 index d3b9b1ca..00000000 --- a/tests/inputs/import_packages_same_name/posts_v1.proto +++ /dev/null @@ -1,7 +0,0 @@ -syntax = "proto3"; - -package import_packages_same_name.posts.v1; - -message Post { - -} diff --git a/tests/inputs/import_packages_same_name/users_v1.proto b/tests/inputs/import_packages_same_name/users_v1.proto deleted file mode 100644 index d3a17e92..00000000 --- a/tests/inputs/import_packages_same_name/users_v1.proto +++ /dev/null @@ -1,7 +0,0 @@ -syntax = "proto3"; - -package import_packages_same_name.users.v1; - -message User { - -} diff --git a/tests/inputs/import_parent_package_from_child/import_parent_package_from_child.proto b/tests/inputs/import_parent_package_from_child/import_parent_package_from_child.proto deleted file mode 100644 index edc47362..00000000 --- a/tests/inputs/import_parent_package_from_child/import_parent_package_from_child.proto +++ /dev/null @@ -1,12 +0,0 @@ -syntax = "proto3"; - -import "parent_package_message.proto"; - -package import_parent_package_from_child.parent.child; - -// Tests generated imports when a message refers to a message defined in its parent package - -message Test { - ParentPackageMessage message_implicit = 1; - parent.ParentPackageMessage message_explicit = 2; -} diff --git a/tests/inputs/import_parent_package_from_child/parent_package_message.proto b/tests/inputs/import_parent_package_from_child/parent_package_message.proto deleted file mode 100644 index fb3fd31d..00000000 --- a/tests/inputs/import_parent_package_from_child/parent_package_message.proto +++ /dev/null @@ -1,6 +0,0 @@ -syntax = "proto3"; - -package import_parent_package_from_child.parent; - -message ParentPackageMessage { -} diff --git a/tests/inputs/import_root_package_from_child/child.proto 
b/tests/inputs/import_root_package_from_child/child.proto deleted file mode 100644 index bd519677..00000000 --- a/tests/inputs/import_root_package_from_child/child.proto +++ /dev/null @@ -1,11 +0,0 @@ -syntax = "proto3"; - -package import_root_package_from_child.child; - -import "root.proto"; - -// Verify that we can import root message from child package - -message Test { - RootMessage message = 1; -} diff --git a/tests/inputs/import_root_package_from_child/root.proto b/tests/inputs/import_root_package_from_child/root.proto deleted file mode 100644 index 6ae955ad..00000000 --- a/tests/inputs/import_root_package_from_child/root.proto +++ /dev/null @@ -1,7 +0,0 @@ -syntax = "proto3"; - -package import_root_package_from_child; - - -message RootMessage { -} diff --git a/tests/inputs/import_root_sibling/import_root_sibling.proto b/tests/inputs/import_root_sibling/import_root_sibling.proto deleted file mode 100644 index 759e606f..00000000 --- a/tests/inputs/import_root_sibling/import_root_sibling.proto +++ /dev/null @@ -1,11 +0,0 @@ -syntax = "proto3"; - -package import_root_sibling; - -import "sibling.proto"; - -// Tests generated imports when a message in the root package refers to another message in the root package - -message Test { - SiblingMessage sibling = 1; -} diff --git a/tests/inputs/import_root_sibling/sibling.proto b/tests/inputs/import_root_sibling/sibling.proto deleted file mode 100644 index 6b6ba2ea..00000000 --- a/tests/inputs/import_root_sibling/sibling.proto +++ /dev/null @@ -1,7 +0,0 @@ -syntax = "proto3"; - -package import_root_sibling; - -message SiblingMessage { - -} diff --git a/tests/inputs/import_service_input_message/child_package_request_message.proto b/tests/inputs/import_service_input_message/child_package_request_message.proto deleted file mode 100644 index 54fc1123..00000000 --- a/tests/inputs/import_service_input_message/child_package_request_message.proto +++ /dev/null @@ -1,7 +0,0 @@ -syntax = "proto3"; - -package import_service_input_message.child; - -message ChildRequestMessage { - int32 child_argument = 1; -} \ No newline at end of file diff --git a/tests/inputs/import_service_input_message/import_service_input_message.proto b/tests/inputs/import_service_input_message/import_service_input_message.proto deleted file mode 100644 index cbf48fa9..00000000 --- a/tests/inputs/import_service_input_message/import_service_input_message.proto +++ /dev/null @@ -1,25 +0,0 @@ -syntax = "proto3"; - -package import_service_input_message; - -import "request_message.proto"; -import "child_package_request_message.proto"; - -// Tests generated service correctly imports the RequestMessage - -service Test { - rpc DoThing (RequestMessage) returns (RequestResponse); - rpc DoThing2 (child.ChildRequestMessage) returns (RequestResponse); - rpc DoThing3 (Nested.RequestMessage) returns (RequestResponse); -} - - -message RequestResponse { - int32 value = 1; -} - -message Nested { - message RequestMessage { - int32 nestedArgument = 1; - } -} \ No newline at end of file diff --git a/tests/inputs/import_service_input_message/request_message.proto b/tests/inputs/import_service_input_message/request_message.proto deleted file mode 100644 index 36a6e788..00000000 --- a/tests/inputs/import_service_input_message/request_message.proto +++ /dev/null @@ -1,7 +0,0 @@ -syntax = "proto3"; - -package import_service_input_message; - -message RequestMessage { - int32 argument = 1; -} \ No newline at end of file diff --git a/tests/inputs/import_service_input_message/test_import_service_input_message.py 
b/tests/inputs/import_service_input_message/test_import_service_input_message.py deleted file mode 100644 index 60c3e202..00000000 --- a/tests/inputs/import_service_input_message/test_import_service_input_message.py +++ /dev/null @@ -1,36 +0,0 @@ -import pytest - -from tests.mocks import MockChannel -from tests.output_betterproto.import_service_input_message import ( - NestedRequestMessage, - RequestMessage, - RequestResponse, - TestStub, -) -from tests.output_betterproto.import_service_input_message.child import ( - ChildRequestMessage, -) - - -@pytest.mark.asyncio -async def test_service_correctly_imports_reference_message(): - mock_response = RequestResponse(value=10) - service = TestStub(MockChannel([mock_response])) - response = await service.do_thing(RequestMessage(1)) - assert mock_response == response - - -@pytest.mark.asyncio -async def test_service_correctly_imports_reference_message_from_child_package(): - mock_response = RequestResponse(value=10) - service = TestStub(MockChannel([mock_response])) - response = await service.do_thing2(ChildRequestMessage(1)) - assert mock_response == response - - -@pytest.mark.asyncio -async def test_service_correctly_imports_nested_reference(): - mock_response = RequestResponse(value=10) - service = TestStub(MockChannel([mock_response])) - response = await service.do_thing3(NestedRequestMessage(1)) - assert mock_response == response diff --git a/tests/inputs/int32/int32.json b/tests/inputs/int32/int32.json deleted file mode 100644 index 34d41119..00000000 --- a/tests/inputs/int32/int32.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "positive": 150, - "negative": -150 -} diff --git a/tests/inputs/int32/int32.proto b/tests/inputs/int32/int32.proto deleted file mode 100644 index 4721c235..00000000 --- a/tests/inputs/int32/int32.proto +++ /dev/null @@ -1,10 +0,0 @@ -syntax = "proto3"; - -package int32; - -// Some documentation about the Test message. -message Test { - // Some documentation about the count. 
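
The 150/-150 pair in int32.json above is the classic varint example. With any compiled message that has an int32 field numbered 1 (the well-known Int32Value works), the wire bytes are easy to inspect:

    from google.protobuf.wrappers_pb2 import Int32Value

    # Field 1, wire type 0 -> tag byte 0x08; 150 -> varint bytes 0x96 0x01.
    assert Int32Value(value=150).SerializeToString() == b"\x08\x96\x01"

    # Negative int32 values are sign-extended to 64 bits, so they always cost
    # ten varint bytes on the wire, plus the tag byte.
    assert len(Int32Value(value=-150).SerializeToString()) == 11
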
- int32 positive = 1; - int32 negative = 2; -} diff --git a/tests/inputs/invalid_field/invalid_field.proto b/tests/inputs/invalid_field/invalid_field.proto deleted file mode 100644 index e3a73ce1..00000000 --- a/tests/inputs/invalid_field/invalid_field.proto +++ /dev/null @@ -1,7 +0,0 @@ -syntax = "proto3"; - -package invalid_field; - -message Test { - int32 x = 1; -} diff --git a/tests/inputs/invalid_field/test_invalid_field.py b/tests/inputs/invalid_field/test_invalid_field.py deleted file mode 100644 index 947b8e13..00000000 --- a/tests/inputs/invalid_field/test_invalid_field.py +++ /dev/null @@ -1,17 +0,0 @@ -import pytest - - -def test_invalid_field(): - from tests.output_betterproto.invalid_field import Test - - with pytest.raises(TypeError): - Test(unknown_field=12) - - -def test_invalid_field_pydantic(): - from pydantic import ValidationError - - from tests.output_betterproto_pydantic.invalid_field import Test - - with pytest.raises(ValidationError): - Test(unknown_field=12) diff --git a/tests/inputs/map/map.json b/tests/inputs/map/map.json deleted file mode 100644 index 6a1e853b..00000000 --- a/tests/inputs/map/map.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "counts": { - "item1": 1, - "item2": 2, - "item3": 3 - } -} diff --git a/tests/inputs/map/map.proto b/tests/inputs/map/map.proto deleted file mode 100644 index ecef3ccb..00000000 --- a/tests/inputs/map/map.proto +++ /dev/null @@ -1,7 +0,0 @@ -syntax = "proto3"; - -package map; - -message Test { - map<string, int32> counts = 1; -} diff --git a/tests/inputs/mapmessage/mapmessage.json b/tests/inputs/mapmessage/mapmessage.json deleted file mode 100644 index a944ddd1..00000000 --- a/tests/inputs/mapmessage/mapmessage.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "items": { - "foo": { - "count": 1 - }, - "bar": { - "count": 2 - } - } -} diff --git a/tests/inputs/mapmessage/mapmessage.proto b/tests/inputs/mapmessage/mapmessage.proto deleted file mode 100644 index 2c704a49..00000000 --- a/tests/inputs/mapmessage/mapmessage.proto +++ /dev/null @@ -1,11 +0,0 @@ -syntax = "proto3"; - -package mapmessage; - -message Test { - map<string, Nested> items = 1; -} - -message Nested { - int32 count = 1; -} \ No newline at end of file diff --git a/tests/inputs/namespace_builtin_types/namespace_builtin_types.json b/tests/inputs/namespace_builtin_types/namespace_builtin_types.json deleted file mode 100644 index 82000323..00000000 --- a/tests/inputs/namespace_builtin_types/namespace_builtin_types.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "int": "value-for-int", - "float": "value-for-float", - "complex": "value-for-complex", - "list": "value-for-list", - "tuple": "value-for-tuple", - "range": "value-for-range", - "str": "value-for-str", - "bytearray": "value-for-bytearray", - "bytes": "value-for-bytes", - "memoryview": "value-for-memoryview", - "set": "value-for-set", - "frozenset": "value-for-frozenset", - "map": "value-for-map", - "bool": "value-for-bool" -} \ No newline at end of file diff --git a/tests/inputs/namespace_builtin_types/namespace_builtin_types.proto b/tests/inputs/namespace_builtin_types/namespace_builtin_types.proto deleted file mode 100644 index 71cb0298..00000000 --- a/tests/inputs/namespace_builtin_types/namespace_builtin_types.proto +++ /dev/null @@ -1,40 +0,0 @@ -syntax = "proto3"; - -package namespace_builtin_types; - -// Tests that messages may contain fields with names that are python types - -message Test { - // https://docs.python.org/2/library/stdtypes.html#numeric-types-int-float-long-complex - string int = 1; - string float = 2; - string complex = 3; - - //
https://docs.python.org/3/library/stdtypes.html#sequence-types-list-tuple-range - string list = 4; - string tuple = 5; - string range = 6; - - // https://docs.python.org/3/library/stdtypes.html#str - string str = 7; - - // https://docs.python.org/3/library/stdtypes.html#bytearray-objects - string bytearray = 8; - - // https://docs.python.org/3/library/stdtypes.html#bytes-and-bytearray-operations - string bytes = 9; - - // https://docs.python.org/3/library/stdtypes.html#memory-views - string memoryview = 10; - - // https://docs.python.org/3/library/stdtypes.html#set-types-set-frozenset - string set = 11; - string frozenset = 12; - - // https://docs.python.org/3/library/stdtypes.html#dict - string map = 13; - string dict = 14; - - // https://docs.python.org/3/library/stdtypes.html#boolean-values - string bool = 15; -} \ No newline at end of file diff --git a/tests/inputs/namespace_keywords/namespace_keywords.json b/tests/inputs/namespace_keywords/namespace_keywords.json deleted file mode 100644 index 4f11b602..00000000 --- a/tests/inputs/namespace_keywords/namespace_keywords.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "False": 1, - "None": 2, - "True": 3, - "and": 4, - "as": 5, - "assert": 6, - "async": 7, - "await": 8, - "break": 9, - "class": 10, - "continue": 11, - "def": 12, - "del": 13, - "elif": 14, - "else": 15, - "except": 16, - "finally": 17, - "for": 18, - "from": 19, - "global": 20, - "if": 21, - "import": 22, - "in": 23, - "is": 24, - "lambda": 25, - "nonlocal": 26, - "not": 27, - "or": 28, - "pass": 29, - "raise": 30, - "return": 31, - "try": 32, - "while": 33, - "with": 34, - "yield": 35 -} diff --git a/tests/inputs/namespace_keywords/namespace_keywords.proto b/tests/inputs/namespace_keywords/namespace_keywords.proto deleted file mode 100644 index ac3e5c52..00000000 --- a/tests/inputs/namespace_keywords/namespace_keywords.proto +++ /dev/null @@ -1,46 +0,0 @@ -syntax = "proto3"; - -package namespace_keywords; - -// Tests that messages may contain fields that are Python keywords -// -// Generated with Python 3.7.6 -// print('\n'.join(f'string {k} = {i+1};' for i,k in enumerate(keyword.kwlist))) - -message Test { - string False = 1; - string None = 2; - string True = 3; - string and = 4; - string as = 5; - string assert = 6; - string async = 7; - string await = 8; - string break = 9; - string class = 10; - string continue = 11; - string def = 12; - string del = 13; - string elif = 14; - string else = 15; - string except = 16; - string finally = 17; - string for = 18; - string from = 19; - string global = 20; - string if = 21; - string import = 22; - string in = 23; - string is = 24; - string lambda = 25; - string nonlocal = 26; - string not = 27; - string or = 28; - string pass = 29; - string raise = 30; - string return = 31; - string try = 32; - string while = 33; - string with = 34; - string yield = 35; -} \ No newline at end of file diff --git a/tests/inputs/nested/nested.json b/tests/inputs/nested/nested.json deleted file mode 100644 index f460cadb..00000000 --- a/tests/inputs/nested/nested.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "nested": { - "count": 150 - }, - "sibling": {}, - "msg": "THIS" -} diff --git a/tests/inputs/nested/nested.proto b/tests/inputs/nested/nested.proto deleted file mode 100644 index 619c721c..00000000 --- a/tests/inputs/nested/nested.proto +++ /dev/null @@ -1,26 +0,0 @@ -syntax = "proto3"; - -package nested; - -// A test message with a nested message inside of it. -message Test { - // This is the nested type. 
- message Nested { - // Stores a simple counter. - int32 count = 1; - } - // This is the nested enum. - enum Msg { - NONE = 0; - THIS = 1; - } - - Nested nested = 1; - Sibling sibling = 2; - Sibling sibling2 = 3; - Msg msg = 4; -} - -message Sibling { - int32 foo = 1; -} \ No newline at end of file diff --git a/tests/inputs/nested2/nested2.proto b/tests/inputs/nested2/nested2.proto deleted file mode 100644 index cd6510c5..00000000 --- a/tests/inputs/nested2/nested2.proto +++ /dev/null @@ -1,21 +0,0 @@ -syntax = "proto3"; - -package nested2; - -import "package.proto"; - -message Game { - message Player { - enum Race { - human = 0; - orc = 1; - } - } -} - -message Test { - Game game = 1; - Game.Player GamePlayer = 2; - Game.Player.Race GamePlayerRace = 3; - equipment.Weapon Weapon = 4; -} \ No newline at end of file diff --git a/tests/inputs/nested2/package.proto b/tests/inputs/nested2/package.proto deleted file mode 100644 index e12abb12..00000000 --- a/tests/inputs/nested2/package.proto +++ /dev/null @@ -1,7 +0,0 @@ -syntax = "proto3"; - -package nested2.equipment; - -message Weapon { - -} \ No newline at end of file diff --git a/tests/inputs/nestedtwice/nestedtwice.json b/tests/inputs/nestedtwice/nestedtwice.json deleted file mode 100644 index c9531328..00000000 --- a/tests/inputs/nestedtwice/nestedtwice.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "top": { - "name": "double-nested", - "middle": { - "bottom": [{"foo": "hello"}], - "enumBottom": ["A"], - "topMiddleBottom": [{"a": "hello"}], - "bar": true - } - } -} diff --git a/tests/inputs/nestedtwice/nestedtwice.proto b/tests/inputs/nestedtwice/nestedtwice.proto deleted file mode 100644 index 84d142a3..00000000 --- a/tests/inputs/nestedtwice/nestedtwice.proto +++ /dev/null @@ -1,40 +0,0 @@ -syntax = "proto3"; - -package nestedtwice; - -/* Test doc. */ -message Test { - /* Top doc. */ - message Top { - /* Middle doc. */ - message Middle { - /* TopMiddleBottom doc.*/ - message TopMiddleBottom { - // TopMiddleBottom.a doc. - string a = 1; - } - /* EnumBottom doc. */ - enum EnumBottom{ - /* EnumBottom.A doc. */ - A = 0; - B = 1; - } - /* Bottom doc. */ - message Bottom { - /* Bottom.foo doc. */ - string foo = 1; - } - reserved 1; - /* Middle.bottom doc. */ - repeated Bottom bottom = 2; - repeated EnumBottom enumBottom=3; - repeated TopMiddleBottom topMiddleBottom=4; - bool bar = 5; - } - /* Top.name doc. */ - string name = 1; - Middle middle = 2; - } - /* Test.top doc. */ - Top top = 1; -} diff --git a/tests/inputs/nestedtwice/test_nestedtwice.py b/tests/inputs/nestedtwice/test_nestedtwice.py deleted file mode 100644 index ca0557a7..00000000 --- a/tests/inputs/nestedtwice/test_nestedtwice.py +++ /dev/null @@ -1,25 +0,0 @@ -import pytest - -from tests.output_betterproto.nestedtwice import ( - Test, - TestTop, - TestTopMiddle, - TestTopMiddleBottom, - TestTopMiddleEnumBottom, - TestTopMiddleTopMiddleBottom, -) - - -@pytest.mark.parametrize( - ("cls", "expected_comment"), - [ - (Test, "Test doc."), - (TestTopMiddleEnumBottom, "EnumBottom doc."), - (TestTop, "Top doc."), - (TestTopMiddle, "Middle doc."), - (TestTopMiddleTopMiddleBottom, "TopMiddleBottom doc."), - (TestTopMiddleBottom, "Bottom doc."), - ], -) -def test_comment(cls, expected_comment): - assert cls.__doc__.strip() == expected_comment diff --git a/tests/inputs/oneof/oneof-name.json b/tests/inputs/oneof/oneof-name.json deleted file mode 100644 index 605484b6..00000000 --- a/tests/inputs/oneof/oneof-name.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pitier": "Mr. 
T" -} diff --git a/tests/inputs/oneof/oneof.json b/tests/inputs/oneof/oneof.json deleted file mode 100644 index 65cafc5f..00000000 --- a/tests/inputs/oneof/oneof.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pitied": 100 -} diff --git a/tests/inputs/oneof/oneof.proto b/tests/inputs/oneof/oneof.proto deleted file mode 100644 index 41f93b0e..00000000 --- a/tests/inputs/oneof/oneof.proto +++ /dev/null @@ -1,23 +0,0 @@ -syntax = "proto3"; - -package oneof; - -message MixedDrink { - int32 shots = 1; -} - -message Test { - oneof foo { - int32 pitied = 1; - string pitier = 2; - } - - int32 just_a_regular_field = 3; - - oneof bar { - int32 drinks = 11; - string bar_name = 12; - MixedDrink mixed_drink = 13; - } -} - diff --git a/tests/inputs/oneof/oneof_name.json b/tests/inputs/oneof/oneof_name.json deleted file mode 100644 index 605484b6..00000000 --- a/tests/inputs/oneof/oneof_name.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "pitier": "Mr. T" -} diff --git a/tests/inputs/oneof/test_oneof.py b/tests/inputs/oneof/test_oneof.py deleted file mode 100644 index b7d2d94a..00000000 --- a/tests/inputs/oneof/test_oneof.py +++ /dev/null @@ -1,43 +0,0 @@ -import pytest - -import betterproto -from tests.output_betterproto.oneof import ( - MixedDrink, - Test, -) -from tests.output_betterproto_pydantic.oneof import Test as TestPyd -from tests.util import get_test_case_json_data - - -def test_which_count(): - message = Test() - message.from_json(get_test_case_json_data("oneof")[0].json) - assert betterproto.which_one_of(message, "foo") == ("pitied", 100) - - -def test_which_name(): - message = Test() - message.from_json(get_test_case_json_data("oneof", "oneof_name.json")[0].json) - assert betterproto.which_one_of(message, "foo") == ("pitier", "Mr. T") - - -def test_which_count_pyd(): - message = TestPyd(pitier="Mr. T", just_a_regular_field=2, bar_name="a_bar") - assert betterproto.which_one_of(message, "foo") == ("pitier", "Mr. 
T") - - -def test_oneof_constructor_assign(): - message = Test(mixed_drink=MixedDrink(shots=42)) - field, value = betterproto.which_one_of(message, "bar") - assert field == "mixed_drink" - assert value.shots == 42 - - -# Issue #305: -@pytest.mark.xfail -def test_oneof_nested_assign(): - message = Test() - message.mixed_drink.shots = 42 - field, value = betterproto.which_one_of(message, "bar") - assert field == "mixed_drink" - assert value.shots == 42 diff --git a/tests/inputs/oneof_default_value_serialization/oneof_default_value_serialization.proto b/tests/inputs/oneof_default_value_serialization/oneof_default_value_serialization.proto deleted file mode 100644 index f7ac6fe8..00000000 --- a/tests/inputs/oneof_default_value_serialization/oneof_default_value_serialization.proto +++ /dev/null @@ -1,30 +0,0 @@ -syntax = "proto3"; - -package oneof_default_value_serialization; - -import "google/protobuf/duration.proto"; -import "google/protobuf/timestamp.proto"; -import "google/protobuf/wrappers.proto"; - -message Message{ - int64 value = 1; -} - -message NestedMessage{ - int64 id = 1; - oneof value_type{ - Message wrapped_message_value = 2; - } -} - -message Test{ - oneof value_type { - bool bool_value = 1; - int64 int64_value = 2; - google.protobuf.Timestamp timestamp_value = 3; - google.protobuf.Duration duration_value = 4; - Message wrapped_message_value = 5; - NestedMessage wrapped_nested_message_value = 6; - google.protobuf.BoolValue wrapped_bool_value = 7; - } -} \ No newline at end of file diff --git a/tests/inputs/oneof_default_value_serialization/test_oneof_default_value_serialization.py b/tests/inputs/oneof_default_value_serialization/test_oneof_default_value_serialization.py deleted file mode 100644 index d2ff494e..00000000 --- a/tests/inputs/oneof_default_value_serialization/test_oneof_default_value_serialization.py +++ /dev/null @@ -1,69 +0,0 @@ -import datetime - -import betterproto -from tests.output_betterproto.oneof_default_value_serialization import ( - Message, - NestedMessage, - Test, -) - - -def assert_round_trip_serialization_works(message: Test) -> None: - assert betterproto.which_one_of(message, "value_type") == betterproto.which_one_of( - Test().from_json(message.to_json()), "value_type" - ) - - -def test_oneof_default_value_serialization_works_for_all_values(): - """ - Serialization from message with oneof set to default -> JSON -> message should keep - default value field intact. - """ - - test_cases = [ - Test(bool_value=False), - Test(int64_value=0), - Test( - timestamp_value=datetime.datetime( - year=1970, - month=1, - day=1, - hour=0, - minute=0, - tzinfo=datetime.timezone.utc, - ) - ), - Test(duration_value=datetime.timedelta(0)), - Test(wrapped_message_value=Message(value=0)), - # NOTE: Do NOT use betterproto.BoolValue here, it will cause JSON serialization - # errors. - # TODO: Do we want to allow use of BoolValue directly within a wrapped field or - # should we simply hard fail here? 
- Test(wrapped_bool_value=False), - ] - for message in test_cases: - assert_round_trip_serialization_works(message) - - -def test_oneof_no_default_values_passed(): - message = Test() - assert ( - betterproto.which_one_of(message, "value_type") - == betterproto.which_one_of(Test().from_json(message.to_json()), "value_type") - == ("", None) - ) - - -def test_oneof_nested_oneof_messages_are_serialized_with_defaults(): - """ - Nested messages with oneofs should also be handled - """ - message = Test(wrapped_nested_message_value=NestedMessage(id=0, wrapped_message_value=Message(value=0))) - assert ( - betterproto.which_one_of(message, "value_type") - == betterproto.which_one_of(Test().from_json(message.to_json()), "value_type") - == ( - "wrapped_nested_message_value", - NestedMessage(id=0, wrapped_message_value=Message(value=0)), - ) - ) diff --git a/tests/inputs/oneof_empty/oneof_empty.json b/tests/inputs/oneof_empty/oneof_empty.json deleted file mode 100644 index 9d21c897..00000000 --- a/tests/inputs/oneof_empty/oneof_empty.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "nothing": {} -} diff --git a/tests/inputs/oneof_empty/oneof_empty.proto b/tests/inputs/oneof_empty/oneof_empty.proto deleted file mode 100644 index ca51d5ae..00000000 --- a/tests/inputs/oneof_empty/oneof_empty.proto +++ /dev/null @@ -1,17 +0,0 @@ -syntax = "proto3"; - -package oneof_empty; - -message Nothing {} - -message MaybeNothing { - string sometimes = 42; -} - -message Test { - oneof empty { - Nothing nothing = 1; - MaybeNothing maybe1 = 2; - MaybeNothing maybe2 = 3; - } -} diff --git a/tests/inputs/oneof_empty/oneof_empty_maybe1.json b/tests/inputs/oneof_empty/oneof_empty_maybe1.json deleted file mode 100644 index f7a2d278..00000000 --- a/tests/inputs/oneof_empty/oneof_empty_maybe1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "maybe1": {} -} diff --git a/tests/inputs/oneof_empty/oneof_empty_maybe2.json b/tests/inputs/oneof_empty/oneof_empty_maybe2.json deleted file mode 100644 index bc2b385b..00000000 --- a/tests/inputs/oneof_empty/oneof_empty_maybe2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "maybe2": { - "sometimes": "now" - } -} diff --git a/tests/inputs/oneof_empty/test_oneof_empty.py b/tests/inputs/oneof_empty/test_oneof_empty.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/inputs/oneof_enum/oneof_enum-enum-0.json b/tests/inputs/oneof_enum/oneof_enum-enum-0.json deleted file mode 100644 index be30cf08..00000000 --- a/tests/inputs/oneof_enum/oneof_enum-enum-0.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "signal": "PASS" -} diff --git a/tests/inputs/oneof_enum/oneof_enum-enum-1.json b/tests/inputs/oneof_enum/oneof_enum-enum-1.json deleted file mode 100644 index cb638737..00000000 --- a/tests/inputs/oneof_enum/oneof_enum-enum-1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "signal": "RESIGN" -} diff --git a/tests/inputs/oneof_enum/oneof_enum.json b/tests/inputs/oneof_enum/oneof_enum.json deleted file mode 100644 index 3220b706..00000000 --- a/tests/inputs/oneof_enum/oneof_enum.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "move": { - "x": 2, - "y": 3 - } -} diff --git a/tests/inputs/oneof_enum/oneof_enum.proto b/tests/inputs/oneof_enum/oneof_enum.proto deleted file mode 100644 index 906abcb1..00000000 --- a/tests/inputs/oneof_enum/oneof_enum.proto +++ /dev/null @@ -1,20 +0,0 @@ -syntax = "proto3"; - -package oneof_enum; - -message Test { - oneof action { - Signal signal = 1; - Move move = 2; - } -} - -enum Signal { - PASS = 0; - RESIGN = 1; -} - -message Move { - int32 x = 1; - int32 y = 2; -} \ No newline at end of file 
diff --git a/tests/inputs/oneof_enum/test_oneof_enum.py b/tests/inputs/oneof_enum/test_oneof_enum.py deleted file mode 100644 index 4a71223b..00000000 --- a/tests/inputs/oneof_enum/test_oneof_enum.py +++ /dev/null @@ -1,39 +0,0 @@ -import betterproto -from tests.output_betterproto.oneof_enum import ( - Move, - Signal, - Test, -) -from tests.util import get_test_case_json_data - - -def test_which_one_of_returns_enum_with_default_value(): - """ - returns first field when it is enum and set with default value - """ - message = Test() - message.from_json(get_test_case_json_data("oneof_enum", "oneof_enum-enum-0.json")[0].json) - - assert message.move is None - assert message.signal == Signal.PASS - assert betterproto.which_one_of(message, "action") == ("signal", Signal.PASS) - - -def test_which_one_of_returns_enum_with_non_default_value(): - """ - returns first field when it is enum and set with non default value - """ - message = Test() - message.from_json(get_test_case_json_data("oneof_enum", "oneof_enum-enum-1.json")[0].json) - - assert message.move is None - assert message.signal == Signal.RESIGN - assert betterproto.which_one_of(message, "action") == ("signal", Signal.RESIGN) - - -def test_which_one_of_returns_second_field_when_set(): - message = Test() - message.from_json(get_test_case_json_data("oneof_enum")[0].json) - assert message.move == Move(x=2, y=3) - assert message.signal is None - assert betterproto.which_one_of(message, "action") == ("move", Move(x=2, y=3)) diff --git a/tests/inputs/proto3_field_presence/proto3_field_presence.json b/tests/inputs/proto3_field_presence/proto3_field_presence.json deleted file mode 100644 index 988df8e8..00000000 --- a/tests/inputs/proto3_field_presence/proto3_field_presence.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "test1": 128, - "test2": true, - "test3": "A value", - "test4": "aGVsbG8=", - "test5": { - "test": "Hello" - }, - "test6": "B", - "test7": "8589934592", - "test8": 2.5, - "test9": "2022-01-24T12:12:42Z" -} diff --git a/tests/inputs/proto3_field_presence/proto3_field_presence.proto b/tests/inputs/proto3_field_presence/proto3_field_presence.proto deleted file mode 100644 index f28123df..00000000 --- a/tests/inputs/proto3_field_presence/proto3_field_presence.proto +++ /dev/null @@ -1,26 +0,0 @@ -syntax = "proto3"; - -package proto3_field_presence; - -import "google/protobuf/timestamp.proto"; - -message InnerTest { - string test = 1; -} - -message Test { - optional uint32 test1 = 1; - optional bool test2 = 2; - optional string test3 = 3; - optional bytes test4 = 4; - optional InnerTest test5 = 5; - optional TestEnum test6 = 6; - optional uint64 test7 = 7; - optional float test8 = 8; - optional google.protobuf.Timestamp test9 = 9; -} - -enum TestEnum { - A = 0; - B = 1; -} diff --git a/tests/inputs/proto3_field_presence/proto3_field_presence_default.json b/tests/inputs/proto3_field_presence/proto3_field_presence_default.json deleted file mode 100644 index 0967ef42..00000000 --- a/tests/inputs/proto3_field_presence/proto3_field_presence_default.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tests/inputs/proto3_field_presence/proto3_field_presence_missing.json b/tests/inputs/proto3_field_presence/proto3_field_presence_missing.json deleted file mode 100644 index b19ae980..00000000 --- a/tests/inputs/proto3_field_presence/proto3_field_presence_missing.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "test1": 0, - "test2": false, - "test3": "", - "test4": "", - "test6": "A", - "test7": "0", - "test8": 0 -} diff --git 
a/tests/inputs/proto3_field_presence/test_proto3_field_presence.py b/tests/inputs/proto3_field_presence/test_proto3_field_presence.py deleted file mode 100644 index 9c2d6e69..00000000 --- a/tests/inputs/proto3_field_presence/test_proto3_field_presence.py +++ /dev/null @@ -1,46 +0,0 @@ -import json - -from tests.output_betterproto.proto3_field_presence import ( - Test, -) - - -def test_null_fields_json(): - """Ensure that using "null" in JSON is equivalent to not specifying a - field, for fields with explicit presence""" - - def test_json(ref_json: str, obj_json: str) -> None: - """`ref_json` and `obj_json` are JSON strings describing a `Test` object. - Test that deserializing both leads to the same object, and that - `ref_json` is the normalized format.""" - ref_obj = Test().from_json(ref_json) - obj = Test().from_json(obj_json) - - assert obj == ref_obj - assert json.loads(obj.to_json(0)) == json.loads(ref_json) - - test_json("{}", '{ "test1": null, "test2": null, "test3": null }') - test_json("{}", '{ "test4": null, "test5": null, "test6": null }') - test_json("{}", '{ "test7": null, "test8": null }') - test_json('{ "test5": {} }', '{ "test3": null, "test5": {} }') - - # Make sure that if include_default_values is set, None values are - # exported. - obj = Test() - assert obj.to_dict() == {} - assert obj.to_dict(include_default_values=True) == { - "test1": None, - "test2": None, - "test3": None, - "test4": None, - "test5": None, - "test6": None, - "test7": None, - "test8": None, - "test9": None, - } - - -def test_unset_access(): # see #523 - assert Test().test1 is None - assert Test(test1=None).test1 is None diff --git a/tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.json b/tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.json deleted file mode 100644 index da081927..00000000 --- a/tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "nested": {} -} diff --git a/tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.proto b/tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.proto deleted file mode 100644 index caa76ec8..00000000 --- a/tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.proto +++ /dev/null @@ -1,22 +0,0 @@ -syntax = "proto3"; - -package proto3_field_presence_oneof; - -message Test { - oneof kind { - Nested nested = 1; - WithOptional with_optional = 2; - } -} - -message InnerNested { - optional bool a = 1; -} - -message Nested { - InnerNested inner = 1; -} - -message WithOptional { - optional bool b = 2; -} diff --git a/tests/inputs/proto3_field_presence_oneof/test_proto3_field_presence_oneof.py b/tests/inputs/proto3_field_presence_oneof/test_proto3_field_presence_oneof.py deleted file mode 100644 index 2320dc64..00000000 --- a/tests/inputs/proto3_field_presence_oneof/test_proto3_field_presence_oneof.py +++ /dev/null @@ -1,27 +0,0 @@ -from tests.output_betterproto.proto3_field_presence_oneof import ( - Nested, - Test, - WithOptional, -) - - -def test_serialization(): - """Ensure that serialization of fields unset but with explicit field - presence do not bloat the serialized payload with length-delimited fields - with length 0""" - - def test_empty_nested(message: Test) -> None: - # '0a' => tag 1, length delimited - # '00' => length: 0 - assert bytes(message) == bytearray.fromhex("0a 00") - - test_empty_nested(Test(nested=Nested())) - test_empty_nested(Test(nested=Nested(inner=None))) - - def test_empty_with_optional(message: 
Test) -> None: - # '12' => tag 2, length delimited - # '00' => length: 0 - assert bytes(message) == bytearray.fromhex("12 00") - - test_empty_with_optional(Test(with_optional=WithOptional())) - test_empty_with_optional(Test(with_optional=WithOptional(b=None))) diff --git a/tests/inputs/recursivemessage/recursivemessage.json b/tests/inputs/recursivemessage/recursivemessage.json deleted file mode 100644 index e92c3fbf..00000000 --- a/tests/inputs/recursivemessage/recursivemessage.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "name": "Zues", - "child": { - "name": "Hercules" - }, - "intermediate": { - "child": { - "name": "Douglas Adams" - }, - "number": 42 - } -} diff --git a/tests/inputs/recursivemessage/recursivemessage.proto b/tests/inputs/recursivemessage/recursivemessage.proto deleted file mode 100644 index 1da2b57e..00000000 --- a/tests/inputs/recursivemessage/recursivemessage.proto +++ /dev/null @@ -1,15 +0,0 @@ -syntax = "proto3"; - -package recursivemessage; - -message Test { - string name = 1; - Test child = 2; - Intermediate intermediate = 3; -} - - -message Intermediate { - int32 number = 1; - Test child = 2; -} diff --git a/tests/inputs/ref/ref.json b/tests/inputs/ref/ref.json deleted file mode 100644 index 2c6bdc10..00000000 --- a/tests/inputs/ref/ref.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "greeting": { - "greeting": "hello" - } -} diff --git a/tests/inputs/ref/ref.proto b/tests/inputs/ref/ref.proto deleted file mode 100644 index 69455909..00000000 --- a/tests/inputs/ref/ref.proto +++ /dev/null @@ -1,9 +0,0 @@ -syntax = "proto3"; - -package ref; - -import "repeatedmessage.proto"; - -message Test { - repeatedmessage.Sub greeting = 1; -} diff --git a/tests/inputs/ref/repeatedmessage.proto b/tests/inputs/ref/repeatedmessage.proto deleted file mode 100644 index 0ffacafd..00000000 --- a/tests/inputs/ref/repeatedmessage.proto +++ /dev/null @@ -1,11 +0,0 @@ -syntax = "proto3"; - -package repeatedmessage; - -message Test { - repeated Sub greetings = 1; -} - -message Sub { - string greeting = 1; -} \ No newline at end of file diff --git a/tests/inputs/regression_387/regression_387.proto b/tests/inputs/regression_387/regression_387.proto deleted file mode 100644 index 57bd9544..00000000 --- a/tests/inputs/regression_387/regression_387.proto +++ /dev/null @@ -1,12 +0,0 @@ -syntax = "proto3"; - -package regression_387; - -message Test { - uint64 id = 1; -} - -message ParentElement { - string name = 1; - repeated Test elems = 2; -} \ No newline at end of file diff --git a/tests/inputs/regression_387/test_regression_387.py b/tests/inputs/regression_387/test_regression_387.py deleted file mode 100644 index 7bb40b2e..00000000 --- a/tests/inputs/regression_387/test_regression_387.py +++ /dev/null @@ -1,12 +0,0 @@ -from tests.output_betterproto.regression_387 import ( - ParentElement, - Test, -) - - -def test_regression_387(): - el = ParentElement(name="test", elems=[Test(id=0), Test(id=42)]) - binary = bytes(el) - decoded = ParentElement().parse(binary) - assert decoded == el - assert decoded.elems == [Test(id=0), Test(id=42)] diff --git a/tests/inputs/regression_414/regression_414.proto b/tests/inputs/regression_414/regression_414.proto deleted file mode 100644 index d20dddab..00000000 --- a/tests/inputs/regression_414/regression_414.proto +++ /dev/null @@ -1,9 +0,0 @@ -syntax = "proto3"; - -package regression_414; - -message Test { - bytes body = 1; - bytes auth = 2; - repeated bytes signatures = 3; -} \ No newline at end of file diff --git a/tests/inputs/regression_414/test_regression_414.py 
b/tests/inputs/regression_414/test_regression_414.py deleted file mode 100644 index 742c97b4..00000000 --- a/tests/inputs/regression_414/test_regression_414.py +++ /dev/null @@ -1,15 +0,0 @@ -from tests.output_betterproto.regression_414 import Test - - -def test_full_cycle(): - body = bytes([0, 1]) - auth = bytes([2, 3]) - sig = [b""] - - obj = Test(body=body, auth=auth, signatures=sig) - - decoded = Test().parse(bytes(obj)) - assert decoded == obj - assert decoded.body == body - assert decoded.auth == auth - assert decoded.signatures == sig diff --git a/tests/inputs/repeated/repeated.json b/tests/inputs/repeated/repeated.json deleted file mode 100644 index b8a7c4eb..00000000 --- a/tests/inputs/repeated/repeated.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "names": ["one", "two", "three"] -} diff --git a/tests/inputs/repeated/repeated.proto b/tests/inputs/repeated/repeated.proto deleted file mode 100644 index 4f3c788c..00000000 --- a/tests/inputs/repeated/repeated.proto +++ /dev/null @@ -1,7 +0,0 @@ -syntax = "proto3"; - -package repeated; - -message Test { - repeated string names = 1; -} diff --git a/tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.json b/tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.json deleted file mode 100644 index 6ce7b34c..00000000 --- a/tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "times": ["1972-01-01T10:00:20.021Z", "1972-01-01T10:00:20.021Z"], - "durations": ["1.200s", "1.200s"] -} diff --git a/tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.proto b/tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.proto deleted file mode 100644 index 38f1eaa3..00000000 --- a/tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.proto +++ /dev/null @@ -1,12 +0,0 @@ -syntax = "proto3"; - -package repeated_duration_timestamp; - -import "google/protobuf/duration.proto"; -import "google/protobuf/timestamp.proto"; - - -message Test { - repeated google.protobuf.Timestamp times = 1; - repeated google.protobuf.Duration durations = 2; -} diff --git a/tests/inputs/repeated_duration_timestamp/test_repeated_duration_timestamp.py b/tests/inputs/repeated_duration_timestamp/test_repeated_duration_timestamp.py deleted file mode 100644 index efc34866..00000000 --- a/tests/inputs/repeated_duration_timestamp/test_repeated_duration_timestamp.py +++ /dev/null @@ -1,12 +0,0 @@ -from datetime import ( - datetime, - timedelta, -) - -from tests.output_betterproto.repeated_duration_timestamp import Test - - -def test_roundtrip(): - message = Test() - message.times = [datetime.now(), datetime.now()] - message.durations = [timedelta(), timedelta()] diff --git a/tests/inputs/repeatedmessage/repeatedmessage.json b/tests/inputs/repeatedmessage/repeatedmessage.json deleted file mode 100644 index 90ec5967..00000000 --- a/tests/inputs/repeatedmessage/repeatedmessage.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "greetings": [ - { - "greeting": "hello" - }, - { - "greeting": "hi" - } - ] -} diff --git a/tests/inputs/repeatedmessage/repeatedmessage.proto b/tests/inputs/repeatedmessage/repeatedmessage.proto deleted file mode 100644 index 0ffacafd..00000000 --- a/tests/inputs/repeatedmessage/repeatedmessage.proto +++ /dev/null @@ -1,11 +0,0 @@ -syntax = "proto3"; - -package repeatedmessage; - -message Test { - repeated Sub greetings = 1; -} - -message Sub { - string greeting = 1; -} \ No newline at end of file diff --git 
a/tests/inputs/repeatedpacked/repeatedpacked.json b/tests/inputs/repeatedpacked/repeatedpacked.json deleted file mode 100644 index 106fd908..00000000 --- a/tests/inputs/repeatedpacked/repeatedpacked.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "counts": [1, 2, -1, -2], - "signed": ["1", "2", "-1", "-2"], - "fixed": [1.0, 2.7, 3.4] -} diff --git a/tests/inputs/repeatedpacked/repeatedpacked.proto b/tests/inputs/repeatedpacked/repeatedpacked.proto deleted file mode 100644 index a037d1b8..00000000 --- a/tests/inputs/repeatedpacked/repeatedpacked.proto +++ /dev/null @@ -1,9 +0,0 @@ -syntax = "proto3"; - -package repeatedpacked; - -message Test { - repeated int32 counts = 1; - repeated sint64 signed = 2; - repeated double fixed = 3; -} diff --git a/tests/inputs/rpc_empty_input_message/rpc_empty_input_message.proto b/tests/inputs/rpc_empty_input_message/rpc_empty_input_message.proto deleted file mode 100644 index 9a4449c6..00000000 --- a/tests/inputs/rpc_empty_input_message/rpc_empty_input_message.proto +++ /dev/null @@ -1,13 +0,0 @@ -syntax = "proto3"; - -package rpc_empty_input_message; - -message Test {} - -message Response { - int32 v = 1; -} - -service Service { - rpc read(Test) returns (Response); -} diff --git a/tests/inputs/rpc_empty_input_message/test_rpc_empty_input_message.py b/tests/inputs/rpc_empty_input_message/test_rpc_empty_input_message.py deleted file mode 100644 index f77578f6..00000000 --- a/tests/inputs/rpc_empty_input_message/test_rpc_empty_input_message.py +++ /dev/null @@ -1,24 +0,0 @@ -import pytest -from grpclib.testing import ChannelFor - - -@pytest.mark.asyncio -async def test_rpc_input_message(): - from tests.output_betterproto.rpc_empty_input_message import ( - Response, - ServiceBase, - ServiceStub, - Test, - ) - - class Service(ServiceBase): - async def read(self, test: "Test") -> "Response": - return Response(v=42) - - async with ChannelFor([Service()]) as channel: - client = ServiceStub(channel) - - assert (await client.read(Test())).v == 42 - - # Check that we can call the method without providing the message - assert (await client.read()).v == 42 diff --git a/tests/inputs/service/service.proto b/tests/inputs/service/service.proto deleted file mode 100644 index 53d84fbd..00000000 --- a/tests/inputs/service/service.proto +++ /dev/null @@ -1,35 +0,0 @@ -syntax = "proto3"; - -package service; - -enum ThingType { - UNKNOWN = 0; - LIVING = 1; - DEAD = 2; -} - -message DoThingRequest { - string name = 1; - repeated string comments = 2; - ThingType type = 3; -} - -message DoThingResponse { - repeated string names = 1; -} - -message GetThingRequest { - string name = 1; -} - -message GetThingResponse { - string name = 1; - int32 version = 2; -} - -service Test { - rpc DoThing (DoThingRequest) returns (DoThingResponse); - rpc DoManyThings (stream DoThingRequest) returns (DoThingResponse); - rpc GetThingVersions (GetThingRequest) returns (stream GetThingResponse); - rpc GetDifferentThings (stream GetThingRequest) returns (stream GetThingResponse); -} diff --git a/tests/inputs/service_separate_packages/messages.proto b/tests/inputs/service_separate_packages/messages.proto deleted file mode 100644 index 270b188f..00000000 --- a/tests/inputs/service_separate_packages/messages.proto +++ /dev/null @@ -1,31 +0,0 @@ -syntax = "proto3"; - -import "google/protobuf/duration.proto"; -import "google/protobuf/timestamp.proto"; - -package service_separate_packages.things.messages; - -message DoThingRequest { - string name = 1; - - // use `repeated` so we can check if `List` is correctly 
imported - repeated string comments = 2; - - // use google types `timestamp` and `duration` so we can check - // if everything from `datetime` is correctly imported - google.protobuf.Timestamp when = 3; - google.protobuf.Duration duration = 4; -} - -message DoThingResponse { - repeated string names = 1; -} - -message GetThingRequest { - string name = 1; -} - -message GetThingResponse { - string name = 1; - int32 version = 2; -} diff --git a/tests/inputs/service_separate_packages/service.proto b/tests/inputs/service_separate_packages/service.proto deleted file mode 100644 index 950eab49..00000000 --- a/tests/inputs/service_separate_packages/service.proto +++ /dev/null @@ -1,12 +0,0 @@ -syntax = "proto3"; - -import "messages.proto"; - -package service_separate_packages.things.service; - -service Test { - rpc DoThing (things.messages.DoThingRequest) returns (things.messages.DoThingResponse); - rpc DoManyThings (stream things.messages.DoThingRequest) returns (things.messages.DoThingResponse); - rpc GetThingVersions (things.messages.GetThingRequest) returns (stream things.messages.GetThingResponse); - rpc GetDifferentThings (stream things.messages.GetThingRequest) returns (stream things.messages.GetThingResponse); -} diff --git a/tests/inputs/service_uppercase/service.proto b/tests/inputs/service_uppercase/service.proto deleted file mode 100644 index 786eec2c..00000000 --- a/tests/inputs/service_uppercase/service.proto +++ /dev/null @@ -1,16 +0,0 @@ -syntax = "proto3"; - -package service_uppercase; - -message DoTHINGRequest { - string name = 1; - repeated string comments = 2; -} - -message DoTHINGResponse { - repeated string names = 1; -} - -service Test { - rpc DoThing (DoTHINGRequest) returns (DoTHINGResponse); -} diff --git a/tests/inputs/service_uppercase/test_service.py b/tests/inputs/service_uppercase/test_service.py deleted file mode 100644 index 35405e13..00000000 --- a/tests/inputs/service_uppercase/test_service.py +++ /dev/null @@ -1,8 +0,0 @@ -import inspect - -from tests.output_betterproto.service_uppercase import TestStub - - -def test_parameters(): - sig = inspect.signature(TestStub.do_thing) - assert len(sig.parameters) == 5, "Expected 5 parameters" diff --git a/tests/inputs/signed/signed.json b/tests/inputs/signed/signed.json deleted file mode 100644 index b171e155..00000000 --- a/tests/inputs/signed/signed.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "signed32": 150, - "negative32": -150, - "string64": "150", - "negative64": "-150" -} diff --git a/tests/inputs/signed/signed.proto b/tests/inputs/signed/signed.proto deleted file mode 100644 index b40aad49..00000000 --- a/tests/inputs/signed/signed.proto +++ /dev/null @@ -1,11 +0,0 @@ -syntax = "proto3"; - -package signed; - -message Test { - // todo: rename fields after fixing bug where 'signed_32_positive' will map to 'signed_32Positive' as output json - sint32 signed32 = 1; // signed_32_positive - sint32 negative32 = 2; // signed_32_negative - sint64 string64 = 3; // signed_64_positive - sint64 negative64 = 4; // signed_64_negative -} diff --git a/tests/inputs/timestamp_dict_encode/test_timestamp_dict_encode.py b/tests/inputs/timestamp_dict_encode/test_timestamp_dict_encode.py deleted file mode 100644 index 35783ea6..00000000 --- a/tests/inputs/timestamp_dict_encode/test_timestamp_dict_encode.py +++ /dev/null @@ -1,78 +0,0 @@ -from datetime import ( - datetime, - timedelta, - timezone, -) - -import pytest - -from tests.output_betterproto.timestamp_dict_encode import Test - -# Current World Timezone range (UTC-12 to UTC+14) 
-MIN_UTC_OFFSET_MIN = -12 * 60 -MAX_UTC_OFFSET_MIN = 14 * 60 - -# Generate all timezones in range in 15 min increments -timezones = [timezone(timedelta(minutes=x)) for x in range(MIN_UTC_OFFSET_MIN, MAX_UTC_OFFSET_MIN + 1, 15)] - - -@pytest.mark.parametrize("tz", timezones) -def test_timezone_aware_datetime_dict_encode(tz: timezone): - original_time = datetime.now(tz=tz) - original_message = Test() - original_message.ts = original_time - encoded = original_message.to_dict() - decoded_message = Test() - decoded_message.from_dict(encoded) - - # check that the timestamps are equal after decoding from dict - assert original_message.ts.tzinfo is not None - assert decoded_message.ts.tzinfo is not None - assert original_message.ts == decoded_message.ts - - -def test_naive_datetime_dict_encode(): - # make suer naive datetime objects are still treated as utc - original_time = datetime.now() - assert original_time.tzinfo is None - original_message = Test() - original_message.ts = original_time - original_time_utc = original_time.replace(tzinfo=timezone.utc) - encoded = original_message.to_dict() - decoded_message = Test() - decoded_message.from_dict(encoded) - - # check that the timestamps are equal after decoding from dict - assert decoded_message.ts.tzinfo is not None - assert original_time_utc == decoded_message.ts - - -@pytest.mark.parametrize("tz", timezones) -def test_timezone_aware_json_serialize(tz: timezone): - original_time = datetime.now(tz=tz) - original_message = Test() - original_message.ts = original_time - json_serialized = original_message.to_json() - decoded_message = Test() - decoded_message.from_json(json_serialized) - - # check that the timestamps are equal after decoding from dict - assert original_message.ts.tzinfo is not None - assert decoded_message.ts.tzinfo is not None - assert original_message.ts == decoded_message.ts - - -def test_naive_datetime_json_serialize(): - # make suer naive datetime objects are still treated as utc - original_time = datetime.now() - assert original_time.tzinfo is None - original_message = Test() - original_message.ts = original_time - original_time_utc = original_time.replace(tzinfo=timezone.utc) - json_serialized = original_message.to_json() - decoded_message = Test() - decoded_message.from_json(json_serialized) - - # check that the timestamps are equal after decoding from dict - assert decoded_message.ts.tzinfo is not None - assert original_time_utc == decoded_message.ts diff --git a/tests/inputs/timestamp_dict_encode/timestamp_dict_encode.json b/tests/inputs/timestamp_dict_encode/timestamp_dict_encode.json deleted file mode 100644 index 3f455587..00000000 --- a/tests/inputs/timestamp_dict_encode/timestamp_dict_encode.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "ts" : "2023-03-15T22:35:51.253277Z" -} \ No newline at end of file diff --git a/tests/inputs/timestamp_dict_encode/timestamp_dict_encode.proto b/tests/inputs/timestamp_dict_encode/timestamp_dict_encode.proto deleted file mode 100644 index 9c4081ac..00000000 --- a/tests/inputs/timestamp_dict_encode/timestamp_dict_encode.proto +++ /dev/null @@ -1,9 +0,0 @@ -syntax = "proto3"; - -package timestamp_dict_encode; - -import "google/protobuf/timestamp.proto"; - -message Test { - google.protobuf.Timestamp ts = 1; -} \ No newline at end of file diff --git a/tests/mocks.py b/tests/mocks.py deleted file mode 100644 index dc6e1172..00000000 --- a/tests/mocks.py +++ /dev/null @@ -1,40 +0,0 @@ -from typing import List - -from grpclib.client import Channel - - -class MockChannel(Channel): - # noinspection 
PyMissingConstructor - def __init__(self, responses=None) -> None: - self.responses = responses or [] - self.requests = [] - self._loop = None - - def request(self, route, cardinality, request, response_type, **kwargs): - self.requests.append( - { - "route": route, - "cardinality": cardinality, - "request": request, - "response_type": response_type, - } - ) - return MockStream(self.responses) - - -class MockStream: - def __init__(self, responses: List) -> None: - super().__init__() - self.responses = responses - - async def recv_message(self): - return self.responses.pop(0) - - async def send_message(self, *args, **kwargs): - pass - - async def __aexit__(self, exc_type, exc_val, exc_tb): - return True - - async def __aenter__(self): - return self diff --git a/tests/oneof_pattern_matching.py b/tests/oneof_pattern_matching.py deleted file mode 100644 index 4eab565a..00000000 --- a/tests/oneof_pattern_matching.py +++ /dev/null @@ -1,47 +0,0 @@ -from dataclasses import dataclass -from typing import Optional - -import pytest - -import betterproto - - -def test_oneof_pattern_matching(): - @dataclass - class Sub(betterproto.Message): - val: int = betterproto.int32_field(1) - - @dataclass - class Foo(betterproto.Message): - bar: Optional[int] = betterproto.int32_field(1, group="group1") - baz: Optional[str] = betterproto.string_field(2, group="group1") - sub: Optional[Sub] = betterproto.message_field(3, group="group2") - abc: Optional[str] = betterproto.string_field(4, group="group2") - - foo = Foo(baz="test1", abc="test2") - - match foo: - case Foo(bar=int(_)): - pytest.fail("Matched 'bar' instead of 'baz'") - case Foo(baz=v): - assert v == "test1" - case _: - pytest.fail("Matched neither 'bar' nor 'baz'") - - match foo: - case Foo(sub=Sub(_)): - pytest.fail("Matched 'sub' instead of 'abc'") - case Foo(abc=v): - assert v == "test2" - case _: - pytest.fail("Matched neither 'sub' nor 'abc'") - - foo.sub = Sub(val=1) - - match foo: - case Foo(sub=Sub(val=v)): - assert v == 1 - case Foo(abc=str(v)): - pytest.fail("Matched 'abc' instead of 'sub'") - case _: - pytest.fail("Matched neither 'sub' nor 'abc'") diff --git a/tests/streams/delimited_messages.in b/tests/streams/delimited_messages.in deleted file mode 100644 index 5993ac6f..00000000 --- a/tests/streams/delimited_messages.in +++ /dev/null @@ -1,2 +0,0 @@ -:bTesting:bTesting -  \ No newline at end of file diff --git a/tests/streams/dump_varint_negative.expected b/tests/streams/dump_varint_negative.expected deleted file mode 100644 index 09548229..00000000 --- a/tests/streams/dump_varint_negative.expected +++ /dev/null @@ -1 +0,0 @@ -ӝ \ No newline at end of file diff --git a/tests/streams/dump_varint_positive.expected b/tests/streams/dump_varint_positive.expected deleted file mode 100644 index 8614b9d7..00000000 --- a/tests/streams/dump_varint_positive.expected +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/tests/streams/java/.gitignore b/tests/streams/java/.gitignore deleted file mode 100644 index 9b1ebba9..00000000 --- a/tests/streams/java/.gitignore +++ /dev/null @@ -1,38 +0,0 @@ -### Output ### -target/ -!.mvn/wrapper/maven-wrapper.jar -!**/src/main/**/target/ -!**/src/test/**/target/ -dependency-reduced-pom.xml -MANIFEST.MF - -### IntelliJ IDEA ### -.idea/ -*.iws -*.iml -*.ipr - -### Eclipse ### -.apt_generated -.classpath -.factorypath -.project -.settings -.springBeans -.sts4-cache - -### NetBeans ### -/nbproject/private/ -/nbbuild/ -/dist/ -/nbdist/ -/.nb-gradle/ -build/ -!**/src/main/**/build/ 
-!**/src/test/**/build/ - -### VS Code ### -.vscode/ - -### Mac OS ### -.DS_Store \ No newline at end of file diff --git a/tests/streams/java/pom.xml b/tests/streams/java/pom.xml deleted file mode 100644 index 170d2d66..00000000 --- a/tests/streams/java/pom.xml +++ /dev/null @@ -1,94 +0,0 @@ - - - 4.0.0 - - betterproto - compatibility-test - 1.0-SNAPSHOT - jar - - - 11 - 11 - UTF-8 - 3.23.4 - - - - - com.google.protobuf - protobuf-java - ${protobuf.version} - - - - - - - kr.motd.maven - os-maven-plugin - 1.7.1 - - - - - - org.apache.maven.plugins - maven-shade-plugin - 3.5.0 - - - package - - shade - - - - - betterproto.CompatibilityTest - - - - - - - - - org.apache.maven.plugins - maven-jar-plugin - 3.3.0 - - - - true - betterproto.CompatibilityTest - - - - - - - org.xolstice.maven.plugins - protobuf-maven-plugin - 0.6.1 - - - - compile - - - - - - com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} - - - - - - ${project.artifactId} - - - \ No newline at end of file diff --git a/tests/streams/java/src/main/java/betterproto/CompatibilityTest.java b/tests/streams/java/src/main/java/betterproto/CompatibilityTest.java deleted file mode 100644 index 908f87af..00000000 --- a/tests/streams/java/src/main/java/betterproto/CompatibilityTest.java +++ /dev/null @@ -1,41 +0,0 @@ -package betterproto; - -import java.io.IOException; - -public class CompatibilityTest { - public static void main(String[] args) throws IOException { - if (args.length < 2) - throw new RuntimeException("Attempted to run without the required arguments."); - else if (args.length > 2) - throw new RuntimeException( - "Attempted to run with more than the expected number of arguments (>1)."); - - Tests tests = new Tests(args[1]); - - switch (args[0]) { - case "single_varint": - tests.testSingleVarint(); - break; - - case "multiple_varints": - tests.testMultipleVarints(); - break; - - case "single_message": - tests.testSingleMessage(); - break; - - case "multiple_messages": - tests.testMultipleMessages(); - break; - - case "infinite_messages": - tests.testInfiniteMessages(); - break; - - default: - throw new RuntimeException( - "Attempted to run with unknown argument '" + args[0] + "'."); - } - } -} diff --git a/tests/streams/java/src/main/java/betterproto/Tests.java b/tests/streams/java/src/main/java/betterproto/Tests.java deleted file mode 100644 index a7c8fd57..00000000 --- a/tests/streams/java/src/main/java/betterproto/Tests.java +++ /dev/null @@ -1,115 +0,0 @@ -package betterproto; - -import betterproto.nested.NestedOuterClass; -import betterproto.oneof.Oneof; - -import com.google.protobuf.CodedInputStream; -import com.google.protobuf.CodedOutputStream; - -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; - -public class Tests { - String path; - - public Tests(String path) { - this.path = path; - } - - public void testSingleVarint() throws IOException { - // Read in the Python-generated single varint file - FileInputStream inputStream = new FileInputStream(path + "/py_single_varint.out"); - CodedInputStream codedInput = CodedInputStream.newInstance(inputStream); - - int value = codedInput.readUInt32(); - - inputStream.close(); - - // Write the value back to a file - FileOutputStream outputStream = new FileOutputStream(path + "/java_single_varint.out"); - CodedOutputStream codedOutput = CodedOutputStream.newInstance(outputStream); - - codedOutput.writeUInt32NoTag(value); - - codedOutput.flush(); - outputStream.close(); - } - - public void testMultipleVarints() 
throws IOException { - // Read in the Python-generated multiple varints file - FileInputStream inputStream = new FileInputStream(path + "/py_multiple_varints.out"); - CodedInputStream codedInput = CodedInputStream.newInstance(inputStream); - - int value1 = codedInput.readUInt32(); - int value2 = codedInput.readUInt32(); - long value3 = codedInput.readUInt64(); - - inputStream.close(); - - // Write the values back to a file - FileOutputStream outputStream = new FileOutputStream(path + "/java_multiple_varints.out"); - CodedOutputStream codedOutput = CodedOutputStream.newInstance(outputStream); - - codedOutput.writeUInt32NoTag(value1); - codedOutput.writeUInt64NoTag(value2); - codedOutput.writeUInt64NoTag(value3); - - codedOutput.flush(); - outputStream.close(); - } - - public void testSingleMessage() throws IOException { - // Read in the Python-generated single message file - FileInputStream inputStream = new FileInputStream(path + "/py_single_message.out"); - CodedInputStream codedInput = CodedInputStream.newInstance(inputStream); - - Oneof.Test message = Oneof.Test.parseFrom(codedInput); - - inputStream.close(); - - // Write the message back to a file - FileOutputStream outputStream = new FileOutputStream(path + "/java_single_message.out"); - CodedOutputStream codedOutput = CodedOutputStream.newInstance(outputStream); - - message.writeTo(codedOutput); - - codedOutput.flush(); - outputStream.close(); - } - - public void testMultipleMessages() throws IOException { - // Read in the Python-generated multi-message file - FileInputStream inputStream = new FileInputStream(path + "/py_multiple_messages.out"); - - Oneof.Test oneof = Oneof.Test.parseDelimitedFrom(inputStream); - NestedOuterClass.Test nested = NestedOuterClass.Test.parseDelimitedFrom(inputStream); - - inputStream.close(); - - // Write the messages back to a file - FileOutputStream outputStream = new FileOutputStream(path + "/java_multiple_messages.out"); - - oneof.writeDelimitedTo(outputStream); - nested.writeDelimitedTo(outputStream); - - outputStream.flush(); - outputStream.close(); - } - - public void testInfiniteMessages() throws IOException { - // Read in as many messages as are present in the Python-generated file and write them back - FileInputStream inputStream = new FileInputStream(path + "/py_infinite_messages.out"); - FileOutputStream outputStream = new FileOutputStream(path + "/java_infinite_messages.out"); - - Oneof.Test current = Oneof.Test.parseDelimitedFrom(inputStream); - while (current != null) { - current.writeDelimitedTo(outputStream); - current = Oneof.Test.parseDelimitedFrom(inputStream); - } - - inputStream.close(); - outputStream.flush(); - outputStream.close(); - } -} diff --git a/tests/streams/java/src/main/proto/betterproto/nested.proto b/tests/streams/java/src/main/proto/betterproto/nested.proto deleted file mode 100644 index 405a05a4..00000000 --- a/tests/streams/java/src/main/proto/betterproto/nested.proto +++ /dev/null @@ -1,27 +0,0 @@ -syntax = "proto3"; - -package nested; -option java_package = "betterproto.nested"; - -// A test message with a nested message inside of it. -message Test { - // This is the nested type. - message Nested { - // Stores a simple counter. - int32 count = 1; - } - // This is the nested enum. 
- enum Msg { - NONE = 0; - THIS = 1; - } - - Nested nested = 1; - Sibling sibling = 2; - Sibling sibling2 = 3; - Msg msg = 4; -} - -message Sibling { - int32 foo = 1; -} \ No newline at end of file diff --git a/tests/streams/java/src/main/proto/betterproto/oneof.proto b/tests/streams/java/src/main/proto/betterproto/oneof.proto deleted file mode 100644 index ad21028c..00000000 --- a/tests/streams/java/src/main/proto/betterproto/oneof.proto +++ /dev/null @@ -1,19 +0,0 @@ -syntax = "proto3"; - -package oneof; -option java_package = "betterproto.oneof"; - -message Test { - oneof foo { - int32 pitied = 1; - string pitier = 2; - } - - int32 just_a_regular_field = 3; - - oneof bar { - int32 drinks = 11; - string bar_name = 12; - } -} - diff --git a/tests/streams/load_varint_cutoff.in b/tests/streams/load_varint_cutoff.in deleted file mode 100644 index 52b9bf1e..00000000 --- a/tests/streams/load_varint_cutoff.in +++ /dev/null @@ -1 +0,0 @@ -ȁ \ No newline at end of file diff --git a/tests/streams/message_dump_file_multiple.expected b/tests/streams/message_dump_file_multiple.expected deleted file mode 100644 index b5fdf9c3..00000000 --- a/tests/streams/message_dump_file_multiple.expected +++ /dev/null @@ -1,2 +0,0 @@ -:bTesting:bTesting -  \ No newline at end of file diff --git a/tests/streams/message_dump_file_single.expected b/tests/streams/message_dump_file_single.expected deleted file mode 100644 index 9b7bafb6..00000000 --- a/tests/streams/message_dump_file_single.expected +++ /dev/null @@ -1 +0,0 @@ -:bTesting \ No newline at end of file diff --git a/tests/test_all_definition.py b/tests/test_all_definition.py deleted file mode 100644 index 01743af7..00000000 --- a/tests/test_all_definition.py +++ /dev/null @@ -1,19 +0,0 @@ -def test_all_definition(): - """ - Check that a compiled module defines __all__ with the right value. - - These modules have been chosen since they contain messages, services and enums. 
- """ - import tests.output_betterproto.enum as enum - import tests.output_betterproto.service as service - - assert service.__all__ == ( - "ThingType", - "DoThingRequest", - "DoThingResponse", - "GetThingRequest", - "GetThingResponse", - "TestStub", - "TestBase", - ) - assert enum.__all__ == ("ArithmeticOperator", "Choice", "Test") diff --git a/tests/test_casing.py b/tests/test_casing.py deleted file mode 100644 index 56cd8a93..00000000 --- a/tests/test_casing.py +++ /dev/null @@ -1,129 +0,0 @@ -import pytest - -from betterproto.casing import ( - camel_case, - pascal_case, - snake_case, -) - - -@pytest.mark.parametrize( - ["value", "expected"], - [ - ("", ""), - ("a", "A"), - ("foobar", "Foobar"), - ("fooBar", "FooBar"), - ("FooBar", "FooBar"), - ("foo.bar", "FooBar"), - ("foo_bar", "FooBar"), - ("FOOBAR", "Foobar"), - ("FOOBar", "FooBar"), - ("UInt32", "UInt32"), - ("FOO_BAR", "FooBar"), - ("FOOBAR1", "Foobar1"), - ("FOOBAR_1", "Foobar1"), - ("FOO1BAR2", "Foo1Bar2"), - ("foo__bar", "FooBar"), - ("_foobar", "Foobar"), - ("foobaR", "FoobaR"), - ("foo~bar", "FooBar"), - ("foo:bar", "FooBar"), - ("1foobar", "1Foobar"), - ], -) -def test_pascal_case(value, expected): - actual = pascal_case(value, strict=True) - assert actual == expected, f"{value} => {expected} (actual: {actual})" - - -@pytest.mark.parametrize( - ["value", "expected"], - [ - ("", ""), - ("a", "a"), - ("foobar", "foobar"), - ("fooBar", "fooBar"), - ("FooBar", "fooBar"), - ("foo.bar", "fooBar"), - ("foo_bar", "fooBar"), - ("FOOBAR", "foobar"), - ("FOO_BAR", "fooBar"), - ("FOOBAR1", "foobar1"), - ("FOOBAR_1", "foobar1"), - ("FOO1BAR2", "foo1Bar2"), - ("foo__bar", "fooBar"), - ("_foobar", "foobar"), - ("foobaR", "foobaR"), - ("foo~bar", "fooBar"), - ("foo:bar", "fooBar"), - ("1foobar", "1Foobar"), - ], -) -def test_camel_case_strict(value, expected): - actual = camel_case(value, strict=True) - assert actual == expected, f"{value} => {expected} (actual: {actual})" - - -@pytest.mark.parametrize( - ["value", "expected"], - [ - ("foo_bar", "fooBar"), - ("FooBar", "fooBar"), - ("foo__bar", "foo_Bar"), - ("foo__Bar", "foo__Bar"), - ], -) -def test_camel_case_not_strict(value, expected): - actual = camel_case(value, strict=False) - assert actual == expected, f"{value} => {expected} (actual: {actual})" - - -@pytest.mark.parametrize( - ["value", "expected"], - [ - ("", ""), - ("a", "a"), - ("foobar", "foobar"), - ("fooBar", "foo_bar"), - ("FooBar", "foo_bar"), - ("foo.bar", "foo_bar"), - ("foo_bar", "foo_bar"), - ("foo_Bar", "foo_bar"), - ("FOOBAR", "foobar"), - ("FOOBar", "foo_bar"), - ("UInt32", "u_int32"), - ("FOO_BAR", "foo_bar"), - ("FOOBAR1", "foobar1"), - ("FOOBAR_1", "foobar_1"), - ("FOOBAR_123", "foobar_123"), - ("FOO1BAR2", "foo1_bar2"), - ("foo__bar", "foo_bar"), - ("_foobar", "foobar"), - ("foobaR", "fooba_r"), - ("foo~bar", "foo_bar"), - ("foo:bar", "foo_bar"), - ("1foobar", "1_foobar"), - ("GetUInt64", "get_u_int64"), - ], -) -def test_snake_case_strict(value, expected): - actual = snake_case(value) - assert actual == expected, f"{value} => {expected} (actual: {actual})" - - -@pytest.mark.parametrize( - ["value", "expected"], - [ - ("fooBar", "foo_bar"), - ("FooBar", "foo_bar"), - ("foo_Bar", "foo__bar"), - ("foo__bar", "foo__bar"), - ("FOOBar", "foo_bar"), - ("__foo", "__foo"), - ("GetUInt64", "get_u_int64"), - ], -) -def test_snake_case_not_strict(value, expected): - actual = snake_case(value, strict=False) - assert actual == expected, f"{value} => {expected} (actual: {actual})" diff --git a/tests/test_deprecated.py 
b/tests/test_deprecated.py deleted file mode 100644 index ea16d370..00000000 --- a/tests/test_deprecated.py +++ /dev/null @@ -1,61 +0,0 @@ -import warnings - -import pytest - -from tests.mocks import MockChannel -from tests.output_betterproto.deprecated import ( - Empty, - Message, - Test, - TestServiceStub, -) - - -@pytest.fixture -def message(): - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=DeprecationWarning) - return Message(value="hello") - - -def test_deprecated_message(): - with pytest.warns(DeprecationWarning) as record: - Message(value="hello") - - assert len(record) == 1 - assert str(record[0].message) == f"{Message.__name__} is deprecated" - - -def test_message_with_deprecated_field(message): - with pytest.warns(DeprecationWarning) as record: - Test(message=message, value=10) - - assert len(record) == 1 - assert str(record[0].message) == f"{Test.__name__}.message is deprecated" - - -def test_message_with_deprecated_field_not_set(message): - with warnings.catch_warnings(): - warnings.simplefilter("error") - Test(value=10) - - -def test_message_with_deprecated_field_not_set_default(message): - with warnings.catch_warnings(): - warnings.simplefilter("error") - _ = Test(value=10).message - - -@pytest.mark.asyncio -async def test_service_with_deprecated_method(): - stub = TestServiceStub(MockChannel([Empty(), Empty()])) - - with pytest.warns(DeprecationWarning) as record: - await stub.deprecated_func(Empty()) - - assert len(record) == 1 - assert str(record[0].message) == f"TestService.deprecated_func is deprecated" - - with warnings.catch_warnings(): - warnings.simplefilter("error") - await stub.func(Empty()) diff --git a/tests/test_documentation.py b/tests/test_documentation.py deleted file mode 100644 index da82a1b7..00000000 --- a/tests/test_documentation.py +++ /dev/null @@ -1,37 +0,0 @@ -import ast -import inspect - - -def check(generated_doc: str, type: str) -> None: - assert f"Documentation of {type} 1" in generated_doc - assert "other line 1" in generated_doc - assert f"Documentation of {type} 2" in generated_doc - assert "other line 2" in generated_doc - assert f"Documentation of {type} 3" in generated_doc - - -def test_documentation() -> None: - from .output_betterproto.documentation import ( - Enum, - ServiceBase, - ServiceStub, - Test, - ) - - check(Test.__doc__, "message") - - source = inspect.getsource(Test) - tree = ast.parse(source) - check(tree.body[0].body[2].value.value, "field") - - check(Enum.__doc__, "enum") - - source = inspect.getsource(Enum) - tree = ast.parse(source) - check(tree.body[0].body[2].value.value, "variant") - - check(ServiceBase.__doc__, "service") - check(ServiceBase.get.__doc__, "method") - - check(ServiceStub.__doc__, "service") - check(ServiceStub.get.__doc__, "method") diff --git a/tests/test_enum.py b/tests/test_enum.py deleted file mode 100644 index 04b8a167..00000000 --- a/tests/test_enum.py +++ /dev/null @@ -1,79 +0,0 @@ -from typing import ( - Optional, - Tuple, -) - -import pytest - -import betterproto - - -class Colour(betterproto.Enum): - RED = 1 - GREEN = 2 - BLUE = 3 - - -PURPLE = Colour.__new__(Colour, name=None, value=4) - - -@pytest.mark.parametrize( - "member, str_value", - [ - (Colour.RED, "RED"), - (Colour.GREEN, "GREEN"), - (Colour.BLUE, "BLUE"), - ], -) -def test_str(member: Colour, str_value: str) -> None: - assert str(member) == str_value - - -@pytest.mark.parametrize( - "member, repr_value", - [ - (Colour.RED, "Colour.RED"), - (Colour.GREEN, "Colour.GREEN"), - (Colour.BLUE, 
"Colour.BLUE"), - ], -) -def test_repr(member: Colour, repr_value: str) -> None: - assert repr(member) == repr_value - - -@pytest.mark.parametrize( - "member, values", - [ - (Colour.RED, ("RED", 1)), - (Colour.GREEN, ("GREEN", 2)), - (Colour.BLUE, ("BLUE", 3)), - (PURPLE, (None, 4)), - ], -) -def test_name_values(member: Colour, values: Tuple[Optional[str], int]) -> None: - assert (member.name, member.value) == values - - -@pytest.mark.parametrize( - "member, input_str", - [ - (Colour.RED, "RED"), - (Colour.GREEN, "GREEN"), - (Colour.BLUE, "BLUE"), - ], -) -def test_from_string(member: Colour, input_str: str) -> None: - assert Colour.from_string(input_str) == member - - -@pytest.mark.parametrize( - "member, input_int", - [ - (Colour.RED, 1), - (Colour.GREEN, 2), - (Colour.BLUE, 3), - (PURPLE, 4), - ], -) -def test_try_value(member: Colour, input_int: int) -> None: - assert Colour.try_value(input_int) == member diff --git a/tests/test_features.py b/tests/test_features.py deleted file mode 100644 index f8297e41..00000000 --- a/tests/test_features.py +++ /dev/null @@ -1,569 +0,0 @@ -import json -from dataclasses import dataclass -from datetime import ( - datetime, - timedelta, -) -from inspect import ( - Parameter, - signature, -) -from typing import ( - List, - Optional, -) -from unittest.mock import ANY - -import betterproto - - -def test_class_init(): - @dataclass - class Bar(betterproto.Message): - name: str = betterproto.string_field(1) - - @dataclass - class Foo(betterproto.Message): - name: str = betterproto.string_field(1) - child: Bar = betterproto.message_field(2) - - foo = Foo(name="foo", child=Bar(name="bar")) - - assert foo.to_dict() == {"name": "foo", "child": {"name": "bar"}} - assert foo.to_pydict() == {"name": "foo", "child": {"name": "bar"}} - - -def test_enum_as_int_json(): - class TestEnum(betterproto.Enum): - ZERO = 0 - ONE = 1 - - @dataclass - class Foo(betterproto.Message): - bar: TestEnum = betterproto.enum_field(1, enum_default_value=lambda: TestEnum.try_value(0)) - - # JSON strings are supported, but ints should still be supported too. - foo = Foo().from_dict({"bar": 1}) - assert foo.bar == TestEnum.ONE - - # Plain-ol'-ints should serialize properly too. 
- foo.bar = 1 - assert foo.to_dict() == {"bar": "ONE"} - - # Similar expectations for pydict - foo = Foo().from_pydict({"bar": 1}) - assert foo.bar == TestEnum.ONE - assert foo.to_pydict() == {"bar": TestEnum.ONE} - - -def test_unknown_fields(): - @dataclass - class Newer(betterproto.Message): - foo: bool = betterproto.bool_field(1) - bar: int = betterproto.int32_field(2) - baz: str = betterproto.string_field(3) - - @dataclass - class Older(betterproto.Message): - foo: bool = betterproto.bool_field(1) - - newer = Newer(foo=True, bar=1, baz="Hello") - serialized_newer = bytes(newer) - - # Unknown fields in `Newer` should round trip with `Older` - round_trip = bytes(Older().parse(serialized_newer)) - assert serialized_newer == round_trip - - new_again = Newer().parse(round_trip) - assert newer == new_again - - -def test_oneof_support(): - @dataclass - class Sub(betterproto.Message): - val: int = betterproto.int32_field(1) - - @dataclass - class Foo(betterproto.Message): - bar: int = betterproto.int32_field(1, optional=True, group="group1") - baz: str = betterproto.string_field(2, optional=True, group="group1") - sub: Sub = betterproto.message_field(3, optional=True, group="group2") - abc: str = betterproto.string_field(4, optional=True, group="group2") - - foo = Foo() - - assert betterproto.which_one_of(foo, "group1")[0] == "" - - foo.bar = 1 - assert betterproto.which_one_of(foo, "group1")[0] == "bar" - - foo.bar = None - foo.baz = "test" - assert betterproto.which_one_of(foo, "group1")[0] == "baz" - - foo.sub = Sub(val=1) - assert betterproto.which_one_of(foo, "group2")[0] == "sub" - - foo.sub = None - foo.abc = "test" - assert betterproto.which_one_of(foo, "group2")[0] == "abc" - - # Group 1 shouldn't be touched - assert betterproto.which_one_of(foo, "group1")[0] == "baz" - - # Zero value should always serialize for one-of - foo = Foo(bar=0) - assert betterproto.which_one_of(foo, "group1")[0] == "bar" - assert bytes(foo) == b"\x08\x00" - - # Round trip should also work - foo2 = Foo().parse(bytes(foo)) - assert betterproto.which_one_of(foo2, "group1")[0] == "bar" - assert foo.bar == 0 - assert betterproto.which_one_of(foo2, "group2")[0] == "" - - -def test_json_casing(): - @dataclass - class CasingTest(betterproto.Message): - pascal_case: int = betterproto.int32_field(1) - camel_case: int = betterproto.int32_field(2) - snake_case: int = betterproto.int32_field(3) - kabob_case: int = betterproto.int32_field(4) - - # Parsing should accept almost any input - test = CasingTest().from_dict({"PascalCase": 1, "camelCase": 2, "snake_case": 3, "kabob-case": 4}) - - assert test == CasingTest(1, 2, 3, 4) - - # Serializing should be strict. - assert json.loads(test.to_json()) == { - "pascalCase": 1, - "camelCase": 2, - "snakeCase": 3, - "kabobCase": 4, - } - - assert json.loads(test.to_json(casing=betterproto.Casing.SNAKE)) == { - "pascal_case": 1, - "camel_case": 2, - "snake_case": 3, - "kabob_case": 4, - } - - -def test_dict_casing(): - @dataclass - class CasingTest(betterproto.Message): - pascal_case: int = betterproto.int32_field(1) - camel_case: int = betterproto.int32_field(2) - snake_case: int = betterproto.int32_field(3) - kabob_case: int = betterproto.int32_field(4) - - # Parsing should accept almost any input - test = CasingTest().from_dict({"PascalCase": 1, "camelCase": 2, "snake_case": 3, "kabob-case": 4}) - - assert test == CasingTest(1, 2, 3, 4) - - # Serializing should be strict. 
- assert test.to_dict() == { - "pascalCase": 1, - "camelCase": 2, - "snakeCase": 3, - "kabobCase": 4, - } - assert test.to_pydict() == { - "pascalCase": 1, - "camelCase": 2, - "snakeCase": 3, - "kabobCase": 4, - } - - assert test.to_dict(casing=betterproto.Casing.SNAKE) == { - "pascal_case": 1, - "camel_case": 2, - "snake_case": 3, - "kabob_case": 4, - } - assert test.to_pydict(casing=betterproto.Casing.SNAKE) == { - "pascal_case": 1, - "camel_case": 2, - "snake_case": 3, - "kabob_case": 4, - } - - -def test_optional_flag(): - @dataclass - class Request(betterproto.Message): - flag: Optional[bool] = betterproto.message_field(1, wraps=betterproto.TYPE_BOOL) - - # Serialization of not passed vs. set vs. zero-value. - assert bytes(Request()) == b"" - assert bytes(Request(flag=True)) == b"\n\x02\x08\x01" - assert bytes(Request(flag=False)) == b"\n\x00" - - # Differentiate between not passed and the zero-value. - assert Request().parse(b"").flag is None - assert Request().parse(b"\n\x00").flag is False - - -def test_optional_datetime_to_dict(): - @dataclass - class Request(betterproto.Message): - date: Optional[datetime] = betterproto.message_field(1, optional=True) - - # Check dict serialization - assert Request().to_dict() == {} - assert Request().to_dict(include_default_values=True) == {"date": None} - assert Request(date=datetime(2020, 1, 1)).to_dict() == {"date": "2020-01-01T00:00:00Z"} - assert Request(date=datetime(2020, 1, 1)).to_dict(include_default_values=True) == {"date": "2020-01-01T00:00:00Z"} - - # Check pydict serialization - assert Request().to_pydict() == {} - assert Request().to_pydict(include_default_values=True) == {"date": None} - assert Request(date=datetime(2020, 1, 1)).to_pydict() == {"date": datetime(2020, 1, 1)} - assert Request(date=datetime(2020, 1, 1)).to_pydict(include_default_values=True) == {"date": datetime(2020, 1, 1)} - - -def test_to_json_default_values(): - @dataclass - class TestMessage(betterproto.Message): - some_int: int = betterproto.int32_field(1) - some_double: float = betterproto.double_field(2) - some_str: str = betterproto.string_field(3) - some_bool: bool = betterproto.bool_field(4) - - # Empty dict - test = TestMessage().from_dict({}) - - assert json.loads(test.to_json(include_default_values=True)) == { - "someInt": 0, - "someDouble": 0.0, - "someStr": "", - "someBool": False, - } - - # All default values - test = TestMessage().from_dict({"someInt": 0, "someDouble": 0.0, "someStr": "", "someBool": False}) - - assert json.loads(test.to_json(include_default_values=True)) == { - "someInt": 0, - "someDouble": 0.0, - "someStr": "", - "someBool": False, - } - - -def test_to_dict_default_values(): - @dataclass - class TestMessage(betterproto.Message): - some_int: int = betterproto.int32_field(1) - some_double: float = betterproto.double_field(2) - some_str: str = betterproto.string_field(3) - some_bool: bool = betterproto.bool_field(4) - - # Empty dict - test = TestMessage() - - assert test.to_dict(include_default_values=True) == { - "someInt": 0, - "someDouble": 0.0, - "someStr": "", - "someBool": False, - } - - assert test.to_pydict(include_default_values=True) == { - "someInt": 0, - "someDouble": 0.0, - "someStr": "", - "someBool": False, - } - - # Some default and some other values - @dataclass - class TestMessage2(betterproto.Message): - some_int: int = betterproto.int32_field(1) - some_double: float = betterproto.double_field(2) - some_str: str = betterproto.string_field(3) - some_bool: bool = betterproto.bool_field(4) - some_default_int: int = 
betterproto.int32_field(5) - some_default_double: float = betterproto.double_field(6) - some_default_str: str = betterproto.string_field(7) - some_default_bool: bool = betterproto.bool_field(8) - - test = TestMessage2().from_dict( - { - "someInt": 2, - "someDouble": 1.2, - "someStr": "hello", - "someBool": True, - "someDefaultInt": 0, - "someDefaultDouble": 0.0, - "someDefaultStr": "", - "someDefaultBool": False, - } - ) - - assert test.to_dict(include_default_values=True) == { - "someInt": 2, - "someDouble": 1.2, - "someStr": "hello", - "someBool": True, - "someDefaultInt": 0, - "someDefaultDouble": 0.0, - "someDefaultStr": "", - "someDefaultBool": False, - } - - test = TestMessage2().from_pydict( - { - "someInt": 2, - "someDouble": 1.2, - "someStr": "hello", - "someBool": True, - "someDefaultInt": 0, - "someDefaultDouble": 0.0, - "someDefaultStr": "", - "someDefaultBool": False, - } - ) - - assert test.to_pydict(include_default_values=True) == { - "someInt": 2, - "someDouble": 1.2, - "someStr": "hello", - "someBool": True, - "someDefaultInt": 0, - "someDefaultDouble": 0.0, - "someDefaultStr": "", - "someDefaultBool": False, - } - - # Nested messages - @dataclass - class TestChildMessage(betterproto.Message): - some_other_int: int = betterproto.int32_field(1) - - @dataclass - class TestParentMessage(betterproto.Message): - some_int: int = betterproto.int32_field(1) - some_double: float = betterproto.double_field(2) - some_message: Optional[TestChildMessage] = betterproto.message_field(3) - - test = TestParentMessage().from_dict({"someInt": 0, "someDouble": 1.2}) - - assert test.to_dict(include_default_values=True) == { - "someInt": 0, - "someDouble": 1.2, - "someMessage": None, - } - - test = TestParentMessage().from_pydict({"someInt": 0, "someDouble": 1.2}) - - assert test.to_pydict(include_default_values=True) == { - "someInt": 0, - "someDouble": 1.2, - "someMessage": None, - } - - -def test_to_dict_datetime_values(): - @dataclass - class TestDatetimeMessage(betterproto.Message): - bar: datetime = betterproto.message_field(1) - baz: timedelta = betterproto.message_field(2) - - test = TestDatetimeMessage().from_dict({"bar": "2020-01-01T00:00:00Z", "baz": "86400.000s"}) - - assert test.to_dict() == {"bar": "2020-01-01T00:00:00Z", "baz": "86400.000s"} - - test = TestDatetimeMessage().from_pydict({"bar": datetime(year=2020, month=1, day=1), "baz": timedelta(days=1)}) - - assert test.to_pydict() == { - "bar": datetime(year=2020, month=1, day=1), - "baz": timedelta(days=1), - } - - -def test_oneof_default_value_set_causes_writes_wire(): - @dataclass - class Empty(betterproto.Message): - pass - - @dataclass - class Foo(betterproto.Message): - bar: int = betterproto.int32_field(1, optional=True, group="group1") - baz: str = betterproto.string_field(2, optional=True, group="group1") - qux: Empty = betterproto.message_field(3, optional=True, group="group1") - - def _round_trip_serialization(foo: Foo) -> Foo: - return Foo().parse(bytes(foo)) - - foo1 = Foo(bar=0) - foo2 = Foo(baz="") - foo3 = Foo(qux=Empty()) - foo4 = Foo() - - assert bytes(foo1) == b"\x08\x00" - assert ( - betterproto.which_one_of(foo1, "group1") - == betterproto.which_one_of(_round_trip_serialization(foo1), "group1") - == ("bar", 0) - ) - - assert bytes(foo2) == b"\x12\x00" # Baz is just an empty string - assert ( - betterproto.which_one_of(foo2, "group1") - == betterproto.which_one_of(_round_trip_serialization(foo2), "group1") - == ("baz", "") - ) - - assert bytes(foo3) == b"\x1a\x00" - assert ( - betterproto.which_one_of(foo3, 
"group1") - == betterproto.which_one_of(_round_trip_serialization(foo3), "group1") - == ("qux", Empty()) - ) - - assert bytes(foo4) == b"" - assert ( - betterproto.which_one_of(foo4, "group1") - == betterproto.which_one_of(_round_trip_serialization(foo4), "group1") - == ("", None) - ) - - -def test_message_repr(): - from tests.output_betterproto.recursivemessage import Test - - assert repr(Test(name="Loki")) == "Test(name='Loki')" - assert repr(Test(child=Test(), name="Loki")) == "Test(name='Loki', child=Test())" - - -def test_bool(): - """Messages should evaluate similarly to a collection - >>> test = [] - >>> bool(test) - ... False - >>> test.append(1) - >>> bool(test) - ... True - >>> del test[0] - >>> bool(test) - ... False - """ - - @dataclass - class Falsy(betterproto.Message): - pass - - @dataclass - class Truthy(betterproto.Message): - bar: int = betterproto.int32_field(1) - - assert not Falsy() - t = Truthy() - assert not t - t.bar = 1 - assert t - t.bar = 0 - assert not t - - -# valid ISO datetimes according to https://www.myintervals.com/blog/2009/05/20/iso-8601-date-validation-that-doesnt-suck/ -iso_candidates = """2009-12-12T12:34 -2009 -2009-05-19 -2009-05-19 -20090519 -2009123 -2009-05 -2009-123 -2009-222 -2009-001 -2009-W01-1 -2009-W51-1 -2009-W33 -2009W511 -2009-05-19 -2009-05-19 00:00 -2009-05-19 14 -2009-05-19 14:31 -2009-05-19 14:39:22 -2009-05-19T14:39Z -2009-W21-2 -2009-W21-2T01:22 -2009-139 -2009-05-19 14:39:22-06:00 -2009-05-19 14:39:22+0600 -2009-05-19 14:39:22-01 -20090621T0545Z -2007-04-06T00:00 -2007-04-05T24:00 -2010-02-18T16:23:48.5 -2010-02-18T16:23:48,444 -2010-02-18T16:23:48,3-06:00 -2010-02-18T16:23:00.4 -2010-02-18T16:23:00,25 -2010-02-18T16:23:00.33+0600 -2010-02-18T16:00:00.23334444 -2010-02-18T16:00:00,2283 -2009-05-19 143922 -2009-05-19 1439""".split("\n") - - -def test_iso_datetime(): - @dataclass - class Envelope(betterproto.Message): - ts: datetime = betterproto.message_field(1) - - msg = Envelope() - - for _, candidate in enumerate(iso_candidates): - msg.from_dict({"ts": candidate}) - assert isinstance(msg.ts, datetime) - - -def test_iso_datetime_list(): - @dataclass - class Envelope(betterproto.Message): - timestamps: List[datetime] = betterproto.message_field(1, repeated=True) - - msg = Envelope() - - msg.from_dict({"timestamps": iso_candidates}) - assert all([isinstance(item, datetime) for item in msg.timestamps]) - - -def test_service_argument__expected_parameter(): - from tests.output_betterproto.service import TestStub - - sig = signature(TestStub.do_thing) - do_thing_request_parameter = sig.parameters["do_thing_request"] - assert do_thing_request_parameter.default is Parameter.empty - assert do_thing_request_parameter.annotation == "DoThingRequest" - - -def test_is_set(): - @dataclass - class Spam(betterproto.Message): - foo: bool = betterproto.bool_field(1) - bar: Optional[int] = betterproto.int32_field(2, optional=True) - - assert not Spam().is_set("foo") - assert not Spam().is_set("bar") - assert Spam(foo=True).is_set("foo") - assert Spam(foo=True, bar=0).is_set("bar") - - -def test_equality_comparison(): - from tests.output_betterproto.bool import Test as TestMessage - - msg = TestMessage(value=True) - - assert msg == msg - assert msg == ANY - assert msg == TestMessage(value=True) - assert msg != 1 - assert msg != TestMessage(value=False) diff --git a/tests/test_inputs.py b/tests/test_inputs.py deleted file mode 100644 index 6b384421..00000000 --- a/tests/test_inputs.py +++ /dev/null @@ -1,208 +0,0 @@ -import importlib -import json 
-import math -import os -import sys -from collections import namedtuple -from types import ModuleType -from typing import ( - Any, - Dict, - List, - Set, -) - -import pytest - -import betterproto -from tests.inputs import config as test_input_config -from tests.mocks import MockChannel -from tests.util import ( - find_module, - get_directories, - get_test_case_json_data, - inputs_path, -) - -# Force pure-python implementation instead of C++, otherwise imports -# break things because we can't properly reset the symbol database. -os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python" - -from google.protobuf.json_format import Parse - - -class TestCases: - def __init__( - self, - path, - services: Set[str], - xfail: Set[str], - ): - _all = set(get_directories(path)) - {"__pycache__"} - _services = services - _messages = (_all - services) - {"__pycache__"} - _messages_with_json = {test for test in _messages if get_test_case_json_data(test)} - - unknown_xfail_tests = xfail - _all - if unknown_xfail_tests: - raise Exception(f"Unknown test(s) in config.py: {unknown_xfail_tests}") - - self.all = self.apply_xfail_marks(_all, xfail) - self.services = self.apply_xfail_marks(_services, xfail) - self.messages = self.apply_xfail_marks(_messages, xfail) - self.messages_with_json = self.apply_xfail_marks(_messages_with_json, xfail) - - @staticmethod - def apply_xfail_marks(test_set: Set[str], xfail: Set[str]): - return [pytest.param(test, marks=pytest.mark.xfail) if test in xfail else test for test in test_set] - - -test_cases = TestCases( - path=inputs_path, - services=test_input_config.services, - xfail=test_input_config.xfail, -) - -plugin_output_package = "tests.output_betterproto" -reference_output_package = "tests.output_reference" - -TestData = namedtuple("TestData", ["plugin_module", "reference_module", "json_data"]) - - -def module_has_entry_point(module: ModuleType): - return any(hasattr(module, attr) for attr in ["Test", "TestStub"]) - - -def list_replace_nans(items: List) -> List[Any]: - """Replace float("nan") in a list with the string "NaN" - - Parameters - ---------- - items : List - List to update - - Returns - ------- - List[Any] - Updated list - """ - result = [] - for item in items: - if isinstance(item, list): - result.append(list_replace_nans(item)) - elif isinstance(item, dict): - result.append(dict_replace_nans(item)) - elif isinstance(item, float) and math.isnan(item): - result.append(betterproto.NAN) - return result - - -def dict_replace_nans(input_dict: Dict[Any, Any]) -> Dict[Any, Any]: - """Replace float("nan") in a dictionary with the string "NaN" - - Parameters - ---------- - input_dict : Dict[Any, Any] - Dictionary to update - - Returns - ------- - Dict[Any, Any] - Updated dictionary - """ - result = {} - for key, value in input_dict.items(): - if isinstance(value, dict): - value = dict_replace_nans(value) - elif isinstance(value, list): - value = list_replace_nans(value) - elif isinstance(value, float) and math.isnan(value): - value = betterproto.NAN - result[key] = value - return result - - -@pytest.fixture -def test_data(request, reset_sys_path): - test_case_name = request.param - - reference_module_root = os.path.join(*reference_output_package.split("."), test_case_name) - sys.path.append(reference_module_root) - - plugin_module = importlib.import_module(f"{plugin_output_package}.{test_case_name}") - - plugin_module_entry_point = find_module(plugin_module, module_has_entry_point) - - if not plugin_module_entry_point: - raise Exception( - f"Test case 
{repr(test_case_name)} has no entry point. " - "Please add a proto message or service called Test and recompile." - ) - - yield ( - TestData( - plugin_module=plugin_module_entry_point, - reference_module=lambda: importlib.import_module( - f"{reference_output_package}.{test_case_name}.{test_case_name}_pb2" - ), - json_data=get_test_case_json_data(test_case_name), - ) - ) - - -@pytest.mark.parametrize("test_data", test_cases.messages, indirect=True) -def test_message_can_instantiated(test_data: TestData) -> None: - plugin_module, *_ = test_data - plugin_module.Test() - - -@pytest.mark.parametrize("test_data", test_cases.messages, indirect=True) -def test_message_equality(test_data: TestData) -> None: - plugin_module, *_ = test_data - message1 = plugin_module.Test() - message2 = plugin_module.Test() - assert message1 == message2 - - -@pytest.mark.parametrize("test_data", test_cases.messages_with_json, indirect=True) -def test_message_json(test_data: TestData) -> None: - plugin_module, _, json_data = test_data - - for sample in json_data: - if sample.belongs_to(test_input_config.non_symmetrical_json): - continue - - message: betterproto.Message = plugin_module.Test() - - message.from_json(sample.json) - message_json = message.to_json(indent=0) - - assert dict_replace_nans(json.loads(message_json)) == dict_replace_nans(json.loads(sample.json)) - - -@pytest.mark.parametrize("test_data", test_cases.services, indirect=True) -def test_service_can_be_instantiated(test_data: TestData) -> None: - test_data.plugin_module.TestStub(MockChannel()) - - -@pytest.mark.parametrize("test_data", test_cases.messages_with_json, indirect=True) -def test_binary_compatibility(test_data: TestData) -> None: - plugin_module, reference_module, json_data = test_data - - for sample in json_data: - reference_instance = Parse(sample.json, reference_module().Test()) - reference_binary_output = reference_instance.SerializeToString() - - plugin_instance_from_json: betterproto.Message = plugin_module.Test().from_json(sample.json) - plugin_instance_from_binary = plugin_module.Test.FromString(reference_binary_output) - - # Generally this can't be relied on, but here we are aiming to match the - # existing Python implementation and aren't doing anything tricky. 
- # https://developers.google.com/protocol-buffers/docs/encoding#implications - assert bytes(plugin_instance_from_json) == reference_binary_output - assert bytes(plugin_instance_from_binary) == reference_binary_output - - assert plugin_instance_from_json == plugin_instance_from_binary - assert dict_replace_nans(plugin_instance_from_json.to_dict()) == dict_replace_nans( - plugin_instance_from_binary.to_dict() - ) diff --git a/tests/test_mapmessage.py b/tests/test_mapmessage.py deleted file mode 100644 index 16bd6ce6..00000000 --- a/tests/test_mapmessage.py +++ /dev/null @@ -1,18 +0,0 @@ -from tests.output_betterproto.mapmessage import ( - Nested, - Test, -) - - -def test_mapmessage_to_dict_preserves_message(): - message = Test( - items={ - "test": Nested( - count=1, - ) - } - ) - - message.to_dict() - - assert isinstance(message.items["test"], Nested), "Wrong nested type after to_dict" diff --git a/tests/test_module_validation.py b/tests/test_module_validation.py deleted file mode 100644 index 9cae272b..00000000 --- a/tests/test_module_validation.py +++ /dev/null @@ -1,111 +0,0 @@ -from typing import ( - List, - Optional, - Set, -) - -import pytest - -from betterproto.plugin.module_validation import ModuleValidator - - -@pytest.mark.parametrize( - ["text", "expected_collisions"], - [ - pytest.param( - ["import os"], - None, - id="single import", - ), - pytest.param( - ["import os", "import sys"], - None, - id="multiple imports", - ), - pytest.param( - ["import os", "import os"], - {"os"}, - id="duplicate imports", - ), - pytest.param( - ["from os import path", "import os"], - None, - id="duplicate imports with alias", - ), - pytest.param( - ["from os import path", "import os as os_alias"], - None, - id="duplicate imports with alias", - ), - pytest.param( - ["from os import path", "import os as path"], - {"path"}, - id="duplicate imports with alias", - ), - pytest.param( - ["import os", "class os:"], - {"os"}, - id="duplicate import with class", - ), - pytest.param( - ["import os", "class os:", " pass", "import sys"], - {"os"}, - id="duplicate import with class and another", - ), - pytest.param( - ["def test(): pass", "class test:"], - {"test"}, - id="duplicate class and function", - ), - pytest.param( - ["def test(): pass", "def test(): pass"], - {"test"}, - id="duplicate functions", - ), - pytest.param( - ["def test(): pass", "test = 100"], - {"test"}, - id="function and variable", - ), - pytest.param( - ["def test():", " test = 3"], - None, - id="function and variable in function", - ), - pytest.param( - [ - "def test(): pass", - "'''", - "def test(): pass", - "'''", - "def test_2(): pass", - ], - None, - id="duplicate functions with multiline string", - ), - pytest.param( - ["def test(): pass", "# def test(): pass"], - None, - id="duplicate functions with comments", - ), - pytest.param( - ["from test import (", " A", " B", " C", ")"], - None, - id="multiline import", - ), - pytest.param( - ["from test import (", " A", " B", " C", ")", "from test import A"], - {"A"}, - id="multiline import with duplicate", - ), - ], -) -def test_module_validator(text: List[str], expected_collisions: Optional[Set[str]]): - line_iterator = iter(text) - validator = ModuleValidator(line_iterator) - valid = validator.validate() - if expected_collisions is None: - assert valid - else: - assert set(validator.collisions.keys()) == expected_collisions - assert not valid diff --git a/tests/test_oneof_pattern_matching.py b/tests/test_oneof_pattern_matching.py deleted file mode 100644 index 186cfa3d..00000000 --- 
a/tests/test_oneof_pattern_matching.py +++ /dev/null @@ -1,13 +0,0 @@ -import sys - -import pytest - - -@pytest.mark.skipif( - sys.version_info < (3, 10), - reason="pattern matching is only supported in python3.10+", -) -def test_oneof_pattern_matching(): - from tests.oneof_pattern_matching import test_oneof_pattern_matching - - test_oneof_pattern_matching() diff --git a/tests/test_pickling.py b/tests/test_pickling.py deleted file mode 100644 index f45e7a67..00000000 --- a/tests/test_pickling.py +++ /dev/null @@ -1,161 +0,0 @@ -import pickle -from copy import ( - copy, - deepcopy, -) -from dataclasses import dataclass -from typing import ( - Dict, - List, -) - -import cachelib - -import betterproto -from betterproto.lib.google import protobuf as google - - -def unpickled(message): - return pickle.loads(pickle.dumps(message)) - - -@dataclass(eq=False, repr=False) -class Fe(betterproto.Message): - abc: str = betterproto.string_field(1) - - -@dataclass(eq=False, repr=False) -class Fi(betterproto.Message): - abc: str = betterproto.string_field(1) - - -@dataclass(eq=False, repr=False) -class Fo(betterproto.Message): - abc: str = betterproto.string_field(1) - - -@dataclass(eq=False, repr=False) -class NestedData(betterproto.Message): - struct_foo: Dict[str, "google.Struct"] = betterproto.map_field(1, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE) - map_str_any_bar: Dict[str, "google.Any"] = betterproto.map_field( - 2, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE - ) - - -@dataclass(eq=False, repr=False) -class Complex(betterproto.Message): - foo_str: str = betterproto.string_field(1) - fe: "Fe" = betterproto.message_field(3, group="grp") - fi: "Fi" = betterproto.message_field(4, group="grp") - fo: "Fo" = betterproto.message_field(5, group="grp") - nested_data: "NestedData" = betterproto.message_field(6) - mapping: Dict[str, "google.Any"] = betterproto.map_field(7, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE) - - -def complex_msg(): - return Complex( - foo_str="yep", - fe=Fe(abc="1"), - nested_data=NestedData( - struct_foo={ - "foo": google.Struct( - fields={ - "hello": google.Value(list_value=google.ListValue(values=[google.Value(string_value="world")])) - } - ), - }, - map_str_any_bar={ - "key": google.Any(value=b"value"), - }, - ), - mapping={ - "message": google.Any(value=bytes(Fi(abc="hi"))), - "string": google.Any(value=b"howdy"), - }, - ) - - -def test_pickling_complex_message(): - msg = complex_msg() - deser = unpickled(msg) - assert msg == deser - assert msg.fe.abc == "1" - assert msg.is_set("fi") is not True - assert msg.mapping["message"] == google.Any(value=bytes(Fi(abc="hi"))) - assert msg.mapping["string"].value.decode() == "howdy" - assert msg.nested_data.struct_foo["foo"].fields["hello"].list_value.values[0].string_value == "world" - - -def test_recursive_message_defaults(): - from tests.output_betterproto.recursivemessage import ( - Intermediate, - Test as RecursiveMessage, - ) - - msg = RecursiveMessage(name="bob", intermediate=Intermediate(42)) - msg = unpickled(msg) - - # set values are as expected - assert msg == RecursiveMessage(name="bob", intermediate=Intermediate(42)) - - # lazy initialized works modifies the message - assert msg != RecursiveMessage(name="bob", intermediate=Intermediate(42), child=RecursiveMessage(name="jude")) - msg.child = RecursiveMessage(child=RecursiveMessage(name="jude")) - assert msg == RecursiveMessage( - name="bob", - intermediate=Intermediate(42), - child=RecursiveMessage(child=RecursiveMessage(name="jude")), - ) - - -@dataclass 
-class PickledMessage(betterproto.Message): - foo: bool = betterproto.bool_field(1) - bar: int = betterproto.int32_field(2) - baz: List[str] = betterproto.string_field(3, repeated=True) - - -def test_copyability(): - msg = PickledMessage(bar=12, baz=["hello"]) - msg = unpickled(msg) - - copied = copy(msg) - assert msg == copied - assert msg is not copied - assert msg.baz is copied.baz - - deepcopied = deepcopy(msg) - assert msg == deepcopied - assert msg is not deepcopied - assert msg.baz is not deepcopied.baz - - -def test_message_can_be_cached(): - """Cachelib uses pickling to cache values""" - - cache = cachelib.SimpleCache() - - def use_cache(): - calls = getattr(use_cache, "calls", 0) - result = cache.get("message") - if result is not None: - return result - else: - setattr(use_cache, "calls", calls + 1) - result = complex_msg() - cache.set("message", result) - return result - - for n in range(10): - if n == 0: - assert not cache.has("message") - else: - assert cache.has("message") - - msg = use_cache() - assert use_cache.calls == 1 # The message is only ever built once - assert msg.fe.abc == "1" - assert msg.is_set("fi") is not True - assert msg.mapping["message"] == google.Any(value=bytes(Fi(abc="hi"))) - assert msg.mapping["string"].value.decode() == "howdy" - assert msg.nested_data.struct_foo["foo"].fields["hello"].list_value.values[0].string_value == "world" diff --git a/tests/test_streams.py b/tests/test_streams.py deleted file mode 100644 index cf259c8c..00000000 --- a/tests/test_streams.py +++ /dev/null @@ -1,362 +0,0 @@ -from io import BytesIO -from pathlib import Path -from shutil import which -from subprocess import run - -import pytest - -import betterproto -from tests.output_betterproto import ( - map, - nested, - oneof, - repeated, - repeatedpacked, -) - -oneof_example = oneof.Test().from_dict({"pitied": 1, "just_a_regular_field": 123456789, "bar_name": "Testing"}) - -len_oneof = len(bytes(oneof_example)) - -nested_example = nested.Test().from_dict( - { - "nested": {"count": 1}, - "sibling": {"foo": 2}, - "sibling2": {"foo": 3}, - "msg": nested.TestMsg.THIS, - } -) - -repeated_example = repeated.Test().from_dict({"names": ["blah", "Blah2"]}) - -packed_example = repeatedpacked.Test().from_dict( - {"counts": [1, 2, 3], "signed": [-1, 2, -3], "fixed": [1.2, -2.3, 3.4]} -) - -map_example = map.Test().from_dict({"counts": {"blah": 1, "Blah2": 2}}) - -streams_path = Path("tests/streams/") - -java = which("java") - - -def test_load_varint_too_long(): - with BytesIO(b"\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x01") as stream, pytest.raises(ValueError): - betterproto.load_varint(stream) - - with BytesIO(b"\x80\x80\x80\x80\x80\x80\x80\x80\x80\x01") as stream: - # This should not raise a ValueError, as it is within 64 bits - betterproto.load_varint(stream) - - -def test_load_varint_file(): - with open(streams_path / "message_dump_file_single.expected", "rb") as stream: - assert betterproto.load_varint(stream) == (8, b"\x08") # Single-byte varint - stream.read(2) # Skip until first multi-byte - assert betterproto.load_varint(stream) == ( - 123456789, - b"\x95\x9a\xef\x3a", - ) # Multi-byte varint - - -def test_load_varint_cutoff(): - with open(streams_path / "load_varint_cutoff.in", "rb") as stream: - with pytest.raises(EOFError): - betterproto.load_varint(stream) - - stream.seek(1) - with pytest.raises(EOFError): - betterproto.load_varint(stream) - - -def test_dump_varint_file(tmp_path): - # Dump test varints to file - with open(tmp_path / "dump_varint_file.out", "wb") as stream: - 
betterproto.dump_varint(8, stream) # Single-byte varint - betterproto.dump_varint(123456789, stream) # Multi-byte varint - - # Check that file contents are as expected - with open(tmp_path / "dump_varint_file.out", "rb") as test_stream, open( - streams_path / "message_dump_file_single.expected", "rb" - ) as exp_stream: - assert betterproto.load_varint(test_stream) == betterproto.load_varint(exp_stream) - exp_stream.read(2) - assert betterproto.load_varint(test_stream) == betterproto.load_varint(exp_stream) - - -def test_parse_fields(): - with open(streams_path / "message_dump_file_single.expected", "rb") as stream: - parsed_bytes = betterproto.parse_fields(stream.read()) - - with open(streams_path / "message_dump_file_single.expected", "rb") as stream: - parsed_stream = betterproto.load_fields(stream) - for field in parsed_bytes: - assert field == next(parsed_stream) - - -def test_message_dump_file_single(tmp_path): - # Write the message to the stream - with open(tmp_path / "message_dump_file_single.out", "wb") as stream: - oneof_example.dump(stream) - - # Check that the outputted file is exactly as expected - with open(tmp_path / "message_dump_file_single.out", "rb") as test_stream, open( - streams_path / "message_dump_file_single.expected", "rb" - ) as exp_stream: - assert test_stream.read() == exp_stream.read() - - -def test_message_dump_file_multiple(tmp_path): - # Write the same Message twice and another, different message - with open(tmp_path / "message_dump_file_multiple.out", "wb") as stream: - oneof_example.dump(stream) - oneof_example.dump(stream) - nested_example.dump(stream) - - # Check that all three Messages were outputted to the file correctly - with open(tmp_path / "message_dump_file_multiple.out", "rb") as test_stream, open( - streams_path / "message_dump_file_multiple.expected", "rb" - ) as exp_stream: - assert test_stream.read() == exp_stream.read() - - -def test_message_dump_delimited(tmp_path): - with open(tmp_path / "message_dump_delimited.out", "wb") as stream: - oneof_example.dump(stream, True) - oneof_example.dump(stream, True) - nested_example.dump(stream, True) - - with open(tmp_path / "message_dump_delimited.out", "rb") as test_stream, open( - streams_path / "delimited_messages.in", "rb" - ) as exp_stream: - assert test_stream.read() == exp_stream.read() - - -def test_message_load_file_single(): - with open(streams_path / "message_dump_file_single.expected", "rb") as stream: - assert oneof.Test().load(stream) == oneof_example - stream.seek(0) - assert oneof.Test().load(stream, len_oneof) == oneof_example - - -def test_message_load_file_multiple(): - with open(streams_path / "message_dump_file_multiple.expected", "rb") as stream: - oneof_size = len_oneof - assert oneof.Test().load(stream, oneof_size) == oneof_example - assert oneof.Test().load(stream, oneof_size) == oneof_example - assert nested.Test().load(stream) == nested_example - assert stream.read(1) == b"" - - -def test_message_load_too_small(): - with open(streams_path / "message_dump_file_single.expected", "rb") as stream, pytest.raises(ValueError): - oneof.Test().load(stream, len_oneof - 1) - - -def test_message_load_delimited(): - with open(streams_path / "delimited_messages.in", "rb") as stream: - assert oneof.Test().load(stream, betterproto.SIZE_DELIMITED) == oneof_example - assert oneof.Test().load(stream, betterproto.SIZE_DELIMITED) == oneof_example - assert nested.Test().load(stream, betterproto.SIZE_DELIMITED) == nested_example - assert stream.read(1) == b"" - - -def 
test_message_load_too_large(): - with open(streams_path / "message_dump_file_single.expected", "rb") as stream, pytest.raises(ValueError): - oneof.Test().load(stream, len_oneof + 1) - - -def test_calculate_varint_size_negative(): - single_byte = -1 - multi_byte = -10000000 - edge = -(1 << 63) - before = -(1 << 63) + 1 - - assert len(betterproto.encode_varint(single_byte)) == 10 - assert len(betterproto.encode_varint(multi_byte)) == 10 - assert len(betterproto.encode_varint(edge)) == 10 - assert len(betterproto.encode_varint(before)) == 10 - - -def test_calculate_varint_size_positive(): - single_byte = 1 - multi_byte = 10000000 - - assert len(betterproto.encode_varint(single_byte)) - assert len(betterproto.encode_varint(multi_byte)) - - -def test_dump_varint_negative(tmp_path): - single_byte = -1 - multi_byte = -10000000 - edge = -(1 << 63) - beyond = -(1 << 63) - 1 - before = -(1 << 63) + 1 - - with open(tmp_path / "dump_varint_negative.out", "wb") as stream: - betterproto.dump_varint(single_byte, stream) - betterproto.dump_varint(multi_byte, stream) - betterproto.dump_varint(edge, stream) - betterproto.dump_varint(before, stream) - - with pytest.raises(ValueError): - betterproto.dump_varint(beyond, stream) - - with open(streams_path / "dump_varint_negative.expected", "rb") as exp_stream, open( - tmp_path / "dump_varint_negative.out", "rb" - ) as test_stream: - assert test_stream.read() == exp_stream.read() - - -def test_dump_varint_positive(tmp_path): - single_byte = 1 - multi_byte = 10000000 - - with open(tmp_path / "dump_varint_positive.out", "wb") as stream: - betterproto.dump_varint(single_byte, stream) - betterproto.dump_varint(multi_byte, stream) - - with open(tmp_path / "dump_varint_positive.out", "rb") as test_stream, open( - streams_path / "dump_varint_positive.expected", "rb" - ) as exp_stream: - assert test_stream.read() == exp_stream.read() - - -# Java compatibility tests - - -@pytest.fixture(scope="module") -def compile_jar(): - # Skip if not all required tools are present - if java is None: - pytest.skip("`java` command is absent and is required") - mvn = which("mvn") - if mvn is None: - pytest.skip("Maven is absent and is required") - - # Compile the JAR - proc_maven = run([mvn, "clean", "install", "-f", "tests/streams/java/pom.xml"]) - if proc_maven.returncode != 0: - pytest.skip("Maven compatibility-test.jar build failed (maybe Java version <11?)") - - -jar = "tests/streams/java/target/compatibility-test.jar" - - -def run_jar(command: str, tmp_path): - return run([java, "-jar", jar, command, tmp_path], check=True) - - -def run_java_single_varint(value: int, tmp_path) -> int: - # Write single varint to file - with open(tmp_path / "py_single_varint.out", "wb") as stream: - betterproto.dump_varint(value, stream) - - # Have Java read this varint and write it back - run_jar("single_varint", tmp_path) - - # Read single varint from Java output file - with open(tmp_path / "java_single_varint.out", "rb") as stream: - returned = betterproto.load_varint(stream) - with pytest.raises(EOFError): - betterproto.load_varint(stream) - - return returned - - -def test_single_varint(compile_jar, tmp_path): - single_byte = (1, b"\x01") - multi_byte = (123456789, b"\x95\x9a\xef\x3a") - - # Write a single-byte varint to a file and have Java read it back - returned = run_java_single_varint(single_byte[0], tmp_path) - assert returned == single_byte - - # Same for a multi-byte varint - returned = run_java_single_varint(multi_byte[0], tmp_path) - assert returned == multi_byte - - -def 
test_multiple_varints(compile_jar, tmp_path): - single_byte = (1, b"\x01") - multi_byte = (123456789, b"\x95\x9a\xef\x3a") - over32 = (3000000000, b"\x80\xbc\xc1\x96\x0b") - - # Write two varints to the same file - with open(tmp_path / "py_multiple_varints.out", "wb") as stream: - betterproto.dump_varint(single_byte[0], stream) - betterproto.dump_varint(multi_byte[0], stream) - betterproto.dump_varint(over32[0], stream) - - # Have Java read these varints and write them back - run_jar("multiple_varints", tmp_path) - - # Read varints from Java output file - with open(tmp_path / "java_multiple_varints.out", "rb") as stream: - returned_single = betterproto.load_varint(stream) - returned_multi = betterproto.load_varint(stream) - returned_over32 = betterproto.load_varint(stream) - with pytest.raises(EOFError): - betterproto.load_varint(stream) - - assert returned_single == single_byte - assert returned_multi == multi_byte - assert returned_over32 == over32 - - -def test_single_message(compile_jar, tmp_path): - # Write message to file - with open(tmp_path / "py_single_message.out", "wb") as stream: - oneof_example.dump(stream) - - # Have Java read and return the message - run_jar("single_message", tmp_path) - - # Read and check the returned message - with open(tmp_path / "java_single_message.out", "rb") as stream: - returned = oneof.Test().load(stream, len(bytes(oneof_example))) - assert stream.read() == b"" - - assert returned == oneof_example - - -def test_multiple_messages(compile_jar, tmp_path): - # Write delimited messages to file - with open(tmp_path / "py_multiple_messages.out", "wb") as stream: - oneof_example.dump(stream, True) - nested_example.dump(stream, True) - - # Have Java read and return the messages - run_jar("multiple_messages", tmp_path) - - # Read and check the returned messages - with open(tmp_path / "java_multiple_messages.out", "rb") as stream: - returned_oneof = oneof.Test().load(stream, betterproto.SIZE_DELIMITED) - returned_nested = nested.Test().load(stream, betterproto.SIZE_DELIMITED) - assert stream.read() == b"" - - assert returned_oneof == oneof_example - assert returned_nested == nested_example - - -def test_infinite_messages(compile_jar, tmp_path): - num_messages = 5 - - # Write delimited messages to file - with open(tmp_path / "py_infinite_messages.out", "wb") as stream: - for _ in range(num_messages): - oneof_example.dump(stream, True) - - # Have Java read and return the messages - run_jar("infinite_messages", tmp_path) - - # Read and check the returned messages - messages = [] - with open(tmp_path / "java_infinite_messages.out", "rb") as stream: - while True: - try: - messages.append(oneof.Test().load(stream, betterproto.SIZE_DELIMITED)) - except EOFError: - break - - assert len(messages) == num_messages diff --git a/tests/test_struct.py b/tests/test_struct.py deleted file mode 100644 index 6376ea45..00000000 --- a/tests/test_struct.py +++ /dev/null @@ -1,36 +0,0 @@ -import json - -from betterproto.lib.google.protobuf import Struct -from betterproto.lib.pydantic.google.protobuf import Struct as StructPydantic - - -def test_struct_roundtrip(): - data = { - "foo": "bar", - "baz": None, - "quux": 123, - "zap": [1, {"two": 3}, "four"], - } - data_json = json.dumps(data) - - struct_from_dict = Struct().from_dict(data) - assert struct_from_dict.fields == data - assert struct_from_dict.to_dict() == data - assert struct_from_dict.to_json() == data_json - - struct_from_json = Struct().from_json(data_json) - assert struct_from_json.fields == data - assert 
struct_from_json.to_dict() == data - assert struct_from_json == struct_from_dict - assert struct_from_json.to_json() == data_json - - struct_pyd_from_dict = StructPydantic(fields={}).from_dict(data) - assert struct_pyd_from_dict.fields == data - assert struct_pyd_from_dict.to_dict() == data - assert struct_pyd_from_dict.to_json() == data_json - - struct_pyd_from_dict = StructPydantic(fields={}).from_json(data_json) - assert struct_pyd_from_dict.fields == data - assert struct_pyd_from_dict.to_dict() == data - assert struct_pyd_from_dict == struct_pyd_from_dict - assert struct_pyd_from_dict.to_json() == data_json diff --git a/tests/test_timestamp.py b/tests/test_timestamp.py deleted file mode 100644 index 422738ff..00000000 --- a/tests/test_timestamp.py +++ /dev/null @@ -1,27 +0,0 @@ -from datetime import ( - datetime, - timezone, -) - -import pytest - -from betterproto import _Timestamp - - -@pytest.mark.parametrize( - "dt", - [ - datetime(2023, 10, 11, 9, 41, 12, tzinfo=timezone.utc), - datetime.now(timezone.utc), - # potential issue with floating point precision: - datetime(2242, 12, 31, 23, 0, 0, 1, tzinfo=timezone.utc), - # potential issue with negative timestamps: - datetime(1969, 12, 31, 23, 0, 0, 1, tzinfo=timezone.utc), - ], -) -def test_timestamp_to_datetime_and_back(dt: datetime): - """ - Make sure converting a datetime to a protobuf timestamp message - and then back again ends up with the same datetime. - """ - assert _Timestamp.from_datetime(dt).to_datetime() == dt diff --git a/tests/test_typing_compiler.py b/tests/test_typing_compiler.py deleted file mode 100644 index 1fc6f55c..00000000 --- a/tests/test_typing_compiler.py +++ /dev/null @@ -1,70 +0,0 @@ -from betterproto.plugin.typing_compiler import ( - DirectImportTypingCompiler, - NoTyping310TypingCompiler, - TypingImportTypingCompiler, -) - - -def test_direct_import_typing_compiler(): - compiler = DirectImportTypingCompiler() - assert compiler.imports() == {} - assert compiler.optional("str") == "Optional[str]" - assert compiler.imports() == {"typing": {"Optional"}} - assert compiler.list("str") == "List[str]" - assert compiler.imports() == {"typing": {"Optional", "List"}} - assert compiler.dict("str", "int") == "Dict[str, int]" - assert compiler.imports() == {"typing": {"Optional", "List", "Dict"}} - assert compiler.union("str", "int") == "Union[str, int]" - assert compiler.imports() == {"typing": {"Optional", "List", "Dict", "Union"}} - assert compiler.iterable("str") == "Iterable[str]" - assert compiler.imports() == {"typing": {"Optional", "List", "Dict", "Union", "Iterable"}} - assert compiler.async_iterable("str") == "AsyncIterable[str]" - assert compiler.imports() == {"typing": {"Optional", "List", "Dict", "Union", "Iterable", "AsyncIterable"}} - assert compiler.async_iterator("str") == "AsyncIterator[str]" - assert compiler.imports() == { - "typing": { - "Optional", - "List", - "Dict", - "Union", - "Iterable", - "AsyncIterable", - "AsyncIterator", - } - } - - -def test_typing_import_typing_compiler(): - compiler = TypingImportTypingCompiler() - assert compiler.imports() == {} - assert compiler.optional("str") == "typing.Optional[str]" - assert compiler.imports() == {"typing": None} - assert compiler.list("str") == "typing.List[str]" - assert compiler.imports() == {"typing": None} - assert compiler.dict("str", "int") == "typing.Dict[str, int]" - assert compiler.imports() == {"typing": None} - assert compiler.union("str", "int") == "typing.Union[str, int]" - assert compiler.imports() == {"typing": None} - assert 
compiler.iterable("str") == "typing.Iterable[str]" - assert compiler.imports() == {"typing": None} - assert compiler.async_iterable("str") == "typing.AsyncIterable[str]" - assert compiler.imports() == {"typing": None} - assert compiler.async_iterator("str") == "typing.AsyncIterator[str]" - assert compiler.imports() == {"typing": None} - - -def test_no_typing_311_typing_compiler(): - compiler = NoTyping310TypingCompiler() - assert compiler.imports() == {} - assert compiler.optional("str") == "str | None" - assert compiler.imports() == {} - assert compiler.list("str") == "list[str]" - assert compiler.imports() == {} - assert compiler.dict("str", "int") == "dict[str, int]" - assert compiler.imports() == {} - assert compiler.union("str", "int") == "str | int" - assert compiler.imports() == {} - assert compiler.iterable("str") == "Iterable[str]" - assert compiler.async_iterable("str") == "AsyncIterable[str]" - assert compiler.async_iterator("str") == "AsyncIterator[str]" - assert compiler.imports() == {"collections.abc": {"Iterable", "AsyncIterable", "AsyncIterator"}} diff --git a/tests/test_version.py b/tests/test_version.py deleted file mode 100644 index 87bbd758..00000000 --- a/tests/test_version.py +++ /dev/null @@ -1,15 +0,0 @@ -from pathlib import Path - -import tomlkit - -from betterproto import __version__ - -PROJECT_TOML = Path(__file__).joinpath("..", "..", "pyproject.toml").resolve() - - -def test_version(): - with PROJECT_TOML.open() as toml_file: - project_config = tomlkit.loads(toml_file.read()) - assert ( - __version__ == project_config["tool"]["poetry"]["version"] - ), "Project version should match in package and package config" diff --git a/tests/util.py b/tests/util.py deleted file mode 100644 index 6db92b0a..00000000 --- a/tests/util.py +++ /dev/null @@ -1,158 +0,0 @@ -import asyncio -import atexit -import importlib -import os -import platform -import sys -import tempfile -from dataclasses import dataclass -from pathlib import Path -from types import ModuleType -from typing import ( - Callable, - Dict, - Generator, - List, - Optional, - Tuple, - Union, -) - -os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python" - -root_path = Path(__file__).resolve().parent -inputs_path = root_path.joinpath("inputs") -output_path_reference = root_path.joinpath("output_reference") -output_path_betterproto = root_path.joinpath("output_betterproto") -output_path_betterproto_pydantic = root_path.joinpath("output_betterproto_pydantic") - - -def get_files(path, suffix: str) -> Generator[str, None, None]: - for r, dirs, files in os.walk(path): - for filename in [f for f in files if f.endswith(suffix)]: - yield os.path.join(r, filename) - - -def get_directories(path): - for root, directories, files in os.walk(path): - yield from directories - - -async def protoc( - path: Union[str, Path], - output_dir: Union[str, Path], - reference: bool = False, - pydantic_dataclasses: bool = False, -): - path: Path = Path(path).resolve() - output_dir: Path = Path(output_dir).resolve() - python_out_option: str = "python_betterproto_out" if not reference else "python_out" - - if pydantic_dataclasses: - plugin_path = Path("src/betterproto/plugin/main.py") - - if "Win" in platform.system(): - with tempfile.NamedTemporaryFile("w", encoding="UTF-8", suffix=".bat", delete=False) as tf: - # See https://stackoverflow.com/a/42622705 - tf.writelines( - [ - "@echo off", - f"\nchdir {os.getcwd()}", - f"\n{sys.executable} -u {plugin_path.as_posix()}", - ] - ) - - tf.flush() - - plugin_path = Path(tf.name) - 
atexit.register(os.remove, plugin_path) - - command = [ - sys.executable, - "-m", - "grpc.tools.protoc", - f"--plugin=protoc-gen-custom={plugin_path.as_posix()}", - "--experimental_allow_proto3_optional", - "--custom_opt=pydantic_dataclasses", - f"--proto_path={path.as_posix()}", - f"--custom_out={output_dir.as_posix()}", - *[p.as_posix() for p in path.glob("*.proto")], - ] - else: - command = [ - sys.executable, - "-m", - "grpc.tools.protoc", - f"--proto_path={path.as_posix()}", - f"--{python_out_option}={output_dir.as_posix()}", - *[p.as_posix() for p in path.glob("*.proto")], - ] - proc = await asyncio.create_subprocess_exec( - *command, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE - ) - stdout, stderr = await proc.communicate() - return stdout, stderr, proc.returncode - - -@dataclass -class TestCaseJsonFile: - json: str - test_name: str - file_name: str - - def belongs_to(self, non_symmetrical_json: Dict[str, Tuple[str, ...]]) -> bool: - return self.file_name in non_symmetrical_json.get(self.test_name, ()) - - -def get_test_case_json_data(test_case_name: str, *json_file_names: str) -> List[TestCaseJsonFile]: - """ - :return: - A list of all files found in "{inputs_path}/test_case_name" with names matching - f"{test_case_name}.json" or f"{test_case_name}_*.json", OR given by - json_file_names - """ - test_case_dir = inputs_path.joinpath(test_case_name) - possible_file_paths = [ - *(test_case_dir.joinpath(json_file_name) for json_file_name in json_file_names), - test_case_dir.joinpath(f"{test_case_name}.json"), - *test_case_dir.glob(f"{test_case_name}_*.json"), - ] - - result = [] - for test_data_file_path in possible_file_paths: - if not test_data_file_path.exists(): - continue - with test_data_file_path.open("r") as fh: - result.append(TestCaseJsonFile(fh.read(), test_case_name, test_data_file_path.name.split(".")[0])) - - return result - - -def find_module(module: ModuleType, predicate: Callable[[ModuleType], bool]) -> Optional[ModuleType]: - """ - Recursively search module tree for a module that matches the search predicate. - Assumes that the submodules are directories containing __init__.py. - - Example: - - # find module inside foo that contains Test - import foo - test_module = find_module(foo, lambda m: hasattr(m, 'Test')) - """ - if predicate(module): - return module - - module_path = Path(*module.__path__) - - for sub in [sub.parent for sub in module_path.glob("**/__init__.py")]: - if sub == module_path: - continue - sub_module_path = sub.relative_to(module_path) - sub_module_name = ".".join(sub_module_path.parts) - - sub_module = importlib.import_module(f".{sub_module_name}", module.__name__) - - if predicate(sub_module): - return sub_module - - return None From 5fe85a40610326ceae8859b7383a2171cfb0236d Mon Sep 17 00:00:00 2001 From: Adrien Vannson Date: Fri, 20 Dec 2024 11:06:49 +0100 Subject: [PATCH 02/13] Remove non-compiler files --- asv.conf.json | 157 ------------------------------------------------- docs/index.md | 14 +---- pyproject.toml | 23 +------- 3 files changed, 2 insertions(+), 192 deletions(-) delete mode 100644 asv.conf.json diff --git a/asv.conf.json b/asv.conf.json deleted file mode 100644 index 58a43420..00000000 --- a/asv.conf.json +++ /dev/null @@ -1,157 +0,0 @@ -{ - // The version of the config file format. Do not change, unless - // you know what you are doing. 
- "version": 1, - - // The name of the project being benchmarked - "project": "python-betterproto", - - // The project's homepage - "project_url": "https://github.com/danielgtaylor/python-betterproto", - - // The URL or local path of the source code repository for the - // project being benchmarked - "repo": ".", - - // The Python project's subdirectory in your repo. If missing or - // the empty string, the project is assumed to be located at the root - // of the repository. - // "repo_subdir": "", - - // Customizable commands for building, installing, and - // uninstalling the project. See asv.conf.json documentation. - // - "install_command": ["python -m pip install ."], - "uninstall_command": ["return-code=any python -m pip uninstall -y {project}"], - "build_command": ["python -m pip wheel -w {build_cache_dir} {build_dir}"], - - // List of branches to benchmark. If not provided, defaults to "master" - // (for git) or "default" (for mercurial). - // "branches": ["master"], // for git - // "branches": ["default"], // for mercurial - - // The DVCS being used. If not set, it will be automatically - // determined from "repo" by looking at the protocol in the URL - // (if remote), or by looking for special directories, such as - // ".git" (if local). - // "dvcs": "git", - - // The tool to use to create environments. May be "conda", - // "virtualenv" or other value depending on the plugins in use. - // If missing or the empty string, the tool will be automatically - // determined by looking for tools on the PATH environment - // variable. - "environment_type": "virtualenv", - - // timeout in seconds for installing any dependencies in environment - // defaults to 10 min - //"install_timeout": 600, - - // the base URL to show a commit for the project. - // "show_commit_url": "http://github.com/owner/project/commit/", - - // The Pythons you'd like to test against. If not provided, defaults - // to the current version of Python used to run `asv`. - // "pythons": ["2.7", "3.6"], - - // The list of conda channel names to be searched for benchmark - // dependency packages in the specified order - // "conda_channels": ["conda-forge", "defaults"], - - // The matrix of dependencies to test. Each key is the name of a - // package (in PyPI) and the values are version numbers. An empty - // list or empty string indicates to just test against the default - // (latest) version. null indicates that the package is to not be - // installed. If the package to be tested is only available from - // PyPi, and the 'environment_type' is conda, then you can preface - // the package name by 'pip+', and the package will be installed via - // pip (with all the conda available packages installed first, - // followed by the pip installed packages). - // - // "matrix": { - // "numpy": ["1.6", "1.7"], - // "six": ["", null], // test with and without six installed - // "pip+emcee": [""], // emcee is only available for install with pip. - // }, - - // Combinations of libraries/python versions can be excluded/included - // from the set to test. Each entry is a dictionary containing additional - // key-value pairs to include/exclude. - // - // An exclude entry excludes entries where all values match. The - // values are regexps that should match the whole string. - // - // An include entry adds an environment. Only the packages listed - // are installed. The 'python' key is required. The exclude rules - // do not apply to includes. 
- // - // In addition to package names, the following keys are available: - // - // - python - // Python version, as in the *pythons* variable above. - // - environment_type - // Environment type, as above. - // - sys_platform - // Platform, as in sys.platform. Possible values for the common - // cases: 'linux2', 'win32', 'cygwin', 'darwin'. - // - // "exclude": [ - // {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows - // {"environment_type": "conda", "six": null}, // don't run without six on conda - // ], - // - // "include": [ - // // additional env for python2.7 - // {"python": "2.7", "numpy": "1.8"}, - // // additional env if run on windows+conda - // {"platform": "win32", "environment_type": "conda", "python": "2.7", "libpython": ""}, - // ], - - // The directory (relative to the current directory) that benchmarks are - // stored in. If not provided, defaults to "benchmarks" - // "benchmark_dir": "benchmarks", - - // The directory (relative to the current directory) to cache the Python - // environments in. If not provided, defaults to "env" - "env_dir": ".asv/env", - - // The directory (relative to the current directory) that raw benchmark - // results are stored in. If not provided, defaults to "results". - "results_dir": ".asv/results", - - // The directory (relative to the current directory) that the html tree - // should be written to. If not provided, defaults to "html". - "html_dir": ".asv/html", - - // The number of characters to retain in the commit hashes. - // "hash_length": 8, - - // `asv` will cache results of the recent builds in each - // environment, making them faster to install next time. This is - // the number of builds to keep, per environment. - // "build_cache_size": 2, - - // The commits after which the regression search in `asv publish` - // should start looking for regressions. Dictionary whose keys are - // regexps matching to benchmark names, and values corresponding to - // the commit (exclusive) after which to start looking for - // regressions. The default is to start from the first commit - // with results. If the commit is `null`, regression detection is - // skipped for the matching benchmark. - // - // "regressions_first_commits": { - // "some_benchmark": "352cdf", // Consider regressions only after this commit - // "another_benchmark": null, // Skip regression detection altogether - // }, - - // The thresholds for relative change in results, after which `asv - // publish` starts reporting regressions. Dictionary of the same - // form as in ``regressions_first_commits``, with values - // indicating the thresholds. If multiple entries match, the - // maximum is taken. If no entry matches, the default is 5%. - // - // "regressions_thresholds": { - // "some_benchmark": 0.01, // Threshold of 1% - // "another_benchmark": 0.5, // Threshold of 50% - // }, -} diff --git a/docs/index.md b/docs/index.md index 11fbfd6a..c7c2fad3 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,16 +1,4 @@ Home ==== -Welcome to betterproto2's documentation! - -betterproto is a protobuf compiler and interpreter. It improves the experience of using -Protobuf and gRPC in Python, by generating readable, understandable, and idiomatic -Python code, using modern language features. - - -## Features - -- Generated messages are both binary & JSON serializable -- Messages use relevant python types, e.g. 
``Enum``, ``datetime`` and ``timedelta`` objects -- ``async``/``await`` support for gRPC Clients and Servers -- Generates modern, readable, idiomatic python code +Welcome to betterproto2-compiler's documentation! diff --git a/pyproject.toml b/pyproject.toml index 90b55ef5..d4a3f76f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,7 +12,7 @@ packages = [ ] [tool.poetry.dependencies] -python = "^3.8" +python = "^3.10" # The Ruff version is pinned. To update it, also update it in .pre-commit-config.yaml ruff = "~0.7.4" grpclib = "^0.4.1" @@ -21,13 +21,9 @@ python-dateutil = "^2.8" typing-extensions = "^4.7.1" [tool.poetry.group.dev.dependencies] -asv = "^0.4.2" -bpython = "^0.19" jinja2 = ">=3.0.3" mypy = "^1.11.2" pre-commit = "^2.17.0" -grpcio-tools = "^1.54.2" -tox = "^4.0.0" mkdocs-material = {version = "^9.5.49", python = ">=3.10"} mkdocstrings = {version = "^0.27.0", python = ">=3.10", extras = ["python"]} @@ -46,7 +42,6 @@ tomlkit = ">=0.7.0" protoc-gen-python_betterproto = "betterproto.plugin:main" [tool.poetry.extras] -compiler = ["ruff", "jinja2"] rust-codec = ["betterproto-rust-codec"] [tool.ruff] @@ -78,10 +73,6 @@ combine-as-imports = true script = "tests.generate:main" help = "Generate test cases (do this once before running test)" -[tool.poe.tasks.test] -cmd = "pytest" -help = "Run tests" - [tool.poe.tasks.types] cmd = "mypy src --ignore-missing-imports" help = "Check types with mypy" @@ -114,18 +105,6 @@ help = "Check that the imports are sorted" cmd = "sphinx-build docs docs/build" help = "Build the sphinx docs" -[tool.poe.tasks.bench] -shell = "asv run master^! && asv run HEAD^! && asv compare master HEAD" -help = "Benchmark current commit vs. master branch" - -[tool.poe.tasks.clean] -cmd = """ -rm -rf .asv .coverage .mypy_cache .pytest_cache - dist betterproto.egg-info **/__pycache__ - testsoutput_* -""" -help = "Clean out generated files from the workspace" - [tool.poe.tasks.generate_lib] cmd = """ protoc From 61be887bb43dd5f5f5e05ebc23fff72bdad8e1d5 Mon Sep 17 00:00:00 2001 From: Adrien Vannson Date: Fri, 20 Dec 2024 11:46:05 +0100 Subject: [PATCH 03/13] Rename files, mark compilation working again --- poetry.lock | 852 ++++++---------- pyproject.toml | 12 +- pytest.ini | 5 - .../lib/google/protobuf/__init__.py | 1 - .../lib/google/protobuf/compiler/__init__.py | 1 - .../__init__.py | 0 .../_types.py | 0 .../_version.py | 0 .../casing.py | 0 .../compile/__init__.py | 0 .../compile/importing.py | 0 .../compile/naming.py | 2 +- .../enum.py | 0 .../grpc/__init__.py | 0 .../grpc/grpclib_client.py | 0 .../grpc/grpclib_server.py | 0 .../grpc/util/__init__.py | 0 .../grpc/util/async_channel.py | 0 .../lib/__init__.py | 0 .../lib/google/__init__.py | 0 .../lib/google/protobuf/__init__.py | 1 + .../lib/google/protobuf/compiler/__init__.py | 1 + .../lib/pydantic/__init__.py | 0 .../lib/pydantic/google/__init__.py | 0 .../lib/pydantic/google/protobuf/__init__.py | 634 ++++++------ .../google/protobuf/compiler/__init__.py | 54 +- .../lib/std/__init__.py | 0 .../lib/std/google/__init__.py | 0 .../lib/std/google/protobuf/__init__.py | 526 +++++----- .../std/google/protobuf/compiler/__init__.py | 52 +- .../plugin/__init__.py | 0 .../plugin/__main__.py | 0 .../plugin/compiler.py | 0 .../plugin/main.py | 4 +- .../plugin/models.py | 12 +- .../plugin/module_validation.py | 0 .../plugin/parser.py | 4 +- .../plugin/plugin.bat | 0 .../plugin/typing_compiler.py | 0 .../py.typed | 0 .../templates/header.py.j2 | 0 .../templates/template.py.j2 | 0 .../utils.py | 0 tests/__init__.py | 0 
tests/generate.py | 172 ++++ tests/inputs/bool/bool.json | 3 + tests/inputs/bool/bool.proto | 7 + tests/inputs/bool/test_bool.py | 24 + tests/inputs/bytes/bytes.json | 3 + tests/inputs/bytes/bytes.proto | 7 + tests/inputs/casing/casing.json | 4 + tests/inputs/casing/casing.proto | 20 + tests/inputs/casing/test_casing.py | 17 + .../casing_inner_class.proto | 11 + .../test_casing_inner_class.py | 10 + .../casing_message_field_uppercase.proto | 9 + .../casing_message_field_uppercase.py | 8 + tests/inputs/config.py | 29 + tests/inputs/deprecated/deprecated.json | 6 + tests/inputs/deprecated/deprecated.proto | 21 + .../inputs/documentation/documentation.proto | 44 + tests/inputs/double/double-negative.json | 3 + tests/inputs/double/double.json | 3 + tests/inputs/double/double.proto | 7 + .../inputs/empty_repeated/empty_repeated.json | 3 + .../empty_repeated/empty_repeated.proto | 11 + .../inputs/empty_service/empty_service.proto | 7 + tests/inputs/entry/entry.proto | 20 + tests/inputs/enum/enum.json | 9 + tests/inputs/enum/enum.proto | 25 + tests/inputs/enum/test_enum.py | 107 ++ tests/inputs/example/example.proto | 911 ++++++++++++++++++ .../example_service/example_service.proto | 20 + .../example_service/test_example_service.py | 81 ++ .../field_name_identical_to_type.json | 7 + .../field_name_identical_to_type.proto | 13 + tests/inputs/fixed/fixed.json | 6 + tests/inputs/fixed/fixed.proto | 10 + tests/inputs/float/float.json | 9 + tests/inputs/float/float.proto | 14 + .../google_impl_behavior_equivalence.proto | 22 + .../test_google_impl_behavior_equivalence.py | 84 ++ .../googletypes/googletypes-missing.json | 1 + tests/inputs/googletypes/googletypes.json | 7 + tests/inputs/googletypes/googletypes.proto | 16 + .../googletypes_request.proto | 29 + .../test_googletypes_request.py | 46 + .../googletypes_response.proto | 23 + .../test_googletypes_response.py | 63 ++ .../googletypes_response_embedded.proto | 26 + .../test_googletypes_response_embedded.py | 40 + .../googletypes_service_returns_empty.proto | 13 + ...ogletypes_service_returns_googletype.proto | 18 + .../googletypes_struct.json | 5 + .../googletypes_struct.proto | 9 + .../googletypes_value/googletypes_value.json | 11 + .../googletypes_value/googletypes_value.proto | 15 + .../capitalized.proto | 8 + .../import_capitalized_package/test.proto | 11 + .../child.proto | 7 + .../import_child_package_from_package.proto | 11 + .../package_message.proto | 9 + .../child.proto | 7 + .../import_child_package_from_root.proto | 11 + .../import_child_scoping_rules/child.proto | 7 + .../import_child_scoping_rules.proto | 9 + .../import_child_scoping_rules/package.proto | 13 + .../import_circular_dependency.proto | 30 + .../import_circular_dependency/other.proto | 8 + .../import_circular_dependency/root.proto | 7 + .../inputs/import_cousin_package/cousin.proto | 6 + tests/inputs/import_cousin_package/test.proto | 11 + .../cousin.proto | 6 + .../test.proto | 11 + .../child.proto | 7 + ...mport_nested_child_package_from_root.proto | 9 + .../import_packages_same_name.proto | 13 + .../import_packages_same_name/posts_v1.proto | 7 + .../import_packages_same_name/users_v1.proto | 7 + .../import_parent_package_from_child.proto | 12 + .../parent_package_message.proto | 6 + .../child.proto | 11 + .../import_root_package_from_child/root.proto | 7 + .../import_root_sibling.proto | 11 + .../inputs/import_root_sibling/sibling.proto | 7 + .../child_package_request_message.proto | 7 + .../import_service_input_message.proto | 25 + .../request_message.proto | 7 + 
.../test_import_service_input_message.py | 36 + tests/inputs/int32/int32.json | 4 + tests/inputs/int32/int32.proto | 10 + .../inputs/invalid_field/invalid_field.proto | 7 + .../invalid_field/test_invalid_field.py | 17 + tests/inputs/map/map.json | 7 + tests/inputs/map/map.proto | 7 + tests/inputs/mapmessage/mapmessage.json | 10 + tests/inputs/mapmessage/mapmessage.proto | 11 + .../namespace_builtin_types.json | 16 + .../namespace_builtin_types.proto | 40 + .../namespace_keywords.json | 37 + .../namespace_keywords.proto | 46 + tests/inputs/nested/nested.json | 7 + tests/inputs/nested/nested.proto | 26 + tests/inputs/nested2/nested2.proto | 21 + tests/inputs/nested2/package.proto | 7 + tests/inputs/nestedtwice/nestedtwice.json | 11 + tests/inputs/nestedtwice/nestedtwice.proto | 40 + tests/inputs/nestedtwice/test_nestedtwice.py | 25 + tests/inputs/oneof/oneof-name.json | 3 + tests/inputs/oneof/oneof.json | 3 + tests/inputs/oneof/oneof.proto | 23 + tests/inputs/oneof/oneof_name.json | 3 + tests/inputs/oneof/test_oneof.py | 43 + .../oneof_default_value_serialization.proto | 30 + .../test_oneof_default_value_serialization.py | 70 ++ tests/inputs/oneof_empty/oneof_empty.json | 3 + tests/inputs/oneof_empty/oneof_empty.proto | 17 + .../oneof_empty/oneof_empty_maybe1.json | 3 + .../oneof_empty/oneof_empty_maybe2.json | 5 + tests/inputs/oneof_empty/test_oneof_empty.py | 0 .../inputs/oneof_enum/oneof_enum-enum-0.json | 3 + .../inputs/oneof_enum/oneof_enum-enum-1.json | 3 + tests/inputs/oneof_enum/oneof_enum.json | 6 + tests/inputs/oneof_enum/oneof_enum.proto | 20 + tests/inputs/oneof_enum/test_oneof_enum.py | 40 + .../proto3_field_presence.json | 13 + .../proto3_field_presence.proto | 26 + .../proto3_field_presence_default.json | 1 + .../proto3_field_presence_missing.json | 9 + .../test_proto3_field_presence.py | 46 + .../proto3_field_presence_oneof.json | 3 + .../proto3_field_presence_oneof.proto | 22 + .../test_proto3_field_presence_oneof.py | 27 + .../recursivemessage/recursivemessage.json | 12 + .../recursivemessage/recursivemessage.proto | 15 + tests/inputs/ref/ref.json | 5 + tests/inputs/ref/ref.proto | 9 + tests/inputs/ref/repeatedmessage.proto | 11 + .../regression_387/regression_387.proto | 12 + .../regression_387/test_regression_387.py | 12 + .../regression_414/regression_414.proto | 9 + .../regression_414/test_regression_414.py | 15 + tests/inputs/repeated/repeated.json | 3 + tests/inputs/repeated/repeated.proto | 7 + .../repeated_duration_timestamp.json | 4 + .../repeated_duration_timestamp.proto | 12 + .../test_repeated_duration_timestamp.py | 12 + .../repeatedmessage/repeatedmessage.json | 10 + .../repeatedmessage/repeatedmessage.proto | 11 + .../inputs/repeatedpacked/repeatedpacked.json | 5 + .../repeatedpacked/repeatedpacked.proto | 9 + .../rpc_empty_input_message.proto | 13 + .../test_rpc_empty_input_message.py | 24 + tests/inputs/service/service.proto | 35 + .../service_separate_packages/messages.proto | 31 + .../service_separate_packages/service.proto | 12 + tests/inputs/service_uppercase/service.proto | 16 + .../inputs/service_uppercase/test_service.py | 8 + tests/inputs/signed/signed.json | 6 + tests/inputs/signed/signed.proto | 11 + .../test_timestamp_dict_encode.py | 78 ++ .../timestamp_dict_encode.json | 3 + .../timestamp_dict_encode.proto | 9 + tests/util.py | 158 +++ 204 files changed, 4713 insertions(+), 1229 deletions(-) delete mode 100644 pytest.ini delete mode 100644 src/betterproto/lib/google/protobuf/__init__.py delete mode 100644 
src/betterproto/lib/google/protobuf/compiler/__init__.py rename src/{betterproto => betterproto2_compiler}/__init__.py (100%) rename src/{betterproto => betterproto2_compiler}/_types.py (100%) rename src/{betterproto => betterproto2_compiler}/_version.py (100%) rename src/{betterproto => betterproto2_compiler}/casing.py (100%) rename src/{betterproto => betterproto2_compiler}/compile/__init__.py (100%) rename src/{betterproto => betterproto2_compiler}/compile/importing.py (100%) rename src/{betterproto => betterproto2_compiler}/compile/naming.py (92%) rename src/{betterproto => betterproto2_compiler}/enum.py (100%) rename src/{betterproto => betterproto2_compiler}/grpc/__init__.py (100%) rename src/{betterproto => betterproto2_compiler}/grpc/grpclib_client.py (100%) rename src/{betterproto => betterproto2_compiler}/grpc/grpclib_server.py (100%) rename src/{betterproto => betterproto2_compiler}/grpc/util/__init__.py (100%) rename src/{betterproto => betterproto2_compiler}/grpc/util/async_channel.py (100%) rename src/{betterproto => betterproto2_compiler}/lib/__init__.py (100%) rename src/{betterproto => betterproto2_compiler}/lib/google/__init__.py (100%) create mode 100644 src/betterproto2_compiler/lib/google/protobuf/__init__.py create mode 100644 src/betterproto2_compiler/lib/google/protobuf/compiler/__init__.py rename src/{betterproto => betterproto2_compiler}/lib/pydantic/__init__.py (100%) rename src/{betterproto => betterproto2_compiler}/lib/pydantic/google/__init__.py (100%) rename src/{betterproto => betterproto2_compiler}/lib/pydantic/google/protobuf/__init__.py (79%) rename src/{betterproto => betterproto2_compiler}/lib/pydantic/google/protobuf/compiler/__init__.py (82%) rename src/{betterproto => betterproto2_compiler}/lib/std/__init__.py (100%) rename src/{betterproto => betterproto2_compiler}/lib/std/google/__init__.py (100%) rename src/{betterproto => betterproto2_compiler}/lib/std/google/protobuf/__init__.py (79%) rename src/{betterproto => betterproto2_compiler}/lib/std/google/protobuf/compiler/__init__.py (82%) rename src/{betterproto => betterproto2_compiler}/plugin/__init__.py (100%) rename src/{betterproto => betterproto2_compiler}/plugin/__main__.py (100%) rename src/{betterproto => betterproto2_compiler}/plugin/compiler.py (100%) rename src/{betterproto => betterproto2_compiler}/plugin/main.py (91%) rename src/{betterproto => betterproto2_compiler}/plugin/models.py (98%) rename src/{betterproto => betterproto2_compiler}/plugin/module_validation.py (100%) rename src/{betterproto => betterproto2_compiler}/plugin/parser.py (98%) rename src/{betterproto => betterproto2_compiler}/plugin/plugin.bat (100%) rename src/{betterproto => betterproto2_compiler}/plugin/typing_compiler.py (100%) rename src/{betterproto => betterproto2_compiler}/py.typed (100%) rename src/{betterproto => betterproto2_compiler}/templates/header.py.j2 (100%) rename src/{betterproto => betterproto2_compiler}/templates/template.py.j2 (100%) rename src/{betterproto => betterproto2_compiler}/utils.py (100%) create mode 100644 tests/__init__.py create mode 100644 tests/generate.py create mode 100644 tests/inputs/bool/bool.json create mode 100644 tests/inputs/bool/bool.proto create mode 100644 tests/inputs/bool/test_bool.py create mode 100644 tests/inputs/bytes/bytes.json create mode 100644 tests/inputs/bytes/bytes.proto create mode 100644 tests/inputs/casing/casing.json create mode 100644 tests/inputs/casing/casing.proto create mode 100644 tests/inputs/casing/test_casing.py create mode 100644 
tests/inputs/casing_inner_class/casing_inner_class.proto create mode 100644 tests/inputs/casing_inner_class/test_casing_inner_class.py create mode 100644 tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.proto create mode 100644 tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.py create mode 100644 tests/inputs/config.py create mode 100644 tests/inputs/deprecated/deprecated.json create mode 100644 tests/inputs/deprecated/deprecated.proto create mode 100644 tests/inputs/documentation/documentation.proto create mode 100644 tests/inputs/double/double-negative.json create mode 100644 tests/inputs/double/double.json create mode 100644 tests/inputs/double/double.proto create mode 100644 tests/inputs/empty_repeated/empty_repeated.json create mode 100644 tests/inputs/empty_repeated/empty_repeated.proto create mode 100644 tests/inputs/empty_service/empty_service.proto create mode 100644 tests/inputs/entry/entry.proto create mode 100644 tests/inputs/enum/enum.json create mode 100644 tests/inputs/enum/enum.proto create mode 100644 tests/inputs/enum/test_enum.py create mode 100644 tests/inputs/example/example.proto create mode 100644 tests/inputs/example_service/example_service.proto create mode 100644 tests/inputs/example_service/test_example_service.py create mode 100644 tests/inputs/field_name_identical_to_type/field_name_identical_to_type.json create mode 100644 tests/inputs/field_name_identical_to_type/field_name_identical_to_type.proto create mode 100644 tests/inputs/fixed/fixed.json create mode 100644 tests/inputs/fixed/fixed.proto create mode 100644 tests/inputs/float/float.json create mode 100644 tests/inputs/float/float.proto create mode 100644 tests/inputs/google_impl_behavior_equivalence/google_impl_behavior_equivalence.proto create mode 100644 tests/inputs/google_impl_behavior_equivalence/test_google_impl_behavior_equivalence.py create mode 100644 tests/inputs/googletypes/googletypes-missing.json create mode 100644 tests/inputs/googletypes/googletypes.json create mode 100644 tests/inputs/googletypes/googletypes.proto create mode 100644 tests/inputs/googletypes_request/googletypes_request.proto create mode 100644 tests/inputs/googletypes_request/test_googletypes_request.py create mode 100644 tests/inputs/googletypes_response/googletypes_response.proto create mode 100644 tests/inputs/googletypes_response/test_googletypes_response.py create mode 100644 tests/inputs/googletypes_response_embedded/googletypes_response_embedded.proto create mode 100644 tests/inputs/googletypes_response_embedded/test_googletypes_response_embedded.py create mode 100644 tests/inputs/googletypes_service_returns_empty/googletypes_service_returns_empty.proto create mode 100644 tests/inputs/googletypes_service_returns_googletype/googletypes_service_returns_googletype.proto create mode 100644 tests/inputs/googletypes_struct/googletypes_struct.json create mode 100644 tests/inputs/googletypes_struct/googletypes_struct.proto create mode 100644 tests/inputs/googletypes_value/googletypes_value.json create mode 100644 tests/inputs/googletypes_value/googletypes_value.proto create mode 100644 tests/inputs/import_capitalized_package/capitalized.proto create mode 100644 tests/inputs/import_capitalized_package/test.proto create mode 100644 tests/inputs/import_child_package_from_package/child.proto create mode 100644 tests/inputs/import_child_package_from_package/import_child_package_from_package.proto create mode 100644 
tests/inputs/import_child_package_from_package/package_message.proto create mode 100644 tests/inputs/import_child_package_from_root/child.proto create mode 100644 tests/inputs/import_child_package_from_root/import_child_package_from_root.proto create mode 100644 tests/inputs/import_child_scoping_rules/child.proto create mode 100644 tests/inputs/import_child_scoping_rules/import_child_scoping_rules.proto create mode 100644 tests/inputs/import_child_scoping_rules/package.proto create mode 100644 tests/inputs/import_circular_dependency/import_circular_dependency.proto create mode 100644 tests/inputs/import_circular_dependency/other.proto create mode 100644 tests/inputs/import_circular_dependency/root.proto create mode 100644 tests/inputs/import_cousin_package/cousin.proto create mode 100644 tests/inputs/import_cousin_package/test.proto create mode 100644 tests/inputs/import_cousin_package_same_name/cousin.proto create mode 100644 tests/inputs/import_cousin_package_same_name/test.proto create mode 100644 tests/inputs/import_nested_child_package_from_root/child.proto create mode 100644 tests/inputs/import_nested_child_package_from_root/import_nested_child_package_from_root.proto create mode 100644 tests/inputs/import_packages_same_name/import_packages_same_name.proto create mode 100644 tests/inputs/import_packages_same_name/posts_v1.proto create mode 100644 tests/inputs/import_packages_same_name/users_v1.proto create mode 100644 tests/inputs/import_parent_package_from_child/import_parent_package_from_child.proto create mode 100644 tests/inputs/import_parent_package_from_child/parent_package_message.proto create mode 100644 tests/inputs/import_root_package_from_child/child.proto create mode 100644 tests/inputs/import_root_package_from_child/root.proto create mode 100644 tests/inputs/import_root_sibling/import_root_sibling.proto create mode 100644 tests/inputs/import_root_sibling/sibling.proto create mode 100644 tests/inputs/import_service_input_message/child_package_request_message.proto create mode 100644 tests/inputs/import_service_input_message/import_service_input_message.proto create mode 100644 tests/inputs/import_service_input_message/request_message.proto create mode 100644 tests/inputs/import_service_input_message/test_import_service_input_message.py create mode 100644 tests/inputs/int32/int32.json create mode 100644 tests/inputs/int32/int32.proto create mode 100644 tests/inputs/invalid_field/invalid_field.proto create mode 100644 tests/inputs/invalid_field/test_invalid_field.py create mode 100644 tests/inputs/map/map.json create mode 100644 tests/inputs/map/map.proto create mode 100644 tests/inputs/mapmessage/mapmessage.json create mode 100644 tests/inputs/mapmessage/mapmessage.proto create mode 100644 tests/inputs/namespace_builtin_types/namespace_builtin_types.json create mode 100644 tests/inputs/namespace_builtin_types/namespace_builtin_types.proto create mode 100644 tests/inputs/namespace_keywords/namespace_keywords.json create mode 100644 tests/inputs/namespace_keywords/namespace_keywords.proto create mode 100644 tests/inputs/nested/nested.json create mode 100644 tests/inputs/nested/nested.proto create mode 100644 tests/inputs/nested2/nested2.proto create mode 100644 tests/inputs/nested2/package.proto create mode 100644 tests/inputs/nestedtwice/nestedtwice.json create mode 100644 tests/inputs/nestedtwice/nestedtwice.proto create mode 100644 tests/inputs/nestedtwice/test_nestedtwice.py create mode 100644 tests/inputs/oneof/oneof-name.json create mode 100644 
tests/inputs/oneof/oneof.json create mode 100644 tests/inputs/oneof/oneof.proto create mode 100644 tests/inputs/oneof/oneof_name.json create mode 100644 tests/inputs/oneof/test_oneof.py create mode 100644 tests/inputs/oneof_default_value_serialization/oneof_default_value_serialization.proto create mode 100644 tests/inputs/oneof_default_value_serialization/test_oneof_default_value_serialization.py create mode 100644 tests/inputs/oneof_empty/oneof_empty.json create mode 100644 tests/inputs/oneof_empty/oneof_empty.proto create mode 100644 tests/inputs/oneof_empty/oneof_empty_maybe1.json create mode 100644 tests/inputs/oneof_empty/oneof_empty_maybe2.json create mode 100644 tests/inputs/oneof_empty/test_oneof_empty.py create mode 100644 tests/inputs/oneof_enum/oneof_enum-enum-0.json create mode 100644 tests/inputs/oneof_enum/oneof_enum-enum-1.json create mode 100644 tests/inputs/oneof_enum/oneof_enum.json create mode 100644 tests/inputs/oneof_enum/oneof_enum.proto create mode 100644 tests/inputs/oneof_enum/test_oneof_enum.py create mode 100644 tests/inputs/proto3_field_presence/proto3_field_presence.json create mode 100644 tests/inputs/proto3_field_presence/proto3_field_presence.proto create mode 100644 tests/inputs/proto3_field_presence/proto3_field_presence_default.json create mode 100644 tests/inputs/proto3_field_presence/proto3_field_presence_missing.json create mode 100644 tests/inputs/proto3_field_presence/test_proto3_field_presence.py create mode 100644 tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.json create mode 100644 tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.proto create mode 100644 tests/inputs/proto3_field_presence_oneof/test_proto3_field_presence_oneof.py create mode 100644 tests/inputs/recursivemessage/recursivemessage.json create mode 100644 tests/inputs/recursivemessage/recursivemessage.proto create mode 100644 tests/inputs/ref/ref.json create mode 100644 tests/inputs/ref/ref.proto create mode 100644 tests/inputs/ref/repeatedmessage.proto create mode 100644 tests/inputs/regression_387/regression_387.proto create mode 100644 tests/inputs/regression_387/test_regression_387.py create mode 100644 tests/inputs/regression_414/regression_414.proto create mode 100644 tests/inputs/regression_414/test_regression_414.py create mode 100644 tests/inputs/repeated/repeated.json create mode 100644 tests/inputs/repeated/repeated.proto create mode 100644 tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.json create mode 100644 tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.proto create mode 100644 tests/inputs/repeated_duration_timestamp/test_repeated_duration_timestamp.py create mode 100644 tests/inputs/repeatedmessage/repeatedmessage.json create mode 100644 tests/inputs/repeatedmessage/repeatedmessage.proto create mode 100644 tests/inputs/repeatedpacked/repeatedpacked.json create mode 100644 tests/inputs/repeatedpacked/repeatedpacked.proto create mode 100644 tests/inputs/rpc_empty_input_message/rpc_empty_input_message.proto create mode 100644 tests/inputs/rpc_empty_input_message/test_rpc_empty_input_message.py create mode 100644 tests/inputs/service/service.proto create mode 100644 tests/inputs/service_separate_packages/messages.proto create mode 100644 tests/inputs/service_separate_packages/service.proto create mode 100644 tests/inputs/service_uppercase/service.proto create mode 100644 tests/inputs/service_uppercase/test_service.py create mode 100644 tests/inputs/signed/signed.json create mode 100644 
tests/inputs/signed/signed.proto create mode 100644 tests/inputs/timestamp_dict_encode/test_timestamp_dict_encode.py create mode 100644 tests/inputs/timestamp_dict_encode/timestamp_dict_encode.json create mode 100644 tests/inputs/timestamp_dict_encode/timestamp_dict_encode.proto create mode 100644 tests/util.py diff --git a/poetry.lock b/poetry.lock index 59e4ba4f..86ac5956 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,36 +11,6 @@ files = [ {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} - -[[package]] -name = "ansicon" -version = "1.89.0" -description = "Python wrapper for loading Jason Hood's ANSICON" -optional = false -python-versions = "*" -files = [ - {file = "ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec"}, - {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"}, -] - -[[package]] -name = "asv" -version = "0.4.2" -description = "Airspeed Velocity: A simple Python history benchmarking tool" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "asv-0.4.2.tar.gz", hash = "sha256:9134f56b7a2f465420f17b5bb0dee16047a70f01029c996b7ab3f197de2d0779"}, -] - -[package.dependencies] -six = ">=1.4" - -[package.extras] -hg = ["python-hglib (>=1.5)"] - [[package]] name = "atomicwrites" version = "1.4.1" @@ -85,63 +55,28 @@ files = [ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] -name = "betterproto-rust-codec" -version = "0.1.1" -description = "Fast conversion between betterproto messages and Protobuf wire format." 
-optional = true -python-versions = ">=3.7" -files = [ - {file = "betterproto_rust_codec-0.1.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:38ec2ec1743d815a04ffc020e8e3791955601b239b097e4ae0721528d4d8b608"}, - {file = "betterproto_rust_codec-0.1.1-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:96a6deef8cda4b4d084df98b621e39a3123d8878dab551b86bbe733d885c4965"}, - {file = "betterproto_rust_codec-0.1.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72ce9f153c83b1d0559ab40b0d6a31d8b83ac486230cefc298c8a08f4a97738b"}, - {file = "betterproto_rust_codec-0.1.1-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b8a8485aabbe843208307a9a2c3fc8a8c09295fb22c840cebd5fa7ec6b8ddb36"}, - {file = "betterproto_rust_codec-0.1.1-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a395bf0c9dc86b7d3783ba43f161cd9f7a42809f38c70673cd9999d40eb4f1"}, - {file = "betterproto_rust_codec-0.1.1-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea99bee659b33500bb1afc4e0dbfa63530f50a7c549d0687565a10a0de63d18f"}, - {file = "betterproto_rust_codec-0.1.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:913d73365780d8f3da04cbaa1b2428ca5dc5372a5ee6f4ff2b9f30127362dff7"}, - {file = "betterproto_rust_codec-0.1.1-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a16dbbbc48f4a27b3b70205a2a71baa53fe0e915bc347b75d9b3864326446fa"}, - {file = "betterproto_rust_codec-0.1.1-cp37-abi3-win32.whl", hash = "sha256:06f95ac4c92aa1f28bd1be884c6db86f0bed05c9b93a1e4e3d80bbe2fc66847c"}, - {file = "betterproto_rust_codec-0.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:5b70b3aea76f336cc243b966f2f7496cb6366ad2679d7a999ff521d873f9de48"}, - {file = "betterproto_rust_codec-0.1.1.tar.gz", hash = "sha256:6f7cbe80c8e3f87df992d71568771082c869ed6856521e01db833d9d3b012af5"}, -] - -[[package]] -name = "blessed" -version = "1.20.0" -description = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." 
-optional = false -python-versions = ">=2.7" -files = [ - {file = "blessed-1.20.0-py2.py3-none-any.whl", hash = "sha256:0c542922586a265e699188e52d5f5ac5ec0dd517e5a1041d90d2bbf23f906058"}, - {file = "blessed-1.20.0.tar.gz", hash = "sha256:2cdd67f8746e048f00df47a2880f4d6acbcdb399031b604e34ba8f71d5787680"}, -] - -[package.dependencies] -jinxed = {version = ">=1.1.0", markers = "platform_system == \"Windows\""} -six = ">=1.9.0" -wcwidth = ">=0.1.4" - -[[package]] -name = "bpython" -version = "0.19" -description = "Fancy Interface to the Python Interpreter" +name = "betterproto" +version = "2.0.0b7" +description = "A better Protobuf / gRPC generator & library" optional = false -python-versions = "*" -files = [ - {file = "bpython-0.19-py2.py3-none-any.whl", hash = "sha256:95d95783bfadfa0a25300a648de5aba4423b0ee76b034022a81dde2b5e853c00"}, - {file = "bpython-0.19.tar.gz", hash = "sha256:476ce09a896c4d34bf5e56aca64650c56fdcfce45781a20dc1521221df8cc49c"}, -] +python-versions = "^3.8" +files = [] +develop = false [package.dependencies] -curtsies = ">=0.1.18" -greenlet = "*" -pygments = "*" -requests = "*" -six = ">=1.5" +grpclib = "^0.4.1" +python-dateutil = "^2.8" +typing-extensions = "^4.7.1" [package.extras] -jedi = ["jedi"] -urwid = ["urwid"] -watch = ["watchdog"] +compiler = ["jinja2 (>=3.0.3)", "ruff (>=0.7.4,<0.8.0)"] +rust-codec = ["betterproto-rust-codec (==0.1.1)"] + +[package.source] +type = "git" +url = "https://github.com/betterproto/python-betterproto2" +reference = "881bd6e09a809dc61add4ae4ed6a3c70c1fca00a" +resolved_reference = "881bd6e09a809dc61add4ae4ed6a3c70c1fca00a" [[package]] name = "cachelib" @@ -154,17 +89,6 @@ files = [ {file = "cachelib-0.10.2.tar.gz", hash = "sha256:593faeee62a7c037d50fc835617a01b887503f972fb52b188ae7e50e9cb69740"}, ] -[[package]] -name = "cachetools" -version = "5.5.0" -description = "Extensible memoizing collections and decorators" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, - {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, -] - [[package]] name = "certifi" version = "2024.12.14" @@ -187,17 +111,6 @@ files = [ {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, ] -[[package]] -name = "chardet" -version = "5.2.0" -description = "Universal encoding detector for Python 3" -optional = false -python-versions = ">=3.7" -files = [ - {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, - {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, -] - [[package]] name = "charset-normalizer" version = "3.4.0" @@ -339,148 +252,78 @@ files = [ [[package]] name = "coverage" -version = "7.6.1" +version = "7.6.9" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, - {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, - {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, - {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, - {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, - {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, - {file = 
"coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, - {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, - {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, - {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, - {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, - {file = 
"coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, - {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, - {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, - {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, - {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, - {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, 
- {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, - {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, - {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, + {file = "coverage-7.6.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85d9636f72e8991a1706b2b55b06c27545448baf9f6dbf51c4004609aacd7dcb"}, + {file = "coverage-7.6.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:608a7fd78c67bee8936378299a6cb9f5149bb80238c7a566fc3e6717a4e68710"}, + {file = "coverage-7.6.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96d636c77af18b5cb664ddf12dab9b15a0cfe9c0bde715da38698c8cea748bfa"}, + {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75cded8a3cff93da9edc31446872d2997e327921d8eed86641efafd350e1df1"}, + {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7b15f589593110ae767ce997775d645b47e5cbbf54fd322f8ebea6277466cec"}, + {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:44349150f6811b44b25574839b39ae35291f6496eb795b7366fef3bd3cf112d3"}, + {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d891c136b5b310d0e702e186d70cd16d1119ea8927347045124cb286b29297e5"}, + {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:db1dab894cc139f67822a92910466531de5ea6034ddfd2b11c0d4c6257168073"}, + {file = "coverage-7.6.9-cp310-cp310-win32.whl", hash = "sha256:41ff7b0da5af71a51b53f501a3bac65fb0ec311ebed1632e58fc6107f03b9198"}, + {file = "coverage-7.6.9-cp310-cp310-win_amd64.whl", hash = "sha256:35371f8438028fdccfaf3570b31d98e8d9eda8bb1d6ab9473f5a390969e98717"}, + {file = "coverage-7.6.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:932fc826442132dde42ee52cf66d941f581c685a6313feebed358411238f60f9"}, + {file = "coverage-7.6.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:085161be5f3b30fd9b3e7b9a8c301f935c8313dcf928a07b116324abea2c1c2c"}, + {file = "coverage-7.6.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc660a77e1c2bf24ddbce969af9447a9474790160cfb23de6be4fa88e3951c7"}, + {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c69e42c892c018cd3c8d90da61d845f50a8243062b19d228189b0224150018a9"}, + {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0824a28ec542a0be22f60c6ac36d679e0e262e5353203bea81d44ee81fe9c6d4"}, + {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4401ae5fc52ad8d26d2a5d8a7428b0f0c72431683f8e63e42e70606374c311a1"}, + {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98caba4476a6c8d59ec1eb00c7dd862ba9beca34085642d46ed503cc2d440d4b"}, + {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ee5defd1733fd6ec08b168bd4f5387d5b322f45ca9e0e6c817ea6c4cd36313e3"}, + {file = "coverage-7.6.9-cp311-cp311-win32.whl", hash = "sha256:f2d1ec60d6d256bdf298cb86b78dd715980828f50c46701abc3b0a2b3f8a0dc0"}, + {file = "coverage-7.6.9-cp311-cp311-win_amd64.whl", hash = 
"sha256:0d59fd927b1f04de57a2ba0137166d31c1a6dd9e764ad4af552912d70428c92b"}, + {file = "coverage-7.6.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:99e266ae0b5d15f1ca8d278a668df6f51cc4b854513daab5cae695ed7b721cf8"}, + {file = "coverage-7.6.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9901d36492009a0a9b94b20e52ebfc8453bf49bb2b27bca2c9706f8b4f5a554a"}, + {file = "coverage-7.6.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abd3e72dd5b97e3af4246cdada7738ef0e608168de952b837b8dd7e90341f015"}, + {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff74026a461eb0660366fb01c650c1d00f833a086b336bdad7ab00cc952072b3"}, + {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65dad5a248823a4996724a88eb51d4b31587aa7aa428562dbe459c684e5787ae"}, + {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22be16571504c9ccea919fcedb459d5ab20d41172056206eb2994e2ff06118a4"}, + {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f957943bc718b87144ecaee70762bc2bc3f1a7a53c7b861103546d3a403f0a6"}, + {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ae1387db4aecb1f485fb70a6c0148c6cdaebb6038f1d40089b1fc84a5db556f"}, + {file = "coverage-7.6.9-cp312-cp312-win32.whl", hash = "sha256:1a330812d9cc7ac2182586f6d41b4d0fadf9be9049f350e0efb275c8ee8eb692"}, + {file = "coverage-7.6.9-cp312-cp312-win_amd64.whl", hash = "sha256:b12c6b18269ca471eedd41c1b6a1065b2f7827508edb9a7ed5555e9a56dcfc97"}, + {file = "coverage-7.6.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:899b8cd4781c400454f2f64f7776a5d87bbd7b3e7f7bda0cb18f857bb1334664"}, + {file = "coverage-7.6.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:61f70dc68bd36810972e55bbbe83674ea073dd1dcc121040a08cdf3416c5349c"}, + {file = "coverage-7.6.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a289d23d4c46f1a82d5db4abeb40b9b5be91731ee19a379d15790e53031c014"}, + {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e216d8044a356fc0337c7a2a0536d6de07888d7bcda76febcb8adc50bdbbd00"}, + {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c026eb44f744acaa2bda7493dad903aa5bf5fc4f2554293a798d5606710055d"}, + {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e77363e8425325384f9d49272c54045bbed2f478e9dd698dbc65dbc37860eb0a"}, + {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:777abfab476cf83b5177b84d7486497e034eb9eaea0d746ce0c1268c71652077"}, + {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:447af20e25fdbe16f26e84eb714ba21d98868705cb138252d28bc400381f6ffb"}, + {file = "coverage-7.6.9-cp313-cp313-win32.whl", hash = "sha256:d872ec5aeb086cbea771c573600d47944eea2dcba8be5f3ee649bfe3cb8dc9ba"}, + {file = "coverage-7.6.9-cp313-cp313-win_amd64.whl", hash = "sha256:fd1213c86e48dfdc5a0cc676551db467495a95a662d2396ecd58e719191446e1"}, + {file = "coverage-7.6.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9e7484d286cd5a43744e5f47b0b3fb457865baf07bafc6bee91896364e1419"}, + {file = "coverage-7.6.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e5ea1cf0872ee455c03e5674b5bca5e3e68e159379c1af0903e89f5eba9ccc3a"}, + {file = 
"coverage-7.6.9-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d10e07aa2b91835d6abec555ec8b2733347956991901eea6ffac295f83a30e4"}, + {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13a9e2d3ee855db3dd6ea1ba5203316a1b1fd8eaeffc37c5b54987e61e4194ae"}, + {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c38bf15a40ccf5619fa2fe8f26106c7e8e080d7760aeccb3722664c8656b030"}, + {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d5275455b3e4627c8e7154feaf7ee0743c2e7af82f6e3b561967b1cca755a0be"}, + {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8f8770dfc6e2c6a2d4569f411015c8d751c980d17a14b0530da2d7f27ffdd88e"}, + {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8d2dfa71665a29b153a9681edb1c8d9c1ea50dfc2375fb4dac99ea7e21a0bcd9"}, + {file = "coverage-7.6.9-cp313-cp313t-win32.whl", hash = "sha256:5e6b86b5847a016d0fbd31ffe1001b63355ed309651851295315031ea7eb5a9b"}, + {file = "coverage-7.6.9-cp313-cp313t-win_amd64.whl", hash = "sha256:97ddc94d46088304772d21b060041c97fc16bdda13c6c7f9d8fcd8d5ae0d8611"}, + {file = "coverage-7.6.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:adb697c0bd35100dc690de83154627fbab1f4f3c0386df266dded865fc50a902"}, + {file = "coverage-7.6.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be57b6d56e49c2739cdf776839a92330e933dd5e5d929966fbbd380c77f060be"}, + {file = "coverage-7.6.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1592791f8204ae9166de22ba7e6705fa4ebd02936c09436a1bb85aabca3e599"}, + {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e12ae8cc979cf83d258acb5e1f1cf2f3f83524d1564a49d20b8bec14b637f08"}, + {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5555cff66c4d3d6213a296b360f9e1a8e323e74e0426b6c10ed7f4d021e464"}, + {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9389a429e0e5142e69d5bf4a435dd688c14478a19bb901735cdf75e57b13845"}, + {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:592ac539812e9b46046620341498caf09ca21023c41c893e1eb9dbda00a70cbf"}, + {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a27801adef24cc30871da98a105f77995e13a25a505a0161911f6aafbd66e678"}, + {file = "coverage-7.6.9-cp39-cp39-win32.whl", hash = "sha256:8e3c3e38930cfb729cb8137d7f055e5a473ddaf1217966aa6238c88bd9fd50e6"}, + {file = "coverage-7.6.9-cp39-cp39-win_amd64.whl", hash = "sha256:e28bf44afa2b187cc9f41749138a64435bf340adfcacb5b2290c070ce99839d4"}, + {file = "coverage-7.6.9-pp39.pp310-none-any.whl", hash = "sha256:f3ca78518bc6bc92828cd11867b121891d75cae4ea9e908d72030609b996db1b"}, + {file = "coverage-7.6.9.tar.gz", hash = "sha256:4a8d8977b0c6ef5aeadcb644da9e69ae0dcfe66ec7f368c89c72e058bd71164d"}, ] [package.extras] toml = ["tomli"] -[[package]] -name = "curtsies" -version = "0.4.2" -description = "Curses-like terminal wrapper, with colored strings!" 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "curtsies-0.4.2-py3-none-any.whl", hash = "sha256:f24d676a8c4711fb9edba1ab7e6134bc52305a222980b3b717bb303f5e94cec6"}, - {file = "curtsies-0.4.2.tar.gz", hash = "sha256:6ebe33215bd7c92851a506049c720cca4cf5c192c1665c1d7a98a04c4702760e"}, -] - -[package.dependencies] -blessed = ">=1.5" -cwcwidth = "*" - -[[package]] -name = "cwcwidth" -version = "0.1.9" -description = "Python bindings for wc(s)width" -optional = false -python-versions = ">=3.8" -files = [ - {file = "cwcwidth-0.1.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704f0d6888aa5e81e76d9f76709385f9f55aca8c450ee82cc722054814a7791f"}, - {file = "cwcwidth-0.1.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0633158205b50f253ad04e301156807e309a9fb9479a520418e010da6df13604"}, - {file = "cwcwidth-0.1.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a5407d0933c3aab8ee92cffd9e4f09010f25af10ebdfa19776748402bba9261"}, - {file = "cwcwidth-0.1.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:72490e07dfbc599fdf6efe26a13cfbf725f0513b181c3386d65bfd84f6175924"}, - {file = "cwcwidth-0.1.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf71151ae06e95f266bef91392c1562539b2eed847fd1f00f7b5b4ca3fd41a67"}, - {file = "cwcwidth-0.1.9-cp310-cp310-win32.whl", hash = "sha256:3e3c186b5c171d85f2b7f093e7efb33fd9b6e55b791ff75a0f101b18ec0433cd"}, - {file = "cwcwidth-0.1.9-cp310-cp310-win_amd64.whl", hash = "sha256:ae17e493ffc18497c2602f8f42a0d8e490ea42ab3ccfbe5e4a6069a6d24f3b36"}, - {file = "cwcwidth-0.1.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b1c3eb0a8c1b25c4a17b6b9bbf7d25ce9df3ea43b6f87903c51bc12434a2cc29"}, - {file = "cwcwidth-0.1.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c8752815ce4e40e7b34b7fe039276a5fbfb1b077131614381b13ef3b7bb21ff"}, - {file = "cwcwidth-0.1.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:368ace13080dbaacdc247370d8a965a749b124aa50d0b1b6eb87601826db870f"}, - {file = "cwcwidth-0.1.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ca9a653661e152a426bdb51a272f36bc79f9830e6a7169abe8110ec367c3518c"}, - {file = "cwcwidth-0.1.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f96386cc29e6eef8ef066d7dd3c767c5119d66506dabea20dd344dabb3f2d225"}, - {file = "cwcwidth-0.1.9-cp311-cp311-win32.whl", hash = "sha256:f6ba88970ec12fdbed5554beb1b9a25d8271fc3d0d9e60639db700a79bed1863"}, - {file = "cwcwidth-0.1.9-cp311-cp311-win_amd64.whl", hash = "sha256:aa6725e7b3571fdf6ce7c02d1dd2d69e00d166bb6df44e46ab215067028b3a03"}, - {file = "cwcwidth-0.1.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:42de102d5191fc68ef3ff6530f60c4895148ddc21aa0acaaf4612e5f7f0c38c4"}, - {file = "cwcwidth-0.1.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:877e48c615b3fec88b7e640f9cf9d96704497657fb5aad2b7c0b0c59ecabff69"}, - {file = "cwcwidth-0.1.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdbaf0a8dad20eb685df11a195a2449fe230b08a5b356d036c8d7e59d4128a88"}, - {file = "cwcwidth-0.1.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f6e0e023c4b127c47fd4c44cf537be209b9a28d8725f4f576f4d63744a23aa38"}, - {file = "cwcwidth-0.1.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:b4f7d24236ce3c9d3b5e07fd75d232452f19bdddb6ae8bbfdcb97b6cb02835e8"}, - {file = "cwcwidth-0.1.9-cp312-cp312-win32.whl", hash = "sha256:ba9da6c911bf108334426890bc9f57b839a38e1afc4383a41bd70adbce470db3"}, - {file = "cwcwidth-0.1.9-cp312-cp312-win_amd64.whl", hash = "sha256:40466f16e85c338e8fc3eee87a8c9ca23416cc68b3049f68cb4cead5fb8b71b3"}, - {file = "cwcwidth-0.1.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:167f59c3c1e1d8e231a1abd666af4e73dd8a94917efb6522e9b610ac4587903a"}, - {file = "cwcwidth-0.1.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:afc745f18c9e3c38851a931c0c0a7e479d6494911ba1353f998d707f95a895b4"}, - {file = "cwcwidth-0.1.9-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8d55c47cbec4796e89cfedc89c52e6c4c2faeb77489a763415b9f76d8fc14db"}, - {file = "cwcwidth-0.1.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c858842849ce2cfdf207095253da83831d9407771c8073f6b75f24d3faf1a1eb"}, - {file = "cwcwidth-0.1.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cc049ce273f32b632f5ead649b2120f8b2b78035d7b069fdc460c4be9affddb5"}, - {file = "cwcwidth-0.1.9-cp38-cp38-win32.whl", hash = "sha256:1bafe978a5b7915848244a952829e3f8757c0cebef581c8250da6064c906c38c"}, - {file = "cwcwidth-0.1.9-cp38-cp38-win_amd64.whl", hash = "sha256:024d1b21e6123bf30a849e60eea3482f556bbd00d39215f86c904e5bd81fc1b6"}, - {file = "cwcwidth-0.1.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d367da5e6fb538388817bf5b2d6dd4db90e5e631d99c34055589d007b5c94bc"}, - {file = "cwcwidth-0.1.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad721d9dbc14eafd06176e4f5594942336b1e813de2a5ab7bd64254393c5713f"}, - {file = "cwcwidth-0.1.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:711ace9796cb6767ff29095ff5b0ec4619e7297854eb4b91ba99154590eddcc9"}, - {file = "cwcwidth-0.1.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:945615a7b8cdcbcd8e06d399f96a2b09440c3a4c5cb3c2d0109f00d80da27a12"}, - {file = "cwcwidth-0.1.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ffaf706abe400282f299463594d8887566e2a280cd0255110bd4397cc7be2910"}, - {file = "cwcwidth-0.1.9-cp39-cp39-win32.whl", hash = "sha256:03093cac6f8e4326b1c30169e024fe2894f76c6ffddf6464e489bb33cb3a2897"}, - {file = "cwcwidth-0.1.9-cp39-cp39-win_amd64.whl", hash = "sha256:0ddef2c504e6f4fd6122b46d55061f487add1ebb86596ae70ffc2a8b8955c8bc"}, - {file = "cwcwidth-0.1.9.tar.gz", hash = "sha256:f19d11a0148d4a8cacd064c96e93bca8ce3415a186ae8204038f45e108db76b8"}, -] - [[package]] name = "distlib" version = "0.3.9" @@ -525,92 +368,6 @@ python-dateutil = ">=2.8.1" [package.extras] dev = ["flake8", "markdown", "twine", "wheel"] -[[package]] -name = "greenlet" -version = "3.1.1" -description = "Lightweight in-process concurrent programming" -optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, - {file = 
"greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, - {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, - {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, - {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, - {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, - {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, - {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, - {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, - {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, - {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, - {file = 
"greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, - {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, - {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, - {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, - {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, - {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, - {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, - {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, - {file 
= "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, - {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, - {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, - {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, - {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, - {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, - {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, - {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, - {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, - {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, - {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, - {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, - {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, - {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, - {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, - {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - [[package]] name = "griffe" version = "1.5.1" @@ -810,13 +567,13 @@ files = [ [[package]] name = "identify" -version = "2.6.1" +version = "2.6.3" description = "File identification library for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"}, - {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"}, + {file = "identify-2.6.3-py2.py3-none-any.whl", hash = "sha256:9edba65473324c2ea9684b1f944fe3191db3345e50b6d04571d10ed164f8d7bd"}, + {file = "identify-2.6.3.tar.gz", hash = "sha256:62f5dae9b5fef52c84cc188514e9ea4f3f636b1d8799ab5ebc475471f9e47a02"}, ] [package.extras] @@ -864,20 +621,6 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] -[[package]] -name = "jinxed" -version = "1.3.0" -description = "Jinxed Terminal Library" -optional = false -python-versions = "*" -files = [ - {file = "jinxed-1.3.0-py2.py3-none-any.whl", hash = "sha256:b993189f39dc2d7504d802152671535b06d380b26d78070559551cbf92df4fc5"}, - {file = "jinxed-1.3.0.tar.gz", hash = "sha256:1593124b18a41b7a3da3b078471442e51dbad3d77b4d4f2b0c26ab6f7d660dbf"}, -] - -[package.dependencies] -ansicon = {version = "*", markers = "platform_system == \"Windows\""} - [[package]] name = "markdown" version = "3.7" @@ -895,71 +638,72 @@ testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" -version = "2.1.5" +version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = 
"MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = 
"MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] @@ -1378,13 +1122,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "poethepoet" -version = "0.30.0" +version = "0.31.1" description = "A task runner that works well with poetry." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "poethepoet-0.30.0-py3-none-any.whl", hash = "sha256:bf875741407a98da9e96f2f2d0b2c4c34f56d89939a7f53a4b6b3a64b546ec4e"}, - {file = "poethepoet-0.30.0.tar.gz", hash = "sha256:9f7ccda2d6525616ce989ca8ef973739fd668f50bef0b9d3631421d504d9ae4a"}, + {file = "poethepoet-0.31.1-py3-none-any.whl", hash = "sha256:7fdfa0ac6074be9936723e7231b5bfaad2923e96c674a9857e81d326cf8ccdc2"}, + {file = "poethepoet-0.31.1.tar.gz", hash = "sha256:d6b66074edf85daf115bb916eae0afd6387d19e1562e1c9ef7d61d5c585696aa"}, ] [package.dependencies] @@ -1446,18 +1190,18 @@ files = [ [[package]] name = "pydantic" -version = "2.10.3" +version = "2.10.4" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"}, - {file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"}, + {file = "pydantic-2.10.4-py3-none-any.whl", hash = "sha256:597e135ea68be3a37552fb524bc7d0d66dcf93d395acd93a00682f1efcb8ee3d"}, + {file = "pydantic-2.10.4.tar.gz", hash = "sha256:82f12e9723da6de4fe2ba888b5971157b3be7ad914267dea8f05f82b28254f06"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.27.1" +pydantic-core = "2.27.2" typing-extensions = ">=4.12.2" [package.extras] @@ -1466,111 +1210,111 @@ timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.27.1" +version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, - {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, - {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, - {file = 
"pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, - {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, - {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, - {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, - {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, - {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, - {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, - {file = 
"pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, - {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, - {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, - {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, - {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, - {file = 
"pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, - {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, - {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, - 
{file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, - {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + 
{file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, ] [package.dependencies] @@ -1608,25 +1352,6 @@ pyyaml = "*" [package.extras] extra = ["pygments (>=2.12)"] -[[package]] -name = "pyproject-api" -version = "1.8.0" -description = "API to interact with the python pyproject.toml based projects" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyproject_api-1.8.0-py3-none-any.whl", hash = "sha256:3d7d347a047afe796fd5d1885b1e391ba29be7169bd2f102fcd378f04273d228"}, - {file = "pyproject_api-1.8.0.tar.gz", hash = "sha256:77b8049f2feb5d33eefcc21b57f1e279636277a8ac8ad6b5871037b243778496"}, -] - -[package.dependencies] -packaging = ">=24.1" -tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} - -[package.extras] -docs = ["furo (>=2024.8.6)", "sphinx-autodoc-typehints (>=2.4.1)"] -testing = ["covdefaults (>=2.3)", "pytest (>=8.3.3)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "setuptools (>=75.1)"] - [[package]] name = "pytest" version = "6.2.5" @@ -1921,7 +1646,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "ruff" version = "0.7.4" description = "An extremely fast Python linter and code formatter, written in Rust." -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "ruff-0.7.4-py3-none-linux_armv6l.whl", hash = "sha256:a4919925e7684a3f18e18243cd6bea7cfb8e968a6eaa8437971f681b7ec51478"}, @@ -1946,23 +1671,23 @@ files = [ [[package]] name = "setuptools" -version = "75.3.0" +version = "75.6.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "setuptools-75.3.0-py3-none-any.whl", hash = "sha256:f2504966861356aa38616760c0f66568e535562374995367b4e69c7143cf6bcd"}, - {file = "setuptools-75.3.0.tar.gz", hash = "sha256:fba5dd4d766e97be1b1681d98712680ae8f2f26d7881245f2ce9e40714f1a686"}, + {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"}, + {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] +core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path 
(>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.12.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] [[package]] name = "six" @@ -2038,33 +1763,6 @@ files = [ {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, ] -[[package]] -name = "tox" -version = "4.23.2" -description = "tox is a generic virtualenv management and test command line tool" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tox-4.23.2-py3-none-any.whl", hash = "sha256:452bc32bb031f2282881a2118923176445bac783ab97c874b8770ab4c3b76c38"}, - {file = "tox-4.23.2.tar.gz", hash = "sha256:86075e00e555df6e82e74cfc333917f91ecb47ffbc868dcafbd2672e332f4a2c"}, -] - -[package.dependencies] -cachetools = ">=5.5" -chardet = ">=5.2" -colorama = ">=0.4.6" -filelock = ">=3.16.1" -packaging = ">=24.1" -platformdirs = ">=4.3.6" -pluggy = ">=1.5" -pyproject-api = ">=1.8" -tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.12.2", markers = "python_version < \"3.11\""} -virtualenv = ">=20.26.6" - -[package.extras] -test = ["devpi-process (>=1.0.2)", "pytest (>=8.3.3)", "pytest-mock (>=3.14)"] - [[package]] name = "typing-extensions" version = "4.12.2" @@ -2155,22 +1853,10 @@ files = [ [package.extras] watchmedo = ["PyYAML (>=3.10)"] -[[package]] -name = "wcwidth" -version = "0.2.13" -description = "Measures the displayed width of unicode strings in a terminal" -optional = false -python-versions = "*" -files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, -] - [extras] -compiler = ["jinja2", "ruff"] -rust-codec = ["betterproto-rust-codec"] +rust-codec = [] [metadata] lock-version = "2.0" -python-versions = "^3.8" -content-hash = "4a69db59dc36e8c639b7b6bd950d829415c14265979e8ef5f34d0bedd07aaf64" +python-versions = "^3.10" +content-hash = "a6b16fe4cc66b8deac7f45dc580ab9d7833f099affc3001effb1c38641531573" diff --git a/pyproject.toml b/pyproject.toml index d4a3f76f..d84e1155 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [tool.poetry] -name = "betterproto" +name = "betterproto2_compiler" version = "2.0.0b7" description = "A better Protobuf / gRPC generator & library" authors = ["Daniel G. 
Taylor "] @@ -8,11 +8,12 @@ repository = "https://github.com/danielgtaylor/python-betterproto" keywords = ["protobuf", "gRPC"] license = "MIT" packages = [ - { include = "betterproto", from = "src" } + { include = "betterproto2_compiler", from = "src" } ] [tool.poetry.dependencies] python = "^3.10" +betterproto = { git = "https://github.com/betterproto/python-betterproto2", rev = "881bd6e09a809dc61add4ae4ed6a3c70c1fca00a" } # The Ruff version is pinned. To update it, also update it in .pre-commit-config.yaml ruff = "~0.7.4" grpclib = "^0.4.1" @@ -24,6 +25,7 @@ typing-extensions = "^4.7.1" jinja2 = ">=3.0.3" mypy = "^1.11.2" pre-commit = "^2.17.0" +grpcio-tools = "^1.54.2" mkdocs-material = {version = "^9.5.49", python = ">=3.10"} mkdocstrings = {version = "^0.27.0", python = ">=3.10", extras = ["python"]} @@ -39,14 +41,14 @@ cachelib = "^0.10.2" tomlkit = ">=0.7.0" [tool.poetry.scripts] -protoc-gen-python_betterproto = "betterproto.plugin:main" +protoc-gen-python_betterproto = "betterproto2_compiler.plugin:main" [tool.poetry.extras] rust-codec = ["betterproto-rust-codec"] [tool.ruff] extend-exclude = ["tests/output_*"] -target-version = "py38" +target-version = "py310" line-length = 120 [tool.ruff.lint] @@ -108,7 +110,7 @@ help = "Build the sphinx docs" [tool.poe.tasks.generate_lib] cmd = """ protoc - --plugin=protoc-gen-custom=src/betterproto/plugin/main.py + --plugin=protoc-gen-custom=src/betterproto2_compiler/plugin/main.py --custom_opt=INCLUDE_GOOGLE --custom_out=src/betterproto/lib/std -I /usr/local/include/ diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index bec2b960..00000000 --- a/pytest.ini +++ /dev/null @@ -1,5 +0,0 @@ -[pytest] -python_files = test_*.py -python_classes = -norecursedirs = **/output_* -addopts = -p no:warnings \ No newline at end of file diff --git a/src/betterproto/lib/google/protobuf/__init__.py b/src/betterproto/lib/google/protobuf/__init__.py deleted file mode 100644 index dfc9d558..00000000 --- a/src/betterproto/lib/google/protobuf/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from betterproto.lib.std.google.protobuf import * diff --git a/src/betterproto/lib/google/protobuf/compiler/__init__.py b/src/betterproto/lib/google/protobuf/compiler/__init__.py deleted file mode 100644 index 59bf56f1..00000000 --- a/src/betterproto/lib/google/protobuf/compiler/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from betterproto.lib.std.google.protobuf.compiler import * diff --git a/src/betterproto/__init__.py b/src/betterproto2_compiler/__init__.py similarity index 100% rename from src/betterproto/__init__.py rename to src/betterproto2_compiler/__init__.py diff --git a/src/betterproto/_types.py b/src/betterproto2_compiler/_types.py similarity index 100% rename from src/betterproto/_types.py rename to src/betterproto2_compiler/_types.py diff --git a/src/betterproto/_version.py b/src/betterproto2_compiler/_version.py similarity index 100% rename from src/betterproto/_version.py rename to src/betterproto2_compiler/_version.py diff --git a/src/betterproto/casing.py b/src/betterproto2_compiler/casing.py similarity index 100% rename from src/betterproto/casing.py rename to src/betterproto2_compiler/casing.py diff --git a/src/betterproto/compile/__init__.py b/src/betterproto2_compiler/compile/__init__.py similarity index 100% rename from src/betterproto/compile/__init__.py rename to src/betterproto2_compiler/compile/__init__.py diff --git a/src/betterproto/compile/importing.py b/src/betterproto2_compiler/compile/importing.py similarity index 100% rename from 
src/betterproto/compile/importing.py rename to src/betterproto2_compiler/compile/importing.py diff --git a/src/betterproto/compile/naming.py b/src/betterproto2_compiler/compile/naming.py similarity index 92% rename from src/betterproto/compile/naming.py rename to src/betterproto2_compiler/compile/naming.py index baa9fc38..c5b5a39e 100644 --- a/src/betterproto/compile/naming.py +++ b/src/betterproto2_compiler/compile/naming.py @@ -1,4 +1,4 @@ -from betterproto import casing +from betterproto2_compiler import casing def pythonize_class_name(name: str) -> str: diff --git a/src/betterproto/enum.py b/src/betterproto2_compiler/enum.py similarity index 100% rename from src/betterproto/enum.py rename to src/betterproto2_compiler/enum.py diff --git a/src/betterproto/grpc/__init__.py b/src/betterproto2_compiler/grpc/__init__.py similarity index 100% rename from src/betterproto/grpc/__init__.py rename to src/betterproto2_compiler/grpc/__init__.py diff --git a/src/betterproto/grpc/grpclib_client.py b/src/betterproto2_compiler/grpc/grpclib_client.py similarity index 100% rename from src/betterproto/grpc/grpclib_client.py rename to src/betterproto2_compiler/grpc/grpclib_client.py diff --git a/src/betterproto/grpc/grpclib_server.py b/src/betterproto2_compiler/grpc/grpclib_server.py similarity index 100% rename from src/betterproto/grpc/grpclib_server.py rename to src/betterproto2_compiler/grpc/grpclib_server.py diff --git a/src/betterproto/grpc/util/__init__.py b/src/betterproto2_compiler/grpc/util/__init__.py similarity index 100% rename from src/betterproto/grpc/util/__init__.py rename to src/betterproto2_compiler/grpc/util/__init__.py diff --git a/src/betterproto/grpc/util/async_channel.py b/src/betterproto2_compiler/grpc/util/async_channel.py similarity index 100% rename from src/betterproto/grpc/util/async_channel.py rename to src/betterproto2_compiler/grpc/util/async_channel.py diff --git a/src/betterproto/lib/__init__.py b/src/betterproto2_compiler/lib/__init__.py similarity index 100% rename from src/betterproto/lib/__init__.py rename to src/betterproto2_compiler/lib/__init__.py diff --git a/src/betterproto/lib/google/__init__.py b/src/betterproto2_compiler/lib/google/__init__.py similarity index 100% rename from src/betterproto/lib/google/__init__.py rename to src/betterproto2_compiler/lib/google/__init__.py diff --git a/src/betterproto2_compiler/lib/google/protobuf/__init__.py b/src/betterproto2_compiler/lib/google/protobuf/__init__.py new file mode 100644 index 00000000..7f8a9d85 --- /dev/null +++ b/src/betterproto2_compiler/lib/google/protobuf/__init__.py @@ -0,0 +1 @@ +from betterproto2_compiler.lib.std.google.protobuf import * diff --git a/src/betterproto2_compiler/lib/google/protobuf/compiler/__init__.py b/src/betterproto2_compiler/lib/google/protobuf/compiler/__init__.py new file mode 100644 index 00000000..b3d40e58 --- /dev/null +++ b/src/betterproto2_compiler/lib/google/protobuf/compiler/__init__.py @@ -0,0 +1 @@ +from betterproto2_compiler.lib.std.google.protobuf.compiler import * diff --git a/src/betterproto/lib/pydantic/__init__.py b/src/betterproto2_compiler/lib/pydantic/__init__.py similarity index 100% rename from src/betterproto/lib/pydantic/__init__.py rename to src/betterproto2_compiler/lib/pydantic/__init__.py diff --git a/src/betterproto/lib/pydantic/google/__init__.py b/src/betterproto2_compiler/lib/pydantic/google/__init__.py similarity index 100% rename from src/betterproto/lib/pydantic/google/__init__.py rename to src/betterproto2_compiler/lib/pydantic/google/__init__.py 
diff --git a/src/betterproto/lib/pydantic/google/protobuf/__init__.py b/src/betterproto2_compiler/lib/pydantic/google/protobuf/__init__.py similarity index 79% rename from src/betterproto/lib/pydantic/google/protobuf/__init__.py rename to src/betterproto2_compiler/lib/pydantic/google/protobuf/__init__.py index 8a91dd2b..1f51973a 100644 --- a/src/betterproto/lib/pydantic/google/protobuf/__init__.py +++ b/src/betterproto2_compiler/lib/pydantic/google/protobuf/__init__.py @@ -9,7 +9,7 @@ from typing_extensions import Self -from betterproto import hybridmethod +from betterproto2_compiler import hybridmethod if TYPE_CHECKING: from dataclasses import dataclass @@ -25,10 +25,10 @@ from pydantic import model_validator from pydantic.dataclasses import rebuild_dataclass -import betterproto +import betterproto2_compiler -class Syntax(betterproto.Enum): +class Syntax(betterproto2_compiler.Enum): """The syntax in which a protocol buffer element is defined.""" PROTO2 = 0 @@ -47,7 +47,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class FieldKind(betterproto.Enum): +class FieldKind(betterproto2_compiler.Enum): """Basic field types.""" TYPE_UNKNOWN = 0 @@ -114,7 +114,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class FieldCardinality(betterproto.Enum): +class FieldCardinality(betterproto2_compiler.Enum): """Whether a field is optional, required, or repeated.""" CARDINALITY_UNKNOWN = 0 @@ -136,7 +136,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class Edition(betterproto.Enum): +class Edition(betterproto2_compiler.Enum): """The full set of known editions.""" UNKNOWN = 0 @@ -183,7 +183,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class ExtensionRangeOptionsVerificationState(betterproto.Enum): +class ExtensionRangeOptionsVerificationState(betterproto2_compiler.Enum): """The verification state of the extension range.""" DECLARATION = 0 @@ -198,7 +198,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class FieldDescriptorProtoType(betterproto.Enum): +class FieldDescriptorProtoType(betterproto2_compiler.Enum): TYPE_DOUBLE = 1 """ 0 is reserved for errors. 
@@ -250,7 +250,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class FieldDescriptorProtoLabel(betterproto.Enum): +class FieldDescriptorProtoLabel(betterproto2_compiler.Enum): LABEL_OPTIONAL = 1 """0 is reserved for errors""" @@ -269,7 +269,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class FileOptionsOptimizeMode(betterproto.Enum): +class FileOptionsOptimizeMode(betterproto2_compiler.Enum): """Generated classes can be optimized for speed or code size.""" SPEED = 1 @@ -285,7 +285,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class FieldOptionsCType(betterproto.Enum): +class FieldOptionsCType(betterproto2_compiler.Enum): STRING = 0 """Default mode.""" @@ -308,7 +308,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class FieldOptionsJsType(betterproto.Enum): +class FieldOptionsJsType(betterproto2_compiler.Enum): JS_NORMAL = 0 """Use the default type.""" @@ -325,7 +325,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class FieldOptionsOptionRetention(betterproto.Enum): +class FieldOptionsOptionRetention(betterproto2_compiler.Enum): """ If set to RETENTION_SOURCE, the option will be omitted from the binary. Note: as of January 2023, support for this is in progress and does not yet @@ -343,7 +343,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class FieldOptionsOptionTargetType(betterproto.Enum): +class FieldOptionsOptionTargetType(betterproto2_compiler.Enum): """ This indicates the types of entities that the field may apply to when used as an option. If it is unset, then the field may be freely used as an @@ -369,7 +369,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class MethodOptionsIdempotencyLevel(betterproto.Enum): +class MethodOptionsIdempotencyLevel(betterproto2_compiler.Enum): """ Is this method side-effect-free (or safe in HTTP parlance), or idempotent, or neither? 
HTTP based RPC implementation may choose GET verb for safe @@ -387,7 +387,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class FeatureSetFieldPresence(betterproto.Enum): +class FeatureSetFieldPresence(betterproto2_compiler.Enum): FIELD_PRESENCE_UNKNOWN = 0 EXPLICIT = 1 IMPLICIT = 2 @@ -400,7 +400,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class FeatureSetEnumType(betterproto.Enum): +class FeatureSetEnumType(betterproto2_compiler.Enum): ENUM_TYPE_UNKNOWN = 0 OPEN = 1 CLOSED = 2 @@ -412,7 +412,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class FeatureSetRepeatedFieldEncoding(betterproto.Enum): +class FeatureSetRepeatedFieldEncoding(betterproto2_compiler.Enum): REPEATED_FIELD_ENCODING_UNKNOWN = 0 PACKED = 1 EXPANDED = 2 @@ -424,7 +424,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class FeatureSetUtf8Validation(betterproto.Enum): +class FeatureSetUtf8Validation(betterproto2_compiler.Enum): UTF8_VALIDATION_UNKNOWN = 0 VERIFY = 2 NONE = 3 @@ -436,7 +436,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class FeatureSetMessageEncoding(betterproto.Enum): +class FeatureSetMessageEncoding(betterproto2_compiler.Enum): MESSAGE_ENCODING_UNKNOWN = 0 LENGTH_PREFIXED = 1 DELIMITED = 2 @@ -448,7 +448,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class FeatureSetJsonFormat(betterproto.Enum): +class FeatureSetJsonFormat(betterproto2_compiler.Enum): JSON_FORMAT_UNKNOWN = 0 ALLOW = 1 LEGACY_BEST_EFFORT = 2 @@ -460,7 +460,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class GeneratedCodeInfoAnnotationSemantic(betterproto.Enum): +class GeneratedCodeInfoAnnotationSemantic(betterproto2_compiler.Enum): """ Represents the identified object's effect on the element in the original .proto file. @@ -482,7 +482,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): return core_schema.int_schema(ge=0) -class NullValue(betterproto.Enum): +class NullValue(betterproto2_compiler.Enum): """ `NullValue` is a singleton enumeration to represent the null value for the `Value` type union. @@ -501,7 +501,7 @@ def __get_pydantic_core_schema__(cls, _source_type, _handler): @dataclass(eq=False, repr=False) -class Any(betterproto.Message): +class Any(betterproto2_compiler.Message): """ `Any` contains an arbitrary serialized protocol buffer message along with a URL that describes the type of the serialized message. @@ -590,7 +590,7 @@ class Any(betterproto.Message): } """ - type_url: str = betterproto.string_field(1) + type_url: str = betterproto2_compiler.string_field(1) """ A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least @@ -622,20 +622,20 @@ class Any(betterproto.Message): used with implementation specific semantics. """ - value: bytes = betterproto.bytes_field(2) + value: bytes = betterproto2_compiler.bytes_field(2) """ Must be a valid serialized protocol buffer of the above specified type. 
""" @dataclass(eq=False, repr=False) -class SourceContext(betterproto.Message): +class SourceContext(betterproto2_compiler.Message): """ `SourceContext` represents information about the source of a protobuf element, like the file in which it is defined. """ - file_name: str = betterproto.string_field(1) + file_name: str = betterproto2_compiler.string_field(1) """ The path-qualified name of the .proto file that contained the associated protobuf element. For example: `"google/protobuf/source_context.proto"`. @@ -643,125 +643,125 @@ class SourceContext(betterproto.Message): @dataclass(eq=False, repr=False) -class Type(betterproto.Message): +class Type(betterproto2_compiler.Message): """A protocol buffer message type.""" - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """The fully qualified message name.""" - fields: List["Field"] = betterproto.message_field(2) + fields: List["Field"] = betterproto2_compiler.message_field(2) """The list of fields.""" - oneofs: List[str] = betterproto.string_field(3) + oneofs: List[str] = betterproto2_compiler.string_field(3) """The list of types appearing in `oneof` definitions in this type.""" - options: List["Option"] = betterproto.message_field(4) + options: List["Option"] = betterproto2_compiler.message_field(4) """The protocol buffer options.""" - source_context: "SourceContext" = betterproto.message_field(5) + source_context: "SourceContext" = betterproto2_compiler.message_field(5) """The source context.""" - syntax: "Syntax" = betterproto.enum_field(6, enum_default_value=lambda: Syntax.try_value(0)) + syntax: "Syntax" = betterproto2_compiler.enum_field(6, enum_default_value=lambda: Syntax.try_value(0)) """The source syntax.""" - edition: str = betterproto.string_field(7) + edition: str = betterproto2_compiler.string_field(7) """ The source edition string, only valid when syntax is SYNTAX_EDITIONS. """ @dataclass(eq=False, repr=False) -class Field(betterproto.Message): +class Field(betterproto2_compiler.Message): """A single field of a message type.""" - kind: "FieldKind" = betterproto.enum_field(1, enum_default_value=lambda: FieldKind.try_value(0)) + kind: "FieldKind" = betterproto2_compiler.enum_field(1, enum_default_value=lambda: FieldKind.try_value(0)) """The field type.""" - cardinality: "FieldCardinality" = betterproto.enum_field( + cardinality: "FieldCardinality" = betterproto2_compiler.enum_field( 2, enum_default_value=lambda: FieldCardinality.try_value(0) ) """The field cardinality.""" - number: int = betterproto.int32_field(3) + number: int = betterproto2_compiler.int32_field(3) """The field number.""" - name: str = betterproto.string_field(4) + name: str = betterproto2_compiler.string_field(4) """The field name.""" - type_url: str = betterproto.string_field(6) + type_url: str = betterproto2_compiler.string_field(6) """ The field type URL, without the scheme, for message or enumeration types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`. """ - oneof_index: int = betterproto.int32_field(7) + oneof_index: int = betterproto2_compiler.int32_field(7) """ The index of the field type in `Type.oneofs`, for message or enumeration types. The first type has index 1; zero means the type is not in the list. 
""" - packed: bool = betterproto.bool_field(8) + packed: bool = betterproto2_compiler.bool_field(8) """Whether to use alternative packed wire representation.""" - options: List["Option"] = betterproto.message_field(9) + options: List["Option"] = betterproto2_compiler.message_field(9) """The protocol buffer options.""" - json_name: str = betterproto.string_field(10) + json_name: str = betterproto2_compiler.string_field(10) """The field JSON name.""" - default_value: str = betterproto.string_field(11) + default_value: str = betterproto2_compiler.string_field(11) """ The string value of the default value of this field. Proto2 syntax only. """ @dataclass(eq=False, repr=False) -class Enum(betterproto.Message): +class Enum(betterproto2_compiler.Message): """Enum type definition.""" - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """Enum type name.""" - enumvalue: List["EnumValue"] = betterproto.message_field(2, wraps=betterproto.TYPE_ENUM) + enumvalue: List["EnumValue"] = betterproto2_compiler.message_field(2, wraps=betterproto2_compiler.TYPE_ENUM) """Enum value definitions.""" - options: List["Option"] = betterproto.message_field(3) + options: List["Option"] = betterproto2_compiler.message_field(3) """Protocol buffer options.""" - source_context: "SourceContext" = betterproto.message_field(4) + source_context: "SourceContext" = betterproto2_compiler.message_field(4) """The source context.""" - syntax: "Syntax" = betterproto.enum_field(5, enum_default_value=lambda: Syntax.try_value(0)) + syntax: "Syntax" = betterproto2_compiler.enum_field(5, enum_default_value=lambda: Syntax.try_value(0)) """The source syntax.""" - edition: str = betterproto.string_field(6) + edition: str = betterproto2_compiler.string_field(6) """ The source edition string, only valid when syntax is SYNTAX_EDITIONS. """ @dataclass(eq=False, repr=False) -class EnumValue(betterproto.Message): +class EnumValue(betterproto2_compiler.Message): """Enum value definition.""" - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """Enum value name.""" - number: int = betterproto.int32_field(2) + number: int = betterproto2_compiler.int32_field(2) """Enum value number.""" - options: List["Option"] = betterproto.message_field(3) + options: List["Option"] = betterproto2_compiler.message_field(3) """Protocol buffer options.""" @dataclass(eq=False, repr=False) -class Option(betterproto.Message): +class Option(betterproto2_compiler.Message): """ A protocol buffer option, which can be attached to a message, field, enumeration, etc. """ - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """ The option's name. For protobuf built-in options (options defined in descriptor.proto), this is the short name. For example, `"map_entry"`. @@ -769,7 +769,7 @@ class Option(betterproto.Message): `"google.api.http"`. """ - value: "Any" = betterproto.message_field(2) + value: "Any" = betterproto2_compiler.message_field(2) """ The option's value packed in an Any message. If the value is a primitive, the corresponding wrapper type defined in google/protobuf/wrappers.proto @@ -779,7 +779,7 @@ class Option(betterproto.Message): @dataclass(eq=False, repr=False) -class Api(betterproto.Message): +class Api(betterproto2_compiler.Message): """ Api is a light-weight descriptor for an API Interface. @@ -792,19 +792,19 @@ class Api(betterproto.Message): detailed terminology. 
""" - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """ The fully qualified name of this interface, including package name followed by the interface's simple name. """ - methods: List["Method"] = betterproto.message_field(2) + methods: List["Method"] = betterproto2_compiler.message_field(2) """The methods of this interface, in unspecified order.""" - options: List["Option"] = betterproto.message_field(3) + options: List["Option"] = betterproto2_compiler.message_field(3) """Any metadata attached to the interface.""" - version: str = betterproto.string_field(4) + version: str = betterproto2_compiler.string_field(4) """ A version string for this interface. If specified, must have the form `major-version.minor-version`, as in `1.10`. If the minor version is @@ -827,47 +827,47 @@ class Api(betterproto.Message): experimental, non-GA interfaces. """ - source_context: "SourceContext" = betterproto.message_field(5) + source_context: "SourceContext" = betterproto2_compiler.message_field(5) """ Source context for the protocol buffer service represented by this message. """ - mixins: List["Mixin"] = betterproto.message_field(6) + mixins: List["Mixin"] = betterproto2_compiler.message_field(6) """Included interfaces. See [Mixin][].""" - syntax: "Syntax" = betterproto.enum_field(7, enum_default_value=lambda: Syntax.try_value(0)) + syntax: "Syntax" = betterproto2_compiler.enum_field(7, enum_default_value=lambda: Syntax.try_value(0)) """The source syntax of the service.""" @dataclass(eq=False, repr=False) -class Method(betterproto.Message): +class Method(betterproto2_compiler.Message): """Method represents a method of an API interface.""" - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """The simple name of this method.""" - request_type_url: str = betterproto.string_field(2) + request_type_url: str = betterproto2_compiler.string_field(2) """A URL of the input message type.""" - request_streaming: bool = betterproto.bool_field(3) + request_streaming: bool = betterproto2_compiler.bool_field(3) """If true, the request is streamed.""" - response_type_url: str = betterproto.string_field(4) + response_type_url: str = betterproto2_compiler.string_field(4) """The URL of the output message type.""" - response_streaming: bool = betterproto.bool_field(5) + response_streaming: bool = betterproto2_compiler.bool_field(5) """If true, the response is streamed.""" - options: List["Option"] = betterproto.message_field(6) + options: List["Option"] = betterproto2_compiler.message_field(6) """Any metadata attached to the method.""" - syntax: "Syntax" = betterproto.enum_field(7, enum_default_value=lambda: Syntax.try_value(0)) + syntax: "Syntax" = betterproto2_compiler.enum_field(7, enum_default_value=lambda: Syntax.try_value(0)) """The source syntax of this method.""" @dataclass(eq=False, repr=False) -class Mixin(betterproto.Message): +class Mixin(betterproto2_compiler.Message): """ Declares an API Interface to be included in this interface. The including interface must redeclare all the methods from the included interface, but @@ -949,10 +949,10 @@ class Mixin(betterproto.Message): } """ - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """The fully qualified name of the interface which is included.""" - root: str = betterproto.string_field(2) + root: str = betterproto2_compiler.string_field(2) """ If non-empty specifies a path under which inherited HTTP paths are rooted. 
@@ -960,41 +960,41 @@ class Mixin(betterproto.Message): @dataclass(eq=False, repr=False) -class FileDescriptorSet(betterproto.Message): +class FileDescriptorSet(betterproto2_compiler.Message): """ The protocol compiler can output a FileDescriptorSet containing the .proto files it parses. """ - file: List["FileDescriptorProto"] = betterproto.message_field(1) + file: List["FileDescriptorProto"] = betterproto2_compiler.message_field(1) @dataclass(eq=False, repr=False) -class FileDescriptorProto(betterproto.Message): +class FileDescriptorProto(betterproto2_compiler.Message): """Describes a complete .proto file.""" - name: str = betterproto.string_field(1) - package: str = betterproto.string_field(2) - dependency: List[str] = betterproto.string_field(3) + name: str = betterproto2_compiler.string_field(1) + package: str = betterproto2_compiler.string_field(2) + dependency: List[str] = betterproto2_compiler.string_field(3) """Names of files imported by this file.""" - public_dependency: List[int] = betterproto.int32_field(10) + public_dependency: List[int] = betterproto2_compiler.int32_field(10) """Indexes of the public imported files in the dependency list above.""" - weak_dependency: List[int] = betterproto.int32_field(11) + weak_dependency: List[int] = betterproto2_compiler.int32_field(11) """ Indexes of the weak imported files in the dependency list. For Google-internal migration only. Do not use. """ - message_type: List["DescriptorProto"] = betterproto.message_field(4) + message_type: List["DescriptorProto"] = betterproto2_compiler.message_field(4) """All top-level definitions in this file.""" - enum_type: List["EnumDescriptorProto"] = betterproto.message_field(5) - service: List["ServiceDescriptorProto"] = betterproto.message_field(6) - extension: List["FieldDescriptorProto"] = betterproto.message_field(7) - options: "FileOptions" = betterproto.message_field(8) - source_code_info: "SourceCodeInfo" = betterproto.message_field(9) + enum_type: List["EnumDescriptorProto"] = betterproto2_compiler.message_field(5) + service: List["ServiceDescriptorProto"] = betterproto2_compiler.message_field(6) + extension: List["FieldDescriptorProto"] = betterproto2_compiler.message_field(7) + options: "FileOptions" = betterproto2_compiler.message_field(8) + source_code_info: "SourceCodeInfo" = betterproto2_compiler.message_field(9) """ This field contains optional information about the original source code. You may safely remove this entire field without harming runtime @@ -1002,7 +1002,7 @@ class FileDescriptorProto(betterproto.Message): development tools. """ - syntax: str = betterproto.string_field(12) + syntax: str = betterproto2_compiler.string_field(12) """ The syntax of the proto file. The supported values are "proto2", "proto3", and "editions". @@ -1010,24 +1010,24 @@ class FileDescriptorProto(betterproto.Message): If `edition` is present, this value must be "editions". 
""" - edition: "Edition" = betterproto.enum_field(14, enum_default_value=lambda: Edition.try_value(0)) + edition: "Edition" = betterproto2_compiler.enum_field(14, enum_default_value=lambda: Edition.try_value(0)) """The edition of the proto file.""" @dataclass(eq=False, repr=False) -class DescriptorProto(betterproto.Message): +class DescriptorProto(betterproto2_compiler.Message): """Describes a message type.""" - name: str = betterproto.string_field(1) - field: List["FieldDescriptorProto"] = betterproto.message_field(2) - extension: List["FieldDescriptorProto"] = betterproto.message_field(6) - nested_type: List["DescriptorProto"] = betterproto.message_field(3) - enum_type: List["EnumDescriptorProto"] = betterproto.message_field(4) - extension_range: List["DescriptorProtoExtensionRange"] = betterproto.message_field(5) - oneof_decl: List["OneofDescriptorProto"] = betterproto.message_field(8) - options: "MessageOptions" = betterproto.message_field(7) - reserved_range: List["DescriptorProtoReservedRange"] = betterproto.message_field(9) - reserved_name: List[str] = betterproto.string_field(10) + name: str = betterproto2_compiler.string_field(1) + field: List["FieldDescriptorProto"] = betterproto2_compiler.message_field(2) + extension: List["FieldDescriptorProto"] = betterproto2_compiler.message_field(6) + nested_type: List["DescriptorProto"] = betterproto2_compiler.message_field(3) + enum_type: List["EnumDescriptorProto"] = betterproto2_compiler.message_field(4) + extension_range: List["DescriptorProtoExtensionRange"] = betterproto2_compiler.message_field(5) + oneof_decl: List["OneofDescriptorProto"] = betterproto2_compiler.message_field(8) + options: "MessageOptions" = betterproto2_compiler.message_field(7) + reserved_range: List["DescriptorProtoReservedRange"] = betterproto2_compiler.message_field(9) + reserved_name: List[str] = betterproto2_compiler.string_field(10) """ Reserved field names, which may not be used by fields in the same message. A given name may only be reserved once. @@ -1035,40 +1035,40 @@ class DescriptorProto(betterproto.Message): @dataclass(eq=False, repr=False) -class DescriptorProtoExtensionRange(betterproto.Message): - start: int = betterproto.int32_field(1) - end: int = betterproto.int32_field(2) - options: "ExtensionRangeOptions" = betterproto.message_field(3) +class DescriptorProtoExtensionRange(betterproto2_compiler.Message): + start: int = betterproto2_compiler.int32_field(1) + end: int = betterproto2_compiler.int32_field(2) + options: "ExtensionRangeOptions" = betterproto2_compiler.message_field(3) @dataclass(eq=False, repr=False) -class DescriptorProtoReservedRange(betterproto.Message): +class DescriptorProtoReservedRange(betterproto2_compiler.Message): """ Range of reserved tag numbers. Reserved tag numbers may not be used by fields or extension ranges in the same message. Reserved ranges may not overlap. """ - start: int = betterproto.int32_field(1) - end: int = betterproto.int32_field(2) + start: int = betterproto2_compiler.int32_field(1) + end: int = betterproto2_compiler.int32_field(2) @dataclass(eq=False, repr=False) -class ExtensionRangeOptions(betterproto.Message): - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) +class ExtensionRangeOptions(betterproto2_compiler.Message): + uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999) """The parser stores options it doesn't recognize here. 
See above.""" - declaration: List["ExtensionRangeOptionsDeclaration"] = betterproto.message_field(2) + declaration: List["ExtensionRangeOptionsDeclaration"] = betterproto2_compiler.message_field(2) """ For external users: DO NOT USE. We are in the process of open sourcing extension declaration and executing internal cleanups before it can be used externally. """ - features: "FeatureSet" = betterproto.message_field(50) + features: "FeatureSet" = betterproto2_compiler.message_field(50) """Any features defined in the specific edition.""" - verification: "ExtensionRangeOptionsVerificationState" = betterproto.enum_field( + verification: "ExtensionRangeOptionsVerificationState" = betterproto2_compiler.enum_field( 3, enum_default_value=lambda: ExtensionRangeOptionsVerificationState.try_value(0), ) @@ -1080,31 +1080,31 @@ class ExtensionRangeOptions(betterproto.Message): @dataclass(eq=False, repr=False) -class ExtensionRangeOptionsDeclaration(betterproto.Message): - number: int = betterproto.int32_field(1) +class ExtensionRangeOptionsDeclaration(betterproto2_compiler.Message): + number: int = betterproto2_compiler.int32_field(1) """The extension number declared within the extension range.""" - full_name: str = betterproto.string_field(2) + full_name: str = betterproto2_compiler.string_field(2) """ The fully-qualified name of the extension field. There must be a leading dot in front of the full name. """ - type: str = betterproto.string_field(3) + type: str = betterproto2_compiler.string_field(3) """ The fully-qualified type name of the extension field. Unlike Metadata.type, Declaration.type must have a leading dot for messages and enums. """ - reserved: bool = betterproto.bool_field(5) + reserved: bool = betterproto2_compiler.bool_field(5) """ If true, indicates that the number is reserved in the extension range, and any extension field with the number will fail to compile. Set this when a declared extension field is deleted. """ - repeated: bool = betterproto.bool_field(6) + repeated: bool = betterproto2_compiler.bool_field(6) """ If true, indicates that the extension must be defined as repeated. Otherwise the extension must be defined as optional. @@ -1112,15 +1112,15 @@ class ExtensionRangeOptionsDeclaration(betterproto.Message): @dataclass(eq=False, repr=False) -class FieldDescriptorProto(betterproto.Message): +class FieldDescriptorProto(betterproto2_compiler.Message): """Describes a field within a message.""" - name: str = betterproto.string_field(1) - number: int = betterproto.int32_field(3) - label: "FieldDescriptorProtoLabel" = betterproto.enum_field( + name: str = betterproto2_compiler.string_field(1) + number: int = betterproto2_compiler.int32_field(3) + label: "FieldDescriptorProtoLabel" = betterproto2_compiler.enum_field( 4, enum_default_value=lambda: FieldDescriptorProtoLabel.try_value(0) ) - type: "FieldDescriptorProtoType" = betterproto.enum_field( + type: "FieldDescriptorProtoType" = betterproto2_compiler.enum_field( 5, enum_default_value=lambda: FieldDescriptorProtoType.try_value(0) ) """ @@ -1128,7 +1128,7 @@ class FieldDescriptorProto(betterproto.Message): are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. """ - type_name: str = betterproto.string_field(6) + type_name: str = betterproto2_compiler.string_field(6) """ For message and enum types, this is the name of the type. If the name starts with a '.', it is fully-qualified. Otherwise, C++-like scoping @@ -1137,13 +1137,13 @@ class FieldDescriptorProto(betterproto.Message): namespace). 
""" - extendee: str = betterproto.string_field(2) + extendee: str = betterproto2_compiler.string_field(2) """ For extensions, this is the name of the type being extended. It is resolved in the same manner as type_name. """ - default_value: str = betterproto.string_field(7) + default_value: str = betterproto2_compiler.string_field(7) """ For numeric types, contains the original text representation of the value. For booleans, "true" or "false". @@ -1151,13 +1151,13 @@ class FieldDescriptorProto(betterproto.Message): For bytes, contains the C escaped value. All bytes >= 128 are escaped. """ - oneof_index: int = betterproto.int32_field(9) + oneof_index: int = betterproto2_compiler.int32_field(9) """ If set, gives the index of a oneof in the containing type's oneof_decl list. This field is a member of that oneof. """ - json_name: str = betterproto.string_field(10) + json_name: str = betterproto2_compiler.string_field(10) """ JSON name of this field. The value is set by protocol compiler. If the user has set a "json_name" option on this field, that option's value @@ -1165,8 +1165,8 @@ class FieldDescriptorProto(betterproto.Message): it to camelCase. """ - options: "FieldOptions" = betterproto.message_field(8) - proto3_optional: bool = betterproto.bool_field(17) + options: "FieldOptions" = betterproto2_compiler.message_field(8) + proto3_optional: bool = betterproto2_compiler.bool_field(17) """ If true, this is a proto3 "optional". When a proto3 field is optional, it tracks presence regardless of field type. @@ -1193,28 +1193,28 @@ class FieldDescriptorProto(betterproto.Message): @dataclass(eq=False, repr=False) -class OneofDescriptorProto(betterproto.Message): +class OneofDescriptorProto(betterproto2_compiler.Message): """Describes a oneof.""" - name: str = betterproto.string_field(1) - options: "OneofOptions" = betterproto.message_field(2) + name: str = betterproto2_compiler.string_field(1) + options: "OneofOptions" = betterproto2_compiler.message_field(2) @dataclass(eq=False, repr=False) -class EnumDescriptorProto(betterproto.Message): +class EnumDescriptorProto(betterproto2_compiler.Message): """Describes an enum type.""" - name: str = betterproto.string_field(1) - value: List["EnumValueDescriptorProto"] = betterproto.message_field(2) - options: "EnumOptions" = betterproto.message_field(3) - reserved_range: List["EnumDescriptorProtoEnumReservedRange"] = betterproto.message_field(4) + name: str = betterproto2_compiler.string_field(1) + value: List["EnumValueDescriptorProto"] = betterproto2_compiler.message_field(2) + options: "EnumOptions" = betterproto2_compiler.message_field(3) + reserved_range: List["EnumDescriptorProtoEnumReservedRange"] = betterproto2_compiler.message_field(4) """ Range of reserved numeric values. Reserved numeric values may not be used by enum values in the same enum declaration. Reserved ranges may not overlap. """ - reserved_name: List[str] = betterproto.string_field(5) + reserved_name: List[str] = betterproto2_compiler.string_field(5) """ Reserved enum value names, which may not be reused. A given name may only be reserved once. @@ -1222,7 +1222,7 @@ class EnumDescriptorProto(betterproto.Message): @dataclass(eq=False, repr=False) -class EnumDescriptorProtoEnumReservedRange(betterproto.Message): +class EnumDescriptorProtoEnumReservedRange(betterproto2_compiler.Message): """ Range of reserved numeric values. Reserved values may not be used by entries in the same enum. Reserved ranges may not overlap. 
@@ -1232,51 +1232,51 @@ class EnumDescriptorProtoEnumReservedRange(betterproto.Message): domain. """ - start: int = betterproto.int32_field(1) - end: int = betterproto.int32_field(2) + start: int = betterproto2_compiler.int32_field(1) + end: int = betterproto2_compiler.int32_field(2) @dataclass(eq=False, repr=False) -class EnumValueDescriptorProto(betterproto.Message): +class EnumValueDescriptorProto(betterproto2_compiler.Message): """Describes a value within an enum.""" - name: str = betterproto.string_field(1) - number: int = betterproto.int32_field(2) - options: "EnumValueOptions" = betterproto.message_field(3) + name: str = betterproto2_compiler.string_field(1) + number: int = betterproto2_compiler.int32_field(2) + options: "EnumValueOptions" = betterproto2_compiler.message_field(3) @dataclass(eq=False, repr=False) -class ServiceDescriptorProto(betterproto.Message): +class ServiceDescriptorProto(betterproto2_compiler.Message): """Describes a service.""" - name: str = betterproto.string_field(1) - method: List["MethodDescriptorProto"] = betterproto.message_field(2) - options: "ServiceOptions" = betterproto.message_field(3) + name: str = betterproto2_compiler.string_field(1) + method: List["MethodDescriptorProto"] = betterproto2_compiler.message_field(2) + options: "ServiceOptions" = betterproto2_compiler.message_field(3) @dataclass(eq=False, repr=False) -class MethodDescriptorProto(betterproto.Message): +class MethodDescriptorProto(betterproto2_compiler.Message): """Describes a method of a service.""" - name: str = betterproto.string_field(1) - input_type: str = betterproto.string_field(2) + name: str = betterproto2_compiler.string_field(1) + input_type: str = betterproto2_compiler.string_field(2) """ Input and output type names. These are resolved in the same way as FieldDescriptorProto.type_name, but must refer to a message type. """ - output_type: str = betterproto.string_field(3) - options: "MethodOptions" = betterproto.message_field(4) - client_streaming: bool = betterproto.bool_field(5) + output_type: str = betterproto2_compiler.string_field(3) + options: "MethodOptions" = betterproto2_compiler.message_field(4) + client_streaming: bool = betterproto2_compiler.bool_field(5) """Identifies if client streams multiple client messages""" - server_streaming: bool = betterproto.bool_field(6) + server_streaming: bool = betterproto2_compiler.bool_field(6) """Identifies if server streams multiple server messages""" @dataclass(eq=False, repr=False) -class FileOptions(betterproto.Message): - java_package: str = betterproto.string_field(1) +class FileOptions(betterproto2_compiler.Message): + java_package: str = betterproto2_compiler.string_field(1) """ Sets the Java package where classes generated from this .proto will be placed. By default, the proto package is used, but this is often @@ -1284,7 +1284,7 @@ class FileOptions(betterproto.Message): domain names. """ - java_outer_classname: str = betterproto.string_field(8) + java_outer_classname: str = betterproto2_compiler.string_field(8) """ Controls the name of the wrapper Java class generated for the .proto file. That class will always contain the .proto file's getDescriptor() method as @@ -1293,7 +1293,7 @@ class FileOptions(betterproto.Message): .proto file will be nested inside the single wrapper outer class. 
""" - java_multiple_files: bool = betterproto.bool_field(10) + java_multiple_files: bool = betterproto2_compiler.bool_field(10) """ If enabled, then the Java code generator will generate a separate .java file for each top-level message, enum, and service defined in the .proto @@ -1303,10 +1303,10 @@ class FileOptions(betterproto.Message): top-level extensions defined in the file. """ - java_generate_equals_and_hash: bool = betterproto.bool_field(20) + java_generate_equals_and_hash: bool = betterproto2_compiler.bool_field(20) """This option does nothing.""" - java_string_check_utf8: bool = betterproto.bool_field(27) + java_string_check_utf8: bool = betterproto2_compiler.bool_field(27) """ If set true, then the Java2 code generator will generate code that throws an exception whenever an attempt is made to assign a non-UTF-8 @@ -1316,10 +1316,10 @@ class FileOptions(betterproto.Message): This option has no effect on when used with the lite runtime. """ - optimize_for: "FileOptionsOptimizeMode" = betterproto.enum_field( + optimize_for: "FileOptionsOptimizeMode" = betterproto2_compiler.enum_field( 9, enum_default_value=lambda: FileOptionsOptimizeMode.try_value(0) ) - go_package: str = betterproto.string_field(11) + go_package: str = betterproto2_compiler.string_field(11) """ Sets the Go package where structs generated from this .proto will be placed. If omitted, the Go package will be derived from the following: @@ -1328,7 +1328,7 @@ class FileOptions(betterproto.Message): - Otherwise, the basename of the .proto file, without extension. """ - cc_generic_services: bool = betterproto.bool_field(16) + cc_generic_services: bool = betterproto2_compiler.bool_field(16) """ Should generic services be generated in each language? "Generic" services are not specific to any particular RPC system. They are generated by the @@ -1342,9 +1342,9 @@ class FileOptions(betterproto.Message): explicitly set them to true. """ - java_generic_services: bool = betterproto.bool_field(17) - py_generic_services: bool = betterproto.bool_field(18) - deprecated: bool = betterproto.bool_field(23) + java_generic_services: bool = betterproto2_compiler.bool_field(17) + py_generic_services: bool = betterproto2_compiler.bool_field(18) + deprecated: bool = betterproto2_compiler.bool_field(23) """ Is this file deprecated? Depending on the target platform, this can emit Deprecated annotations @@ -1352,22 +1352,22 @@ class FileOptions(betterproto.Message): least, this is a formalization for deprecating files. """ - cc_enable_arenas: bool = betterproto.bool_field(31) + cc_enable_arenas: bool = betterproto2_compiler.bool_field(31) """ Enables the use of arenas for the proto messages in this file. This applies only to generated classes for C++. """ - objc_class_prefix: str = betterproto.string_field(36) + objc_class_prefix: str = betterproto2_compiler.string_field(36) """ Sets the objective c class prefix which is prepended to all objective c generated classes from this .proto. There is no default. """ - csharp_namespace: str = betterproto.string_field(37) + csharp_namespace: str = betterproto2_compiler.string_field(37) """Namespace for generated classes; defaults to the package.""" - swift_prefix: str = betterproto.string_field(39) + swift_prefix: str = betterproto2_compiler.string_field(39) """ By default Swift generators will take the proto package and CamelCase it replacing '.' with underscore and use that to prefix the types/symbols @@ -1375,37 +1375,37 @@ class FileOptions(betterproto.Message): to prefix the types/symbols defined. 
""" - php_class_prefix: str = betterproto.string_field(40) + php_class_prefix: str = betterproto2_compiler.string_field(40) """ Sets the php class prefix which is prepended to all php generated classes from this .proto. Default is empty. """ - php_namespace: str = betterproto.string_field(41) + php_namespace: str = betterproto2_compiler.string_field(41) """ Use this option to change the namespace of php generated classes. Default is empty. When this option is empty, the package name will be used for determining the namespace. """ - php_metadata_namespace: str = betterproto.string_field(44) + php_metadata_namespace: str = betterproto2_compiler.string_field(44) """ Use this option to change the namespace of php generated metadata classes. Default is empty. When this option is empty, the proto file name will be used for determining the namespace. """ - ruby_package: str = betterproto.string_field(45) + ruby_package: str = betterproto2_compiler.string_field(45) """ Use this option to change the package of ruby generated classes. Default is empty. When this option is not set, the package name will be used for determining the ruby package. """ - features: "FeatureSet" = betterproto.message_field(50) + features: "FeatureSet" = betterproto2_compiler.message_field(50) """Any features defined in the specific edition.""" - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999) """ The parser stores options it doesn't recognize here. See the documentation for the "Options" section above. @@ -1421,8 +1421,8 @@ def __post_init__(self) -> None: @dataclass(eq=False, repr=False) -class MessageOptions(betterproto.Message): - message_set_wire_format: bool = betterproto.bool_field(1) +class MessageOptions(betterproto2_compiler.Message): + message_set_wire_format: bool = betterproto2_compiler.bool_field(1) """ Set true to use the old proto1 MessageSet wire format for extensions. This is provided for backwards-compatibility with the MessageSet wire @@ -1444,14 +1444,14 @@ class MessageOptions(betterproto.Message): the protocol compiler. """ - no_standard_descriptor_accessor: bool = betterproto.bool_field(2) + no_standard_descriptor_accessor: bool = betterproto2_compiler.bool_field(2) """ Disables the generation of the standard "descriptor()" accessor, which can conflict with a field of the same name. This is meant to make migration from proto1 easier; new code should avoid fields named "descriptor". """ - deprecated: bool = betterproto.bool_field(3) + deprecated: bool = betterproto2_compiler.bool_field(3) """ Is this message deprecated? Depending on the target platform, this can emit Deprecated annotations @@ -1459,7 +1459,7 @@ class MessageOptions(betterproto.Message): this is a formalization for deprecating messages. """ - map_entry: bool = betterproto.bool_field(7) + map_entry: bool = betterproto2_compiler.bool_field(7) """ Whether the message is an automatically generated map entry type for the maps field. @@ -1484,7 +1484,7 @@ class MessageOptions(betterproto.Message): parser. """ - deprecated_legacy_json_field_conflicts: bool = betterproto.bool_field(11) + deprecated_legacy_json_field_conflicts: bool = betterproto2_compiler.bool_field(11) """ Enable the legacy handling of JSON field name conflicts. This lowercases and strips underscored from the fields before comparison in proto3 only. @@ -1498,10 +1498,10 @@ class MessageOptions(betterproto.Message): teams have had time to migrate. 
""" - features: "FeatureSet" = betterproto.message_field(12) + features: "FeatureSet" = betterproto2_compiler.message_field(12) """Any features defined in the specific edition.""" - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999) """The parser stores options it doesn't recognize here. See above.""" def __post_init__(self) -> None: @@ -1514,8 +1514,10 @@ def __post_init__(self) -> None: @dataclass(eq=False, repr=False) -class FieldOptions(betterproto.Message): - ctype: "FieldOptionsCType" = betterproto.enum_field(1, enum_default_value=lambda: FieldOptionsCType.try_value(0)) +class FieldOptions(betterproto2_compiler.Message): + ctype: "FieldOptionsCType" = betterproto2_compiler.enum_field( + 1, enum_default_value=lambda: FieldOptionsCType.try_value(0) + ) """ The ctype option instructs the C++ code generator to use a different representation of the field than it normally would. See the specific @@ -1525,7 +1527,7 @@ class FieldOptions(betterproto.Message): other types in a future version! """ - packed: bool = betterproto.bool_field(2) + packed: bool = betterproto2_compiler.bool_field(2) """ The packed option can be enabled for repeated primitive fields to enable a more efficient representation on the wire. Rather than repeatedly @@ -1536,7 +1538,9 @@ class FieldOptions(betterproto.Message): the behavior. """ - jstype: "FieldOptionsJsType" = betterproto.enum_field(6, enum_default_value=lambda: FieldOptionsJsType.try_value(0)) + jstype: "FieldOptionsJsType" = betterproto2_compiler.enum_field( + 6, enum_default_value=lambda: FieldOptionsJsType.try_value(0) + ) """ The jstype option determines the JavaScript type used for values of the field. The option is permitted only for 64 bit integral and fixed types @@ -1551,7 +1555,7 @@ class FieldOptions(betterproto.Message): goog.math.Integer. """ - lazy: bool = betterproto.bool_field(5) + lazy: bool = betterproto2_compiler.bool_field(5) """ Should this field be parsed lazily? Lazy applies only to message-type fields. It means that when the outer message is initially parsed, the @@ -1577,14 +1581,14 @@ class FieldOptions(betterproto.Message): uninitialized messages are acceptable). """ - unverified_lazy: bool = betterproto.bool_field(15) + unverified_lazy: bool = betterproto2_compiler.bool_field(15) """ unverified_lazy does no correctness checks on the byte stream. This should only be used where lazy with verification is prohibitive for performance reasons. """ - deprecated: bool = betterproto.bool_field(3) + deprecated: bool = betterproto2_compiler.bool_field(3) """ Is this field deprecated? Depending on the target platform, this can emit Deprecated annotations @@ -1592,53 +1596,53 @@ class FieldOptions(betterproto.Message): is a formalization for deprecating fields. """ - weak: bool = betterproto.bool_field(10) + weak: bool = betterproto2_compiler.bool_field(10) """For Google-internal migration only. Do not use.""" - debug_redact: bool = betterproto.bool_field(16) + debug_redact: bool = betterproto2_compiler.bool_field(16) """ Indicate that the field value should not be printed out when using debug formats, e.g. when the field contains sensitive credentials. 
""" - retention: "FieldOptionsOptionRetention" = betterproto.enum_field( + retention: "FieldOptionsOptionRetention" = betterproto2_compiler.enum_field( 17, enum_default_value=lambda: FieldOptionsOptionRetention.try_value(0) ) - targets: List["FieldOptionsOptionTargetType"] = betterproto.enum_field( + targets: List["FieldOptionsOptionTargetType"] = betterproto2_compiler.enum_field( 19, enum_default_value=lambda: FieldOptionsOptionTargetType.try_value(0) ) - edition_defaults: List["FieldOptionsEditionDefault"] = betterproto.message_field(20) - features: "FeatureSet" = betterproto.message_field(21) + edition_defaults: List["FieldOptionsEditionDefault"] = betterproto2_compiler.message_field(20) + features: "FeatureSet" = betterproto2_compiler.message_field(21) """Any features defined in the specific edition.""" - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999) """The parser stores options it doesn't recognize here. See above.""" @dataclass(eq=False, repr=False) -class FieldOptionsEditionDefault(betterproto.Message): - edition: "Edition" = betterproto.enum_field(3, enum_default_value=lambda: Edition.try_value(0)) - value: str = betterproto.string_field(2) +class FieldOptionsEditionDefault(betterproto2_compiler.Message): + edition: "Edition" = betterproto2_compiler.enum_field(3, enum_default_value=lambda: Edition.try_value(0)) + value: str = betterproto2_compiler.string_field(2) @dataclass(eq=False, repr=False) -class OneofOptions(betterproto.Message): - features: "FeatureSet" = betterproto.message_field(1) +class OneofOptions(betterproto2_compiler.Message): + features: "FeatureSet" = betterproto2_compiler.message_field(1) """Any features defined in the specific edition.""" - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999) """The parser stores options it doesn't recognize here. See above.""" @dataclass(eq=False, repr=False) -class EnumOptions(betterproto.Message): - allow_alias: bool = betterproto.bool_field(2) +class EnumOptions(betterproto2_compiler.Message): + allow_alias: bool = betterproto2_compiler.bool_field(2) """ Set this option to true to allow mapping different tag names to the same value. """ - deprecated: bool = betterproto.bool_field(3) + deprecated: bool = betterproto2_compiler.bool_field(3) """ Is this enum deprecated? Depending on the target platform, this can emit Deprecated annotations @@ -1646,7 +1650,7 @@ class EnumOptions(betterproto.Message): is a formalization for deprecating enums. """ - deprecated_legacy_json_field_conflicts: bool = betterproto.bool_field(6) + deprecated_legacy_json_field_conflicts: bool = betterproto2_compiler.bool_field(6) """ Enable the legacy handling of JSON field name conflicts. This lowercases and strips underscored from the fields before comparison in proto3 only. @@ -1656,10 +1660,10 @@ class EnumOptions(betterproto.Message): had time to migrate. """ - features: "FeatureSet" = betterproto.message_field(7) + features: "FeatureSet" = betterproto2_compiler.message_field(7) """Any features defined in the specific edition.""" - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999) """The parser stores options it doesn't recognize here. 
See above.""" def __post_init__(self) -> None: @@ -1672,8 +1676,8 @@ def __post_init__(self) -> None: @dataclass(eq=False, repr=False) -class EnumValueOptions(betterproto.Message): - deprecated: bool = betterproto.bool_field(1) +class EnumValueOptions(betterproto2_compiler.Message): + deprecated: bool = betterproto2_compiler.bool_field(1) """ Is this enum value deprecated? Depending on the target platform, this can emit Deprecated annotations @@ -1681,26 +1685,26 @@ class EnumValueOptions(betterproto.Message): this is a formalization for deprecating enum values. """ - features: "FeatureSet" = betterproto.message_field(2) + features: "FeatureSet" = betterproto2_compiler.message_field(2) """Any features defined in the specific edition.""" - debug_redact: bool = betterproto.bool_field(3) + debug_redact: bool = betterproto2_compiler.bool_field(3) """ Indicate that fields annotated with this enum value should not be printed out when using debug formats, e.g. when the field contains sensitive credentials. """ - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999) """The parser stores options it doesn't recognize here. See above.""" @dataclass(eq=False, repr=False) -class ServiceOptions(betterproto.Message): - features: "FeatureSet" = betterproto.message_field(34) +class ServiceOptions(betterproto2_compiler.Message): + features: "FeatureSet" = betterproto2_compiler.message_field(34) """Any features defined in the specific edition.""" - deprecated: bool = betterproto.bool_field(33) + deprecated: bool = betterproto2_compiler.bool_field(33) """ Is this service deprecated? Depending on the target platform, this can emit Deprecated annotations @@ -1708,13 +1712,13 @@ class ServiceOptions(betterproto.Message): this is a formalization for deprecating services. """ - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999) """The parser stores options it doesn't recognize here. See above.""" @dataclass(eq=False, repr=False) -class MethodOptions(betterproto.Message): - deprecated: bool = betterproto.bool_field(33) +class MethodOptions(betterproto2_compiler.Message): + deprecated: bool = betterproto2_compiler.bool_field(33) """ Is this method deprecated? Depending on the target platform, this can emit Deprecated annotations @@ -1722,18 +1726,18 @@ class MethodOptions(betterproto.Message): this is a formalization for deprecating methods. """ - idempotency_level: "MethodOptionsIdempotencyLevel" = betterproto.enum_field( + idempotency_level: "MethodOptionsIdempotencyLevel" = betterproto2_compiler.enum_field( 34, enum_default_value=lambda: MethodOptionsIdempotencyLevel.try_value(0) ) - features: "FeatureSet" = betterproto.message_field(35) + features: "FeatureSet" = betterproto2_compiler.message_field(35) """Any features defined in the specific edition.""" - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999) """The parser stores options it doesn't recognize here. See above.""" @dataclass(eq=False, repr=False) -class UninterpretedOption(betterproto.Message): +class UninterpretedOption(betterproto2_compiler.Message): """ A message representing a option the parser does not recognize. 
This only appears in options protos created by the compiler::Parser class. @@ -1743,22 +1747,22 @@ class UninterpretedOption(betterproto.Message): in them. """ - name: List["UninterpretedOptionNamePart"] = betterproto.message_field(2) - identifier_value: str = betterproto.string_field(3) + name: List["UninterpretedOptionNamePart"] = betterproto2_compiler.message_field(2) + identifier_value: str = betterproto2_compiler.string_field(3) """ The value of the uninterpreted option, in whatever type the tokenizer identified it as during parsing. Exactly one of these should be set. """ - positive_int_value: int = betterproto.uint64_field(4) - negative_int_value: int = betterproto.int64_field(5) - double_value: float = betterproto.double_field(6) - string_value: bytes = betterproto.bytes_field(7) - aggregate_value: str = betterproto.string_field(8) + positive_int_value: int = betterproto2_compiler.uint64_field(4) + negative_int_value: int = betterproto2_compiler.int64_field(5) + double_value: float = betterproto2_compiler.double_field(6) + string_value: bytes = betterproto2_compiler.bytes_field(7) + aggregate_value: str = betterproto2_compiler.string_field(8) @dataclass(eq=False, repr=False) -class UninterpretedOptionNamePart(betterproto.Message): +class UninterpretedOptionNamePart(betterproto2_compiler.Message): """ The name of the uninterpreted option. Each string represents a segment in a dot-separated name. is_extension is true iff a segment represents an @@ -1767,12 +1771,12 @@ class UninterpretedOptionNamePart(betterproto.Message): "foo.(bar.baz).moo". """ - name_part: str = betterproto.string_field(1) - is_extension: bool = betterproto.bool_field(2) + name_part: str = betterproto2_compiler.string_field(1) + is_extension: bool = betterproto2_compiler.bool_field(2) @dataclass(eq=False, repr=False) -class FeatureSet(betterproto.Message): +class FeatureSet(betterproto2_compiler.Message): """ TODO Enums in C++ gencode (and potentially other languages) are not well scoped. This means that each of the feature enums below can clash @@ -1782,28 +1786,28 @@ class FeatureSet(betterproto.Message): conflict here. 
""" - field_presence: "FeatureSetFieldPresence" = betterproto.enum_field( + field_presence: "FeatureSetFieldPresence" = betterproto2_compiler.enum_field( 1, enum_default_value=lambda: FeatureSetFieldPresence.try_value(0) ) - enum_type: "FeatureSetEnumType" = betterproto.enum_field( + enum_type: "FeatureSetEnumType" = betterproto2_compiler.enum_field( 2, enum_default_value=lambda: FeatureSetEnumType.try_value(0) ) - repeated_field_encoding: "FeatureSetRepeatedFieldEncoding" = betterproto.enum_field( + repeated_field_encoding: "FeatureSetRepeatedFieldEncoding" = betterproto2_compiler.enum_field( 3, enum_default_value=lambda: FeatureSetRepeatedFieldEncoding.try_value(0) ) - utf8_validation: "FeatureSetUtf8Validation" = betterproto.enum_field( + utf8_validation: "FeatureSetUtf8Validation" = betterproto2_compiler.enum_field( 4, enum_default_value=lambda: FeatureSetUtf8Validation.try_value(0) ) - message_encoding: "FeatureSetMessageEncoding" = betterproto.enum_field( + message_encoding: "FeatureSetMessageEncoding" = betterproto2_compiler.enum_field( 5, enum_default_value=lambda: FeatureSetMessageEncoding.try_value(0) ) - json_format: "FeatureSetJsonFormat" = betterproto.enum_field( + json_format: "FeatureSetJsonFormat" = betterproto2_compiler.enum_field( 6, enum_default_value=lambda: FeatureSetJsonFormat.try_value(0) ) @dataclass(eq=False, repr=False) -class FeatureSetDefaults(betterproto.Message): +class FeatureSetDefaults(betterproto2_compiler.Message): """ A compiled specification for the defaults of a set of features. These messages are generated from FeatureSet extensions and can be used to seed @@ -1811,14 +1815,14 @@ class FeatureSetDefaults(betterproto.Message): for the closest matching edition, followed by proto merges. """ - defaults: List["FeatureSetDefaultsFeatureSetEditionDefault"] = betterproto.message_field(1) - minimum_edition: "Edition" = betterproto.enum_field(4, enum_default_value=lambda: Edition.try_value(0)) + defaults: List["FeatureSetDefaultsFeatureSetEditionDefault"] = betterproto2_compiler.message_field(1) + minimum_edition: "Edition" = betterproto2_compiler.enum_field(4, enum_default_value=lambda: Edition.try_value(0)) """ The minimum supported edition (inclusive) when this was constructed. Editions before this will not have defaults. """ - maximum_edition: "Edition" = betterproto.enum_field(5, enum_default_value=lambda: Edition.try_value(0)) + maximum_edition: "Edition" = betterproto2_compiler.enum_field(5, enum_default_value=lambda: Edition.try_value(0)) """ The maximum known edition (inclusive) when this was constructed. Editions after this will not have reliable defaults. @@ -1826,7 +1830,7 @@ class FeatureSetDefaults(betterproto.Message): @dataclass(eq=False, repr=False) -class FeatureSetDefaultsFeatureSetEditionDefault(betterproto.Message): +class FeatureSetDefaultsFeatureSetEditionDefault(betterproto2_compiler.Message): """ A map from every known edition with a unique set of defaults to its defaults. Not all editions may be contained here. For a given edition, @@ -1834,18 +1838,18 @@ class FeatureSetDefaultsFeatureSetEditionDefault(betterproto.Message): be used. This field must be in strict ascending order by edition. 
""" - edition: "Edition" = betterproto.enum_field(3, enum_default_value=lambda: Edition.try_value(0)) - features: "FeatureSet" = betterproto.message_field(2) + edition: "Edition" = betterproto2_compiler.enum_field(3, enum_default_value=lambda: Edition.try_value(0)) + features: "FeatureSet" = betterproto2_compiler.message_field(2) @dataclass(eq=False, repr=False) -class SourceCodeInfo(betterproto.Message): +class SourceCodeInfo(betterproto2_compiler.Message): """ Encapsulates information about the original source file from which a FileDescriptorProto was generated. """ - location: List["SourceCodeInfoLocation"] = betterproto.message_field(1) + location: List["SourceCodeInfoLocation"] = betterproto2_compiler.message_field(1) """ A Location identifies a piece of source code in a .proto file which corresponds to a particular definition. This information is intended @@ -1894,8 +1898,8 @@ class SourceCodeInfo(betterproto.Message): @dataclass(eq=False, repr=False) -class SourceCodeInfoLocation(betterproto.Message): - path: List[int] = betterproto.int32_field(1) +class SourceCodeInfoLocation(betterproto2_compiler.Message): + path: List[int] = betterproto2_compiler.int32_field(1) """ Identifies which part of the FileDescriptorProto was defined at this location. @@ -1922,7 +1926,7 @@ class SourceCodeInfoLocation(betterproto.Message): of the label to the terminating semicolon). """ - span: List[int] = betterproto.int32_field(2) + span: List[int] = betterproto2_compiler.int32_field(2) """ Always has exactly three or four elements: start line, start column, end line (optional, otherwise assumed same as start line), end column. @@ -1931,7 +1935,7 @@ class SourceCodeInfoLocation(betterproto.Message): 1 to each before displaying to a user. """ - leading_comments: str = betterproto.string_field(3) + leading_comments: str = betterproto2_compiler.string_field(3) """ If this SourceCodeInfo represents a complete declaration, these are any comments appearing before and after the declaration which appear to be @@ -1982,19 +1986,19 @@ class SourceCodeInfoLocation(betterproto.Message): // ignored detached comments. """ - trailing_comments: str = betterproto.string_field(4) - leading_detached_comments: List[str] = betterproto.string_field(6) + trailing_comments: str = betterproto2_compiler.string_field(4) + leading_detached_comments: List[str] = betterproto2_compiler.string_field(6) @dataclass(eq=False, repr=False) -class GeneratedCodeInfo(betterproto.Message): +class GeneratedCodeInfo(betterproto2_compiler.Message): """ Describes the relationship between generated code and its original source file. A GeneratedCodeInfo message is associated with only one generated source file, but may contain references to different source .proto files. """ - annotation: List["GeneratedCodeInfoAnnotation"] = betterproto.message_field(1) + annotation: List["GeneratedCodeInfoAnnotation"] = betterproto2_compiler.message_field(1) """ An Annotation connects some span of text in generated code to an element of its generating .proto file. @@ -2002,36 +2006,36 @@ class GeneratedCodeInfo(betterproto.Message): @dataclass(eq=False, repr=False) -class GeneratedCodeInfoAnnotation(betterproto.Message): - path: List[int] = betterproto.int32_field(1) +class GeneratedCodeInfoAnnotation(betterproto2_compiler.Message): + path: List[int] = betterproto2_compiler.int32_field(1) """ Identifies the element in the original source .proto file. This field is formatted the same as SourceCodeInfo.Location.path. 
""" - source_file: str = betterproto.string_field(2) + source_file: str = betterproto2_compiler.string_field(2) """Identifies the filesystem path to the original source .proto.""" - begin: int = betterproto.int32_field(3) + begin: int = betterproto2_compiler.int32_field(3) """ Identifies the starting offset in bytes in the generated code that relates to the identified object. """ - end: int = betterproto.int32_field(4) + end: int = betterproto2_compiler.int32_field(4) """ Identifies the ending offset in bytes in the generated code that relates to the identified object. The end offset should be one past the last relevant byte (so the length of the text = end - begin). """ - semantic: "GeneratedCodeInfoAnnotationSemantic" = betterproto.enum_field( + semantic: "GeneratedCodeInfoAnnotationSemantic" = betterproto2_compiler.enum_field( 5, enum_default_value=lambda: GeneratedCodeInfoAnnotationSemantic.try_value(0) ) @dataclass(eq=False, repr=False) -class Duration(betterproto.Message): +class Duration(betterproto2_compiler.Message): """ A Duration represents a signed, fixed-length span of time represented as a count of seconds and fractions of seconds at nanosecond @@ -2093,14 +2097,14 @@ class Duration(betterproto.Message): microsecond should be expressed in JSON format as "3.000001s". """ - seconds: int = betterproto.int64_field(1) + seconds: int = betterproto2_compiler.int64_field(1) """ Signed seconds of the span of time. Must be from -315,576,000,000 to +315,576,000,000 inclusive. Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years """ - nanos: int = betterproto.int32_field(2) + nanos: int = betterproto2_compiler.int32_field(2) """ Signed fractions of a second at nanosecond resolution of the span of time. Durations less than one second are represented with a 0 @@ -2112,7 +2116,7 @@ class Duration(betterproto.Message): @dataclass(eq=False, repr=False) -class Empty(betterproto.Message): +class Empty(betterproto2_compiler.Message): """ A generic empty message that you can re-use to avoid defining duplicated empty messages in your APIs. A typical example is to use it as the request @@ -2127,7 +2131,7 @@ class Empty(betterproto.Message): @dataclass(eq=False, repr=False) -class FieldMask(betterproto.Message): +class FieldMask(betterproto2_compiler.Message): """ `FieldMask` represents a set of symbolic field paths, for example: @@ -2330,12 +2334,12 @@ class FieldMask(betterproto.Message): `INVALID_ARGUMENT` error if any path is unmappable. """ - paths: List[str] = betterproto.string_field(1) + paths: List[str] = betterproto2_compiler.string_field(1) """The set of field mask paths.""" @dataclass(eq=False, repr=False) -class Struct(betterproto.Message): +class Struct(betterproto2_compiler.Message): """ `Struct` represents a structured data value, consisting of fields which map to dynamically typed values. In some languages, `Struct` @@ -2347,7 +2351,9 @@ class Struct(betterproto.Message): The JSON representation for `Struct` is JSON object. 
""" - fields: Dict[str, "Value"] = betterproto.map_field(1, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE) + fields: Dict[str, "Value"] = betterproto2_compiler.map_field( + 1, betterproto2_compiler.TYPE_STRING, betterproto2_compiler.TYPE_MESSAGE + ) """Unordered map of dynamically typed values.""" @hybridmethod @@ -2367,7 +2373,7 @@ def from_dict(self, value: Mapping[str, Any]) -> Self: def to_dict( self, - casing: betterproto.Casing = betterproto.Casing.CAMEL, + casing: betterproto2_compiler.Casing = betterproto2_compiler.Casing.CAMEL, include_default_values: bool = False, ) -> Dict[str, Any]: output = {**self.fields} @@ -2378,7 +2384,7 @@ def to_dict( @dataclass(eq=False, repr=False) -class Value(betterproto.Message): +class Value(betterproto2_compiler.Message): """ `Value` represents a dynamically typed value which can be either null, a number, a string, a boolean, a recursive struct value, or a @@ -2388,7 +2394,7 @@ class Value(betterproto.Message): The JSON representation for `Value` is JSON value. """ - null_value: Optional["NullValue"] = betterproto.enum_field( + null_value: Optional["NullValue"] = betterproto2_compiler.enum_field( 1, enum_default_value=lambda: NullValue.try_value(0), optional=True, @@ -2396,19 +2402,19 @@ class Value(betterproto.Message): ) """Represents a null value.""" - number_value: Optional[float] = betterproto.double_field(2, optional=True, group="kind") + number_value: Optional[float] = betterproto2_compiler.double_field(2, optional=True, group="kind") """Represents a double value.""" - string_value: Optional[str] = betterproto.string_field(3, optional=True, group="kind") + string_value: Optional[str] = betterproto2_compiler.string_field(3, optional=True, group="kind") """Represents a string value.""" - bool_value: Optional[bool] = betterproto.bool_field(4, optional=True, group="kind") + bool_value: Optional[bool] = betterproto2_compiler.bool_field(4, optional=True, group="kind") """Represents a boolean value.""" - struct_value: Optional["Struct"] = betterproto.message_field(5, optional=True, group="kind") + struct_value: Optional["Struct"] = betterproto2_compiler.message_field(5, optional=True, group="kind") """Represents a structured value.""" - list_value: Optional["ListValue"] = betterproto.message_field(6, optional=True, group="kind") + list_value: Optional["ListValue"] = betterproto2_compiler.message_field(6, optional=True, group="kind") """Represents a repeated `Value`.""" @model_validator(mode="after") @@ -2417,19 +2423,19 @@ def check_oneof(cls, values): @dataclass(eq=False, repr=False) -class ListValue(betterproto.Message): +class ListValue(betterproto2_compiler.Message): """ `ListValue` is a wrapper around a repeated field of values. The JSON representation for `ListValue` is JSON array. """ - values: List["Value"] = betterproto.message_field(1) + values: List["Value"] = betterproto2_compiler.message_field(1) """Repeated field of dynamically typed values.""" @dataclass(eq=False, repr=False) -class Timestamp(betterproto.Message): +class Timestamp(betterproto2_compiler.Message): """ A Timestamp represents a point in time independent of any time zone or local calendar, encoded as a count of seconds and fractions of seconds at @@ -2522,14 +2528,14 @@ class Timestamp(betterproto.Message): ) to obtain a formatter capable of generating timestamps in this format. """ - seconds: int = betterproto.int64_field(1) + seconds: int = betterproto2_compiler.int64_field(1) """ Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. 
Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. """ - nanos: int = betterproto.int32_field(2) + nanos: int = betterproto2_compiler.int32_field(2) """ Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values @@ -2539,110 +2545,110 @@ class Timestamp(betterproto.Message): @dataclass(eq=False, repr=False) -class DoubleValue(betterproto.Message): +class DoubleValue(betterproto2_compiler.Message): """ Wrapper message for `double`. The JSON representation for `DoubleValue` is JSON number. """ - value: float = betterproto.double_field(1) + value: float = betterproto2_compiler.double_field(1) """The double value.""" @dataclass(eq=False, repr=False) -class FloatValue(betterproto.Message): +class FloatValue(betterproto2_compiler.Message): """ Wrapper message for `float`. The JSON representation for `FloatValue` is JSON number. """ - value: float = betterproto.float_field(1) + value: float = betterproto2_compiler.float_field(1) """The float value.""" @dataclass(eq=False, repr=False) -class Int64Value(betterproto.Message): +class Int64Value(betterproto2_compiler.Message): """ Wrapper message for `int64`. The JSON representation for `Int64Value` is JSON string. """ - value: int = betterproto.int64_field(1) + value: int = betterproto2_compiler.int64_field(1) """The int64 value.""" @dataclass(eq=False, repr=False) -class UInt64Value(betterproto.Message): +class UInt64Value(betterproto2_compiler.Message): """ Wrapper message for `uint64`. The JSON representation for `UInt64Value` is JSON string. """ - value: int = betterproto.uint64_field(1) + value: int = betterproto2_compiler.uint64_field(1) """The uint64 value.""" @dataclass(eq=False, repr=False) -class Int32Value(betterproto.Message): +class Int32Value(betterproto2_compiler.Message): """ Wrapper message for `int32`. The JSON representation for `Int32Value` is JSON number. """ - value: int = betterproto.int32_field(1) + value: int = betterproto2_compiler.int32_field(1) """The int32 value.""" @dataclass(eq=False, repr=False) -class UInt32Value(betterproto.Message): +class UInt32Value(betterproto2_compiler.Message): """ Wrapper message for `uint32`. The JSON representation for `UInt32Value` is JSON number. """ - value: int = betterproto.uint32_field(1) + value: int = betterproto2_compiler.uint32_field(1) """The uint32 value.""" @dataclass(eq=False, repr=False) -class BoolValue(betterproto.Message): +class BoolValue(betterproto2_compiler.Message): """ Wrapper message for `bool`. The JSON representation for `BoolValue` is JSON `true` and `false`. """ - value: bool = betterproto.bool_field(1) + value: bool = betterproto2_compiler.bool_field(1) """The bool value.""" @dataclass(eq=False, repr=False) -class StringValue(betterproto.Message): +class StringValue(betterproto2_compiler.Message): """ Wrapper message for `string`. The JSON representation for `StringValue` is JSON string. """ - value: str = betterproto.string_field(1) + value: str = betterproto2_compiler.string_field(1) """The string value.""" @dataclass(eq=False, repr=False) -class BytesValue(betterproto.Message): +class BytesValue(betterproto2_compiler.Message): """ Wrapper message for `bytes`. The JSON representation for `BytesValue` is JSON string. 
""" - value: bytes = betterproto.bytes_field(1) + value: bytes = betterproto2_compiler.bytes_field(1) """The bytes value.""" diff --git a/src/betterproto/lib/pydantic/google/protobuf/compiler/__init__.py b/src/betterproto2_compiler/lib/pydantic/google/protobuf/compiler/__init__.py similarity index 82% rename from src/betterproto/lib/pydantic/google/protobuf/compiler/__init__.py rename to src/betterproto2_compiler/lib/pydantic/google/protobuf/compiler/__init__.py index 6f167546..c15fa206 100644 --- a/src/betterproto/lib/pydantic/google/protobuf/compiler/__init__.py +++ b/src/betterproto2_compiler/lib/pydantic/google/protobuf/compiler/__init__.py @@ -12,11 +12,11 @@ from typing import List -import betterproto -import betterproto.lib.pydantic.google.protobuf as betterproto_lib_pydantic_google_protobuf +import betterproto2_compiler +import betterproto2_compiler.lib.pydantic.google.protobuf as betterproto_lib_pydantic_google_protobuf -class CodeGeneratorResponseFeature(betterproto.Enum): +class CodeGeneratorResponseFeature(betterproto2_compiler.Enum): """Sync with code_generator.h.""" FEATURE_NONE = 0 @@ -25,13 +25,13 @@ class CodeGeneratorResponseFeature(betterproto.Enum): @dataclass(eq=False, repr=False) -class Version(betterproto.Message): +class Version(betterproto2_compiler.Message): """The version number of protocol compiler.""" - major: int = betterproto.int32_field(1) - minor: int = betterproto.int32_field(2) - patch: int = betterproto.int32_field(3) - suffix: str = betterproto.string_field(4) + major: int = betterproto2_compiler.int32_field(1) + minor: int = betterproto2_compiler.int32_field(2) + patch: int = betterproto2_compiler.int32_field(3) + suffix: str = betterproto2_compiler.string_field(4) """ A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should be empty for mainline stable releases. @@ -39,20 +39,22 @@ class Version(betterproto.Message): @dataclass(eq=False, repr=False) -class CodeGeneratorRequest(betterproto.Message): +class CodeGeneratorRequest(betterproto2_compiler.Message): """An encoded CodeGeneratorRequest is written to the plugin's stdin.""" - file_to_generate: List[str] = betterproto.string_field(1) + file_to_generate: List[str] = betterproto2_compiler.string_field(1) """ The .proto files that were explicitly listed on the command-line. The code generator should generate code only for these files. Each file's descriptor will be included in proto_file, below. """ - parameter: str = betterproto.string_field(2) + parameter: str = betterproto2_compiler.string_field(2) """The generator parameter passed on the command-line.""" - proto_file: List["betterproto_lib_pydantic_google_protobuf.FileDescriptorProto"] = betterproto.message_field(15) + proto_file: List["betterproto_lib_pydantic_google_protobuf.FileDescriptorProto"] = ( + betterproto2_compiler.message_field(15) + ) """ FileDescriptorProtos for all files in files_to_generate and everything they import. The files will appear in topological order, so each file @@ -76,7 +78,7 @@ class CodeGeneratorRequest(betterproto.Message): """ source_file_descriptors: List["betterproto_lib_pydantic_google_protobuf.FileDescriptorProto"] = ( - betterproto.message_field(17) + betterproto2_compiler.message_field(17) ) """ File descriptors with all options, including source-retention options. @@ -84,15 +86,15 @@ class CodeGeneratorRequest(betterproto.Message): files_to_generate. 
""" - compiler_version: "Version" = betterproto.message_field(3) + compiler_version: "Version" = betterproto2_compiler.message_field(3) """The version number of protocol compiler.""" @dataclass(eq=False, repr=False) -class CodeGeneratorResponse(betterproto.Message): +class CodeGeneratorResponse(betterproto2_compiler.Message): """The plugin writes an encoded CodeGeneratorResponse to stdout.""" - error: str = betterproto.string_field(1) + error: str = betterproto2_compiler.string_field(1) """ Error message. If non-empty, code generation failed. The plugin process should exit with status code zero even if it reports an error in this way. @@ -104,13 +106,13 @@ class CodeGeneratorResponse(betterproto.Message): exiting with a non-zero status code. """ - supported_features: int = betterproto.uint64_field(2) + supported_features: int = betterproto2_compiler.uint64_field(2) """ A bitmask of supported features that the code generator supports. This is a bitwise "or" of values from the Feature enum. """ - minimum_edition: int = betterproto.int32_field(3) + minimum_edition: int = betterproto2_compiler.int32_field(3) """ The minimum edition this plugin supports. This will be treated as an Edition enum, but we want to allow unknown values. It should be specified @@ -118,7 +120,7 @@ class CodeGeneratorResponse(betterproto.Message): effect for plugins that have FEATURE_SUPPORTS_EDITIONS set. """ - maximum_edition: int = betterproto.int32_field(4) + maximum_edition: int = betterproto2_compiler.int32_field(4) """ The maximum edition this plugin supports. This will be treated as an Edition enum, but we want to allow unknown values. It should be specified @@ -126,14 +128,14 @@ class CodeGeneratorResponse(betterproto.Message): effect for plugins that have FEATURE_SUPPORTS_EDITIONS set. """ - file: List["CodeGeneratorResponseFile"] = betterproto.message_field(15) + file: List["CodeGeneratorResponseFile"] = betterproto2_compiler.message_field(15) @dataclass(eq=False, repr=False) -class CodeGeneratorResponseFile(betterproto.Message): +class CodeGeneratorResponseFile(betterproto2_compiler.Message): """Represents a single generated file.""" - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """ The file name, relative to the output directory. The name must not contain "." or ".." components and must be relative, not be absolute (so, @@ -148,7 +150,7 @@ class CodeGeneratorResponseFile(betterproto.Message): CodeGeneratorResponse before writing files to disk. """ - insertion_point: str = betterproto.string_field(2) + insertion_point: str = betterproto2_compiler.string_field(2) """ If non-empty, indicates that the named file should already exist, and the content here is to be inserted into that file at a defined insertion @@ -189,10 +191,12 @@ class CodeGeneratorResponseFile(betterproto.Message): If |insertion_point| is present, |name| must also be present. """ - content: str = betterproto.string_field(15) + content: str = betterproto2_compiler.string_field(15) """The file contents.""" - generated_code_info: "betterproto_lib_pydantic_google_protobuf.GeneratedCodeInfo" = betterproto.message_field(16) + generated_code_info: "betterproto_lib_pydantic_google_protobuf.GeneratedCodeInfo" = ( + betterproto2_compiler.message_field(16) + ) """ Information describing the file content being inserted. 
If an insertion point is used, this information will be appropriately offset and inserted diff --git a/src/betterproto/lib/std/__init__.py b/src/betterproto2_compiler/lib/std/__init__.py similarity index 100% rename from src/betterproto/lib/std/__init__.py rename to src/betterproto2_compiler/lib/std/__init__.py diff --git a/src/betterproto/lib/std/google/__init__.py b/src/betterproto2_compiler/lib/std/google/__init__.py similarity index 100% rename from src/betterproto/lib/std/google/__init__.py rename to src/betterproto2_compiler/lib/std/google/__init__.py diff --git a/src/betterproto/lib/std/google/protobuf/__init__.py b/src/betterproto2_compiler/lib/std/google/protobuf/__init__.py similarity index 79% rename from src/betterproto/lib/std/google/protobuf/__init__.py rename to src/betterproto2_compiler/lib/std/google/protobuf/__init__.py index 4bb32789..5045fd7b 100644 --- a/src/betterproto/lib/std/google/protobuf/__init__.py +++ b/src/betterproto2_compiler/lib/std/google/protobuf/__init__.py @@ -83,11 +83,11 @@ from typing_extensions import Self -import betterproto -from betterproto.utils import hybridmethod +import betterproto2_compiler +from betterproto2_compiler.utils import hybridmethod -class Syntax(betterproto.Enum): +class Syntax(betterproto2_compiler.Enum): """The syntax in which a protocol buffer element is defined.""" PROTO2 = 0 @@ -97,7 +97,7 @@ class Syntax(betterproto.Enum): """Syntax `proto3`.""" -class FieldKind(betterproto.Enum): +class FieldKind(betterproto2_compiler.Enum): """Basic field types.""" TYPE_UNKNOWN = 0 @@ -158,7 +158,7 @@ class FieldKind(betterproto.Enum): """Field type sint64.""" -class FieldCardinality(betterproto.Enum): +class FieldCardinality(betterproto2_compiler.Enum): """Whether a field is optional, required, or repeated.""" CARDINALITY_UNKNOWN = 0 @@ -174,7 +174,7 @@ class FieldCardinality(betterproto.Enum): """For repeated fields.""" -class FieldDescriptorProtoType(betterproto.Enum): +class FieldDescriptorProtoType(betterproto2_compiler.Enum): """ """ TYPE_DOUBLE = 1 @@ -266,7 +266,7 @@ class FieldDescriptorProtoType(betterproto.Enum): """Uses ZigZag encoding.""" -class FieldDescriptorProtoLabel(betterproto.Enum): +class FieldDescriptorProtoLabel(betterproto2_compiler.Enum): """ """ LABEL_OPTIONAL = 1 @@ -283,7 +283,7 @@ class FieldDescriptorProtoLabel(betterproto.Enum): """ -class FileOptionsOptimizeMode(betterproto.Enum): +class FileOptionsOptimizeMode(betterproto2_compiler.Enum): """Generated classes can be optimized for speed or code size.""" SPEED = 1 @@ -300,7 +300,7 @@ class FileOptionsOptimizeMode(betterproto.Enum): """Generate code using MessageLite and the lite runtime.""" -class FieldOptionsCType(betterproto.Enum): +class FieldOptionsCType(betterproto2_compiler.Enum): """ """ STRING = 0 @@ -317,7 +317,7 @@ class FieldOptionsCType(betterproto.Enum): """ -class FieldOptionsJsType(betterproto.Enum): +class FieldOptionsJsType(betterproto2_compiler.Enum): """ """ JS_NORMAL = 0 @@ -330,7 +330,7 @@ class FieldOptionsJsType(betterproto.Enum): """Use JavaScript numbers.""" -class MethodOptionsIdempotencyLevel(betterproto.Enum): +class MethodOptionsIdempotencyLevel(betterproto2_compiler.Enum): """ Is this method side-effect-free (or safe in HTTP parlance), or idempotent, or neither? 
HTTP based RPC implementation may choose GET verb for safe @@ -349,7 +349,7 @@ class MethodOptionsIdempotencyLevel(betterproto.Enum): """idempotent, but may have side effects""" -class NullValue(betterproto.Enum): +class NullValue(betterproto2_compiler.Enum): """ `NullValue` is a singleton enumeration to represent the null value for the `Value` type union. @@ -362,7 +362,7 @@ class NullValue(betterproto.Enum): @dataclass(eq=False, repr=False) -class Any(betterproto.Message): +class Any(betterproto2_compiler.Message): """ `Any` contains an arbitrary serialized protocol buffer message along with a URL that describes the type of the serialized message. @@ -444,7 +444,7 @@ class Any(betterproto.Message): } """ - type_url: str = betterproto.string_field(1) + type_url: str = betterproto2_compiler.string_field(1) """ A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least @@ -475,20 +475,20 @@ class Any(betterproto.Message): used with implementation specific semantics. """ - value: bytes = betterproto.bytes_field(2) + value: bytes = betterproto2_compiler.bytes_field(2) """ Must be a valid serialized protocol buffer of the above specified type. """ @dataclass(eq=False, repr=False) -class SourceContext(betterproto.Message): +class SourceContext(betterproto2_compiler.Message): """ `SourceContext` represents information about the source of a protobuf element, like the file in which it is defined. """ - file_name: str = betterproto.string_field(1) + file_name: str = betterproto2_compiler.string_field(1) """ The path-qualified name of the .proto file that contained the associated protobuf element. For example: `"google/protobuf/source_context.proto"`. @@ -496,115 +496,117 @@ class SourceContext(betterproto.Message): @dataclass(eq=False, repr=False) -class Type(betterproto.Message): +class Type(betterproto2_compiler.Message): """A protocol buffer message type.""" - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """The fully qualified message name.""" - fields: List["Field"] = betterproto.message_field(2, repeated=True) + fields: List["Field"] = betterproto2_compiler.message_field(2, repeated=True) """The list of fields.""" - oneofs: List[str] = betterproto.string_field(3, repeated=True) + oneofs: List[str] = betterproto2_compiler.string_field(3, repeated=True) """The list of types appearing in `oneof` definitions in this type.""" - options: List["Option"] = betterproto.message_field(4, repeated=True) + options: List["Option"] = betterproto2_compiler.message_field(4, repeated=True) """The protocol buffer options.""" - source_context: "SourceContext" = betterproto.message_field(5) + source_context: "SourceContext" = betterproto2_compiler.message_field(5) """The source context.""" - syntax: "Syntax" = betterproto.enum_field(6, enum_default_value=lambda: Syntax.try_value(0)) + syntax: "Syntax" = betterproto2_compiler.enum_field(6, enum_default_value=lambda: Syntax.try_value(0)) """The source syntax.""" @dataclass(eq=False, repr=False) -class Field(betterproto.Message): +class Field(betterproto2_compiler.Message): """A single field of a message type.""" - kind: "FieldKind" = betterproto.enum_field(1, enum_default_value=lambda: FieldKind.try_value(0)) + kind: "FieldKind" = betterproto2_compiler.enum_field(1, enum_default_value=lambda: FieldKind.try_value(0)) """The field type.""" - cardinality: "FieldCardinality" = betterproto.enum_field( + cardinality: "FieldCardinality" = 
betterproto2_compiler.enum_field( 2, enum_default_value=lambda: FieldCardinality.try_value(0) ) """The field cardinality.""" - number: int = betterproto.int32_field(3) + number: int = betterproto2_compiler.int32_field(3) """The field number.""" - name: str = betterproto.string_field(4) + name: str = betterproto2_compiler.string_field(4) """The field name.""" - type_url: str = betterproto.string_field(6) + type_url: str = betterproto2_compiler.string_field(6) """ The field type URL, without the scheme, for message or enumeration types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`. """ - oneof_index: int = betterproto.int32_field(7) + oneof_index: int = betterproto2_compiler.int32_field(7) """ The index of the field type in `Type.oneofs`, for message or enumeration types. The first type has index 1; zero means the type is not in the list. """ - packed: bool = betterproto.bool_field(8) + packed: bool = betterproto2_compiler.bool_field(8) """Whether to use alternative packed wire representation.""" - options: List["Option"] = betterproto.message_field(9, repeated=True) + options: List["Option"] = betterproto2_compiler.message_field(9, repeated=True) """The protocol buffer options.""" - json_name: str = betterproto.string_field(10) + json_name: str = betterproto2_compiler.string_field(10) """The field JSON name.""" - default_value: str = betterproto.string_field(11) + default_value: str = betterproto2_compiler.string_field(11) """ The string value of the default value of this field. Proto2 syntax only. """ @dataclass(eq=False, repr=False) -class Enum(betterproto.Message): +class Enum(betterproto2_compiler.Message): """Enum type definition.""" - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """Enum type name.""" - enumvalue: List["EnumValue"] = betterproto.message_field(2, wraps=betterproto.TYPE_ENUM, repeated=True) + enumvalue: List["EnumValue"] = betterproto2_compiler.message_field( + 2, wraps=betterproto2_compiler.TYPE_ENUM, repeated=True + ) """Enum value definitions.""" - options: List["Option"] = betterproto.message_field(3, repeated=True) + options: List["Option"] = betterproto2_compiler.message_field(3, repeated=True) """Protocol buffer options.""" - source_context: "SourceContext" = betterproto.message_field(4) + source_context: "SourceContext" = betterproto2_compiler.message_field(4) """The source context.""" - syntax: "Syntax" = betterproto.enum_field(5, enum_default_value=lambda: Syntax.try_value(0)) + syntax: "Syntax" = betterproto2_compiler.enum_field(5, enum_default_value=lambda: Syntax.try_value(0)) """The source syntax.""" @dataclass(eq=False, repr=False) -class EnumValue(betterproto.Message): +class EnumValue(betterproto2_compiler.Message): """Enum value definition.""" - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """Enum value name.""" - number: int = betterproto.int32_field(2) + number: int = betterproto2_compiler.int32_field(2) """Enum value number.""" - options: List["Option"] = betterproto.message_field(3, repeated=True) + options: List["Option"] = betterproto2_compiler.message_field(3, repeated=True) """Protocol buffer options.""" @dataclass(eq=False, repr=False) -class Option(betterproto.Message): +class Option(betterproto2_compiler.Message): """ A protocol buffer option, which can be attached to a message, field, enumeration, etc. """ - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """ The option's name. 
For protobuf built-in options (options defined in descriptor.proto), this is the short name. For example, `"map_entry"`. @@ -612,7 +614,7 @@ class Option(betterproto.Message): `"google.api.http"`. """ - value: "Any" = betterproto.message_field(2) + value: "Any" = betterproto2_compiler.message_field(2) """ The option's value packed in an Any message. If the value is a primitive, the corresponding wrapper type defined in google/protobuf/wrappers.proto @@ -622,7 +624,7 @@ class Option(betterproto.Message): @dataclass(eq=False, repr=False) -class Api(betterproto.Message): +class Api(betterproto2_compiler.Message): """ Api is a light-weight descriptor for an API Interface. @@ -635,19 +637,19 @@ class Api(betterproto.Message): detailed terminology. """ - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """ The fully qualified name of this interface, including package name followed by the interface's simple name. """ - methods: List["Method"] = betterproto.message_field(2, repeated=True) + methods: List["Method"] = betterproto2_compiler.message_field(2, repeated=True) """The methods of this interface, in unspecified order.""" - options: List["Option"] = betterproto.message_field(3, repeated=True) + options: List["Option"] = betterproto2_compiler.message_field(3, repeated=True) """Any metadata attached to the interface.""" - version: str = betterproto.string_field(4) + version: str = betterproto2_compiler.string_field(4) """ A version string for this interface. If specified, must have the form `major-version.minor-version`, as in `1.10`. If the minor version is @@ -670,47 +672,47 @@ class Api(betterproto.Message): experimental, non-GA interfaces. """ - source_context: "SourceContext" = betterproto.message_field(5) + source_context: "SourceContext" = betterproto2_compiler.message_field(5) """ Source context for the protocol buffer service represented by this message. """ - mixins: List["Mixin"] = betterproto.message_field(6, repeated=True) + mixins: List["Mixin"] = betterproto2_compiler.message_field(6, repeated=True) """Included interfaces. 
See [Mixin][].""" - syntax: "Syntax" = betterproto.enum_field(7, enum_default_value=lambda: Syntax.try_value(0)) + syntax: "Syntax" = betterproto2_compiler.enum_field(7, enum_default_value=lambda: Syntax.try_value(0)) """The source syntax of the service.""" @dataclass(eq=False, repr=False) -class Method(betterproto.Message): +class Method(betterproto2_compiler.Message): """Method represents a method of an API interface.""" - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """The simple name of this method.""" - request_type_url: str = betterproto.string_field(2) + request_type_url: str = betterproto2_compiler.string_field(2) """A URL of the input message type.""" - request_streaming: bool = betterproto.bool_field(3) + request_streaming: bool = betterproto2_compiler.bool_field(3) """If true, the request is streamed.""" - response_type_url: str = betterproto.string_field(4) + response_type_url: str = betterproto2_compiler.string_field(4) """The URL of the output message type.""" - response_streaming: bool = betterproto.bool_field(5) + response_streaming: bool = betterproto2_compiler.bool_field(5) """If true, the response is streamed.""" - options: List["Option"] = betterproto.message_field(6, repeated=True) + options: List["Option"] = betterproto2_compiler.message_field(6, repeated=True) """Any metadata attached to the method.""" - syntax: "Syntax" = betterproto.enum_field(7, enum_default_value=lambda: Syntax.try_value(0)) + syntax: "Syntax" = betterproto2_compiler.enum_field(7, enum_default_value=lambda: Syntax.try_value(0)) """The source syntax of this method.""" @dataclass(eq=False, repr=False) -class Mixin(betterproto.Message): +class Mixin(betterproto2_compiler.Message): """ Declares an API Interface to be included in this interface. The including interface must redeclare all the methods from the included interface, but @@ -792,10 +794,10 @@ class Mixin(betterproto.Message): } """ - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """The fully qualified name of the interface which is included.""" - root: str = betterproto.string_field(2) + root: str = betterproto2_compiler.string_field(2) """ If non-empty specifies a path under which inherited HTTP paths are rooted. @@ -803,64 +805,64 @@ class Mixin(betterproto.Message): @dataclass(eq=False, repr=False) -class FileDescriptorSet(betterproto.Message): +class FileDescriptorSet(betterproto2_compiler.Message): """ The protocol compiler can output a FileDescriptorSet containing the .proto files it parses. """ - file: List["FileDescriptorProto"] = betterproto.message_field(1, repeated=True) + file: List["FileDescriptorProto"] = betterproto2_compiler.message_field(1, repeated=True) """ """ @dataclass(eq=False, repr=False) -class FileDescriptorProto(betterproto.Message): +class FileDescriptorProto(betterproto2_compiler.Message): """Describes a complete .proto file.""" - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """file name, relative to root of source tree""" - package: str = betterproto.string_field(2) + package: str = betterproto2_compiler.string_field(2) """e.g. 
"foo", "foo.bar", etc.""" - dependency: List[str] = betterproto.string_field(3, repeated=True) + dependency: List[str] = betterproto2_compiler.string_field(3, repeated=True) """Names of files imported by this file.""" - public_dependency: List[int] = betterproto.int32_field(10, repeated=True) + public_dependency: List[int] = betterproto2_compiler.int32_field(10, repeated=True) """Indexes of the public imported files in the dependency list above.""" - weak_dependency: List[int] = betterproto.int32_field(11, repeated=True) + weak_dependency: List[int] = betterproto2_compiler.int32_field(11, repeated=True) """ Indexes of the weak imported files in the dependency list. For Google-internal migration only. Do not use. """ - message_type: List["DescriptorProto"] = betterproto.message_field(4, repeated=True) + message_type: List["DescriptorProto"] = betterproto2_compiler.message_field(4, repeated=True) """All top-level definitions in this file.""" - enum_type: List["EnumDescriptorProto"] = betterproto.message_field(5, repeated=True) + enum_type: List["EnumDescriptorProto"] = betterproto2_compiler.message_field(5, repeated=True) """ """ - service: List["ServiceDescriptorProto"] = betterproto.message_field(6, repeated=True) + service: List["ServiceDescriptorProto"] = betterproto2_compiler.message_field(6, repeated=True) """ """ - extension: List["FieldDescriptorProto"] = betterproto.message_field(7, repeated=True) + extension: List["FieldDescriptorProto"] = betterproto2_compiler.message_field(7, repeated=True) """ """ - options: "FileOptions" = betterproto.message_field(8) + options: "FileOptions" = betterproto2_compiler.message_field(8) """ """ - source_code_info: "SourceCodeInfo" = betterproto.message_field(9) + source_code_info: "SourceCodeInfo" = betterproto2_compiler.message_field(9) """ This field contains optional information about the original source code. You may safely remove this entire field without harming runtime @@ -868,7 +870,7 @@ class FileDescriptorProto(betterproto.Message): development tools. """ - syntax: str = betterproto.string_field(12) + syntax: str = betterproto2_compiler.string_field(12) """ The syntax of the proto file. The supported values are "proto2" and "proto3". 
@@ -876,55 +878,55 @@ class FileDescriptorProto(betterproto.Message): @dataclass(eq=False, repr=False) -class DescriptorProto(betterproto.Message): +class DescriptorProto(betterproto2_compiler.Message): """Describes a message type.""" - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """ """ - field: List["FieldDescriptorProto"] = betterproto.message_field(2, repeated=True) + field: List["FieldDescriptorProto"] = betterproto2_compiler.message_field(2, repeated=True) """ """ - extension: List["FieldDescriptorProto"] = betterproto.message_field(6, repeated=True) + extension: List["FieldDescriptorProto"] = betterproto2_compiler.message_field(6, repeated=True) """ """ - nested_type: List["DescriptorProto"] = betterproto.message_field(3, repeated=True) + nested_type: List["DescriptorProto"] = betterproto2_compiler.message_field(3, repeated=True) """ """ - enum_type: List["EnumDescriptorProto"] = betterproto.message_field(4, repeated=True) + enum_type: List["EnumDescriptorProto"] = betterproto2_compiler.message_field(4, repeated=True) """ """ - extension_range: List["DescriptorProtoExtensionRange"] = betterproto.message_field(5, repeated=True) + extension_range: List["DescriptorProtoExtensionRange"] = betterproto2_compiler.message_field(5, repeated=True) """ """ - oneof_decl: List["OneofDescriptorProto"] = betterproto.message_field(8, repeated=True) + oneof_decl: List["OneofDescriptorProto"] = betterproto2_compiler.message_field(8, repeated=True) """ """ - options: "MessageOptions" = betterproto.message_field(7) + options: "MessageOptions" = betterproto2_compiler.message_field(7) """ """ - reserved_range: List["DescriptorProtoReservedRange"] = betterproto.message_field(9, repeated=True) + reserved_range: List["DescriptorProtoReservedRange"] = betterproto2_compiler.message_field(9, repeated=True) """ """ - reserved_name: List[str] = betterproto.string_field(10, repeated=True) + reserved_name: List[str] = betterproto2_compiler.string_field(10, repeated=True) """ Reserved field names, which may not be used by fields in the same message. A given name may only be reserved once. @@ -932,66 +934,66 @@ class DescriptorProto(betterproto.Message): @dataclass(eq=False, repr=False) -class DescriptorProtoExtensionRange(betterproto.Message): +class DescriptorProtoExtensionRange(betterproto2_compiler.Message): """ """ - start: int = betterproto.int32_field(1) + start: int = betterproto2_compiler.int32_field(1) """Inclusive.""" - end: int = betterproto.int32_field(2) + end: int = betterproto2_compiler.int32_field(2) """Exclusive.""" - options: "ExtensionRangeOptions" = betterproto.message_field(3) + options: "ExtensionRangeOptions" = betterproto2_compiler.message_field(3) """ """ @dataclass(eq=False, repr=False) -class DescriptorProtoReservedRange(betterproto.Message): +class DescriptorProtoReservedRange(betterproto2_compiler.Message): """ Range of reserved tag numbers. Reserved tag numbers may not be used by fields or extension ranges in the same message. Reserved ranges may not overlap. 
""" - start: int = betterproto.int32_field(1) + start: int = betterproto2_compiler.int32_field(1) """Inclusive.""" - end: int = betterproto.int32_field(2) + end: int = betterproto2_compiler.int32_field(2) """Exclusive.""" @dataclass(eq=False, repr=False) -class ExtensionRangeOptions(betterproto.Message): +class ExtensionRangeOptions(betterproto2_compiler.Message): """ """ - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999, repeated=True) + uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999, repeated=True) """The parser stores options it doesn't recognize here. See above.""" @dataclass(eq=False, repr=False) -class FieldDescriptorProto(betterproto.Message): +class FieldDescriptorProto(betterproto2_compiler.Message): """Describes a field within a message.""" - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """ """ - number: int = betterproto.int32_field(3) + number: int = betterproto2_compiler.int32_field(3) """ """ - label: "FieldDescriptorProtoLabel" = betterproto.enum_field( + label: "FieldDescriptorProtoLabel" = betterproto2_compiler.enum_field( 4, enum_default_value=lambda: FieldDescriptorProtoLabel.try_value(0) ) """ """ - type: "FieldDescriptorProtoType" = betterproto.enum_field( + type: "FieldDescriptorProtoType" = betterproto2_compiler.enum_field( 5, enum_default_value=lambda: FieldDescriptorProtoType.try_value(0) ) """ @@ -999,7 +1001,7 @@ class FieldDescriptorProto(betterproto.Message): are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. """ - type_name: str = betterproto.string_field(6) + type_name: str = betterproto2_compiler.string_field(6) """ For message and enum types, this is the name of the type. If the name starts with a '.', it is fully-qualified. Otherwise, C++-like scoping @@ -1008,13 +1010,13 @@ class FieldDescriptorProto(betterproto.Message): namespace). """ - extendee: str = betterproto.string_field(2) + extendee: str = betterproto2_compiler.string_field(2) """ For extensions, this is the name of the type being extended. It is resolved in the same manner as type_name. """ - default_value: str = betterproto.string_field(7) + default_value: str = betterproto2_compiler.string_field(7) """ For numeric types, contains the original text representation of the value. For booleans, "true" or "false". @@ -1023,13 +1025,13 @@ class FieldDescriptorProto(betterproto.Message): TODO(kenton): Base-64 encode? """ - oneof_index: Optional[int] = betterproto.int32_field(9, optional=True) + oneof_index: Optional[int] = betterproto2_compiler.int32_field(9, optional=True) """ If set, gives the index of a oneof in the containing type's oneof_decl list. This field is a member of that oneof. """ - json_name: str = betterproto.string_field(10) + json_name: str = betterproto2_compiler.string_field(10) """ JSON name of this field. The value is set by protocol compiler. If the user has set a "json_name" option on this field, that option's value @@ -1037,12 +1039,12 @@ class FieldDescriptorProto(betterproto.Message): it to camelCase. """ - options: "FieldOptions" = betterproto.message_field(8) + options: "FieldOptions" = betterproto2_compiler.message_field(8) """ """ - proto3_optional: bool = betterproto.bool_field(17) + proto3_optional: bool = betterproto2_compiler.bool_field(17) """ If true, this is a proto3 "optional". When a proto3 field is optional, it tracks presence regardless of field type. 
@@ -1069,47 +1071,47 @@ class FieldDescriptorProto(betterproto.Message): @dataclass(eq=False, repr=False) -class OneofDescriptorProto(betterproto.Message): +class OneofDescriptorProto(betterproto2_compiler.Message): """Describes a oneof.""" - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """ """ - options: "OneofOptions" = betterproto.message_field(2) + options: "OneofOptions" = betterproto2_compiler.message_field(2) """ """ @dataclass(eq=False, repr=False) -class EnumDescriptorProto(betterproto.Message): +class EnumDescriptorProto(betterproto2_compiler.Message): """Describes an enum type.""" - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """ """ - value: List["EnumValueDescriptorProto"] = betterproto.message_field(2, repeated=True) + value: List["EnumValueDescriptorProto"] = betterproto2_compiler.message_field(2, repeated=True) """ """ - options: "EnumOptions" = betterproto.message_field(3) + options: "EnumOptions" = betterproto2_compiler.message_field(3) """ """ - reserved_range: List["EnumDescriptorProtoEnumReservedRange"] = betterproto.message_field(4, repeated=True) + reserved_range: List["EnumDescriptorProtoEnumReservedRange"] = betterproto2_compiler.message_field(4, repeated=True) """ Range of reserved numeric values. Reserved numeric values may not be used by enum values in the same enum declaration. Reserved ranges may not overlap. """ - reserved_name: List[str] = betterproto.string_field(5, repeated=True) + reserved_name: List[str] = betterproto2_compiler.string_field(5, repeated=True) """ Reserved enum value names, which may not be reused. A given name may only be reserved once. @@ -1117,7 +1119,7 @@ class EnumDescriptorProto(betterproto.Message): @dataclass(eq=False, repr=False) -class EnumDescriptorProtoEnumReservedRange(betterproto.Message): +class EnumDescriptorProtoEnumReservedRange(betterproto2_compiler.Message): """ Range of reserved numeric values. Reserved values may not be used by entries in the same enum. Reserved ranges may not overlap. @@ -1127,87 +1129,87 @@ class EnumDescriptorProtoEnumReservedRange(betterproto.Message): domain. 
""" - start: int = betterproto.int32_field(1) + start: int = betterproto2_compiler.int32_field(1) """Inclusive.""" - end: int = betterproto.int32_field(2) + end: int = betterproto2_compiler.int32_field(2) """Inclusive.""" @dataclass(eq=False, repr=False) -class EnumValueDescriptorProto(betterproto.Message): +class EnumValueDescriptorProto(betterproto2_compiler.Message): """Describes a value within an enum.""" - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """ """ - number: int = betterproto.int32_field(2) + number: int = betterproto2_compiler.int32_field(2) """ """ - options: "EnumValueOptions" = betterproto.message_field(3) + options: "EnumValueOptions" = betterproto2_compiler.message_field(3) """ """ @dataclass(eq=False, repr=False) -class ServiceDescriptorProto(betterproto.Message): +class ServiceDescriptorProto(betterproto2_compiler.Message): """Describes a service.""" - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """ """ - method: List["MethodDescriptorProto"] = betterproto.message_field(2, repeated=True) + method: List["MethodDescriptorProto"] = betterproto2_compiler.message_field(2, repeated=True) """ """ - options: "ServiceOptions" = betterproto.message_field(3) + options: "ServiceOptions" = betterproto2_compiler.message_field(3) """ """ @dataclass(eq=False, repr=False) -class MethodDescriptorProto(betterproto.Message): +class MethodDescriptorProto(betterproto2_compiler.Message): """Describes a method of a service.""" - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """ """ - input_type: str = betterproto.string_field(2) + input_type: str = betterproto2_compiler.string_field(2) """ Input and output type names. These are resolved in the same way as FieldDescriptorProto.type_name, but must refer to a message type. """ - output_type: str = betterproto.string_field(3) + output_type: str = betterproto2_compiler.string_field(3) """ """ - options: "MethodOptions" = betterproto.message_field(4) + options: "MethodOptions" = betterproto2_compiler.message_field(4) """ """ - client_streaming: bool = betterproto.bool_field(5) + client_streaming: bool = betterproto2_compiler.bool_field(5) """Identifies if client streams multiple client messages""" - server_streaming: bool = betterproto.bool_field(6) + server_streaming: bool = betterproto2_compiler.bool_field(6) """Identifies if server streams multiple server messages""" @dataclass(eq=False, repr=False) -class FileOptions(betterproto.Message): +class FileOptions(betterproto2_compiler.Message): """ =================================================================== Options @@ -1242,7 +1244,7 @@ class FileOptions(betterproto.Message): to automatically assign option numbers. """ - java_package: str = betterproto.string_field(1) + java_package: str = betterproto2_compiler.string_field(1) """ Sets the Java package where classes generated from this .proto will be placed. By default, the proto package is used, but this is often @@ -1250,7 +1252,7 @@ class FileOptions(betterproto.Message): domain names. """ - java_outer_classname: str = betterproto.string_field(8) + java_outer_classname: str = betterproto2_compiler.string_field(8) """ If set, all the classes from the .proto file are wrapped in a single outer class with the given name. This applies to both Proto1 @@ -1259,7 +1261,7 @@ class FileOptions(betterproto.Message): explicitly choose the class name). 
""" - java_multiple_files: bool = betterproto.bool_field(10) + java_multiple_files: bool = betterproto2_compiler.bool_field(10) """ If set true, then the Java code generator will generate a separate .java file for each top-level message, enum, and service defined in the .proto @@ -1269,10 +1271,10 @@ class FileOptions(betterproto.Message): top-level extensions defined in the file. """ - java_generate_equals_and_hash: bool = betterproto.bool_field(20) + java_generate_equals_and_hash: bool = betterproto2_compiler.bool_field(20) """This option does nothing.""" - java_string_check_utf8: bool = betterproto.bool_field(27) + java_string_check_utf8: bool = betterproto2_compiler.bool_field(27) """ If set true, then the Java2 code generator will generate code that throws an exception whenever an attempt is made to assign a non-UTF-8 @@ -1282,14 +1284,14 @@ class FileOptions(betterproto.Message): This option has no effect on when used with the lite runtime. """ - optimize_for: "FileOptionsOptimizeMode" = betterproto.enum_field( + optimize_for: "FileOptionsOptimizeMode" = betterproto2_compiler.enum_field( 9, enum_default_value=lambda: FileOptionsOptimizeMode.try_value(0) ) """ """ - go_package: str = betterproto.string_field(11) + go_package: str = betterproto2_compiler.string_field(11) """ Sets the Go package where structs generated from this .proto will be placed. If omitted, the Go package will be derived from the following: @@ -1298,7 +1300,7 @@ class FileOptions(betterproto.Message): - Otherwise, the basename of the .proto file, without extension. """ - cc_generic_services: bool = betterproto.bool_field(16) + cc_generic_services: bool = betterproto2_compiler.bool_field(16) """ Should generic services be generated in each language? "Generic" services are not specific to any particular RPC system. They are generated by the @@ -1312,22 +1314,22 @@ class FileOptions(betterproto.Message): explicitly set them to true. """ - java_generic_services: bool = betterproto.bool_field(17) + java_generic_services: bool = betterproto2_compiler.bool_field(17) """ """ - py_generic_services: bool = betterproto.bool_field(18) + py_generic_services: bool = betterproto2_compiler.bool_field(18) """ """ - php_generic_services: bool = betterproto.bool_field(42) + php_generic_services: bool = betterproto2_compiler.bool_field(42) """ """ - deprecated: bool = betterproto.bool_field(23) + deprecated: bool = betterproto2_compiler.bool_field(23) """ Is this file deprecated? Depending on the target platform, this can emit Deprecated annotations @@ -1335,22 +1337,22 @@ class FileOptions(betterproto.Message): least, this is a formalization for deprecating files. """ - cc_enable_arenas: bool = betterproto.bool_field(31) + cc_enable_arenas: bool = betterproto2_compiler.bool_field(31) """ Enables the use of arenas for the proto messages in this file. This applies only to generated classes for C++. """ - objc_class_prefix: str = betterproto.string_field(36) + objc_class_prefix: str = betterproto2_compiler.string_field(36) """ Sets the objective c class prefix which is prepended to all objective c generated classes from this .proto. There is no default. 
""" - csharp_namespace: str = betterproto.string_field(37) + csharp_namespace: str = betterproto2_compiler.string_field(37) """Namespace for generated classes; defaults to the package.""" - swift_prefix: str = betterproto.string_field(39) + swift_prefix: str = betterproto2_compiler.string_field(39) """ By default Swift generators will take the proto package and CamelCase it replacing '.' with underscore and use that to prefix the types/symbols @@ -1358,34 +1360,34 @@ class FileOptions(betterproto.Message): to prefix the types/symbols defined. """ - php_class_prefix: str = betterproto.string_field(40) + php_class_prefix: str = betterproto2_compiler.string_field(40) """ Sets the php class prefix which is prepended to all php generated classes from this .proto. Default is empty. """ - php_namespace: str = betterproto.string_field(41) + php_namespace: str = betterproto2_compiler.string_field(41) """ Use this option to change the namespace of php generated classes. Default is empty. When this option is empty, the package name will be used for determining the namespace. """ - php_metadata_namespace: str = betterproto.string_field(44) + php_metadata_namespace: str = betterproto2_compiler.string_field(44) """ Use this option to change the namespace of php generated metadata classes. Default is empty. When this option is empty, the proto file name will be used for determining the namespace. """ - ruby_package: str = betterproto.string_field(45) + ruby_package: str = betterproto2_compiler.string_field(45) """ Use this option to change the package of ruby generated classes. Default is empty. When this option is not set, the package name will be used for determining the ruby package. """ - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999, repeated=True) + uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999, repeated=True) """ The parser stores options it doesn't recognize here. See the documentation for the "Options" section above. @@ -1401,10 +1403,10 @@ def __post_init__(self) -> None: @dataclass(eq=False, repr=False) -class MessageOptions(betterproto.Message): +class MessageOptions(betterproto2_compiler.Message): """ """ - message_set_wire_format: bool = betterproto.bool_field(1) + message_set_wire_format: bool = betterproto2_compiler.bool_field(1) """ Set true to use the old proto1 MessageSet wire format for extensions. This is provided for backwards-compatibility with the MessageSet wire @@ -1426,14 +1428,14 @@ class MessageOptions(betterproto.Message): the protocol compiler. """ - no_standard_descriptor_accessor: bool = betterproto.bool_field(2) + no_standard_descriptor_accessor: bool = betterproto2_compiler.bool_field(2) """ Disables the generation of the standard "descriptor()" accessor, which can conflict with a field of the same name. This is meant to make migration from proto1 easier; new code should avoid fields named "descriptor". """ - deprecated: bool = betterproto.bool_field(3) + deprecated: bool = betterproto2_compiler.bool_field(3) """ Is this message deprecated? Depending on the target platform, this can emit Deprecated annotations @@ -1441,7 +1443,7 @@ class MessageOptions(betterproto.Message): this is a formalization for deprecating messages. """ - map_entry: bool = betterproto.bool_field(7) + map_entry: bool = betterproto2_compiler.bool_field(7) """ Whether the message is an automatically generated map entry type for the maps field. @@ -1466,15 +1468,17 @@ class MessageOptions(betterproto.Message): parser. 
""" - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999, repeated=True) + uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999, repeated=True) """The parser stores options it doesn't recognize here. See above.""" @dataclass(eq=False, repr=False) -class FieldOptions(betterproto.Message): +class FieldOptions(betterproto2_compiler.Message): """ """ - ctype: "FieldOptionsCType" = betterproto.enum_field(1, enum_default_value=lambda: FieldOptionsCType.try_value(0)) + ctype: "FieldOptionsCType" = betterproto2_compiler.enum_field( + 1, enum_default_value=lambda: FieldOptionsCType.try_value(0) + ) """ The ctype option instructs the C++ code generator to use a different representation of the field than it normally would. See the specific @@ -1482,7 +1486,7 @@ class FieldOptions(betterproto.Message): release -- sorry, we'll try to include it in a future version! """ - packed: bool = betterproto.bool_field(2) + packed: bool = betterproto2_compiler.bool_field(2) """ The packed option can be enabled for repeated primitive fields to enable a more efficient representation on the wire. Rather than repeatedly @@ -1491,7 +1495,9 @@ class FieldOptions(betterproto.Message): false will avoid using packed encoding. """ - jstype: "FieldOptionsJsType" = betterproto.enum_field(6, enum_default_value=lambda: FieldOptionsJsType.try_value(0)) + jstype: "FieldOptionsJsType" = betterproto2_compiler.enum_field( + 6, enum_default_value=lambda: FieldOptionsJsType.try_value(0) + ) """ The jstype option determines the JavaScript type used for values of the field. The option is permitted only for 64 bit integral and fixed types @@ -1506,7 +1512,7 @@ class FieldOptions(betterproto.Message): goog.math.Integer. """ - lazy: bool = betterproto.bool_field(5) + lazy: bool = betterproto2_compiler.bool_field(5) """ Should this field be parsed lazily? Lazy applies only to message-type fields. It means that when the outer message is initially parsed, the @@ -1537,7 +1543,7 @@ class FieldOptions(betterproto.Message): been parsed. """ - deprecated: bool = betterproto.bool_field(3) + deprecated: bool = betterproto2_compiler.bool_field(3) """ Is this field deprecated? Depending on the target platform, this can emit Deprecated annotations @@ -1545,32 +1551,32 @@ class FieldOptions(betterproto.Message): is a formalization for deprecating fields. """ - weak: bool = betterproto.bool_field(10) + weak: bool = betterproto2_compiler.bool_field(10) """For Google-internal migration only. Do not use.""" - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999, repeated=True) + uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999, repeated=True) """The parser stores options it doesn't recognize here. See above.""" @dataclass(eq=False, repr=False) -class OneofOptions(betterproto.Message): +class OneofOptions(betterproto2_compiler.Message): """ """ - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999, repeated=True) + uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999, repeated=True) """The parser stores options it doesn't recognize here. 
See above.""" @dataclass(eq=False, repr=False) -class EnumOptions(betterproto.Message): +class EnumOptions(betterproto2_compiler.Message): """ """ - allow_alias: bool = betterproto.bool_field(2) + allow_alias: bool = betterproto2_compiler.bool_field(2) """ Set this option to true to allow mapping different tag names to the same value. """ - deprecated: bool = betterproto.bool_field(3) + deprecated: bool = betterproto2_compiler.bool_field(3) """ Is this enum deprecated? Depending on the target platform, this can emit Deprecated annotations @@ -1578,15 +1584,15 @@ class EnumOptions(betterproto.Message): is a formalization for deprecating enums. """ - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999, repeated=True) + uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999, repeated=True) """The parser stores options it doesn't recognize here. See above.""" @dataclass(eq=False, repr=False) -class EnumValueOptions(betterproto.Message): +class EnumValueOptions(betterproto2_compiler.Message): """ """ - deprecated: bool = betterproto.bool_field(1) + deprecated: bool = betterproto2_compiler.bool_field(1) """ Is this enum value deprecated? Depending on the target platform, this can emit Deprecated annotations @@ -1594,15 +1600,15 @@ class EnumValueOptions(betterproto.Message): this is a formalization for deprecating enum values. """ - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999, repeated=True) + uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999, repeated=True) """The parser stores options it doesn't recognize here. See above.""" @dataclass(eq=False, repr=False) -class ServiceOptions(betterproto.Message): +class ServiceOptions(betterproto2_compiler.Message): """ """ - deprecated: bool = betterproto.bool_field(33) + deprecated: bool = betterproto2_compiler.bool_field(33) """ Note: Field numbers 1 through 32 are reserved for Google's internal RPC framework. We apologize for hoarding these numbers to ourselves, but @@ -1615,15 +1621,15 @@ class ServiceOptions(betterproto.Message): this is a formalization for deprecating services. """ - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999, repeated=True) + uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999, repeated=True) """The parser stores options it doesn't recognize here. See above.""" @dataclass(eq=False, repr=False) -class MethodOptions(betterproto.Message): +class MethodOptions(betterproto2_compiler.Message): """ """ - deprecated: bool = betterproto.bool_field(33) + deprecated: bool = betterproto2_compiler.bool_field(33) """ Note: Field numbers 1 through 32 are reserved for Google's internal RPC framework. We apologize for hoarding these numbers to ourselves, but @@ -1636,19 +1642,19 @@ class MethodOptions(betterproto.Message): this is a formalization for deprecating methods. """ - idempotency_level: "MethodOptionsIdempotencyLevel" = betterproto.enum_field( + idempotency_level: "MethodOptionsIdempotencyLevel" = betterproto2_compiler.enum_field( 34, enum_default_value=lambda: MethodOptionsIdempotencyLevel.try_value(0) ) """ """ - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999, repeated=True) + uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999, repeated=True) """The parser stores options it doesn't recognize here. 
See above.""" @dataclass(eq=False, repr=False) -class UninterpretedOption(betterproto.Message): +class UninterpretedOption(betterproto2_compiler.Message): """ A message representing a option the parser does not recognize. This only appears in options protos created by the compiler::Parser class. @@ -1658,45 +1664,45 @@ class UninterpretedOption(betterproto.Message): in them. """ - name: List["UninterpretedOptionNamePart"] = betterproto.message_field(2, repeated=True) + name: List["UninterpretedOptionNamePart"] = betterproto2_compiler.message_field(2, repeated=True) """ """ - identifier_value: str = betterproto.string_field(3) + identifier_value: str = betterproto2_compiler.string_field(3) """ The value of the uninterpreted option, in whatever type the tokenizer identified it as during parsing. Exactly one of these should be set. """ - positive_int_value: int = betterproto.uint64_field(4) + positive_int_value: int = betterproto2_compiler.uint64_field(4) """ """ - negative_int_value: int = betterproto.int64_field(5) + negative_int_value: int = betterproto2_compiler.int64_field(5) """ """ - double_value: float = betterproto.double_field(6) + double_value: float = betterproto2_compiler.double_field(6) """ """ - string_value: bytes = betterproto.bytes_field(7) + string_value: bytes = betterproto2_compiler.bytes_field(7) """ """ - aggregate_value: str = betterproto.string_field(8) + aggregate_value: str = betterproto2_compiler.string_field(8) """ """ @dataclass(eq=False, repr=False) -class UninterpretedOptionNamePart(betterproto.Message): +class UninterpretedOptionNamePart(betterproto2_compiler.Message): """ The name of the uninterpreted option. Each string represents a segment in a dot-separated name. is_extension is true iff a segment represents an @@ -1705,19 +1711,19 @@ class UninterpretedOptionNamePart(betterproto.Message): "foo.(bar.baz).qux". """ - name_part: str = betterproto.string_field(1) + name_part: str = betterproto2_compiler.string_field(1) """ """ - is_extension: bool = betterproto.bool_field(2) + is_extension: bool = betterproto2_compiler.bool_field(2) """ """ @dataclass(eq=False, repr=False) -class SourceCodeInfo(betterproto.Message): +class SourceCodeInfo(betterproto2_compiler.Message): """ =================================================================== Optional source code info @@ -1726,7 +1732,7 @@ class SourceCodeInfo(betterproto.Message): FileDescriptorProto was generated. """ - location: List["SourceCodeInfoLocation"] = betterproto.message_field(1, repeated=True) + location: List["SourceCodeInfoLocation"] = betterproto2_compiler.message_field(1, repeated=True) """ A Location identifies a piece of source code in a .proto file which corresponds to a particular definition. This information is intended @@ -1775,10 +1781,10 @@ class SourceCodeInfo(betterproto.Message): @dataclass(eq=False, repr=False) -class SourceCodeInfoLocation(betterproto.Message): +class SourceCodeInfoLocation(betterproto2_compiler.Message): """ """ - path: List[int] = betterproto.int32_field(1, repeated=True) + path: List[int] = betterproto2_compiler.int32_field(1, repeated=True) """ Identifies which part of the FileDescriptorProto was defined at this location. @@ -1805,7 +1811,7 @@ class SourceCodeInfoLocation(betterproto.Message): of the label to the terminating semicolon). 
""" - span: List[int] = betterproto.int32_field(2, repeated=True) + span: List[int] = betterproto2_compiler.int32_field(2, repeated=True) """ Always has exactly three or four elements: start line, start column, end line (optional, otherwise assumed same as start line), end column. @@ -1814,7 +1820,7 @@ class SourceCodeInfoLocation(betterproto.Message): 1 to each before displaying to a user. """ - leading_comments: str = betterproto.string_field(3) + leading_comments: str = betterproto2_compiler.string_field(3) """ If this SourceCodeInfo represents a complete declaration, these are any comments appearing before and after the declaration which appear to be @@ -1865,26 +1871,26 @@ class SourceCodeInfoLocation(betterproto.Message): // ignored detached comments. """ - trailing_comments: str = betterproto.string_field(4) + trailing_comments: str = betterproto2_compiler.string_field(4) """ """ - leading_detached_comments: List[str] = betterproto.string_field(6, repeated=True) + leading_detached_comments: List[str] = betterproto2_compiler.string_field(6, repeated=True) """ """ @dataclass(eq=False, repr=False) -class GeneratedCodeInfo(betterproto.Message): +class GeneratedCodeInfo(betterproto2_compiler.Message): """ Describes the relationship between generated code and its original source file. A GeneratedCodeInfo message is associated with only one generated source file, but may contain references to different source .proto files. """ - annotation: List["GeneratedCodeInfoAnnotation"] = betterproto.message_field(1, repeated=True) + annotation: List["GeneratedCodeInfoAnnotation"] = betterproto2_compiler.message_field(1, repeated=True) """ An Annotation connects some span of text in generated code to an element of its generating .proto file. @@ -1892,25 +1898,25 @@ class GeneratedCodeInfo(betterproto.Message): @dataclass(eq=False, repr=False) -class GeneratedCodeInfoAnnotation(betterproto.Message): +class GeneratedCodeInfoAnnotation(betterproto2_compiler.Message): """ """ - path: List[int] = betterproto.int32_field(1, repeated=True) + path: List[int] = betterproto2_compiler.int32_field(1, repeated=True) """ Identifies the element in the original source .proto file. This field is formatted the same as SourceCodeInfo.Location.path. """ - source_file: str = betterproto.string_field(2) + source_file: str = betterproto2_compiler.string_field(2) """Identifies the filesystem path to the original source .proto.""" - begin: int = betterproto.int32_field(3) + begin: int = betterproto2_compiler.int32_field(3) """ Identifies the starting offset in bytes in the generated code that relates to the identified object. """ - end: int = betterproto.int32_field(4) + end: int = betterproto2_compiler.int32_field(4) """ Identifies the ending offset in bytes in the generated code that relates to the identified offset. The end offset should be one past @@ -1919,7 +1925,7 @@ class GeneratedCodeInfoAnnotation(betterproto.Message): @dataclass(eq=False, repr=False) -class Duration(betterproto.Message): +class Duration(betterproto2_compiler.Message): """ A Duration represents a signed, fixed-length span of time represented as a count of seconds and fractions of seconds at nanosecond @@ -1981,14 +1987,14 @@ class Duration(betterproto.Message): microsecond should be expressed in JSON format as "3.000001s". """ - seconds: int = betterproto.int64_field(1) + seconds: int = betterproto2_compiler.int64_field(1) """ Signed seconds of the span of time. Must be from -315,576,000,000 to +315,576,000,000 inclusive. 
Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years """ - nanos: int = betterproto.int32_field(2) + nanos: int = betterproto2_compiler.int32_field(2) """ Signed fractions of a second at nanosecond resolution of the span of time. Durations less than one second are represented with a 0 @@ -2000,7 +2006,7 @@ class Duration(betterproto.Message): @dataclass(eq=False, repr=False) -class Empty(betterproto.Message): +class Empty(betterproto2_compiler.Message): """ A generic empty message that you can re-use to avoid defining duplicated empty messages in your APIs. A typical example is to use it as the request @@ -2017,7 +2023,7 @@ class Empty(betterproto.Message): @dataclass(eq=False, repr=False) -class FieldMask(betterproto.Message): +class FieldMask(betterproto2_compiler.Message): """ `FieldMask` represents a set of symbolic field paths, for example: @@ -2219,12 +2225,12 @@ class FieldMask(betterproto.Message): `INVALID_ARGUMENT` error if any path is unmappable. """ - paths: List[str] = betterproto.string_field(1, repeated=True) + paths: List[str] = betterproto2_compiler.string_field(1, repeated=True) """The set of field mask paths.""" @dataclass(eq=False, repr=False) -class Struct(betterproto.Message): +class Struct(betterproto2_compiler.Message): """ `Struct` represents a structured data value, consisting of fields which map to dynamically typed values. In some languages, `Struct` @@ -2236,7 +2242,9 @@ class Struct(betterproto.Message): The JSON representation for `Struct` is JSON object. """ - fields: Dict[str, "Value"] = betterproto.map_field(1, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE) + fields: Dict[str, "Value"] = betterproto2_compiler.map_field( + 1, betterproto2_compiler.TYPE_STRING, betterproto2_compiler.TYPE_MESSAGE + ) """Unordered map of dynamically typed values.""" @hybridmethod @@ -2256,7 +2264,7 @@ def from_dict(self, value: Mapping[str, Any]) -> Self: def to_dict( self, - casing: betterproto.Casing = betterproto.Casing.CAMEL, + casing: betterproto2_compiler.Casing = betterproto2_compiler.Casing.CAMEL, include_default_values: bool = False, ) -> Dict[str, Any]: output = {**self.fields} @@ -2267,7 +2275,7 @@ def to_dict( @dataclass(eq=False, repr=False) -class Value(betterproto.Message): +class Value(betterproto2_compiler.Message): """ `Value` represents a dynamically typed value which can be either null, a number, a string, a boolean, a recursive struct value, or a @@ -2277,39 +2285,41 @@ class Value(betterproto.Message): The JSON representation for `Value` is JSON value. 
""" - null_value: "NullValue" = betterproto.enum_field(1, enum_default_value=lambda: NullValue.try_value(0), group="kind") + null_value: "NullValue" = betterproto2_compiler.enum_field( + 1, enum_default_value=lambda: NullValue.try_value(0), group="kind" + ) """Represents a null value.""" - number_value: float = betterproto.double_field(2, group="kind") + number_value: float = betterproto2_compiler.double_field(2, group="kind") """Represents a double value.""" - string_value: str = betterproto.string_field(3, group="kind") + string_value: str = betterproto2_compiler.string_field(3, group="kind") """Represents a string value.""" - bool_value: bool = betterproto.bool_field(4, group="kind") + bool_value: bool = betterproto2_compiler.bool_field(4, group="kind") """Represents a boolean value.""" - struct_value: "Struct" = betterproto.message_field(5, group="kind") + struct_value: "Struct" = betterproto2_compiler.message_field(5, group="kind") """Represents a structured value.""" - list_value: "ListValue" = betterproto.message_field(6, group="kind") + list_value: "ListValue" = betterproto2_compiler.message_field(6, group="kind") """Represents a repeated `Value`.""" @dataclass(eq=False, repr=False) -class ListValue(betterproto.Message): +class ListValue(betterproto2_compiler.Message): """ `ListValue` is a wrapper around a repeated field of values. The JSON representation for `ListValue` is JSON array. """ - values: List["Value"] = betterproto.message_field(1, repeated=True) + values: List["Value"] = betterproto2_compiler.message_field(1, repeated=True) """Repeated field of dynamically typed values.""" @dataclass(eq=False, repr=False) -class Timestamp(betterproto.Message): +class Timestamp(betterproto2_compiler.Message): """ A Timestamp represents a point in time independent of any time zone or local calendar, encoded as a count of seconds and fractions of seconds at @@ -2394,14 +2404,14 @@ class Timestamp(betterproto.Message): ) to obtain a formatter capable of generating timestamps in this format. """ - seconds: int = betterproto.int64_field(1) + seconds: int = betterproto2_compiler.int64_field(1) """ Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. """ - nanos: int = betterproto.int32_field(2) + nanos: int = betterproto2_compiler.int32_field(2) """ Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values @@ -2411,108 +2421,108 @@ class Timestamp(betterproto.Message): @dataclass(eq=False, repr=False) -class DoubleValue(betterproto.Message): +class DoubleValue(betterproto2_compiler.Message): """ Wrapper message for `double`. The JSON representation for `DoubleValue` is JSON number. """ - value: float = betterproto.double_field(1) + value: float = betterproto2_compiler.double_field(1) """The double value.""" @dataclass(eq=False, repr=False) -class FloatValue(betterproto.Message): +class FloatValue(betterproto2_compiler.Message): """ Wrapper message for `float`. The JSON representation for `FloatValue` is JSON number. """ - value: float = betterproto.float_field(1) + value: float = betterproto2_compiler.float_field(1) """The float value.""" @dataclass(eq=False, repr=False) -class Int64Value(betterproto.Message): +class Int64Value(betterproto2_compiler.Message): """ Wrapper message for `int64`. The JSON representation for `Int64Value` is JSON string. 
""" - value: int = betterproto.int64_field(1) + value: int = betterproto2_compiler.int64_field(1) """The int64 value.""" @dataclass(eq=False, repr=False) -class UInt64Value(betterproto.Message): +class UInt64Value(betterproto2_compiler.Message): """ Wrapper message for `uint64`. The JSON representation for `UInt64Value` is JSON string. """ - value: int = betterproto.uint64_field(1) + value: int = betterproto2_compiler.uint64_field(1) """The uint64 value.""" @dataclass(eq=False, repr=False) -class Int32Value(betterproto.Message): +class Int32Value(betterproto2_compiler.Message): """ Wrapper message for `int32`. The JSON representation for `Int32Value` is JSON number. """ - value: int = betterproto.int32_field(1) + value: int = betterproto2_compiler.int32_field(1) """The int32 value.""" @dataclass(eq=False, repr=False) -class UInt32Value(betterproto.Message): +class UInt32Value(betterproto2_compiler.Message): """ Wrapper message for `uint32`. The JSON representation for `UInt32Value` is JSON number. """ - value: int = betterproto.uint32_field(1) + value: int = betterproto2_compiler.uint32_field(1) """The uint32 value.""" @dataclass(eq=False, repr=False) -class BoolValue(betterproto.Message): +class BoolValue(betterproto2_compiler.Message): """ Wrapper message for `bool`. The JSON representation for `BoolValue` is JSON `true` and `false`. """ - value: bool = betterproto.bool_field(1) + value: bool = betterproto2_compiler.bool_field(1) """The bool value.""" @dataclass(eq=False, repr=False) -class StringValue(betterproto.Message): +class StringValue(betterproto2_compiler.Message): """ Wrapper message for `string`. The JSON representation for `StringValue` is JSON string. """ - value: str = betterproto.string_field(1) + value: str = betterproto2_compiler.string_field(1) """The string value.""" @dataclass(eq=False, repr=False) -class BytesValue(betterproto.Message): +class BytesValue(betterproto2_compiler.Message): """ Wrapper message for `bytes`. The JSON representation for `BytesValue` is JSON string. 
""" - value: bytes = betterproto.bytes_field(1) + value: bytes = betterproto2_compiler.bytes_field(1) """The bytes value.""" diff --git a/src/betterproto/lib/std/google/protobuf/compiler/__init__.py b/src/betterproto2_compiler/lib/std/google/protobuf/compiler/__init__.py similarity index 82% rename from src/betterproto/lib/std/google/protobuf/compiler/__init__.py rename to src/betterproto2_compiler/lib/std/google/protobuf/compiler/__init__.py index 3455a19a..33087afc 100644 --- a/src/betterproto/lib/std/google/protobuf/compiler/__init__.py +++ b/src/betterproto2_compiler/lib/std/google/protobuf/compiler/__init__.py @@ -6,11 +6,11 @@ from dataclasses import dataclass from typing import List -import betterproto -import betterproto.lib.google.protobuf as betterproto_lib_google_protobuf +import betterproto2_compiler +import betterproto2_compiler.lib.google.protobuf as betterproto_lib_google_protobuf -class CodeGeneratorResponseFeature(betterproto.Enum): +class CodeGeneratorResponseFeature(betterproto2_compiler.Enum): """Sync with code_generator.h.""" FEATURE_NONE = 0 @@ -19,13 +19,13 @@ class CodeGeneratorResponseFeature(betterproto.Enum): @dataclass(eq=False, repr=False) -class Version(betterproto.Message): +class Version(betterproto2_compiler.Message): """The version number of protocol compiler.""" - major: int = betterproto.int32_field(1) - minor: int = betterproto.int32_field(2) - patch: int = betterproto.int32_field(3) - suffix: str = betterproto.string_field(4) + major: int = betterproto2_compiler.int32_field(1) + minor: int = betterproto2_compiler.int32_field(2) + patch: int = betterproto2_compiler.int32_field(3) + suffix: str = betterproto2_compiler.string_field(4) """ A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should be empty for mainline stable releases. @@ -33,20 +33,20 @@ class Version(betterproto.Message): @dataclass(eq=False, repr=False) -class CodeGeneratorRequest(betterproto.Message): +class CodeGeneratorRequest(betterproto2_compiler.Message): """An encoded CodeGeneratorRequest is written to the plugin's stdin.""" - file_to_generate: List[str] = betterproto.string_field(1, repeated=True) + file_to_generate: List[str] = betterproto2_compiler.string_field(1, repeated=True) """ The .proto files that were explicitly listed on the command-line. The code generator should generate code only for these files. Each file's descriptor will be included in proto_file, below. """ - parameter: str = betterproto.string_field(2) + parameter: str = betterproto2_compiler.string_field(2) """The generator parameter passed on the command-line.""" - proto_file: List["betterproto_lib_google_protobuf.FileDescriptorProto"] = betterproto.message_field( + proto_file: List["betterproto_lib_google_protobuf.FileDescriptorProto"] = betterproto2_compiler.message_field( 15, repeated=True ) """ @@ -71,8 +71,8 @@ class CodeGeneratorRequest(betterproto.Message): fully qualified. """ - source_file_descriptors: List["betterproto_lib_google_protobuf.FileDescriptorProto"] = betterproto.message_field( - 17, repeated=True + source_file_descriptors: List["betterproto_lib_google_protobuf.FileDescriptorProto"] = ( + betterproto2_compiler.message_field(17, repeated=True) ) """ File descriptors with all options, including source-retention options. @@ -80,15 +80,15 @@ class CodeGeneratorRequest(betterproto.Message): files_to_generate. 
""" - compiler_version: "Version" = betterproto.message_field(3) + compiler_version: "Version" = betterproto2_compiler.message_field(3) """The version number of protocol compiler.""" @dataclass(eq=False, repr=False) -class CodeGeneratorResponse(betterproto.Message): +class CodeGeneratorResponse(betterproto2_compiler.Message): """The plugin writes an encoded CodeGeneratorResponse to stdout.""" - error: str = betterproto.string_field(1) + error: str = betterproto2_compiler.string_field(1) """ Error message. If non-empty, code generation failed. The plugin process should exit with status code zero even if it reports an error in this way. @@ -100,13 +100,13 @@ class CodeGeneratorResponse(betterproto.Message): exiting with a non-zero status code. """ - supported_features: int = betterproto.uint64_field(2) + supported_features: int = betterproto2_compiler.uint64_field(2) """ A bitmask of supported features that the code generator supports. This is a bitwise "or" of values from the Feature enum. """ - minimum_edition: int = betterproto.int32_field(3) + minimum_edition: int = betterproto2_compiler.int32_field(3) """ The minimum edition this plugin supports. This will be treated as an Edition enum, but we want to allow unknown values. It should be specified @@ -114,7 +114,7 @@ class CodeGeneratorResponse(betterproto.Message): effect for plugins that have FEATURE_SUPPORTS_EDITIONS set. """ - maximum_edition: int = betterproto.int32_field(4) + maximum_edition: int = betterproto2_compiler.int32_field(4) """ The maximum edition this plugin supports. This will be treated as an Edition enum, but we want to allow unknown values. It should be specified @@ -122,14 +122,14 @@ class CodeGeneratorResponse(betterproto.Message): effect for plugins that have FEATURE_SUPPORTS_EDITIONS set. """ - file: List["CodeGeneratorResponseFile"] = betterproto.message_field(15, repeated=True) + file: List["CodeGeneratorResponseFile"] = betterproto2_compiler.message_field(15, repeated=True) @dataclass(eq=False, repr=False) -class CodeGeneratorResponseFile(betterproto.Message): +class CodeGeneratorResponseFile(betterproto2_compiler.Message): """Represents a single generated file.""" - name: str = betterproto.string_field(1) + name: str = betterproto2_compiler.string_field(1) """ The file name, relative to the output directory. The name must not contain "." or ".." components and must be relative, not be absolute (so, @@ -144,7 +144,7 @@ class CodeGeneratorResponseFile(betterproto.Message): CodeGeneratorResponse before writing files to disk. """ - insertion_point: str = betterproto.string_field(2) + insertion_point: str = betterproto2_compiler.string_field(2) """ If non-empty, indicates that the named file should already exist, and the content here is to be inserted into that file at a defined insertion @@ -185,10 +185,10 @@ class CodeGeneratorResponseFile(betterproto.Message): If |insertion_point| is present, |name| must also be present. """ - content: str = betterproto.string_field(15) + content: str = betterproto2_compiler.string_field(15) """The file contents.""" - generated_code_info: "betterproto_lib_google_protobuf.GeneratedCodeInfo" = betterproto.message_field(16) + generated_code_info: "betterproto_lib_google_protobuf.GeneratedCodeInfo" = betterproto2_compiler.message_field(16) """ Information describing the file content being inserted. 
If an insertion point is used, this information will be appropriately offset and inserted diff --git a/src/betterproto/plugin/__init__.py b/src/betterproto2_compiler/plugin/__init__.py similarity index 100% rename from src/betterproto/plugin/__init__.py rename to src/betterproto2_compiler/plugin/__init__.py diff --git a/src/betterproto/plugin/__main__.py b/src/betterproto2_compiler/plugin/__main__.py similarity index 100% rename from src/betterproto/plugin/__main__.py rename to src/betterproto2_compiler/plugin/__main__.py diff --git a/src/betterproto/plugin/compiler.py b/src/betterproto2_compiler/plugin/compiler.py similarity index 100% rename from src/betterproto/plugin/compiler.py rename to src/betterproto2_compiler/plugin/compiler.py diff --git a/src/betterproto/plugin/main.py b/src/betterproto2_compiler/plugin/main.py similarity index 91% rename from src/betterproto/plugin/main.py rename to src/betterproto2_compiler/plugin/main.py index c9dcc617..eaa3ef56 100755 --- a/src/betterproto/plugin/main.py +++ b/src/betterproto2_compiler/plugin/main.py @@ -3,12 +3,12 @@ import os import sys -from betterproto.lib.google.protobuf.compiler import ( +from betterproto2_compiler.lib.google.protobuf.compiler import ( CodeGeneratorRequest, ) # from betterproto.plugin.models import monkey_patch_oneof_index -from betterproto.plugin.parser import generate_code +from betterproto2_compiler.plugin.parser import generate_code def main() -> None: diff --git a/src/betterproto/plugin/models.py b/src/betterproto2_compiler/plugin/models.py similarity index 98% rename from src/betterproto/plugin/models.py rename to src/betterproto2_compiler/plugin/models.py index 1dfbc789..f228c8e1 100644 --- a/src/betterproto/plugin/models.py +++ b/src/betterproto2_compiler/plugin/models.py @@ -46,13 +46,13 @@ Union, ) -import betterproto -from betterproto.compile.naming import ( +import betterproto2_compiler +from betterproto2_compiler.compile.naming import ( pythonize_class_name, pythonize_field_name, pythonize_method_name, ) -from betterproto.lib.google.protobuf import ( +from betterproto2_compiler.lib.google.protobuf import ( DescriptorProto, EnumDescriptorProto, FieldDescriptorProto, @@ -61,7 +61,7 @@ FileDescriptorProto, MethodDescriptorProto, ) -from betterproto.lib.google.protobuf.compiler import CodeGeneratorRequest +from betterproto2_compiler.lib.google.protobuf.compiler import CodeGeneratorRequest from ..compile.importing import get_type_reference, parse_source_type_name from ..compile.naming import ( @@ -182,7 +182,7 @@ class ProtoContentBase: source_file: FileDescriptorProto typing_compiler: TypingCompiler path: List[int] - parent: Union["betterproto.Message", "OutputTemplate"] + parent: Union["betterproto2_compiler.Message", "OutputTemplate"] __dataclass_fields__: Dict[str, object] @@ -420,7 +420,7 @@ def field_wraps(self) -> Optional[str]: match_wrapper = re.match(r"\.google\.protobuf\.(.+)Value$", self.proto_obj.type_name) if match_wrapper: wrapped_type = "TYPE_" + match_wrapper.group(1).upper() - if hasattr(betterproto, wrapped_type): + if hasattr(betterproto2_compiler, wrapped_type): return f"betterproto.{wrapped_type}" return None diff --git a/src/betterproto/plugin/module_validation.py b/src/betterproto2_compiler/plugin/module_validation.py similarity index 100% rename from src/betterproto/plugin/module_validation.py rename to src/betterproto2_compiler/plugin/module_validation.py diff --git a/src/betterproto/plugin/parser.py b/src/betterproto2_compiler/plugin/parser.py similarity index 98% rename from 
src/betterproto/plugin/parser.py rename to src/betterproto2_compiler/plugin/parser.py index 6c1f7ba1..38456b78 100644 --- a/src/betterproto/plugin/parser.py +++ b/src/betterproto2_compiler/plugin/parser.py @@ -8,14 +8,14 @@ Union, ) -from betterproto.lib.google.protobuf import ( +from betterproto2_compiler.lib.google.protobuf import ( DescriptorProto, EnumDescriptorProto, FieldDescriptorProto, FileDescriptorProto, ServiceDescriptorProto, ) -from betterproto.lib.google.protobuf.compiler import ( +from betterproto2_compiler.lib.google.protobuf.compiler import ( CodeGeneratorRequest, CodeGeneratorResponse, CodeGeneratorResponseFeature, diff --git a/src/betterproto/plugin/plugin.bat b/src/betterproto2_compiler/plugin/plugin.bat similarity index 100% rename from src/betterproto/plugin/plugin.bat rename to src/betterproto2_compiler/plugin/plugin.bat diff --git a/src/betterproto/plugin/typing_compiler.py b/src/betterproto2_compiler/plugin/typing_compiler.py similarity index 100% rename from src/betterproto/plugin/typing_compiler.py rename to src/betterproto2_compiler/plugin/typing_compiler.py diff --git a/src/betterproto/py.typed b/src/betterproto2_compiler/py.typed similarity index 100% rename from src/betterproto/py.typed rename to src/betterproto2_compiler/py.typed diff --git a/src/betterproto/templates/header.py.j2 b/src/betterproto2_compiler/templates/header.py.j2 similarity index 100% rename from src/betterproto/templates/header.py.j2 rename to src/betterproto2_compiler/templates/header.py.j2 diff --git a/src/betterproto/templates/template.py.j2 b/src/betterproto2_compiler/templates/template.py.j2 similarity index 100% rename from src/betterproto/templates/template.py.j2 rename to src/betterproto2_compiler/templates/template.py.j2 diff --git a/src/betterproto/utils.py b/src/betterproto2_compiler/utils.py similarity index 100% rename from src/betterproto/utils.py rename to src/betterproto2_compiler/utils.py diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/generate.py b/tests/generate.py new file mode 100644 index 00000000..67dad859 --- /dev/null +++ b/tests/generate.py @@ -0,0 +1,172 @@ +#!/usr/bin/env python +import asyncio +import os +import shutil +import sys +from pathlib import Path +from typing import Set + +from tests.util import ( + get_directories, + inputs_path, + output_path_betterproto, + output_path_betterproto_pydantic, + output_path_reference, + protoc, +) + +# Force pure-python implementation instead of C++, otherwise imports +# break things because we can't properly reset the symbol database. 
+os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python" + + +def clear_directory(dir_path: Path): + for file_or_directory in dir_path.glob("*"): + if file_or_directory.is_dir(): + shutil.rmtree(file_or_directory) + else: + file_or_directory.unlink() + + +async def generate(whitelist: Set[str], verbose: bool): + test_case_names = set(get_directories(inputs_path)) - {"__pycache__"} + + path_whitelist = set() + name_whitelist = set() + for item in whitelist: + if item in test_case_names: + name_whitelist.add(item) + continue + path_whitelist.add(item) + + generation_tasks = [] + for test_case_name in sorted(test_case_names): + test_case_input_path = inputs_path.joinpath(test_case_name).resolve() + if whitelist and str(test_case_input_path) not in path_whitelist and test_case_name not in name_whitelist: + continue + generation_tasks.append(generate_test_case_output(test_case_input_path, test_case_name, verbose)) + + failed_test_cases = [] + # Wait for all subprocs and match any failures to names to report + for test_case_name, result in zip(sorted(test_case_names), await asyncio.gather(*generation_tasks)): + if result != 0: + failed_test_cases.append(test_case_name) + + if len(failed_test_cases) > 0: + sys.stderr.write("\n\033[31;1;4mFailed to generate the following test cases:\033[0m\n") + for failed_test_case in failed_test_cases: + sys.stderr.write(f"- {failed_test_case}\n") + + sys.exit(1) + + +async def generate_test_case_output(test_case_input_path: Path, test_case_name: str, verbose: bool) -> int: + """ + Returns the max of the subprocess return values + """ + + test_case_output_path_reference = output_path_reference.joinpath(test_case_name) + test_case_output_path_betterproto = output_path_betterproto + test_case_output_path_betterproto_pyd = output_path_betterproto_pydantic + + os.makedirs(test_case_output_path_reference, exist_ok=True) + os.makedirs(test_case_output_path_betterproto, exist_ok=True) + os.makedirs(test_case_output_path_betterproto_pyd, exist_ok=True) + + clear_directory(test_case_output_path_reference) + clear_directory(test_case_output_path_betterproto) + + ( + (ref_out, ref_err, ref_code), + (plg_out, plg_err, plg_code), + (plg_out_pyd, plg_err_pyd, plg_code_pyd), + ) = await asyncio.gather( + protoc(test_case_input_path, test_case_output_path_reference, True), + protoc(test_case_input_path, test_case_output_path_betterproto, False), + protoc(test_case_input_path, test_case_output_path_betterproto_pyd, False, True), + ) + + if ref_code == 0: + print(f"\033[31;1;4mGenerated reference output for {test_case_name!r}\033[0m") + else: + print(f"\033[31;1;4mFailed to generate reference output for {test_case_name!r}\033[0m") + print(ref_err.decode()) + + if verbose: + if ref_out: + print("Reference stdout:") + sys.stdout.buffer.write(ref_out) + sys.stdout.buffer.flush() + + if ref_err: + print("Reference stderr:") + sys.stderr.buffer.write(ref_err) + sys.stderr.buffer.flush() + + if plg_code == 0: + print(f"\033[31;1;4mGenerated plugin output for {test_case_name!r}\033[0m") + else: + print(f"\033[31;1;4mFailed to generate plugin output for {test_case_name!r}\033[0m") + print(plg_err.decode()) + + if verbose: + if plg_out: + print("Plugin stdout:") + sys.stdout.buffer.write(plg_out) + sys.stdout.buffer.flush() + + if plg_err: + print("Plugin stderr:") + sys.stderr.buffer.write(plg_err) + sys.stderr.buffer.flush() + + if plg_code_pyd == 0: + print(f"\033[31;1;4mGenerated plugin (pydantic compatible) output for {test_case_name!r}\033[0m") + else: + 
print(f"\033[31;1;4mFailed to generate plugin (pydantic compatible) output for {test_case_name!r}\033[0m") + print(plg_err_pyd.decode()) + + if verbose: + if plg_out_pyd: + print("Plugin stdout:") + sys.stdout.buffer.write(plg_out_pyd) + sys.stdout.buffer.flush() + + if plg_err_pyd: + print("Plugin stderr:") + sys.stderr.buffer.write(plg_err_pyd) + sys.stderr.buffer.flush() + + return max(ref_code, plg_code, plg_code_pyd) + + +HELP = "\n".join( + ( + "Usage: python generate.py [-h] [-v] [DIRECTORIES or NAMES]", + "Generate python classes for standard tests.", + "", + "DIRECTORIES One or more relative or absolute directories of test-cases to generate" "classes for.", + " python generate.py inputs/bool inputs/double inputs/enum", + "", + "NAMES One or more test-case names to generate classes for.", + " python generate.py bool double enums", + ) +) + + +def main(): + if set(sys.argv).intersection({"-h", "--help"}): + print(HELP) + return + if sys.argv[1:2] == ["-v"]: + verbose = True + whitelist = set(sys.argv[2:]) + else: + verbose = False + whitelist = set(sys.argv[1:]) + + asyncio.run(generate(whitelist, verbose)) + + +if __name__ == "__main__": + main() diff --git a/tests/inputs/bool/bool.json b/tests/inputs/bool/bool.json new file mode 100644 index 00000000..348e0319 --- /dev/null +++ b/tests/inputs/bool/bool.json @@ -0,0 +1,3 @@ +{ + "value": true +} diff --git a/tests/inputs/bool/bool.proto b/tests/inputs/bool/bool.proto new file mode 100644 index 00000000..77836b8e --- /dev/null +++ b/tests/inputs/bool/bool.proto @@ -0,0 +1,7 @@ +syntax = "proto3"; + +package bool; + +message Test { + bool value = 1; +} diff --git a/tests/inputs/bool/test_bool.py b/tests/inputs/bool/test_bool.py new file mode 100644 index 00000000..6b0ad0be --- /dev/null +++ b/tests/inputs/bool/test_bool.py @@ -0,0 +1,24 @@ +import pytest + +from tests.output_betterproto.bool import Test +from tests.output_betterproto_pydantic.bool import Test as TestPyd + + +def test_value(): + message = Test() + assert not message.value, "Boolean is False by default" + + +def test_pydantic_no_value(): + message = TestPyd() + assert not message.value, "Boolean is False by default" + + +def test_pydantic_value(): + message = TestPyd(value=False) + assert not message.value + + +def test_pydantic_bad_value(): + with pytest.raises(ValueError): + TestPyd(value=123) diff --git a/tests/inputs/bytes/bytes.json b/tests/inputs/bytes/bytes.json new file mode 100644 index 00000000..34c4554c --- /dev/null +++ b/tests/inputs/bytes/bytes.json @@ -0,0 +1,3 @@ +{ + "data": "SGVsbG8sIFdvcmxkIQ==" +} diff --git a/tests/inputs/bytes/bytes.proto b/tests/inputs/bytes/bytes.proto new file mode 100644 index 00000000..98954685 --- /dev/null +++ b/tests/inputs/bytes/bytes.proto @@ -0,0 +1,7 @@ +syntax = "proto3"; + +package bytes; + +message Test { + bytes data = 1; +} diff --git a/tests/inputs/casing/casing.json b/tests/inputs/casing/casing.json new file mode 100644 index 00000000..559104b1 --- /dev/null +++ b/tests/inputs/casing/casing.json @@ -0,0 +1,4 @@ +{ + "camelCase": 1, + "snakeCase": "ONE" +} diff --git a/tests/inputs/casing/casing.proto b/tests/inputs/casing/casing.proto new file mode 100644 index 00000000..2023d934 --- /dev/null +++ b/tests/inputs/casing/casing.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; + +package casing; + +enum my_enum { + ZERO = 0; + ONE = 1; + TWO = 2; +} + +message Test { + int32 camelCase = 1; + my_enum snake_case = 2; + snake_case_message snake_case_message = 3; + int32 UPPERCASE = 4; +} + +message snake_case_message { + +} 
\ No newline at end of file diff --git a/tests/inputs/casing/test_casing.py b/tests/inputs/casing/test_casing.py new file mode 100644 index 00000000..feee009a --- /dev/null +++ b/tests/inputs/casing/test_casing.py @@ -0,0 +1,17 @@ +import tests.output_betterproto.casing as casing +from tests.output_betterproto.casing import Test + + +def test_message_attributes(): + message = Test() + assert hasattr(message, "snake_case_message"), "snake_case field name is same in python" + assert hasattr(message, "camel_case"), "CamelCase field is snake_case in python" + assert hasattr(message, "uppercase"), "UPPERCASE field is lowercase in python" + + +def test_message_casing(): + assert hasattr(casing, "SnakeCaseMessage"), "snake_case Message name is converted to CamelCase in python" + + +def test_enum_casing(): + assert hasattr(casing, "MyEnum"), "snake_case Enum name is converted to CamelCase in python" diff --git a/tests/inputs/casing_inner_class/casing_inner_class.proto b/tests/inputs/casing_inner_class/casing_inner_class.proto new file mode 100644 index 00000000..7d231beb --- /dev/null +++ b/tests/inputs/casing_inner_class/casing_inner_class.proto @@ -0,0 +1,11 @@ +// https://github.com/danielgtaylor/python-betterproto/issues/344 +syntax = "proto3"; + +package casing_inner_class; + +message Test { + message inner_class { + sint32 old_exp = 1; + } + inner_class inner = 2; +} \ No newline at end of file diff --git a/tests/inputs/casing_inner_class/test_casing_inner_class.py b/tests/inputs/casing_inner_class/test_casing_inner_class.py new file mode 100644 index 00000000..2560b6c2 --- /dev/null +++ b/tests/inputs/casing_inner_class/test_casing_inner_class.py @@ -0,0 +1,10 @@ +import tests.output_betterproto.casing_inner_class as casing_inner_class + + +def test_message_casing_inner_class_name(): + assert hasattr(casing_inner_class, "TestInnerClass"), "Inline defined Message is correctly converted to CamelCase" + + +def test_message_casing_inner_class_attributes(): + message = casing_inner_class.Test(inner=casing_inner_class.TestInnerClass()) + assert hasattr(message.inner, "old_exp"), "Inline defined Message attribute is snake_case" diff --git a/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.proto b/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.proto new file mode 100644 index 00000000..c6d42c31 --- /dev/null +++ b/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package casing_message_field_uppercase; + +message Test { + int32 UPPERCASE = 1; + int32 UPPERCASE_V2 = 2; + int32 UPPER_CAMEL_CASE = 3; +} \ No newline at end of file diff --git a/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.py b/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.py new file mode 100644 index 00000000..6dc69256 --- /dev/null +++ b/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.py @@ -0,0 +1,8 @@ +from tests.output_betterproto.casing_message_field_uppercase import Test + + +def test_message_casing(): + message = Test() + assert hasattr(message, "uppercase"), "UPPERCASE attribute is converted to 'uppercase' in python" + assert hasattr(message, "uppercase_v2"), "UPPERCASE_V2 attribute is converted to 'uppercase_v2' in python" + assert hasattr(message, "upper_camel_case"), "UPPER_CAMEL_CASE attribute is converted to upper_camel_case in python" diff --git a/tests/inputs/config.py b/tests/inputs/config.py new file mode 100644 
index 00000000..4fb1565f --- /dev/null +++ b/tests/inputs/config.py @@ -0,0 +1,29 @@ +# Test cases that are expected to fail, e.g. unimplemented features or bug-fixes. +# Remove from list when fixed. +xfail = { + "namespace_keywords", # 70 + "googletypes_struct", # 9 + "googletypes_value", # 9 + "example", # This is the example in the readme. Not a test. +} + +services = { + "googletypes_request", + "googletypes_response", + "googletypes_response_embedded", + "service", + "service_separate_packages", + "import_service_input_message", + "googletypes_service_returns_empty", + "googletypes_service_returns_googletype", + "example_service", + "empty_service", + "service_uppercase", +} + + +# Indicate json sample messages to skip when testing that json (de)serialization +# is symmetrical because some cases legitimately are not symmetrical. +# Each key references the name of the test scenario and the values in the tuple +# are the names of the json files. +non_symmetrical_json = {"empty_repeated": ("empty_repeated",)} diff --git a/tests/inputs/deprecated/deprecated.json b/tests/inputs/deprecated/deprecated.json new file mode 100644 index 00000000..43b2b65a --- /dev/null +++ b/tests/inputs/deprecated/deprecated.json @@ -0,0 +1,6 @@ +{ + "message": { + "value": "hello" + }, + "value": 10 +} diff --git a/tests/inputs/deprecated/deprecated.proto b/tests/inputs/deprecated/deprecated.proto new file mode 100644 index 00000000..f504d03a --- /dev/null +++ b/tests/inputs/deprecated/deprecated.proto @@ -0,0 +1,21 @@ +syntax = "proto3"; + +package deprecated; + +// Some documentation about the Test message. +message Test { + Message message = 1 [deprecated=true]; + int32 value = 2; +} + +message Message { + option deprecated = true; + string value = 1; +} + +message Empty {} + +service TestService { + rpc func(Empty) returns (Empty); + rpc deprecated_func(Empty) returns (Empty) { option deprecated = true; }; +} diff --git a/tests/inputs/documentation/documentation.proto b/tests/inputs/documentation/documentation.proto new file mode 100644 index 00000000..7fc6c83b --- /dev/null +++ b/tests/inputs/documentation/documentation.proto @@ -0,0 +1,44 @@ +syntax = "proto3"; +package documentation; + +// Documentation of message 1 +// other line 1 + +// Documentation of message 2 +// other line 2 +message Test { // Documentation of message 3 + // Documentation of field 1 + // other line 1 + + // Documentation of field 2 + // other line 2 + uint32 x = 1; // Documentation of field 3 +} + +// Documentation of enum 1 +// other line 1 + +// Documentation of enum 2 +// other line 2 +enum Enum { // Documentation of enum 3 + // Documentation of variant 1 + // other line 1 + + // Documentation of variant 2 + // other line 2 + Enum_Variant = 0; // Documentation of variant 3 +} + +// Documentation of service 1 +// other line 1 + +// Documentation of service 2 +// other line 2 +service Service { // Documentation of service 3 + // Documentation of method 1 + // other line 1 + + // Documentation of method 2 + // other line 2 + rpc get(Test) returns (Test); // Documentation of method 3 +} diff --git a/tests/inputs/double/double-negative.json b/tests/inputs/double/double-negative.json new file mode 100644 index 00000000..e0776c73 --- /dev/null +++ b/tests/inputs/double/double-negative.json @@ -0,0 +1,3 @@ +{ + "count": -123.45 +} diff --git a/tests/inputs/double/double.json b/tests/inputs/double/double.json new file mode 100644 index 00000000..321412e5 --- /dev/null +++ b/tests/inputs/double/double.json @@ -0,0 +1,3 @@ +{ + "count": 
123.45 +} diff --git a/tests/inputs/double/double.proto b/tests/inputs/double/double.proto new file mode 100644 index 00000000..66aea95d --- /dev/null +++ b/tests/inputs/double/double.proto @@ -0,0 +1,7 @@ +syntax = "proto3"; + +package double; + +message Test { + double count = 1; +} diff --git a/tests/inputs/empty_repeated/empty_repeated.json b/tests/inputs/empty_repeated/empty_repeated.json new file mode 100644 index 00000000..12a801c6 --- /dev/null +++ b/tests/inputs/empty_repeated/empty_repeated.json @@ -0,0 +1,3 @@ +{ + "msg": [{"values":[]}] +} diff --git a/tests/inputs/empty_repeated/empty_repeated.proto b/tests/inputs/empty_repeated/empty_repeated.proto new file mode 100644 index 00000000..f787301f --- /dev/null +++ b/tests/inputs/empty_repeated/empty_repeated.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package empty_repeated; + +message MessageA { + repeated float values = 1; +} + +message Test { + repeated MessageA msg = 1; +} diff --git a/tests/inputs/empty_service/empty_service.proto b/tests/inputs/empty_service/empty_service.proto new file mode 100644 index 00000000..e96ff649 --- /dev/null +++ b/tests/inputs/empty_service/empty_service.proto @@ -0,0 +1,7 @@ +/* Empty service without comments */ +syntax = "proto3"; + +package empty_service; + +service Test { +} diff --git a/tests/inputs/entry/entry.proto b/tests/inputs/entry/entry.proto new file mode 100644 index 00000000..3f2af4d1 --- /dev/null +++ b/tests/inputs/entry/entry.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; + +package entry; + +// This is a minimal example of a repeated message field that caused issues when +// checking whether a message is a map. +// +// During the check whether a field is a "map", the string "entry" is added to +// the field name, checked against the type name and then further checks are +// made against the nested type of a parent message. In this edge-case, the +// first check would pass even though it shouldn't and that would cause an +// error because the parent type does not have a "nested_type" attribute. 
+ +message Test { + repeated ExportEntry export = 1; +} + +message ExportEntry { + string name = 1; +} diff --git a/tests/inputs/enum/enum.json b/tests/inputs/enum/enum.json new file mode 100644 index 00000000..d68f1c50 --- /dev/null +++ b/tests/inputs/enum/enum.json @@ -0,0 +1,9 @@ +{ + "choice": "FOUR", + "choices": [ + "ZERO", + "ONE", + "THREE", + "FOUR" + ] +} diff --git a/tests/inputs/enum/enum.proto b/tests/inputs/enum/enum.proto new file mode 100644 index 00000000..5e2e80c1 --- /dev/null +++ b/tests/inputs/enum/enum.proto @@ -0,0 +1,25 @@ +syntax = "proto3"; + +package enum; + +// Tests that enums are correctly serialized and that it correctly handles skipped and out-of-order enum values +message Test { + Choice choice = 1; + repeated Choice choices = 2; +} + +enum Choice { + ZERO = 0; + ONE = 1; + // TWO = 2; + FOUR = 4; + THREE = 3; +} + +// A "C" like enum with the enum name prefixed onto members, these should be stripped +enum ArithmeticOperator { + ARITHMETIC_OPERATOR_NONE = 0; + ARITHMETIC_OPERATOR_PLUS = 1; + ARITHMETIC_OPERATOR_MINUS = 2; + ARITHMETIC_OPERATOR_0_PREFIXED = 3; +} diff --git a/tests/inputs/enum/test_enum.py b/tests/inputs/enum/test_enum.py new file mode 100644 index 00000000..20c9a4d5 --- /dev/null +++ b/tests/inputs/enum/test_enum.py @@ -0,0 +1,107 @@ +from tests.output_betterproto.enum import ( + ArithmeticOperator, + Choice, + Test, +) + + +def test_enum_set_and_get(): + assert Test(choice=Choice.ZERO).choice == Choice.ZERO + assert Test(choice=Choice.ONE).choice == Choice.ONE + assert Test(choice=Choice.THREE).choice == Choice.THREE + assert Test(choice=Choice.FOUR).choice == Choice.FOUR + + +def test_enum_set_with_int(): + assert Test(choice=0).choice == Choice.ZERO + assert Test(choice=1).choice == Choice.ONE + assert Test(choice=3).choice == Choice.THREE + assert Test(choice=4).choice == Choice.FOUR + + +def test_enum_is_comparable_with_int(): + assert Test(choice=Choice.ZERO).choice == 0 + assert Test(choice=Choice.ONE).choice == 1 + assert Test(choice=Choice.THREE).choice == 3 + assert Test(choice=Choice.FOUR).choice == 4 + + +def test_enum_to_dict(): + assert "choice" not in Test(choice=Choice.ZERO).to_dict(), "Default enum value is not serialized" + assert Test(choice=Choice.ZERO).to_dict(include_default_values=True)["choice"] == "ZERO" + assert Test(choice=Choice.ONE).to_dict()["choice"] == "ONE" + assert Test(choice=Choice.THREE).to_dict()["choice"] == "THREE" + assert Test(choice=Choice.FOUR).to_dict()["choice"] == "FOUR" + + +def test_repeated_enum_is_comparable_with_int(): + assert Test(choices=[Choice.ZERO]).choices == [0] + assert Test(choices=[Choice.ONE]).choices == [1] + assert Test(choices=[Choice.THREE]).choices == [3] + assert Test(choices=[Choice.FOUR]).choices == [4] + + +def test_repeated_enum_set_and_get(): + assert Test(choices=[Choice.ZERO]).choices == [Choice.ZERO] + assert Test(choices=[Choice.ONE]).choices == [Choice.ONE] + assert Test(choices=[Choice.THREE]).choices == [Choice.THREE] + assert Test(choices=[Choice.FOUR]).choices == [Choice.FOUR] + + +def test_repeated_enum_to_dict(): + assert Test(choices=[Choice.ZERO]).to_dict()["choices"] == ["ZERO"] + assert Test(choices=[Choice.ONE]).to_dict()["choices"] == ["ONE"] + assert Test(choices=[Choice.THREE]).to_dict()["choices"] == ["THREE"] + assert Test(choices=[Choice.FOUR]).to_dict()["choices"] == ["FOUR"] + + all_enums_dict = Test(choices=[Choice.ZERO, Choice.ONE, Choice.THREE, Choice.FOUR]).to_dict() + assert (all_enums_dict["choices"]) == ["ZERO", "ONE", "THREE", "FOUR"] 
+ + +def test_repeated_enum_with_single_value_to_dict(): + assert Test(choices=Choice.ONE).to_dict()["choices"] == ["ONE"] + assert Test(choices=1).to_dict()["choices"] == ["ONE"] + + +def test_repeated_enum_with_non_list_iterables_to_dict(): + assert Test(choices=(1, 3)).to_dict()["choices"] == ["ONE", "THREE"] + assert Test(choices=(1, 3)).to_dict()["choices"] == ["ONE", "THREE"] + assert Test(choices=(Choice.ONE, Choice.THREE)).to_dict()["choices"] == [ + "ONE", + "THREE", + ] + + def enum_generator(): + yield Choice.ONE + yield Choice.THREE + + assert Test(choices=enum_generator()).to_dict()["choices"] == ["ONE", "THREE"] + + +def test_enum_mapped_on_parse(): + # test default value + b = Test().parse(bytes(Test())) + assert b.choice.name == Choice.ZERO.name + assert b.choices == [] + + # test non default value + a = Test().parse(bytes(Test(choice=Choice.ONE))) + assert a.choice.name == Choice.ONE.name + assert b.choices == [] + + # test repeated + c = Test().parse(bytes(Test(choices=[Choice.THREE, Choice.FOUR]))) + assert c.choices[0].name == Choice.THREE.name + assert c.choices[1].name == Choice.FOUR.name + + # bonus: defaults after empty init are also mapped + assert Test().choice.name == Choice.ZERO.name + + +def test_renamed_enum_members(): + assert set(ArithmeticOperator.__members__) == { + "NONE", + "PLUS", + "MINUS", + "_0_PREFIXED", + } diff --git a/tests/inputs/example/example.proto b/tests/inputs/example/example.proto new file mode 100644 index 00000000..56bd3647 --- /dev/null +++ b/tests/inputs/example/example.proto @@ -0,0 +1,911 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// The messages in this file describe the definitions found in .proto files. 
+// A valid .proto file can be translated directly to a FileDescriptorProto +// without any other information (e.g. without reading its imports). + + +syntax = "proto2"; + +package example; + +// package google.protobuf; + +option go_package = "google.golang.org/protobuf/types/descriptorpb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DescriptorProtos"; +option csharp_namespace = "Google.Protobuf.Reflection"; +option objc_class_prefix = "GPB"; +option cc_enable_arenas = true; + +// descriptor.proto must be optimized for speed because reflection-based +// algorithms don't work during bootstrapping. +option optimize_for = SPEED; + +// The protocol compiler can output a FileDescriptorSet containing the .proto +// files it parses. +message FileDescriptorSet { + repeated FileDescriptorProto file = 1; +} + +// Describes a complete .proto file. +message FileDescriptorProto { + optional string name = 1; // file name, relative to root of source tree + optional string package = 2; // e.g. "foo", "foo.bar", etc. + + // Names of files imported by this file. + repeated string dependency = 3; + // Indexes of the public imported files in the dependency list above. + repeated int32 public_dependency = 10; + // Indexes of the weak imported files in the dependency list. + // For Google-internal migration only. Do not use. + repeated int32 weak_dependency = 11; + + // All top-level definitions in this file. + repeated DescriptorProto message_type = 4; + repeated EnumDescriptorProto enum_type = 5; + repeated ServiceDescriptorProto service = 6; + repeated FieldDescriptorProto extension = 7; + + optional FileOptions options = 8; + + // This field contains optional information about the original source code. + // You may safely remove this entire field without harming runtime + // functionality of the descriptors -- the information is needed only by + // development tools. + optional SourceCodeInfo source_code_info = 9; + + // The syntax of the proto file. + // The supported values are "proto2" and "proto3". + optional string syntax = 12; +} + +// Describes a message type. +message DescriptorProto { + optional string name = 1; + + repeated FieldDescriptorProto field = 2; + repeated FieldDescriptorProto extension = 6; + + repeated DescriptorProto nested_type = 3; + repeated EnumDescriptorProto enum_type = 4; + + message ExtensionRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + + optional ExtensionRangeOptions options = 3; + } + repeated ExtensionRange extension_range = 5; + + repeated OneofDescriptorProto oneof_decl = 8; + + optional MessageOptions options = 7; + + // Range of reserved tag numbers. Reserved tag numbers may not be used by + // fields or extension ranges in the same message. Reserved ranges may + // not overlap. + message ReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + } + repeated ReservedRange reserved_range = 9; + // Reserved field names, which may not be used by fields in the same message. + // A given name may only be reserved once. + repeated string reserved_name = 10; +} + +message ExtensionRangeOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +// Describes a field within a message. +message FieldDescriptorProto { + enum Type { + // 0 is reserved for errors. 
+ // Order is weird for historical reasons. + TYPE_DOUBLE = 1; + TYPE_FLOAT = 2; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + // negative values are likely. + TYPE_INT64 = 3; + TYPE_UINT64 = 4; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + // negative values are likely. + TYPE_INT32 = 5; + TYPE_FIXED64 = 6; + TYPE_FIXED32 = 7; + TYPE_BOOL = 8; + TYPE_STRING = 9; + // Tag-delimited aggregate. + // Group type is deprecated and not supported in proto3. However, Proto3 + // implementations should still be able to parse the group wire format and + // treat group fields as unknown fields. + TYPE_GROUP = 10; + TYPE_MESSAGE = 11; // Length-delimited aggregate. + + // New in version 2. + TYPE_BYTES = 12; + TYPE_UINT32 = 13; + TYPE_ENUM = 14; + TYPE_SFIXED32 = 15; + TYPE_SFIXED64 = 16; + TYPE_SINT32 = 17; // Uses ZigZag encoding. + TYPE_SINT64 = 18; // Uses ZigZag encoding. + } + + enum Label { + // 0 is reserved for errors + LABEL_OPTIONAL = 1; + LABEL_REQUIRED = 2; + LABEL_REPEATED = 3; + } + + optional string name = 1; + optional int32 number = 3; + optional Label label = 4; + + // If type_name is set, this need not be set. If both this and type_name + // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + optional Type type = 5; + + // For message and enum types, this is the name of the type. If the name + // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + // rules are used to find the type (i.e. first the nested types within this + // message are searched, then within the parent, on up to the root + // namespace). + optional string type_name = 6; + + // For extensions, this is the name of the type being extended. It is + // resolved in the same manner as type_name. + optional string extendee = 2; + + // For numeric types, contains the original text representation of the value. + // For booleans, "true" or "false". + // For strings, contains the default text contents (not escaped in any way). + // For bytes, contains the C escaped value. All bytes >= 128 are escaped. + // TODO(kenton): Base-64 encode? + optional string default_value = 7; + + // If set, gives the index of a oneof in the containing type's oneof_decl + // list. This field is a member of that oneof. + optional int32 oneof_index = 9; + + // JSON name of this field. The value is set by protocol compiler. If the + // user has set a "json_name" option on this field, that option's value + // will be used. Otherwise, it's deduced from the field's name by converting + // it to camelCase. + optional string json_name = 10; + + optional FieldOptions options = 8; + + // If true, this is a proto3 "optional". When a proto3 field is optional, it + // tracks presence regardless of field type. + // + // When proto3_optional is true, this field must be belong to a oneof to + // signal to old proto3 clients that presence is tracked for this field. This + // oneof is known as a "synthetic" oneof, and this field must be its sole + // member (each proto3 optional field gets its own synthetic oneof). Synthetic + // oneofs exist in the descriptor only, and do not generate any API. Synthetic + // oneofs must be ordered after all "real" oneofs. + // + // For message fields, proto3_optional doesn't create any semantic change, + // since non-repeated message fields always track presence. However it still + // indicates the semantic detail of whether the user wrote "optional" or not. + // This can be useful for round-tripping the .proto file. 
For consistency we + // give message fields a synthetic oneof also, even though it is not required + // to track presence. This is especially important because the parser can't + // tell if a field is a message or an enum, so it must always create a + // synthetic oneof. + // + // Proto2 optional fields do not set this flag, because they already indicate + // optional with `LABEL_OPTIONAL`. + optional bool proto3_optional = 17; +} + +// Describes a oneof. +message OneofDescriptorProto { + optional string name = 1; + optional OneofOptions options = 2; +} + +// Describes an enum type. +message EnumDescriptorProto { + optional string name = 1; + + repeated EnumValueDescriptorProto value = 2; + + optional EnumOptions options = 3; + + // Range of reserved numeric values. Reserved values may not be used by + // entries in the same enum. Reserved ranges may not overlap. + // + // Note that this is distinct from DescriptorProto.ReservedRange in that it + // is inclusive such that it can appropriately represent the entire int32 + // domain. + message EnumReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Inclusive. + } + + // Range of reserved numeric values. Reserved numeric values may not be used + // by enum values in the same enum declaration. Reserved ranges may not + // overlap. + repeated EnumReservedRange reserved_range = 4; + + // Reserved enum value names, which may not be reused. A given name may only + // be reserved once. + repeated string reserved_name = 5; +} + +// Describes a value within an enum. +message EnumValueDescriptorProto { + optional string name = 1; + optional int32 number = 2; + + optional EnumValueOptions options = 3; +} + +// Describes a service. +message ServiceDescriptorProto { + optional string name = 1; + repeated MethodDescriptorProto method = 2; + + optional ServiceOptions options = 3; +} + +// Describes a method of a service. +message MethodDescriptorProto { + optional string name = 1; + + // Input and output type names. These are resolved in the same way as + // FieldDescriptorProto.type_name, but must refer to a message type. + optional string input_type = 2; + optional string output_type = 3; + + optional MethodOptions options = 4; + + // Identifies if client streams multiple client messages + optional bool client_streaming = 5 [default = false]; + // Identifies if server streams multiple server messages + optional bool server_streaming = 6 [default = false]; +} + + +// =================================================================== +// Options + +// Each of the definitions above may have "options" attached. These are +// just annotations which may cause code to be generated slightly differently +// or may contain hints for code that manipulates protocol messages. +// +// Clients may define custom options as extensions of the *Options messages. +// These extensions may not yet be known at parsing time, so the parser cannot +// store the values in them. Instead it stores them in a field in the *Options +// message called uninterpreted_option. This field must have the same name +// across all *Options messages. We then use this field to populate the +// extensions when we build a descriptor, at which point all protos have been +// parsed and so all extensions are known. +// +// Extension numbers for custom options may be chosen as follows: +// * For options which will only be used within a single application or +// organization, or for experimental options, use field numbers 50000 +// through 99999. 
It is up to you to ensure that you do not use the +// same number for multiple options. +// * For options which will be published and used publicly by multiple +// independent entities, e-mail protobuf-global-extension-registry@google.com +// to reserve extension numbers. Simply provide your project name (e.g. +// Objective-C plugin) and your project website (if available) -- there's no +// need to explain how you intend to use them. Usually you only need one +// extension number. You can declare multiple options with only one extension +// number by putting them in a sub-message. See the Custom Options section of +// the docs for examples: +// https://developers.google.com/protocol-buffers/docs/proto#options +// If this turns out to be popular, a web service will be set up +// to automatically assign option numbers. + +message FileOptions { + + // Sets the Java package where classes generated from this .proto will be + // placed. By default, the proto package is used, but this is often + // inappropriate because proto packages do not normally start with backwards + // domain names. + optional string java_package = 1; + + + // If set, all the classes from the .proto file are wrapped in a single + // outer class with the given name. This applies to both Proto1 + // (equivalent to the old "--one_java_file" option) and Proto2 (where + // a .proto always translates to a single class, but you may want to + // explicitly choose the class name). + optional string java_outer_classname = 8; + + // If set true, then the Java code generator will generate a separate .java + // file for each top-level message, enum, and service defined in the .proto + // file. Thus, these types will *not* be nested inside the outer class + // named by java_outer_classname. However, the outer class will still be + // generated to contain the file's getDescriptor() method as well as any + // top-level extensions defined in the file. + optional bool java_multiple_files = 10 [default = false]; + + // This option does nothing. + optional bool java_generate_equals_and_hash = 20 [deprecated=true]; + + // If set true, then the Java2 code generator will generate code that + // throws an exception whenever an attempt is made to assign a non-UTF-8 + // byte sequence to a string field. + // Message reflection will do the same. + // However, an extension field still accepts non-UTF-8 byte sequences. + // This option has no effect on when used with the lite runtime. + optional bool java_string_check_utf8 = 27 [default = false]; + + + // Generated classes can be optimized for speed or code size. + enum OptimizeMode { + SPEED = 1; // Generate complete code for parsing, serialization, + // etc. + CODE_SIZE = 2; // Use ReflectionOps to implement these methods. + LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime. + } + optional OptimizeMode optimize_for = 9 [default = SPEED]; + + // Sets the Go package where structs generated from this .proto will be + // placed. If omitted, the Go package will be derived from the following: + // - The basename of the package import path, if provided. + // - Otherwise, the package statement in the .proto file, if present. + // - Otherwise, the basename of the .proto file, without extension. + optional string go_package = 11; + + + + + // Should generic services be generated in each language? "Generic" services + // are not specific to any particular RPC system. They are generated by the + // main code generators in each language (without additional plugins). 
+ // Generic services were the only kind of service generation supported by + // early versions of google.protobuf. + // + // Generic services are now considered deprecated in favor of using plugins + // that generate code specific to your particular RPC system. Therefore, + // these default to false. Old code which depends on generic services should + // explicitly set them to true. + optional bool cc_generic_services = 16 [default = false]; + optional bool java_generic_services = 17 [default = false]; + optional bool py_generic_services = 18 [default = false]; + optional bool php_generic_services = 42 [default = false]; + + // Is this file deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for everything in the file, or it will be completely ignored; in the very + // least, this is a formalization for deprecating files. + optional bool deprecated = 23 [default = false]; + + // Enables the use of arenas for the proto messages in this file. This applies + // only to generated classes for C++. + optional bool cc_enable_arenas = 31 [default = true]; + + + // Sets the objective c class prefix which is prepended to all objective c + // generated classes from this .proto. There is no default. + optional string objc_class_prefix = 36; + + // Namespace for generated classes; defaults to the package. + optional string csharp_namespace = 37; + + // By default Swift generators will take the proto package and CamelCase it + // replacing '.' with underscore and use that to prefix the types/symbols + // defined. When this options is provided, they will use this value instead + // to prefix the types/symbols defined. + optional string swift_prefix = 39; + + // Sets the php class prefix which is prepended to all php generated classes + // from this .proto. Default is empty. + optional string php_class_prefix = 40; + + // Use this option to change the namespace of php generated classes. Default + // is empty. When this option is empty, the package name will be used for + // determining the namespace. + optional string php_namespace = 41; + + // Use this option to change the namespace of php generated metadata classes. + // Default is empty. When this option is empty, the proto file name will be + // used for determining the namespace. + optional string php_metadata_namespace = 44; + + // Use this option to change the package of ruby generated classes. Default + // is empty. When this option is not set, the package name will be used for + // determining the ruby package. + optional string ruby_package = 45; + + + // The parser stores options it doesn't recognize here. + // See the documentation for the "Options" section above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. + // See the documentation for the "Options" section above. + extensions 1000 to max; + + reserved 38; +} + +message MessageOptions { + // Set true to use the old proto1 MessageSet wire format for extensions. + // This is provided for backwards-compatibility with the MessageSet wire + // format. You should not use this for any other reason: It's less + // efficient, has fewer features, and is more complicated. + // + // The message must be defined exactly as follows: + // message Foo { + // option message_set_wire_format = true; + // extensions 4 to max; + // } + // Note that the message cannot have any defined fields; MessageSets only + // have extensions. 
+ // + // All extensions of your type must be singular messages; e.g. they cannot + // be int32s, enums, or repeated messages. + // + // Because this is an option, the above two restrictions are not enforced by + // the protocol compiler. + optional bool message_set_wire_format = 1 [default = false]; + + // Disables the generation of the standard "descriptor()" accessor, which can + // conflict with a field of the same name. This is meant to make migration + // from proto1 easier; new code should avoid fields named "descriptor". + optional bool no_standard_descriptor_accessor = 2 [default = false]; + + // Is this message deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the message, or it will be completely ignored; in the very least, + // this is a formalization for deprecating messages. + optional bool deprecated = 3 [default = false]; + + // Whether the message is an automatically generated map entry type for the + // maps field. + // + // For maps fields: + // map map_field = 1; + // The parsed descriptor looks like: + // message MapFieldEntry { + // option map_entry = true; + // optional KeyType key = 1; + // optional ValueType value = 2; + // } + // repeated MapFieldEntry map_field = 1; + // + // Implementations may choose not to generate the map_entry=true message, but + // use a native map in the target language to hold the keys and values. + // The reflection APIs in such implementations still need to work as + // if the field is a repeated message field. + // + // NOTE: Do not set the option in .proto files. Always use the maps syntax + // instead. The option should only be implicitly set by the proto compiler + // parser. + optional bool map_entry = 7; + + reserved 8; // javalite_serializable + reserved 9; // javanano_as_lite + + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message FieldOptions { + // The ctype option instructs the C++ code generator to use a different + // representation of the field than it normally would. See the specific + // options below. This option is not yet implemented in the open source + // release -- sorry, we'll try to include it in a future version! + optional CType ctype = 1 [default = STRING]; + enum CType { + // Default mode. + STRING = 0; + + CORD = 1; + + STRING_PIECE = 2; + } + // The packed option can be enabled for repeated primitive fields to enable + // a more efficient representation on the wire. Rather than repeatedly + // writing the tag and type for each element, the entire array is encoded as + // a single length-delimited blob. In proto3, only explicit setting it to + // false will avoid using packed encoding. + optional bool packed = 2; + + // The jstype option determines the JavaScript type used for values of the + // field. The option is permitted only for 64 bit integral and fixed types + // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + // is represented as JavaScript string, which avoids loss of precision that + // can happen when a large value is converted to a floating point JavaScript. + // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + // use the JavaScript "number" type. The behavior of the default option + // JS_NORMAL is implementation dependent. 
+ // + // This option is an enum to permit additional types to be added, e.g. + // goog.math.Integer. + optional JSType jstype = 6 [default = JS_NORMAL]; + enum JSType { + // Use the default type. + JS_NORMAL = 0; + + // Use JavaScript strings. + JS_STRING = 1; + + // Use JavaScript numbers. + JS_NUMBER = 2; + } + + // Should this field be parsed lazily? Lazy applies only to message-type + // fields. It means that when the outer message is initially parsed, the + // inner message's contents will not be parsed but instead stored in encoded + // form. The inner message will actually be parsed when it is first accessed. + // + // This is only a hint. Implementations are free to choose whether to use + // eager or lazy parsing regardless of the value of this option. However, + // setting this option true suggests that the protocol author believes that + // using lazy parsing on this field is worth the additional bookkeeping + // overhead typically needed to implement it. + // + // This option does not affect the public interface of any generated code; + // all method signatures remain the same. Furthermore, thread-safety of the + // interface is not affected by this option; const methods remain safe to + // call from multiple threads concurrently, while non-const methods continue + // to require exclusive access. + // + // + // Note that implementations may choose not to check required fields within + // a lazy sub-message. That is, calling IsInitialized() on the outer message + // may return true even if the inner message has missing required fields. + // This is necessary because otherwise the inner message would have to be + // parsed in order to perform the check, defeating the purpose of lazy + // parsing. An implementation which chooses not to check required fields + // must be consistent about it. That is, for any particular sub-message, the + // implementation must either *always* check its required fields, or *never* + // check its required fields, regardless of whether or not the message has + // been parsed. + optional bool lazy = 5 [default = false]; + + // Is this field deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for accessors, or it will be completely ignored; in the very least, this + // is a formalization for deprecating fields. + optional bool deprecated = 3 [default = false]; + + // For Google-internal migration only. Do not use. + optional bool weak = 10 [default = false]; + + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; + + reserved 4; // removed jtype +} + +message OneofOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumOptions { + + // Set this option to true to allow mapping different tag names to the same + // value. + optional bool allow_alias = 2; + + // Is this enum deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum, or it will be completely ignored; in the very least, this + // is a formalization for deprecating enums. + optional bool deprecated = 3 [default = false]; + + reserved 5; // javanano_as_lite + + // The parser stores options it doesn't recognize here. 
See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumValueOptions { + // Is this enum value deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum value, or it will be completely ignored; in the very least, + // this is a formalization for deprecating enum values. + optional bool deprecated = 1 [default = false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message ServiceOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this service deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the service, or it will be completely ignored; in the very least, + // this is a formalization for deprecating services. + optional bool deprecated = 33 [default = false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message MethodOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this method deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the method, or it will be completely ignored; in the very least, + // this is a formalization for deprecating methods. + optional bool deprecated = 33 [default = false]; + + // Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + // or neither? HTTP based RPC implementation may choose GET verb for safe + // methods, and PUT verb for idempotent methods instead of the default POST. + enum IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0; + NO_SIDE_EFFECTS = 1; // implies idempotent + IDEMPOTENT = 2; // idempotent, but may have side effects + } + optional IdempotencyLevel idempotency_level = 34 + [default = IDEMPOTENCY_UNKNOWN]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + + +// A message representing a option the parser does not recognize. This only +// appears in options protos created by the compiler::Parser class. +// DescriptorPool resolves these when building Descriptor objects. Therefore, +// options protos in descriptor objects (e.g. returned by Descriptor::options(), +// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions +// in them. +message UninterpretedOption { + // The name of the uninterpreted option. Each string represents a segment in + // a dot-separated name. is_extension is true iff a segment represents an + // extension (denoted with parentheses in options specs in .proto files). 
+ // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + // "foo.(bar.baz).qux". + message NamePart { + required string name_part = 1; + required bool is_extension = 2; + } + repeated NamePart name = 2; + + // The value of the uninterpreted option, in whatever type the tokenizer + // identified it as during parsing. Exactly one of these should be set. + optional string identifier_value = 3; + optional uint64 positive_int_value = 4; + optional int64 negative_int_value = 5; + optional double double_value = 6; + optional bytes string_value = 7; + optional string aggregate_value = 8; +} + +// =================================================================== +// Optional source code info + +// Encapsulates information about the original source file from which a +// FileDescriptorProto was generated. +message SourceCodeInfo { + // A Location identifies a piece of source code in a .proto file which + // corresponds to a particular definition. This information is intended + // to be useful to IDEs, code indexers, documentation generators, and similar + // tools. + // + // For example, say we have a file like: + // message Foo { + // optional string foo = 1; + // } + // Let's look at just the field definition: + // optional string foo = 1; + // ^ ^^ ^^ ^ ^^^ + // a bc de f ghi + // We have the following locations: + // span path represents + // [a,i) [ 4, 0, 2, 0 ] The whole field definition. + // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + // + // Notes: + // - A location may refer to a repeated field itself (i.e. not to any + // particular index within it). This is used whenever a set of elements are + // logically enclosed in a single code segment. For example, an entire + // extend block (possibly containing multiple extension definitions) will + // have an outer location whose path refers to the "extensions" repeated + // field without an index. + // - Multiple locations may have the same path. This happens when a single + // logical declaration is spread out across multiple places. The most + // obvious example is the "extend" block again -- there may be multiple + // extend blocks in the same scope, each of which will have the same path. + // - A location's span is not always a subset of its parent's span. For + // example, the "extendee" of an extension declaration appears at the + // beginning of the "extend" block and is shared by all extensions within + // the block. + // - Just because a location's span is a subset of some other location's span + // does not mean that it is a descendant. For example, a "group" defines + // both a type and a field in a single declaration. Thus, the locations + // corresponding to the type and field and their components will overlap. + // - Code which tries to interpret locations should probably be designed to + // ignore those that it doesn't understand, as more types of locations could + // be recorded in the future. + repeated Location location = 1; + message Location { + // Identifies which part of the FileDescriptorProto was defined at this + // location. + // + // Each element is a field number or an index. They form a path from + // the root FileDescriptorProto to the place where the definition. 
For + // example, this path: + // [ 4, 3, 2, 7, 1 ] + // refers to: + // file.message_type(3) // 4, 3 + // .field(7) // 2, 7 + // .name() // 1 + // This is because FileDescriptorProto.message_type has field number 4: + // repeated DescriptorProto message_type = 4; + // and DescriptorProto.field has field number 2: + // repeated FieldDescriptorProto field = 2; + // and FieldDescriptorProto.name has field number 1: + // optional string name = 1; + // + // Thus, the above path gives the location of a field name. If we removed + // the last element: + // [ 4, 3, 2, 7 ] + // this path refers to the whole field declaration (from the beginning + // of the label to the terminating semicolon). + repeated int32 path = 1 [packed = true]; + + // Always has exactly three or four elements: start line, start column, + // end line (optional, otherwise assumed same as start line), end column. + // These are packed into a single field for efficiency. Note that line + // and column numbers are zero-based -- typically you will want to add + // 1 to each before displaying to a user. + repeated int32 span = 2 [packed = true]; + + // If this SourceCodeInfo represents a complete declaration, these are any + // comments appearing before and after the declaration which appear to be + // attached to the declaration. + // + // A series of line comments appearing on consecutive lines, with no other + // tokens appearing on those lines, will be treated as a single comment. + // + // leading_detached_comments will keep paragraphs of comments that appear + // before (but not connected to) the current element. Each paragraph, + // separated by empty lines, will be one comment element in the repeated + // field. + // + // Only the comment content is provided; comment markers (e.g. //) are + // stripped out. For block comments, leading whitespace and an asterisk + // will be stripped from the beginning of each line other than the first. + // Newlines are included in the output. + // + // Examples: + // + // optional int32 foo = 1; // Comment attached to foo. + // // Comment attached to bar. + // optional int32 bar = 2; + // + // optional string baz = 3; + // // Comment attached to baz. + // // Another line attached to baz. + // + // // Comment attached to qux. + // // + // // Another line attached to qux. + // optional double qux = 4; + // + // // Detached comment for corge. This is not leading or trailing comments + // // to qux or corge because there are blank lines separating it from + // // both. + // + // // Detached comment for corge paragraph 2. + // + // optional string corge = 5; + // /* Block comment attached + // * to corge. Leading asterisks + // * will be removed. */ + // /* Block comment attached to + // * grault. */ + // optional int32 grault = 6; + // + // // ignored detached comments. + optional string leading_comments = 3; + optional string trailing_comments = 4; + repeated string leading_detached_comments = 6; + } +} + +// Describes the relationship between generated code and its original source +// file. A GeneratedCodeInfo message is associated with only one generated +// source file, but may contain references to different source .proto files. +message GeneratedCodeInfo { + // An Annotation connects some span of text in generated code to an element + // of its generating .proto file. + repeated Annotation annotation = 1; + message Annotation { + // Identifies the element in the original source .proto file. This field + // is formatted the same as SourceCodeInfo.Location.path. 
+ repeated int32 path = 1 [packed = true]; + + // Identifies the filesystem path to the original source .proto. + optional string source_file = 2; + + // Identifies the starting offset in bytes in the generated code + // that relates to the identified object. + optional int32 begin = 3; + + // Identifies the ending offset in bytes in the generated code that + // relates to the identified offset. The end offset should be one past + // the last relevant byte (so the length of the text = end - begin). + optional int32 end = 4; + } +} diff --git a/tests/inputs/example_service/example_service.proto b/tests/inputs/example_service/example_service.proto new file mode 100644 index 00000000..96455cc3 --- /dev/null +++ b/tests/inputs/example_service/example_service.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; + +package example_service; + +service Test { + rpc ExampleUnaryUnary(ExampleRequest) returns (ExampleResponse); + rpc ExampleUnaryStream(ExampleRequest) returns (stream ExampleResponse); + rpc ExampleStreamUnary(stream ExampleRequest) returns (ExampleResponse); + rpc ExampleStreamStream(stream ExampleRequest) returns (stream ExampleResponse); +} + +message ExampleRequest { + string example_string = 1; + int64 example_integer = 2; +} + +message ExampleResponse { + string example_string = 1; + int64 example_integer = 2; +} diff --git a/tests/inputs/example_service/test_example_service.py b/tests/inputs/example_service/test_example_service.py new file mode 100644 index 00000000..cd2cc40f --- /dev/null +++ b/tests/inputs/example_service/test_example_service.py @@ -0,0 +1,81 @@ +from typing import ( + AsyncIterator, +) + +import pytest +from grpclib.testing import ChannelFor + +from tests.output_betterproto.example_service import ( + ExampleRequest, + ExampleResponse, + TestBase, + TestStub, +) + + +class ExampleService(TestBase): + async def example_unary_unary(self, example_request: ExampleRequest) -> "ExampleResponse": + return ExampleResponse( + example_string=example_request.example_string, + example_integer=example_request.example_integer, + ) + + async def example_unary_stream(self, example_request: ExampleRequest) -> AsyncIterator["ExampleResponse"]: + response = ExampleResponse( + example_string=example_request.example_string, + example_integer=example_request.example_integer, + ) + yield response + yield response + yield response + + async def example_stream_unary( + self, example_request_iterator: AsyncIterator["ExampleRequest"] + ) -> "ExampleResponse": + async for example_request in example_request_iterator: + return ExampleResponse( + example_string=example_request.example_string, + example_integer=example_request.example_integer, + ) + + async def example_stream_stream( + self, example_request_iterator: AsyncIterator["ExampleRequest"] + ) -> AsyncIterator["ExampleResponse"]: + async for example_request in example_request_iterator: + yield ExampleResponse( + example_string=example_request.example_string, + example_integer=example_request.example_integer, + ) + + +@pytest.mark.asyncio +async def test_calls_with_different_cardinalities(): + example_request = ExampleRequest("test string", 42) + + async with ChannelFor([ExampleService()]) as channel: + stub = TestStub(channel) + + # unary unary + response = await stub.example_unary_unary(example_request) + assert response.example_string == example_request.example_string + assert response.example_integer == example_request.example_integer + + # unary stream + async for response in stub.example_unary_stream(example_request): + assert 
response.example_string == example_request.example_string + assert response.example_integer == example_request.example_integer + + # stream unary + async def request_iterator(): + yield example_request + yield example_request + yield example_request + + response = await stub.example_stream_unary(request_iterator()) + assert response.example_string == example_request.example_string + assert response.example_integer == example_request.example_integer + + # stream stream + async for response in stub.example_stream_stream(request_iterator()): + assert response.example_string == example_request.example_string + assert response.example_integer == example_request.example_integer diff --git a/tests/inputs/field_name_identical_to_type/field_name_identical_to_type.json b/tests/inputs/field_name_identical_to_type/field_name_identical_to_type.json new file mode 100644 index 00000000..7a6e7ae8 --- /dev/null +++ b/tests/inputs/field_name_identical_to_type/field_name_identical_to_type.json @@ -0,0 +1,7 @@ +{ + "int": 26, + "float": 26.0, + "str": "value-for-str", + "bytes": "001a", + "bool": true +} \ No newline at end of file diff --git a/tests/inputs/field_name_identical_to_type/field_name_identical_to_type.proto b/tests/inputs/field_name_identical_to_type/field_name_identical_to_type.proto new file mode 100644 index 00000000..81a0fc43 --- /dev/null +++ b/tests/inputs/field_name_identical_to_type/field_name_identical_to_type.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package field_name_identical_to_type; + +// Tests that messages may contain fields with names that are identical to their python types (PR #294) + +message Test { + int32 int = 1; + float float = 2; + string str = 3; + bytes bytes = 4; + bool bool = 5; +} \ No newline at end of file diff --git a/tests/inputs/fixed/fixed.json b/tests/inputs/fixed/fixed.json new file mode 100644 index 00000000..88587806 --- /dev/null +++ b/tests/inputs/fixed/fixed.json @@ -0,0 +1,6 @@ +{ + "foo": 4294967295, + "bar": -2147483648, + "baz": "18446744073709551615", + "qux": "-9223372036854775808" +} diff --git a/tests/inputs/fixed/fixed.proto b/tests/inputs/fixed/fixed.proto new file mode 100644 index 00000000..0f0ffb4e --- /dev/null +++ b/tests/inputs/fixed/fixed.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package fixed; + +message Test { + fixed32 foo = 1; + sfixed32 bar = 2; + fixed64 baz = 3; + sfixed64 qux = 4; +} diff --git a/tests/inputs/float/float.json b/tests/inputs/float/float.json new file mode 100644 index 00000000..3adac974 --- /dev/null +++ b/tests/inputs/float/float.json @@ -0,0 +1,9 @@ +{ + "positive": "Infinity", + "negative": "-Infinity", + "nan": "NaN", + "three": 3.0, + "threePointOneFour": 3.14, + "negThree": -3.0, + "negThreePointOneFour": -3.14 + } diff --git a/tests/inputs/float/float.proto b/tests/inputs/float/float.proto new file mode 100644 index 00000000..fea12b3d --- /dev/null +++ b/tests/inputs/float/float.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package float; + +// Some documentation about the Test message. 
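The float.json fixture above leans on the proto3 JSON rule that non-finite doubles are written as the strings "Infinity", "-Infinity" and "NaN". A hypothetical round-trip sketch, assuming the generated tests.output_betterproto.float module that the test harness elsewhere in this patch relies on:

# Hedged sketch (not part of this patch); the module path and from_json behaviour
# are assumed to follow the conventions used by the other tests in this suite.
import math

from tests.output_betterproto.float import Test

parsed = Test().from_json('{"positive": "Infinity", "negative": "-Infinity", "nan": "NaN"}')
assert math.isinf(parsed.positive) and parsed.positive > 0
assert math.isinf(parsed.negative) and parsed.negative < 0
assert math.isnan(parsed.nan)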
+message Test { + double positive = 1; + double negative = 2; + double nan = 3; + double three = 4; + double three_point_one_four = 5; + double neg_three = 6; + double neg_three_point_one_four = 7; +} diff --git a/tests/inputs/google_impl_behavior_equivalence/google_impl_behavior_equivalence.proto b/tests/inputs/google_impl_behavior_equivalence/google_impl_behavior_equivalence.proto new file mode 100644 index 00000000..66ef8a64 --- /dev/null +++ b/tests/inputs/google_impl_behavior_equivalence/google_impl_behavior_equivalence.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; + +import "google/protobuf/timestamp.proto"; +package google_impl_behavior_equivalence; + +message Foo { int64 bar = 1; } + +message Test { + oneof group { + string string = 1; + int64 integer = 2; + Foo foo = 3; + } +} + +message Spam { + google.protobuf.Timestamp ts = 1; +} + +message Request { Empty foo = 1; } + +message Empty {} diff --git a/tests/inputs/google_impl_behavior_equivalence/test_google_impl_behavior_equivalence.py b/tests/inputs/google_impl_behavior_equivalence/test_google_impl_behavior_equivalence.py new file mode 100644 index 00000000..b6ed5e0f --- /dev/null +++ b/tests/inputs/google_impl_behavior_equivalence/test_google_impl_behavior_equivalence.py @@ -0,0 +1,84 @@ +from datetime import ( + datetime, + timezone, +) + +import pytest +from google.protobuf import json_format +from google.protobuf.timestamp_pb2 import Timestamp + +from tests.output_betterproto.google_impl_behavior_equivalence import ( + Empty, + Foo, + Request, + Spam, + Test, +) +from tests.output_reference.google_impl_behavior_equivalence.google_impl_behavior_equivalence_pb2 import ( + Empty as ReferenceEmpty, + Foo as ReferenceFoo, + Request as ReferenceRequest, + Spam as ReferenceSpam, + Test as ReferenceTest, +) + + +def test_oneof_serializes_similar_to_google_oneof(): + tests = [ + (Test(string="abc"), ReferenceTest(string="abc")), + (Test(integer=2), ReferenceTest(integer=2)), + (Test(foo=Foo(bar=1)), ReferenceTest(foo=ReferenceFoo(bar=1))), + # Default values should also behave the same within oneofs + (Test(string=""), ReferenceTest(string="")), + (Test(integer=0), ReferenceTest(integer=0)), + (Test(foo=Foo(bar=0)), ReferenceTest(foo=ReferenceFoo(bar=0))), + ] + for message, message_reference in tests: + # NOTE: As of July 2020, MessageToJson inserts newlines in the output string so, + # just compare dicts + assert message.to_dict() == json_format.MessageToDict(message_reference) + + +def test_bytes_are_the_same_for_oneof(): + message = Test(string="") + message_reference = ReferenceTest(string="") + + message_bytes = bytes(message) + message_reference_bytes = message_reference.SerializeToString() + + assert message_bytes == message_reference_bytes + + message2 = Test().parse(message_reference_bytes) + message_reference2 = ReferenceTest() + message_reference2.ParseFromString(message_reference_bytes) + + assert message == message2 + assert message_reference == message_reference2 + + # None of these fields were explicitly set BUT they should not actually be null + # themselves + assert message.foo is None + assert message2.foo is None + + assert isinstance(message_reference.foo, ReferenceFoo) + assert isinstance(message_reference2.foo, ReferenceFoo) + + +@pytest.mark.parametrize("dt", (datetime.min.replace(tzinfo=timezone.utc),)) +def test_datetime_clamping(dt): # see #407 + ts = Timestamp() + ts.FromDatetime(dt) + assert bytes(Spam(dt)) == ReferenceSpam(ts=ts).SerializeToString() + message_bytes = bytes(Spam(dt)) + + assert 
Spam().parse(message_bytes).ts.timestamp() == ReferenceSpam.FromString(message_bytes).ts.seconds + + +def test_empty_message_field(): + message = Request() + reference_message = ReferenceRequest() + + message.foo = Empty() + reference_message.foo.CopyFrom(ReferenceEmpty()) + + assert bytes(message) == reference_message.SerializeToString() diff --git a/tests/inputs/googletypes/googletypes-missing.json b/tests/inputs/googletypes/googletypes-missing.json new file mode 100644 index 00000000..0967ef42 --- /dev/null +++ b/tests/inputs/googletypes/googletypes-missing.json @@ -0,0 +1 @@ +{} diff --git a/tests/inputs/googletypes/googletypes.json b/tests/inputs/googletypes/googletypes.json new file mode 100644 index 00000000..0a002e9b --- /dev/null +++ b/tests/inputs/googletypes/googletypes.json @@ -0,0 +1,7 @@ +{ + "maybe": false, + "ts": "1972-01-01T10:00:20.021Z", + "duration": "1.200s", + "important": 10, + "empty": {} +} diff --git a/tests/inputs/googletypes/googletypes.proto b/tests/inputs/googletypes/googletypes.proto new file mode 100644 index 00000000..ef8cb4a1 --- /dev/null +++ b/tests/inputs/googletypes/googletypes.proto @@ -0,0 +1,16 @@ +syntax = "proto3"; + +package googletypes; + +import "google/protobuf/duration.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/wrappers.proto"; +import "google/protobuf/empty.proto"; + +message Test { + google.protobuf.BoolValue maybe = 1; + google.protobuf.Timestamp ts = 2; + google.protobuf.Duration duration = 3; + google.protobuf.Int32Value important = 4; + google.protobuf.Empty empty = 5; +} diff --git a/tests/inputs/googletypes_request/googletypes_request.proto b/tests/inputs/googletypes_request/googletypes_request.proto new file mode 100644 index 00000000..1cedcaaf --- /dev/null +++ b/tests/inputs/googletypes_request/googletypes_request.proto @@ -0,0 +1,29 @@ +syntax = "proto3"; + +package googletypes_request; + +import "google/protobuf/duration.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/wrappers.proto"; + +// Tests that google types can be used as params + +service Test { + rpc SendDouble (google.protobuf.DoubleValue) returns (Input); + rpc SendFloat (google.protobuf.FloatValue) returns (Input); + rpc SendInt64 (google.protobuf.Int64Value) returns (Input); + rpc SendUInt64 (google.protobuf.UInt64Value) returns (Input); + rpc SendInt32 (google.protobuf.Int32Value) returns (Input); + rpc SendUInt32 (google.protobuf.UInt32Value) returns (Input); + rpc SendBool (google.protobuf.BoolValue) returns (Input); + rpc SendString (google.protobuf.StringValue) returns (Input); + rpc SendBytes (google.protobuf.BytesValue) returns (Input); + rpc SendDatetime (google.protobuf.Timestamp) returns (Input); + rpc SendTimedelta (google.protobuf.Duration) returns (Input); + rpc SendEmpty (google.protobuf.Empty) returns (Input); +} + +message Input { + +} diff --git a/tests/inputs/googletypes_request/test_googletypes_request.py b/tests/inputs/googletypes_request/test_googletypes_request.py new file mode 100644 index 00000000..05573cc3 --- /dev/null +++ b/tests/inputs/googletypes_request/test_googletypes_request.py @@ -0,0 +1,46 @@ +from datetime import ( + datetime, + timedelta, +) +from typing import ( + Any, + Callable, +) + +import betterproto.lib.google.protobuf as protobuf +import pytest + +from tests.mocks import MockChannel +from tests.output_betterproto.googletypes_request import ( + Input, + TestStub, +) + +test_cases = [ + (TestStub.send_double, 
protobuf.DoubleValue, 2.5), + (TestStub.send_float, protobuf.FloatValue, 2.5), + (TestStub.send_int64, protobuf.Int64Value, -64), + (TestStub.send_u_int64, protobuf.UInt64Value, 64), + (TestStub.send_int32, protobuf.Int32Value, -32), + (TestStub.send_u_int32, protobuf.UInt32Value, 32), + (TestStub.send_bool, protobuf.BoolValue, True), + (TestStub.send_string, protobuf.StringValue, "string"), + (TestStub.send_bytes, protobuf.BytesValue, bytes(0xFF)[0:4]), + (TestStub.send_datetime, protobuf.Timestamp, datetime(2038, 1, 19, 3, 14, 8)), + (TestStub.send_timedelta, protobuf.Duration, timedelta(seconds=123456)), +] + + +@pytest.mark.asyncio +@pytest.mark.parametrize(["service_method", "wrapper_class", "value"], test_cases) +async def test_channel_receives_wrapped_type( + service_method: Callable[[TestStub, Input], Any], wrapper_class: Callable, value +): + wrapped_value = wrapper_class() + wrapped_value.value = value + channel = MockChannel(responses=[Input()]) + service = TestStub(channel) + + await service_method(service, wrapped_value) + + assert channel.requests[0]["request"] == type(wrapped_value) diff --git a/tests/inputs/googletypes_response/googletypes_response.proto b/tests/inputs/googletypes_response/googletypes_response.proto new file mode 100644 index 00000000..8917d1c7 --- /dev/null +++ b/tests/inputs/googletypes_response/googletypes_response.proto @@ -0,0 +1,23 @@ +syntax = "proto3"; + +package googletypes_response; + +import "google/protobuf/wrappers.proto"; + +// Tests that wrapped values can be used directly as return values + +service Test { + rpc GetDouble (Input) returns (google.protobuf.DoubleValue); + rpc GetFloat (Input) returns (google.protobuf.FloatValue); + rpc GetInt64 (Input) returns (google.protobuf.Int64Value); + rpc GetUInt64 (Input) returns (google.protobuf.UInt64Value); + rpc GetInt32 (Input) returns (google.protobuf.Int32Value); + rpc GetUInt32 (Input) returns (google.protobuf.UInt32Value); + rpc GetBool (Input) returns (google.protobuf.BoolValue); + rpc GetString (Input) returns (google.protobuf.StringValue); + rpc GetBytes (Input) returns (google.protobuf.BytesValue); +} + +message Input { + +} diff --git a/tests/inputs/googletypes_response/test_googletypes_response.py b/tests/inputs/googletypes_response/test_googletypes_response.py new file mode 100644 index 00000000..75f6accd --- /dev/null +++ b/tests/inputs/googletypes_response/test_googletypes_response.py @@ -0,0 +1,63 @@ +from typing import ( + Any, + Callable, + Optional, +) + +import betterproto.lib.google.protobuf as protobuf +import pytest + +from tests.mocks import MockChannel +from tests.output_betterproto.googletypes_response import ( + Input, + TestStub, +) + +test_cases = [ + (TestStub.get_double, protobuf.DoubleValue, 2.5), + (TestStub.get_float, protobuf.FloatValue, 2.5), + (TestStub.get_int64, protobuf.Int64Value, -64), + (TestStub.get_u_int64, protobuf.UInt64Value, 64), + (TestStub.get_int32, protobuf.Int32Value, -32), + (TestStub.get_u_int32, protobuf.UInt32Value, 32), + (TestStub.get_bool, protobuf.BoolValue, True), + (TestStub.get_string, protobuf.StringValue, "string"), + (TestStub.get_bytes, protobuf.BytesValue, bytes(0xFF)[0:4]), +] + + +@pytest.mark.asyncio +@pytest.mark.parametrize(["service_method", "wrapper_class", "value"], test_cases) +async def test_channel_receives_wrapped_type( + service_method: Callable[[TestStub, Input], Any], wrapper_class: Callable, value +): + wrapped_value = wrapper_class() + wrapped_value.value = value + channel = MockChannel(responses=[wrapped_value]) 
+ service = TestStub(channel) + method_param = Input() + + await service_method(service, method_param) + + assert channel.requests[0]["response_type"] != Optional[type(value)] + assert channel.requests[0]["response_type"] == type(wrapped_value) + + +@pytest.mark.asyncio +@pytest.mark.xfail +@pytest.mark.parametrize(["service_method", "wrapper_class", "value"], test_cases) +async def test_service_unwraps_response( + service_method: Callable[[TestStub, Input], Any], wrapper_class: Callable, value +): + """ + grpclib does not unwrap wrapper values returned by services + """ + wrapped_value = wrapper_class() + wrapped_value.value = value + service = TestStub(MockChannel(responses=[wrapped_value])) + method_param = Input() + + response_value = await service_method(service, method_param) + + assert response_value == value + assert type(response_value) == type(value) diff --git a/tests/inputs/googletypes_response_embedded/googletypes_response_embedded.proto b/tests/inputs/googletypes_response_embedded/googletypes_response_embedded.proto new file mode 100644 index 00000000..47284e3a --- /dev/null +++ b/tests/inputs/googletypes_response_embedded/googletypes_response_embedded.proto @@ -0,0 +1,26 @@ +syntax = "proto3"; + +package googletypes_response_embedded; + +import "google/protobuf/wrappers.proto"; + +// Tests that wrapped values are supported as part of output message +service Test { + rpc getOutput (Input) returns (Output); +} + +message Input { + +} + +message Output { + google.protobuf.DoubleValue double_value = 1; + google.protobuf.FloatValue float_value = 2; + google.protobuf.Int64Value int64_value = 3; + google.protobuf.UInt64Value uint64_value = 4; + google.protobuf.Int32Value int32_value = 5; + google.protobuf.UInt32Value uint32_value = 6; + google.protobuf.BoolValue bool_value = 7; + google.protobuf.StringValue string_value = 8; + google.protobuf.BytesValue bytes_value = 9; +} diff --git a/tests/inputs/googletypes_response_embedded/test_googletypes_response_embedded.py b/tests/inputs/googletypes_response_embedded/test_googletypes_response_embedded.py new file mode 100644 index 00000000..57ebce1b --- /dev/null +++ b/tests/inputs/googletypes_response_embedded/test_googletypes_response_embedded.py @@ -0,0 +1,40 @@ +import pytest + +from tests.mocks import MockChannel +from tests.output_betterproto.googletypes_response_embedded import ( + Input, + Output, + TestStub, +) + + +@pytest.mark.asyncio +async def test_service_passes_through_unwrapped_values_embedded_in_response(): + """ + We do not not need to implement value unwrapping for embedded well-known types, + as this is already handled by grpclib. This test merely shows that this is the case. 
+ """ + output = Output( + double_value=10.0, + float_value=12.0, + int64_value=-13, + uint64_value=14, + int32_value=-15, + uint32_value=16, + bool_value=True, + string_value="string", + bytes_value=bytes(0xFF)[0:4], + ) + + service = TestStub(MockChannel(responses=[output])) + response = await service.get_output(Input()) + + assert response.double_value == 10.0 + assert response.float_value == 12.0 + assert response.int64_value == -13 + assert response.uint64_value == 14 + assert response.int32_value == -15 + assert response.uint32_value == 16 + assert response.bool_value + assert response.string_value == "string" + assert response.bytes_value == bytes(0xFF)[0:4] diff --git a/tests/inputs/googletypes_service_returns_empty/googletypes_service_returns_empty.proto b/tests/inputs/googletypes_service_returns_empty/googletypes_service_returns_empty.proto new file mode 100644 index 00000000..2153ad5e --- /dev/null +++ b/tests/inputs/googletypes_service_returns_empty/googletypes_service_returns_empty.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package googletypes_service_returns_empty; + +import "google/protobuf/empty.proto"; + +service Test { + rpc Send (RequestMessage) returns (google.protobuf.Empty) { + } +} + +message RequestMessage { +} \ No newline at end of file diff --git a/tests/inputs/googletypes_service_returns_googletype/googletypes_service_returns_googletype.proto b/tests/inputs/googletypes_service_returns_googletype/googletypes_service_returns_googletype.proto new file mode 100644 index 00000000..457707b7 --- /dev/null +++ b/tests/inputs/googletypes_service_returns_googletype/googletypes_service_returns_googletype.proto @@ -0,0 +1,18 @@ +syntax = "proto3"; + +package googletypes_service_returns_googletype; + +import "google/protobuf/empty.proto"; +import "google/protobuf/struct.proto"; + +// Tests that imports are generated correctly when returning Google well-known types + +service Test { + rpc GetEmpty (RequestMessage) returns (google.protobuf.Empty); + rpc GetStruct (RequestMessage) returns (google.protobuf.Struct); + rpc GetListValue (RequestMessage) returns (google.protobuf.ListValue); + rpc GetValue (RequestMessage) returns (google.protobuf.Value); +} + +message RequestMessage { +} \ No newline at end of file diff --git a/tests/inputs/googletypes_struct/googletypes_struct.json b/tests/inputs/googletypes_struct/googletypes_struct.json new file mode 100644 index 00000000..ecc175e0 --- /dev/null +++ b/tests/inputs/googletypes_struct/googletypes_struct.json @@ -0,0 +1,5 @@ +{ + "struct": { + "key": true + } +} diff --git a/tests/inputs/googletypes_struct/googletypes_struct.proto b/tests/inputs/googletypes_struct/googletypes_struct.proto new file mode 100644 index 00000000..2b8b5c55 --- /dev/null +++ b/tests/inputs/googletypes_struct/googletypes_struct.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package googletypes_struct; + +import "google/protobuf/struct.proto"; + +message Test { + google.protobuf.Struct struct = 1; +} diff --git a/tests/inputs/googletypes_value/googletypes_value.json b/tests/inputs/googletypes_value/googletypes_value.json new file mode 100644 index 00000000..db52d5c0 --- /dev/null +++ b/tests/inputs/googletypes_value/googletypes_value.json @@ -0,0 +1,11 @@ +{ + "value1": "hello world", + "value2": true, + "value3": 1, + "value4": null, + "value5": [ + 1, + 2, + 3 + ] +} diff --git a/tests/inputs/googletypes_value/googletypes_value.proto b/tests/inputs/googletypes_value/googletypes_value.proto new file mode 100644 index 00000000..d5089d5e --- /dev/null +++ 
b/tests/inputs/googletypes_value/googletypes_value.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; + +package googletypes_value; + +import "google/protobuf/struct.proto"; + +// Tests that fields of type google.protobuf.Value can contain arbitrary JSON-values. + +message Test { + google.protobuf.Value value1 = 1; + google.protobuf.Value value2 = 2; + google.protobuf.Value value3 = 3; + google.protobuf.Value value4 = 4; + google.protobuf.Value value5 = 5; +} diff --git a/tests/inputs/import_capitalized_package/capitalized.proto b/tests/inputs/import_capitalized_package/capitalized.proto new file mode 100644 index 00000000..e80c95cd --- /dev/null +++ b/tests/inputs/import_capitalized_package/capitalized.proto @@ -0,0 +1,8 @@ +syntax = "proto3"; + + +package import_capitalized_package.Capitalized; + +message Message { + +} diff --git a/tests/inputs/import_capitalized_package/test.proto b/tests/inputs/import_capitalized_package/test.proto new file mode 100644 index 00000000..38c9b2d7 --- /dev/null +++ b/tests/inputs/import_capitalized_package/test.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package import_capitalized_package; + +import "capitalized.proto"; + +// Tests that we can import from a package with a capital name, that looks like a nested type, but isn't. + +message Test { + Capitalized.Message message = 1; +} diff --git a/tests/inputs/import_child_package_from_package/child.proto b/tests/inputs/import_child_package_from_package/child.proto new file mode 100644 index 00000000..d99c7c31 --- /dev/null +++ b/tests/inputs/import_child_package_from_package/child.proto @@ -0,0 +1,7 @@ +syntax = "proto3"; + +package import_child_package_from_package.package.childpackage; + +message ChildMessage { + +} diff --git a/tests/inputs/import_child_package_from_package/import_child_package_from_package.proto b/tests/inputs/import_child_package_from_package/import_child_package_from_package.proto new file mode 100644 index 00000000..66e0aa81 --- /dev/null +++ b/tests/inputs/import_child_package_from_package/import_child_package_from_package.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package import_child_package_from_package; + +import "package_message.proto"; + +// Tests generated imports when a message in a package refers to a message in a nested child package. 
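For the nested-child-package case above, the expected generated layout follows the one-module-per-package convention used throughout these tests. The import paths below are assumptions based on that convention (compare the child-package import in test_import_service_input_message.py later in this patch):

# Hypothetical usage sketch (module paths assumed, not part of this patch).
from tests.output_betterproto.import_child_package_from_package import Test
from tests.output_betterproto.import_child_package_from_package.package import (
    PackageMessage,
)
from tests.output_betterproto.import_child_package_from_package.package.childpackage import (
    ChildMessage,
)

# A message in the parent package can reference the nested child package's type.
test = Test(message=PackageMessage(c=ChildMessage()))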
+ +message Test { + package.PackageMessage message = 1; +} diff --git a/tests/inputs/import_child_package_from_package/package_message.proto b/tests/inputs/import_child_package_from_package/package_message.proto new file mode 100644 index 00000000..79d66f37 --- /dev/null +++ b/tests/inputs/import_child_package_from_package/package_message.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +import "child.proto"; + +package import_child_package_from_package.package; + +message PackageMessage { + package.childpackage.ChildMessage c = 1; +} diff --git a/tests/inputs/import_child_package_from_root/child.proto b/tests/inputs/import_child_package_from_root/child.proto new file mode 100644 index 00000000..2a46d5f5 --- /dev/null +++ b/tests/inputs/import_child_package_from_root/child.proto @@ -0,0 +1,7 @@ +syntax = "proto3"; + +package import_child_package_from_root.childpackage; + +message Message { + +} diff --git a/tests/inputs/import_child_package_from_root/import_child_package_from_root.proto b/tests/inputs/import_child_package_from_root/import_child_package_from_root.proto new file mode 100644 index 00000000..62998310 --- /dev/null +++ b/tests/inputs/import_child_package_from_root/import_child_package_from_root.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package import_child_package_from_root; + +import "child.proto"; + +// Tests generated imports when a message in root refers to a message in a child package. + +message Test { + childpackage.Message child = 1; +} diff --git a/tests/inputs/import_child_scoping_rules/child.proto b/tests/inputs/import_child_scoping_rules/child.proto new file mode 100644 index 00000000..f491e0da --- /dev/null +++ b/tests/inputs/import_child_scoping_rules/child.proto @@ -0,0 +1,7 @@ +syntax = "proto3"; + +package import_child_scoping_rules.aaa.bbb.ccc.ddd; + +message ChildMessage { + +} diff --git a/tests/inputs/import_child_scoping_rules/import_child_scoping_rules.proto b/tests/inputs/import_child_scoping_rules/import_child_scoping_rules.proto new file mode 100644 index 00000000..272852cc --- /dev/null +++ b/tests/inputs/import_child_scoping_rules/import_child_scoping_rules.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package import_child_scoping_rules; + +import "package.proto"; + +message Test { + aaa.bbb.Msg msg = 1; +} diff --git a/tests/inputs/import_child_scoping_rules/package.proto b/tests/inputs/import_child_scoping_rules/package.proto new file mode 100644 index 00000000..6b51fe56 --- /dev/null +++ b/tests/inputs/import_child_scoping_rules/package.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package import_child_scoping_rules.aaa.bbb; + +import "child.proto"; + +message Msg { + .import_child_scoping_rules.aaa.bbb.ccc.ddd.ChildMessage a = 1; + import_child_scoping_rules.aaa.bbb.ccc.ddd.ChildMessage b = 2; + aaa.bbb.ccc.ddd.ChildMessage c = 3; + bbb.ccc.ddd.ChildMessage d = 4; + ccc.ddd.ChildMessage e = 5; +} diff --git a/tests/inputs/import_circular_dependency/import_circular_dependency.proto b/tests/inputs/import_circular_dependency/import_circular_dependency.proto new file mode 100644 index 00000000..4441be9f --- /dev/null +++ b/tests/inputs/import_circular_dependency/import_circular_dependency.proto @@ -0,0 +1,30 @@ +syntax = "proto3"; + +package import_circular_dependency; + +import "root.proto"; +import "other.proto"; + +// This test-case verifies support for circular dependencies in the generated python files. +// +// This is important because we generate 1 python file/module per package, rather than 1 file per proto file. 
+// +// Scenario: +// +// The proto messages depend on each other in a non-circular way: +// +// Test -------> RootPackageMessage <--------------. +// `------------------------------------> OtherPackageMessage +// +// Test and RootPackageMessage are in different files, but belong to the same package (root): +// +// (Test -------> RootPackageMessage) <------------. +// `------------------------------------> OtherPackageMessage +// +// After grouping the packages into single files or modules, a circular dependency is created: +// +// (root: Test & RootPackageMessage) <-------> (other: OtherPackageMessage) +message Test { + RootPackageMessage message = 1; + other.OtherPackageMessage other_value = 2; +} diff --git a/tests/inputs/import_circular_dependency/other.proto b/tests/inputs/import_circular_dependency/other.proto new file mode 100644 index 00000000..833b8699 --- /dev/null +++ b/tests/inputs/import_circular_dependency/other.proto @@ -0,0 +1,8 @@ +syntax = "proto3"; + +import "root.proto"; +package import_circular_dependency.other; + +message OtherPackageMessage { + RootPackageMessage rootPackageMessage = 1; +} diff --git a/tests/inputs/import_circular_dependency/root.proto b/tests/inputs/import_circular_dependency/root.proto new file mode 100644 index 00000000..73839477 --- /dev/null +++ b/tests/inputs/import_circular_dependency/root.proto @@ -0,0 +1,7 @@ +syntax = "proto3"; + +package import_circular_dependency; + +message RootPackageMessage { + +} diff --git a/tests/inputs/import_cousin_package/cousin.proto b/tests/inputs/import_cousin_package/cousin.proto new file mode 100644 index 00000000..2870dfe9 --- /dev/null +++ b/tests/inputs/import_cousin_package/cousin.proto @@ -0,0 +1,6 @@ +syntax = "proto3"; + +package import_cousin_package.cousin.cousin_subpackage; + +message CousinMessage { +} diff --git a/tests/inputs/import_cousin_package/test.proto b/tests/inputs/import_cousin_package/test.proto new file mode 100644 index 00000000..89ec3d84 --- /dev/null +++ b/tests/inputs/import_cousin_package/test.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package import_cousin_package.test.subpackage; + +import "cousin.proto"; + +// Verify that we can import message unrelated to us + +message Test { + cousin.cousin_subpackage.CousinMessage message = 1; +} diff --git a/tests/inputs/import_cousin_package_same_name/cousin.proto b/tests/inputs/import_cousin_package_same_name/cousin.proto new file mode 100644 index 00000000..84b6a407 --- /dev/null +++ b/tests/inputs/import_cousin_package_same_name/cousin.proto @@ -0,0 +1,6 @@ +syntax = "proto3"; + +package import_cousin_package_same_name.cousin.subpackage; + +message CousinMessage { +} diff --git a/tests/inputs/import_cousin_package_same_name/test.proto b/tests/inputs/import_cousin_package_same_name/test.proto new file mode 100644 index 00000000..7b420d30 --- /dev/null +++ b/tests/inputs/import_cousin_package_same_name/test.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package import_cousin_package_same_name.test.subpackage; + +import "cousin.proto"; + +// Verify that we can import a message unrelated to us, in a subpackage with the same name as us. 
+ +message Test { + cousin.subpackage.CousinMessage message = 1; +} diff --git a/tests/inputs/import_nested_child_package_from_root/child.proto b/tests/inputs/import_nested_child_package_from_root/child.proto new file mode 100644 index 00000000..fcd7e2f6 --- /dev/null +++ b/tests/inputs/import_nested_child_package_from_root/child.proto @@ -0,0 +1,7 @@ +syntax = "proto3"; + +package import_nested_child_package_from_root.package.child.otherchild; + +message ChildMessage { + +} diff --git a/tests/inputs/import_nested_child_package_from_root/import_nested_child_package_from_root.proto b/tests/inputs/import_nested_child_package_from_root/import_nested_child_package_from_root.proto new file mode 100644 index 00000000..96da1ace --- /dev/null +++ b/tests/inputs/import_nested_child_package_from_root/import_nested_child_package_from_root.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package import_nested_child_package_from_root; + +import "child.proto"; + +message Test { + package.child.otherchild.ChildMessage child = 1; +} diff --git a/tests/inputs/import_packages_same_name/import_packages_same_name.proto b/tests/inputs/import_packages_same_name/import_packages_same_name.proto new file mode 100644 index 00000000..dff7efed --- /dev/null +++ b/tests/inputs/import_packages_same_name/import_packages_same_name.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package import_packages_same_name; + +import "users_v1.proto"; +import "posts_v1.proto"; + +// Tests generated message can correctly reference two packages with the same leaf-name + +message Test { + users.v1.User user = 1; + posts.v1.Post post = 2; +} diff --git a/tests/inputs/import_packages_same_name/posts_v1.proto b/tests/inputs/import_packages_same_name/posts_v1.proto new file mode 100644 index 00000000..d3b9b1ca --- /dev/null +++ b/tests/inputs/import_packages_same_name/posts_v1.proto @@ -0,0 +1,7 @@ +syntax = "proto3"; + +package import_packages_same_name.posts.v1; + +message Post { + +} diff --git a/tests/inputs/import_packages_same_name/users_v1.proto b/tests/inputs/import_packages_same_name/users_v1.proto new file mode 100644 index 00000000..d3a17e92 --- /dev/null +++ b/tests/inputs/import_packages_same_name/users_v1.proto @@ -0,0 +1,7 @@ +syntax = "proto3"; + +package import_packages_same_name.users.v1; + +message User { + +} diff --git a/tests/inputs/import_parent_package_from_child/import_parent_package_from_child.proto b/tests/inputs/import_parent_package_from_child/import_parent_package_from_child.proto new file mode 100644 index 00000000..edc47362 --- /dev/null +++ b/tests/inputs/import_parent_package_from_child/import_parent_package_from_child.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +import "parent_package_message.proto"; + +package import_parent_package_from_child.parent.child; + +// Tests generated imports when a message refers to a message defined in its parent package + +message Test { + ParentPackageMessage message_implicit = 1; + parent.ParentPackageMessage message_explicit = 2; +} diff --git a/tests/inputs/import_parent_package_from_child/parent_package_message.proto b/tests/inputs/import_parent_package_from_child/parent_package_message.proto new file mode 100644 index 00000000..fb3fd31d --- /dev/null +++ b/tests/inputs/import_parent_package_from_child/parent_package_message.proto @@ -0,0 +1,6 @@ +syntax = "proto3"; + +package import_parent_package_from_child.parent; + +message ParentPackageMessage { +} diff --git a/tests/inputs/import_root_package_from_child/child.proto 
b/tests/inputs/import_root_package_from_child/child.proto new file mode 100644 index 00000000..bd519677 --- /dev/null +++ b/tests/inputs/import_root_package_from_child/child.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package import_root_package_from_child.child; + +import "root.proto"; + +// Verify that we can import root message from child package + +message Test { + RootMessage message = 1; +} diff --git a/tests/inputs/import_root_package_from_child/root.proto b/tests/inputs/import_root_package_from_child/root.proto new file mode 100644 index 00000000..6ae955ad --- /dev/null +++ b/tests/inputs/import_root_package_from_child/root.proto @@ -0,0 +1,7 @@ +syntax = "proto3"; + +package import_root_package_from_child; + + +message RootMessage { +} diff --git a/tests/inputs/import_root_sibling/import_root_sibling.proto b/tests/inputs/import_root_sibling/import_root_sibling.proto new file mode 100644 index 00000000..759e606f --- /dev/null +++ b/tests/inputs/import_root_sibling/import_root_sibling.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package import_root_sibling; + +import "sibling.proto"; + +// Tests generated imports when a message in the root package refers to another message in the root package + +message Test { + SiblingMessage sibling = 1; +} diff --git a/tests/inputs/import_root_sibling/sibling.proto b/tests/inputs/import_root_sibling/sibling.proto new file mode 100644 index 00000000..6b6ba2ea --- /dev/null +++ b/tests/inputs/import_root_sibling/sibling.proto @@ -0,0 +1,7 @@ +syntax = "proto3"; + +package import_root_sibling; + +message SiblingMessage { + +} diff --git a/tests/inputs/import_service_input_message/child_package_request_message.proto b/tests/inputs/import_service_input_message/child_package_request_message.proto new file mode 100644 index 00000000..54fc1123 --- /dev/null +++ b/tests/inputs/import_service_input_message/child_package_request_message.proto @@ -0,0 +1,7 @@ +syntax = "proto3"; + +package import_service_input_message.child; + +message ChildRequestMessage { + int32 child_argument = 1; +} \ No newline at end of file diff --git a/tests/inputs/import_service_input_message/import_service_input_message.proto b/tests/inputs/import_service_input_message/import_service_input_message.proto new file mode 100644 index 00000000..cbf48fa9 --- /dev/null +++ b/tests/inputs/import_service_input_message/import_service_input_message.proto @@ -0,0 +1,25 @@ +syntax = "proto3"; + +package import_service_input_message; + +import "request_message.proto"; +import "child_package_request_message.proto"; + +// Tests generated service correctly imports the RequestMessage + +service Test { + rpc DoThing (RequestMessage) returns (RequestResponse); + rpc DoThing2 (child.ChildRequestMessage) returns (RequestResponse); + rpc DoThing3 (Nested.RequestMessage) returns (RequestResponse); +} + + +message RequestResponse { + int32 value = 1; +} + +message Nested { + message RequestMessage { + int32 nestedArgument = 1; + } +} \ No newline at end of file diff --git a/tests/inputs/import_service_input_message/request_message.proto b/tests/inputs/import_service_input_message/request_message.proto new file mode 100644 index 00000000..36a6e788 --- /dev/null +++ b/tests/inputs/import_service_input_message/request_message.proto @@ -0,0 +1,7 @@ +syntax = "proto3"; + +package import_service_input_message; + +message RequestMessage { + int32 argument = 1; +} \ No newline at end of file diff --git a/tests/inputs/import_service_input_message/test_import_service_input_message.py 
b/tests/inputs/import_service_input_message/test_import_service_input_message.py new file mode 100644 index 00000000..60c3e202 --- /dev/null +++ b/tests/inputs/import_service_input_message/test_import_service_input_message.py @@ -0,0 +1,36 @@ +import pytest + +from tests.mocks import MockChannel +from tests.output_betterproto.import_service_input_message import ( + NestedRequestMessage, + RequestMessage, + RequestResponse, + TestStub, +) +from tests.output_betterproto.import_service_input_message.child import ( + ChildRequestMessage, +) + + +@pytest.mark.asyncio +async def test_service_correctly_imports_reference_message(): + mock_response = RequestResponse(value=10) + service = TestStub(MockChannel([mock_response])) + response = await service.do_thing(RequestMessage(1)) + assert mock_response == response + + +@pytest.mark.asyncio +async def test_service_correctly_imports_reference_message_from_child_package(): + mock_response = RequestResponse(value=10) + service = TestStub(MockChannel([mock_response])) + response = await service.do_thing2(ChildRequestMessage(1)) + assert mock_response == response + + +@pytest.mark.asyncio +async def test_service_correctly_imports_nested_reference(): + mock_response = RequestResponse(value=10) + service = TestStub(MockChannel([mock_response])) + response = await service.do_thing3(NestedRequestMessage(1)) + assert mock_response == response diff --git a/tests/inputs/int32/int32.json b/tests/inputs/int32/int32.json new file mode 100644 index 00000000..34d41119 --- /dev/null +++ b/tests/inputs/int32/int32.json @@ -0,0 +1,4 @@ +{ + "positive": 150, + "negative": -150 +} diff --git a/tests/inputs/int32/int32.proto b/tests/inputs/int32/int32.proto new file mode 100644 index 00000000..4721c235 --- /dev/null +++ b/tests/inputs/int32/int32.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package int32; + +// Some documentation about the Test message. +message Test { + // Some documentation about the count. 
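Before the int32 field list continues below, a self-contained aside on why the negative value in int32.json is the interesting case: plain int32 fields are encoded as two's-complement 64-bit varints, so a negative number always costs 10 bytes on the wire, whereas the sint32 zigzag encoding would not. A minimal, dependency-free sketch:

# Pure-Python illustration (not part of this patch) of varint sizes for the
# values used in the int32 fixture.
def varint_len(value: int) -> int:
    value &= (1 << 64) - 1          # two's complement, 64-bit
    n = 1
    while value >= 0x80:
        value >>= 7
        n += 1
    return n

def zigzag32(value: int) -> int:
    return (value << 1) ^ (value >> 31)

assert varint_len(150) == 2             # "positive" from int32.json
assert varint_len(-150) == 10           # "negative" as a plain int32
assert varint_len(zigzag32(-150)) == 2  # the same value as a sint32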
+  int32 positive = 1;
+  int32 negative = 2;
+}
diff --git a/tests/inputs/invalid_field/invalid_field.proto b/tests/inputs/invalid_field/invalid_field.proto
new file mode 100644
index 00000000..e3a73ce1
--- /dev/null
+++ b/tests/inputs/invalid_field/invalid_field.proto
@@ -0,0 +1,7 @@
+syntax = "proto3";
+
+package invalid_field;
+
+message Test {
+  int32 x = 1;
+}
diff --git a/tests/inputs/invalid_field/test_invalid_field.py b/tests/inputs/invalid_field/test_invalid_field.py
new file mode 100644
index 00000000..947b8e13
--- /dev/null
+++ b/tests/inputs/invalid_field/test_invalid_field.py
@@ -0,0 +1,17 @@
+import pytest
+
+
+def test_invalid_field():
+    from tests.output_betterproto.invalid_field import Test
+
+    with pytest.raises(TypeError):
+        Test(unknown_field=12)
+
+
+def test_invalid_field_pydantic():
+    from pydantic import ValidationError
+
+    from tests.output_betterproto_pydantic.invalid_field import Test
+
+    with pytest.raises(ValidationError):
+        Test(unknown_field=12)
diff --git a/tests/inputs/map/map.json b/tests/inputs/map/map.json
new file mode 100644
index 00000000..6a1e853b
--- /dev/null
+++ b/tests/inputs/map/map.json
@@ -0,0 +1,7 @@
+{
+  "counts": {
+    "item1": 1,
+    "item2": 2,
+    "item3": 3
+  }
+}
diff --git a/tests/inputs/map/map.proto b/tests/inputs/map/map.proto
new file mode 100644
index 00000000..ecef3ccb
--- /dev/null
+++ b/tests/inputs/map/map.proto
@@ -0,0 +1,7 @@
+syntax = "proto3";
+
+package map;
+
+message Test {
+  map<string, int32> counts = 1;
+}
diff --git a/tests/inputs/mapmessage/mapmessage.json b/tests/inputs/mapmessage/mapmessage.json
new file mode 100644
index 00000000..a944ddd1
--- /dev/null
+++ b/tests/inputs/mapmessage/mapmessage.json
@@ -0,0 +1,10 @@
+{
+  "items": {
+    "foo": {
+      "count": 1
+    },
+    "bar": {
+      "count": 2
+    }
+  }
+}
diff --git a/tests/inputs/mapmessage/mapmessage.proto b/tests/inputs/mapmessage/mapmessage.proto
new file mode 100644
index 00000000..2c704a49
--- /dev/null
+++ b/tests/inputs/mapmessage/mapmessage.proto
@@ -0,0 +1,11 @@
+syntax = "proto3";
+
+package mapmessage;
+
+message Test {
+  map<string, Nested> items = 1;
+}
+
+message Nested {
+  int32 count = 1;
+}
\ No newline at end of file
diff --git a/tests/inputs/namespace_builtin_types/namespace_builtin_types.json b/tests/inputs/namespace_builtin_types/namespace_builtin_types.json
new file mode 100644
index 00000000..82000323
--- /dev/null
+++ b/tests/inputs/namespace_builtin_types/namespace_builtin_types.json
@@ -0,0 +1,16 @@
+{
+  "int": "value-for-int",
+  "float": "value-for-float",
+  "complex": "value-for-complex",
+  "list": "value-for-list",
+  "tuple": "value-for-tuple",
+  "range": "value-for-range",
+  "str": "value-for-str",
+  "bytearray": "value-for-bytearray",
+  "bytes": "value-for-bytes",
+  "memoryview": "value-for-memoryview",
+  "set": "value-for-set",
+  "frozenset": "value-for-frozenset",
+  "map": "value-for-map",
+  "bool": "value-for-bool"
+}
\ No newline at end of file
diff --git a/tests/inputs/namespace_builtin_types/namespace_builtin_types.proto b/tests/inputs/namespace_builtin_types/namespace_builtin_types.proto
new file mode 100644
index 00000000..71cb0298
--- /dev/null
+++ b/tests/inputs/namespace_builtin_types/namespace_builtin_types.proto
@@ -0,0 +1,40 @@
+syntax = "proto3";
+
+package namespace_builtin_types;
+
+// Tests that messages may contain fields with names that are python types
+
+message Test {
+  // https://docs.python.org/2/library/stdtypes.html#numeric-types-int-float-long-complex
+  string int = 1;
+  string float = 2;
+  string complex = 3;
+
+  //
https://docs.python.org/3/library/stdtypes.html#sequence-types-list-tuple-range + string list = 4; + string tuple = 5; + string range = 6; + + // https://docs.python.org/3/library/stdtypes.html#str + string str = 7; + + // https://docs.python.org/3/library/stdtypes.html#bytearray-objects + string bytearray = 8; + + // https://docs.python.org/3/library/stdtypes.html#bytes-and-bytearray-operations + string bytes = 9; + + // https://docs.python.org/3/library/stdtypes.html#memory-views + string memoryview = 10; + + // https://docs.python.org/3/library/stdtypes.html#set-types-set-frozenset + string set = 11; + string frozenset = 12; + + // https://docs.python.org/3/library/stdtypes.html#dict + string map = 13; + string dict = 14; + + // https://docs.python.org/3/library/stdtypes.html#boolean-values + string bool = 15; +} \ No newline at end of file diff --git a/tests/inputs/namespace_keywords/namespace_keywords.json b/tests/inputs/namespace_keywords/namespace_keywords.json new file mode 100644 index 00000000..4f11b602 --- /dev/null +++ b/tests/inputs/namespace_keywords/namespace_keywords.json @@ -0,0 +1,37 @@ +{ + "False": 1, + "None": 2, + "True": 3, + "and": 4, + "as": 5, + "assert": 6, + "async": 7, + "await": 8, + "break": 9, + "class": 10, + "continue": 11, + "def": 12, + "del": 13, + "elif": 14, + "else": 15, + "except": 16, + "finally": 17, + "for": 18, + "from": 19, + "global": 20, + "if": 21, + "import": 22, + "in": 23, + "is": 24, + "lambda": 25, + "nonlocal": 26, + "not": 27, + "or": 28, + "pass": 29, + "raise": 30, + "return": 31, + "try": 32, + "while": 33, + "with": 34, + "yield": 35 +} diff --git a/tests/inputs/namespace_keywords/namespace_keywords.proto b/tests/inputs/namespace_keywords/namespace_keywords.proto new file mode 100644 index 00000000..ac3e5c52 --- /dev/null +++ b/tests/inputs/namespace_keywords/namespace_keywords.proto @@ -0,0 +1,46 @@ +syntax = "proto3"; + +package namespace_keywords; + +// Tests that messages may contain fields that are Python keywords +// +// Generated with Python 3.7.6 +// print('\n'.join(f'string {k} = {i+1};' for i,k in enumerate(keyword.kwlist))) + +message Test { + string False = 1; + string None = 2; + string True = 3; + string and = 4; + string as = 5; + string assert = 6; + string async = 7; + string await = 8; + string break = 9; + string class = 10; + string continue = 11; + string def = 12; + string del = 13; + string elif = 14; + string else = 15; + string except = 16; + string finally = 17; + string for = 18; + string from = 19; + string global = 20; + string if = 21; + string import = 22; + string in = 23; + string is = 24; + string lambda = 25; + string nonlocal = 26; + string not = 27; + string or = 28; + string pass = 29; + string raise = 30; + string return = 31; + string try = 32; + string while = 33; + string with = 34; + string yield = 35; +} \ No newline at end of file diff --git a/tests/inputs/nested/nested.json b/tests/inputs/nested/nested.json new file mode 100644 index 00000000..f460cadb --- /dev/null +++ b/tests/inputs/nested/nested.json @@ -0,0 +1,7 @@ +{ + "nested": { + "count": 150 + }, + "sibling": {}, + "msg": "THIS" +} diff --git a/tests/inputs/nested/nested.proto b/tests/inputs/nested/nested.proto new file mode 100644 index 00000000..619c721c --- /dev/null +++ b/tests/inputs/nested/nested.proto @@ -0,0 +1,26 @@ +syntax = "proto3"; + +package nested; + +// A test message with a nested message inside of it. +message Test { + // This is the nested type. + message Nested { + // Stores a simple counter. 
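The nested message and enum being defined here are also a convenient place to note the class-naming convention the generated code is expected to follow: nested types are flattened into concatenated names (the nestedtwice test further below imports TestTopMiddleBottom and friends on exactly this assumption). A hypothetical usage sketch under that convention:

# Assumed generated names (not part of this patch): Test.Nested -> TestNested,
# Test.Msg -> TestMsg.
from tests.output_betterproto.nested import Sibling, Test, TestMsg, TestNested

message = Test(
    nested=TestNested(count=150),
    sibling=Sibling(),
    msg=TestMsg.THIS,
)
assert message.nested.count == 150
assert message.msg == TestMsg.THIS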
+ int32 count = 1; + } + // This is the nested enum. + enum Msg { + NONE = 0; + THIS = 1; + } + + Nested nested = 1; + Sibling sibling = 2; + Sibling sibling2 = 3; + Msg msg = 4; +} + +message Sibling { + int32 foo = 1; +} \ No newline at end of file diff --git a/tests/inputs/nested2/nested2.proto b/tests/inputs/nested2/nested2.proto new file mode 100644 index 00000000..cd6510c5 --- /dev/null +++ b/tests/inputs/nested2/nested2.proto @@ -0,0 +1,21 @@ +syntax = "proto3"; + +package nested2; + +import "package.proto"; + +message Game { + message Player { + enum Race { + human = 0; + orc = 1; + } + } +} + +message Test { + Game game = 1; + Game.Player GamePlayer = 2; + Game.Player.Race GamePlayerRace = 3; + equipment.Weapon Weapon = 4; +} \ No newline at end of file diff --git a/tests/inputs/nested2/package.proto b/tests/inputs/nested2/package.proto new file mode 100644 index 00000000..e12abb12 --- /dev/null +++ b/tests/inputs/nested2/package.proto @@ -0,0 +1,7 @@ +syntax = "proto3"; + +package nested2.equipment; + +message Weapon { + +} \ No newline at end of file diff --git a/tests/inputs/nestedtwice/nestedtwice.json b/tests/inputs/nestedtwice/nestedtwice.json new file mode 100644 index 00000000..c9531328 --- /dev/null +++ b/tests/inputs/nestedtwice/nestedtwice.json @@ -0,0 +1,11 @@ +{ + "top": { + "name": "double-nested", + "middle": { + "bottom": [{"foo": "hello"}], + "enumBottom": ["A"], + "topMiddleBottom": [{"a": "hello"}], + "bar": true + } + } +} diff --git a/tests/inputs/nestedtwice/nestedtwice.proto b/tests/inputs/nestedtwice/nestedtwice.proto new file mode 100644 index 00000000..84d142a3 --- /dev/null +++ b/tests/inputs/nestedtwice/nestedtwice.proto @@ -0,0 +1,40 @@ +syntax = "proto3"; + +package nestedtwice; + +/* Test doc. */ +message Test { + /* Top doc. */ + message Top { + /* Middle doc. */ + message Middle { + /* TopMiddleBottom doc.*/ + message TopMiddleBottom { + // TopMiddleBottom.a doc. + string a = 1; + } + /* EnumBottom doc. */ + enum EnumBottom{ + /* EnumBottom.A doc. */ + A = 0; + B = 1; + } + /* Bottom doc. */ + message Bottom { + /* Bottom.foo doc. */ + string foo = 1; + } + reserved 1; + /* Middle.bottom doc. */ + repeated Bottom bottom = 2; + repeated EnumBottom enumBottom=3; + repeated TopMiddleBottom topMiddleBottom=4; + bool bar = 5; + } + /* Top.name doc. */ + string name = 1; + Middle middle = 2; + } + /* Test.top doc. */ + Top top = 1; +} diff --git a/tests/inputs/nestedtwice/test_nestedtwice.py b/tests/inputs/nestedtwice/test_nestedtwice.py new file mode 100644 index 00000000..ca0557a7 --- /dev/null +++ b/tests/inputs/nestedtwice/test_nestedtwice.py @@ -0,0 +1,25 @@ +import pytest + +from tests.output_betterproto.nestedtwice import ( + Test, + TestTop, + TestTopMiddle, + TestTopMiddleBottom, + TestTopMiddleEnumBottom, + TestTopMiddleTopMiddleBottom, +) + + +@pytest.mark.parametrize( + ("cls", "expected_comment"), + [ + (Test, "Test doc."), + (TestTopMiddleEnumBottom, "EnumBottom doc."), + (TestTop, "Top doc."), + (TestTopMiddle, "Middle doc."), + (TestTopMiddleTopMiddleBottom, "TopMiddleBottom doc."), + (TestTopMiddleBottom, "Bottom doc."), + ], +) +def test_comment(cls, expected_comment): + assert cls.__doc__.strip() == expected_comment diff --git a/tests/inputs/oneof/oneof-name.json b/tests/inputs/oneof/oneof-name.json new file mode 100644 index 00000000..605484b6 --- /dev/null +++ b/tests/inputs/oneof/oneof-name.json @@ -0,0 +1,3 @@ +{ + "pitier": "Mr. 
T" +} diff --git a/tests/inputs/oneof/oneof.json b/tests/inputs/oneof/oneof.json new file mode 100644 index 00000000..65cafc5f --- /dev/null +++ b/tests/inputs/oneof/oneof.json @@ -0,0 +1,3 @@ +{ + "pitied": 100 +} diff --git a/tests/inputs/oneof/oneof.proto b/tests/inputs/oneof/oneof.proto new file mode 100644 index 00000000..41f93b0e --- /dev/null +++ b/tests/inputs/oneof/oneof.proto @@ -0,0 +1,23 @@ +syntax = "proto3"; + +package oneof; + +message MixedDrink { + int32 shots = 1; +} + +message Test { + oneof foo { + int32 pitied = 1; + string pitier = 2; + } + + int32 just_a_regular_field = 3; + + oneof bar { + int32 drinks = 11; + string bar_name = 12; + MixedDrink mixed_drink = 13; + } +} + diff --git a/tests/inputs/oneof/oneof_name.json b/tests/inputs/oneof/oneof_name.json new file mode 100644 index 00000000..605484b6 --- /dev/null +++ b/tests/inputs/oneof/oneof_name.json @@ -0,0 +1,3 @@ +{ + "pitier": "Mr. T" +} diff --git a/tests/inputs/oneof/test_oneof.py b/tests/inputs/oneof/test_oneof.py new file mode 100644 index 00000000..a9d675c9 --- /dev/null +++ b/tests/inputs/oneof/test_oneof.py @@ -0,0 +1,43 @@ +import betterproto +import pytest + +from tests.output_betterproto.oneof import ( + MixedDrink, + Test, +) +from tests.output_betterproto_pydantic.oneof import Test as TestPyd +from tests.util import get_test_case_json_data + + +def test_which_count(): + message = Test() + message.from_json(get_test_case_json_data("oneof")[0].json) + assert betterproto.which_one_of(message, "foo") == ("pitied", 100) + + +def test_which_name(): + message = Test() + message.from_json(get_test_case_json_data("oneof", "oneof_name.json")[0].json) + assert betterproto.which_one_of(message, "foo") == ("pitier", "Mr. T") + + +def test_which_count_pyd(): + message = TestPyd(pitier="Mr. T", just_a_regular_field=2, bar_name="a_bar") + assert betterproto.which_one_of(message, "foo") == ("pitier", "Mr. 
T") + + +def test_oneof_constructor_assign(): + message = Test(mixed_drink=MixedDrink(shots=42)) + field, value = betterproto.which_one_of(message, "bar") + assert field == "mixed_drink" + assert value.shots == 42 + + +# Issue #305: +@pytest.mark.xfail +def test_oneof_nested_assign(): + message = Test() + message.mixed_drink.shots = 42 + field, value = betterproto.which_one_of(message, "bar") + assert field == "mixed_drink" + assert value.shots == 42 diff --git a/tests/inputs/oneof_default_value_serialization/oneof_default_value_serialization.proto b/tests/inputs/oneof_default_value_serialization/oneof_default_value_serialization.proto new file mode 100644 index 00000000..f7ac6fe8 --- /dev/null +++ b/tests/inputs/oneof_default_value_serialization/oneof_default_value_serialization.proto @@ -0,0 +1,30 @@ +syntax = "proto3"; + +package oneof_default_value_serialization; + +import "google/protobuf/duration.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/wrappers.proto"; + +message Message{ + int64 value = 1; +} + +message NestedMessage{ + int64 id = 1; + oneof value_type{ + Message wrapped_message_value = 2; + } +} + +message Test{ + oneof value_type { + bool bool_value = 1; + int64 int64_value = 2; + google.protobuf.Timestamp timestamp_value = 3; + google.protobuf.Duration duration_value = 4; + Message wrapped_message_value = 5; + NestedMessage wrapped_nested_message_value = 6; + google.protobuf.BoolValue wrapped_bool_value = 7; + } +} \ No newline at end of file diff --git a/tests/inputs/oneof_default_value_serialization/test_oneof_default_value_serialization.py b/tests/inputs/oneof_default_value_serialization/test_oneof_default_value_serialization.py new file mode 100644 index 00000000..4053478e --- /dev/null +++ b/tests/inputs/oneof_default_value_serialization/test_oneof_default_value_serialization.py @@ -0,0 +1,70 @@ +import datetime + +import betterproto + +from tests.output_betterproto.oneof_default_value_serialization import ( + Message, + NestedMessage, + Test, +) + + +def assert_round_trip_serialization_works(message: Test) -> None: + assert betterproto.which_one_of(message, "value_type") == betterproto.which_one_of( + Test().from_json(message.to_json()), "value_type" + ) + + +def test_oneof_default_value_serialization_works_for_all_values(): + """ + Serialization from message with oneof set to default -> JSON -> message should keep + default value field intact. + """ + + test_cases = [ + Test(bool_value=False), + Test(int64_value=0), + Test( + timestamp_value=datetime.datetime( + year=1970, + month=1, + day=1, + hour=0, + minute=0, + tzinfo=datetime.timezone.utc, + ) + ), + Test(duration_value=datetime.timedelta(0)), + Test(wrapped_message_value=Message(value=0)), + # NOTE: Do NOT use betterproto.BoolValue here, it will cause JSON serialization + # errors. + # TODO: Do we want to allow use of BoolValue directly within a wrapped field or + # should we simply hard fail here? 
+ Test(wrapped_bool_value=False), + ] + for message in test_cases: + assert_round_trip_serialization_works(message) + + +def test_oneof_no_default_values_passed(): + message = Test() + assert ( + betterproto.which_one_of(message, "value_type") + == betterproto.which_one_of(Test().from_json(message.to_json()), "value_type") + == ("", None) + ) + + +def test_oneof_nested_oneof_messages_are_serialized_with_defaults(): + """ + Nested messages with oneofs should also be handled + """ + message = Test(wrapped_nested_message_value=NestedMessage(id=0, wrapped_message_value=Message(value=0))) + assert ( + betterproto.which_one_of(message, "value_type") + == betterproto.which_one_of(Test().from_json(message.to_json()), "value_type") + == ( + "wrapped_nested_message_value", + NestedMessage(id=0, wrapped_message_value=Message(value=0)), + ) + ) diff --git a/tests/inputs/oneof_empty/oneof_empty.json b/tests/inputs/oneof_empty/oneof_empty.json new file mode 100644 index 00000000..9d21c897 --- /dev/null +++ b/tests/inputs/oneof_empty/oneof_empty.json @@ -0,0 +1,3 @@ +{ + "nothing": {} +} diff --git a/tests/inputs/oneof_empty/oneof_empty.proto b/tests/inputs/oneof_empty/oneof_empty.proto new file mode 100644 index 00000000..ca51d5ae --- /dev/null +++ b/tests/inputs/oneof_empty/oneof_empty.proto @@ -0,0 +1,17 @@ +syntax = "proto3"; + +package oneof_empty; + +message Nothing {} + +message MaybeNothing { + string sometimes = 42; +} + +message Test { + oneof empty { + Nothing nothing = 1; + MaybeNothing maybe1 = 2; + MaybeNothing maybe2 = 3; + } +} diff --git a/tests/inputs/oneof_empty/oneof_empty_maybe1.json b/tests/inputs/oneof_empty/oneof_empty_maybe1.json new file mode 100644 index 00000000..f7a2d278 --- /dev/null +++ b/tests/inputs/oneof_empty/oneof_empty_maybe1.json @@ -0,0 +1,3 @@ +{ + "maybe1": {} +} diff --git a/tests/inputs/oneof_empty/oneof_empty_maybe2.json b/tests/inputs/oneof_empty/oneof_empty_maybe2.json new file mode 100644 index 00000000..bc2b385b --- /dev/null +++ b/tests/inputs/oneof_empty/oneof_empty_maybe2.json @@ -0,0 +1,5 @@ +{ + "maybe2": { + "sometimes": "now" + } +} diff --git a/tests/inputs/oneof_empty/test_oneof_empty.py b/tests/inputs/oneof_empty/test_oneof_empty.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/inputs/oneof_enum/oneof_enum-enum-0.json b/tests/inputs/oneof_enum/oneof_enum-enum-0.json new file mode 100644 index 00000000..be30cf08 --- /dev/null +++ b/tests/inputs/oneof_enum/oneof_enum-enum-0.json @@ -0,0 +1,3 @@ +{ + "signal": "PASS" +} diff --git a/tests/inputs/oneof_enum/oneof_enum-enum-1.json b/tests/inputs/oneof_enum/oneof_enum-enum-1.json new file mode 100644 index 00000000..cb638737 --- /dev/null +++ b/tests/inputs/oneof_enum/oneof_enum-enum-1.json @@ -0,0 +1,3 @@ +{ + "signal": "RESIGN" +} diff --git a/tests/inputs/oneof_enum/oneof_enum.json b/tests/inputs/oneof_enum/oneof_enum.json new file mode 100644 index 00000000..3220b706 --- /dev/null +++ b/tests/inputs/oneof_enum/oneof_enum.json @@ -0,0 +1,6 @@ +{ + "move": { + "x": 2, + "y": 3 + } +} diff --git a/tests/inputs/oneof_enum/oneof_enum.proto b/tests/inputs/oneof_enum/oneof_enum.proto new file mode 100644 index 00000000..906abcb1 --- /dev/null +++ b/tests/inputs/oneof_enum/oneof_enum.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; + +package oneof_enum; + +message Test { + oneof action { + Signal signal = 1; + Move move = 2; + } +} + +enum Signal { + PASS = 0; + RESIGN = 1; +} + +message Move { + int32 x = 1; + int32 y = 2; +} \ No newline at end of file diff --git 
a/tests/inputs/oneof_enum/test_oneof_enum.py b/tests/inputs/oneof_enum/test_oneof_enum.py new file mode 100644 index 00000000..375ea196 --- /dev/null +++ b/tests/inputs/oneof_enum/test_oneof_enum.py @@ -0,0 +1,40 @@ +import betterproto + +from tests.output_betterproto.oneof_enum import ( + Move, + Signal, + Test, +) +from tests.util import get_test_case_json_data + + +def test_which_one_of_returns_enum_with_default_value(): + """ + Returns the first field when it is an enum set to its default value. + """ + message = Test() + message.from_json(get_test_case_json_data("oneof_enum", "oneof_enum-enum-0.json")[0].json) + + assert message.move is None + assert message.signal == Signal.PASS + assert betterproto.which_one_of(message, "action") == ("signal", Signal.PASS) + + +def test_which_one_of_returns_enum_with_non_default_value(): + """ + Returns the first field when it is an enum set to a non-default value. + """ + message = Test() + message.from_json(get_test_case_json_data("oneof_enum", "oneof_enum-enum-1.json")[0].json) + + assert message.move is None + assert message.signal == Signal.RESIGN + assert betterproto.which_one_of(message, "action") == ("signal", Signal.RESIGN) + + +def test_which_one_of_returns_second_field_when_set(): + message = Test() + message.from_json(get_test_case_json_data("oneof_enum")[0].json) + assert message.move == Move(x=2, y=3) + assert message.signal is None + assert betterproto.which_one_of(message, "action") == ("move", Move(x=2, y=3)) diff --git a/tests/inputs/proto3_field_presence/proto3_field_presence.json b/tests/inputs/proto3_field_presence/proto3_field_presence.json new file mode 100644 index 00000000..988df8e8 --- /dev/null +++ b/tests/inputs/proto3_field_presence/proto3_field_presence.json @@ -0,0 +1,13 @@ +{ + "test1": 128, + "test2": true, + "test3": "A value", + "test4": "aGVsbG8=", + "test5": { + "test": "Hello" + }, + "test6": "B", + "test7": "8589934592", + "test8": 2.5, + "test9": "2022-01-24T12:12:42Z" +} diff --git a/tests/inputs/proto3_field_presence/proto3_field_presence.proto b/tests/inputs/proto3_field_presence/proto3_field_presence.proto new file mode 100644 index 00000000..f28123df --- /dev/null +++ b/tests/inputs/proto3_field_presence/proto3_field_presence.proto @@ -0,0 +1,26 @@ +syntax = "proto3"; + +package proto3_field_presence; + +import "google/protobuf/timestamp.proto"; + +message InnerTest { + string test = 1; +} + +message Test { + optional uint32 test1 = 1; + optional bool test2 = 2; + optional string test3 = 3; + optional bytes test4 = 4; + optional InnerTest test5 = 5; + optional TestEnum test6 = 6; + optional uint64 test7 = 7; + optional float test8 = 8; + optional google.protobuf.Timestamp test9 = 9; +} + +enum TestEnum { + A = 0; + B = 1; +} diff --git a/tests/inputs/proto3_field_presence/proto3_field_presence_default.json b/tests/inputs/proto3_field_presence/proto3_field_presence_default.json new file mode 100644 index 00000000..0967ef42 --- /dev/null +++ b/tests/inputs/proto3_field_presence/proto3_field_presence_default.json @@ -0,0 +1 @@ +{} diff --git a/tests/inputs/proto3_field_presence/proto3_field_presence_missing.json b/tests/inputs/proto3_field_presence/proto3_field_presence_missing.json new file mode 100644 index 00000000..b19ae980 --- /dev/null +++ b/tests/inputs/proto3_field_presence/proto3_field_presence_missing.json @@ -0,0 +1,9 @@ +{ + "test1": 0, + "test2": false, + "test3": "", + "test4": "", + "test6": "A", + "test7": "0", + "test8": 0 +} diff --git
a/tests/inputs/proto3_field_presence/test_proto3_field_presence.py b/tests/inputs/proto3_field_presence/test_proto3_field_presence.py new file mode 100644 index 00000000..9c2d6e69 --- /dev/null +++ b/tests/inputs/proto3_field_presence/test_proto3_field_presence.py @@ -0,0 +1,46 @@ +import json + +from tests.output_betterproto.proto3_field_presence import ( + Test, +) + + +def test_null_fields_json(): + """Ensure that using "null" in JSON is equivalent to not specifying a + field, for fields with explicit presence""" + + def test_json(ref_json: str, obj_json: str) -> None: + """`ref_json` and `obj_json` are JSON strings describing a `Test` object. + Test that deserializing both leads to the same object, and that + `ref_json` is the normalized format.""" + ref_obj = Test().from_json(ref_json) + obj = Test().from_json(obj_json) + + assert obj == ref_obj + assert json.loads(obj.to_json(0)) == json.loads(ref_json) + + test_json("{}", '{ "test1": null, "test2": null, "test3": null }') + test_json("{}", '{ "test4": null, "test5": null, "test6": null }') + test_json("{}", '{ "test7": null, "test8": null }') + test_json('{ "test5": {} }', '{ "test3": null, "test5": {} }') + + # Make sure that if include_default_values is set, None values are + # exported. + obj = Test() + assert obj.to_dict() == {} + assert obj.to_dict(include_default_values=True) == { + "test1": None, + "test2": None, + "test3": None, + "test4": None, + "test5": None, + "test6": None, + "test7": None, + "test8": None, + "test9": None, + } + + +def test_unset_access(): # see #523 + assert Test().test1 is None + assert Test(test1=None).test1 is None diff --git a/tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.json b/tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.json new file mode 100644 index 00000000..da081927 --- /dev/null +++ b/tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.json @@ -0,0 +1,3 @@ +{ + "nested": {} +} diff --git a/tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.proto b/tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.proto new file mode 100644 index 00000000..caa76ec8 --- /dev/null +++ b/tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; + +package proto3_field_presence_oneof; + +message Test { + oneof kind { + Nested nested = 1; + WithOptional with_optional = 2; + } +} + +message InnerNested { + optional bool a = 1; +} + +message Nested { + InnerNested inner = 1; +} + +message WithOptional { + optional bool b = 2; +} diff --git a/tests/inputs/proto3_field_presence_oneof/test_proto3_field_presence_oneof.py b/tests/inputs/proto3_field_presence_oneof/test_proto3_field_presence_oneof.py new file mode 100644 index 00000000..2320dc64 --- /dev/null +++ b/tests/inputs/proto3_field_presence_oneof/test_proto3_field_presence_oneof.py @@ -0,0 +1,27 @@ +from tests.output_betterproto.proto3_field_presence_oneof import ( + Nested, + Test, + WithOptional, +) + + +def test_serialization(): + """Ensure that serialization of fields that are unset but have explicit + field presence does not bloat the serialized payload with length-delimited + fields of length 0""" + + def test_empty_nested(message: Test) -> None: + # '0a' => tag 1, length delimited + # '00' => length: 0 + assert bytes(message) == bytearray.fromhex("0a 00") + + test_empty_nested(Test(nested=Nested())) + test_empty_nested(Test(nested=Nested(inner=None))) + + def test_empty_with_optional(message: Test) -> None: + # 
'12' => tag 2, length delimited + # '00' => length: 0 + assert bytes(message) == bytearray.fromhex("12 00") + + test_empty_with_optional(Test(with_optional=WithOptional())) + test_empty_with_optional(Test(with_optional=WithOptional(b=None))) diff --git a/tests/inputs/recursivemessage/recursivemessage.json b/tests/inputs/recursivemessage/recursivemessage.json new file mode 100644 index 00000000..e92c3fbf --- /dev/null +++ b/tests/inputs/recursivemessage/recursivemessage.json @@ -0,0 +1,12 @@ +{ + "name": "Zues", + "child": { + "name": "Hercules" + }, + "intermediate": { + "child": { + "name": "Douglas Adams" + }, + "number": 42 + } +} diff --git a/tests/inputs/recursivemessage/recursivemessage.proto b/tests/inputs/recursivemessage/recursivemessage.proto new file mode 100644 index 00000000..1da2b57e --- /dev/null +++ b/tests/inputs/recursivemessage/recursivemessage.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; + +package recursivemessage; + +message Test { + string name = 1; + Test child = 2; + Intermediate intermediate = 3; +} + + +message Intermediate { + int32 number = 1; + Test child = 2; +} diff --git a/tests/inputs/ref/ref.json b/tests/inputs/ref/ref.json new file mode 100644 index 00000000..2c6bdc10 --- /dev/null +++ b/tests/inputs/ref/ref.json @@ -0,0 +1,5 @@ +{ + "greeting": { + "greeting": "hello" + } +} diff --git a/tests/inputs/ref/ref.proto b/tests/inputs/ref/ref.proto new file mode 100644 index 00000000..69455909 --- /dev/null +++ b/tests/inputs/ref/ref.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package ref; + +import "repeatedmessage.proto"; + +message Test { + repeatedmessage.Sub greeting = 1; +} diff --git a/tests/inputs/ref/repeatedmessage.proto b/tests/inputs/ref/repeatedmessage.proto new file mode 100644 index 00000000..0ffacafd --- /dev/null +++ b/tests/inputs/ref/repeatedmessage.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package repeatedmessage; + +message Test { + repeated Sub greetings = 1; +} + +message Sub { + string greeting = 1; +} \ No newline at end of file diff --git a/tests/inputs/regression_387/regression_387.proto b/tests/inputs/regression_387/regression_387.proto new file mode 100644 index 00000000..57bd9544 --- /dev/null +++ b/tests/inputs/regression_387/regression_387.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package regression_387; + +message Test { + uint64 id = 1; +} + +message ParentElement { + string name = 1; + repeated Test elems = 2; +} \ No newline at end of file diff --git a/tests/inputs/regression_387/test_regression_387.py b/tests/inputs/regression_387/test_regression_387.py new file mode 100644 index 00000000..7bb40b2e --- /dev/null +++ b/tests/inputs/regression_387/test_regression_387.py @@ -0,0 +1,12 @@ +from tests.output_betterproto.regression_387 import ( + ParentElement, + Test, +) + + +def test_regression_387(): + el = ParentElement(name="test", elems=[Test(id=0), Test(id=42)]) + binary = bytes(el) + decoded = ParentElement().parse(binary) + assert decoded == el + assert decoded.elems == [Test(id=0), Test(id=42)] diff --git a/tests/inputs/regression_414/regression_414.proto b/tests/inputs/regression_414/regression_414.proto new file mode 100644 index 00000000..d20dddab --- /dev/null +++ b/tests/inputs/regression_414/regression_414.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package regression_414; + +message Test { + bytes body = 1; + bytes auth = 2; + repeated bytes signatures = 3; +} \ No newline at end of file diff --git a/tests/inputs/regression_414/test_regression_414.py b/tests/inputs/regression_414/test_regression_414.py 
new file mode 100644 index 00000000..742c97b4 --- /dev/null +++ b/tests/inputs/regression_414/test_regression_414.py @@ -0,0 +1,15 @@ +from tests.output_betterproto.regression_414 import Test + + +def test_full_cycle(): + body = bytes([0, 1]) + auth = bytes([2, 3]) + sig = [b""] + + obj = Test(body=body, auth=auth, signatures=sig) + + decoded = Test().parse(bytes(obj)) + assert decoded == obj + assert decoded.body == body + assert decoded.auth == auth + assert decoded.signatures == sig diff --git a/tests/inputs/repeated/repeated.json b/tests/inputs/repeated/repeated.json new file mode 100644 index 00000000..b8a7c4eb --- /dev/null +++ b/tests/inputs/repeated/repeated.json @@ -0,0 +1,3 @@ +{ + "names": ["one", "two", "three"] +} diff --git a/tests/inputs/repeated/repeated.proto b/tests/inputs/repeated/repeated.proto new file mode 100644 index 00000000..4f3c788c --- /dev/null +++ b/tests/inputs/repeated/repeated.proto @@ -0,0 +1,7 @@ +syntax = "proto3"; + +package repeated; + +message Test { + repeated string names = 1; +} diff --git a/tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.json b/tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.json new file mode 100644 index 00000000..6ce7b34c --- /dev/null +++ b/tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.json @@ -0,0 +1,4 @@ +{ + "times": ["1972-01-01T10:00:20.021Z", "1972-01-01T10:00:20.021Z"], + "durations": ["1.200s", "1.200s"] +} diff --git a/tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.proto b/tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.proto new file mode 100644 index 00000000..38f1eaa3 --- /dev/null +++ b/tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package repeated_duration_timestamp; + +import "google/protobuf/duration.proto"; +import "google/protobuf/timestamp.proto"; + + +message Test { + repeated google.protobuf.Timestamp times = 1; + repeated google.protobuf.Duration durations = 2; +} diff --git a/tests/inputs/repeated_duration_timestamp/test_repeated_duration_timestamp.py b/tests/inputs/repeated_duration_timestamp/test_repeated_duration_timestamp.py new file mode 100644 index 00000000..efc34866 --- /dev/null +++ b/tests/inputs/repeated_duration_timestamp/test_repeated_duration_timestamp.py @@ -0,0 +1,12 @@ +from datetime import ( + datetime, + timedelta, +) + +from tests.output_betterproto.repeated_duration_timestamp import Test + + +def test_roundtrip(): + message = Test() + message.times = [datetime.now(), datetime.now()] + message.durations = [timedelta(), timedelta()] diff --git a/tests/inputs/repeatedmessage/repeatedmessage.json b/tests/inputs/repeatedmessage/repeatedmessage.json new file mode 100644 index 00000000..90ec5967 --- /dev/null +++ b/tests/inputs/repeatedmessage/repeatedmessage.json @@ -0,0 +1,10 @@ +{ + "greetings": [ + { + "greeting": "hello" + }, + { + "greeting": "hi" + } + ] +} diff --git a/tests/inputs/repeatedmessage/repeatedmessage.proto b/tests/inputs/repeatedmessage/repeatedmessage.proto new file mode 100644 index 00000000..0ffacafd --- /dev/null +++ b/tests/inputs/repeatedmessage/repeatedmessage.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package repeatedmessage; + +message Test { + repeated Sub greetings = 1; +} + +message Sub { + string greeting = 1; +} \ No newline at end of file diff --git a/tests/inputs/repeatedpacked/repeatedpacked.json b/tests/inputs/repeatedpacked/repeatedpacked.json new file mode 100644 index 
00000000..106fd908 --- /dev/null +++ b/tests/inputs/repeatedpacked/repeatedpacked.json @@ -0,0 +1,5 @@ +{ + "counts": [1, 2, -1, -2], + "signed": ["1", "2", "-1", "-2"], + "fixed": [1.0, 2.7, 3.4] +} diff --git a/tests/inputs/repeatedpacked/repeatedpacked.proto b/tests/inputs/repeatedpacked/repeatedpacked.proto new file mode 100644 index 00000000..a037d1b8 --- /dev/null +++ b/tests/inputs/repeatedpacked/repeatedpacked.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package repeatedpacked; + +message Test { + repeated int32 counts = 1; + repeated sint64 signed = 2; + repeated double fixed = 3; +} diff --git a/tests/inputs/rpc_empty_input_message/rpc_empty_input_message.proto b/tests/inputs/rpc_empty_input_message/rpc_empty_input_message.proto new file mode 100644 index 00000000..9a4449c6 --- /dev/null +++ b/tests/inputs/rpc_empty_input_message/rpc_empty_input_message.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package rpc_empty_input_message; + +message Test {} + +message Response { + int32 v = 1; +} + +service Service { + rpc read(Test) returns (Response); +} diff --git a/tests/inputs/rpc_empty_input_message/test_rpc_empty_input_message.py b/tests/inputs/rpc_empty_input_message/test_rpc_empty_input_message.py new file mode 100644 index 00000000..f77578f6 --- /dev/null +++ b/tests/inputs/rpc_empty_input_message/test_rpc_empty_input_message.py @@ -0,0 +1,24 @@ +import pytest +from grpclib.testing import ChannelFor + + +@pytest.mark.asyncio +async def test_rpc_input_message(): + from tests.output_betterproto.rpc_empty_input_message import ( + Response, + ServiceBase, + ServiceStub, + Test, + ) + + class Service(ServiceBase): + async def read(self, test: "Test") -> "Response": + return Response(v=42) + + async with ChannelFor([Service()]) as channel: + client = ServiceStub(channel) + + assert (await client.read(Test())).v == 42 + + # Check that we can call the method without providing the message + assert (await client.read()).v == 42 diff --git a/tests/inputs/service/service.proto b/tests/inputs/service/service.proto new file mode 100644 index 00000000..53d84fbd --- /dev/null +++ b/tests/inputs/service/service.proto @@ -0,0 +1,35 @@ +syntax = "proto3"; + +package service; + +enum ThingType { + UNKNOWN = 0; + LIVING = 1; + DEAD = 2; +} + +message DoThingRequest { + string name = 1; + repeated string comments = 2; + ThingType type = 3; +} + +message DoThingResponse { + repeated string names = 1; +} + +message GetThingRequest { + string name = 1; +} + +message GetThingResponse { + string name = 1; + int32 version = 2; +} + +service Test { + rpc DoThing (DoThingRequest) returns (DoThingResponse); + rpc DoManyThings (stream DoThingRequest) returns (DoThingResponse); + rpc GetThingVersions (GetThingRequest) returns (stream GetThingResponse); + rpc GetDifferentThings (stream GetThingRequest) returns (stream GetThingResponse); +} diff --git a/tests/inputs/service_separate_packages/messages.proto b/tests/inputs/service_separate_packages/messages.proto new file mode 100644 index 00000000..270b188f --- /dev/null +++ b/tests/inputs/service_separate_packages/messages.proto @@ -0,0 +1,31 @@ +syntax = "proto3"; + +import "google/protobuf/duration.proto"; +import "google/protobuf/timestamp.proto"; + +package service_separate_packages.things.messages; + +message DoThingRequest { + string name = 1; + + // use `repeated` so we can check if `List` is correctly imported + repeated string comments = 2; + + // use google types `timestamp` and `duration` so we can check + // if everything from `datetime` is correctly 
imported + google.protobuf.Timestamp when = 3; + google.protobuf.Duration duration = 4; +} + +message DoThingResponse { + repeated string names = 1; +} + +message GetThingRequest { + string name = 1; +} + +message GetThingResponse { + string name = 1; + int32 version = 2; +} diff --git a/tests/inputs/service_separate_packages/service.proto b/tests/inputs/service_separate_packages/service.proto new file mode 100644 index 00000000..950eab49 --- /dev/null +++ b/tests/inputs/service_separate_packages/service.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +import "messages.proto"; + +package service_separate_packages.things.service; + +service Test { + rpc DoThing (things.messages.DoThingRequest) returns (things.messages.DoThingResponse); + rpc DoManyThings (stream things.messages.DoThingRequest) returns (things.messages.DoThingResponse); + rpc GetThingVersions (things.messages.GetThingRequest) returns (stream things.messages.GetThingResponse); + rpc GetDifferentThings (stream things.messages.GetThingRequest) returns (stream things.messages.GetThingResponse); +} diff --git a/tests/inputs/service_uppercase/service.proto b/tests/inputs/service_uppercase/service.proto new file mode 100644 index 00000000..786eec2c --- /dev/null +++ b/tests/inputs/service_uppercase/service.proto @@ -0,0 +1,16 @@ +syntax = "proto3"; + +package service_uppercase; + +message DoTHINGRequest { + string name = 1; + repeated string comments = 2; +} + +message DoTHINGResponse { + repeated string names = 1; +} + +service Test { + rpc DoThing (DoTHINGRequest) returns (DoTHINGResponse); +} diff --git a/tests/inputs/service_uppercase/test_service.py b/tests/inputs/service_uppercase/test_service.py new file mode 100644 index 00000000..35405e13 --- /dev/null +++ b/tests/inputs/service_uppercase/test_service.py @@ -0,0 +1,8 @@ +import inspect + +from tests.output_betterproto.service_uppercase import TestStub + + +def test_parameters(): + sig = inspect.signature(TestStub.do_thing) + assert len(sig.parameters) == 5, "Expected 5 parameters" diff --git a/tests/inputs/signed/signed.json b/tests/inputs/signed/signed.json new file mode 100644 index 00000000..b171e155 --- /dev/null +++ b/tests/inputs/signed/signed.json @@ -0,0 +1,6 @@ +{ + "signed32": 150, + "negative32": -150, + "string64": "150", + "negative64": "-150" +} diff --git a/tests/inputs/signed/signed.proto b/tests/inputs/signed/signed.proto new file mode 100644 index 00000000..b40aad49 --- /dev/null +++ b/tests/inputs/signed/signed.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package signed; + +message Test { + // todo: rename fields after fixing bug where 'signed_32_positive' will map to 'signed_32Positive' as output json + sint32 signed32 = 1; // signed_32_positive + sint32 negative32 = 2; // signed_32_negative + sint64 string64 = 3; // signed_64_positive + sint64 negative64 = 4; // signed_64_negative +} diff --git a/tests/inputs/timestamp_dict_encode/test_timestamp_dict_encode.py b/tests/inputs/timestamp_dict_encode/test_timestamp_dict_encode.py new file mode 100644 index 00000000..35783ea6 --- /dev/null +++ b/tests/inputs/timestamp_dict_encode/test_timestamp_dict_encode.py @@ -0,0 +1,78 @@ +from datetime import ( + datetime, + timedelta, + timezone, +) + +import pytest + +from tests.output_betterproto.timestamp_dict_encode import Test + +# Current World Timezone range (UTC-12 to UTC+14) +MIN_UTC_OFFSET_MIN = -12 * 60 +MAX_UTC_OFFSET_MIN = 14 * 60 + +# Generate all timezones in range in 15 min increments +timezones = [timezone(timedelta(minutes=x)) for x in 
range(MIN_UTC_OFFSET_MIN, MAX_UTC_OFFSET_MIN + 1, 15)] + + +@pytest.mark.parametrize("tz", timezones) +def test_timezone_aware_datetime_dict_encode(tz: timezone): + original_time = datetime.now(tz=tz) + original_message = Test() + original_message.ts = original_time + encoded = original_message.to_dict() + decoded_message = Test() + decoded_message.from_dict(encoded) + + # check that the timestamps are equal after decoding from dict + assert original_message.ts.tzinfo is not None + assert decoded_message.ts.tzinfo is not None + assert original_message.ts == decoded_message.ts + + +def test_naive_datetime_dict_encode(): + # make sure naive datetime objects are still treated as utc + original_time = datetime.now() + assert original_time.tzinfo is None + original_message = Test() + original_message.ts = original_time + original_time_utc = original_time.replace(tzinfo=timezone.utc) + encoded = original_message.to_dict() + decoded_message = Test() + decoded_message.from_dict(encoded) + + # check that the timestamps are equal after decoding from dict + assert decoded_message.ts.tzinfo is not None + assert original_time_utc == decoded_message.ts + + +@pytest.mark.parametrize("tz", timezones) +def test_timezone_aware_json_serialize(tz: timezone): + original_time = datetime.now(tz=tz) + original_message = Test() + original_message.ts = original_time + json_serialized = original_message.to_json() + decoded_message = Test() + decoded_message.from_json(json_serialized) + + # check that the timestamps are equal after decoding from json + assert original_message.ts.tzinfo is not None + assert decoded_message.ts.tzinfo is not None + assert original_message.ts == decoded_message.ts + + +def test_naive_datetime_json_serialize(): + # make sure naive datetime objects are still treated as utc + original_time = datetime.now() + assert original_time.tzinfo is None + original_message = Test() + original_message.ts = original_time + original_time_utc = original_time.replace(tzinfo=timezone.utc) + json_serialized = original_message.to_json() + decoded_message = Test() + decoded_message.from_json(json_serialized) + + # check that the timestamps are equal after decoding from json + assert decoded_message.ts.tzinfo is not None + assert original_time_utc == decoded_message.ts diff --git a/tests/inputs/timestamp_dict_encode/timestamp_dict_encode.json b/tests/inputs/timestamp_dict_encode/timestamp_dict_encode.json new file mode 100644 index 00000000..3f455587 --- /dev/null +++ b/tests/inputs/timestamp_dict_encode/timestamp_dict_encode.json @@ -0,0 +1,3 @@ +{ + "ts" : "2023-03-15T22:35:51.253277Z" +} \ No newline at end of file diff --git a/tests/inputs/timestamp_dict_encode/timestamp_dict_encode.proto b/tests/inputs/timestamp_dict_encode/timestamp_dict_encode.proto new file mode 100644 index 00000000..9c4081ac --- /dev/null +++ b/tests/inputs/timestamp_dict_encode/timestamp_dict_encode.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package timestamp_dict_encode; + +import "google/protobuf/timestamp.proto"; + +message Test { + google.protobuf.Timestamp ts = 1; +} \ No newline at end of file diff --git a/tests/util.py b/tests/util.py new file mode 100644 index 00000000..4f67aaee --- /dev/null +++ b/tests/util.py @@ -0,0 +1,158 @@ +import asyncio +import atexit +import importlib +import os +import platform +import sys +import tempfile +from dataclasses import dataclass +from pathlib import Path +from types import ModuleType +from typing import ( + Callable, + Dict, + Generator, + List, + Optional, + Tuple, + Union, +) + 
+os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python" + +root_path = Path(__file__).resolve().parent +inputs_path = root_path.joinpath("inputs") +output_path_reference = root_path.joinpath("output_reference") +output_path_betterproto = root_path.joinpath("output_betterproto") +output_path_betterproto_pydantic = root_path.joinpath("output_betterproto_pydantic") + + +def get_files(path, suffix: str) -> Generator[str, None, None]: + for r, dirs, files in os.walk(path): + for filename in [f for f in files if f.endswith(suffix)]: + yield os.path.join(r, filename) + + +def get_directories(path): + for root, directories, files in os.walk(path): + yield from directories + + +async def protoc( + path: Union[str, Path], + output_dir: Union[str, Path], + reference: bool = False, + pydantic_dataclasses: bool = False, +): + path: Path = Path(path).resolve() + output_dir: Path = Path(output_dir).resolve() + python_out_option: str = "python_betterproto_out" if not reference else "python_out" + + if pydantic_dataclasses: + plugin_path = Path("src/betterproto2_compiler/plugin/main.py") + + if "Win" in platform.system(): + with tempfile.NamedTemporaryFile("w", encoding="UTF-8", suffix=".bat", delete=False) as tf: + # See https://stackoverflow.com/a/42622705 + tf.writelines( + [ + "@echo off", + f"\nchdir {os.getcwd()}", + f"\n{sys.executable} -u {plugin_path.as_posix()}", + ] + ) + + tf.flush() + + plugin_path = Path(tf.name) + atexit.register(os.remove, plugin_path) + + command = [ + sys.executable, + "-m", + "grpc.tools.protoc", + f"--plugin=protoc-gen-custom={plugin_path.as_posix()}", + "--experimental_allow_proto3_optional", + "--custom_opt=pydantic_dataclasses", + f"--proto_path={path.as_posix()}", + f"--custom_out={output_dir.as_posix()}", + *[p.as_posix() for p in path.glob("*.proto")], + ] + else: + command = [ + sys.executable, + "-m", + "grpc.tools.protoc", + f"--proto_path={path.as_posix()}", + f"--{python_out_option}={output_dir.as_posix()}", + *[p.as_posix() for p in path.glob("*.proto")], + ] + proc = await asyncio.create_subprocess_exec( + *command, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE + ) + stdout, stderr = await proc.communicate() + return stdout, stderr, proc.returncode + + +@dataclass +class TestCaseJsonFile: + json: str + test_name: str + file_name: str + + def belongs_to(self, non_symmetrical_json: Dict[str, Tuple[str, ...]]) -> bool: + return self.file_name in non_symmetrical_json.get(self.test_name, ()) + + +def get_test_case_json_data(test_case_name: str, *json_file_names: str) -> List[TestCaseJsonFile]: + """ + :return: + A list of all files found in "{inputs_path}/test_case_name" with names matching + f"{test_case_name}.json" or f"{test_case_name}_*.json", OR given by + json_file_names + """ + test_case_dir = inputs_path.joinpath(test_case_name) + possible_file_paths = [ + *(test_case_dir.joinpath(json_file_name) for json_file_name in json_file_names), + test_case_dir.joinpath(f"{test_case_name}.json"), + *test_case_dir.glob(f"{test_case_name}_*.json"), + ] + + result = [] + for test_data_file_path in possible_file_paths: + if not test_data_file_path.exists(): + continue + with test_data_file_path.open("r") as fh: + result.append(TestCaseJsonFile(fh.read(), test_case_name, test_data_file_path.name.split(".")[0])) + + return result + + +def find_module(module: ModuleType, predicate: Callable[[ModuleType], bool]) -> Optional[ModuleType]: + """ + Recursively search module tree for a module that matches the search predicate. 
+ Assumes that the submodules are directories containing __init__.py. + + Example: + + # find module inside foo that contains Test + import foo + test_module = find_module(foo, lambda m: hasattr(m, 'Test')) + """ + if predicate(module): + return module + + module_path = Path(*module.__path__) + + for sub in [sub.parent for sub in module_path.glob("**/__init__.py")]: + if sub == module_path: + continue + sub_module_path = sub.relative_to(module_path) + sub_module_name = ".".join(sub_module_path.parts) + + sub_module = importlib.import_module(f".{sub_module_name}", module.__name__) + + if predicate(sub_module): + return sub_module + + return None From 28c439ee63239fc0302c75ec5273618de1ded086 Mon Sep 17 00:00:00 2001 From: Adrien Vannson Date: Fri, 20 Dec 2024 11:47:33 +0100 Subject: [PATCH 04/13] Update Python version --- .github/workflows/release.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f03ad310..b96efb10 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -16,10 +16,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - name: Set up Python 3.8 + - name: Set up Python 3.10 uses: actions/setup-python@v4 with: - python-version: 3.8 + python-version: 3.10 - name: Install poetry run: python -m pip install poetry - name: Build package From 300e957c128cfe808b2b519d49687fd72fbb57e6 Mon Sep 17 00:00:00 2001 From: Adrien Vannson Date: Fri, 20 Dec 2024 11:50:48 +0100 Subject: [PATCH 05/13] Remove lib files --- src/betterproto2_compiler/__init__.py | 1716 ----------------- .../lib/std/google/protobuf/__init__.py | 524 +++-- .../std/google/protobuf/compiler/__init__.py | 51 +- 3 files changed, 283 insertions(+), 2008 deletions(-) diff --git a/src/betterproto2_compiler/__init__.py b/src/betterproto2_compiler/__init__.py index dfa9dd5e..e69de29b 100644 --- a/src/betterproto2_compiler/__init__.py +++ b/src/betterproto2_compiler/__init__.py @@ -1,1716 +0,0 @@ -from __future__ import annotations - -__all__ = ["__version__"] - -import dataclasses -import enum as builtin_enum -import json -import math -import struct -import sys -import typing -import warnings -from abc import ABC -from base64 import ( - b64decode, - b64encode, -) -from copy import deepcopy -from datetime import ( - datetime, - timedelta, - timezone, -) -from io import BytesIO -from itertools import count -from typing import ( - TYPE_CHECKING, - Any, - Callable, - ClassVar, - Dict, - Generator, - Iterable, - Mapping, - Optional, - Set, - Tuple, - Type, - Union, - get_type_hints, -) - -from dateutil.parser import isoparse -from typing_extensions import Self - -from ._types import T -from ._version import __version__ -from .casing import ( - camel_case, - safe_snake_case, - snake_case, -) -from .enum import Enum as Enum -from .grpc.grpclib_client import ServiceStub as ServiceStub -from .utils import ( - classproperty, - hybridmethod, -) - -if TYPE_CHECKING: - from _typeshed import ( - SupportsRead, - SupportsWrite, - ) - -if sys.version_info >= (3, 10): - from types import UnionType as _types_UnionType -else: - - class _types_UnionType: ... 
- - -# Proto 3 data types -TYPE_ENUM = "enum" -TYPE_BOOL = "bool" -TYPE_INT32 = "int32" -TYPE_INT64 = "int64" -TYPE_UINT32 = "uint32" -TYPE_UINT64 = "uint64" -TYPE_SINT32 = "sint32" -TYPE_SINT64 = "sint64" -TYPE_FLOAT = "float" -TYPE_DOUBLE = "double" -TYPE_FIXED32 = "fixed32" -TYPE_SFIXED32 = "sfixed32" -TYPE_FIXED64 = "fixed64" -TYPE_SFIXED64 = "sfixed64" -TYPE_STRING = "string" -TYPE_BYTES = "bytes" -TYPE_MESSAGE = "message" -TYPE_MAP = "map" - -# Fields that use a fixed amount of space (4 or 8 bytes) -FIXED_TYPES = [ - TYPE_FLOAT, - TYPE_DOUBLE, - TYPE_FIXED32, - TYPE_SFIXED32, - TYPE_FIXED64, - TYPE_SFIXED64, -] - -# Fields that are numerical 64-bit types -INT_64_TYPES = [TYPE_INT64, TYPE_UINT64, TYPE_SINT64, TYPE_FIXED64, TYPE_SFIXED64] - -# Fields that are efficiently packed when -PACKED_TYPES = [ - TYPE_ENUM, - TYPE_BOOL, - TYPE_INT32, - TYPE_INT64, - TYPE_UINT32, - TYPE_UINT64, - TYPE_SINT32, - TYPE_SINT64, - TYPE_FLOAT, - TYPE_DOUBLE, - TYPE_FIXED32, - TYPE_SFIXED32, - TYPE_FIXED64, - TYPE_SFIXED64, -] - -# Wire types -# https://developers.google.com/protocol-buffers/docs/encoding#structure -WIRE_VARINT = 0 -WIRE_FIXED_64 = 1 -WIRE_LEN_DELIM = 2 -WIRE_FIXED_32 = 5 - -# Mappings of which Proto 3 types correspond to which wire types. -WIRE_VARINT_TYPES = [ - TYPE_ENUM, - TYPE_BOOL, - TYPE_INT32, - TYPE_INT64, - TYPE_UINT32, - TYPE_UINT64, - TYPE_SINT32, - TYPE_SINT64, -] - -WIRE_FIXED_32_TYPES = [TYPE_FLOAT, TYPE_FIXED32, TYPE_SFIXED32] -WIRE_FIXED_64_TYPES = [TYPE_DOUBLE, TYPE_FIXED64, TYPE_SFIXED64] -WIRE_LEN_DELIM_TYPES = [TYPE_STRING, TYPE_BYTES, TYPE_MESSAGE, TYPE_MAP] - -# Indicator of message delimitation in streams -SIZE_DELIMITED = -1 - - -# Protobuf datetimes start at the Unix Epoch in 1970 in UTC. -def datetime_default_gen() -> datetime: - return datetime(1970, 1, 1, tzinfo=timezone.utc) - - -DATETIME_ZERO = datetime_default_gen() - - -# Special protobuf json doubles -INFINITY = "Infinity" -NEG_INFINITY = "-Infinity" -NAN = "NaN" - - -class Casing(builtin_enum.Enum): - """Casing constants for serialization.""" - - CAMEL = camel_case #: A camelCase sterilization function. - SNAKE = snake_case #: A snake_case sterilization function. - - -@dataclasses.dataclass(frozen=True) -class FieldMetadata: - """Stores internal metadata used for parsing & serialization.""" - - # Protobuf field number - number: int - # Protobuf type name - proto_type: str - # Map information if the proto_type is a map - map_types: Optional[Tuple[str, str]] = None - # Groups several "one-of" fields together - group: Optional[str] = None - # Describes the wrapped type (e.g. 
when using google.protobuf.BoolValue) - wraps: Optional[str] = None - # Is the field optional - optional: Optional[bool] = False - - @staticmethod - def get(field: dataclasses.Field) -> "FieldMetadata": - """Returns the field metadata for a dataclass field.""" - return field.metadata["betterproto"] - - -def dataclass_field( - number: int, - proto_type: str, - default_factory: Callable[[], Any], - *, - map_types: Optional[Tuple[str, str]] = None, - group: Optional[str] = None, - wraps: Optional[str] = None, - optional: bool = False, - repeated: bool = False, -) -> dataclasses.Field: - """Creates a dataclass field with attached protobuf metadata.""" - if repeated: - default_factory = list - - elif optional or group: - default_factory = type(None) - - return dataclasses.field( - default_factory=default_factory, - metadata={"betterproto": FieldMetadata(number, proto_type, map_types, group, wraps, optional)}, - ) - - -# Note: the fields below return `Any` to prevent type errors in the generated -# data classes since the types won't match with `Field` and they get swapped -# out at runtime. The generated dataclass variables are still typed correctly. - - -def enum_field( - number: int, - enum_default_value: Callable[[], Enum], - group: Optional[str] = None, - optional: bool = False, - repeated: bool = False, -) -> Any: - return dataclass_field( - number, - TYPE_ENUM, - enum_default_value, - group=group, - optional=optional, - repeated=repeated, - ) - - -def bool_field( - number: int, - group: Optional[str] = None, - optional: bool = False, - repeated: bool = False, -) -> Any: - return dataclass_field( - number, - TYPE_BOOL, - bool, - group=group, - optional=optional, - repeated=repeated, - ) - - -def int32_field( - number: int, - group: Optional[str] = None, - optional: bool = False, - repeated: bool = False, -) -> Any: - return dataclass_field(number, TYPE_INT32, int, group=group, optional=optional, repeated=repeated) - - -def int64_field( - number: int, - group: Optional[str] = None, - optional: bool = False, - repeated: bool = False, -) -> Any: - return dataclass_field(number, TYPE_INT64, int, group=group, optional=optional, repeated=repeated) - - -def uint32_field( - number: int, - group: Optional[str] = None, - optional: bool = False, - repeated: bool = False, -) -> Any: - return dataclass_field( - number, - TYPE_UINT32, - int, - group=group, - optional=optional, - repeated=repeated, - ) - - -def uint64_field( - number: int, - group: Optional[str] = None, - optional: bool = False, - repeated: bool = False, -) -> Any: - return dataclass_field( - number, - TYPE_UINT64, - int, - group=group, - optional=optional, - repeated=repeated, - ) - - -def sint32_field( - number: int, - group: Optional[str] = None, - optional: bool = False, - repeated: bool = False, -) -> Any: - return dataclass_field( - number, - TYPE_SINT32, - int, - group=group, - optional=optional, - repeated=repeated, - ) - - -def sint64_field( - number: int, - group: Optional[str] = None, - optional: bool = False, - repeated: bool = False, -) -> Any: - return dataclass_field( - number, - TYPE_SINT64, - int, - group=group, - optional=optional, - repeated=repeated, - ) - - -def float_field( - number: int, - group: Optional[str] = None, - optional: bool = False, - repeated: bool = False, -) -> Any: - return dataclass_field( - number, - TYPE_FLOAT, - float, - group=group, - optional=optional, - repeated=repeated, - ) - - -def double_field( - number: int, - group: Optional[str] = None, - optional: bool = False, - repeated: bool = False, 
-) -> Any: - return dataclass_field( - number, - TYPE_DOUBLE, - float, - group=group, - optional=optional, - repeated=repeated, - ) - - -def fixed32_field( - number: int, - group: Optional[str] = None, - optional: bool = False, - repeated: bool = False, -) -> Any: - return dataclass_field( - number, - TYPE_FIXED32, - float, - group=group, - optional=optional, - repeated=repeated, - ) - - -def fixed64_field( - number: int, - group: Optional[str] = None, - optional: bool = False, - repeated: bool = False, -) -> Any: - return dataclass_field( - number, - TYPE_FIXED64, - float, - group=group, - optional=optional, - repeated=repeated, - ) - - -def sfixed32_field( - number: int, - group: Optional[str] = None, - optional: bool = False, - repeated: bool = False, -) -> Any: - return dataclass_field( - number, - TYPE_SFIXED32, - float, - group=group, - optional=optional, - repeated=repeated, - ) - - -def sfixed64_field( - number: int, - group: Optional[str] = None, - optional: bool = False, - repeated: bool = False, -) -> Any: - return dataclass_field( - number, - TYPE_SFIXED64, - float, - group=group, - optional=optional, - repeated=repeated, - ) - - -def string_field( - number: int, - group: Optional[str] = None, - optional: bool = False, - repeated: bool = False, -) -> Any: - return dataclass_field( - number, - TYPE_STRING, - str, - group=group, - optional=optional, - repeated=repeated, - ) - - -def bytes_field( - number: int, - group: Optional[str] = None, - optional: bool = False, - repeated: bool = False, -) -> Any: - return dataclass_field( - number, - TYPE_BYTES, - bytes, - group=group, - optional=optional, - repeated=repeated, - ) - - -def message_field( - number: int, - group: Optional[str] = None, - wraps: Optional[str] = None, - optional: bool = False, - repeated: bool = False, -) -> Any: - return dataclass_field( - number, - TYPE_MESSAGE, - type(None), - group=group, - wraps=wraps, - optional=optional, - repeated=repeated, - ) - - -def map_field(number: int, key_type: str, value_type: str, group: Optional[str] = None) -> Any: - return dataclass_field(number, TYPE_MAP, dict, map_types=(key_type, value_type), group=group) - - -def _pack_fmt(proto_type: str) -> str: - """Returns a little-endian format string for reading/writing binary.""" - return { - TYPE_DOUBLE: " None: - """Encodes a single varint and dumps it into the provided stream.""" - if value < -(1 << 63): - raise ValueError( - "Negative value is not representable as a 64-bit integer" " - unable to encode a varint within 10 bytes." - ) - elif value < 0: - value += 1 << 64 - - bits = value & 0x7F - value >>= 7 - while value: - stream.write((0x80 | bits).to_bytes(1, "little")) - bits = value & 0x7F - value >>= 7 - stream.write(bits.to_bytes(1, "little")) - - -def encode_varint(value: int) -> bytes: - """Encodes a single varint value for serialization.""" - with BytesIO() as stream: - dump_varint(value, stream) - return stream.getvalue() - - -def _preprocess_single(proto_type: str, wraps: str, value: Any) -> bytes: - """Adjusts values before serialization.""" - if proto_type in ( - TYPE_ENUM, - TYPE_BOOL, - TYPE_INT32, - TYPE_INT64, - TYPE_UINT32, - TYPE_UINT64, - ): - return encode_varint(value) - elif proto_type in (TYPE_SINT32, TYPE_SINT64): - # Handle zig-zag encoding. 
- return encode_varint(value << 1 if value >= 0 else (value << 1) ^ (~0)) - elif proto_type in FIXED_TYPES: - return struct.pack(_pack_fmt(proto_type), value) - elif proto_type == TYPE_STRING: - return value.encode("utf-8") - elif proto_type == TYPE_MESSAGE: - if isinstance(value, datetime): - # Convert the `datetime` to a timestamp message. - value = _Timestamp.from_datetime(value) - elif isinstance(value, timedelta): - # Convert the `timedelta` to a duration message. - value = _Duration.from_timedelta(value) - elif wraps: - if value is None: - return b"" - value = _get_wrapper(wraps)(value=value) - - return bytes(value) - - return value - - -def _serialize_single( - field_number: int, - proto_type: str, - value: Any, - *, - wraps: str = "", -) -> bytes: - """Serializes a single field and value.""" - value = _preprocess_single(proto_type, wraps, value) - - output = bytearray() - if proto_type in WIRE_VARINT_TYPES: - key = encode_varint(field_number << 3) - output += key + value - elif proto_type in WIRE_FIXED_32_TYPES: - key = encode_varint((field_number << 3) | 5) - output += key + value - elif proto_type in WIRE_FIXED_64_TYPES: - key = encode_varint((field_number << 3) | 1) - output += key + value - elif proto_type in WIRE_LEN_DELIM_TYPES: - key = encode_varint((field_number << 3) | 2) - output += key + encode_varint(len(value)) + value - else: - raise NotImplementedError(proto_type) - - return bytes(output) - - -def _parse_float(value: Any) -> float: - """Parse the given value to a float - - Parameters - ---------- - value: Any - Value to parse - - Returns - ------- - float - Parsed value - """ - if value == INFINITY: - return float("inf") - if value == NEG_INFINITY: - return -float("inf") - if value == NAN: - return float("nan") - return float(value) - - -def _dump_float(value: float) -> Union[float, str]: - """Dump the given float to JSON - - Parameters - ---------- - value: float - Value to dump - - Returns - ------- - Union[float, str] - Dumped value, either a float or the strings - """ - if value == float("inf"): - return INFINITY - if value == -float("inf"): - return NEG_INFINITY - if isinstance(value, float) and math.isnan(value): - return NAN - return value - - -def load_varint(stream: "SupportsRead[bytes]") -> Tuple[int, bytes]: - """ - Load a single varint value from a stream. Returns the value and the raw bytes read. - """ - result = 0 - raw = b"" - for shift in count(0, 7): - if shift >= 64: - raise ValueError("Too many bytes when decoding varint.") - b = stream.read(1) - if not b: - raise EOFError("Stream ended unexpectedly while attempting to load varint.") - raw += b - b_int = int.from_bytes(b, byteorder="little") - result |= (b_int & 0x7F) << shift - if not (b_int & 0x80): - return result, raw - - -def decode_varint(buffer: bytes, pos: int) -> Tuple[int, int]: - """ - Decode a single varint value from a byte buffer. Returns the value and the - new position in the buffer. 
- """ - with BytesIO(buffer) as stream: - stream.seek(pos) - value, raw = load_varint(stream) - return value, pos + len(raw) - - -@dataclasses.dataclass(frozen=True) -class ParsedField: - number: int - wire_type: int - value: Any - raw: bytes - - -def load_fields(stream: "SupportsRead[bytes]") -> Generator[ParsedField, None, None]: - while True: - try: - num_wire, raw = load_varint(stream) - except EOFError: - return - number = num_wire >> 3 - wire_type = num_wire & 0x7 - - decoded: Any = None - if wire_type == WIRE_VARINT: - decoded, r = load_varint(stream) - raw += r - elif wire_type == WIRE_FIXED_64: - decoded = stream.read(8) - raw += decoded - elif wire_type == WIRE_LEN_DELIM: - length, r = load_varint(stream) - decoded = stream.read(length) - raw += r - raw += decoded - elif wire_type == WIRE_FIXED_32: - decoded = stream.read(4) - raw += decoded - - yield ParsedField(number=number, wire_type=wire_type, value=decoded, raw=raw) - - -def parse_fields(value: bytes) -> Generator[ParsedField, None, None]: - i = 0 - while i < len(value): - start = i - num_wire, i = decode_varint(value, i) - number = num_wire >> 3 - wire_type = num_wire & 0x7 - - decoded: Any = None - if wire_type == WIRE_VARINT: - decoded, i = decode_varint(value, i) - elif wire_type == WIRE_FIXED_64: - decoded, i = value[i : i + 8], i + 8 - elif wire_type == WIRE_LEN_DELIM: - length, i = decode_varint(value, i) - decoded = value[i : i + length] - i += length - elif wire_type == WIRE_FIXED_32: - decoded, i = value[i : i + 4], i + 4 - - yield ParsedField(number=number, wire_type=wire_type, value=decoded, raw=value[start:i]) - - -class ProtoClassMetadata: - __slots__ = ( - "oneof_group_by_field", - "oneof_field_by_group", - "default_gen", - "cls_by_field", - "field_name_by_number", - "meta_by_field_name", - "sorted_field_names", - ) - - oneof_group_by_field: Dict[str, str] - oneof_field_by_group: Dict[str, Set[dataclasses.Field]] - field_name_by_number: Dict[int, str] - meta_by_field_name: Dict[str, FieldMetadata] - sorted_field_names: Tuple[str, ...] - default_gen: Dict[str, Callable[[], Any]] - cls_by_field: Dict[str, Type] - - def __init__(self, cls: Type["Message"]): - by_field = {} - by_group: Dict[str, Set] = {} - by_field_name = {} - by_field_number = {} - - fields = dataclasses.fields(cls) - for field in fields: - meta = FieldMetadata.get(field) - - if meta.group: - # This is part of a one-of group. 
- by_field[field.name] = meta.group - - by_group.setdefault(meta.group, set()).add(field) - - by_field_name[field.name] = meta - by_field_number[meta.number] = field.name - - self.oneof_group_by_field = by_field - self.oneof_field_by_group = by_group - self.field_name_by_number = by_field_number - self.meta_by_field_name = by_field_name - self.sorted_field_names = tuple(by_field_number[number] for number in sorted(by_field_number)) - self.default_gen = self._get_default_gen(cls, fields) - self.cls_by_field = self._get_cls_by_field(cls, fields) - - @staticmethod - def _get_default_gen(cls: Type["Message"], fields: Iterable[dataclasses.Field]) -> Dict[str, Callable[[], Any]]: - return {field.name: field.default_factory for field in fields} - - @staticmethod - def _get_cls_by_field(cls: Type["Message"], fields: Iterable[dataclasses.Field]) -> Dict[str, Type]: - field_cls = {} - - for field in fields: - meta = FieldMetadata.get(field) - if meta.proto_type == TYPE_MAP: - assert meta.map_types - kt = cls._cls_for(field, index=0) - vt = cls._cls_for(field, index=1) - field_cls[field.name] = dataclasses.make_dataclass( - "Entry", - [ - ( - "key", - kt, - dataclass_field(1, meta.map_types[0], default_factory=kt), - ), - ( - "value", - vt, - dataclass_field(2, meta.map_types[1], default_factory=vt), - ), - ], - bases=(Message,), - ) - field_cls[f"{field.name}.value"] = vt - else: - field_cls[field.name] = cls._cls_for(field) - - return field_cls - - -class Message(ABC): - """ - The base class for protobuf messages, all generated messages will inherit from - this. This class registers the message fields which are used by the serializers and - parsers to go between the Python, binary and JSON representations of the message. - - .. container:: operations - - .. describe:: bytes(x) - - Calls :meth:`__bytes__`. - - .. describe:: bool(x) - - Calls :meth:`__bool__`. 
- """ - - _unknown_fields: bytes - _betterproto_meta: ClassVar[ProtoClassMetadata] - - def __post_init__(self) -> None: - self._unknown_fields = b"" - - def __eq__(self, other) -> bool: - if type(self) is not type(other): - return NotImplemented - - for field_name in self._betterproto.meta_by_field_name: - self_val = self.__getattribute__(field_name) - other_val = other.__getattribute__(field_name) - - if self_val != other_val: - # We consider two nan values to be the same for the - # purposes of comparing messages (otherwise a message - # is not equal to itself) - if ( - isinstance(self_val, float) - and isinstance(other_val, float) - and math.isnan(self_val) - and math.isnan(other_val) - ): - continue - else: - return False - - return True - - def __repr__(self) -> str: - parts = [ - f"{field_name}={value!r}" - for field_name in self._betterproto.sorted_field_names - for value in (self.__getattribute__(field_name),) - if value != self._get_field_default(field_name) - ] - return f"{self.__class__.__name__}({', '.join(parts)})" - - # def __rich_repr__(self) -> Iterable[Tuple[str, Any, Any]]: - # for field_name in self._betterproto.sorted_field_names: - # yield field_name, self.__getattribute__(field_name), PLACEHOLDER - - def __bool__(self) -> bool: - """True if the Message has any fields with non-default values.""" - return any( - self.__getattribute__(field_name) != self._get_field_default(field_name) - for field_name in self._betterproto.meta_by_field_name - ) - - def __deepcopy__(self: T, _: Any = {}) -> T: - kwargs = {} - for name in self._betterproto.sorted_field_names: - value = self.__getattribute__(name) - kwargs[name] = deepcopy(value) - return self.__class__(**kwargs) # type: ignore - - def __copy__(self: T, _: Any = {}) -> T: - kwargs = {} - for name in self._betterproto.sorted_field_names: - value = self.__getattribute__(name) - kwargs[name] = value - return self.__class__(**kwargs) # type: ignore - - @classproperty - def _betterproto(cls: type[Self]) -> ProtoClassMetadata: # type: ignore - """ - Lazy initialize metadata for each protobuf class. - It may be initialized multiple times in a multi-threaded environment, - but that won't affect the correctness. - """ - try: - return cls._betterproto_meta - except AttributeError: - cls._betterproto_meta = meta = ProtoClassMetadata(cls) - return meta - - def dump(self, stream: "SupportsWrite[bytes]", delimit: bool = False) -> None: - """ - Dumps the binary encoded Protobuf message to the stream. - - Parameters - ----------- - stream: :class:`BinaryIO` - The stream to dump the message to. - delimit: - Whether to prefix the message with a varint declaring its size. - TODO is it actually needed? - """ - b = bytes(self) - - if delimit: - dump_varint(len(b), stream) - - stream.write(b) - - def __bytes__(self) -> bytes: - """ - Get the binary encoded Protobuf representation of this message instance. - """ - with BytesIO() as stream: - for field_name, meta in self._betterproto.meta_by_field_name.items(): - value = getattr(self, field_name) - - if value is None: - # Optional items should be skipped. This is used for the Google - # wrapper types and proto3 field presence/optional fields. - continue - - if value == self._get_field_default(field_name): - # Default (zero) values are not serialized. - continue - - if isinstance(value, list): - if meta.proto_type in PACKED_TYPES: - # Packed lists look like a length-delimited field. First, - # preprocess/encode each value into a buffer and then - # treat it like a field of raw bytes. 
- buf = bytearray() - for item in value: - buf += _preprocess_single(meta.proto_type, "", item) - stream.write(_serialize_single(meta.number, TYPE_BYTES, buf)) - else: - for item in value: - stream.write( - _serialize_single( - meta.number, - meta.proto_type, - item, - wraps=meta.wraps or "", - ) - # if it's an empty message it still needs to be - # represented as an item in the repeated list - or b"\n\x00" - ) - - elif isinstance(value, dict): - for k, v in value.items(): - assert meta.map_types - sk = _serialize_single(1, meta.map_types[0], k) - sv = _serialize_single(2, meta.map_types[1], v) - stream.write(_serialize_single(meta.number, meta.proto_type, sk + sv)) - else: - stream.write( - _serialize_single( - meta.number, - meta.proto_type, - value, - wraps=meta.wraps or "", - ) - ) - - stream.write(self._unknown_fields) - return stream.getvalue() - - # For compatibility with other libraries - def SerializeToString(self: T) -> bytes: - """ - Get the binary encoded Protobuf representation of this message instance. - - .. note:: - This is a method for compatibility with other libraries, - you should really use ``bytes(x)``. - - Returns - -------- - :class:`bytes` - The binary encoded Protobuf representation of this message instance - """ - return bytes(self) - - def __getstate__(self) -> bytes: - return bytes(self) - - def __setstate__(self: T, pickled_bytes: bytes) -> T: - return self.parse(pickled_bytes) - - def __reduce__(self) -> Tuple[Any, ...]: - return (self.__class__.FromString, (bytes(self),)) - - @classmethod - def _type_hint(cls, field_name: str) -> Type: - return cls._type_hints()[field_name] - - @classmethod - def _type_hints(cls) -> Dict[str, Type]: - module = sys.modules[cls.__module__] - return get_type_hints(cls, module.__dict__, {}) - - @classmethod - def _cls_for(cls, field: dataclasses.Field, index: int = 0) -> Type: - """Get the message class for a field from the type hints.""" - field_cls = cls._type_hint(field.name) - if hasattr(field_cls, "__args__") and index >= 0 and field_cls.__args__ is not None: - field_cls = field_cls.__args__[index] - return field_cls - - def _get_field_default(self, field_name: str) -> Any: - with warnings.catch_warnings(): - # ignore warnings when initialising deprecated field defaults - warnings.filterwarnings("ignore", category=DeprecationWarning) - return self._betterproto.default_gen[field_name]() - - def _postprocess_single(self, wire_type: int, meta: FieldMetadata, field_name: str, value: Any) -> Any: - """Adjusts values after parsing.""" - if wire_type == WIRE_VARINT: - if meta.proto_type in (TYPE_INT32, TYPE_INT64): - bits = int(meta.proto_type[3:]) - value = value & ((1 << bits) - 1) - signbit = 1 << (bits - 1) - value = int((value ^ signbit) - signbit) - elif meta.proto_type in (TYPE_SINT32, TYPE_SINT64): - # Undo zig-zag encoding - value = (value >> 1) ^ (-(value & 1)) - elif meta.proto_type == TYPE_BOOL: - # Booleans use a varint encoding, so convert it to true/false. 
- value = value > 0 - elif meta.proto_type == TYPE_ENUM: - # Convert enum ints to python enum instances - value = self._betterproto.cls_by_field[field_name].try_value(value) - elif wire_type in (WIRE_FIXED_32, WIRE_FIXED_64): - fmt = _pack_fmt(meta.proto_type) - value = struct.unpack(fmt, value)[0] - elif wire_type == WIRE_LEN_DELIM: - if meta.proto_type == TYPE_STRING: - value = str(value, "utf-8") - elif meta.proto_type == TYPE_MESSAGE: - cls = self._betterproto.cls_by_field[field_name] - - if cls == datetime: - value = _Timestamp().parse(value).to_datetime() - elif cls == timedelta: - value = _Duration().parse(value).to_timedelta() - elif meta.wraps: - # This is a Google wrapper value message around a single - # scalar type. - value = _get_wrapper(meta.wraps)().parse(value).value - else: - value = cls().parse(value) - elif meta.proto_type == TYPE_MAP: - value = self._betterproto.cls_by_field[field_name]().parse(value) - - return value - - def load( - self: T, - stream: "SupportsRead[bytes]", - size: Optional[int] = None, - ) -> T: - """ - Load the binary encoded Protobuf from a stream into this message instance. This - returns the instance itself and is therefore assignable and chainable. - - Parameters - ----------- - stream: :class:`bytes` - The stream to load the message from. - size: :class:`Optional[int]` - The size of the message in the stream. - Reads stream until EOF if ``None`` is given. - Reads based on a size delimiter prefix varint if SIZE_DELIMITED is given. - - Returns - -------- - :class:`Message` - The initialized message. - """ - # If the message is delimited, parse the message delimiter - if size == SIZE_DELIMITED: - size, _ = load_varint(stream) - - # Got some data over the wire - proto_meta = self._betterproto - read = 0 - for parsed in load_fields(stream): - field_name = proto_meta.field_name_by_number.get(parsed.number) - if not field_name: - self._unknown_fields += parsed.raw - continue - - meta = proto_meta.meta_by_field_name[field_name] - - value: Any - if parsed.wire_type == WIRE_LEN_DELIM and meta.proto_type in PACKED_TYPES: - # This is a packed repeated field. - pos = 0 - value = [] - while pos < len(parsed.value): - if meta.proto_type in (TYPE_FLOAT, TYPE_FIXED32, TYPE_SFIXED32): - decoded, pos = parsed.value[pos : pos + 4], pos + 4 - wire_type = WIRE_FIXED_32 - elif meta.proto_type in (TYPE_DOUBLE, TYPE_FIXED64, TYPE_SFIXED64): - decoded, pos = parsed.value[pos : pos + 8], pos + 8 - wire_type = WIRE_FIXED_64 - else: - decoded, pos = decode_varint(parsed.value, pos) - wire_type = WIRE_VARINT - decoded = self._postprocess_single(wire_type, meta, field_name, decoded) - value.append(decoded) - else: - value = self._postprocess_single(parsed.wire_type, meta, field_name, parsed.value) - - current = getattr(self, field_name) - - if meta.proto_type == TYPE_MAP: - # Value represents a single key/value pair entry in the map. - current[value.key] = value.value - elif isinstance(current, list): - if isinstance(value, list): - current.extend(value) - else: - current.append(value) - else: - setattr(self, field_name, value) - - # If we have now loaded the expected length of the message, stop - if size is not None: - prev = read - read += len(parsed.raw) - if read == size: - break - elif read > size: - raise ValueError( - f"Expected message of size {size}, can only read " - f"either {prev} or {read} bytes - there is no " - "message of the expected size in the stream." 
- ) - - if size is not None and read < size: - raise ValueError( - f"Expected message of size {size}, but was only able to " - f"read {read} bytes - the stream may have ended too soon," - " or the expected size may have been incorrect." - ) - - return self - - def parse(self: T, data: bytes) -> T: - """ - Parse the binary encoded Protobuf into this message instance. This - returns the instance itself and is therefore assignable and chainable. - - Parameters - ----------- - data: :class:`bytes` - The data to parse the message from. - - Returns - -------- - :class:`Message` - The initialized message. - """ - with BytesIO(data) as stream: - return self.load(stream) - - # For compatibility with other libraries. - @classmethod - def FromString(cls: Type[T], data: bytes) -> T: - """ - Parse the binary encoded Protobuf into this message instance. This - returns the instance itself and is therefore assignable and chainable. - - .. note:: - This is a method for compatibility with other libraries, - you should really use :meth:`parse`. - - - Parameters - ----------- - data: :class:`bytes` - The data to parse the protobuf from. - - Returns - -------- - :class:`Message` - The initialized message. - """ - return cls().parse(data) - - def to_dict(self, casing: Casing = Casing.CAMEL, include_default_values: bool = False) -> Dict[str, Any]: - """ - Returns a JSON serializable dict representation of this object. - - Parameters - ----------- - casing: :class:`Casing` - The casing to use for key values. Default is :attr:`Casing.CAMEL` for - compatibility purposes. - include_default_values: :class:`bool` - If ``True`` will include the default values of fields. Default is ``False``. - E.g. an ``int32`` field will be included with a value of ``0`` if this is - set to ``True``, otherwise this would be ignored. - - Returns - -------- - Dict[:class:`str`, Any] - The JSON serializable dict representation of this object. - """ - output: Dict[str, Any] = {} - field_types = self._type_hints() - defaults = self._betterproto.default_gen - for field_name, meta in self._betterproto.meta_by_field_name.items(): - field_is_repeated = defaults[field_name] is list - value = getattr(self, field_name) - cased_name = casing(field_name).rstrip("_") # type: ignore - if meta.proto_type == TYPE_MESSAGE: - if isinstance(value, datetime): - output[cased_name] = _Timestamp.timestamp_to_json(value) - elif isinstance(value, timedelta): - output[cased_name] = _Duration.delta_to_json(value) - elif meta.wraps: - if value is not None or include_default_values: - output[cased_name] = value - elif field_is_repeated: - # Convert each item. 
- cls = self._betterproto.cls_by_field[field_name] - if cls == datetime: - value = [_Timestamp.timestamp_to_json(i) for i in value] - elif cls == timedelta: - value = [_Duration.delta_to_json(i) for i in value] - else: - value = [i.to_dict(casing, include_default_values) for i in value] - if value or include_default_values: - output[cased_name] = value - elif value is None: - if include_default_values: - output[cased_name] = value - else: - output[cased_name] = value.to_dict(casing, include_default_values) - elif meta.proto_type == TYPE_MAP: - output_map = {**value} - for k in value: - if hasattr(value[k], "to_dict"): - output_map[k] = value[k].to_dict(casing, include_default_values) - - if value or include_default_values: - output[cased_name] = output_map - elif value != self._get_field_default(field_name) or include_default_values: - if meta.proto_type in INT_64_TYPES: - if field_is_repeated: - output[cased_name] = [str(n) for n in value] - elif value is None: - if include_default_values: - output[cased_name] = value - else: - output[cased_name] = str(value) - elif meta.proto_type == TYPE_BYTES: - if field_is_repeated: - output[cased_name] = [b64encode(b).decode("utf8") for b in value] - elif value is None and include_default_values: - output[cased_name] = value - else: - output[cased_name] = b64encode(value).decode("utf8") - elif meta.proto_type == TYPE_ENUM: - if field_is_repeated: - enum_class = field_types[field_name].__args__[0] - if isinstance(value, typing.Iterable) and not isinstance(value, str): - output[cased_name] = [enum_class(el).name for el in value] - else: - # transparently upgrade single value to repeated - output[cased_name] = [enum_class(value).name] - elif value is None: - if include_default_values: - output[cased_name] = value - elif meta.optional: - enum_class = field_types[field_name].__args__[0] - output[cased_name] = enum_class(value).name - else: - enum_class = field_types[field_name] # noqa - output[cased_name] = enum_class(value).name - elif meta.proto_type in (TYPE_FLOAT, TYPE_DOUBLE): - if field_is_repeated: - output[cased_name] = [_dump_float(n) for n in value] - else: - output[cased_name] = _dump_float(value) - else: - output[cased_name] = value - return output - - @classmethod - def _from_dict_init(cls, mapping: Mapping[str, Any]) -> Mapping[str, Any]: - init_kwargs: Dict[str, Any] = {} - for key, value in mapping.items(): - field_name = safe_snake_case(key) - try: - meta = cls._betterproto.meta_by_field_name[field_name] - except KeyError: - continue - if value is None: - continue - - if meta.proto_type == TYPE_MESSAGE: - sub_cls = cls._betterproto.cls_by_field[field_name] - if sub_cls == datetime: - value = [isoparse(item) for item in value] if isinstance(value, list) else isoparse(value) - elif sub_cls == timedelta: - value = ( - [timedelta(seconds=float(item[:-1])) for item in value] - if isinstance(value, list) - else timedelta(seconds=float(value[:-1])) - ) - elif not meta.wraps: - value = ( - [sub_cls.from_dict(item) for item in value] - if isinstance(value, list) - else sub_cls.from_dict(value) - ) - elif meta.map_types and meta.map_types[1] == TYPE_MESSAGE: - sub_cls = cls._betterproto.cls_by_field[f"{field_name}.value"] - value = {k: sub_cls.from_dict(v) for k, v in value.items()} - else: - if meta.proto_type in INT_64_TYPES: - value = [int(n) for n in value] if isinstance(value, list) else int(value) - elif meta.proto_type == TYPE_BYTES: - value = [b64decode(n) for n in value] if isinstance(value, list) else b64decode(value) - elif 
meta.proto_type == TYPE_ENUM:
- enum_cls = cls._betterproto.cls_by_field[field_name]
- if isinstance(value, list):
- value = [enum_cls.from_string(e) for e in value]
- elif isinstance(value, str):
- value = enum_cls.from_string(value)
- elif meta.proto_type in (TYPE_FLOAT, TYPE_DOUBLE):
- value = [_parse_float(n) for n in value] if isinstance(value, list) else _parse_float(value)
-
- init_kwargs[field_name] = value
- return init_kwargs
-
- @hybridmethod
- def from_dict(cls: type[Self], value: Mapping[str, Any]) -> Self: # type: ignore
- """
- Parse the key/value pairs into a new message instance.
-
- Parameters
- -----------
- value: Dict[:class:`str`, Any]
- The dictionary to parse from.
-
- Returns
- --------
- :class:`Message`
- The initialized message.
- """
- return cls(**cls._from_dict_init(value))
-
- @from_dict.instancemethod
- def from_dict(self, value: Mapping[str, Any]) -> Self:
- """
- Parse the key/value pairs into the current message instance. This returns the
- instance itself and is therefore assignable and chainable.
-
- Parameters
- -----------
- value: Dict[:class:`str`, Any]
- The dictionary to parse from.
-
- Returns
- --------
- :class:`Message`
- The initialized message.
- """
- for field, value in self._from_dict_init(value).items():
- setattr(self, field, value)
- return self
-
- def to_json(
- self,
- indent: Union[None, int, str] = None,
- include_default_values: bool = False,
- casing: Casing = Casing.CAMEL,
- ) -> str:
- """A helper function to serialize the message instance into its JSON
- representation.
-
- This is equivalent to::
-
- json.dumps(message.to_dict(), indent=indent)
-
- Parameters
- -----------
- indent: Optional[Union[:class:`int`, :class:`str`]]
- The indent to pass to :func:`json.dumps`.
-
- include_default_values: :class:`bool`
- If ``True`` will include the default values of fields. Default is ``False``.
- E.g. an ``int32`` field will be included with a value of ``0`` if this is
- set to ``True``, otherwise this would be ignored.
-
- casing: :class:`Casing`
- The casing to use for key values. Default is :attr:`Casing.CAMEL` for
- compatibility purposes.
-
- Returns
- --------
- :class:`str`
- The JSON representation of the message.
- """
- return json.dumps(
- self.to_dict(include_default_values=include_default_values, casing=casing),
- indent=indent,
- )
-
- def from_json(self: T, value: Union[str, bytes]) -> T:
- """A helper function to return the message instance from its JSON
- representation. This returns the instance itself and is therefore assignable
- and chainable.
-
- This is equivalent to::
-
- return message.from_dict(json.loads(value))
-
- Parameters
- -----------
- value: Union[:class:`str`, :class:`bytes`]
- The value to pass to :func:`json.loads`.
-
- Returns
- --------
- :class:`Message`
- The initialized message.
- """
- return self.from_dict(json.loads(value))
-
- def to_pydict(self, casing: Casing = Casing.CAMEL, include_default_values: bool = False) -> Dict[str, Any]:
- """
- Returns a python dict representation of this object.
-
- Parameters
- -----------
- casing: :class:`Casing`
- The casing to use for key values. Default is :attr:`Casing.CAMEL` for
- compatibility purposes.
- include_default_values: :class:`bool`
- If ``True`` will include the default values of fields. Default is ``False``.
- E.g. an ``int32`` field will be included with a value of ``0`` if this is
- set to ``True``, otherwise this would be ignored.
-
- Returns
- --------
- Dict[:class:`str`, Any]
- The python dict representation of this object.
- """ - output: Dict[str, Any] = {} - defaults = self._betterproto.default_gen - for field_name, meta in self._betterproto.meta_by_field_name.items(): - field_is_repeated = defaults[field_name] is list - value = getattr(self, field_name) - cased_name = casing(field_name).rstrip("_") # type: ignore - if meta.proto_type == TYPE_MESSAGE: - if isinstance(value, datetime): - if ( - value != DATETIME_ZERO - or include_default_values - or self._include_default_value_for_oneof(field_name=field_name, meta=meta) - ): - output[cased_name] = value - elif isinstance(value, timedelta): - if ( - value != timedelta(0) - or include_default_values - or self._include_default_value_for_oneof(field_name=field_name, meta=meta) - ): - output[cased_name] = value - elif meta.wraps: - if value is not None or include_default_values: - output[cased_name] = value - elif field_is_repeated: - # Convert each item. - value = [i.to_pydict(casing, include_default_values) for i in value] - if value or include_default_values: - output[cased_name] = value - elif value is None: - if include_default_values: - output[cased_name] = None - else: - output[cased_name] = value.to_pydict(casing, include_default_values) - elif meta.proto_type == TYPE_MAP: - for k in value: - if hasattr(value[k], "to_pydict"): - value[k] = value[k].to_pydict(casing, include_default_values) - - if value or include_default_values: - output[cased_name] = value - elif value != self._get_field_default(field_name) or include_default_values: - output[cased_name] = value - return output - - def from_pydict(self: T, value: Mapping[str, Any]) -> T: - """ - Parse the key/value pairs into the current message instance. This returns the - instance itself and is therefore assignable and chainable. - - Parameters - ----------- - value: Dict[:class:`str`, Any] - The dictionary to parse from. - - Returns - -------- - :class:`Message` - The initialized message. - """ - for key in value: - field_name = safe_snake_case(key) - meta = self._betterproto.meta_by_field_name.get(field_name) - if not meta: - continue - - if value[key] is not None: - if meta.proto_type == TYPE_MESSAGE: - v = getattr(self, field_name) - cls = self._betterproto.cls_by_field[field_name] - if issubclass(cls, list): - for item in value[key]: - v.append(cls().from_pydict(item)) - elif issubclass(cls, datetime): - v = value[key] - elif issubclass(cls, timedelta): - v = value[key] - elif meta.wraps: - v = value[key] - else: - v = cls().from_pydict(value[key]) - elif meta.map_types and meta.map_types[1] == TYPE_MESSAGE: - v = getattr(self, field_name) - cls = self._betterproto.cls_by_field[f"{field_name}.value"] - for k in value[key]: - v[k] = cls().from_pydict(value[key][k]) - else: - v = value[key] - - if v is not None: - setattr(self, field_name, v) - return self - - def is_set(self, name: str) -> bool: - """ - Check if field with the given name has been set. - - Parameters - ----------- - name: :class:`str` - The name of the field to check for. - - Returns - -------- - :class:`bool` - `True` if field has been set, otherwise `False`. 
- """ - return self.__getattribute__(name) is not self._get_field_default(name) - - @classmethod - def _validate_field_groups(cls, values): - group_to_one_ofs = cls._betterproto.oneof_field_by_group - field_name_to_meta = cls._betterproto.meta_by_field_name - - for group, field_set in group_to_one_ofs.items(): - if len(field_set) == 1: - (field,) = field_set - field_name = field.name - meta = field_name_to_meta[field_name] - - # This is a synthetic oneof; we should ignore it's presence and not - # consider it as a oneof. - if meta.optional: - continue - - set_fields = [field.name for field in field_set if getattr(values, field.name, None) is not None] - - if len(set_fields) > 1: - set_fields_str = ", ".join(set_fields) - raise ValueError(f"Group {group} has more than one value;" f" fields {set_fields_str} are not None") - - return values - - -Message.__annotations__ = {} # HACK to avoid typing.get_type_hints breaking :) - - -def which_one_of(message: Message, group_name: str) -> Tuple[str, Optional[Any]]: - """ - Return the name and value of a message's one-of field group. - - Returns - -------- - Tuple[:class:`str`, Any] - The field name and the value for that field. - """ - field_name, value = "", None - for field in message._betterproto.oneof_field_by_group[group_name]: - v = getattr(message, field.name) - - if v is not None: - if field_name: - raise RuntimeError(f"more than one field set in oneof: {field.name} and {field_name}") - field_name, value = field.name, v - - return field_name, value - - -# Circular import workaround: google.protobuf depends on base classes defined above. -from .lib.google.protobuf import ( # noqa - BoolValue, - BytesValue, - DoubleValue, - Duration, - EnumValue, - FloatValue, - Int32Value, - Int64Value, - StringValue, - Timestamp, - UInt32Value, - UInt64Value, -) - - -class _Duration(Duration): - @classmethod - def from_timedelta(cls, delta: timedelta, *, _1_microsecond: timedelta = timedelta(microseconds=1)) -> "_Duration": - total_ms = delta // _1_microsecond - seconds = int(total_ms / 1e6) - nanos = int((total_ms % 1e6) * 1e3) - return cls(seconds, nanos) - - def to_timedelta(self) -> timedelta: - return timedelta(seconds=self.seconds, microseconds=self.nanos / 1e3) - - @staticmethod - def delta_to_json(delta: timedelta) -> str: - parts = str(delta.total_seconds()).split(".") - if len(parts) > 1: - while len(parts[1]) not in (3, 6, 9): - parts[1] = f"{parts[1]}0" - return f"{'.'.join(parts)}s" - - -class _Timestamp(Timestamp): - @classmethod - def from_datetime(cls, dt: datetime) -> "_Timestamp": - # manual epoch offset calulation to avoid rounding errors, - # to support negative timestamps (before 1970) and skirt - # around datetime bugs (apparently 0 isn't a year in [0, 9999]??) 
- offset = dt - DATETIME_ZERO - # below is the same as timedelta.total_seconds() but without dividing by 1e6 - # so we end up with microseconds as integers instead of seconds as float - offset_us = (offset.days * 24 * 60 * 60 + offset.seconds) * 10**6 + offset.microseconds - seconds, us = divmod(offset_us, 10**6) - return cls(seconds, us * 1000) - - def to_datetime(self) -> datetime: - # datetime.fromtimestamp() expects a timestamp in seconds, not microseconds - # if we pass it as a floating point number, we will run into rounding errors - # see also #407 - offset = timedelta(seconds=self.seconds, microseconds=self.nanos // 1000) - return DATETIME_ZERO + offset - - @staticmethod - def timestamp_to_json(dt: datetime) -> str: - nanos = dt.microsecond * 1e3 - if dt.tzinfo is not None: - # change timezone aware datetime objects to utc - dt = dt.astimezone(timezone.utc) - copy = dt.replace(microsecond=0, tzinfo=None) - result = copy.isoformat() - if (nanos % 1e9) == 0: - # If there are 0 fractional digits, the fractional - # point '.' should be omitted when serializing. - return f"{result}Z" - if (nanos % 1e6) == 0: - # Serialize 3 fractional digits. - return f"{result}.{int(nanos // 1e6) :03d}Z" - if (nanos % 1e3) == 0: - # Serialize 6 fractional digits. - return f"{result}.{int(nanos // 1e3) :06d}Z" - # Serialize 9 fractional digits. - return f"{result}.{nanos:09d}" - - -def _get_wrapper(proto_type: str) -> Type: - """Get the wrapper message class for a wrapped type.""" - - # TODO: include ListValue and NullValue? - return { - TYPE_BOOL: BoolValue, - TYPE_BYTES: BytesValue, - TYPE_DOUBLE: DoubleValue, - TYPE_FLOAT: FloatValue, - TYPE_ENUM: EnumValue, - TYPE_INT32: Int32Value, - TYPE_INT64: Int64Value, - TYPE_STRING: StringValue, - TYPE_UINT32: UInt32Value, - TYPE_UINT64: UInt64Value, - }[proto_type] diff --git a/src/betterproto2_compiler/lib/std/google/protobuf/__init__.py b/src/betterproto2_compiler/lib/std/google/protobuf/__init__.py index 5045fd7b..3f59a88d 100644 --- a/src/betterproto2_compiler/lib/std/google/protobuf/__init__.py +++ b/src/betterproto2_compiler/lib/std/google/protobuf/__init__.py @@ -81,13 +81,13 @@ Optional, ) +import betterproto from typing_extensions import Self -import betterproto2_compiler from betterproto2_compiler.utils import hybridmethod -class Syntax(betterproto2_compiler.Enum): +class Syntax(betterproto.Enum): """The syntax in which a protocol buffer element is defined.""" PROTO2 = 0 @@ -97,7 +97,7 @@ class Syntax(betterproto2_compiler.Enum): """Syntax `proto3`.""" -class FieldKind(betterproto2_compiler.Enum): +class FieldKind(betterproto.Enum): """Basic field types.""" TYPE_UNKNOWN = 0 @@ -158,7 +158,7 @@ class FieldKind(betterproto2_compiler.Enum): """Field type sint64.""" -class FieldCardinality(betterproto2_compiler.Enum): +class FieldCardinality(betterproto.Enum): """Whether a field is optional, required, or repeated.""" CARDINALITY_UNKNOWN = 0 @@ -174,7 +174,7 @@ class FieldCardinality(betterproto2_compiler.Enum): """For repeated fields.""" -class FieldDescriptorProtoType(betterproto2_compiler.Enum): +class FieldDescriptorProtoType(betterproto.Enum): """ """ TYPE_DOUBLE = 1 @@ -266,7 +266,7 @@ class FieldDescriptorProtoType(betterproto2_compiler.Enum): """Uses ZigZag encoding.""" -class FieldDescriptorProtoLabel(betterproto2_compiler.Enum): +class FieldDescriptorProtoLabel(betterproto.Enum): """ """ LABEL_OPTIONAL = 1 @@ -283,7 +283,7 @@ class FieldDescriptorProtoLabel(betterproto2_compiler.Enum): """ -class 
FileOptionsOptimizeMode(betterproto2_compiler.Enum): +class FileOptionsOptimizeMode(betterproto.Enum): """Generated classes can be optimized for speed or code size.""" SPEED = 1 @@ -300,7 +300,7 @@ class FileOptionsOptimizeMode(betterproto2_compiler.Enum): """Generate code using MessageLite and the lite runtime.""" -class FieldOptionsCType(betterproto2_compiler.Enum): +class FieldOptionsCType(betterproto.Enum): """ """ STRING = 0 @@ -317,7 +317,7 @@ class FieldOptionsCType(betterproto2_compiler.Enum): """ -class FieldOptionsJsType(betterproto2_compiler.Enum): +class FieldOptionsJsType(betterproto.Enum): """ """ JS_NORMAL = 0 @@ -330,7 +330,7 @@ class FieldOptionsJsType(betterproto2_compiler.Enum): """Use JavaScript numbers.""" -class MethodOptionsIdempotencyLevel(betterproto2_compiler.Enum): +class MethodOptionsIdempotencyLevel(betterproto.Enum): """ Is this method side-effect-free (or safe in HTTP parlance), or idempotent, or neither? HTTP based RPC implementation may choose GET verb for safe @@ -349,7 +349,7 @@ class MethodOptionsIdempotencyLevel(betterproto2_compiler.Enum): """idempotent, but may have side effects""" -class NullValue(betterproto2_compiler.Enum): +class NullValue(betterproto.Enum): """ `NullValue` is a singleton enumeration to represent the null value for the `Value` type union. @@ -362,7 +362,7 @@ class NullValue(betterproto2_compiler.Enum): @dataclass(eq=False, repr=False) -class Any(betterproto2_compiler.Message): +class Any(betterproto.Message): """ `Any` contains an arbitrary serialized protocol buffer message along with a URL that describes the type of the serialized message. @@ -444,7 +444,7 @@ class Any(betterproto2_compiler.Message): } """ - type_url: str = betterproto2_compiler.string_field(1) + type_url: str = betterproto.string_field(1) """ A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least @@ -475,20 +475,20 @@ class Any(betterproto2_compiler.Message): used with implementation specific semantics. """ - value: bytes = betterproto2_compiler.bytes_field(2) + value: bytes = betterproto.bytes_field(2) """ Must be a valid serialized protocol buffer of the above specified type. """ @dataclass(eq=False, repr=False) -class SourceContext(betterproto2_compiler.Message): +class SourceContext(betterproto.Message): """ `SourceContext` represents information about the source of a protobuf element, like the file in which it is defined. """ - file_name: str = betterproto2_compiler.string_field(1) + file_name: str = betterproto.string_field(1) """ The path-qualified name of the .proto file that contained the associated protobuf element. For example: `"google/protobuf/source_context.proto"`. 
@@ -496,117 +496,115 @@ class SourceContext(betterproto2_compiler.Message): @dataclass(eq=False, repr=False) -class Type(betterproto2_compiler.Message): +class Type(betterproto.Message): """A protocol buffer message type.""" - name: str = betterproto2_compiler.string_field(1) + name: str = betterproto.string_field(1) """The fully qualified message name.""" - fields: List["Field"] = betterproto2_compiler.message_field(2, repeated=True) + fields: List["Field"] = betterproto.message_field(2, repeated=True) """The list of fields.""" - oneofs: List[str] = betterproto2_compiler.string_field(3, repeated=True) + oneofs: List[str] = betterproto.string_field(3, repeated=True) """The list of types appearing in `oneof` definitions in this type.""" - options: List["Option"] = betterproto2_compiler.message_field(4, repeated=True) + options: List["Option"] = betterproto.message_field(4, repeated=True) """The protocol buffer options.""" - source_context: "SourceContext" = betterproto2_compiler.message_field(5) + source_context: "SourceContext" = betterproto.message_field(5) """The source context.""" - syntax: "Syntax" = betterproto2_compiler.enum_field(6, enum_default_value=lambda: Syntax.try_value(0)) + syntax: "Syntax" = betterproto.enum_field(6, enum_default_value=lambda: Syntax.try_value(0)) """The source syntax.""" @dataclass(eq=False, repr=False) -class Field(betterproto2_compiler.Message): +class Field(betterproto.Message): """A single field of a message type.""" - kind: "FieldKind" = betterproto2_compiler.enum_field(1, enum_default_value=lambda: FieldKind.try_value(0)) + kind: "FieldKind" = betterproto.enum_field(1, enum_default_value=lambda: FieldKind.try_value(0)) """The field type.""" - cardinality: "FieldCardinality" = betterproto2_compiler.enum_field( + cardinality: "FieldCardinality" = betterproto.enum_field( 2, enum_default_value=lambda: FieldCardinality.try_value(0) ) """The field cardinality.""" - number: int = betterproto2_compiler.int32_field(3) + number: int = betterproto.int32_field(3) """The field number.""" - name: str = betterproto2_compiler.string_field(4) + name: str = betterproto.string_field(4) """The field name.""" - type_url: str = betterproto2_compiler.string_field(6) + type_url: str = betterproto.string_field(6) """ The field type URL, without the scheme, for message or enumeration types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`. """ - oneof_index: int = betterproto2_compiler.int32_field(7) + oneof_index: int = betterproto.int32_field(7) """ The index of the field type in `Type.oneofs`, for message or enumeration types. The first type has index 1; zero means the type is not in the list. """ - packed: bool = betterproto2_compiler.bool_field(8) + packed: bool = betterproto.bool_field(8) """Whether to use alternative packed wire representation.""" - options: List["Option"] = betterproto2_compiler.message_field(9, repeated=True) + options: List["Option"] = betterproto.message_field(9, repeated=True) """The protocol buffer options.""" - json_name: str = betterproto2_compiler.string_field(10) + json_name: str = betterproto.string_field(10) """The field JSON name.""" - default_value: str = betterproto2_compiler.string_field(11) + default_value: str = betterproto.string_field(11) """ The string value of the default value of this field. Proto2 syntax only. 
""" @dataclass(eq=False, repr=False) -class Enum(betterproto2_compiler.Message): +class Enum(betterproto.Message): """Enum type definition.""" - name: str = betterproto2_compiler.string_field(1) + name: str = betterproto.string_field(1) """Enum type name.""" - enumvalue: List["EnumValue"] = betterproto2_compiler.message_field( - 2, wraps=betterproto2_compiler.TYPE_ENUM, repeated=True - ) + enumvalue: List["EnumValue"] = betterproto.message_field(2, wraps=betterproto.TYPE_ENUM, repeated=True) """Enum value definitions.""" - options: List["Option"] = betterproto2_compiler.message_field(3, repeated=True) + options: List["Option"] = betterproto.message_field(3, repeated=True) """Protocol buffer options.""" - source_context: "SourceContext" = betterproto2_compiler.message_field(4) + source_context: "SourceContext" = betterproto.message_field(4) """The source context.""" - syntax: "Syntax" = betterproto2_compiler.enum_field(5, enum_default_value=lambda: Syntax.try_value(0)) + syntax: "Syntax" = betterproto.enum_field(5, enum_default_value=lambda: Syntax.try_value(0)) """The source syntax.""" @dataclass(eq=False, repr=False) -class EnumValue(betterproto2_compiler.Message): +class EnumValue(betterproto.Message): """Enum value definition.""" - name: str = betterproto2_compiler.string_field(1) + name: str = betterproto.string_field(1) """Enum value name.""" - number: int = betterproto2_compiler.int32_field(2) + number: int = betterproto.int32_field(2) """Enum value number.""" - options: List["Option"] = betterproto2_compiler.message_field(3, repeated=True) + options: List["Option"] = betterproto.message_field(3, repeated=True) """Protocol buffer options.""" @dataclass(eq=False, repr=False) -class Option(betterproto2_compiler.Message): +class Option(betterproto.Message): """ A protocol buffer option, which can be attached to a message, field, enumeration, etc. """ - name: str = betterproto2_compiler.string_field(1) + name: str = betterproto.string_field(1) """ The option's name. For protobuf built-in options (options defined in descriptor.proto), this is the short name. For example, `"map_entry"`. @@ -614,7 +612,7 @@ class Option(betterproto2_compiler.Message): `"google.api.http"`. """ - value: "Any" = betterproto2_compiler.message_field(2) + value: "Any" = betterproto.message_field(2) """ The option's value packed in an Any message. If the value is a primitive, the corresponding wrapper type defined in google/protobuf/wrappers.proto @@ -624,7 +622,7 @@ class Option(betterproto2_compiler.Message): @dataclass(eq=False, repr=False) -class Api(betterproto2_compiler.Message): +class Api(betterproto.Message): """ Api is a light-weight descriptor for an API Interface. @@ -637,19 +635,19 @@ class Api(betterproto2_compiler.Message): detailed terminology. """ - name: str = betterproto2_compiler.string_field(1) + name: str = betterproto.string_field(1) """ The fully qualified name of this interface, including package name followed by the interface's simple name. """ - methods: List["Method"] = betterproto2_compiler.message_field(2, repeated=True) + methods: List["Method"] = betterproto.message_field(2, repeated=True) """The methods of this interface, in unspecified order.""" - options: List["Option"] = betterproto2_compiler.message_field(3, repeated=True) + options: List["Option"] = betterproto.message_field(3, repeated=True) """Any metadata attached to the interface.""" - version: str = betterproto2_compiler.string_field(4) + version: str = betterproto.string_field(4) """ A version string for this interface. 
If specified, must have the form `major-version.minor-version`, as in `1.10`. If the minor version is @@ -672,47 +670,47 @@ class Api(betterproto2_compiler.Message): experimental, non-GA interfaces. """ - source_context: "SourceContext" = betterproto2_compiler.message_field(5) + source_context: "SourceContext" = betterproto.message_field(5) """ Source context for the protocol buffer service represented by this message. """ - mixins: List["Mixin"] = betterproto2_compiler.message_field(6, repeated=True) + mixins: List["Mixin"] = betterproto.message_field(6, repeated=True) """Included interfaces. See [Mixin][].""" - syntax: "Syntax" = betterproto2_compiler.enum_field(7, enum_default_value=lambda: Syntax.try_value(0)) + syntax: "Syntax" = betterproto.enum_field(7, enum_default_value=lambda: Syntax.try_value(0)) """The source syntax of the service.""" @dataclass(eq=False, repr=False) -class Method(betterproto2_compiler.Message): +class Method(betterproto.Message): """Method represents a method of an API interface.""" - name: str = betterproto2_compiler.string_field(1) + name: str = betterproto.string_field(1) """The simple name of this method.""" - request_type_url: str = betterproto2_compiler.string_field(2) + request_type_url: str = betterproto.string_field(2) """A URL of the input message type.""" - request_streaming: bool = betterproto2_compiler.bool_field(3) + request_streaming: bool = betterproto.bool_field(3) """If true, the request is streamed.""" - response_type_url: str = betterproto2_compiler.string_field(4) + response_type_url: str = betterproto.string_field(4) """The URL of the output message type.""" - response_streaming: bool = betterproto2_compiler.bool_field(5) + response_streaming: bool = betterproto.bool_field(5) """If true, the response is streamed.""" - options: List["Option"] = betterproto2_compiler.message_field(6, repeated=True) + options: List["Option"] = betterproto.message_field(6, repeated=True) """Any metadata attached to the method.""" - syntax: "Syntax" = betterproto2_compiler.enum_field(7, enum_default_value=lambda: Syntax.try_value(0)) + syntax: "Syntax" = betterproto.enum_field(7, enum_default_value=lambda: Syntax.try_value(0)) """The source syntax of this method.""" @dataclass(eq=False, repr=False) -class Mixin(betterproto2_compiler.Message): +class Mixin(betterproto.Message): """ Declares an API Interface to be included in this interface. The including interface must redeclare all the methods from the included interface, but @@ -794,10 +792,10 @@ class Mixin(betterproto2_compiler.Message): } """ - name: str = betterproto2_compiler.string_field(1) + name: str = betterproto.string_field(1) """The fully qualified name of the interface which is included.""" - root: str = betterproto2_compiler.string_field(2) + root: str = betterproto.string_field(2) """ If non-empty specifies a path under which inherited HTTP paths are rooted. @@ -805,64 +803,64 @@ class Mixin(betterproto2_compiler.Message): @dataclass(eq=False, repr=False) -class FileDescriptorSet(betterproto2_compiler.Message): +class FileDescriptorSet(betterproto.Message): """ The protocol compiler can output a FileDescriptorSet containing the .proto files it parses. 
""" - file: List["FileDescriptorProto"] = betterproto2_compiler.message_field(1, repeated=True) + file: List["FileDescriptorProto"] = betterproto.message_field(1, repeated=True) """ """ @dataclass(eq=False, repr=False) -class FileDescriptorProto(betterproto2_compiler.Message): +class FileDescriptorProto(betterproto.Message): """Describes a complete .proto file.""" - name: str = betterproto2_compiler.string_field(1) + name: str = betterproto.string_field(1) """file name, relative to root of source tree""" - package: str = betterproto2_compiler.string_field(2) + package: str = betterproto.string_field(2) """e.g. "foo", "foo.bar", etc.""" - dependency: List[str] = betterproto2_compiler.string_field(3, repeated=True) + dependency: List[str] = betterproto.string_field(3, repeated=True) """Names of files imported by this file.""" - public_dependency: List[int] = betterproto2_compiler.int32_field(10, repeated=True) + public_dependency: List[int] = betterproto.int32_field(10, repeated=True) """Indexes of the public imported files in the dependency list above.""" - weak_dependency: List[int] = betterproto2_compiler.int32_field(11, repeated=True) + weak_dependency: List[int] = betterproto.int32_field(11, repeated=True) """ Indexes of the weak imported files in the dependency list. For Google-internal migration only. Do not use. """ - message_type: List["DescriptorProto"] = betterproto2_compiler.message_field(4, repeated=True) + message_type: List["DescriptorProto"] = betterproto.message_field(4, repeated=True) """All top-level definitions in this file.""" - enum_type: List["EnumDescriptorProto"] = betterproto2_compiler.message_field(5, repeated=True) + enum_type: List["EnumDescriptorProto"] = betterproto.message_field(5, repeated=True) """ """ - service: List["ServiceDescriptorProto"] = betterproto2_compiler.message_field(6, repeated=True) + service: List["ServiceDescriptorProto"] = betterproto.message_field(6, repeated=True) """ """ - extension: List["FieldDescriptorProto"] = betterproto2_compiler.message_field(7, repeated=True) + extension: List["FieldDescriptorProto"] = betterproto.message_field(7, repeated=True) """ """ - options: "FileOptions" = betterproto2_compiler.message_field(8) + options: "FileOptions" = betterproto.message_field(8) """ """ - source_code_info: "SourceCodeInfo" = betterproto2_compiler.message_field(9) + source_code_info: "SourceCodeInfo" = betterproto.message_field(9) """ This field contains optional information about the original source code. You may safely remove this entire field without harming runtime @@ -870,7 +868,7 @@ class FileDescriptorProto(betterproto2_compiler.Message): development tools. """ - syntax: str = betterproto2_compiler.string_field(12) + syntax: str = betterproto.string_field(12) """ The syntax of the proto file. The supported values are "proto2" and "proto3". 
@@ -878,55 +876,55 @@ class FileDescriptorProto(betterproto2_compiler.Message): @dataclass(eq=False, repr=False) -class DescriptorProto(betterproto2_compiler.Message): +class DescriptorProto(betterproto.Message): """Describes a message type.""" - name: str = betterproto2_compiler.string_field(1) + name: str = betterproto.string_field(1) """ """ - field: List["FieldDescriptorProto"] = betterproto2_compiler.message_field(2, repeated=True) + field: List["FieldDescriptorProto"] = betterproto.message_field(2, repeated=True) """ """ - extension: List["FieldDescriptorProto"] = betterproto2_compiler.message_field(6, repeated=True) + extension: List["FieldDescriptorProto"] = betterproto.message_field(6, repeated=True) """ """ - nested_type: List["DescriptorProto"] = betterproto2_compiler.message_field(3, repeated=True) + nested_type: List["DescriptorProto"] = betterproto.message_field(3, repeated=True) """ """ - enum_type: List["EnumDescriptorProto"] = betterproto2_compiler.message_field(4, repeated=True) + enum_type: List["EnumDescriptorProto"] = betterproto.message_field(4, repeated=True) """ """ - extension_range: List["DescriptorProtoExtensionRange"] = betterproto2_compiler.message_field(5, repeated=True) + extension_range: List["DescriptorProtoExtensionRange"] = betterproto.message_field(5, repeated=True) """ """ - oneof_decl: List["OneofDescriptorProto"] = betterproto2_compiler.message_field(8, repeated=True) + oneof_decl: List["OneofDescriptorProto"] = betterproto.message_field(8, repeated=True) """ """ - options: "MessageOptions" = betterproto2_compiler.message_field(7) + options: "MessageOptions" = betterproto.message_field(7) """ """ - reserved_range: List["DescriptorProtoReservedRange"] = betterproto2_compiler.message_field(9, repeated=True) + reserved_range: List["DescriptorProtoReservedRange"] = betterproto.message_field(9, repeated=True) """ """ - reserved_name: List[str] = betterproto2_compiler.string_field(10, repeated=True) + reserved_name: List[str] = betterproto.string_field(10, repeated=True) """ Reserved field names, which may not be used by fields in the same message. A given name may only be reserved once. @@ -934,66 +932,66 @@ class DescriptorProto(betterproto2_compiler.Message): @dataclass(eq=False, repr=False) -class DescriptorProtoExtensionRange(betterproto2_compiler.Message): +class DescriptorProtoExtensionRange(betterproto.Message): """ """ - start: int = betterproto2_compiler.int32_field(1) + start: int = betterproto.int32_field(1) """Inclusive.""" - end: int = betterproto2_compiler.int32_field(2) + end: int = betterproto.int32_field(2) """Exclusive.""" - options: "ExtensionRangeOptions" = betterproto2_compiler.message_field(3) + options: "ExtensionRangeOptions" = betterproto.message_field(3) """ """ @dataclass(eq=False, repr=False) -class DescriptorProtoReservedRange(betterproto2_compiler.Message): +class DescriptorProtoReservedRange(betterproto.Message): """ Range of reserved tag numbers. Reserved tag numbers may not be used by fields or extension ranges in the same message. Reserved ranges may not overlap. 
""" - start: int = betterproto2_compiler.int32_field(1) + start: int = betterproto.int32_field(1) """Inclusive.""" - end: int = betterproto2_compiler.int32_field(2) + end: int = betterproto.int32_field(2) """Exclusive.""" @dataclass(eq=False, repr=False) -class ExtensionRangeOptions(betterproto2_compiler.Message): +class ExtensionRangeOptions(betterproto.Message): """ """ - uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999, repeated=True) + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999, repeated=True) """The parser stores options it doesn't recognize here. See above.""" @dataclass(eq=False, repr=False) -class FieldDescriptorProto(betterproto2_compiler.Message): +class FieldDescriptorProto(betterproto.Message): """Describes a field within a message.""" - name: str = betterproto2_compiler.string_field(1) + name: str = betterproto.string_field(1) """ """ - number: int = betterproto2_compiler.int32_field(3) + number: int = betterproto.int32_field(3) """ """ - label: "FieldDescriptorProtoLabel" = betterproto2_compiler.enum_field( + label: "FieldDescriptorProtoLabel" = betterproto.enum_field( 4, enum_default_value=lambda: FieldDescriptorProtoLabel.try_value(0) ) """ """ - type: "FieldDescriptorProtoType" = betterproto2_compiler.enum_field( + type: "FieldDescriptorProtoType" = betterproto.enum_field( 5, enum_default_value=lambda: FieldDescriptorProtoType.try_value(0) ) """ @@ -1001,7 +999,7 @@ class FieldDescriptorProto(betterproto2_compiler.Message): are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. """ - type_name: str = betterproto2_compiler.string_field(6) + type_name: str = betterproto.string_field(6) """ For message and enum types, this is the name of the type. If the name starts with a '.', it is fully-qualified. Otherwise, C++-like scoping @@ -1010,13 +1008,13 @@ class FieldDescriptorProto(betterproto2_compiler.Message): namespace). """ - extendee: str = betterproto2_compiler.string_field(2) + extendee: str = betterproto.string_field(2) """ For extensions, this is the name of the type being extended. It is resolved in the same manner as type_name. """ - default_value: str = betterproto2_compiler.string_field(7) + default_value: str = betterproto.string_field(7) """ For numeric types, contains the original text representation of the value. For booleans, "true" or "false". @@ -1025,13 +1023,13 @@ class FieldDescriptorProto(betterproto2_compiler.Message): TODO(kenton): Base-64 encode? """ - oneof_index: Optional[int] = betterproto2_compiler.int32_field(9, optional=True) + oneof_index: Optional[int] = betterproto.int32_field(9, optional=True) """ If set, gives the index of a oneof in the containing type's oneof_decl list. This field is a member of that oneof. """ - json_name: str = betterproto2_compiler.string_field(10) + json_name: str = betterproto.string_field(10) """ JSON name of this field. The value is set by protocol compiler. If the user has set a "json_name" option on this field, that option's value @@ -1039,12 +1037,12 @@ class FieldDescriptorProto(betterproto2_compiler.Message): it to camelCase. """ - options: "FieldOptions" = betterproto2_compiler.message_field(8) + options: "FieldOptions" = betterproto.message_field(8) """ """ - proto3_optional: bool = betterproto2_compiler.bool_field(17) + proto3_optional: bool = betterproto.bool_field(17) """ If true, this is a proto3 "optional". When a proto3 field is optional, it tracks presence regardless of field type. 
@@ -1071,47 +1069,47 @@ class FieldDescriptorProto(betterproto2_compiler.Message): @dataclass(eq=False, repr=False) -class OneofDescriptorProto(betterproto2_compiler.Message): +class OneofDescriptorProto(betterproto.Message): """Describes a oneof.""" - name: str = betterproto2_compiler.string_field(1) + name: str = betterproto.string_field(1) """ """ - options: "OneofOptions" = betterproto2_compiler.message_field(2) + options: "OneofOptions" = betterproto.message_field(2) """ """ @dataclass(eq=False, repr=False) -class EnumDescriptorProto(betterproto2_compiler.Message): +class EnumDescriptorProto(betterproto.Message): """Describes an enum type.""" - name: str = betterproto2_compiler.string_field(1) + name: str = betterproto.string_field(1) """ """ - value: List["EnumValueDescriptorProto"] = betterproto2_compiler.message_field(2, repeated=True) + value: List["EnumValueDescriptorProto"] = betterproto.message_field(2, repeated=True) """ """ - options: "EnumOptions" = betterproto2_compiler.message_field(3) + options: "EnumOptions" = betterproto.message_field(3) """ """ - reserved_range: List["EnumDescriptorProtoEnumReservedRange"] = betterproto2_compiler.message_field(4, repeated=True) + reserved_range: List["EnumDescriptorProtoEnumReservedRange"] = betterproto.message_field(4, repeated=True) """ Range of reserved numeric values. Reserved numeric values may not be used by enum values in the same enum declaration. Reserved ranges may not overlap. """ - reserved_name: List[str] = betterproto2_compiler.string_field(5, repeated=True) + reserved_name: List[str] = betterproto.string_field(5, repeated=True) """ Reserved enum value names, which may not be reused. A given name may only be reserved once. @@ -1119,7 +1117,7 @@ class EnumDescriptorProto(betterproto2_compiler.Message): @dataclass(eq=False, repr=False) -class EnumDescriptorProtoEnumReservedRange(betterproto2_compiler.Message): +class EnumDescriptorProtoEnumReservedRange(betterproto.Message): """ Range of reserved numeric values. Reserved values may not be used by entries in the same enum. Reserved ranges may not overlap. @@ -1129,87 +1127,87 @@ class EnumDescriptorProtoEnumReservedRange(betterproto2_compiler.Message): domain. 
""" - start: int = betterproto2_compiler.int32_field(1) + start: int = betterproto.int32_field(1) """Inclusive.""" - end: int = betterproto2_compiler.int32_field(2) + end: int = betterproto.int32_field(2) """Inclusive.""" @dataclass(eq=False, repr=False) -class EnumValueDescriptorProto(betterproto2_compiler.Message): +class EnumValueDescriptorProto(betterproto.Message): """Describes a value within an enum.""" - name: str = betterproto2_compiler.string_field(1) + name: str = betterproto.string_field(1) """ """ - number: int = betterproto2_compiler.int32_field(2) + number: int = betterproto.int32_field(2) """ """ - options: "EnumValueOptions" = betterproto2_compiler.message_field(3) + options: "EnumValueOptions" = betterproto.message_field(3) """ """ @dataclass(eq=False, repr=False) -class ServiceDescriptorProto(betterproto2_compiler.Message): +class ServiceDescriptorProto(betterproto.Message): """Describes a service.""" - name: str = betterproto2_compiler.string_field(1) + name: str = betterproto.string_field(1) """ """ - method: List["MethodDescriptorProto"] = betterproto2_compiler.message_field(2, repeated=True) + method: List["MethodDescriptorProto"] = betterproto.message_field(2, repeated=True) """ """ - options: "ServiceOptions" = betterproto2_compiler.message_field(3) + options: "ServiceOptions" = betterproto.message_field(3) """ """ @dataclass(eq=False, repr=False) -class MethodDescriptorProto(betterproto2_compiler.Message): +class MethodDescriptorProto(betterproto.Message): """Describes a method of a service.""" - name: str = betterproto2_compiler.string_field(1) + name: str = betterproto.string_field(1) """ """ - input_type: str = betterproto2_compiler.string_field(2) + input_type: str = betterproto.string_field(2) """ Input and output type names. These are resolved in the same way as FieldDescriptorProto.type_name, but must refer to a message type. """ - output_type: str = betterproto2_compiler.string_field(3) + output_type: str = betterproto.string_field(3) """ """ - options: "MethodOptions" = betterproto2_compiler.message_field(4) + options: "MethodOptions" = betterproto.message_field(4) """ """ - client_streaming: bool = betterproto2_compiler.bool_field(5) + client_streaming: bool = betterproto.bool_field(5) """Identifies if client streams multiple client messages""" - server_streaming: bool = betterproto2_compiler.bool_field(6) + server_streaming: bool = betterproto.bool_field(6) """Identifies if server streams multiple server messages""" @dataclass(eq=False, repr=False) -class FileOptions(betterproto2_compiler.Message): +class FileOptions(betterproto.Message): """ =================================================================== Options @@ -1244,7 +1242,7 @@ class FileOptions(betterproto2_compiler.Message): to automatically assign option numbers. """ - java_package: str = betterproto2_compiler.string_field(1) + java_package: str = betterproto.string_field(1) """ Sets the Java package where classes generated from this .proto will be placed. By default, the proto package is used, but this is often @@ -1252,7 +1250,7 @@ class FileOptions(betterproto2_compiler.Message): domain names. """ - java_outer_classname: str = betterproto2_compiler.string_field(8) + java_outer_classname: str = betterproto.string_field(8) """ If set, all the classes from the .proto file are wrapped in a single outer class with the given name. This applies to both Proto1 @@ -1261,7 +1259,7 @@ class FileOptions(betterproto2_compiler.Message): explicitly choose the class name). 
""" - java_multiple_files: bool = betterproto2_compiler.bool_field(10) + java_multiple_files: bool = betterproto.bool_field(10) """ If set true, then the Java code generator will generate a separate .java file for each top-level message, enum, and service defined in the .proto @@ -1271,10 +1269,10 @@ class FileOptions(betterproto2_compiler.Message): top-level extensions defined in the file. """ - java_generate_equals_and_hash: bool = betterproto2_compiler.bool_field(20) + java_generate_equals_and_hash: bool = betterproto.bool_field(20) """This option does nothing.""" - java_string_check_utf8: bool = betterproto2_compiler.bool_field(27) + java_string_check_utf8: bool = betterproto.bool_field(27) """ If set true, then the Java2 code generator will generate code that throws an exception whenever an attempt is made to assign a non-UTF-8 @@ -1284,14 +1282,14 @@ class FileOptions(betterproto2_compiler.Message): This option has no effect on when used with the lite runtime. """ - optimize_for: "FileOptionsOptimizeMode" = betterproto2_compiler.enum_field( + optimize_for: "FileOptionsOptimizeMode" = betterproto.enum_field( 9, enum_default_value=lambda: FileOptionsOptimizeMode.try_value(0) ) """ """ - go_package: str = betterproto2_compiler.string_field(11) + go_package: str = betterproto.string_field(11) """ Sets the Go package where structs generated from this .proto will be placed. If omitted, the Go package will be derived from the following: @@ -1300,7 +1298,7 @@ class FileOptions(betterproto2_compiler.Message): - Otherwise, the basename of the .proto file, without extension. """ - cc_generic_services: bool = betterproto2_compiler.bool_field(16) + cc_generic_services: bool = betterproto.bool_field(16) """ Should generic services be generated in each language? "Generic" services are not specific to any particular RPC system. They are generated by the @@ -1314,22 +1312,22 @@ class FileOptions(betterproto2_compiler.Message): explicitly set them to true. """ - java_generic_services: bool = betterproto2_compiler.bool_field(17) + java_generic_services: bool = betterproto.bool_field(17) """ """ - py_generic_services: bool = betterproto2_compiler.bool_field(18) + py_generic_services: bool = betterproto.bool_field(18) """ """ - php_generic_services: bool = betterproto2_compiler.bool_field(42) + php_generic_services: bool = betterproto.bool_field(42) """ """ - deprecated: bool = betterproto2_compiler.bool_field(23) + deprecated: bool = betterproto.bool_field(23) """ Is this file deprecated? Depending on the target platform, this can emit Deprecated annotations @@ -1337,22 +1335,22 @@ class FileOptions(betterproto2_compiler.Message): least, this is a formalization for deprecating files. """ - cc_enable_arenas: bool = betterproto2_compiler.bool_field(31) + cc_enable_arenas: bool = betterproto.bool_field(31) """ Enables the use of arenas for the proto messages in this file. This applies only to generated classes for C++. """ - objc_class_prefix: str = betterproto2_compiler.string_field(36) + objc_class_prefix: str = betterproto.string_field(36) """ Sets the objective c class prefix which is prepended to all objective c generated classes from this .proto. There is no default. 
""" - csharp_namespace: str = betterproto2_compiler.string_field(37) + csharp_namespace: str = betterproto.string_field(37) """Namespace for generated classes; defaults to the package.""" - swift_prefix: str = betterproto2_compiler.string_field(39) + swift_prefix: str = betterproto.string_field(39) """ By default Swift generators will take the proto package and CamelCase it replacing '.' with underscore and use that to prefix the types/symbols @@ -1360,34 +1358,34 @@ class FileOptions(betterproto2_compiler.Message): to prefix the types/symbols defined. """ - php_class_prefix: str = betterproto2_compiler.string_field(40) + php_class_prefix: str = betterproto.string_field(40) """ Sets the php class prefix which is prepended to all php generated classes from this .proto. Default is empty. """ - php_namespace: str = betterproto2_compiler.string_field(41) + php_namespace: str = betterproto.string_field(41) """ Use this option to change the namespace of php generated classes. Default is empty. When this option is empty, the package name will be used for determining the namespace. """ - php_metadata_namespace: str = betterproto2_compiler.string_field(44) + php_metadata_namespace: str = betterproto.string_field(44) """ Use this option to change the namespace of php generated metadata classes. Default is empty. When this option is empty, the proto file name will be used for determining the namespace. """ - ruby_package: str = betterproto2_compiler.string_field(45) + ruby_package: str = betterproto.string_field(45) """ Use this option to change the package of ruby generated classes. Default is empty. When this option is not set, the package name will be used for determining the ruby package. """ - uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999, repeated=True) + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999, repeated=True) """ The parser stores options it doesn't recognize here. See the documentation for the "Options" section above. @@ -1403,10 +1401,10 @@ def __post_init__(self) -> None: @dataclass(eq=False, repr=False) -class MessageOptions(betterproto2_compiler.Message): +class MessageOptions(betterproto.Message): """ """ - message_set_wire_format: bool = betterproto2_compiler.bool_field(1) + message_set_wire_format: bool = betterproto.bool_field(1) """ Set true to use the old proto1 MessageSet wire format for extensions. This is provided for backwards-compatibility with the MessageSet wire @@ -1428,14 +1426,14 @@ class MessageOptions(betterproto2_compiler.Message): the protocol compiler. """ - no_standard_descriptor_accessor: bool = betterproto2_compiler.bool_field(2) + no_standard_descriptor_accessor: bool = betterproto.bool_field(2) """ Disables the generation of the standard "descriptor()" accessor, which can conflict with a field of the same name. This is meant to make migration from proto1 easier; new code should avoid fields named "descriptor". """ - deprecated: bool = betterproto2_compiler.bool_field(3) + deprecated: bool = betterproto.bool_field(3) """ Is this message deprecated? Depending on the target platform, this can emit Deprecated annotations @@ -1443,7 +1441,7 @@ class MessageOptions(betterproto2_compiler.Message): this is a formalization for deprecating messages. """ - map_entry: bool = betterproto2_compiler.bool_field(7) + map_entry: bool = betterproto.bool_field(7) """ Whether the message is an automatically generated map entry type for the maps field. 
@@ -1468,17 +1466,15 @@ class MessageOptions(betterproto2_compiler.Message): parser. """ - uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999, repeated=True) + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999, repeated=True) """The parser stores options it doesn't recognize here. See above.""" @dataclass(eq=False, repr=False) -class FieldOptions(betterproto2_compiler.Message): +class FieldOptions(betterproto.Message): """ """ - ctype: "FieldOptionsCType" = betterproto2_compiler.enum_field( - 1, enum_default_value=lambda: FieldOptionsCType.try_value(0) - ) + ctype: "FieldOptionsCType" = betterproto.enum_field(1, enum_default_value=lambda: FieldOptionsCType.try_value(0)) """ The ctype option instructs the C++ code generator to use a different representation of the field than it normally would. See the specific @@ -1486,7 +1482,7 @@ class FieldOptions(betterproto2_compiler.Message): release -- sorry, we'll try to include it in a future version! """ - packed: bool = betterproto2_compiler.bool_field(2) + packed: bool = betterproto.bool_field(2) """ The packed option can be enabled for repeated primitive fields to enable a more efficient representation on the wire. Rather than repeatedly @@ -1495,9 +1491,7 @@ class FieldOptions(betterproto2_compiler.Message): false will avoid using packed encoding. """ - jstype: "FieldOptionsJsType" = betterproto2_compiler.enum_field( - 6, enum_default_value=lambda: FieldOptionsJsType.try_value(0) - ) + jstype: "FieldOptionsJsType" = betterproto.enum_field(6, enum_default_value=lambda: FieldOptionsJsType.try_value(0)) """ The jstype option determines the JavaScript type used for values of the field. The option is permitted only for 64 bit integral and fixed types @@ -1512,7 +1506,7 @@ class FieldOptions(betterproto2_compiler.Message): goog.math.Integer. """ - lazy: bool = betterproto2_compiler.bool_field(5) + lazy: bool = betterproto.bool_field(5) """ Should this field be parsed lazily? Lazy applies only to message-type fields. It means that when the outer message is initially parsed, the @@ -1543,7 +1537,7 @@ class FieldOptions(betterproto2_compiler.Message): been parsed. """ - deprecated: bool = betterproto2_compiler.bool_field(3) + deprecated: bool = betterproto.bool_field(3) """ Is this field deprecated? Depending on the target platform, this can emit Deprecated annotations @@ -1551,32 +1545,32 @@ class FieldOptions(betterproto2_compiler.Message): is a formalization for deprecating fields. """ - weak: bool = betterproto2_compiler.bool_field(10) + weak: bool = betterproto.bool_field(10) """For Google-internal migration only. Do not use.""" - uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999, repeated=True) + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999, repeated=True) """The parser stores options it doesn't recognize here. See above.""" @dataclass(eq=False, repr=False) -class OneofOptions(betterproto2_compiler.Message): +class OneofOptions(betterproto.Message): """ """ - uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999, repeated=True) + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999, repeated=True) """The parser stores options it doesn't recognize here. 
See above.""" @dataclass(eq=False, repr=False) -class EnumOptions(betterproto2_compiler.Message): +class EnumOptions(betterproto.Message): """ """ - allow_alias: bool = betterproto2_compiler.bool_field(2) + allow_alias: bool = betterproto.bool_field(2) """ Set this option to true to allow mapping different tag names to the same value. """ - deprecated: bool = betterproto2_compiler.bool_field(3) + deprecated: bool = betterproto.bool_field(3) """ Is this enum deprecated? Depending on the target platform, this can emit Deprecated annotations @@ -1584,15 +1578,15 @@ class EnumOptions(betterproto2_compiler.Message): is a formalization for deprecating enums. """ - uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999, repeated=True) + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999, repeated=True) """The parser stores options it doesn't recognize here. See above.""" @dataclass(eq=False, repr=False) -class EnumValueOptions(betterproto2_compiler.Message): +class EnumValueOptions(betterproto.Message): """ """ - deprecated: bool = betterproto2_compiler.bool_field(1) + deprecated: bool = betterproto.bool_field(1) """ Is this enum value deprecated? Depending on the target platform, this can emit Deprecated annotations @@ -1600,15 +1594,15 @@ class EnumValueOptions(betterproto2_compiler.Message): this is a formalization for deprecating enum values. """ - uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999, repeated=True) + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999, repeated=True) """The parser stores options it doesn't recognize here. See above.""" @dataclass(eq=False, repr=False) -class ServiceOptions(betterproto2_compiler.Message): +class ServiceOptions(betterproto.Message): """ """ - deprecated: bool = betterproto2_compiler.bool_field(33) + deprecated: bool = betterproto.bool_field(33) """ Note: Field numbers 1 through 32 are reserved for Google's internal RPC framework. We apologize for hoarding these numbers to ourselves, but @@ -1621,15 +1615,15 @@ class ServiceOptions(betterproto2_compiler.Message): this is a formalization for deprecating services. """ - uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999, repeated=True) + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999, repeated=True) """The parser stores options it doesn't recognize here. See above.""" @dataclass(eq=False, repr=False) -class MethodOptions(betterproto2_compiler.Message): +class MethodOptions(betterproto.Message): """ """ - deprecated: bool = betterproto2_compiler.bool_field(33) + deprecated: bool = betterproto.bool_field(33) """ Note: Field numbers 1 through 32 are reserved for Google's internal RPC framework. We apologize for hoarding these numbers to ourselves, but @@ -1642,19 +1636,19 @@ class MethodOptions(betterproto2_compiler.Message): this is a formalization for deprecating methods. """ - idempotency_level: "MethodOptionsIdempotencyLevel" = betterproto2_compiler.enum_field( + idempotency_level: "MethodOptionsIdempotencyLevel" = betterproto.enum_field( 34, enum_default_value=lambda: MethodOptionsIdempotencyLevel.try_value(0) ) """ """ - uninterpreted_option: List["UninterpretedOption"] = betterproto2_compiler.message_field(999, repeated=True) + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999, repeated=True) """The parser stores options it doesn't recognize here. 
See above.""" @dataclass(eq=False, repr=False) -class UninterpretedOption(betterproto2_compiler.Message): +class UninterpretedOption(betterproto.Message): """ A message representing a option the parser does not recognize. This only appears in options protos created by the compiler::Parser class. @@ -1664,45 +1658,45 @@ class UninterpretedOption(betterproto2_compiler.Message): in them. """ - name: List["UninterpretedOptionNamePart"] = betterproto2_compiler.message_field(2, repeated=True) + name: List["UninterpretedOptionNamePart"] = betterproto.message_field(2, repeated=True) """ """ - identifier_value: str = betterproto2_compiler.string_field(3) + identifier_value: str = betterproto.string_field(3) """ The value of the uninterpreted option, in whatever type the tokenizer identified it as during parsing. Exactly one of these should be set. """ - positive_int_value: int = betterproto2_compiler.uint64_field(4) + positive_int_value: int = betterproto.uint64_field(4) """ """ - negative_int_value: int = betterproto2_compiler.int64_field(5) + negative_int_value: int = betterproto.int64_field(5) """ """ - double_value: float = betterproto2_compiler.double_field(6) + double_value: float = betterproto.double_field(6) """ """ - string_value: bytes = betterproto2_compiler.bytes_field(7) + string_value: bytes = betterproto.bytes_field(7) """ """ - aggregate_value: str = betterproto2_compiler.string_field(8) + aggregate_value: str = betterproto.string_field(8) """ """ @dataclass(eq=False, repr=False) -class UninterpretedOptionNamePart(betterproto2_compiler.Message): +class UninterpretedOptionNamePart(betterproto.Message): """ The name of the uninterpreted option. Each string represents a segment in a dot-separated name. is_extension is true iff a segment represents an @@ -1711,19 +1705,19 @@ class UninterpretedOptionNamePart(betterproto2_compiler.Message): "foo.(bar.baz).qux". """ - name_part: str = betterproto2_compiler.string_field(1) + name_part: str = betterproto.string_field(1) """ """ - is_extension: bool = betterproto2_compiler.bool_field(2) + is_extension: bool = betterproto.bool_field(2) """ """ @dataclass(eq=False, repr=False) -class SourceCodeInfo(betterproto2_compiler.Message): +class SourceCodeInfo(betterproto.Message): """ =================================================================== Optional source code info @@ -1732,7 +1726,7 @@ class SourceCodeInfo(betterproto2_compiler.Message): FileDescriptorProto was generated. """ - location: List["SourceCodeInfoLocation"] = betterproto2_compiler.message_field(1, repeated=True) + location: List["SourceCodeInfoLocation"] = betterproto.message_field(1, repeated=True) """ A Location identifies a piece of source code in a .proto file which corresponds to a particular definition. This information is intended @@ -1781,10 +1775,10 @@ class SourceCodeInfo(betterproto2_compiler.Message): @dataclass(eq=False, repr=False) -class SourceCodeInfoLocation(betterproto2_compiler.Message): +class SourceCodeInfoLocation(betterproto.Message): """ """ - path: List[int] = betterproto2_compiler.int32_field(1, repeated=True) + path: List[int] = betterproto.int32_field(1, repeated=True) """ Identifies which part of the FileDescriptorProto was defined at this location. @@ -1811,7 +1805,7 @@ class SourceCodeInfoLocation(betterproto2_compiler.Message): of the label to the terminating semicolon). 
""" - span: List[int] = betterproto2_compiler.int32_field(2, repeated=True) + span: List[int] = betterproto.int32_field(2, repeated=True) """ Always has exactly three or four elements: start line, start column, end line (optional, otherwise assumed same as start line), end column. @@ -1820,7 +1814,7 @@ class SourceCodeInfoLocation(betterproto2_compiler.Message): 1 to each before displaying to a user. """ - leading_comments: str = betterproto2_compiler.string_field(3) + leading_comments: str = betterproto.string_field(3) """ If this SourceCodeInfo represents a complete declaration, these are any comments appearing before and after the declaration which appear to be @@ -1871,26 +1865,26 @@ class SourceCodeInfoLocation(betterproto2_compiler.Message): // ignored detached comments. """ - trailing_comments: str = betterproto2_compiler.string_field(4) + trailing_comments: str = betterproto.string_field(4) """ """ - leading_detached_comments: List[str] = betterproto2_compiler.string_field(6, repeated=True) + leading_detached_comments: List[str] = betterproto.string_field(6, repeated=True) """ """ @dataclass(eq=False, repr=False) -class GeneratedCodeInfo(betterproto2_compiler.Message): +class GeneratedCodeInfo(betterproto.Message): """ Describes the relationship between generated code and its original source file. A GeneratedCodeInfo message is associated with only one generated source file, but may contain references to different source .proto files. """ - annotation: List["GeneratedCodeInfoAnnotation"] = betterproto2_compiler.message_field(1, repeated=True) + annotation: List["GeneratedCodeInfoAnnotation"] = betterproto.message_field(1, repeated=True) """ An Annotation connects some span of text in generated code to an element of its generating .proto file. @@ -1898,25 +1892,25 @@ class GeneratedCodeInfo(betterproto2_compiler.Message): @dataclass(eq=False, repr=False) -class GeneratedCodeInfoAnnotation(betterproto2_compiler.Message): +class GeneratedCodeInfoAnnotation(betterproto.Message): """ """ - path: List[int] = betterproto2_compiler.int32_field(1, repeated=True) + path: List[int] = betterproto.int32_field(1, repeated=True) """ Identifies the element in the original source .proto file. This field is formatted the same as SourceCodeInfo.Location.path. """ - source_file: str = betterproto2_compiler.string_field(2) + source_file: str = betterproto.string_field(2) """Identifies the filesystem path to the original source .proto.""" - begin: int = betterproto2_compiler.int32_field(3) + begin: int = betterproto.int32_field(3) """ Identifies the starting offset in bytes in the generated code that relates to the identified object. """ - end: int = betterproto2_compiler.int32_field(4) + end: int = betterproto.int32_field(4) """ Identifies the ending offset in bytes in the generated code that relates to the identified offset. The end offset should be one past @@ -1925,7 +1919,7 @@ class GeneratedCodeInfoAnnotation(betterproto2_compiler.Message): @dataclass(eq=False, repr=False) -class Duration(betterproto2_compiler.Message): +class Duration(betterproto.Message): """ A Duration represents a signed, fixed-length span of time represented as a count of seconds and fractions of seconds at nanosecond @@ -1987,14 +1981,14 @@ class Duration(betterproto2_compiler.Message): microsecond should be expressed in JSON format as "3.000001s". """ - seconds: int = betterproto2_compiler.int64_field(1) + seconds: int = betterproto.int64_field(1) """ Signed seconds of the span of time. 
Must be from -315,576,000,000 to +315,576,000,000 inclusive. Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years """ - nanos: int = betterproto2_compiler.int32_field(2) + nanos: int = betterproto.int32_field(2) """ Signed fractions of a second at nanosecond resolution of the span of time. Durations less than one second are represented with a 0 @@ -2006,7 +2000,7 @@ class Duration(betterproto2_compiler.Message): @dataclass(eq=False, repr=False) -class Empty(betterproto2_compiler.Message): +class Empty(betterproto.Message): """ A generic empty message that you can re-use to avoid defining duplicated empty messages in your APIs. A typical example is to use it as the request @@ -2023,7 +2017,7 @@ class Empty(betterproto2_compiler.Message): @dataclass(eq=False, repr=False) -class FieldMask(betterproto2_compiler.Message): +class FieldMask(betterproto.Message): """ `FieldMask` represents a set of symbolic field paths, for example: @@ -2225,12 +2219,12 @@ class FieldMask(betterproto2_compiler.Message): `INVALID_ARGUMENT` error if any path is unmappable. """ - paths: List[str] = betterproto2_compiler.string_field(1, repeated=True) + paths: List[str] = betterproto.string_field(1, repeated=True) """The set of field mask paths.""" @dataclass(eq=False, repr=False) -class Struct(betterproto2_compiler.Message): +class Struct(betterproto.Message): """ `Struct` represents a structured data value, consisting of fields which map to dynamically typed values. In some languages, `Struct` @@ -2242,9 +2236,7 @@ class Struct(betterproto2_compiler.Message): The JSON representation for `Struct` is JSON object. """ - fields: Dict[str, "Value"] = betterproto2_compiler.map_field( - 1, betterproto2_compiler.TYPE_STRING, betterproto2_compiler.TYPE_MESSAGE - ) + fields: Dict[str, "Value"] = betterproto.map_field(1, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE) """Unordered map of dynamically typed values.""" @hybridmethod @@ -2264,7 +2256,7 @@ def from_dict(self, value: Mapping[str, Any]) -> Self: def to_dict( self, - casing: betterproto2_compiler.Casing = betterproto2_compiler.Casing.CAMEL, + casing: betterproto.Casing = betterproto.Casing.CAMEL, include_default_values: bool = False, ) -> Dict[str, Any]: output = {**self.fields} @@ -2275,7 +2267,7 @@ def to_dict( @dataclass(eq=False, repr=False) -class Value(betterproto2_compiler.Message): +class Value(betterproto.Message): """ `Value` represents a dynamically typed value which can be either null, a number, a string, a boolean, a recursive struct value, or a @@ -2285,41 +2277,39 @@ class Value(betterproto2_compiler.Message): The JSON representation for `Value` is JSON value. 
""" - null_value: "NullValue" = betterproto2_compiler.enum_field( - 1, enum_default_value=lambda: NullValue.try_value(0), group="kind" - ) + null_value: "NullValue" = betterproto.enum_field(1, enum_default_value=lambda: NullValue.try_value(0), group="kind") """Represents a null value.""" - number_value: float = betterproto2_compiler.double_field(2, group="kind") + number_value: float = betterproto.double_field(2, group="kind") """Represents a double value.""" - string_value: str = betterproto2_compiler.string_field(3, group="kind") + string_value: str = betterproto.string_field(3, group="kind") """Represents a string value.""" - bool_value: bool = betterproto2_compiler.bool_field(4, group="kind") + bool_value: bool = betterproto.bool_field(4, group="kind") """Represents a boolean value.""" - struct_value: "Struct" = betterproto2_compiler.message_field(5, group="kind") + struct_value: "Struct" = betterproto.message_field(5, group="kind") """Represents a structured value.""" - list_value: "ListValue" = betterproto2_compiler.message_field(6, group="kind") + list_value: "ListValue" = betterproto.message_field(6, group="kind") """Represents a repeated `Value`.""" @dataclass(eq=False, repr=False) -class ListValue(betterproto2_compiler.Message): +class ListValue(betterproto.Message): """ `ListValue` is a wrapper around a repeated field of values. The JSON representation for `ListValue` is JSON array. """ - values: List["Value"] = betterproto2_compiler.message_field(1, repeated=True) + values: List["Value"] = betterproto.message_field(1, repeated=True) """Repeated field of dynamically typed values.""" @dataclass(eq=False, repr=False) -class Timestamp(betterproto2_compiler.Message): +class Timestamp(betterproto.Message): """ A Timestamp represents a point in time independent of any time zone or local calendar, encoded as a count of seconds and fractions of seconds at @@ -2404,14 +2394,14 @@ class Timestamp(betterproto2_compiler.Message): ) to obtain a formatter capable of generating timestamps in this format. """ - seconds: int = betterproto2_compiler.int64_field(1) + seconds: int = betterproto.int64_field(1) """ Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. """ - nanos: int = betterproto2_compiler.int32_field(2) + nanos: int = betterproto.int32_field(2) """ Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values @@ -2421,108 +2411,108 @@ class Timestamp(betterproto2_compiler.Message): @dataclass(eq=False, repr=False) -class DoubleValue(betterproto2_compiler.Message): +class DoubleValue(betterproto.Message): """ Wrapper message for `double`. The JSON representation for `DoubleValue` is JSON number. """ - value: float = betterproto2_compiler.double_field(1) + value: float = betterproto.double_field(1) """The double value.""" @dataclass(eq=False, repr=False) -class FloatValue(betterproto2_compiler.Message): +class FloatValue(betterproto.Message): """ Wrapper message for `float`. The JSON representation for `FloatValue` is JSON number. """ - value: float = betterproto2_compiler.float_field(1) + value: float = betterproto.float_field(1) """The float value.""" @dataclass(eq=False, repr=False) -class Int64Value(betterproto2_compiler.Message): +class Int64Value(betterproto.Message): """ Wrapper message for `int64`. The JSON representation for `Int64Value` is JSON string. 
""" - value: int = betterproto2_compiler.int64_field(1) + value: int = betterproto.int64_field(1) """The int64 value.""" @dataclass(eq=False, repr=False) -class UInt64Value(betterproto2_compiler.Message): +class UInt64Value(betterproto.Message): """ Wrapper message for `uint64`. The JSON representation for `UInt64Value` is JSON string. """ - value: int = betterproto2_compiler.uint64_field(1) + value: int = betterproto.uint64_field(1) """The uint64 value.""" @dataclass(eq=False, repr=False) -class Int32Value(betterproto2_compiler.Message): +class Int32Value(betterproto.Message): """ Wrapper message for `int32`. The JSON representation for `Int32Value` is JSON number. """ - value: int = betterproto2_compiler.int32_field(1) + value: int = betterproto.int32_field(1) """The int32 value.""" @dataclass(eq=False, repr=False) -class UInt32Value(betterproto2_compiler.Message): +class UInt32Value(betterproto.Message): """ Wrapper message for `uint32`. The JSON representation for `UInt32Value` is JSON number. """ - value: int = betterproto2_compiler.uint32_field(1) + value: int = betterproto.uint32_field(1) """The uint32 value.""" @dataclass(eq=False, repr=False) -class BoolValue(betterproto2_compiler.Message): +class BoolValue(betterproto.Message): """ Wrapper message for `bool`. The JSON representation for `BoolValue` is JSON `true` and `false`. """ - value: bool = betterproto2_compiler.bool_field(1) + value: bool = betterproto.bool_field(1) """The bool value.""" @dataclass(eq=False, repr=False) -class StringValue(betterproto2_compiler.Message): +class StringValue(betterproto.Message): """ Wrapper message for `string`. The JSON representation for `StringValue` is JSON string. """ - value: str = betterproto2_compiler.string_field(1) + value: str = betterproto.string_field(1) """The string value.""" @dataclass(eq=False, repr=False) -class BytesValue(betterproto2_compiler.Message): +class BytesValue(betterproto.Message): """ Wrapper message for `bytes`. The JSON representation for `BytesValue` is JSON string. 
""" - value: bytes = betterproto2_compiler.bytes_field(1) + value: bytes = betterproto.bytes_field(1) """The bytes value.""" diff --git a/src/betterproto2_compiler/lib/std/google/protobuf/compiler/__init__.py b/src/betterproto2_compiler/lib/std/google/protobuf/compiler/__init__.py index 33087afc..fb93bd3e 100644 --- a/src/betterproto2_compiler/lib/std/google/protobuf/compiler/__init__.py +++ b/src/betterproto2_compiler/lib/std/google/protobuf/compiler/__init__.py @@ -6,11 +6,12 @@ from dataclasses import dataclass from typing import List -import betterproto2_compiler +import betterproto + import betterproto2_compiler.lib.google.protobuf as betterproto_lib_google_protobuf -class CodeGeneratorResponseFeature(betterproto2_compiler.Enum): +class CodeGeneratorResponseFeature(betterproto.Enum): """Sync with code_generator.h.""" FEATURE_NONE = 0 @@ -19,13 +20,13 @@ class CodeGeneratorResponseFeature(betterproto2_compiler.Enum): @dataclass(eq=False, repr=False) -class Version(betterproto2_compiler.Message): +class Version(betterproto.Message): """The version number of protocol compiler.""" - major: int = betterproto2_compiler.int32_field(1) - minor: int = betterproto2_compiler.int32_field(2) - patch: int = betterproto2_compiler.int32_field(3) - suffix: str = betterproto2_compiler.string_field(4) + major: int = betterproto.int32_field(1) + minor: int = betterproto.int32_field(2) + patch: int = betterproto.int32_field(3) + suffix: str = betterproto.string_field(4) """ A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should be empty for mainline stable releases. @@ -33,20 +34,20 @@ class Version(betterproto2_compiler.Message): @dataclass(eq=False, repr=False) -class CodeGeneratorRequest(betterproto2_compiler.Message): +class CodeGeneratorRequest(betterproto.Message): """An encoded CodeGeneratorRequest is written to the plugin's stdin.""" - file_to_generate: List[str] = betterproto2_compiler.string_field(1, repeated=True) + file_to_generate: List[str] = betterproto.string_field(1, repeated=True) """ The .proto files that were explicitly listed on the command-line. The code generator should generate code only for these files. Each file's descriptor will be included in proto_file, below. """ - parameter: str = betterproto2_compiler.string_field(2) + parameter: str = betterproto.string_field(2) """The generator parameter passed on the command-line.""" - proto_file: List["betterproto_lib_google_protobuf.FileDescriptorProto"] = betterproto2_compiler.message_field( + proto_file: List["betterproto_lib_google_protobuf.FileDescriptorProto"] = betterproto.message_field( 15, repeated=True ) """ @@ -71,8 +72,8 @@ class CodeGeneratorRequest(betterproto2_compiler.Message): fully qualified. """ - source_file_descriptors: List["betterproto_lib_google_protobuf.FileDescriptorProto"] = ( - betterproto2_compiler.message_field(17, repeated=True) + source_file_descriptors: List["betterproto_lib_google_protobuf.FileDescriptorProto"] = betterproto.message_field( + 17, repeated=True ) """ File descriptors with all options, including source-retention options. @@ -80,15 +81,15 @@ class CodeGeneratorRequest(betterproto2_compiler.Message): files_to_generate. 
""" - compiler_version: "Version" = betterproto2_compiler.message_field(3) + compiler_version: "Version" = betterproto.message_field(3) """The version number of protocol compiler.""" @dataclass(eq=False, repr=False) -class CodeGeneratorResponse(betterproto2_compiler.Message): +class CodeGeneratorResponse(betterproto.Message): """The plugin writes an encoded CodeGeneratorResponse to stdout.""" - error: str = betterproto2_compiler.string_field(1) + error: str = betterproto.string_field(1) """ Error message. If non-empty, code generation failed. The plugin process should exit with status code zero even if it reports an error in this way. @@ -100,13 +101,13 @@ class CodeGeneratorResponse(betterproto2_compiler.Message): exiting with a non-zero status code. """ - supported_features: int = betterproto2_compiler.uint64_field(2) + supported_features: int = betterproto.uint64_field(2) """ A bitmask of supported features that the code generator supports. This is a bitwise "or" of values from the Feature enum. """ - minimum_edition: int = betterproto2_compiler.int32_field(3) + minimum_edition: int = betterproto.int32_field(3) """ The minimum edition this plugin supports. This will be treated as an Edition enum, but we want to allow unknown values. It should be specified @@ -114,7 +115,7 @@ class CodeGeneratorResponse(betterproto2_compiler.Message): effect for plugins that have FEATURE_SUPPORTS_EDITIONS set. """ - maximum_edition: int = betterproto2_compiler.int32_field(4) + maximum_edition: int = betterproto.int32_field(4) """ The maximum edition this plugin supports. This will be treated as an Edition enum, but we want to allow unknown values. It should be specified @@ -122,14 +123,14 @@ class CodeGeneratorResponse(betterproto2_compiler.Message): effect for plugins that have FEATURE_SUPPORTS_EDITIONS set. """ - file: List["CodeGeneratorResponseFile"] = betterproto2_compiler.message_field(15, repeated=True) + file: List["CodeGeneratorResponseFile"] = betterproto.message_field(15, repeated=True) @dataclass(eq=False, repr=False) -class CodeGeneratorResponseFile(betterproto2_compiler.Message): +class CodeGeneratorResponseFile(betterproto.Message): """Represents a single generated file.""" - name: str = betterproto2_compiler.string_field(1) + name: str = betterproto.string_field(1) """ The file name, relative to the output directory. The name must not contain "." or ".." components and must be relative, not be absolute (so, @@ -144,7 +145,7 @@ class CodeGeneratorResponseFile(betterproto2_compiler.Message): CodeGeneratorResponse before writing files to disk. """ - insertion_point: str = betterproto2_compiler.string_field(2) + insertion_point: str = betterproto.string_field(2) """ If non-empty, indicates that the named file should already exist, and the content here is to be inserted into that file at a defined insertion @@ -185,10 +186,10 @@ class CodeGeneratorResponseFile(betterproto2_compiler.Message): If |insertion_point| is present, |name| must also be present. """ - content: str = betterproto2_compiler.string_field(15) + content: str = betterproto.string_field(15) """The file contents.""" - generated_code_info: "betterproto_lib_google_protobuf.GeneratedCodeInfo" = betterproto2_compiler.message_field(16) + generated_code_info: "betterproto_lib_google_protobuf.GeneratedCodeInfo" = betterproto.message_field(16) """ Information describing the file content being inserted. 
If an insertion point is used, this information will be appropriately offset and inserted From 26bac324e5e5fcbdf1b0e6be6c01b618f194d45e Mon Sep 17 00:00:00 2001 From: Adrien Vannson Date: Fri, 20 Dec 2024 11:54:18 +0100 Subject: [PATCH 06/13] Update CI --- .github/workflows/ci.yml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6e5503fa..3a1858ea 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,7 +16,7 @@ jobs: fail-fast: false matrix: os: [Ubuntu, MacOS, Windows] - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python-version: ['3.10', '3.11', '3.12'] steps: - uses: actions/checkout@v4 @@ -54,12 +54,8 @@ jobs: - name: Install dependencies shell: bash - run: poetry install -E compiler + run: poetry install - name: Generate code from proto files shell: bash run: poetry run python -m tests.generate -v - - - name: Execute test suite - shell: bash - run: poetry run python -m pytest tests/ From b31f4e72751fa1caef52740fce6b93d7313774a0 Mon Sep 17 00:00:00 2001 From: Adrien Vannson Date: Fri, 20 Dec 2024 11:56:50 +0100 Subject: [PATCH 07/13] Fix Python version --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b96efb10..c81ae974 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -19,7 +19,7 @@ jobs: - name: Set up Python 3.10 uses: actions/setup-python@v4 with: - python-version: 3.10 + python-version: "3.10" - name: Install poetry run: python -m pip install poetry - name: Build package From 94af6a3bbe91a204c11b9de064889d324d69eca6 Mon Sep 17 00:00:00 2001 From: Adrien Vannson Date: Fri, 20 Dec 2024 12:22:43 +0100 Subject: [PATCH 08/13] Add compilation workflow --- .github/workflows/compilation.yml | 69 +++++++++++++++++++++++++++++++ 1 file changed, 69 insertions(+) create mode 100644 .github/workflows/compilation.yml diff --git a/.github/workflows/compilation.yml b/.github/workflows/compilation.yml new file mode 100644 index 00000000..03cf2b4d --- /dev/null +++ b/.github/workflows/compilation.yml @@ -0,0 +1,69 @@ +name: compilation + +on: + push: + branches: + - main + pull_request: # TODEL + branches: + - '**' + +permissions: + contents: write + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Configure Git Credentials + run: | + git config user.name github-actions[bot] + git config user.email 41898282+github-actions[bot]@users.noreply.github.com + + - name: Set up Python 3.12 + uses: actions/setup-python@v5 + with: + python-version: 3.12 + + - name: Install poetry + shell: bash + run: | + python -m pip install poetry + echo "$HOME/.poetry/bin" >> $GITHUB_PATH + + - name: Configure poetry + shell: bash + run: poetry config virtualenvs.in-project true + + - name: Set up cache + uses: actions/cache@v3 + id: cache + with: + path: .venv + key: venv-compilation-${{ hashFiles('**/poetry.lock') }} + + - name: Ensure cache is healthy + if: steps.cache.outputs.cache-hit == 'true' + shell: bash + run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv + + - name: Install dependencies + shell: bash + run: poetry install + + - name: Compile proto files + shell: bash + run: poetry run poe generate + + - name: Publish the compiled files + shell: bash + run: | + git checkout --orphan compiled-test-files + cp tests/output_betterproto tests_betterproto + cp tests/output_betterproto_pydantic 
tests_betterproto_pydantic + cp tests/output_reference tests_reference + git add tests_betterproto tests_betterproto_pydantic tests_reference + git commit -m "Add compilation output" + git push --force origin compiled-test-files From dbf4260157885e77b802913432d4fe9816e2cd39 Mon Sep 17 00:00:00 2001 From: Adrien Vannson Date: Fri, 20 Dec 2024 12:24:15 +0100 Subject: [PATCH 09/13] Fix cp command --- .github/workflows/compilation.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/compilation.yml b/.github/workflows/compilation.yml index 03cf2b4d..0725fc2d 100644 --- a/.github/workflows/compilation.yml +++ b/.github/workflows/compilation.yml @@ -1,9 +1,9 @@ -name: compilation +name: Test files compilation on: push: branches: - - main + - "**" # TODO main pull_request: # TODEL branches: - '**' @@ -12,7 +12,7 @@ permissions: contents: write jobs: - deploy: + compilation: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -61,9 +61,9 @@ jobs: shell: bash run: | git checkout --orphan compiled-test-files - cp tests/output_betterproto tests_betterproto - cp tests/output_betterproto_pydantic tests_betterproto_pydantic - cp tests/output_reference tests_reference + cp -r tests/output_betterproto tests_betterproto + cp -r tests/output_betterproto_pydantic tests_betterproto_pydantic + cp -r tests/output_reference tests_reference git add tests_betterproto tests_betterproto_pydantic tests_reference git commit -m "Add compilation output" git push --force origin compiled-test-files From 5b9c7e10f37fc807094748ceeaa0a66e243831a2 Mon Sep 17 00:00:00 2001 From: Adrien Vannson Date: Fri, 20 Dec 2024 12:28:18 +0100 Subject: [PATCH 10/13] Execute only on push to main --- .github/workflows/compilation.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/compilation.yml b/.github/workflows/compilation.yml index 0725fc2d..43cb4ef2 100644 --- a/.github/workflows/compilation.yml +++ b/.github/workflows/compilation.yml @@ -3,7 +3,7 @@ name: Test files compilation on: push: branches: - - "**" # TODO main + - main pull_request: # TODEL branches: - '**' From 085ad196f194f8084cc562a23f75c02df1f39bf6 Mon Sep 17 00:00:00 2001 From: Adrien Vannson Date: Fri, 20 Dec 2024 12:30:24 +0100 Subject: [PATCH 11/13] Reset git before adding generated files --- .github/workflows/compilation.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/compilation.yml b/.github/workflows/compilation.yml index 43cb4ef2..2cc98c2c 100644 --- a/.github/workflows/compilation.yml +++ b/.github/workflows/compilation.yml @@ -61,6 +61,7 @@ jobs: shell: bash run: | git checkout --orphan compiled-test-files + git reset --hard cp -r tests/output_betterproto tests_betterproto cp -r tests/output_betterproto_pydantic tests_betterproto_pydantic cp -r tests/output_reference tests_reference From 4223e9a1348c13989b488ea8abd5e9dd39b6e2ce Mon Sep 17 00:00:00 2001 From: Adrien Vannson Date: Fri, 20 Dec 2024 12:32:59 +0100 Subject: [PATCH 12/13] Don't compile on PRs --- .github/workflows/compilation.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/compilation.yml b/.github/workflows/compilation.yml index 2cc98c2c..7dcfb2de 100644 --- a/.github/workflows/compilation.yml +++ b/.github/workflows/compilation.yml @@ -4,9 +4,6 @@ on: push: branches: - main - pull_request: # TODEL - branches: - - '**' permissions: contents: write From a2d5f9a798a66fda0dc47a7b342fba03a2a7673e Mon Sep 17 00:00:00 2001 From: Adrien Vannson Date: Fri, 20 Dec 
2024 12:37:08 +0100 Subject: [PATCH 13/13] Update doc config --- mkdocs.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mkdocs.yml b/mkdocs.yml index 1c0e0b42..e7089370 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -1,5 +1,5 @@ -site_name: Betterproto2 -site_url: https://betterproto.github.io/python-betterproto2/ +site_name: Betterproto2 Compiler +site_url: https://betterproto.github.io/python-betterproto2-compiler/ theme: name: material palette:
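
The hunks above converge on a single pattern for the vendored well-known types and plugin messages: each generated class is a `@dataclass(eq=False, repr=False)` subclass of `betterproto.Message`, with fields declared through the runtime's `*_field(number)` helpers rather than the previous `betterproto2_compiler.*` aliases. A minimal sketch of that pattern, assuming the `betterproto` runtime package imported by these hunks is installed; the `Version` message mirrors the generated class in the compiler-plugin hunk, and the round-trip at the end assumes the usual betterproto `bytes()`/`parse()` API, which is not shown in this series:

    from dataclasses import dataclass

    import betterproto


    @dataclass(eq=False, repr=False)
    class Version(betterproto.Message):
        """Version number of the protocol compiler (mirrors the generated class above)."""

        major: int = betterproto.int32_field(1)
        minor: int = betterproto.int32_field(2)
        patch: int = betterproto.int32_field(3)
        suffix: str = betterproto.string_field(4)


    # Serialize to the protobuf wire format and parse it back (API assumed, see note above).
    payload = bytes(Version(major=27, minor=0, patch=1, suffix="rc2"))
    assert Version().parse(payload).suffix == "rc2"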