From 0d6ced5bc276221dec844bb8b3f1d2bb6464c9e7 Mon Sep 17 00:00:00 2001 From: James Date: Mon, 24 Aug 2020 14:04:24 +0100 Subject: [PATCH 01/46] Implement Message.__bool__ for #130 --- src/betterproto/__init__.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/betterproto/__init__.py b/src/betterproto/__init__.py index f956a5cd2..12e7af60d 100644 --- a/src/betterproto/__init__.py +++ b/src/betterproto/__init__.py @@ -554,6 +554,12 @@ def __setattr__(self, attr: str, value: Any) -> None: super().__setattr__(attr, value) + def __bool__(self) -> bool: + return any( + self.__raw_get(field_name) is not PLACEHOLDER + for field_name in self._betterproto.meta_by_field_name + ) + @property def _betterproto(self): """ From 7746b91aa267db15c0e2fa2ba5f02ffd4cd730f3 Mon Sep 17 00:00:00 2001 From: James Date: Mon, 24 Aug 2020 14:10:28 +0100 Subject: [PATCH 02/46] Add a test for it --- tests/test_features.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/tests/test_features.py b/tests/test_features.py index b5b381126..077f91896 100644 --- a/tests/test_features.py +++ b/tests/test_features.py @@ -317,3 +317,26 @@ def _round_trip_serialization(foo: Foo) -> Foo: == betterproto.which_one_of(_round_trip_serialization(foo3), "group1") == ("", None) ) + + +def test_bool(): + # evaluates similarly to a collection + # >>> bool(list()) + # False + # >>> bool([1]] + # True + # >>> bool([0]) + # True + + @dataclass + class Falsy(betterproto.Message): + pass + + @dataclass + class Truthy(betterproto.Message): + bar: int = betterproto.int32_field(1) + + assert not Falsy() + assert not Truthy() + assert Truthy(bar=1) + assert Truthy(bar=0) From 9da923d362f29d62543a50537b984ea66386040d Mon Sep 17 00:00:00 2001 From: James <50501825+Gobot1234@users.noreply.github.com> Date: Wed, 2 Sep 2020 14:37:58 +0100 Subject: [PATCH 03/46] Blacken --- tests/test_features.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/test_features.py b/tests/test_features.py index f93759372..09c00c5bd 100644 --- a/tests/test_features.py +++ b/tests/test_features.py @@ -319,7 +319,6 @@ def _round_trip_serialization(foo: Foo) -> Foo: ) - def test_bool(): # evaluates similarly to a collection # >>> bool(list()) From 17e31f4ff7d0267aa8b7e1aee7c2d242764bf507 Mon Sep 17 00:00:00 2001 From: James <50501825+Gobot1234@users.noreply.github.com> Date: Sat, 19 Sep 2020 01:16:01 +0100 Subject: [PATCH 04/46] Update tests --- tests/test_features.py | 53 ++++++++++++++++++++++++------------------ 1 file changed, 30 insertions(+), 23 deletions(-) diff --git a/tests/test_features.py b/tests/test_features.py index 09c00c5bd..ef33d8730 100644 --- a/tests/test_features.py +++ b/tests/test_features.py @@ -319,29 +319,6 @@ def _round_trip_serialization(foo: Foo) -> Foo: ) -def test_bool(): - # evaluates similarly to a collection - # >>> bool(list()) - # False - # >>> bool([1]] - # True - # >>> bool([0]) - # True - - @dataclass - class Falsy(betterproto.Message): - pass - - @dataclass - class Truthy(betterproto.Message): - bar: int = betterproto.int32_field(1) - - assert not Falsy() - assert not Truthy() - assert Truthy(bar=1) - assert Truthy(bar=0) - - def test_recursive_message(): from tests.output_betterproto.recursivemessage import Test as RecursiveMessage @@ -388,3 +365,33 @@ def test_message_repr(): assert repr(Test(name="Loki")) == "Test(name='Loki')" assert repr(Test(child=Test(), name="Loki")) == "Test(name='Loki', child=Test())" + + +def test_bool(): + """Messages should evaluate similarly to a collection + >>> 
test = []
+    >>> bool(test)
+    False
+    >>> test.append(1)
+    >>> bool(test)
+    True
+    >>> del test[0]
+    >>> bool(test)
+    False
+    """
+
+    @dataclass
+    class Falsy(betterproto.Message):
+        pass
+
+    @dataclass
+    class Truthy(betterproto.Message):
+        bar: int = betterproto.int32_field(1)
+
+    assert not Falsy()
+    t = Truthy()
+    assert not t
+    t.bar = 1
+    assert t
+    t.bar = 0
+    assert not t

From a53d805e1930381ec047eab4142b0e8564906e69 Mon Sep 17 00:00:00 2001
From: James <50501825+Gobot1234@users.noreply.github.com>
Date: Sat, 19 Sep 2020 01:16:34 +0100
Subject: [PATCH 05/46] Fix bool

---
 src/betterproto/__init__.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/src/betterproto/__init__.py b/src/betterproto/__init__.py
index 489f44927..90dec23db 100644
--- a/src/betterproto/__init__.py
+++ b/src/betterproto/__init__.py
@@ -591,10 +591,11 @@ def __setattr__(self, attr: str, value: Any) -> None:
         super().__setattr__(attr, value)

     def __bool__(self) -> bool:
-        return any(
-            self.__raw_get(field_name) is not PLACEHOLDER
-            for field_name in self._betterproto.meta_by_field_name
-        )
+        for field in dataclasses.fields(self):
+            value = self.__raw_get(field.name)
+            if self._get_field_default(field.name) != value:
+                return True
+        return False

     @property
     def _betterproto(self):

From b3b7c00d92147b449559e2fb952b28d854b17086 Mon Sep 17 00:00:00 2001
From: James <50501825+Gobot1234@users.noreply.github.com>
Date: Sat, 19 Sep 2020 01:20:26 +0100
Subject: [PATCH 06/46] Fix failing tests

---
 src/betterproto/__init__.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/betterproto/__init__.py b/src/betterproto/__init__.py
index 90dec23db..4834a0409 100644
--- a/src/betterproto/__init__.py
+++ b/src/betterproto/__init__.py
@@ -591,6 +591,8 @@ def __setattr__(self, attr: str, value: Any) -> None:
         super().__setattr__(attr, value)

     def __bool__(self) -> bool:
+        if not self._serialized_on_wire:
+            return False
         for field in dataclasses.fields(self):
             value = self.__raw_get(field.name)
             if self._get_field_default(field.name) != value:

From 18f22fa8bcc6e6675067a2e6648cf36e481b46e6 Mon Sep 17 00:00:00 2001
From: Nat Noordanus
Date: Sun, 18 Oct 2020 22:47:58 +0200
Subject: [PATCH 07/46] Make plugin use betterproto generated classes internally

This means the betterproto plugin no longer needs to depend directly on
protobuf. This requires a small runtime hack to monkey patch some google
types to get around the fact that the compiler uses proto2, but
betterproto expects proto3.

Also:
- regenerate google.protobuf package
- fix a regex bug in the logic for determining whether to use a google
  wrapper type.
- fix a bug causing comments to get mixed up when multiple proto files generate code into a single python module --- README.md | 4 +- src/betterproto/__init__.py | 14 +- .../lib/google/protobuf/__init__.py | 1047 ++++++++++------- .../lib/google/protobuf/compiler/__init__.py | 125 ++ src/betterproto/plugin/main.py | 17 +- src/betterproto/plugin/models.py | 192 +-- src/betterproto/plugin/parser.py | 87 +- tests/inputs/example/example.proto | 911 +++++++++++++- tests/inputs/oneof/oneof-name.json | 2 +- tests/inputs/oneof/oneof.json | 2 +- tests/inputs/oneof/oneof.proto | 12 +- tests/inputs/oneof/test_oneof.py | 4 +- 12 files changed, 1838 insertions(+), 579 deletions(-) create mode 100644 src/betterproto/lib/google/protobuf/compiler/__init__.py mode change 100644 => 100755 src/betterproto/plugin/main.py diff --git a/README.md b/README.md index c70aca7ee..5e9d9c345 100644 --- a/README.md +++ b/README.md @@ -413,9 +413,9 @@ Assuming your `google.protobuf` source files (included with all releases of `pro ```sh protoc \ - --plugin=protoc-gen-custom=betterproto/plugin.py \ + --plugin=protoc-gen-custom=src/betterproto/plugin/main.py \ --custom_opt=INCLUDE_GOOGLE \ - --custom_out=betterproto/lib \ + --custom_out=src/betterproto/lib \ -I /usr/local/include/ \ /usr/local/include/google/protobuf/*.proto ``` diff --git a/src/betterproto/__init__.py b/src/betterproto/__init__.py index b90dd0550..230f1fbf9 100644 --- a/src/betterproto/__init__.py +++ b/src/betterproto/__init__.py @@ -1149,6 +1149,7 @@ def which_one_of(message: Message, group_name: str) -> Tuple[str, Optional[Any]] BytesValue, DoubleValue, Duration, + EnumValue, FloatValue, Int32Value, Int64Value, @@ -1215,14 +1216,17 @@ def from_dict(self: T, value: Any) -> T: def _get_wrapper(proto_type: str) -> Type: """Get the wrapper message class for a wrapped type.""" + + # TODO: include ListValue and NullValue? return { TYPE_BOOL: BoolValue, + TYPE_BYTES: BytesValue, + TYPE_DOUBLE: DoubleValue, + TYPE_FLOAT: FloatValue, + TYPE_ENUM: EnumValue, TYPE_INT32: Int32Value, - TYPE_UINT32: UInt32Value, TYPE_INT64: Int64Value, - TYPE_UINT64: UInt64Value, - TYPE_FLOAT: FloatValue, - TYPE_DOUBLE: DoubleValue, TYPE_STRING: StringValue, - TYPE_BYTES: BytesValue, + TYPE_UINT32: UInt32Value, + TYPE_UINT64: UInt64Value, }[proto_type] diff --git a/src/betterproto/lib/google/protobuf/__init__.py b/src/betterproto/lib/google/protobuf/__init__.py index 936d17529..d1f21f947 100644 --- a/src/betterproto/lib/google/protobuf/__init__.py +++ b/src/betterproto/lib/google/protobuf/__init__.py @@ -1,6 +1,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
-# sources: google/protobuf/any.proto, google/protobuf/source_context.proto, google/protobuf/type.proto, google/protobuf/api.proto, google/protobuf/descriptor.proto, google/protobuf/duration.proto, google/protobuf/empty.proto, google/protobuf/field_mask.proto, google/protobuf/struct.proto, google/protobuf/timestamp.proto, google/protobuf/wrappers.proto +# sources: google/protobuf/timestamp.proto, google/protobuf/field_mask.proto, google/protobuf/source_context.proto, google/protobuf/any.proto, google/protobuf/type.proto, google/protobuf/api.proto, google/protobuf/duration.proto, google/protobuf/struct.proto, google/protobuf/wrappers.proto, google/protobuf/empty.proto, google/protobuf/descriptor.proto # plugin: python-betterproto +import warnings from dataclasses import dataclass from typing import Dict, List @@ -45,6 +46,17 @@ class FieldCardinality(betterproto.Enum): CARDINALITY_REPEATED = 3 +class NullValue(betterproto.Enum): + """ + `NullValue` is a singleton enumeration to represent the null value for the + `Value` type union. The JSON representation for `NullValue` is JSON + `null`. + """ + + # Null value. + NULL_VALUE = 0 + + class FieldDescriptorProtoType(betterproto.Enum): TYPE_DOUBLE = 1 TYPE_FLOAT = 2 @@ -96,18 +108,177 @@ class MethodOptionsIdempotencyLevel(betterproto.Enum): IDEMPOTENT = 2 -class NullValue(betterproto.Enum): +@dataclass(eq=False, repr=False) +class Timestamp(betterproto.Message): """ - `NullValue` is a singleton enumeration to represent the null value for the - `Value` type union. The JSON representation for `NullValue` is JSON - `null`. + A Timestamp represents a point in time independent of any time zone or + local calendar, encoded as a count of seconds and fractions of seconds at + nanosecond resolution. The count is relative to an epoch at UTC midnight on + January 1, 1970, in the proleptic Gregorian calendar which extends the + Gregorian calendar backwards to year one. All minutes are 60 seconds long. + Leap seconds are "smeared" so that no leap second table is needed for + interpretation, using a [24-hour linear + smear](https://developers.google.com/time/smear). The range is from + 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By restricting to + that range, we ensure that we can convert to and from [RFC + 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. # Examples + Example 1: Compute Timestamp from POSIX `time()`. Timestamp timestamp; + timestamp.set_seconds(time(NULL)); timestamp.set_nanos(0); Example 2: + Compute Timestamp from POSIX `gettimeofday()`. struct timeval tv; + gettimeofday(&tv, NULL); Timestamp timestamp; + timestamp.set_seconds(tv.tv_sec); timestamp.set_nanos(tv.tv_usec * + 1000); Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + FILETIME ft; GetSystemTimeAsFileTime(&ft); UINT64 ticks = + (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; // A Windows + tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z // is + 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. Timestamp + timestamp; timestamp.set_seconds((INT64) ((ticks / 10000000) - + 11644473600LL)); timestamp.set_nanos((INT32) ((ticks % 10000000) * + 100)); Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + long millis = System.currentTimeMillis(); Timestamp timestamp = + Timestamp.newBuilder().setSeconds(millis / 1000) .setNanos((int) + ((millis % 1000) * 1000000)).build(); Example 5: Compute Timestamp from + current time in Python. 
timestamp = Timestamp() + timestamp.GetCurrentTime() # JSON Mapping In JSON format, the Timestamp + type is encoded as a string in the [RFC + 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the format is + "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" where {year} is + always expressed using four digits while {month}, {day}, {hour}, {min}, and + {sec} are zero-padded to two digits each. The fractional seconds, which can + go up to 9 digits (i.e. up to 1 nanosecond resolution), are optional. The + "Z" suffix indicates the timezone ("UTC"); the timezone is required. A + proto3 JSON serializer should always use UTC (as indicated by "Z") when + printing the Timestamp type and a proto3 JSON parser should be able to + accept both UTC and other timezones (as indicated by an offset). For + example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past 01:30 UTC on + January 15, 2017. In JavaScript, one can convert a Date object to this + format using the standard [toISOString()](https://developer.mozilla.org/en- + US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) method. + In Python, a standard `datetime.datetime` object can be converted to this + format using + [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) + with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one + can use the Joda Time's [`ISODateTimeFormat.dateTime()`]( + http://www.joda.org/joda- + time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D ) + to obtain a formatter capable of generating timestamps in this format. """ - # Null value. - NULL_VALUE = 0 + # Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must + # be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. + seconds: int = betterproto.int64_field(1) + # Non-negative fractions of a second at nanosecond resolution. Negative + # second values with fractions must still have non-negative nanos values that + # count forward in time. Must be from 0 to 999,999,999 inclusive. + nanos: int = betterproto.int32_field(2) + + def __post_init__(self) -> None: + super().__post_init__() + + +@dataclass(eq=False, repr=False) +class FieldMask(betterproto.Message): + """ + `FieldMask` represents a set of symbolic field paths, for example: + paths: "f.a" paths: "f.b.d" Here `f` represents a field in some root + message, `a` and `b` fields in the message found in `f`, and `d` a field + found in the message in `f.b`. Field masks are used to specify a subset of + fields that should be returned by a get operation or modified by an update + operation. Field masks also have a custom JSON encoding (see below). # + Field Masks in Projections When used in the context of a projection, a + response message or sub-message is filtered by the API to only contain + those fields as specified in the mask. For example, if the mask in the + previous example is applied to a response message as follows: f { + a : 22 b { d : 1 x : 2 } y : 13 } + z: 8 The result will not contain specific values for fields x,y and z + (their value will be set to the default, and omitted in proto text output): + f { a : 22 b { d : 1 } } A repeated field is + not allowed except at the last position of a paths string. If a FieldMask + object is not present in a get operation, the operation applies to all + fields (as if a FieldMask of all fields had been specified). Note that a + field mask does not necessarily apply to the top-level response message. 
In + case of a REST get operation, the field mask applies directly to the + response, but in case of a REST list operation, the mask instead applies to + each individual message in the returned resource list. In case of a REST + custom method, other definitions may be used. Where the mask applies will + be clearly documented together with its declaration in the API. In any + case, the effect on the returned resource/resources is required behavior + for APIs. # Field Masks in Update Operations A field mask in update + operations specifies which fields of the targeted resource are going to be + updated. The API is required to only change the values of the fields as + specified in the mask and leave the others untouched. If a resource is + passed in to describe the updated values, the API ignores the values of all + fields not covered by the mask. If a repeated field is specified for an + update operation, new values will be appended to the existing repeated + field in the target resource. Note that a repeated field is only allowed in + the last position of a `paths` string. If a sub-message is specified in the + last position of the field mask for an update operation, then new value + will be merged into the existing sub-message in the target resource. For + example, given the target message: f { b { d: 1 + x: 2 } c: [1] } And an update message: f { b { + d: 10 } c: [2] } then if the field mask is: paths: ["f.b", + "f.c"] then the result will be: f { b { d: 10 x: + 2 } c: [1, 2] } An implementation may provide options to + override this default behavior for repeated and message fields. In order to + reset a field's value to the default, the field must be in the mask and set + to the default value in the provided resource. Hence, in order to reset all + fields of a resource, provide a default instance of the resource and set + all fields in the mask, or do not provide a mask as described below. If a + field mask is not present on update, the operation applies to all fields + (as if a field mask of all fields has been specified). Note that in the + presence of schema evolution, this may mean that fields the client does not + know and has therefore not filled into the request will be reset to their + default. If this is unwanted behavior, a specific service may require a + client to always specify a field mask, producing an error if not. As with + get operations, the location of the resource which describes the updated + values in the request message depends on the operation kind. In any case, + the effect of the field mask is required to be honored by the API. ## + Considerations for HTTP REST The HTTP kind of an update operation which + uses a field mask must be set to PATCH instead of PUT in order to satisfy + HTTP semantics (PUT must only be used for full updates). # JSON Encoding of + Field Masks In JSON, a field mask is encoded as a single string where paths + are separated by a comma. Fields name in each path are converted to/from + lower-camel naming conventions. As an example, consider the following + message declarations: message Profile { User user = 1; + Photo photo = 2; } message User { string display_name = 1; + string address = 2; } In proto a field mask for `Profile` may look as + such: mask { paths: "user.display_name" paths: "photo" + } In JSON, the same mask is represented as below: { mask: + "user.displayName,photo" } # Field Masks and Oneof Fields Field masks + treat fields in oneofs just as regular fields. 
Consider the following + message: message SampleMessage { oneof test_oneof { + string name = 4; SubMessage sub_message = 9; } } The + field mask can be: mask { paths: "name" } Or: mask { + paths: "sub_message" } Note that oneof type names ("test_oneof" in this + case) cannot be used in paths. ## Field Mask Verification The + implementation of any API method which has a FieldMask type field in the + request should verify the included field paths, and return an + `INVALID_ARGUMENT` error if any path is unmappable. + """ + + # The set of field mask paths. + paths: List[str] = betterproto.string_field(1) + def __post_init__(self) -> None: + super().__post_init__() -@dataclass + +@dataclass(eq=False, repr=False) +class SourceContext(betterproto.Message): + """ + `SourceContext` represents information about the source of a protobuf + element, like the file in which it is defined. + """ + + # The path-qualified name of the .proto file that contained the associated + # protobuf element. For example: `"google/protobuf/source_context.proto"`. + file_name: str = betterproto.string_field(1) + + def __post_init__(self) -> None: + super().__post_init__() + + +@dataclass(eq=False, repr=False) class Any(betterproto.Message): """ `Any` contains an arbitrary serialized protocol buffer message along with a @@ -164,20 +335,11 @@ class Any(betterproto.Message): # Must be a valid serialized protocol buffer of the above specified type. value: bytes = betterproto.bytes_field(2) - -@dataclass -class SourceContext(betterproto.Message): - """ - `SourceContext` represents information about the source of a protobuf - element, like the file in which it is defined. - """ - - # The path-qualified name of the .proto file that contained the associated - # protobuf element. For example: `"google/protobuf/source_context.proto"`. - file_name: str = betterproto.string_field(1) + def __post_init__(self) -> None: + super().__post_init__() -@dataclass +@dataclass(eq=False, repr=False) class Type(betterproto.Message): """A protocol buffer message type.""" @@ -194,8 +356,11 @@ class Type(betterproto.Message): # The source syntax. syntax: "Syntax" = betterproto.enum_field(6) + def __post_init__(self) -> None: + super().__post_init__() + -@dataclass +@dataclass(eq=False, repr=False) class Field(betterproto.Message): """A single field of a message type.""" @@ -222,8 +387,11 @@ class Field(betterproto.Message): # The string value of the default value of this field. Proto2 syntax only. default_value: str = betterproto.string_field(11) + def __post_init__(self) -> None: + super().__post_init__() -@dataclass + +@dataclass(eq=False, repr=False) class Enum(betterproto.Message): """Enum type definition.""" @@ -240,8 +408,11 @@ class Enum(betterproto.Message): # The source syntax. syntax: "Syntax" = betterproto.enum_field(5) + def __post_init__(self) -> None: + super().__post_init__() + -@dataclass +@dataclass(eq=False, repr=False) class EnumValue(betterproto.Message): """Enum value definition.""" @@ -252,8 +423,11 @@ class EnumValue(betterproto.Message): # Protocol buffer options. options: List["Option"] = betterproto.message_field(3) + def __post_init__(self) -> None: + super().__post_init__() -@dataclass + +@dataclass(eq=False, repr=False) class Option(betterproto.Message): """ A protocol buffer option, which can be attached to a message, field, @@ -271,8 +445,11 @@ class Option(betterproto.Message): # value using the google.protobuf.Int32Value type. 
value: "Any" = betterproto.message_field(2) + def __post_init__(self) -> None: + super().__post_init__() + -@dataclass +@dataclass(eq=False, repr=False) class Api(betterproto.Message): """ Api is a light-weight descriptor for an API Interface. Interfaces are also @@ -314,8 +491,11 @@ class Api(betterproto.Message): # The source syntax of the service. syntax: "Syntax" = betterproto.enum_field(7) + def __post_init__(self) -> None: + super().__post_init__() -@dataclass + +@dataclass(eq=False, repr=False) class Method(betterproto.Message): """Method represents a method of an API interface.""" @@ -334,8 +514,11 @@ class Method(betterproto.Message): # The source syntax of this method. syntax: "Syntax" = betterproto.enum_field(7) + def __post_init__(self) -> None: + super().__post_init__() + -@dataclass +@dataclass(eq=False, repr=False) class Mixin(betterproto.Message): """ Declares an API Interface to be included in this interface. The including @@ -379,47 +562,307 @@ class Mixin(betterproto.Message): # If non-empty specifies a path under which inherited HTTP paths are rooted. root: str = betterproto.string_field(2) + def __post_init__(self) -> None: + super().__post_init__() -@dataclass -class FileDescriptorSet(betterproto.Message): + +@dataclass(eq=False, repr=False) +class Duration(betterproto.Message): """ - The protocol compiler can output a FileDescriptorSet containing the .proto - files it parses. + A Duration represents a signed, fixed-length span of time represented as a + count of seconds and fractions of seconds at nanosecond resolution. It is + independent of any calendar and concepts like "day" or "month". It is + related to Timestamp in that the difference between two Timestamp values is + a Duration and it can be added or subtracted from a Timestamp. Range is + approximately +-10,000 years. # Examples Example 1: Compute Duration from + two Timestamps in pseudo code. Timestamp start = ...; Timestamp end + = ...; Duration duration = ...; duration.seconds = end.seconds - + start.seconds; duration.nanos = end.nanos - start.nanos; if + (duration.seconds < 0 && duration.nanos > 0) { duration.seconds += 1; + duration.nanos -= 1000000000; } else if (duration.seconds > 0 && + duration.nanos < 0) { duration.seconds -= 1; duration.nanos += + 1000000000; } Example 2: Compute Timestamp from Timestamp + Duration in + pseudo code. Timestamp start = ...; Duration duration = ...; + Timestamp end = ...; end.seconds = start.seconds + duration.seconds; + end.nanos = start.nanos + duration.nanos; if (end.nanos < 0) { + end.seconds -= 1; end.nanos += 1000000000; } else if (end.nanos + >= 1000000000) { end.seconds += 1; end.nanos -= 1000000000; + } Example 3: Compute Duration from datetime.timedelta in Python. td = + datetime.timedelta(days=3, minutes=10) duration = Duration() + duration.FromTimedelta(td) # JSON Mapping In JSON format, the Duration type + is encoded as a string rather than an object, where the string ends in the + suffix "s" (indicating seconds) and is preceded by the number of seconds, + with nanoseconds expressed as fractional seconds. For example, 3 seconds + with 0 nanoseconds should be encoded in JSON format as "3s", while 3 + seconds and 1 nanosecond should be expressed in JSON format as + "3.000000001s", and 3 seconds and 1 microsecond should be expressed in JSON + format as "3.000001s". """ - file: List["FileDescriptorProto"] = betterproto.message_field(1) + # Signed seconds of the span of time. Must be from -315,576,000,000 to + # +315,576,000,000 inclusive. 
Note: these bounds are computed from: 60 + # sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + seconds: int = betterproto.int64_field(1) + # Signed fractions of a second at nanosecond resolution of the span of time. + # Durations less than one second are represented with a 0 `seconds` field and + # a positive or negative `nanos` field. For durations of one second or more, + # a non-zero value for the `nanos` field must be of the same sign as the + # `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive. + nanos: int = betterproto.int32_field(2) + def __post_init__(self) -> None: + super().__post_init__() -@dataclass -class FileDescriptorProto(betterproto.Message): - """Describes a complete .proto file.""" - name: str = betterproto.string_field(1) - package: str = betterproto.string_field(2) - # Names of files imported by this file. - dependency: List[str] = betterproto.string_field(3) - # Indexes of the public imported files in the dependency list above. - public_dependency: List[int] = betterproto.int32_field(10) - # Indexes of the weak imported files in the dependency list. For Google- - # internal migration only. Do not use. - weak_dependency: List[int] = betterproto.int32_field(11) - # All top-level definitions in this file. - message_type: List["DescriptorProto"] = betterproto.message_field(4) - enum_type: List["EnumDescriptorProto"] = betterproto.message_field(5) - service: List["ServiceDescriptorProto"] = betterproto.message_field(6) - extension: List["FieldDescriptorProto"] = betterproto.message_field(7) - options: "FileOptions" = betterproto.message_field(8) - # This field contains optional information about the original source code. - # You may safely remove this entire field without harming runtime - # functionality of the descriptors -- the information is needed only by - # development tools. - source_code_info: "SourceCodeInfo" = betterproto.message_field(9) +@dataclass(eq=False, repr=False) +class Struct(betterproto.Message): + """ + `Struct` represents a structured data value, consisting of fields which map + to dynamically typed values. In some languages, `Struct` might be supported + by a native representation. For example, in scripting languages like JS a + struct is represented as an object. The details of that representation are + described together with the proto support for the language. The JSON + representation for `Struct` is JSON object. + """ + + # Unordered map of dynamically typed values. + fields: Dict[str, "Value"] = betterproto.map_field( + 1, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE + ) + + def __post_init__(self) -> None: + super().__post_init__() + + +@dataclass(eq=False, repr=False) +class Value(betterproto.Message): + """ + `Value` represents a dynamically typed value which can be either null, a + number, a string, a boolean, a recursive struct value, or a list of values. + A producer of value is expected to set one of that variants, absence of any + variant indicates an error. The JSON representation for `Value` is JSON + value. + """ + + # Represents a null value. + null_value: "NullValue" = betterproto.enum_field(1, group="kind") + # Represents a double value. + number_value: float = betterproto.double_field(2, group="kind") + # Represents a string value. + string_value: str = betterproto.string_field(3, group="kind") + # Represents a boolean value. + bool_value: bool = betterproto.bool_field(4, group="kind") + # Represents a structured value. 
+ struct_value: "Struct" = betterproto.message_field(5, group="kind") + # Represents a repeated `Value`. + list_value: "ListValue" = betterproto.message_field(6, group="kind") + + def __post_init__(self) -> None: + super().__post_init__() + + +@dataclass(eq=False, repr=False) +class ListValue(betterproto.Message): + """ + `ListValue` is a wrapper around a repeated field of values. The JSON + representation for `ListValue` is JSON array. + """ + + # Repeated field of dynamically typed values. + values: List["Value"] = betterproto.message_field(1) + + def __post_init__(self) -> None: + super().__post_init__() + + +@dataclass(eq=False, repr=False) +class DoubleValue(betterproto.Message): + """ + Wrapper message for `double`. The JSON representation for `DoubleValue` is + JSON number. + """ + + # The double value. + value: float = betterproto.double_field(1) + + def __post_init__(self) -> None: + super().__post_init__() + + +@dataclass(eq=False, repr=False) +class FloatValue(betterproto.Message): + """ + Wrapper message for `float`. The JSON representation for `FloatValue` is + JSON number. + """ + + # The float value. + value: float = betterproto.float_field(1) + + def __post_init__(self) -> None: + super().__post_init__() + + +@dataclass(eq=False, repr=False) +class Int64Value(betterproto.Message): + """ + Wrapper message for `int64`. The JSON representation for `Int64Value` is + JSON string. + """ + + # The int64 value. + value: int = betterproto.int64_field(1) + + def __post_init__(self) -> None: + super().__post_init__() + + +@dataclass(eq=False, repr=False) +class UInt64Value(betterproto.Message): + """ + Wrapper message for `uint64`. The JSON representation for `UInt64Value` is + JSON string. + """ + + # The uint64 value. + value: int = betterproto.uint64_field(1) + + def __post_init__(self) -> None: + super().__post_init__() + + +@dataclass(eq=False, repr=False) +class Int32Value(betterproto.Message): + """ + Wrapper message for `int32`. The JSON representation for `Int32Value` is + JSON number. + """ + + # The int32 value. + value: int = betterproto.int32_field(1) + + def __post_init__(self) -> None: + super().__post_init__() + + +@dataclass(eq=False, repr=False) +class UInt32Value(betterproto.Message): + """ + Wrapper message for `uint32`. The JSON representation for `UInt32Value` is + JSON number. + """ + + # The uint32 value. + value: int = betterproto.uint32_field(1) + + def __post_init__(self) -> None: + super().__post_init__() + + +@dataclass(eq=False, repr=False) +class BoolValue(betterproto.Message): + """ + Wrapper message for `bool`. The JSON representation for `BoolValue` is JSON + `true` and `false`. + """ + + # The bool value. + value: bool = betterproto.bool_field(1) + + def __post_init__(self) -> None: + super().__post_init__() + + +@dataclass(eq=False, repr=False) +class StringValue(betterproto.Message): + """ + Wrapper message for `string`. The JSON representation for `StringValue` is + JSON string. + """ + + # The string value. + value: str = betterproto.string_field(1) + + def __post_init__(self) -> None: + super().__post_init__() + + +@dataclass(eq=False, repr=False) +class BytesValue(betterproto.Message): + """ + Wrapper message for `bytes`. The JSON representation for `BytesValue` is + JSON string. + """ + + # The bytes value. 
+ value: bytes = betterproto.bytes_field(1) + + def __post_init__(self) -> None: + super().__post_init__() + + +@dataclass(eq=False, repr=False) +class Empty(betterproto.Message): + """ + A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to use it as the request + or the response type of an API method. For instance: service Foo { + rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The + JSON representation for `Empty` is empty JSON object `{}`. + """ + + pass + + def __post_init__(self) -> None: + super().__post_init__() + + +@dataclass(eq=False, repr=False) +class FileDescriptorSet(betterproto.Message): + """ + The protocol compiler can output a FileDescriptorSet containing the .proto + files it parses. + """ + + file: List["FileDescriptorProto"] = betterproto.message_field(1) + + def __post_init__(self) -> None: + super().__post_init__() + + +@dataclass(eq=False, repr=False) +class FileDescriptorProto(betterproto.Message): + """Describes a complete .proto file.""" + + name: str = betterproto.string_field(1) + package: str = betterproto.string_field(2) + # Names of files imported by this file. + dependency: List[str] = betterproto.string_field(3) + # Indexes of the public imported files in the dependency list above. + public_dependency: List[int] = betterproto.int32_field(10) + # Indexes of the weak imported files in the dependency list. For Google- + # internal migration only. Do not use. + weak_dependency: List[int] = betterproto.int32_field(11) + # All top-level definitions in this file. + message_type: List["DescriptorProto"] = betterproto.message_field(4) + enum_type: List["EnumDescriptorProto"] = betterproto.message_field(5) + service: List["ServiceDescriptorProto"] = betterproto.message_field(6) + extension: List["FieldDescriptorProto"] = betterproto.message_field(7) + options: "FileOptions" = betterproto.message_field(8) + # This field contains optional information about the original source code. + # You may safely remove this entire field without harming runtime + # functionality of the descriptors -- the information is needed only by + # development tools. + source_code_info: "SourceCodeInfo" = betterproto.message_field(9) # The syntax of the proto file. The supported values are "proto2" and # "proto3". syntax: str = betterproto.string_field(12) + def __post_init__(self) -> None: + super().__post_init__() + -@dataclass +@dataclass(eq=False, repr=False) class DescriptorProto(betterproto.Message): """Describes a message type.""" @@ -438,15 +881,21 @@ class DescriptorProto(betterproto.Message): # A given name may only be reserved once. reserved_name: List[str] = betterproto.string_field(10) + def __post_init__(self) -> None: + super().__post_init__() -@dataclass + +@dataclass(eq=False, repr=False) class DescriptorProtoExtensionRange(betterproto.Message): start: int = betterproto.int32_field(1) end: int = betterproto.int32_field(2) options: "ExtensionRangeOptions" = betterproto.message_field(3) + def __post_init__(self) -> None: + super().__post_init__() + -@dataclass +@dataclass(eq=False, repr=False) class DescriptorProtoReservedRange(betterproto.Message): """ Range of reserved tag numbers. 
Reserved tag numbers may not be used by @@ -457,14 +906,20 @@ class DescriptorProtoReservedRange(betterproto.Message): start: int = betterproto.int32_field(1) end: int = betterproto.int32_field(2) + def __post_init__(self) -> None: + super().__post_init__() -@dataclass + +@dataclass(eq=False, repr=False) class ExtensionRangeOptions(betterproto.Message): # The parser stores options it doesn't recognize here. See above. uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + def __post_init__(self) -> None: + super().__post_init__() + -@dataclass +@dataclass(eq=False, repr=False) class FieldDescriptorProto(betterproto.Message): """Describes a field within a message.""" @@ -497,23 +952,27 @@ class FieldDescriptorProto(betterproto.Message): json_name: str = betterproto.string_field(10) options: "FieldOptions" = betterproto.message_field(8) + def __post_init__(self) -> None: + super().__post_init__() -@dataclass + +@dataclass(eq=False, repr=False) class OneofDescriptorProto(betterproto.Message): """Describes a oneof.""" name: str = betterproto.string_field(1) options: "OneofOptions" = betterproto.message_field(2) + def __post_init__(self) -> None: + super().__post_init__() + -@dataclass +@dataclass(eq=False, repr=False) class EnumDescriptorProto(betterproto.Message): """Describes an enum type.""" name: str = betterproto.string_field(1) - value: List["EnumValueDescriptorProto"] = betterproto.message_field( - 2, wraps=betterproto.TYPE_ENUM - ) + value: List["EnumValueDescriptorProto"] = betterproto.message_field(2) options: "EnumOptions" = betterproto.message_field(3) # Range of reserved numeric values. Reserved numeric values may not be used # by enum values in the same enum declaration. Reserved ranges may not @@ -525,8 +984,11 @@ class EnumDescriptorProto(betterproto.Message): # be reserved once. reserved_name: List[str] = betterproto.string_field(5) + def __post_init__(self) -> None: + super().__post_init__() -@dataclass + +@dataclass(eq=False, repr=False) class EnumDescriptorProtoEnumReservedRange(betterproto.Message): """ Range of reserved numeric values. 
Reserved values may not be used by @@ -538,19 +1000,23 @@ class EnumDescriptorProtoEnumReservedRange(betterproto.Message): start: int = betterproto.int32_field(1) end: int = betterproto.int32_field(2) + def __post_init__(self) -> None: + super().__post_init__() + -@dataclass +@dataclass(eq=False, repr=False) class EnumValueDescriptorProto(betterproto.Message): """Describes a value within an enum.""" name: str = betterproto.string_field(1) number: int = betterproto.int32_field(2) - options: "EnumValueOptions" = betterproto.message_field( - 3, wraps=betterproto.TYPE_ENUM - ) + options: "EnumValueOptions" = betterproto.message_field(3) + + def __post_init__(self) -> None: + super().__post_init__() -@dataclass +@dataclass(eq=False, repr=False) class ServiceDescriptorProto(betterproto.Message): """Describes a service.""" @@ -558,8 +1024,11 @@ class ServiceDescriptorProto(betterproto.Message): method: List["MethodDescriptorProto"] = betterproto.message_field(2) options: "ServiceOptions" = betterproto.message_field(3) + def __post_init__(self) -> None: + super().__post_init__() -@dataclass + +@dataclass(eq=False, repr=False) class MethodDescriptorProto(betterproto.Message): """Describes a method of a service.""" @@ -574,8 +1043,11 @@ class MethodDescriptorProto(betterproto.Message): # Identifies if server streams multiple server messages server_streaming: bool = betterproto.bool_field(6) + def __post_init__(self) -> None: + super().__post_init__() + -@dataclass +@dataclass(eq=False, repr=False) class FileOptions(betterproto.Message): # Sets the Java package where classes generated from this .proto will be # placed. By default, the proto package is used, but this is often @@ -657,8 +1129,16 @@ class FileOptions(betterproto.Message): # for the "Options" section above. uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + def __post_init__(self) -> None: + super().__post_init__() + if self.java_generate_equals_and_hash: + warnings.warn( + "FileOptions.java_generate_equals_and_hash is deprecated", + DeprecationWarning, + ) -@dataclass + +@dataclass(eq=False, repr=False) class MessageOptions(betterproto.Message): # Set true to use the old proto1 MessageSet wire format for extensions. This # is provided for backwards-compatibility with the MessageSet wire format. @@ -694,8 +1174,11 @@ class MessageOptions(betterproto.Message): # The parser stores options it doesn't recognize here. See above. uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + def __post_init__(self) -> None: + super().__post_init__() + -@dataclass +@dataclass(eq=False, repr=False) class FieldOptions(betterproto.Message): # The ctype option instructs the C++ code generator to use a different # representation of the field than it normally would. See the specific @@ -751,14 +1234,20 @@ class FieldOptions(betterproto.Message): # The parser stores options it doesn't recognize here. See above. uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + def __post_init__(self) -> None: + super().__post_init__() -@dataclass + +@dataclass(eq=False, repr=False) class OneofOptions(betterproto.Message): # The parser stores options it doesn't recognize here. See above. 
uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + def __post_init__(self) -> None: + super().__post_init__() + -@dataclass +@dataclass(eq=False, repr=False) class EnumOptions(betterproto.Message): # Set this option to true to allow mapping different tag names to the same # value. @@ -770,8 +1259,11 @@ class EnumOptions(betterproto.Message): # The parser stores options it doesn't recognize here. See above. uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + def __post_init__(self) -> None: + super().__post_init__() -@dataclass + +@dataclass(eq=False, repr=False) class EnumValueOptions(betterproto.Message): # Is this enum value deprecated? Depending on the target platform, this can # emit Deprecated annotations for the enum value, or it will be completely @@ -781,8 +1273,11 @@ class EnumValueOptions(betterproto.Message): # The parser stores options it doesn't recognize here. See above. uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + def __post_init__(self) -> None: + super().__post_init__() + -@dataclass +@dataclass(eq=False, repr=False) class ServiceOptions(betterproto.Message): # Is this service deprecated? Depending on the target platform, this can emit # Deprecated annotations for the service, or it will be completely ignored; @@ -791,8 +1286,11 @@ class ServiceOptions(betterproto.Message): # The parser stores options it doesn't recognize here. See above. uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + def __post_init__(self) -> None: + super().__post_init__() -@dataclass + +@dataclass(eq=False, repr=False) class MethodOptions(betterproto.Message): # Is this method deprecated? Depending on the target platform, this can emit # Deprecated annotations for the method, or it will be completely ignored; in @@ -802,8 +1300,11 @@ class MethodOptions(betterproto.Message): # The parser stores options it doesn't recognize here. See above. uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + def __post_init__(self) -> None: + super().__post_init__() + -@dataclass +@dataclass(eq=False, repr=False) class UninterpretedOption(betterproto.Message): """ A message representing a option the parser does not recognize. This only @@ -824,8 +1325,11 @@ class UninterpretedOption(betterproto.Message): string_value: bytes = betterproto.bytes_field(7) aggregate_value: str = betterproto.string_field(8) + def __post_init__(self) -> None: + super().__post_init__() -@dataclass + +@dataclass(eq=False, repr=False) class UninterpretedOptionNamePart(betterproto.Message): """ The name of the uninterpreted option. Each string represents a segment in @@ -838,8 +1342,11 @@ class UninterpretedOptionNamePart(betterproto.Message): name_part: str = betterproto.string_field(1) is_extension: bool = betterproto.bool_field(2) + def __post_init__(self) -> None: + super().__post_init__() + -@dataclass +@dataclass(eq=False, repr=False) class SourceCodeInfo(betterproto.Message): """ Encapsulates information about the original source file from which a @@ -877,8 +1384,11 @@ class SourceCodeInfo(betterproto.Message): # as more types of locations could be recorded in the future. 
location: List["SourceCodeInfoLocation"] = betterproto.message_field(1) + def __post_init__(self) -> None: + super().__post_init__() -@dataclass + +@dataclass(eq=False, repr=False) class SourceCodeInfoLocation(betterproto.Message): # Identifies which part of the FileDescriptorProto was defined at this # location. Each element is a field number or an index. They form a path @@ -924,8 +1434,11 @@ class SourceCodeInfoLocation(betterproto.Message): trailing_comments: str = betterproto.string_field(4) leading_detached_comments: List[str] = betterproto.string_field(6) + def __post_init__(self) -> None: + super().__post_init__() + -@dataclass +@dataclass(eq=False, repr=False) class GeneratedCodeInfo(betterproto.Message): """ Describes the relationship between generated code and its original source @@ -937,8 +1450,11 @@ class GeneratedCodeInfo(betterproto.Message): # its generating .proto file. annotation: List["GeneratedCodeInfoAnnotation"] = betterproto.message_field(1) + def __post_init__(self) -> None: + super().__post_init__() -@dataclass + +@dataclass(eq=False, repr=False) class GeneratedCodeInfoAnnotation(betterproto.Message): # Identifies the element in the original source .proto file. This field is # formatted the same as SourceCodeInfo.Location.path. @@ -953,360 +1469,5 @@ class GeneratedCodeInfoAnnotation(betterproto.Message): # byte (so the length of the text = end - begin). end: int = betterproto.int32_field(4) - -@dataclass -class Duration(betterproto.Message): - """ - A Duration represents a signed, fixed-length span of time represented as a - count of seconds and fractions of seconds at nanosecond resolution. It is - independent of any calendar and concepts like "day" or "month". It is - related to Timestamp in that the difference between two Timestamp values is - a Duration and it can be added or subtracted from a Timestamp. Range is - approximately +-10,000 years. # Examples Example 1: Compute Duration from - two Timestamps in pseudo code. Timestamp start = ...; Timestamp end - = ...; Duration duration = ...; duration.seconds = end.seconds - - start.seconds; duration.nanos = end.nanos - start.nanos; if - (duration.seconds < 0 && duration.nanos > 0) { duration.seconds += 1; - duration.nanos -= 1000000000; } else if (duration.seconds > 0 && - duration.nanos < 0) { duration.seconds -= 1; duration.nanos += - 1000000000; } Example 2: Compute Timestamp from Timestamp + Duration in - pseudo code. Timestamp start = ...; Duration duration = ...; - Timestamp end = ...; end.seconds = start.seconds + duration.seconds; - end.nanos = start.nanos + duration.nanos; if (end.nanos < 0) { - end.seconds -= 1; end.nanos += 1000000000; } else if (end.nanos - >= 1000000000) { end.seconds += 1; end.nanos -= 1000000000; - } Example 3: Compute Duration from datetime.timedelta in Python. td = - datetime.timedelta(days=3, minutes=10) duration = Duration() - duration.FromTimedelta(td) # JSON Mapping In JSON format, the Duration type - is encoded as a string rather than an object, where the string ends in the - suffix "s" (indicating seconds) and is preceded by the number of seconds, - with nanoseconds expressed as fractional seconds. For example, 3 seconds - with 0 nanoseconds should be encoded in JSON format as "3s", while 3 - seconds and 1 nanosecond should be expressed in JSON format as - "3.000000001s", and 3 seconds and 1 microsecond should be expressed in JSON - format as "3.000001s". - """ - - # Signed seconds of the span of time. Must be from -315,576,000,000 to - # +315,576,000,000 inclusive. 
Note: these bounds are computed from: 60 - # sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years - seconds: int = betterproto.int64_field(1) - # Signed fractions of a second at nanosecond resolution of the span of time. - # Durations less than one second are represented with a 0 `seconds` field and - # a positive or negative `nanos` field. For durations of one second or more, - # a non-zero value for the `nanos` field must be of the same sign as the - # `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive. - nanos: int = betterproto.int32_field(2) - - -@dataclass -class Empty(betterproto.Message): - """ - A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to use it as the request - or the response type of an API method. For instance: service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The - JSON representation for `Empty` is empty JSON object `{}`. - """ - - pass - - -@dataclass -class FieldMask(betterproto.Message): - """ - `FieldMask` represents a set of symbolic field paths, for example: - paths: "f.a" paths: "f.b.d" Here `f` represents a field in some root - message, `a` and `b` fields in the message found in `f`, and `d` a field - found in the message in `f.b`. Field masks are used to specify a subset of - fields that should be returned by a get operation or modified by an update - operation. Field masks also have a custom JSON encoding (see below). # - Field Masks in Projections When used in the context of a projection, a - response message or sub-message is filtered by the API to only contain - those fields as specified in the mask. For example, if the mask in the - previous example is applied to a response message as follows: f { - a : 22 b { d : 1 x : 2 } y : 13 } - z: 8 The result will not contain specific values for fields x,y and z - (their value will be set to the default, and omitted in proto text output): - f { a : 22 b { d : 1 } } A repeated field is - not allowed except at the last position of a paths string. If a FieldMask - object is not present in a get operation, the operation applies to all - fields (as if a FieldMask of all fields had been specified). Note that a - field mask does not necessarily apply to the top-level response message. In - case of a REST get operation, the field mask applies directly to the - response, but in case of a REST list operation, the mask instead applies to - each individual message in the returned resource list. In case of a REST - custom method, other definitions may be used. Where the mask applies will - be clearly documented together with its declaration in the API. In any - case, the effect on the returned resource/resources is required behavior - for APIs. # Field Masks in Update Operations A field mask in update - operations specifies which fields of the targeted resource are going to be - updated. The API is required to only change the values of the fields as - specified in the mask and leave the others untouched. If a resource is - passed in to describe the updated values, the API ignores the values of all - fields not covered by the mask. If a repeated field is specified for an - update operation, new values will be appended to the existing repeated - field in the target resource. Note that a repeated field is only allowed in - the last position of a `paths` string. 
If a sub-message is specified in the - last position of the field mask for an update operation, then new value - will be merged into the existing sub-message in the target resource. For - example, given the target message: f { b { d: 1 - x: 2 } c: [1] } And an update message: f { b { - d: 10 } c: [2] } then if the field mask is: paths: ["f.b", - "f.c"] then the result will be: f { b { d: 10 x: - 2 } c: [1, 2] } An implementation may provide options to - override this default behavior for repeated and message fields. In order to - reset a field's value to the default, the field must be in the mask and set - to the default value in the provided resource. Hence, in order to reset all - fields of a resource, provide a default instance of the resource and set - all fields in the mask, or do not provide a mask as described below. If a - field mask is not present on update, the operation applies to all fields - (as if a field mask of all fields has been specified). Note that in the - presence of schema evolution, this may mean that fields the client does not - know and has therefore not filled into the request will be reset to their - default. If this is unwanted behavior, a specific service may require a - client to always specify a field mask, producing an error if not. As with - get operations, the location of the resource which describes the updated - values in the request message depends on the operation kind. In any case, - the effect of the field mask is required to be honored by the API. ## - Considerations for HTTP REST The HTTP kind of an update operation which - uses a field mask must be set to PATCH instead of PUT in order to satisfy - HTTP semantics (PUT must only be used for full updates). # JSON Encoding of - Field Masks In JSON, a field mask is encoded as a single string where paths - are separated by a comma. Fields name in each path are converted to/from - lower-camel naming conventions. As an example, consider the following - message declarations: message Profile { User user = 1; - Photo photo = 2; } message User { string display_name = 1; - string address = 2; } In proto a field mask for `Profile` may look as - such: mask { paths: "user.display_name" paths: "photo" - } In JSON, the same mask is represented as below: { mask: - "user.displayName,photo" } # Field Masks and Oneof Fields Field masks - treat fields in oneofs just as regular fields. Consider the following - message: message SampleMessage { oneof test_oneof { - string name = 4; SubMessage sub_message = 9; } } The - field mask can be: mask { paths: "name" } Or: mask { - paths: "sub_message" } Note that oneof type names ("test_oneof" in this - case) cannot be used in paths. ## Field Mask Verification The - implementation of any API method which has a FieldMask type field in the - request should verify the included field paths, and return an - `INVALID_ARGUMENT` error if any path is unmappable. - """ - - # The set of field mask paths. - paths: List[str] = betterproto.string_field(1) - - -@dataclass -class Struct(betterproto.Message): - """ - `Struct` represents a structured data value, consisting of fields which map - to dynamically typed values. In some languages, `Struct` might be supported - by a native representation. For example, in scripting languages like JS a - struct is represented as an object. The details of that representation are - described together with the proto support for the language. The JSON - representation for `Struct` is JSON object. - """ - - # Unordered map of dynamically typed values. 
- fields: Dict[str, "Value"] = betterproto.map_field( - 1, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE - ) - - -@dataclass -class Value(betterproto.Message): - """ - `Value` represents a dynamically typed value which can be either null, a - number, a string, a boolean, a recursive struct value, or a list of values. - A producer of value is expected to set one of that variants, absence of any - variant indicates an error. The JSON representation for `Value` is JSON - value. - """ - - # Represents a null value. - null_value: "NullValue" = betterproto.enum_field(1, group="kind") - # Represents a double value. - number_value: float = betterproto.double_field(2, group="kind") - # Represents a string value. - string_value: str = betterproto.string_field(3, group="kind") - # Represents a boolean value. - bool_value: bool = betterproto.bool_field(4, group="kind") - # Represents a structured value. - struct_value: "Struct" = betterproto.message_field(5, group="kind") - # Represents a repeated `Value`. - list_value: "ListValue" = betterproto.message_field(6, group="kind") - - -@dataclass -class ListValue(betterproto.Message): - """ - `ListValue` is a wrapper around a repeated field of values. The JSON - representation for `ListValue` is JSON array. - """ - - # Repeated field of dynamically typed values. - values: List["Value"] = betterproto.message_field(1) - - -@dataclass -class Timestamp(betterproto.Message): - """ - A Timestamp represents a point in time independent of any time zone or - local calendar, encoded as a count of seconds and fractions of seconds at - nanosecond resolution. The count is relative to an epoch at UTC midnight on - January 1, 1970, in the proleptic Gregorian calendar which extends the - Gregorian calendar backwards to year one. All minutes are 60 seconds long. - Leap seconds are "smeared" so that no leap second table is needed for - interpretation, using a [24-hour linear - smear](https://developers.google.com/time/smear). The range is from - 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By restricting to - that range, we ensure that we can convert to and from [RFC - 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. # Examples - Example 1: Compute Timestamp from POSIX `time()`. Timestamp timestamp; - timestamp.set_seconds(time(NULL)); timestamp.set_nanos(0); Example 2: - Compute Timestamp from POSIX `gettimeofday()`. struct timeval tv; - gettimeofday(&tv, NULL); Timestamp timestamp; - timestamp.set_seconds(tv.tv_sec); timestamp.set_nanos(tv.tv_usec * - 1000); Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. - FILETIME ft; GetSystemTimeAsFileTime(&ft); UINT64 ticks = - (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; // A Windows - tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z // is - 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. Timestamp - timestamp; timestamp.set_seconds((INT64) ((ticks / 10000000) - - 11644473600LL)); timestamp.set_nanos((INT32) ((ticks % 10000000) * - 100)); Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. - long millis = System.currentTimeMillis(); Timestamp timestamp = - Timestamp.newBuilder().setSeconds(millis / 1000) .setNanos((int) - ((millis % 1000) * 1000000)).build(); Example 5: Compute Timestamp from - current time in Python. timestamp = Timestamp() - timestamp.GetCurrentTime() # JSON Mapping In JSON format, the Timestamp - type is encoded as a string in the [RFC - 3339](https://www.ietf.org/rfc/rfc3339.txt) format. 
That is, the format is - "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" where {year} is - always expressed using four digits while {month}, {day}, {hour}, {min}, and - {sec} are zero-padded to two digits each. The fractional seconds, which can - go up to 9 digits (i.e. up to 1 nanosecond resolution), are optional. The - "Z" suffix indicates the timezone ("UTC"); the timezone is required. A - proto3 JSON serializer should always use UTC (as indicated by "Z") when - printing the Timestamp type and a proto3 JSON parser should be able to - accept both UTC and other timezones (as indicated by an offset). For - example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past 01:30 UTC on - January 15, 2017. In JavaScript, one can convert a Date object to this - format using the standard [toISOString()](https://developer.mozilla.org/en- - US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) method. - In Python, a standard `datetime.datetime` object can be converted to this - format using - [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) - with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one - can use the Joda Time's [`ISODateTimeFormat.dateTime()`]( - http://www.joda.org/joda- - time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D ) - to obtain a formatter capable of generating timestamps in this format. - """ - - # Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must - # be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. - seconds: int = betterproto.int64_field(1) - # Non-negative fractions of a second at nanosecond resolution. Negative - # second values with fractions must still have non-negative nanos values that - # count forward in time. Must be from 0 to 999,999,999 inclusive. - nanos: int = betterproto.int32_field(2) - - -@dataclass -class DoubleValue(betterproto.Message): - """ - Wrapper message for `double`. The JSON representation for `DoubleValue` is - JSON number. - """ - - # The double value. - value: float = betterproto.double_field(1) - - -@dataclass -class FloatValue(betterproto.Message): - """ - Wrapper message for `float`. The JSON representation for `FloatValue` is - JSON number. - """ - - # The float value. - value: float = betterproto.float_field(1) - - -@dataclass -class Int64Value(betterproto.Message): - """ - Wrapper message for `int64`. The JSON representation for `Int64Value` is - JSON string. - """ - - # The int64 value. - value: int = betterproto.int64_field(1) - - -@dataclass -class UInt64Value(betterproto.Message): - """ - Wrapper message for `uint64`. The JSON representation for `UInt64Value` is - JSON string. - """ - - # The uint64 value. - value: int = betterproto.uint64_field(1) - - -@dataclass -class Int32Value(betterproto.Message): - """ - Wrapper message for `int32`. The JSON representation for `Int32Value` is - JSON number. - """ - - # The int32 value. - value: int = betterproto.int32_field(1) - - -@dataclass -class UInt32Value(betterproto.Message): - """ - Wrapper message for `uint32`. The JSON representation for `UInt32Value` is - JSON number. - """ - - # The uint32 value. - value: int = betterproto.uint32_field(1) - - -@dataclass -class BoolValue(betterproto.Message): - """ - Wrapper message for `bool`. The JSON representation for `BoolValue` is JSON - `true` and `false`. - """ - - # The bool value. - value: bool = betterproto.bool_field(1) - - -@dataclass -class StringValue(betterproto.Message): - """ - Wrapper message for `string`. 
The JSON representation for `StringValue` is - JSON string. - """ - - # The string value. - value: str = betterproto.string_field(1) - - -@dataclass -class BytesValue(betterproto.Message): - """ - Wrapper message for `bytes`. The JSON representation for `BytesValue` is - JSON string. - """ - - # The bytes value. - value: bytes = betterproto.bytes_field(1) + def __post_init__(self) -> None: + super().__post_init__() diff --git a/src/betterproto/lib/google/protobuf/compiler/__init__.py b/src/betterproto/lib/google/protobuf/compiler/__init__.py new file mode 100644 index 000000000..cfe185b36 --- /dev/null +++ b/src/betterproto/lib/google/protobuf/compiler/__init__.py @@ -0,0 +1,125 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# sources: google/protobuf/compiler/plugin.proto +# plugin: python-betterproto +from dataclasses import dataclass +from typing import List + +import betterproto + + +@dataclass(eq=False, repr=False) +class Version(betterproto.Message): + """The version number of protocol compiler.""" + + major: int = betterproto.int32_field(1) + minor: int = betterproto.int32_field(2) + patch: int = betterproto.int32_field(3) + # A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should + # be empty for mainline stable releases. + suffix: str = betterproto.string_field(4) + + def __post_init__(self) -> None: + super().__post_init__() + + +@dataclass(eq=False, repr=False) +class CodeGeneratorRequest(betterproto.Message): + """An encoded CodeGeneratorRequest is written to the plugin's stdin.""" + + # The .proto files that were explicitly listed on the command-line. The code + # generator should generate code only for these files. Each file's + # descriptor will be included in proto_file, below. + file_to_generate: List[str] = betterproto.string_field(1) + # The generator parameter passed on the command-line. + parameter: str = betterproto.string_field(2) + # FileDescriptorProtos for all files in files_to_generate and everything they + # import. The files will appear in topological order, so each file appears + # before any file that imports it. protoc guarantees that all proto_files + # will be written after the fields above, even though this is not technically + # guaranteed by the protobuf wire format. This theoretically could allow a + # plugin to stream in the FileDescriptorProtos and handle them one by one + # rather than read the entire set into memory at once. However, as of this + # writing, this is not similarly optimized on protoc's end -- it will store + # all fields in memory at once before sending them to the plugin. Type names + # of fields and extensions in the FileDescriptorProto are always fully + # qualified. + proto_file: List[ + "betterproto_lib_google_protobuf.FileDescriptorProto" + ] = betterproto.message_field(15) + # The version number of protocol compiler. + compiler_version: "Version" = betterproto.message_field(3) + + def __post_init__(self) -> None: + super().__post_init__() + + +@dataclass(eq=False, repr=False) +class CodeGeneratorResponse(betterproto.Message): + """The plugin writes an encoded CodeGeneratorResponse to stdout.""" + + # Error message. If non-empty, code generation failed. The plugin process + # should exit with status code zero even if it reports an error in this way. + # This should be used to indicate errors in .proto files which prevent the + # code generator from generating correct code. 
Errors which indicate a + # problem in protoc itself -- such as the input CodeGeneratorRequest being + # unparseable -- should be reported by writing a message to stderr and + # exiting with a non-zero status code. + error: str = betterproto.string_field(1) + file: List["CodeGeneratorResponseFile"] = betterproto.message_field(15) + + def __post_init__(self) -> None: + super().__post_init__() + + +@dataclass(eq=False, repr=False) +class CodeGeneratorResponseFile(betterproto.Message): + """Represents a single generated file.""" + + # The file name, relative to the output directory. The name must not contain + # "." or ".." components and must be relative, not be absolute (so, the file + # cannot lie outside the output directory). "/" must be used as the path + # separator, not "\". If the name is omitted, the content will be appended to + # the previous file. This allows the generator to break large files into + # small chunks, and allows the generated text to be streamed back to protoc + # so that large files need not reside completely in memory at one time. Note + # that as of this writing protoc does not optimize for this -- it will read + # the entire CodeGeneratorResponse before writing files to disk. + name: str = betterproto.string_field(1) + # If non-empty, indicates that the named file should already exist, and the + # content here is to be inserted into that file at a defined insertion point. + # This feature allows a code generator to extend the output produced by + # another code generator. The original generator may provide insertion + # points by placing special annotations in the file that look like: + # @@protoc_insertion_point(NAME) The annotation can have arbitrary text + # before and after it on the line, which allows it to be placed in a comment. + # NAME should be replaced with an identifier naming the point -- this is what + # other generators will use as the insertion_point. Code inserted at this + # point will be placed immediately above the line containing the insertion + # point (thus multiple insertions to the same point will come out in the + # order they were added). The double-@ is intended to make it unlikely that + # the generated code could contain things that look like insertion points by + # accident. For example, the C++ code generator places the following line in + # the .pb.h files that it generates: // + # @@protoc_insertion_point(namespace_scope) This line appears within the + # scope of the file's package namespace, but outside of any particular class. + # Another plugin can then specify the insertion_point "namespace_scope" to + # generate additional classes or other declarations that should be placed in + # this scope. Note that if the line containing the insertion point begins + # with whitespace, the same whitespace will be added to every line of the + # inserted text. This is useful for languages like Python, where indentation + # matters. In these languages, the insertion point comment should be + # indented the same amount as any inserted code will need to be in order to + # work correctly in that context. The code generator that generates the + # initial file and the one which inserts into it must both run as part of a + # single invocation of protoc. Code generators are executed in the order in + # which they appear on the command line. If |insertion_point| is present, + # |name| must also be present. + insertion_point: str = betterproto.string_field(2) + # The file contents. 
+    content: str = betterproto.string_field(15)
+
+    def __post_init__(self) -> None:
+        super().__post_init__()
+
+
+import betterproto.lib.google.protobuf as betterproto_lib_google_protobuf
diff --git a/src/betterproto/plugin/main.py b/src/betterproto/plugin/main.py
old mode 100644
new mode 100755
index dc9d04c94..e0b2557d2
--- a/src/betterproto/plugin/main.py
+++ b/src/betterproto/plugin/main.py
@@ -3,9 +3,13 @@
 import os
 import sys

-from google.protobuf.compiler import plugin_pb2 as plugin
+from betterproto.lib.google.protobuf.compiler import (
+    CodeGeneratorRequest,
+    CodeGeneratorResponse,
+)

 from betterproto.plugin.parser import generate_code
+from betterproto.plugin.models import monkey_patch_oneof_index


 def main() -> None:
@@ -13,16 +17,19 @@ def main() -> None:
     # Read request message from stdin
     data = sys.stdin.buffer.read()

+    # Apply workaround for proto2/3 difference in protoc messages
+    monkey_patch_oneof_index()
+
     # Parse request
-    request = plugin.CodeGeneratorRequest()
-    request.ParseFromString(data)
+    request = CodeGeneratorRequest()
+    request.parse(data)

     dump_file = os.getenv("BETTERPROTO_DUMP")
     if dump_file:
         dump_request(dump_file, request)

     # Create response
-    response = plugin.CodeGeneratorResponse()
+    response = CodeGeneratorResponse()

     # Generate code
     generate_code(request, response)
@@ -34,7 +41,7 @@ def main() -> None:
     sys.stdout.buffer.write(output)


-def dump_request(dump_file: str, request: plugin.CodeGeneratorRequest) -> None:
+def dump_request(dump_file: str, request: CodeGeneratorRequest) -> None:
     """
     For developers: Supports running plugin.py standalone so its possible to debug it.
     Run protoc (or generate.py) with BETTERPROTO_DUMP="yourfile.bin" to write the request to a file.
diff --git a/src/betterproto/plugin/models.py b/src/betterproto/plugin/models.py
index bf314051e..98fd9067b 100644
--- a/src/betterproto/plugin/models.py
+++ b/src/betterproto/plugin/models.py
@@ -29,12 +29,37 @@
 reference to `A` to `B`'s `fields` attribute.
 """

+
+import betterproto
+from betterproto import which_one_of
+from betterproto.casing import sanitize_name
+from betterproto.compile.importing import (
+    get_type_reference,
+    parse_source_type_name,
+)
+from betterproto.compile.naming import (
+    pythonize_class_name,
+    pythonize_field_name,
+    pythonize_method_name,
+)
+from betterproto.lib.google.protobuf import (
+    DescriptorProto,
+    EnumDescriptorProto,
+    FileDescriptorProto,
+    MethodDescriptorProto,
+    Field,
+    FieldDescriptorProto,
+    FieldDescriptorProtoType,
+    FieldDescriptorProtoLabel,
+)
+from betterproto.lib.google.protobuf.compiler import CodeGeneratorRequest
+
+
 import re
 import textwrap
 from dataclasses import dataclass, field
 from typing import Dict, Iterator, List, Optional, Set, Text, Type, Union
-
-import betterproto
+import sys

 from ..casing import sanitize_name
 from ..compile.importing import get_type_reference, parse_source_type_name
@@ -44,26 +69,6 @@
     pythonize_method_name,
 )

-try:
-    # betterproto[compiler] specific dependencies
-    from google.protobuf.compiler import plugin_pb2 as plugin
-    from google.protobuf.descriptor_pb2 import (
-        DescriptorProto,
-        EnumDescriptorProto,
-        FieldDescriptorProto,
-        FileDescriptorProto,
-        MethodDescriptorProto,
-    )
-except ImportError as err:
-    print(
-        "\033[31m"
-        f"Unable to import `{err.name}` from betterproto plugin! "
-        "Please ensure that you've installed betterproto as "
-        '`pip install "betterproto[compiler]"` so that compiler dependencies '
-        "are included."
- "\033[0m" - ) - raise SystemExit(1) # Create a unique placeholder to deal with # https://stackoverflow.com/questions/51575931/class-inheritance-in-python-3-7-dataclasses @@ -71,54 +76,75 @@ # Organize proto types into categories PROTO_FLOAT_TYPES = ( - FieldDescriptorProto.TYPE_DOUBLE, # 1 - FieldDescriptorProto.TYPE_FLOAT, # 2 + FieldDescriptorProtoType.TYPE_DOUBLE, # 1 + FieldDescriptorProtoType.TYPE_FLOAT, # 2 ) PROTO_INT_TYPES = ( - FieldDescriptorProto.TYPE_INT64, # 3 - FieldDescriptorProto.TYPE_UINT64, # 4 - FieldDescriptorProto.TYPE_INT32, # 5 - FieldDescriptorProto.TYPE_FIXED64, # 6 - FieldDescriptorProto.TYPE_FIXED32, # 7 - FieldDescriptorProto.TYPE_UINT32, # 13 - FieldDescriptorProto.TYPE_SFIXED32, # 15 - FieldDescriptorProto.TYPE_SFIXED64, # 16 - FieldDescriptorProto.TYPE_SINT32, # 17 - FieldDescriptorProto.TYPE_SINT64, # 18 + FieldDescriptorProtoType.TYPE_INT64, # 3 + FieldDescriptorProtoType.TYPE_UINT64, # 4 + FieldDescriptorProtoType.TYPE_INT32, # 5 + FieldDescriptorProtoType.TYPE_FIXED64, # 6 + FieldDescriptorProtoType.TYPE_FIXED32, # 7 + FieldDescriptorProtoType.TYPE_UINT32, # 13 + FieldDescriptorProtoType.TYPE_SFIXED32, # 15 + FieldDescriptorProtoType.TYPE_SFIXED64, # 16 + FieldDescriptorProtoType.TYPE_SINT32, # 17 + FieldDescriptorProtoType.TYPE_SINT64, # 18 ) -PROTO_BOOL_TYPES = (FieldDescriptorProto.TYPE_BOOL,) # 8 -PROTO_STR_TYPES = (FieldDescriptorProto.TYPE_STRING,) # 9 -PROTO_BYTES_TYPES = (FieldDescriptorProto.TYPE_BYTES,) # 12 +PROTO_BOOL_TYPES = (FieldDescriptorProtoType.TYPE_BOOL,) # 8 +PROTO_STR_TYPES = (FieldDescriptorProtoType.TYPE_STRING,) # 9 +PROTO_BYTES_TYPES = (FieldDescriptorProtoType.TYPE_BYTES,) # 12 PROTO_MESSAGE_TYPES = ( - FieldDescriptorProto.TYPE_MESSAGE, # 11 - FieldDescriptorProto.TYPE_ENUM, # 14 + FieldDescriptorProtoType.TYPE_MESSAGE, # 11 + FieldDescriptorProtoType.TYPE_ENUM, # 14 ) -PROTO_MAP_TYPES = (FieldDescriptorProto.TYPE_MESSAGE,) # 11 +PROTO_MAP_TYPES = (FieldDescriptorProtoType.TYPE_MESSAGE,) # 11 PROTO_PACKED_TYPES = ( - FieldDescriptorProto.TYPE_DOUBLE, # 1 - FieldDescriptorProto.TYPE_FLOAT, # 2 - FieldDescriptorProto.TYPE_INT64, # 3 - FieldDescriptorProto.TYPE_UINT64, # 4 - FieldDescriptorProto.TYPE_INT32, # 5 - FieldDescriptorProto.TYPE_FIXED64, # 6 - FieldDescriptorProto.TYPE_FIXED32, # 7 - FieldDescriptorProto.TYPE_BOOL, # 8 - FieldDescriptorProto.TYPE_UINT32, # 13 - FieldDescriptorProto.TYPE_SFIXED32, # 15 - FieldDescriptorProto.TYPE_SFIXED64, # 16 - FieldDescriptorProto.TYPE_SINT32, # 17 - FieldDescriptorProto.TYPE_SINT64, # 18 + FieldDescriptorProtoType.TYPE_DOUBLE, # 1 + FieldDescriptorProtoType.TYPE_FLOAT, # 2 + FieldDescriptorProtoType.TYPE_INT64, # 3 + FieldDescriptorProtoType.TYPE_UINT64, # 4 + FieldDescriptorProtoType.TYPE_INT32, # 5 + FieldDescriptorProtoType.TYPE_FIXED64, # 6 + FieldDescriptorProtoType.TYPE_FIXED32, # 7 + FieldDescriptorProtoType.TYPE_BOOL, # 8 + FieldDescriptorProtoType.TYPE_UINT32, # 13 + FieldDescriptorProtoType.TYPE_SFIXED32, # 15 + FieldDescriptorProtoType.TYPE_SFIXED64, # 16 + FieldDescriptorProtoType.TYPE_SINT32, # 17 + FieldDescriptorProtoType.TYPE_SINT64, # 18 ) +def monkey_patch_oneof_index(): + """ + The compiler message types are written for proto2, but we read them as proto3. + For this to work in the case of the oneof_index fields, which depend on being able + to tell whether they were set, we have to treat them as oneof fields. This method + monkey patches the generated classes after the fact to force this behaviour. 
+    """
+    object.__setattr__(
+        FieldDescriptorProto.__dataclass_fields__["oneof_index"].metadata[
+            "betterproto"
+        ],
+        "group",
+        "oneof_index",
+    )
+    object.__setattr__(
+        Field.__dataclass_fields__["oneof_index"].metadata["betterproto"],
+        "group",
+        "oneof_index",
+    )
+
+
 def get_comment(
     proto_file: "FileDescriptorProto", path: List[int], indent: int = 4
 ) -> str:
     pad = " " * indent
-    for sci in proto_file.source_code_info.location:
-        if list(sci.path) == path and sci.leading_comments:
+    for sci_loc in proto_file.source_code_info.location:
+        if list(sci_loc.path) == path and sci_loc.leading_comments:
             lines = textwrap.wrap(
-                sci.leading_comments.strip().replace("\n", ""), width=79 - indent
+                sci_loc.leading_comments.strip().replace("\n", ""), width=79 - indent
             )

             if path[-2] == 2 and path[-4] != 6:
@@ -139,6 +165,7 @@ def get_comment(
 class ProtoContentBase:
     """Methods common to MessageCompiler, ServiceCompiler and ServiceMethodCompiler."""

+    source_file: FileDescriptorProto
     path: List[int]
     comment_indent: int = 4
     parent: Union["betterproto.Message", "OutputTemplate"]
@@ -156,13 +183,6 @@ def output_file(self) -> "OutputTemplate":
             current = current.parent
         return current

-    @property
-    def proto_file(self) -> FieldDescriptorProto:
-        current = self
-        while not isinstance(current, OutputTemplate):
-            current = current.parent
-        return current.package_proto_obj
-
     @property
     def request(self) -> "PluginRequestCompiler":
         current = self
@@ -176,14 +196,14 @@ def comment(self) -> str:
         for this object.
         """
         return get_comment(
-            proto_file=self.proto_file, path=self.path, indent=self.comment_indent
+            proto_file=self.source_file, path=self.path, indent=self.comment_indent
         )


 @dataclass
 class PluginRequestCompiler:

-    plugin_request_obj: plugin.CodeGeneratorRequest
+    plugin_request_obj: CodeGeneratorRequest
     output_packages: Dict[str, "OutputTemplate"] = field(default_factory=dict)

     @property
@@ -253,6 +273,7 @@ def python_module_imports(self) -> Set[str]:
 class MessageCompiler(ProtoContentBase):
     """Representation of a protobuf message."""

+    source_file: FileDescriptorProto
     parent: Union["MessageCompiler", OutputTemplate] = PLACEHOLDER
     proto_obj: DescriptorProto = PLACEHOLDER
     path: List[int] = PLACEHOLDER
@@ -296,7 +317,7 @@ def is_map(
     proto_field_obj: FieldDescriptorProto, parent_message: DescriptorProto
 ) -> bool:
     """True if proto_field_obj is a map, otherwise False."""
-    if proto_field_obj.type == FieldDescriptorProto.TYPE_MESSAGE:
+    if proto_field_obj.type == FieldDescriptorProtoType.TYPE_MESSAGE:
         # This might be a map...
         message_type = proto_field_obj.type_name.split(".").pop().lower()
         map_entry = f"{proto_field_obj.name.replace('_', '').lower()}entry"
@@ -311,8 +332,20 @@ def is_map(


 def is_oneof(proto_field_obj: FieldDescriptorProto) -> bool:
-    """True if proto_field_obj is a OneOf, otherwise False."""
-    return proto_field_obj.HasField("oneof_index")
+    """
+    True if proto_field_obj is a OneOf, otherwise False.
+
+    .. warning::
+        Because the message from protoc is defined in proto2, and betterproto works with
+        proto3, and interpreting the FieldDescriptorProto.oneof_index field requires
+        distinguishing between default and unset values (which proto3 doesn't support),
+        we have to hack the generated FieldDescriptorProto class for this to work.
+        The hack consists of setting group="oneof_index" in the field metadata,
+        essentially making oneof_index the sole member of a one_of group, which allows
+        us to tell whether it was set, via the which_one_of interface.
+ """ + + return which_one_of(proto_field_obj, "oneof_index")[0] == "oneof_index" @dataclass @@ -360,7 +393,7 @@ def betterproto_field_args(self) -> List[str]: def field_wraps(self) -> Optional[str]: """Returns betterproto wrapped field type or None.""" match_wrapper = re.match( - r"\.google\.protobuf\.(.+)Value", self.proto_obj.type_name + r"\.google\.protobuf\.(.+)Value$", self.proto_obj.type_name ) if match_wrapper: wrapped_type = "TYPE_" + match_wrapper.group(1).upper() @@ -371,7 +404,7 @@ def field_wraps(self) -> Optional[str]: @property def repeated(self) -> bool: return ( - self.proto_obj.label == FieldDescriptorProto.LABEL_REPEATED + self.proto_obj.label == FieldDescriptorProtoLabel.LABEL_REPEATED and not is_map(self.proto_obj, self.parent) ) @@ -384,7 +417,9 @@ def mutable(self) -> bool: def field_type(self) -> str: """String representation of proto field type.""" return ( - self.proto_obj.Type.Name(self.proto_obj.type).lower().replace("type_", "") + FieldDescriptorProtoType(self.proto_obj.type) + .name.lower() + .replace("type_", "") ) @property @@ -478,14 +513,19 @@ def __post_init__(self) -> None: ): # Get Python types self.py_k_type = FieldCompiler( - parent=self, proto_obj=nested.field[0] # key + source_file=self.source_file, + parent=self, + proto_obj=nested.field[0], # key ).py_type self.py_v_type = FieldCompiler( - parent=self, proto_obj=nested.field[1] # value + source_file=self.source_file, + parent=self, + proto_obj=nested.field[1], # value ).py_type + # Get proto types - self.proto_k_type = self.proto_obj.Type.Name(nested.field[0].type) - self.proto_v_type = self.proto_obj.Type.Name(nested.field[1].type) + self.proto_k_type = FieldDescriptorProtoType(nested.field[0].type).name + self.proto_v_type = FieldDescriptorProtoType(nested.field[1].type).name super().__post_init__() # call FieldCompiler-> MessageCompiler __post_init__ @property @@ -527,7 +567,7 @@ def __post_init__(self) -> None: name=sanitize_name(entry_proto_value.name), value=entry_proto_value.number, comment=get_comment( - proto_file=self.proto_file, path=self.path + [2, entry_number] + proto_file=self.source_file, path=self.path + [2, entry_number] ), ) for entry_number, entry_proto_value in enumerate(self.proto_obj.value) diff --git a/src/betterproto/plugin/parser.py b/src/betterproto/plugin/parser.py index a1be2685e..4be99773c 100644 --- a/src/betterproto/plugin/parser.py +++ b/src/betterproto/plugin/parser.py @@ -1,28 +1,19 @@ +from betterproto.lib.google.protobuf import ( + DescriptorProto, + EnumDescriptorProto, + FieldDescriptorProto, + FileDescriptorProto, + ServiceDescriptorProto, +) +from betterproto.lib.google.protobuf.compiler import ( + CodeGeneratorRequest, + CodeGeneratorResponse, + CodeGeneratorResponseFile, +) import itertools import pathlib import sys -from typing import TYPE_CHECKING, Iterator, List, Tuple, Union, Set - -try: - # betterproto[compiler] specific dependencies - from google.protobuf.compiler import plugin_pb2 as plugin - from google.protobuf.descriptor_pb2 import ( - DescriptorProto, - EnumDescriptorProto, - FieldDescriptorProto, - ServiceDescriptorProto, - ) -except ImportError as err: - print( - "\033[31m" - f"Unable to import `{err.name}` from betterproto plugin! " - "Please ensure that you've installed betterproto as " - '`pip install "betterproto[compiler]"` so that compiler dependencies ' - "are included." 
- "\033[0m" - ) - raise SystemExit(1) - +from typing import Iterator, List, Tuple, TYPE_CHECKING, Union from .compiler import outputfile_compiler from .models import ( EnumDefinitionCompiler, @@ -70,7 +61,7 @@ def _traverse( def generate_code( - request: plugin.CodeGeneratorRequest, response: plugin.CodeGeneratorResponse + request: CodeGeneratorRequest, response: CodeGeneratorResponse ) -> None: plugin_options = request.parameter.split(",") if request.parameter else [] @@ -100,7 +91,12 @@ def generate_code( for output_package_name, output_package in request_data.output_packages.items(): for proto_input_file in output_package.input_files: for item, path in traverse(proto_input_file): - read_protobuf_type(item=item, path=path, output_package=output_package) + read_protobuf_type( + source_file=proto_input_file, + item=item, + path=path, + output_package=output_package, + ) # Read Services for output_package_name, output_package in request_data.output_packages.items(): @@ -116,11 +112,13 @@ def generate_code( output_path = pathlib.Path(*output_package_name.split("."), "__init__.py") output_paths.add(output_path) - f: response.File = response.file.add() - f.name = str(output_path) - - # Render and then format the output file - f.content = outputfile_compiler(output_file=output_package) + response.file.append( + CodeGeneratorResponseFile( + name=str(output_path), + # Render and then format the output file + content=outputfile_compiler(output_file=output_package), + ) + ) # Make each output directory a package with __init__ file init_files = { @@ -130,38 +128,53 @@ def generate_code( } - output_paths for init_file in init_files: - init = response.file.add() - init.name = str(init_file) + response.file.append(CodeGeneratorResponseFile(name=str(init_file))) for output_package_name in sorted(output_paths.union(init_files)): print(f"Writing {output_package_name}", file=sys.stderr) def read_protobuf_type( - item: DescriptorProto, path: List[int], output_package: OutputTemplate + item: DescriptorProto, + path: List[int], + source_file: "FileDescriptorProto", + output_package: OutputTemplate, ) -> None: if isinstance(item, DescriptorProto): if item.options.map_entry: # Skip generated map entry messages since we just use dicts return # Process Message - message_data = MessageCompiler(parent=output_package, proto_obj=item, path=path) + message_data = MessageCompiler( + source_file=source_file, parent=output_package, proto_obj=item, path=path + ) for index, field in enumerate(item.field): if is_map(field, item): MapEntryCompiler( - parent=message_data, proto_obj=field, path=path + [2, index] + source_file=source_file, + parent=message_data, + proto_obj=field, + path=path + [2, index], ) elif is_oneof(field): OneOfFieldCompiler( - parent=message_data, proto_obj=field, path=path + [2, index] + source_file=source_file, + parent=message_data, + proto_obj=field, + path=path + [2, index], ) else: FieldCompiler( - parent=message_data, proto_obj=field, path=path + [2, index] + source_file=source_file, + parent=message_data, + proto_obj=field, + path=path + [2, index], ) elif isinstance(item, EnumDescriptorProto): # Enum - EnumDefinitionCompiler(parent=output_package, proto_obj=item, path=path) + EnumDefinitionCompiler( + source_file=source_file, parent=output_package, proto_obj=item, path=path + ) def read_protobuf_service( diff --git a/tests/inputs/example/example.proto b/tests/inputs/example/example.proto index edc4d87f6..311105888 100644 --- a/tests/inputs/example/example.proto +++ 
b/tests/inputs/example/example.proto @@ -1,8 +1,909 @@ -syntax = "proto3"; +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -package hello; +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// The messages in this file describe the definitions found in .proto files. +// A valid .proto file can be translated directly to a FileDescriptorProto +// without any other information (e.g. without reading its imports). -// Greeting represents a message you can tell a user. -message Greeting { - string message = 1; + +syntax = "proto2"; + +// package google.protobuf; + +option go_package = "google.golang.org/protobuf/types/descriptorpb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DescriptorProtos"; +option csharp_namespace = "Google.Protobuf.Reflection"; +option objc_class_prefix = "GPB"; +option cc_enable_arenas = true; + +// descriptor.proto must be optimized for speed because reflection-based +// algorithms don't work during bootstrapping. +option optimize_for = SPEED; + +// The protocol compiler can output a FileDescriptorSet containing the .proto +// files it parses. +message FileDescriptorSet { + repeated FileDescriptorProto file = 1; +} + +// Describes a complete .proto file. +message FileDescriptorProto { + optional string name = 1; // file name, relative to root of source tree + optional string package = 2; // e.g. "foo", "foo.bar", etc. + + // Names of files imported by this file. + repeated string dependency = 3; + // Indexes of the public imported files in the dependency list above. + repeated int32 public_dependency = 10; + // Indexes of the weak imported files in the dependency list. + // For Google-internal migration only. Do not use. + repeated int32 weak_dependency = 11; + + // All top-level definitions in this file. 
+ repeated DescriptorProto message_type = 4; + repeated EnumDescriptorProto enum_type = 5; + repeated ServiceDescriptorProto service = 6; + repeated FieldDescriptorProto extension = 7; + + optional FileOptions options = 8; + + // This field contains optional information about the original source code. + // You may safely remove this entire field without harming runtime + // functionality of the descriptors -- the information is needed only by + // development tools. + optional SourceCodeInfo source_code_info = 9; + + // The syntax of the proto file. + // The supported values are "proto2" and "proto3". + optional string syntax = 12; +} + +// Describes a message type. +message DescriptorProto { + optional string name = 1; + + repeated FieldDescriptorProto field = 2; + repeated FieldDescriptorProto extension = 6; + + repeated DescriptorProto nested_type = 3; + repeated EnumDescriptorProto enum_type = 4; + + message ExtensionRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + + optional ExtensionRangeOptions options = 3; + } + repeated ExtensionRange extension_range = 5; + + repeated OneofDescriptorProto oneof_decl = 8; + + optional MessageOptions options = 7; + + // Range of reserved tag numbers. Reserved tag numbers may not be used by + // fields or extension ranges in the same message. Reserved ranges may + // not overlap. + message ReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + } + repeated ReservedRange reserved_range = 9; + // Reserved field names, which may not be used by fields in the same message. + // A given name may only be reserved once. + repeated string reserved_name = 10; +} + +message ExtensionRangeOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +// Describes a field within a message. +message FieldDescriptorProto { + enum Type { + // 0 is reserved for errors. + // Order is weird for historical reasons. + TYPE_DOUBLE = 1; + TYPE_FLOAT = 2; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + // negative values are likely. + TYPE_INT64 = 3; + TYPE_UINT64 = 4; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + // negative values are likely. + TYPE_INT32 = 5; + TYPE_FIXED64 = 6; + TYPE_FIXED32 = 7; + TYPE_BOOL = 8; + TYPE_STRING = 9; + // Tag-delimited aggregate. + // Group type is deprecated and not supported in proto3. However, Proto3 + // implementations should still be able to parse the group wire format and + // treat group fields as unknown fields. + TYPE_GROUP = 10; + TYPE_MESSAGE = 11; // Length-delimited aggregate. + + // New in version 2. + TYPE_BYTES = 12; + TYPE_UINT32 = 13; + TYPE_ENUM = 14; + TYPE_SFIXED32 = 15; + TYPE_SFIXED64 = 16; + TYPE_SINT32 = 17; // Uses ZigZag encoding. + TYPE_SINT64 = 18; // Uses ZigZag encoding. + } + + enum Label { + // 0 is reserved for errors + LABEL_OPTIONAL = 1; + LABEL_REQUIRED = 2; + LABEL_REPEATED = 3; + } + + optional string name = 1; + optional int32 number = 3; + optional Label label = 4; + + // If type_name is set, this need not be set. If both this and type_name + // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + optional Type type = 5; + + // For message and enum types, this is the name of the type. If the name + // starts with a '.', it is fully-qualified. 
Otherwise, C++-like scoping + // rules are used to find the type (i.e. first the nested types within this + // message are searched, then within the parent, on up to the root + // namespace). + optional string type_name = 6; + + // For extensions, this is the name of the type being extended. It is + // resolved in the same manner as type_name. + optional string extendee = 2; + + // For numeric types, contains the original text representation of the value. + // For booleans, "true" or "false". + // For strings, contains the default text contents (not escaped in any way). + // For bytes, contains the C escaped value. All bytes >= 128 are escaped. + // TODO(kenton): Base-64 encode? + optional string default_value = 7; + + // If set, gives the index of a oneof in the containing type's oneof_decl + // list. This field is a member of that oneof. + optional int32 oneof_index = 9; + + // JSON name of this field. The value is set by protocol compiler. If the + // user has set a "json_name" option on this field, that option's value + // will be used. Otherwise, it's deduced from the field's name by converting + // it to camelCase. + optional string json_name = 10; + + optional FieldOptions options = 8; + + // If true, this is a proto3 "optional". When a proto3 field is optional, it + // tracks presence regardless of field type. + // + // When proto3_optional is true, this field must be belong to a oneof to + // signal to old proto3 clients that presence is tracked for this field. This + // oneof is known as a "synthetic" oneof, and this field must be its sole + // member (each proto3 optional field gets its own synthetic oneof). Synthetic + // oneofs exist in the descriptor only, and do not generate any API. Synthetic + // oneofs must be ordered after all "real" oneofs. + // + // For message fields, proto3_optional doesn't create any semantic change, + // since non-repeated message fields always track presence. However it still + // indicates the semantic detail of whether the user wrote "optional" or not. + // This can be useful for round-tripping the .proto file. For consistency we + // give message fields a synthetic oneof also, even though it is not required + // to track presence. This is especially important because the parser can't + // tell if a field is a message or an enum, so it must always create a + // synthetic oneof. + // + // Proto2 optional fields do not set this flag, because they already indicate + // optional with `LABEL_OPTIONAL`. + optional bool proto3_optional = 17; +} + +// Describes a oneof. +message OneofDescriptorProto { + optional string name = 1; + optional OneofOptions options = 2; +} + +// Describes an enum type. +message EnumDescriptorProto { + optional string name = 1; + + repeated EnumValueDescriptorProto value = 2; + + optional EnumOptions options = 3; + + // Range of reserved numeric values. Reserved values may not be used by + // entries in the same enum. Reserved ranges may not overlap. + // + // Note that this is distinct from DescriptorProto.ReservedRange in that it + // is inclusive such that it can appropriately represent the entire int32 + // domain. + message EnumReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Inclusive. + } + + // Range of reserved numeric values. Reserved numeric values may not be used + // by enum values in the same enum declaration. Reserved ranges may not + // overlap. + repeated EnumReservedRange reserved_range = 4; + + // Reserved enum value names, which may not be reused. 
A given name may only + // be reserved once. + repeated string reserved_name = 5; +} + +// Describes a value within an enum. +message EnumValueDescriptorProto { + optional string name = 1; + optional int32 number = 2; + + optional EnumValueOptions options = 3; +} + +// Describes a service. +message ServiceDescriptorProto { + optional string name = 1; + repeated MethodDescriptorProto method = 2; + + optional ServiceOptions options = 3; +} + +// Describes a method of a service. +message MethodDescriptorProto { + optional string name = 1; + + // Input and output type names. These are resolved in the same way as + // FieldDescriptorProto.type_name, but must refer to a message type. + optional string input_type = 2; + optional string output_type = 3; + + optional MethodOptions options = 4; + + // Identifies if client streams multiple client messages + optional bool client_streaming = 5 [default = false]; + // Identifies if server streams multiple server messages + optional bool server_streaming = 6 [default = false]; +} + + +// =================================================================== +// Options + +// Each of the definitions above may have "options" attached. These are +// just annotations which may cause code to be generated slightly differently +// or may contain hints for code that manipulates protocol messages. +// +// Clients may define custom options as extensions of the *Options messages. +// These extensions may not yet be known at parsing time, so the parser cannot +// store the values in them. Instead it stores them in a field in the *Options +// message called uninterpreted_option. This field must have the same name +// across all *Options messages. We then use this field to populate the +// extensions when we build a descriptor, at which point all protos have been +// parsed and so all extensions are known. +// +// Extension numbers for custom options may be chosen as follows: +// * For options which will only be used within a single application or +// organization, or for experimental options, use field numbers 50000 +// through 99999. It is up to you to ensure that you do not use the +// same number for multiple options. +// * For options which will be published and used publicly by multiple +// independent entities, e-mail protobuf-global-extension-registry@google.com +// to reserve extension numbers. Simply provide your project name (e.g. +// Objective-C plugin) and your project website (if available) -- there's no +// need to explain how you intend to use them. Usually you only need one +// extension number. You can declare multiple options with only one extension +// number by putting them in a sub-message. See the Custom Options section of +// the docs for examples: +// https://developers.google.com/protocol-buffers/docs/proto#options +// If this turns out to be popular, a web service will be set up +// to automatically assign option numbers. + +message FileOptions { + + // Sets the Java package where classes generated from this .proto will be + // placed. By default, the proto package is used, but this is often + // inappropriate because proto packages do not normally start with backwards + // domain names. + optional string java_package = 1; + + + // If set, all the classes from the .proto file are wrapped in a single + // outer class with the given name. This applies to both Proto1 + // (equivalent to the old "--one_java_file" option) and Proto2 (where + // a .proto always translates to a single class, but you may want to + // explicitly choose the class name). 
+ optional string java_outer_classname = 8; + + // If set true, then the Java code generator will generate a separate .java + // file for each top-level message, enum, and service defined in the .proto + // file. Thus, these types will *not* be nested inside the outer class + // named by java_outer_classname. However, the outer class will still be + // generated to contain the file's getDescriptor() method as well as any + // top-level extensions defined in the file. + optional bool java_multiple_files = 10 [default = false]; + + // This option does nothing. + optional bool java_generate_equals_and_hash = 20 [deprecated=true]; + + // If set true, then the Java2 code generator will generate code that + // throws an exception whenever an attempt is made to assign a non-UTF-8 + // byte sequence to a string field. + // Message reflection will do the same. + // However, an extension field still accepts non-UTF-8 byte sequences. + // This option has no effect on when used with the lite runtime. + optional bool java_string_check_utf8 = 27 [default = false]; + + + // Generated classes can be optimized for speed or code size. + enum OptimizeMode { + SPEED = 1; // Generate complete code for parsing, serialization, + // etc. + CODE_SIZE = 2; // Use ReflectionOps to implement these methods. + LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime. + } + optional OptimizeMode optimize_for = 9 [default = SPEED]; + + // Sets the Go package where structs generated from this .proto will be + // placed. If omitted, the Go package will be derived from the following: + // - The basename of the package import path, if provided. + // - Otherwise, the package statement in the .proto file, if present. + // - Otherwise, the basename of the .proto file, without extension. + optional string go_package = 11; + + + + + // Should generic services be generated in each language? "Generic" services + // are not specific to any particular RPC system. They are generated by the + // main code generators in each language (without additional plugins). + // Generic services were the only kind of service generation supported by + // early versions of google.protobuf. + // + // Generic services are now considered deprecated in favor of using plugins + // that generate code specific to your particular RPC system. Therefore, + // these default to false. Old code which depends on generic services should + // explicitly set them to true. + optional bool cc_generic_services = 16 [default = false]; + optional bool java_generic_services = 17 [default = false]; + optional bool py_generic_services = 18 [default = false]; + optional bool php_generic_services = 42 [default = false]; + + // Is this file deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for everything in the file, or it will be completely ignored; in the very + // least, this is a formalization for deprecating files. + optional bool deprecated = 23 [default = false]; + + // Enables the use of arenas for the proto messages in this file. This applies + // only to generated classes for C++. + optional bool cc_enable_arenas = 31 [default = true]; + + + // Sets the objective c class prefix which is prepended to all objective c + // generated classes from this .proto. There is no default. + optional string objc_class_prefix = 36; + + // Namespace for generated classes; defaults to the package. 
+ optional string csharp_namespace = 37; + + // By default Swift generators will take the proto package and CamelCase it + // replacing '.' with underscore and use that to prefix the types/symbols + // defined. When this options is provided, they will use this value instead + // to prefix the types/symbols defined. + optional string swift_prefix = 39; + + // Sets the php class prefix which is prepended to all php generated classes + // from this .proto. Default is empty. + optional string php_class_prefix = 40; + + // Use this option to change the namespace of php generated classes. Default + // is empty. When this option is empty, the package name will be used for + // determining the namespace. + optional string php_namespace = 41; + + // Use this option to change the namespace of php generated metadata classes. + // Default is empty. When this option is empty, the proto file name will be + // used for determining the namespace. + optional string php_metadata_namespace = 44; + + // Use this option to change the package of ruby generated classes. Default + // is empty. When this option is not set, the package name will be used for + // determining the ruby package. + optional string ruby_package = 45; + + + // The parser stores options it doesn't recognize here. + // See the documentation for the "Options" section above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. + // See the documentation for the "Options" section above. + extensions 1000 to max; + + reserved 38; +} + +message MessageOptions { + // Set true to use the old proto1 MessageSet wire format for extensions. + // This is provided for backwards-compatibility with the MessageSet wire + // format. You should not use this for any other reason: It's less + // efficient, has fewer features, and is more complicated. + // + // The message must be defined exactly as follows: + // message Foo { + // option message_set_wire_format = true; + // extensions 4 to max; + // } + // Note that the message cannot have any defined fields; MessageSets only + // have extensions. + // + // All extensions of your type must be singular messages; e.g. they cannot + // be int32s, enums, or repeated messages. + // + // Because this is an option, the above two restrictions are not enforced by + // the protocol compiler. + optional bool message_set_wire_format = 1 [default = false]; + + // Disables the generation of the standard "descriptor()" accessor, which can + // conflict with a field of the same name. This is meant to make migration + // from proto1 easier; new code should avoid fields named "descriptor". + optional bool no_standard_descriptor_accessor = 2 [default = false]; + + // Is this message deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the message, or it will be completely ignored; in the very least, + // this is a formalization for deprecating messages. + optional bool deprecated = 3 [default = false]; + + // Whether the message is an automatically generated map entry type for the + // maps field. 
+ // + // For maps fields: + // map map_field = 1; + // The parsed descriptor looks like: + // message MapFieldEntry { + // option map_entry = true; + // optional KeyType key = 1; + // optional ValueType value = 2; + // } + // repeated MapFieldEntry map_field = 1; + // + // Implementations may choose not to generate the map_entry=true message, but + // use a native map in the target language to hold the keys and values. + // The reflection APIs in such implementations still need to work as + // if the field is a repeated message field. + // + // NOTE: Do not set the option in .proto files. Always use the maps syntax + // instead. The option should only be implicitly set by the proto compiler + // parser. + optional bool map_entry = 7; + + reserved 8; // javalite_serializable + reserved 9; // javanano_as_lite + + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message FieldOptions { + // The ctype option instructs the C++ code generator to use a different + // representation of the field than it normally would. See the specific + // options below. This option is not yet implemented in the open source + // release -- sorry, we'll try to include it in a future version! + optional CType ctype = 1 [default = STRING]; + enum CType { + // Default mode. + STRING = 0; + + CORD = 1; + + STRING_PIECE = 2; + } + // The packed option can be enabled for repeated primitive fields to enable + // a more efficient representation on the wire. Rather than repeatedly + // writing the tag and type for each element, the entire array is encoded as + // a single length-delimited blob. In proto3, only explicit setting it to + // false will avoid using packed encoding. + optional bool packed = 2; + + // The jstype option determines the JavaScript type used for values of the + // field. The option is permitted only for 64 bit integral and fixed types + // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + // is represented as JavaScript string, which avoids loss of precision that + // can happen when a large value is converted to a floating point JavaScript. + // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + // use the JavaScript "number" type. The behavior of the default option + // JS_NORMAL is implementation dependent. + // + // This option is an enum to permit additional types to be added, e.g. + // goog.math.Integer. + optional JSType jstype = 6 [default = JS_NORMAL]; + enum JSType { + // Use the default type. + JS_NORMAL = 0; + + // Use JavaScript strings. + JS_STRING = 1; + + // Use JavaScript numbers. + JS_NUMBER = 2; + } + + // Should this field be parsed lazily? Lazy applies only to message-type + // fields. It means that when the outer message is initially parsed, the + // inner message's contents will not be parsed but instead stored in encoded + // form. The inner message will actually be parsed when it is first accessed. + // + // This is only a hint. Implementations are free to choose whether to use + // eager or lazy parsing regardless of the value of this option. However, + // setting this option true suggests that the protocol author believes that + // using lazy parsing on this field is worth the additional bookkeeping + // overhead typically needed to implement it. 
+ // + // This option does not affect the public interface of any generated code; + // all method signatures remain the same. Furthermore, thread-safety of the + // interface is not affected by this option; const methods remain safe to + // call from multiple threads concurrently, while non-const methods continue + // to require exclusive access. + // + // + // Note that implementations may choose not to check required fields within + // a lazy sub-message. That is, calling IsInitialized() on the outer message + // may return true even if the inner message has missing required fields. + // This is necessary because otherwise the inner message would have to be + // parsed in order to perform the check, defeating the purpose of lazy + // parsing. An implementation which chooses not to check required fields + // must be consistent about it. That is, for any particular sub-message, the + // implementation must either *always* check its required fields, or *never* + // check its required fields, regardless of whether or not the message has + // been parsed. + optional bool lazy = 5 [default = false]; + + // Is this field deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for accessors, or it will be completely ignored; in the very least, this + // is a formalization for deprecating fields. + optional bool deprecated = 3 [default = false]; + + // For Google-internal migration only. Do not use. + optional bool weak = 10 [default = false]; + + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; + + reserved 4; // removed jtype +} + +message OneofOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumOptions { + + // Set this option to true to allow mapping different tag names to the same + // value. + optional bool allow_alias = 2; + + // Is this enum deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum, or it will be completely ignored; in the very least, this + // is a formalization for deprecating enums. + optional bool deprecated = 3 [default = false]; + + reserved 5; // javanano_as_lite + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumValueOptions { + // Is this enum value deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum value, or it will be completely ignored; in the very least, + // this is a formalization for deprecating enum values. + optional bool deprecated = 1 [default = false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message ServiceOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. 
We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this service deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the service, or it will be completely ignored; in the very least, + // this is a formalization for deprecating services. + optional bool deprecated = 33 [default = false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message MethodOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this method deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the method, or it will be completely ignored; in the very least, + // this is a formalization for deprecating methods. + optional bool deprecated = 33 [default = false]; + + // Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + // or neither? HTTP based RPC implementation may choose GET verb for safe + // methods, and PUT verb for idempotent methods instead of the default POST. + enum IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0; + NO_SIDE_EFFECTS = 1; // implies idempotent + IDEMPOTENT = 2; // idempotent, but may have side effects + } + optional IdempotencyLevel idempotency_level = 34 + [default = IDEMPOTENCY_UNKNOWN]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + + +// A message representing a option the parser does not recognize. This only +// appears in options protos created by the compiler::Parser class. +// DescriptorPool resolves these when building Descriptor objects. Therefore, +// options protos in descriptor objects (e.g. returned by Descriptor::options(), +// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions +// in them. +message UninterpretedOption { + // The name of the uninterpreted option. Each string represents a segment in + // a dot-separated name. is_extension is true iff a segment represents an + // extension (denoted with parentheses in options specs in .proto files). + // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + // "foo.(bar.baz).qux". + message NamePart { + required string name_part = 1; + required bool is_extension = 2; + } + repeated NamePart name = 2; + + // The value of the uninterpreted option, in whatever type the tokenizer + // identified it as during parsing. Exactly one of these should be set. + optional string identifier_value = 3; + optional uint64 positive_int_value = 4; + optional int64 negative_int_value = 5; + optional double double_value = 6; + optional bytes string_value = 7; + optional string aggregate_value = 8; +} + +// =================================================================== +// Optional source code info + +// Encapsulates information about the original source file from which a +// FileDescriptorProto was generated. 
+message SourceCodeInfo { + // A Location identifies a piece of source code in a .proto file which + // corresponds to a particular definition. This information is intended + // to be useful to IDEs, code indexers, documentation generators, and similar + // tools. + // + // For example, say we have a file like: + // message Foo { + // optional string foo = 1; + // } + // Let's look at just the field definition: + // optional string foo = 1; + // ^ ^^ ^^ ^ ^^^ + // a bc de f ghi + // We have the following locations: + // span path represents + // [a,i) [ 4, 0, 2, 0 ] The whole field definition. + // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + // + // Notes: + // - A location may refer to a repeated field itself (i.e. not to any + // particular index within it). This is used whenever a set of elements are + // logically enclosed in a single code segment. For example, an entire + // extend block (possibly containing multiple extension definitions) will + // have an outer location whose path refers to the "extensions" repeated + // field without an index. + // - Multiple locations may have the same path. This happens when a single + // logical declaration is spread out across multiple places. The most + // obvious example is the "extend" block again -- there may be multiple + // extend blocks in the same scope, each of which will have the same path. + // - A location's span is not always a subset of its parent's span. For + // example, the "extendee" of an extension declaration appears at the + // beginning of the "extend" block and is shared by all extensions within + // the block. + // - Just because a location's span is a subset of some other location's span + // does not mean that it is a descendant. For example, a "group" defines + // both a type and a field in a single declaration. Thus, the locations + // corresponding to the type and field and their components will overlap. + // - Code which tries to interpret locations should probably be designed to + // ignore those that it doesn't understand, as more types of locations could + // be recorded in the future. + repeated Location location = 1; + message Location { + // Identifies which part of the FileDescriptorProto was defined at this + // location. + // + // Each element is a field number or an index. They form a path from + // the root FileDescriptorProto to the place where the definition. For + // example, this path: + // [ 4, 3, 2, 7, 1 ] + // refers to: + // file.message_type(3) // 4, 3 + // .field(7) // 2, 7 + // .name() // 1 + // This is because FileDescriptorProto.message_type has field number 4: + // repeated DescriptorProto message_type = 4; + // and DescriptorProto.field has field number 2: + // repeated FieldDescriptorProto field = 2; + // and FieldDescriptorProto.name has field number 1: + // optional string name = 1; + // + // Thus, the above path gives the location of a field name. If we removed + // the last element: + // [ 4, 3, 2, 7 ] + // this path refers to the whole field declaration (from the beginning + // of the label to the terminating semicolon). + repeated int32 path = 1 [packed = true]; + + // Always has exactly three or four elements: start line, start column, + // end line (optional, otherwise assumed same as start line), end column. + // These are packed into a single field for efficiency. 
Note that line + // and column numbers are zero-based -- typically you will want to add + // 1 to each before displaying to a user. + repeated int32 span = 2 [packed = true]; + + // If this SourceCodeInfo represents a complete declaration, these are any + // comments appearing before and after the declaration which appear to be + // attached to the declaration. + // + // A series of line comments appearing on consecutive lines, with no other + // tokens appearing on those lines, will be treated as a single comment. + // + // leading_detached_comments will keep paragraphs of comments that appear + // before (but not connected to) the current element. Each paragraph, + // separated by empty lines, will be one comment element in the repeated + // field. + // + // Only the comment content is provided; comment markers (e.g. //) are + // stripped out. For block comments, leading whitespace and an asterisk + // will be stripped from the beginning of each line other than the first. + // Newlines are included in the output. + // + // Examples: + // + // optional int32 foo = 1; // Comment attached to foo. + // // Comment attached to bar. + // optional int32 bar = 2; + // + // optional string baz = 3; + // // Comment attached to baz. + // // Another line attached to baz. + // + // // Comment attached to qux. + // // + // // Another line attached to qux. + // optional double qux = 4; + // + // // Detached comment for corge. This is not leading or trailing comments + // // to qux or corge because there are blank lines separating it from + // // both. + // + // // Detached comment for corge paragraph 2. + // + // optional string corge = 5; + // /* Block comment attached + // * to corge. Leading asterisks + // * will be removed. */ + // /* Block comment attached to + // * grault. */ + // optional int32 grault = 6; + // + // // ignored detached comments. + optional string leading_comments = 3; + optional string trailing_comments = 4; + repeated string leading_detached_comments = 6; + } +} + +// Describes the relationship between generated code and its original source +// file. A GeneratedCodeInfo message is associated with only one generated +// source file, but may contain references to different source .proto files. +message GeneratedCodeInfo { + // An Annotation connects some span of text in generated code to an element + // of its generating .proto file. + repeated Annotation annotation = 1; + message Annotation { + // Identifies the element in the original source .proto file. This field + // is formatted the same as SourceCodeInfo.Location.path. + repeated int32 path = 1 [packed = true]; + + // Identifies the filesystem path to the original source .proto. + optional string source_file = 2; + + // Identifies the starting offset in bytes in the generated code + // that relates to the identified object. + optional int32 begin = 3; + + // Identifies the ending offset in bytes in the generated code that + // relates to the identified offset. The end offset should be one past + // the last relevant byte (so the length of the text = end - begin). + optional int32 end = 4; + } } diff --git a/tests/inputs/oneof/oneof-name.json b/tests/inputs/oneof/oneof-name.json index 45960e706..605484b6a 100644 --- a/tests/inputs/oneof/oneof-name.json +++ b/tests/inputs/oneof/oneof-name.json @@ -1,3 +1,3 @@ { - "name": "foobar" + "pitier": "Mr. 
T" } diff --git a/tests/inputs/oneof/oneof.json b/tests/inputs/oneof/oneof.json index 0197c9902..65cafc5f0 100644 --- a/tests/inputs/oneof/oneof.json +++ b/tests/inputs/oneof/oneof.json @@ -1,3 +1,3 @@ { - "count": 100 + "pitied": 100 } diff --git a/tests/inputs/oneof/oneof.proto b/tests/inputs/oneof/oneof.proto index 693e5b53a..1f9c4b279 100644 --- a/tests/inputs/oneof/oneof.proto +++ b/tests/inputs/oneof/oneof.proto @@ -2,7 +2,15 @@ syntax = "proto3"; message Test { oneof foo { - int32 count = 1; - string name = 2; + int32 pitied = 1; + string pitier = 2; + } + + int32 just_a_regular_field = 3; + + oneof bar { + int32 drinks = 11; + string bar_name = 12; } } + diff --git a/tests/inputs/oneof/test_oneof.py b/tests/inputs/oneof/test_oneof.py index c361b53a6..cc8a7dd49 100644 --- a/tests/inputs/oneof/test_oneof.py +++ b/tests/inputs/oneof/test_oneof.py @@ -6,10 +6,10 @@ def test_which_count(): message = Test() message.from_json(get_test_case_json_data("oneof")) - assert betterproto.which_one_of(message, "foo") == ("count", 100) + assert betterproto.which_one_of(message, "foo") == ("pitied", 100) def test_which_name(): message = Test() message.from_json(get_test_case_json_data("oneof", "oneof-name.json")) - assert betterproto.which_one_of(message, "foo") == ("name", "foobar") + assert betterproto.which_one_of(message, "foo") == ("pitier", "Mr. T") From 2cc3e05fa013f18183b5460786712cf86b04051a Mon Sep 17 00:00:00 2001 From: Nat Noordanus Date: Sun, 18 Oct 2020 23:02:08 +0200 Subject: [PATCH 08/46] Update deps & add generate_lib task - Remove plugin dependency on protobuf since it's no longer required. - Update poethepoet to for better pyproject toml syntax support - Add handy generate_lib poe task for maintaining generated libs --- poetry.lock | 727 ++++++++++++++++++++++++++----------------------- pyproject.toml | 15 +- 2 files changed, 392 insertions(+), 350 deletions(-) diff --git a/poetry.lock b/poetry.lock index 09b957bf9..402646a2e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,49 +1,49 @@ [[package]] -name = "alabaster" -version = "0.7.12" -description = "A configurable sidebar-enabled Sphinx theme" category = "dev" +description = "A configurable sidebar-enabled Sphinx theme" +name = "alabaster" optional = false python-versions = "*" +version = "0.7.12" [[package]] -name = "appdirs" -version = "1.4.4" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "main" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +name = "appdirs" optional = false python-versions = "*" +version = "1.4.4" [[package]] -name = "asv" -version = "0.4.2" -description = "Airspeed Velocity: A simple Python history benchmarking tool" category = "dev" +description = "Airspeed Velocity: A simple Python history benchmarking tool" +name = "asv" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.extras] -hg = ["python-hglib (>=1.5)"] +version = "0.4.2" [package.dependencies] six = ">=1.4" +[package.extras] +hg = ["python-hglib (>=1.5)"] + [[package]] -name = "atomicwrites" -version = "1.4.0" -description = "Atomic file writes." category = "dev" +description = "Atomic file writes." 
+marker = "sys_platform == \"win32\"" +name = "atomicwrites" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -marker = "sys_platform == \"win32\"" +version = "1.4.0" [[package]] -name = "attrs" -version = "20.2.0" -description = "Classes Without Boilerplate" category = "dev" +description = "Classes Without Boilerplate" +name = "attrs" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "20.2.0" [package.extras] dev = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "sphinx-rtd-theme", "pre-commit"] @@ -52,36 +52,32 @@ tests = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six tests_no_zope = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] [[package]] -name = "babel" -version = "2.8.0" -description = "Internationalization utilities" category = "dev" +description = "Internationalization utilities" +name = "babel" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.8.0" [package.dependencies] pytz = ">=2015.7" [[package]] -name = "backports-datetime-fromisoformat" -version = "1.0.0" -description = "Backport of Python 3.7's datetime.fromisoformat" category = "main" +description = "Backport of Python 3.7's datetime.fromisoformat" +marker = "python_version < \"3.7\"" +name = "backports-datetime-fromisoformat" optional = false python-versions = "*" -marker = "python_version < \"3.7\"" +version = "1.0.0" [[package]] -name = "black" -version = "20.8b1" -description = "The uncompromising code formatter." category = "main" +description = "The uncompromising code formatter." +name = "black" optional = false python-versions = ">=3.6" - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] +version = "20.8b1" [package.dependencies] appdirs = "*" @@ -94,32 +90,31 @@ typed-ast = ">=1.4.0" typing-extensions = ">=3.7.4" [package.dependencies.dataclasses] -version = ">=0.6" python = "<3.7" +version = ">=0.6" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] -name = "blessings" -version = "1.7" -description = "A thin, practical wrapper around terminal coloring, styling, and positioning" category = "dev" +description = "A thin, practical wrapper around terminal coloring, styling, and positioning" +name = "blessings" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.7" [package.dependencies] six = "*" [[package]] -name = "bpython" -version = "0.19" -description = "Fancy Interface to the Python Interpreter" category = "dev" +description = "Fancy Interface to the Python Interpreter" +name = "bpython" optional = false python-versions = "*" - -[package.extras] -jedi = ["jedi"] -urwid = ["urwid"] -watch = ["watchdog"] +version = "0.19" [package.dependencies] curtsies = ">=0.1.18" @@ -128,387 +123,388 @@ pygments = "*" requests = "*" six = ">=1.5" +[package.extras] +jedi = ["jedi"] +urwid = ["urwid"] +watch = ["watchdog"] + [[package]] -name = "certifi" -version = "2020.6.20" -description = "Python package for providing Mozilla's CA Bundle." category = "dev" +description = "Python package for providing Mozilla's CA Bundle." 
+name = "certifi" optional = false python-versions = "*" +version = "2020.6.20" [[package]] -name = "chardet" -version = "3.0.4" -description = "Universal encoding detector for Python 2 and 3" category = "dev" +description = "Universal encoding detector for Python 2 and 3" +name = "chardet" optional = false python-versions = "*" +version = "3.0.4" [[package]] -name = "click" -version = "7.1.2" -description = "Composable command line interface toolkit" category = "main" +description = "Composable command line interface toolkit" +name = "click" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "7.1.2" [[package]] -name = "colorama" -version = "0.4.3" -description = "Cross-platform colored terminal text." category = "dev" +description = "Cross-platform colored terminal text." +marker = "sys_platform == \"win32\" or platform_system == \"Windows\"" +name = "colorama" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -marker = "sys_platform == \"win32\" or platform_system == \"Windows\"" +version = "0.4.4" [[package]] -name = "coverage" -version = "5.3" -description = "Code coverage measurement for Python" category = "dev" +description = "Code coverage measurement for Python" +name = "coverage" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +version = "5.3" [package.extras] toml = ["toml"] [[package]] -name = "curtsies" -version = "0.3.4" -description = "Curses-like terminal wrapper, with colored strings!" category = "dev" +description = "Curses-like terminal wrapper, with colored strings!" +name = "curtsies" optional = false python-versions = "*" +version = "0.3.4" [package.dependencies] blessings = ">=1.5" wcwidth = ">=0.1.4" [[package]] -name = "dataclasses" -version = "0.7" -description = "A backport of the dataclasses module for Python 3.6" category = "main" +description = "A backport of the dataclasses module for Python 3.6" +marker = "python_version >= \"3.6\" and python_version < \"3.7\" or python_version < \"3.7\"" +name = "dataclasses" optional = false python-versions = ">=3.6, <3.7" -marker = "python_version >= \"3.6\" and python_version < \"3.7\" or python_version < \"3.7\"" +version = "0.7" [[package]] -name = "distlib" -version = "0.3.1" -description = "Distribution utilities" category = "dev" +description = "Distribution utilities" +name = "distlib" optional = false python-versions = "*" +version = "0.3.1" [[package]] -name = "docutils" -version = "0.16" -description = "Docutils -- Python Documentation Utilities" category = "dev" +description = "Docutils -- Python Documentation Utilities" +name = "docutils" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "0.16" [[package]] -name = "filelock" -version = "3.0.12" -description = "A platform independent file lock." category = "dev" +description = "A platform independent file lock." 
+name = "filelock" optional = false python-versions = "*" +version = "3.0.12" [[package]] -name = "greenlet" -version = "0.4.17" -description = "Lightweight in-process concurrent programming" category = "dev" +description = "Lightweight in-process concurrent programming" +name = "greenlet" optional = false python-versions = "*" +version = "0.4.17" [[package]] -name = "grpcio" -version = "1.32.0" -description = "HTTP/2-based RPC framework" category = "dev" +description = "HTTP/2-based RPC framework" +name = "grpcio" optional = false python-versions = "*" - -[package.extras] -protobuf = ["grpcio-tools (>=1.32.0)"] +version = "1.32.0" [package.dependencies] six = ">=1.5.2" +[package.extras] +protobuf = ["grpcio-tools (>=1.32.0)"] + [[package]] -name = "grpcio-tools" -version = "1.32.0" -description = "Protobuf code generator for gRPC" category = "dev" +description = "Protobuf code generator for gRPC" +name = "grpcio-tools" optional = false python-versions = "*" +version = "1.32.0" [package.dependencies] grpcio = ">=1.32.0" protobuf = ">=3.5.0.post1,<4.0dev" [[package]] -name = "grpclib" -version = "0.4.1" -description = "Pure-Python gRPC implementation for asyncio" category = "main" +description = "Pure-Python gRPC implementation for asyncio" +name = "grpclib" optional = false python-versions = ">=3.6" +version = "0.4.1" [package.dependencies] h2 = ">=3.1.0,<5" multidict = "*" [package.dependencies.dataclasses] -version = "*" python = "<3.7" +version = "*" [[package]] -name = "h2" -version = "3.2.0" -description = "HTTP/2 State-Machine based protocol implementation" category = "main" +description = "HTTP/2 State-Machine based protocol implementation" +name = "h2" optional = false python-versions = "*" +version = "3.2.0" [package.dependencies] hpack = ">=3.0,<4" hyperframe = ">=5.2.0,<6" [[package]] -name = "hpack" -version = "3.0.0" -description = "Pure-Python HPACK header compression" category = "main" +description = "Pure-Python HPACK header compression" +name = "hpack" optional = false python-versions = "*" +version = "3.0.0" [[package]] -name = "hyperframe" -version = "5.2.0" -description = "HTTP/2 framing layer for Python" category = "main" +description = "HTTP/2 framing layer for Python" +name = "hyperframe" optional = false python-versions = "*" +version = "5.2.0" [[package]] -name = "idna" -version = "2.10" -description = "Internationalized Domain Names in Applications (IDNA)" category = "dev" +description = "Internationalized Domain Names in Applications (IDNA)" +name = "idna" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.10" [[package]] -name = "imagesize" -version = "1.2.0" -description = "Getting image size from png/jpeg/jpeg2000/gif file" category = "dev" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +name = "imagesize" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.2.0" [[package]] -name = "importlib-metadata" -version = "1.7.0" -description = "Read metadata from Python packages" category = "dev" +description = "Read metadata from Python packages" +marker = "python_version < \"3.8\"" +name = "importlib-metadata" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -marker = "python_version < \"3.8\"" +version = "2.0.0" + +[package.dependencies] +zipp = ">=0.5" [package.extras] docs = ["sphinx", "rst.linker"] testing = ["packaging", "pep517", "importlib-resources (>=1.3)"] -[package.dependencies] -zipp = ">=0.5" - [[package]] -name = 
"importlib-resources" -version = "3.0.0" -description = "Read resources from Python packages" category = "dev" +description = "Read resources from Python packages" +marker = "python_version < \"3.7\"" +name = "importlib-resources" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -marker = "python_version < \"3.7\"" - -[package.extras] -docs = ["sphinx", "rst.linker", "jaraco.packaging"] +version = "3.0.0" [package.dependencies] [package.dependencies.zipp] -version = ">=0.4" python = "<3.8" +version = ">=0.4" + +[package.extras] +docs = ["sphinx", "rst.linker", "jaraco.packaging"] [[package]] -name = "jinja2" -version = "2.11.2" -description = "A very fast and expressive template engine." category = "main" +description = "A very fast and expressive template engine." +name = "jinja2" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[package.extras] -i18n = ["Babel (>=0.8)"] +version = "2.11.2" [package.dependencies] MarkupSafe = ">=0.23" +[package.extras] +i18n = ["Babel (>=0.8)"] + [[package]] -name = "markupsafe" -version = "1.1.1" -description = "Safely add untrusted strings to HTML/XML markup." category = "main" +description = "Safely add untrusted strings to HTML/XML markup." +name = "markupsafe" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" +version = "1.1.1" [[package]] -name = "more-itertools" -version = "8.5.0" -description = "More routines for operating on iterables, beyond itertools" category = "dev" +description = "More routines for operating on iterables, beyond itertools" +name = "more-itertools" optional = false python-versions = ">=3.5" +version = "8.5.0" [[package]] -name = "multidict" -version = "4.7.6" -description = "multidict implementation" category = "main" +description = "multidict implementation" +name = "multidict" optional = false python-versions = ">=3.5" +version = "5.0.0" [[package]] -name = "mypy" -version = "0.770" -description = "Optional static typing for Python" category = "dev" +description = "Optional static typing for Python" +name = "mypy" optional = false python-versions = ">=3.5" - -[package.extras] -dmypy = ["psutil (>=4.0)"] +version = "0.770" [package.dependencies] mypy-extensions = ">=0.4.3,<0.5.0" typed-ast = ">=1.4.0,<1.5.0" typing-extensions = ">=3.7.4" +[package.extras] +dmypy = ["psutil (>=4.0)"] + [[package]] -name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." category = "main" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +name = "mypy-extensions" optional = false python-versions = "*" +version = "0.4.3" [[package]] -name = "packaging" -version = "20.4" -description = "Core utilities for Python packages" category = "dev" +description = "Core utilities for Python packages" +name = "packaging" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "20.4" [package.dependencies] pyparsing = ">=2.0.2" six = "*" [[package]] -name = "pastel" -version = "0.2.1" -description = "Bring colors to your terminal." category = "dev" +description = "Bring colors to your terminal." +name = "pastel" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "0.2.1" [[package]] -name = "pathspec" -version = "0.8.0" -description = "Utility library for gitignore style pattern matching of file paths." 
category = "main" +description = "Utility library for gitignore style pattern matching of file paths." +name = "pathspec" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "0.8.0" [[package]] -name = "pluggy" -version = "0.13.1" -description = "plugin and hook calling mechanisms for python" category = "dev" +description = "plugin and hook calling mechanisms for python" +name = "pluggy" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.extras] -dev = ["pre-commit", "tox"] +version = "0.13.1" [package.dependencies] [package.dependencies.importlib-metadata] -version = ">=0.12" python = "<3.8" +version = ">=0.12" + +[package.extras] +dev = ["pre-commit", "tox"] [[package]] -name = "poethepoet" -version = "0.5.0" -description = "A task runner that works well with poetry." category = "dev" +description = "A task runner that works well with poetry." +name = "poethepoet" optional = false python-versions = ">=3.6,<4.0" +version = "0.9.0" [package.dependencies] pastel = ">=0.2.0,<0.3.0" -toml = ">=0.10.1,<0.11.0" +tomlkit = ">=0.6.0,<1.0.0" [[package]] -name = "protobuf" -version = "3.13.0" +category = "dev" description = "Protocol Buffers" -category = "main" +name = "protobuf" optional = false python-versions = "*" +version = "3.13.0" [package.dependencies] setuptools = "*" six = ">=1.9" [[package]] -name = "py" -version = "1.9.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" category = "dev" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +name = "py" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.9.0" [[package]] -name = "pygments" -version = "2.7.1" -description = "Pygments is a syntax highlighting package written in Python." category = "dev" +description = "Pygments is a syntax highlighting package written in Python." +name = "pygments" optional = false python-versions = ">=3.5" +version = "2.7.1" [[package]] -name = "pyparsing" -version = "2.4.7" -description = "Python parsing module" category = "dev" +description = "Python parsing module" +name = "pyparsing" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +version = "2.4.7" [[package]] -name = "pytest" -version = "5.4.3" -description = "pytest: simple powerful testing with Python" category = "dev" +description = "pytest: simple powerful testing with Python" +name = "pytest" optional = false python-versions = ">=3.5" - -[package.extras] -checkqa-mypy = ["mypy (v0.761)"] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +version = "5.4.3" [package.dependencies] atomicwrites = ">=1.0" @@ -521,79 +517,79 @@ py = ">=1.5.0" wcwidth = "*" [package.dependencies.importlib-metadata] -version = ">=0.12" python = "<3.8" +version = ">=0.12" + +[package.extras] +checkqa-mypy = ["mypy (v0.761)"] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] [[package]] -name = "pytest-asyncio" -version = "0.12.0" -description = "Pytest support for asyncio." category = "dev" +description = "Pytest support for asyncio." 
+name = "pytest-asyncio" optional = false python-versions = ">= 3.5" - -[package.extras] -testing = ["async_generator (>=1.3)", "coverage", "hypothesis (>=5.7.1)"] +version = "0.12.0" [package.dependencies] pytest = ">=5.4.0" +[package.extras] +testing = ["async_generator (>=1.3)", "coverage", "hypothesis (>=5.7.1)"] + [[package]] -name = "pytest-cov" -version = "2.10.1" -description = "Pytest plugin for measuring coverage." category = "dev" +description = "Pytest plugin for measuring coverage." +name = "pytest-cov" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[package.extras] -testing = ["fields", "hunter", "process-tests (2.0.2)", "six", "pytest-xdist", "virtualenv"] +version = "2.10.1" [package.dependencies] coverage = ">=4.4" pytest = ">=4.6" +[package.extras] +testing = ["fields", "hunter", "process-tests (2.0.2)", "six", "pytest-xdist", "virtualenv"] + [[package]] -name = "pytest-mock" -version = "3.3.1" -description = "Thin-wrapper around the mock package for easier use with pytest" category = "dev" +description = "Thin-wrapper around the mock package for easier use with pytest" +name = "pytest-mock" optional = false python-versions = ">=3.5" - -[package.extras] -dev = ["pre-commit", "tox", "pytest-asyncio"] +version = "3.3.1" [package.dependencies] pytest = ">=5.0" +[package.extras] +dev = ["pre-commit", "tox", "pytest-asyncio"] + [[package]] -name = "pytz" -version = "2020.1" -description = "World timezone definitions, modern and historical" category = "dev" +description = "World timezone definitions, modern and historical" +name = "pytz" optional = false python-versions = "*" +version = "2020.1" [[package]] -name = "regex" -version = "2020.7.14" -description = "Alternative regular expression module, to replace re." category = "main" +description = "Alternative regular expression module, to replace re." +name = "regex" optional = false python-versions = "*" +version = "2020.10.15" [[package]] -name = "requests" -version = "2.24.0" -description = "Python HTTP for Humans." category = "dev" +description = "Python HTTP for Humans." +name = "requests" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[package.extras] -security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] -socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"] +version = "2.24.0" [package.dependencies] certifi = ">=2017.4.17" @@ -601,44 +597,43 @@ chardet = ">=3.0.2,<4" idna = ">=2.5,<3" urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26" +[package.extras] +security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] +socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"] + [[package]] -name = "six" -version = "1.15.0" +category = "dev" description = "Python 2 and 3 compatibility utilities" -category = "main" +name = "six" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +version = "1.15.0" [[package]] -name = "snowballstemmer" -version = "2.0.0" -description = "This package provides 26 stemmers for 25 languages generated from Snowball algorithms." category = "dev" +description = "This package provides 26 stemmers for 25 languages generated from Snowball algorithms." 
+name = "snowballstemmer" optional = false python-versions = "*" +version = "2.0.0" [[package]] -name = "sphinx" -version = "3.1.2" -description = "Python documentation generator" category = "dev" +description = "Python documentation generator" +name = "sphinx" optional = false python-versions = ">=3.5" - -[package.extras] -docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=3.5.0)", "flake8-import-order", "mypy (>=0.780)", "docutils-stubs"] -test = ["pytest", "pytest-cov", "html5lib", "typed-ast", "cython"] +version = "3.1.2" [package.dependencies] +Jinja2 = ">=2.3" +Pygments = ">=2.0" alabaster = ">=0.7,<0.8" babel = ">=1.3" colorama = ">=0.3.5" docutils = ">=0.12" imagesize = "*" -Jinja2 = ">=2.3" packaging = "*" -Pygments = ">=2.0" requests = ">=2.5.0" setuptools = "*" snowballstemmer = ">=1.1" @@ -649,110 +644,119 @@ sphinxcontrib-jsmath = "*" sphinxcontrib-qthelp = "*" sphinxcontrib-serializinghtml = "*" +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["flake8 (>=3.5.0)", "flake8-import-order", "mypy (>=0.780)", "docutils-stubs"] +test = ["pytest", "pytest-cov", "html5lib", "typed-ast", "cython"] + [[package]] -name = "sphinx-rtd-theme" -version = "0.5.0" -description = "Read the Docs theme for Sphinx" category = "dev" +description = "Read the Docs theme for Sphinx" +name = "sphinx-rtd-theme" optional = false python-versions = "*" - -[package.extras] -dev = ["transifex-client", "sphinxcontrib-httpdomain", "bump2version"] +version = "0.5.0" [package.dependencies] sphinx = "*" +[package.extras] +dev = ["transifex-client", "sphinxcontrib-httpdomain", "bump2version"] + [[package]] -name = "sphinxcontrib-applehelp" -version = "1.0.2" -description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" category = "dev" +description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +name = "sphinxcontrib-applehelp" optional = false python-versions = ">=3.5" +version = "1.0.2" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] -name = "sphinxcontrib-devhelp" -version = "1.0.2" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." category = "dev" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." +name = "sphinxcontrib-devhelp" optional = false python-versions = ">=3.5" +version = "1.0.2" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] -name = "sphinxcontrib-htmlhelp" -version = "1.0.3" -description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" category = "dev" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +name = "sphinxcontrib-htmlhelp" optional = false python-versions = ">=3.5" +version = "1.0.3" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest", "html5lib"] [[package]] -name = "sphinxcontrib-jsmath" -version = "1.0.1" -description = "A sphinx extension which renders display math in HTML via JavaScript" category = "dev" +description = "A sphinx extension which renders display math in HTML via JavaScript" +name = "sphinxcontrib-jsmath" optional = false python-versions = ">=3.5" +version = "1.0.1" [package.extras] test = ["pytest", "flake8", "mypy"] [[package]] -name = "sphinxcontrib-qthelp" -version = "1.0.3" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." 
category = "dev" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +name = "sphinxcontrib-qthelp" optional = false python-versions = ">=3.5" +version = "1.0.3" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] -name = "sphinxcontrib-serializinghtml" -version = "1.1.4" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." category = "dev" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." +name = "sphinxcontrib-serializinghtml" optional = false python-versions = ">=3.5" +version = "1.1.4" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] -name = "toml" -version = "0.10.1" -description = "Python Library for Tom's Obvious, Minimal Language" category = "main" +description = "Python Library for Tom's Obvious, Minimal Language" +name = "toml" optional = false python-versions = "*" +version = "0.10.1" [[package]] -name = "tox" -version = "3.20.0" -description = "tox is a generic virtualenv management and test command line tool" category = "dev" +description = "Style preserving TOML library" +name = "tomlkit" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "0.7.0" -[package.extras] -docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"] -testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psutil (>=5.6.1)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "pytest-xdist (>=1.22.2)"] +[[package]] +category = "dev" +description = "tox is a generic virtualenv management and test command line tool" +name = "tox" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +version = "3.20.1" [package.dependencies] colorama = ">=0.4.1" @@ -765,32 +769,36 @@ toml = ">=0.9.4" virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,<20.0.3 || >20.0.3,<20.0.4 || >20.0.4,<20.0.5 || >20.0.5,<20.0.6 || >20.0.6,<20.0.7 || >20.0.7" [package.dependencies.importlib-metadata] -version = ">=0.12,<2" python = "<3.8" +version = ">=0.12,<3" + +[package.extras] +docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"] +testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psutil (>=5.6.1)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "pytest-xdist (>=1.22.2)"] [[package]] -name = "typed-ast" -version = "1.4.1" -description = "a fork of Python 2 and 3 ast modules with type comment support" category = "main" +description = "a fork of Python 2 and 3 ast modules with type comment support" +name = "typed-ast" optional = false python-versions = "*" +version = "1.4.1" [[package]] -name = "typing-extensions" -version = "3.7.4.3" -description = "Backported and Experimental Type Hints for Python 3.5+" category = "main" +description = "Backported and Experimental Type Hints for Python 3.5+" +name = "typing-extensions" optional = false python-versions = "*" +version = "3.7.4.3" [[package]] -name = "urllib3" -version = "1.25.10" -description = "HTTP library with thread-safe connection pooling, file post, and 
more." category = "dev" +description = "HTTP library with thread-safe connection pooling, file post, and more." +name = "urllib3" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +version = "1.25.10" [package.extras] brotli = ["brotlipy (>=0.6.0)"] @@ -798,16 +806,12 @@ secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0 socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] [[package]] -name = "virtualenv" -version = "20.0.31" -description = "Virtual Python Environment builder" category = "dev" +description = "Virtual Python Environment builder" +name = "virtualenv" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" - -[package.extras] -docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] -testing = ["coverage (>=5)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "pytest-xdist (>=1.31.0)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] +version = "20.0.35" [package.dependencies] appdirs = ">=1.4.3,<2" @@ -816,41 +820,45 @@ filelock = ">=3.0.0,<4" six = ">=1.9.0,<2" [package.dependencies.importlib-metadata] -version = ">=0.12,<2" python = "<3.8" +version = ">=0.12,<3" [package.dependencies.importlib-resources] -version = ">=1.0" python = "<3.7" +version = ">=1.0" + +[package.extras] +docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "pytest-xdist (>=1.31.0)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] [[package]] -name = "wcwidth" -version = "0.2.5" -description = "Measures the displayed width of unicode strings in a terminal" category = "dev" +description = "Measures the displayed width of unicode strings in a terminal" +name = "wcwidth" optional = false python-versions = "*" +version = "0.2.5" [[package]] -name = "zipp" -version = "3.2.0" -description = "Backport of pathlib-compatible object wrapper for zip files" category = "dev" +description = "Backport of pathlib-compatible object wrapper for zip files" +marker = "python_version < \"3.8\"" +name = "zipp" optional = false python-versions = ">=3.6" -marker = "python_version < \"3.8\"" +version = "3.3.1" [package.extras] docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] testing = ["pytest (>=3.5,<3.7.3 || >3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "jaraco.test (>=3.2.0)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [extras] -compiler = ["black", "jinja2", "protobuf"] +compiler = ["black", "jinja2"] [metadata] +content-hash = "e43baf152424b7496430ae5b5fdf2f0680fb6cc1c7abac9a0184dd851003762c" lock-version = "1.0" python-versions = "^3.6" -content-hash = "a8e4b87ff691fd815c51637f7fd24606c9ccdfbb7d9466a01c844fdfc58922c0" [metadata.files] alabaster = [ @@ -880,7 +888,6 @@ backports-datetime-fromisoformat = [ {file = "backports-datetime-fromisoformat-1.0.0.tar.gz", hash = "sha256:9577a2a9486cd7383a5f58b23bb8e81cf0821dbbc0eb7c87d3fa198c1df40f5c"}, ] black = [ - {file = "black-20.8b1-py3-none-any.whl", hash = 
"sha256:70b62ef1527c950db59062cda342ea224d772abdf6adc58b86a45421bab20a6b"}, {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] blessings = [ @@ -905,8 +912,8 @@ click = [ {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, ] colorama = [ - {file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"}, - {file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"}, + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] coverage = [ {file = "coverage-5.3-cp27-cp27m-macosx_10_13_intel.whl", hash = "sha256:bd3166bb3b111e76a4f8e2980fa1addf2920a4ca9b2b8ca36a3bc3dedc618270"}, @@ -1090,8 +1097,8 @@ imagesize = [ {file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"}, ] importlib-metadata = [ - {file = "importlib_metadata-1.7.0-py2.py3-none-any.whl", hash = "sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070"}, - {file = "importlib_metadata-1.7.0.tar.gz", hash = "sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83"}, + {file = "importlib_metadata-2.0.0-py2.py3-none-any.whl", hash = "sha256:cefa1a2f919b866c5beb7c9f7b0ebb4061f30a8a9bf16d609b000e2dfaceb9c3"}, + {file = "importlib_metadata-2.0.0.tar.gz", hash = "sha256:77a540690e24b0305878c37ffd421785a6f7e53c8b5720d211b211de8d0e95da"}, ] importlib-resources = [ {file = "importlib_resources-3.0.0-py2.py3-none-any.whl", hash = "sha256:d028f66b66c0d5732dae86ba4276999855e162a749c92620a38c1d779ed138a7"}, @@ -1141,23 +1148,39 @@ more-itertools = [ {file = "more_itertools-8.5.0-py3-none-any.whl", hash = "sha256:9b30f12df9393f0d28af9210ff8efe48d10c94f73e5daf886f10c4b0b0b4f03c"}, ] multidict = [ - {file = "multidict-4.7.6-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:275ca32383bc5d1894b6975bb4ca6a7ff16ab76fa622967625baeebcf8079000"}, - {file = "multidict-4.7.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:1ece5a3369835c20ed57adadc663400b5525904e53bae59ec854a5d36b39b21a"}, - {file = "multidict-4.7.6-cp35-cp35m-win32.whl", hash = "sha256:5141c13374e6b25fe6bf092052ab55c0c03d21bd66c94a0e3ae371d3e4d865a5"}, - {file = "multidict-4.7.6-cp35-cp35m-win_amd64.whl", hash = "sha256:9456e90649005ad40558f4cf51dbb842e32807df75146c6d940b6f5abb4a78f3"}, - {file = "multidict-4.7.6-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:e0d072ae0f2a179c375f67e3da300b47e1a83293c554450b29c900e50afaae87"}, - {file = "multidict-4.7.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3750f2205b800aac4bb03b5ae48025a64e474d2c6cc79547988ba1d4122a09e2"}, - {file = "multidict-4.7.6-cp36-cp36m-win32.whl", hash = "sha256:f07acae137b71af3bb548bd8da720956a3bc9f9a0b87733e0899226a2317aeb7"}, - {file = "multidict-4.7.6-cp36-cp36m-win_amd64.whl", hash = "sha256:6513728873f4326999429a8b00fc7ceddb2509b01d5fd3f3be7881a257b8d463"}, - {file = "multidict-4.7.6-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:feed85993dbdb1dbc29102f50bca65bdc68f2c0c8d352468c25b54874f23c39d"}, - {file = "multidict-4.7.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fcfbb44c59af3f8ea984de67ec7c306f618a3ec771c2843804069917a8f2e255"}, - {file = 
"multidict-4.7.6-cp37-cp37m-win32.whl", hash = "sha256:4538273208e7294b2659b1602490f4ed3ab1c8cf9dbdd817e0e9db8e64be2507"}, - {file = "multidict-4.7.6-cp37-cp37m-win_amd64.whl", hash = "sha256:d14842362ed4cf63751648e7672f7174c9818459d169231d03c56e84daf90b7c"}, - {file = "multidict-4.7.6-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:c026fe9a05130e44157b98fea3ab12969e5b60691a276150db9eda71710cd10b"}, - {file = "multidict-4.7.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:51a4d210404ac61d32dada00a50ea7ba412e6ea945bbe992e4d7a595276d2ec7"}, - {file = "multidict-4.7.6-cp38-cp38-win32.whl", hash = "sha256:5cf311a0f5ef80fe73e4f4c0f0998ec08f954a6ec72b746f3c179e37de1d210d"}, - {file = "multidict-4.7.6-cp38-cp38-win_amd64.whl", hash = "sha256:7388d2ef3c55a8ba80da62ecfafa06a1c097c18032a501ffd4cabbc52d7f2b19"}, - {file = "multidict-4.7.6.tar.gz", hash = "sha256:fbb77a75e529021e7c4a8d4e823d88ef4d23674a202be4f5addffc72cbb91430"}, + {file = "multidict-5.0.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:11dcf2366da487d5b9de1d4b2055308c7ed9bde1a52973d07a89b42252af9ebe"}, + {file = "multidict-5.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:167bd8e6351b57525bbf2d524ca5a133834699a2fcb090aad0c330c6017f3f3e"}, + {file = "multidict-5.0.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:60af726c19a899ed49bbb276e062f08b80222cb6b9feda44b59a128b5ff52966"}, + {file = "multidict-5.0.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:32f0a904859a6274d7edcbb01752c8ae9c633fb7d1c131771ff5afd32eceee42"}, + {file = "multidict-5.0.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:7561a804093ea4c879e06b5d3d18a64a0bc21004bade3540a4b31342b528d326"}, + {file = "multidict-5.0.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:786ad04ad954afe9927a1b3049aa58722e182160fe2fcac7ad7f35c93595d4f6"}, + {file = "multidict-5.0.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:02b2ea2bb1277a970d238c5c783023790ca94d386c657aeeb165259950951cc6"}, + {file = "multidict-5.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:932964cf57c0e59d1f3fb63ff342440cf8aaa75bf0dbcbad902c084024975380"}, + {file = "multidict-5.0.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:c692087913e12b801a759e25a626c3d311f416252dfba2ecdfd254583427949f"}, + {file = "multidict-5.0.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:cda06c99cd6f4a36571bb38e560a6fcfb1f136521e57f612e0bc31957b1cd4bd"}, + {file = "multidict-5.0.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:84e4943d8725659942e7401bdf31780acde9cfdaf6fe977ff1449fffafcd93a9"}, + {file = "multidict-5.0.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:bbec545b8f82536bc50afa9abce832176ed250aa22bfff3e20b3463fb90b0b35"}, + {file = "multidict-5.0.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:c339b7d73c0ea5c551025617bb8aa1c00a0111187b6545f48836343e6cfbe6a0"}, + {file = "multidict-5.0.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:0ce1d956ecbf112d49915ebc2f29c03e35fe451fb5e9f491edf9a2f4395ee0af"}, + {file = "multidict-5.0.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:39713fa2c687e0d0e709ad751a8a709ac051fcdc7f2048f6fd09365dd03c83eb"}, + {file = "multidict-5.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:0ffdb4b897b15df798c0a5939a0323ccf703f2bae551dfab4eb1af7fbab38ead"}, + {file = "multidict-5.0.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:4ef76ce695da72e176f6a51867afb3bf300ce16ba2597824caaef625af5906a9"}, + {file = "multidict-5.0.0-cp38-cp38-manylinux1_i686.whl", hash = 
"sha256:711289412b78cf41a21457f4c806890466013d62bf4296bd3d71fad73ff8a581"}, + {file = "multidict-5.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2b0cfc33f53e5c8226f7d7c4e126fa0780f970ef1e96f7c6353da7d01eafe490"}, + {file = "multidict-5.0.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:28b5913e5b6fef273e5d4230b61f33c8a51c3ce5f44a88582dee6b5ca5c9977b"}, + {file = "multidict-5.0.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:a5eca9ee72b372199c2b76672145e47d3c829889eefa2037b1f3018f54e5f67d"}, + {file = "multidict-5.0.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:20eaf1c279c543e07c164e4ac02151488829177da06607efa7ccfecd71b21e79"}, + {file = "multidict-5.0.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ec8bc0ab00c76c4260a201eaa58812ea8b1b7fde0ecf5f9c9365a182bd4691ed"}, + {file = "multidict-5.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:aad240c1429e386af38a2d6761032f0bec5177fed7c5f582c835c99fff135b5c"}, + {file = "multidict-5.0.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:52b5b51281d760197ce3db063c166fdb626e01c8e428a325aa37198ce31c9565"}, + {file = "multidict-5.0.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:5263359a03368985b5296b7a73363d761a269848081879ba04a6e4bfd0cf4a78"}, + {file = "multidict-5.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:620c39b1270b68e194023ad471b6a54bdb517bb48515939c9829b56c783504a3"}, + {file = "multidict-5.0.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:2739d1d9237835122b27d88990849ecf41ef670e0fcb876159edd236ca9ef40f"}, + {file = "multidict-5.0.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:62f6e66931fb87e9016e7c1cc806ab4f3e39392fd502362df3cac888078b27cb"}, + {file = "multidict-5.0.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:5dd303b545b62f9d2b14f99fbdb84c109a20e64a57f6a192fe6aebcb6263b59d"}, + {file = "multidict-5.0.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:60b12d14bc122ba2dae1e4460a891b3a96e73d815b4365675f6ec0a1725416a5"}, + {file = "multidict-5.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:79dc3e6e7ce853fb7ed17c134e01fcb0d0c826b33201aa2a910fb27ed75c2eb9"}, + {file = "multidict-5.0.0.tar.gz", hash = "sha256:1b324444299c3a49b601b1bf621fc21704e29066f6ac2b7d7e4034a4a18662a1"}, ] mypy = [ {file = "mypy-0.770-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:a34b577cdf6313bf24755f7a0e3f3c326d5c1f4fe7422d1d06498eb25ad0c600"}, @@ -1196,8 +1219,8 @@ pluggy = [ {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, ] poethepoet = [ - {file = "poethepoet-0.5.0-py3-none-any.whl", hash = "sha256:dfc7d6666156005ed160b1652270a1fd7acc5329c60d7be170326b33d88b6be2"}, - {file = "poethepoet-0.5.0.tar.gz", hash = "sha256:ef65313de2703f7beead7b05b10668a6c5f216e6c434746d889047562402b619"}, + {file = "poethepoet-0.9.0-py3-none-any.whl", hash = "sha256:6b1df9a755c297d5b10749cd4713924055b41edfa62055770c8bd6b5da8e2c69"}, + {file = "poethepoet-0.9.0.tar.gz", hash = "sha256:ab2263fd7be81d16d38a4b4fe42a055d992d04421e61cad36498b1e4bd8ee2a6"}, ] protobuf = [ {file = "protobuf-3.13.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9c2e63c1743cba12737169c447374fab3dfeb18111a460a8c1a000e35836b18c"}, @@ -1251,27 +1274,33 @@ pytz = [ {file = "pytz-2020.1.tar.gz", hash = "sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048"}, ] regex = [ - {file = "regex-2020.7.14-cp27-cp27m-win32.whl", hash = "sha256:e46d13f38cfcbb79bfdb2964b0fe12561fe633caf964a77a5f8d4e45fe5d2ef7"}, - {file = "regex-2020.7.14-cp27-cp27m-win_amd64.whl", hash = 
"sha256:6961548bba529cac7c07af2fd4d527c5b91bb8fe18995fed6044ac22b3d14644"}, - {file = "regex-2020.7.14-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c50a724d136ec10d920661f1442e4a8b010a4fe5aebd65e0c2241ea41dbe93dc"}, - {file = "regex-2020.7.14-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8a51f2c6d1f884e98846a0a9021ff6861bdb98457879f412fdc2b42d14494067"}, - {file = "regex-2020.7.14-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:9c568495e35599625f7b999774e29e8d6b01a6fb684d77dee1f56d41b11b40cd"}, - {file = "regex-2020.7.14-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:51178c738d559a2d1071ce0b0f56e57eb315bcf8f7d4cf127674b533e3101f88"}, - {file = "regex-2020.7.14-cp36-cp36m-win32.whl", hash = "sha256:9eddaafb3c48e0900690c1727fba226c4804b8e6127ea409689c3bb492d06de4"}, - {file = "regex-2020.7.14-cp36-cp36m-win_amd64.whl", hash = "sha256:14a53646369157baa0499513f96091eb70382eb50b2c82393d17d7ec81b7b85f"}, - {file = "regex-2020.7.14-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:1269fef3167bb52631ad4fa7dd27bf635d5a0790b8e6222065d42e91bede4162"}, - {file = "regex-2020.7.14-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d0a5095d52b90ff38592bbdc2644f17c6d495762edf47d876049cfd2968fbccf"}, - {file = "regex-2020.7.14-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:4c037fd14c5f4e308b8370b447b469ca10e69427966527edcab07f52d88388f7"}, - {file = "regex-2020.7.14-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:bc3d98f621898b4a9bc7fecc00513eec8f40b5b83913d74ccb445f037d58cd89"}, - {file = "regex-2020.7.14-cp37-cp37m-win32.whl", hash = "sha256:46bac5ca10fb748d6c55843a931855e2727a7a22584f302dd9bb1506e69f83f6"}, - {file = "regex-2020.7.14-cp37-cp37m-win_amd64.whl", hash = "sha256:0dc64ee3f33cd7899f79a8d788abfbec168410be356ed9bd30bbd3f0a23a7204"}, - {file = "regex-2020.7.14-cp38-cp38-manylinux1_i686.whl", hash = "sha256:5ea81ea3dbd6767873c611687141ec7b06ed8bab43f68fad5b7be184a920dc99"}, - {file = "regex-2020.7.14-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:bbb332d45b32df41200380fff14712cb6093b61bd142272a10b16778c418e98e"}, - {file = "regex-2020.7.14-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:c11d6033115dc4887c456565303f540c44197f4fc1a2bfb192224a301534888e"}, - {file = "regex-2020.7.14-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:75aaa27aa521a182824d89e5ab0a1d16ca207318a6b65042b046053cfc8ed07a"}, - {file = "regex-2020.7.14-cp38-cp38-win32.whl", hash = "sha256:d6cff2276e502b86a25fd10c2a96973fdb45c7a977dca2138d661417f3728341"}, - {file = "regex-2020.7.14-cp38-cp38-win_amd64.whl", hash = "sha256:7a2dd66d2d4df34fa82c9dc85657c5e019b87932019947faece7983f2089a840"}, - {file = "regex-2020.7.14.tar.gz", hash = "sha256:3a3af27a8d23143c49a3420efe5b3f8cf1a48c6fc8bc6856b03f638abc1833bb"}, + {file = "regex-2020.10.15-cp27-cp27m-win32.whl", hash = "sha256:e935a166a5f4c02afe3f7e4ce92ce5a786f75c6caa0c4ce09c922541d74b77e8"}, + {file = "regex-2020.10.15-cp27-cp27m-win_amd64.whl", hash = "sha256:d81be22d5d462b96a2aa5c512f741255ba182995efb0114e5a946fe254148df1"}, + {file = "regex-2020.10.15-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:6d4cdb6c20e752426b2e569128488c5046fb1b16b1beadaceea9815c36da0847"}, + {file = "regex-2020.10.15-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:25991861c6fef1e5fd0a01283cf5658c5e7f7aa644128e85243bc75304e91530"}, + {file = "regex-2020.10.15-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:6e9f72e0ee49f7d7be395bfa29e9533f0507a882e1e6bf302c0a204c65b742bf"}, + {file = "regex-2020.10.15-cp36-cp36m-manylinux2010_x86_64.whl", hash = 
"sha256:578ac6379e65eb8e6a85299b306c966c852712c834dc7eef0ba78d07a828f67b"}, + {file = "regex-2020.10.15-cp36-cp36m-win32.whl", hash = "sha256:65b6b018b07e9b3b6a05c2c3bb7710ed66132b4df41926c243887c4f1ff303d5"}, + {file = "regex-2020.10.15-cp36-cp36m-win_amd64.whl", hash = "sha256:2f60ba5c33f00ce9be29a140e6f812e39880df8ba9cb92ad333f0016dbc30306"}, + {file = "regex-2020.10.15-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:5d4a3221f37520bb337b64a0632716e61b26c8ae6aaffceeeb7ad69c009c404b"}, + {file = "regex-2020.10.15-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:26b85672275d8c7a9d4ff93dbc4954f5146efdb2ecec89ad1de49439984dea14"}, + {file = "regex-2020.10.15-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:828618f3c3439c5e6ef8621e7c885ca561bbaaba90ddbb6a7dfd9e1ec8341103"}, + {file = "regex-2020.10.15-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:aef23aed9d4017cc74d37f703d57ce254efb4c8a6a01905f40f539220348abf9"}, + {file = "regex-2020.10.15-cp37-cp37m-win32.whl", hash = "sha256:6c72adb85adecd4522a488a751e465842cdd2a5606b65464b9168bf029a54272"}, + {file = "regex-2020.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:ef3a55b16c6450574734db92e0a3aca283290889934a23f7498eaf417e3af9f0"}, + {file = "regex-2020.10.15-cp38-cp38-manylinux1_i686.whl", hash = "sha256:8958befc139ac4e3f16d44ec386c490ea2121ed8322f4956f83dd9cad8e9b922"}, + {file = "regex-2020.10.15-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:3dd952f3f8dc01b72c0cf05b3631e05c50ac65ddd2afdf26551638e97502107b"}, + {file = "regex-2020.10.15-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:608d6c05452c0e6cc49d4d7407b4767963f19c4d2230fa70b7201732eedc84f2"}, + {file = "regex-2020.10.15-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:02686a2f0b1a4be0facdd0d3ad4dc6c23acaa0f38fb5470d892ae88584ba705c"}, + {file = "regex-2020.10.15-cp38-cp38-win32.whl", hash = "sha256:137da580d1e6302484be3ef41d72cf5c3ad22a076070051b7449c0e13ab2c482"}, + {file = "regex-2020.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:20cdd7e1736f4f61a5161aa30d05ac108ab8efc3133df5eb70fe1e6a23ea1ca6"}, + {file = "regex-2020.10.15-cp39-cp39-manylinux1_i686.whl", hash = "sha256:85b733a1ef2b2e7001aff0e204a842f50ad699c061856a214e48cfb16ace7d0c"}, + {file = "regex-2020.10.15-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:af1f5e997dd1ee71fb6eb4a0fb6921bf7a778f4b62f1f7ef0d7445ecce9155d6"}, + {file = "regex-2020.10.15-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:b5eeaf4b5ef38fab225429478caf71f44d4a0b44d39a1aa4d4422cda23a9821b"}, + {file = "regex-2020.10.15-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:aeac7c9397480450016bc4a840eefbfa8ca68afc1e90648aa6efbfe699e5d3bb"}, + {file = "regex-2020.10.15-cp39-cp39-win32.whl", hash = "sha256:698f8a5a2815e1663d9895830a063098ae2f8f2655ae4fdc5dfa2b1f52b90087"}, + {file = "regex-2020.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:a51e51eecdac39a50ede4aeed86dbef4776e3b73347d31d6ad0bc9648ba36049"}, + {file = "regex-2020.10.15.tar.gz", hash = "sha256:d25f5cca0f3af6d425c9496953445bf5b288bb5b71afc2b8308ad194b714c159"}, ] requests = [ {file = "requests-2.24.0-py2.py3-none-any.whl", hash = "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898"}, @@ -1321,9 +1350,13 @@ toml = [ {file = "toml-0.10.1-py2.py3-none-any.whl", hash = "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"}, {file = "toml-0.10.1.tar.gz", hash = "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f"}, ] +tomlkit = [ + {file = "tomlkit-0.7.0-py2.py3-none-any.whl", hash = 
"sha256:6babbd33b17d5c9691896b0e68159215a9387ebfa938aa3ac42f4a4beeb2b831"}, + {file = "tomlkit-0.7.0.tar.gz", hash = "sha256:ac57f29693fab3e309ea789252fcce3061e19110085aa31af5446ca749325618"}, +] tox = [ - {file = "tox-3.20.0-py2.py3-none-any.whl", hash = "sha256:e6318f404aff16522ff5211c88cab82b39af121735a443674e4e2e65f4e4637b"}, - {file = "tox-3.20.0.tar.gz", hash = "sha256:eb629ddc60e8542fd4a1956b2462e3b8771d49f1ff630cecceacaa0fbfb7605a"}, + {file = "tox-3.20.1-py2.py3-none-any.whl", hash = "sha256:42ce19ce5dc2f6d6b1fdc5666c476e1f1e2897359b47e0aa3a5b774f335d57c2"}, + {file = "tox-3.20.1.tar.gz", hash = "sha256:4321052bfe28f9d85082341ca8e233e3ea901fdd14dab8a5d3fbd810269fbaf6"}, ] typed-ast = [ {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"}, @@ -1358,14 +1391,14 @@ urllib3 = [ {file = "urllib3-1.25.10.tar.gz", hash = "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a"}, ] virtualenv = [ - {file = "virtualenv-20.0.31-py2.py3-none-any.whl", hash = "sha256:e0305af10299a7fb0d69393d8f04cb2965dda9351140d11ac8db4e5e3970451b"}, - {file = "virtualenv-20.0.31.tar.gz", hash = "sha256:43add625c53c596d38f971a465553f6318decc39d98512bc100fa1b1e839c8dc"}, + {file = "virtualenv-20.0.35-py2.py3-none-any.whl", hash = "sha256:0ebc633426d7468664067309842c81edab11ae97fcaf27e8ad7f5748c89b431b"}, + {file = "virtualenv-20.0.35.tar.gz", hash = "sha256:2a72c80fa2ad8f4e2985c06e6fc12c3d60d060e410572f553c90619b0f6efaf3"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, ] zipp = [ - {file = "zipp-3.2.0-py3-none-any.whl", hash = "sha256:43f4fa8d8bb313e65d8323a3952ef8756bf40f9a5c3ea7334be23ee4ec8278b6"}, - {file = "zipp-3.2.0.tar.gz", hash = "sha256:b52f22895f4cfce194bc8172f3819ee8de7540aa6d873535a8668b730b8b411f"}, + {file = "zipp-3.3.1-py3-none-any.whl", hash = "sha256:16522f69653f0d67be90e8baa4a46d66389145b734345d68a257da53df670903"}, + {file = "zipp-3.3.1.tar.gz", hash = "sha256:c1532a8030c32fd52ff6a288d855fe7adef5823ba1d26a29a68fd6314aa72baa"}, ] diff --git a/pyproject.toml b/pyproject.toml index b5be6bdd6..e84aac404 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,6 @@ black = { version = ">=19.3b0", optional = true } dataclasses = { version = "^0.7", python = ">=3.6, <3.7" } grpclib = "^0.4.1" jinja2 = { version = "^2.11.2", optional = true } -protobuf = { version = "^3.12.2", optional = true } [tool.poetry.dev-dependencies] black = "^20.8b1" @@ -26,7 +25,7 @@ bpython = "^0.19" grpcio-tools = "^1.30.0" jinja2 = "^2.11.2" mypy = "^0.770" -poethepoet = "^0.5.0" +poethepoet = ">=0.9.0" protobuf = "^3.12.2" pytest = "^5.4.2" pytest-asyncio = "^0.12.0" @@ -41,7 +40,7 @@ asv = "^0.4.2" protoc-gen-python_betterproto = "betterproto.plugin:main" [tool.poetry.extras] -compiler = ["black", "jinja2", "protobuf"] +compiler = ["black", "jinja2"] [tool.poe.tasks] # Dev workflow tasks @@ -53,6 +52,16 @@ clean = { cmd = "rm -rf .coverage .mypy_cache .pytest_cache dist betterpro docs = { cmd = "sphinx-build docs docs/build", help = "Build the sphinx docs"} bench = { shell = "asv run master^! && asv run HEAD^! && asv compare master HEAD", help = "Benchmark current commit vs. 
master branch"} +generate_lib.cmd = """ +protoc + --plugin=protoc-gen-custom=src/betterproto/plugin/main.py + --custom_opt=INCLUDE_GOOGLE + --custom_out=src/betterproto/lib + -I /usr/local/include/ + /usr/local/include/google/protobuf/**/*.proto +""" +generate_lib.help = "Regenerate the types in betterproto.lib.google" + # CI tasks full-test = { shell = "poe generate && tox", help = "Run tests with multiple pythons" } check-style = { cmd = "black . --check --diff --exclude tests/output_", help = "Check if code style is correct"} From 230721f9b8eb738c210ac4c99522de6b634e1132 Mon Sep 17 00:00:00 2001 From: Nat Noordanus Date: Mon, 19 Oct 2020 18:22:33 +0200 Subject: [PATCH 09/46] Fix template bug resulting in empty __post_init__ methods --- .../lib/google/protobuf/__init__.py | 156 ------------------ .../lib/google/protobuf/compiler/__init__.py | 12 -- src/betterproto/plugin/models.py | 4 + src/betterproto/templates/template.py.j2 | 4 +- 4 files changed, 6 insertions(+), 170 deletions(-) diff --git a/src/betterproto/lib/google/protobuf/__init__.py b/src/betterproto/lib/google/protobuf/__init__.py index d1f21f947..6dd90c49c 100644 --- a/src/betterproto/lib/google/protobuf/__init__.py +++ b/src/betterproto/lib/google/protobuf/__init__.py @@ -172,9 +172,6 @@ class Timestamp(betterproto.Message): # count forward in time. Must be from 0 to 999,999,999 inclusive. nanos: int = betterproto.int32_field(2) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class FieldMask(betterproto.Message): @@ -259,9 +256,6 @@ class FieldMask(betterproto.Message): # The set of field mask paths. paths: List[str] = betterproto.string_field(1) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class SourceContext(betterproto.Message): @@ -274,9 +268,6 @@ class SourceContext(betterproto.Message): # protobuf element. For example: `"google/protobuf/source_context.proto"`. file_name: str = betterproto.string_field(1) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class Any(betterproto.Message): @@ -335,9 +326,6 @@ class Any(betterproto.Message): # Must be a valid serialized protocol buffer of the above specified type. value: bytes = betterproto.bytes_field(2) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class Type(betterproto.Message): @@ -356,9 +344,6 @@ class Type(betterproto.Message): # The source syntax. syntax: "Syntax" = betterproto.enum_field(6) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class Field(betterproto.Message): @@ -387,9 +372,6 @@ class Field(betterproto.Message): # The string value of the default value of this field. Proto2 syntax only. default_value: str = betterproto.string_field(11) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class Enum(betterproto.Message): @@ -408,9 +390,6 @@ class Enum(betterproto.Message): # The source syntax. syntax: "Syntax" = betterproto.enum_field(5) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class EnumValue(betterproto.Message): @@ -423,9 +402,6 @@ class EnumValue(betterproto.Message): # Protocol buffer options. 
options: List["Option"] = betterproto.message_field(3) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class Option(betterproto.Message): @@ -445,9 +421,6 @@ class Option(betterproto.Message): # value using the google.protobuf.Int32Value type. value: "Any" = betterproto.message_field(2) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class Api(betterproto.Message): @@ -491,9 +464,6 @@ class Api(betterproto.Message): # The source syntax of the service. syntax: "Syntax" = betterproto.enum_field(7) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class Method(betterproto.Message): @@ -514,9 +484,6 @@ class Method(betterproto.Message): # The source syntax of this method. syntax: "Syntax" = betterproto.enum_field(7) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class Mixin(betterproto.Message): @@ -562,9 +529,6 @@ class Mixin(betterproto.Message): # If non-empty specifies a path under which inherited HTTP paths are rooted. root: str = betterproto.string_field(2) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class Duration(betterproto.Message): @@ -610,9 +574,6 @@ class Duration(betterproto.Message): # `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive. nanos: int = betterproto.int32_field(2) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class Struct(betterproto.Message): @@ -630,9 +591,6 @@ class Struct(betterproto.Message): 1, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE ) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class Value(betterproto.Message): @@ -657,9 +615,6 @@ class Value(betterproto.Message): # Represents a repeated `Value`. list_value: "ListValue" = betterproto.message_field(6, group="kind") - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class ListValue(betterproto.Message): @@ -671,9 +626,6 @@ class ListValue(betterproto.Message): # Repeated field of dynamically typed values. values: List["Value"] = betterproto.message_field(1) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class DoubleValue(betterproto.Message): @@ -685,9 +637,6 @@ class DoubleValue(betterproto.Message): # The double value. value: float = betterproto.double_field(1) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class FloatValue(betterproto.Message): @@ -699,9 +648,6 @@ class FloatValue(betterproto.Message): # The float value. value: float = betterproto.float_field(1) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class Int64Value(betterproto.Message): @@ -713,9 +659,6 @@ class Int64Value(betterproto.Message): # The int64 value. value: int = betterproto.int64_field(1) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class UInt64Value(betterproto.Message): @@ -727,9 +670,6 @@ class UInt64Value(betterproto.Message): # The uint64 value. value: int = betterproto.uint64_field(1) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class Int32Value(betterproto.Message): @@ -741,9 +681,6 @@ class Int32Value(betterproto.Message): # The int32 value. 
value: int = betterproto.int32_field(1) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class UInt32Value(betterproto.Message): @@ -755,9 +692,6 @@ class UInt32Value(betterproto.Message): # The uint32 value. value: int = betterproto.uint32_field(1) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class BoolValue(betterproto.Message): @@ -769,9 +703,6 @@ class BoolValue(betterproto.Message): # The bool value. value: bool = betterproto.bool_field(1) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class StringValue(betterproto.Message): @@ -783,9 +714,6 @@ class StringValue(betterproto.Message): # The string value. value: str = betterproto.string_field(1) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class BytesValue(betterproto.Message): @@ -797,9 +725,6 @@ class BytesValue(betterproto.Message): # The bytes value. value: bytes = betterproto.bytes_field(1) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class Empty(betterproto.Message): @@ -813,9 +738,6 @@ class Empty(betterproto.Message): pass - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class FileDescriptorSet(betterproto.Message): @@ -826,9 +748,6 @@ class FileDescriptorSet(betterproto.Message): file: List["FileDescriptorProto"] = betterproto.message_field(1) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class FileDescriptorProto(betterproto.Message): @@ -858,9 +777,6 @@ class FileDescriptorProto(betterproto.Message): # "proto3". syntax: str = betterproto.string_field(12) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class DescriptorProto(betterproto.Message): @@ -881,9 +797,6 @@ class DescriptorProto(betterproto.Message): # A given name may only be reserved once. reserved_name: List[str] = betterproto.string_field(10) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class DescriptorProtoExtensionRange(betterproto.Message): @@ -891,9 +804,6 @@ class DescriptorProtoExtensionRange(betterproto.Message): end: int = betterproto.int32_field(2) options: "ExtensionRangeOptions" = betterproto.message_field(3) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class DescriptorProtoReservedRange(betterproto.Message): @@ -906,18 +816,12 @@ class DescriptorProtoReservedRange(betterproto.Message): start: int = betterproto.int32_field(1) end: int = betterproto.int32_field(2) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class ExtensionRangeOptions(betterproto.Message): # The parser stores options it doesn't recognize here. See above. 
uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class FieldDescriptorProto(betterproto.Message): @@ -952,9 +856,6 @@ class FieldDescriptorProto(betterproto.Message): json_name: str = betterproto.string_field(10) options: "FieldOptions" = betterproto.message_field(8) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class OneofDescriptorProto(betterproto.Message): @@ -963,9 +864,6 @@ class OneofDescriptorProto(betterproto.Message): name: str = betterproto.string_field(1) options: "OneofOptions" = betterproto.message_field(2) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class EnumDescriptorProto(betterproto.Message): @@ -984,9 +882,6 @@ class EnumDescriptorProto(betterproto.Message): # be reserved once. reserved_name: List[str] = betterproto.string_field(5) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class EnumDescriptorProtoEnumReservedRange(betterproto.Message): @@ -1000,9 +895,6 @@ class EnumDescriptorProtoEnumReservedRange(betterproto.Message): start: int = betterproto.int32_field(1) end: int = betterproto.int32_field(2) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class EnumValueDescriptorProto(betterproto.Message): @@ -1012,9 +904,6 @@ class EnumValueDescriptorProto(betterproto.Message): number: int = betterproto.int32_field(2) options: "EnumValueOptions" = betterproto.message_field(3) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class ServiceDescriptorProto(betterproto.Message): @@ -1024,9 +913,6 @@ class ServiceDescriptorProto(betterproto.Message): method: List["MethodDescriptorProto"] = betterproto.message_field(2) options: "ServiceOptions" = betterproto.message_field(3) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class MethodDescriptorProto(betterproto.Message): @@ -1043,9 +929,6 @@ class MethodDescriptorProto(betterproto.Message): # Identifies if server streams multiple server messages server_streaming: bool = betterproto.bool_field(6) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class FileOptions(betterproto.Message): @@ -1174,9 +1057,6 @@ class MessageOptions(betterproto.Message): # The parser stores options it doesn't recognize here. See above. uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class FieldOptions(betterproto.Message): @@ -1234,18 +1114,12 @@ class FieldOptions(betterproto.Message): # The parser stores options it doesn't recognize here. See above. uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class OneofOptions(betterproto.Message): # The parser stores options it doesn't recognize here. See above. uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class EnumOptions(betterproto.Message): @@ -1259,9 +1133,6 @@ class EnumOptions(betterproto.Message): # The parser stores options it doesn't recognize here. 
See above. uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class EnumValueOptions(betterproto.Message): @@ -1273,9 +1144,6 @@ class EnumValueOptions(betterproto.Message): # The parser stores options it doesn't recognize here. See above. uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class ServiceOptions(betterproto.Message): @@ -1286,9 +1154,6 @@ class ServiceOptions(betterproto.Message): # The parser stores options it doesn't recognize here. See above. uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class MethodOptions(betterproto.Message): @@ -1300,9 +1165,6 @@ class MethodOptions(betterproto.Message): # The parser stores options it doesn't recognize here. See above. uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class UninterpretedOption(betterproto.Message): @@ -1325,9 +1187,6 @@ class UninterpretedOption(betterproto.Message): string_value: bytes = betterproto.bytes_field(7) aggregate_value: str = betterproto.string_field(8) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class UninterpretedOptionNamePart(betterproto.Message): @@ -1342,9 +1201,6 @@ class UninterpretedOptionNamePart(betterproto.Message): name_part: str = betterproto.string_field(1) is_extension: bool = betterproto.bool_field(2) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class SourceCodeInfo(betterproto.Message): @@ -1384,9 +1240,6 @@ class SourceCodeInfo(betterproto.Message): # as more types of locations could be recorded in the future. location: List["SourceCodeInfoLocation"] = betterproto.message_field(1) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class SourceCodeInfoLocation(betterproto.Message): @@ -1434,9 +1287,6 @@ class SourceCodeInfoLocation(betterproto.Message): trailing_comments: str = betterproto.string_field(4) leading_detached_comments: List[str] = betterproto.string_field(6) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class GeneratedCodeInfo(betterproto.Message): @@ -1450,9 +1300,6 @@ class GeneratedCodeInfo(betterproto.Message): # its generating .proto file. annotation: List["GeneratedCodeInfoAnnotation"] = betterproto.message_field(1) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class GeneratedCodeInfoAnnotation(betterproto.Message): @@ -1468,6 +1315,3 @@ class GeneratedCodeInfoAnnotation(betterproto.Message): # the identified offset. The end offset should be one past the last relevant # byte (so the length of the text = end - begin). 
end: int = betterproto.int32_field(4) - - def __post_init__(self) -> None: - super().__post_init__() diff --git a/src/betterproto/lib/google/protobuf/compiler/__init__.py b/src/betterproto/lib/google/protobuf/compiler/__init__.py index cfe185b36..e5c8b60f6 100644 --- a/src/betterproto/lib/google/protobuf/compiler/__init__.py +++ b/src/betterproto/lib/google/protobuf/compiler/__init__.py @@ -18,9 +18,6 @@ class Version(betterproto.Message): # be empty for mainline stable releases. suffix: str = betterproto.string_field(4) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class CodeGeneratorRequest(betterproto.Message): @@ -49,9 +46,6 @@ class CodeGeneratorRequest(betterproto.Message): # The version number of protocol compiler. compiler_version: "Version" = betterproto.message_field(3) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class CodeGeneratorResponse(betterproto.Message): @@ -67,9 +61,6 @@ class CodeGeneratorResponse(betterproto.Message): error: str = betterproto.string_field(1) file: List["CodeGeneratorResponseFile"] = betterproto.message_field(15) - def __post_init__(self) -> None: - super().__post_init__() - @dataclass(eq=False, repr=False) class CodeGeneratorResponseFile(betterproto.Message): @@ -118,8 +109,5 @@ class CodeGeneratorResponseFile(betterproto.Message): # The file contents. content: str = betterproto.string_field(15) - def __post_init__(self) -> None: - super().__post_init__() - import betterproto.lib.google.protobuf as betterproto_lib_google_protobuf diff --git a/src/betterproto/plugin/models.py b/src/betterproto/plugin/models.py index 98fd9067b..a08457fc6 100644 --- a/src/betterproto/plugin/models.py +++ b/src/betterproto/plugin/models.py @@ -312,6 +312,10 @@ def deprecated_fields(self) -> Iterator[str]: if f.deprecated: yield f.py_name + @property + def has_deprecated_fields(self) -> bool: + return any(self.deprecated_fields) + def is_map( proto_field_obj: FieldDescriptorProto, parent_message: DescriptorProto diff --git a/src/betterproto/templates/template.py.j2 b/src/betterproto/templates/template.py.j2 index 753d340c7..47367170c 100644 --- a/src/betterproto/templates/template.py.j2 +++ b/src/betterproto/templates/template.py.j2 @@ -53,7 +53,7 @@ class {{ message.py_name }}(betterproto.Message): pass {% endif %} - {% if message.deprecated or message.deprecated_fields %} + {% if message.deprecated or message.has_deprecated_fields %} def __post_init__(self) -> None: {% if message.deprecated %} warnings.warn("{{ message.py_name }} is deprecated", DeprecationWarning) @@ -82,7 +82,7 @@ class {{ service.py_name }}Stub(betterproto.ServiceStub): Optional[{{ field.annotation }}] {%- else -%} {{ field.annotation }} - {%- endif -%} = + {%- endif -%} = {%- if field.py_name not in method.mutable_default_args -%} {{ field.default_value_string }} {%- else -%} From de9c0a032c1ea1b60d1195497cb7c63f0ce30189 Mon Sep 17 00:00:00 2001 From: James Date: Mon, 19 Oct 2020 18:39:22 +0100 Subject: [PATCH 10/46] Implement command line interface --- src/betterproto/__main__.py | 132 ++++++++++++++++++++++++++++++++++++ 1 file changed, 132 insertions(+) create mode 100644 src/betterproto/__main__.py diff --git a/src/betterproto/__main__.py b/src/betterproto/__main__.py new file mode 100644 index 000000000..425341b66 --- /dev/null +++ b/src/betterproto/__main__.py @@ -0,0 +1,132 @@ +import subprocess +import sys +from functools import partial +from pathlib import Path +from typing import Tuple + 
+import black +import click + +DEFAULT_OUT = Path.cwd() / "betterproto_out" +VERBOSE = False +try: + import grpc +except ImportError: + USE_PROTOC = True +else: + USE_PROTOC = False + + +out = partial(click.secho, bold=True, err=True) +err = partial(click.secho, fg="red", err=True) + + +def recursive_file_finder(directory: Path) -> Tuple[Path, ...]: + files = set() + for path in directory.iterdir(): + if path.is_file() and path.name.endswith(".proto"): + files.add(path) + elif path.is_dir(): + files.update(recursive_file_finder(path)) + + return tuple(files) + + +def compile_files(*files: Path, output_dir: Path) -> None: + files = [file.as_posix() for file in files] + command = [ + f"--python_betterproto_out={output_dir.as_posix()}", + "-I", + output_dir.parent.as_posix(), + *files, + ] + if USE_PROTOC: + command.insert(0, "protoc") + else: + command.insert(0, "grpc.tools.protoc") + command.insert(0, "-m") + command.insert(0, sys.executable) + + proc = subprocess.Popen( + args=command, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + stdout, stderr = proc.communicate() + stdout = stdout.decode() + stderr = stderr.decode() + + if proc.returncode != 0: + failed_files = "\n".join(f" - {file}" for file in files) + err( + f"{'Protoc' if USE_PROTOC else 'GRPC'} failed to generate outputs for:\n\n" + f"{failed_files}\n\nSee the output for the issue:\n{stderr}" + ) + + if VERBOSE: + out(f"VERBOSE: {stdout}") + + out( + f"Finished generating output for {len(files)}, compiled output should be in " + f"{output_dir.as_posix()}" + ) + + +@click.group() +@click.pass_context +def main(ctx: click.Context): + """The main entry point to all things betterproto""" + if ctx.invoked_subcommand is None: + click.echo(ctx.get_help()) + + +@main.command() +@click.option( + "-v", + "--verbose", + is_flag=True, +) +@click.option( + "-p", + "--protoc", + is_flag=True, + help="Whether or not to use protoc or GRPC to compile the protobufs", + default=USE_PROTOC, +) +@click.option( + "-o", + "--output", + help="The output directory", + default=DEFAULT_OUT.name, + is_eager=True, +) +@click.argument( + "src", + type=click.Path( + exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True + ), + is_eager=True, +) +def compile(verbose: bool, protoc: bool, output: str, src: str): + """The recommended way to compile your protobuf files.""" + directory = Path.cwd().joinpath(src) + files = recursive_file_finder(directory) if directory.is_dir() else (directory,) + if not files: + return out("No files found to compile") + + output = Path.cwd().joinpath(output) + output.mkdir(exist_ok=True) + + # Update constants/flags + globs = globals() + globs["VERBOSE"] = verbose + + return compile_files(*files, output_dir=output) + + +# Decorators aren't handled very well +main: click.Group +compile: click.Command + + +if __name__ == "__main__": + black.patch_click() + main() From 4a4429d05278a9a128667f437a3045f16965ed77 Mon Sep 17 00:00:00 2001 From: James <50501825+Gobot1234@users.noreply.github.com> Date: Mon, 19 Oct 2020 22:26:20 +0100 Subject: [PATCH 11/46] Update docs --- src/betterproto/__init__.py | 280 ++++++++++++++++++++++++++---------- 1 file changed, 201 insertions(+), 79 deletions(-) diff --git a/src/betterproto/__init__.py b/src/betterproto/__init__.py index 4834a0409..60c7f06f9 100644 --- a/src/betterproto/__init__.py +++ b/src/betterproto/__init__.py @@ -26,7 +26,7 @@ from .casing import camel_case, safe_snake_case, snake_case from .grpc.grpclib_client import ServiceStub -if not (sys.version_info.major == 3 
and sys.version_info.minor >= 7): +if sys.version_info[:2] < (3, 7): # Apply backport of datetime.fromisoformat from 3.7 from backports.datetime_fromisoformat import MonkeyPatch @@ -110,7 +110,7 @@ # Protobuf datetimes start at the Unix Epoch in 1970 in UTC. -def datetime_default_gen(): +def datetime_default_gen() -> datetime: return datetime(1970, 1, 1, tzinfo=timezone.utc) @@ -120,8 +120,8 @@ def datetime_default_gen(): class Casing(enum.Enum): """Casing constants for serialization.""" - CAMEL = camel_case - SNAKE = snake_case + CAMEL = camel_case #: A camelCase sterilization function. + SNAKE = snake_case #: A snake_case sterilization function. PLACEHOLDER: Any = object() @@ -249,11 +249,25 @@ def map_field( class Enum(enum.IntEnum): - """Protocol buffers enumeration base class. Acts like `enum.IntEnum`.""" + """ + The base class for protobuf enumerations, all generated enumerations will inherit + from this. Bases :class:`enum.IntEnum`. + """ @classmethod - def from_string(cls, name: str) -> int: - """Return the value which corresponds to the string name.""" + def from_string(cls, name: str) -> "Enum": + """Return the value which corresponds to the string name. + + Parameters + ----------- + name: :class:`str` + The name of the enum member to get + + Raises + ------- + :exc:`ValueError` + The member was not found in the Enum. + """ try: return cls._member_map_[name] except KeyError as e: @@ -301,11 +315,7 @@ def _preprocess_single(proto_type: str, wraps: str, value: Any) -> bytes: return encode_varint(value) elif proto_type in [TYPE_SINT32, TYPE_SINT64]: # Handle zig-zag encoding. - if value >= 0: - value = value << 1 - else: - value = (value << 1) ^ (~0) - return encode_varint(value) + return encode_varint(value << 1 if value >= 0 else (value << 1) ^ (~0)) elif proto_type in FIXED_TYPES: return struct.pack(_pack_fmt(proto_type), value) elif proto_type == TYPE_STRING: @@ -398,15 +408,15 @@ def parse_fields(value: bytes) -> Generator[ParsedField, None, None]: wire_type = num_wire & 0x7 decoded: Any = None - if wire_type == 0: + if wire_type == WIRE_VARINT: decoded, i = decode_varint(value, i) - elif wire_type == 1: + elif wire_type == WIRE_FIXED_64: decoded, i = value[i : i + 8], i + 8 - elif wire_type == 2: + elif wire_type == WIRE_LEN_DELIM: length, i = decode_varint(value, i) decoded = value[i : i + length] i += length - elif wire_type == 5: + elif wire_type == WIRE_FIXED_32: decoded, i = value[i : i + 4], i + 4 yield ParsedField( @@ -415,12 +425,6 @@ def parse_fields(value: bytes) -> Generator[ParsedField, None, None]: class ProtoClassMetadata: - oneof_group_by_field: Dict[str, str] - oneof_field_by_group: Dict[str, Set[dataclasses.Field]] - default_gen: Dict[str, Callable] - cls_by_field: Dict[str, Type] - field_name_by_number: Dict[int, str] - meta_by_field_name: Dict[str, FieldMetadata] __slots__ = ( "oneof_group_by_field", "oneof_field_by_group", @@ -431,6 +435,14 @@ class ProtoClassMetadata: "sorted_field_names", ) + oneof_group_by_field: Dict[str, str] + oneof_field_by_group: Dict[str, Set[dataclasses.Field]] + field_name_by_number: Dict[int, str] + meta_by_field_name: Dict[str, FieldMetadata] + sorted_field_names: Tuple[str, ...] 
+ default_gen: Dict[str, Callable[[], Any]] + cls_by_field: Dict[str, Type] + def __init__(self, cls: Type["Message"]): by_field = {} by_group: Dict[str, Set] = {} @@ -455,23 +467,21 @@ def __init__(self, cls: Type["Message"]): self.field_name_by_number = by_field_number self.meta_by_field_name = by_field_name self.sorted_field_names = tuple( - by_field_number[number] for number in sorted(by_field_number.keys()) + by_field_number[number] for number in sorted(by_field_number) ) - self.default_gen = self._get_default_gen(cls, fields) self.cls_by_field = self._get_cls_by_field(cls, fields) @staticmethod - def _get_default_gen(cls, fields): - default_gen = {} - - for field in fields: - default_gen[field.name] = cls._get_field_default_gen(field) - - return default_gen + def _get_default_gen( + cls: Type["Message"], fields: List[dataclasses.Field] + ) -> Dict[str, Callable[[], Any]]: + return {field.name: cls._get_field_default_gen(field) for field in fields} @staticmethod - def _get_cls_by_field(cls, fields): + def _get_cls_by_field( + cls: Type["Message"], fields: List[dataclasses.Field] + ) -> Dict[str, Type]: field_cls = {} for field in fields: @@ -488,7 +498,7 @@ def _get_cls_by_field(cls, fields): ], bases=(Message,), ) - field_cls[field.name + ".value"] = vt + field_cls[f"{field.name}.value"] = vt else: field_cls[field.name] = cls._cls_for(field) @@ -497,9 +507,19 @@ def _get_cls_by_field(cls, fields): class Message(ABC): """ - A protobuf message base class. Generated code will inherit from this and - register the message fields which get used by the serializers and parsers - to go between Python, binary and JSON protobuf message representations. + The base class for protobuf messages, all generated messages will inherit from + this. This class registers the message fields which are used by the serializers and + parsers to go between the Python, binary and JSON representations of the message. + + .. container:: operations + + .. describe:: bytes(x) + + Calls :meth:`__bytes__`. + + .. describe:: bool(x) + + Calls :meth:`__bool__`. """ _serialized_on_wire: bool @@ -591,6 +611,7 @@ def __setattr__(self, attr: str, value: Any) -> None: super().__setattr__(attr, value) def __bool__(self) -> bool: + """Whether or not the Message has had fields set over its lifetime.""" if not self._serialized_on_wire: return False for field in dataclasses.fields(self): @@ -600,7 +621,7 @@ def __bool__(self) -> bool: return False @property - def _betterproto(self): + def _betterproto(self) -> ProtoClassMetadata: """ Lazy initialize metadata for each protobuf class. It may be initialized multiple times in a multi-threaded environment, @@ -614,7 +635,7 @@ def _betterproto(self): def __bytes__(self) -> bytes: """ - Get the binary encoded Protobuf representation of this instance. + Get the binary encoded Protobuf representation of this message instance. """ output = bytearray() for field_name, meta in self._betterproto.meta_by_field_name.items(): @@ -693,7 +714,20 @@ def __bytes__(self) -> bytes: return bytes(output) # For compatibility with other libraries - SerializeToString = __bytes__ + def SerializeToString(self: T) -> bytes: + """ + Get the binary encoded Protobuf representation of this message instance. + + .. note:: + This is a method for compatibility with other libraries, + you should really use ``bytes(x)``. 
+ + Returns + -------- + :class:`bytes` + The binary encoded Protobuf representation of this message instance + """ + return bytes(self) @classmethod def _type_hint(cls, field_name: str) -> Type: @@ -701,9 +735,8 @@ def _type_hint(cls, field_name: str) -> Type: @classmethod def _type_hints(cls) -> Dict[str, Type]: - module = inspect.getmodule(cls) - type_hints = get_type_hints(cls, vars(module)) - return type_hints + module = sys.modules[cls.__module__] + return get_type_hints(cls, vars(module)) @classmethod def _cls_for(cls, field: dataclasses.Field, index: int = 0) -> Type: @@ -714,7 +747,7 @@ def _cls_for(cls, field: dataclasses.Field, index: int = 0) -> Type: field_cls = field_cls.__args__[index] return field_cls - def _get_field_default(self, field_name): + def _get_field_default(self, field_name: str) -> Any: return self._betterproto.default_gen[field_name]() @classmethod @@ -737,7 +770,7 @@ def _get_field_default_gen(cls, field: dataclasses.Field) -> Any: elif issubclass(t, Enum): # Enums always default to zero. return int - elif t == datetime: + elif t is datetime: # Offsets are relative to 1970-01-01T00:00:00Z return datetime_default_gen else: @@ -797,6 +830,16 @@ def parse(self: T, data: bytes) -> T: """ Parse the binary encoded Protobuf into this message instance. This returns the instance itself and is therefore assignable and chainable. + + Parameters + ----------- + data: :class:`bytes` + The data to parse the protobuf from. + + Returns + -------- + :class:`Message` + The initialized message. """ # Got some data over the wire self._serialized_on_wire = True @@ -847,20 +890,47 @@ def parse(self: T, data: bytes) -> T: # For compatibility with other libraries. @classmethod def FromString(cls: Type[T], data: bytes) -> T: + """ + Parse the binary encoded Protobuf into this message instance. This + returns the instance itself and is therefore assignable and chainable. + + .. note:: + This is a method for compatibility with other libraries, + you should really use :meth:`parse`. + + + Parameters + ----------- + data: :class:`bytes` + The data to parse the protobuf from. + + Returns + -------- + :class:`Message` + The initialized message. + """ return cls().parse(data) def to_dict( self, casing: Casing = Casing.CAMEL, include_default_values: bool = False ) -> Dict[str, Any]: """ - Returns a dict representation of this message instance which can be - used to serialize to e.g. JSON. Defaults to camel casing for - compatibility but can be set to other modes. - - `include_default_values` can be set to `True` to include default - values of fields. E.g. an `int32` type field with `0` value will - not be in returned dict if `include_default_values` is set to - `False`. + Returns a JSON serializable dict representation of this object. + + Parameters + ----------- + casing: :class:`Casing` + The casing to use for key values. Default is :attr:`Casing.CAMEL` for + compatibility purposes. + include_default_values: :class:`bool` + If ``True`` will include the default values of fields. Default is ``False``. + E.g. an ``int32`` field will be included with a value of ``0`` if this is + set to ``True``, otherwise this would be ignored. + + Returns + -------- + Dict[:class:`str`, Any] + The JSON serializable dict representation of this object. 
""" output: Dict[str, Any] = {} field_types = self._type_hints() @@ -904,7 +974,7 @@ def to_dict( ) ): output[cased_name] = value.to_dict(casing, include_default_values) - elif meta.proto_type == "map": + elif meta.proto_type == TYPE_MAP: for k in value: if hasattr(value[k], "to_dict"): value[k] = value[k].to_dict(casing, include_default_values) @@ -947,10 +1017,20 @@ def to_dict( output[cased_name] = value return output - def from_dict(self: T, value: dict) -> T: + def from_dict(self: T, value: Dict[str, Any]) -> T: """ - Parse the key/value pairs in `value` into this message instance. This - returns the instance itself and is therefore assignable and chainable. + Parse the key/value pairs into the current message instance. This returns the + instance itself and is therefore assignable and chainable. + + Parameters + ----------- + value: Dict[:class:`str`, Any] + The dictionary to parse from. + + Returns + -------- + :class:`Message` + The initialized message. """ self._serialized_on_wire = True for key in value: @@ -960,12 +1040,12 @@ def from_dict(self: T, value: dict) -> T: continue if value[key] is not None: - if meta.proto_type == "message": + if meta.proto_type == TYPE_MESSAGE: v = getattr(self, field_name) if isinstance(v, list): cls = self._betterproto.cls_by_field[field_name] - for i in range(len(value[key])): - v.append(cls().from_dict(value[key][i])) + for item in value[key]: + v.append(cls().from_dict(item)) elif isinstance(v, datetime): v = datetime.fromisoformat(value[key].replace("Z", "+00:00")) setattr(self, field_name, v) @@ -980,7 +1060,7 @@ def from_dict(self: T, value: dict) -> T: v.from_dict(value[key]) elif meta.map_types and meta.map_types[1] == TYPE_MESSAGE: v = getattr(self, field_name) - cls = self._betterproto.cls_by_field[field_name + ".value"] + cls = self._betterproto.cls_by_field[f"{field_name}.value"] for k in value[key]: v[k] = cls().from_dict(value[key][k]) else: @@ -1007,45 +1087,87 @@ def from_dict(self: T, value: dict) -> T: return self def to_json(self, indent: Union[None, int, str] = None) -> str: - """Returns the encoded JSON representation of this message instance.""" + """A helper function to parse the message instance into its JSON + representation. + + This is equivalent to:: + + json.dumps(message.to_dict(), indent=indent) + + Parameters + ----------- + indent: Optional[Union[:class:`int`, :class:`str`]] + The indent to pass to :func:`json.dumps`. + + Returns + -------- + :class:`str` + The JSON representation of the message. + """ return json.dumps(self.to_dict(), indent=indent) def from_json(self: T, value: Union[str, bytes]) -> T: - """ - Parse the key/value pairs in `value` into this message instance. This - returns the instance itself and is therefore assignable and chainable. + """A helper function to return the message instance from its JSON + representation. This returns the instance itself and is therefore assignable + and chainable. + + This is equivalent to:: + + return message.from_dict(json.loads(value)) + + Parameters + ----------- + value: Union[:class:`str`, :class:`bytes`] + The value to pass to :func:`json.loads`. + + Returns + -------- + :class:`Message` + The initialized message. """ return self.from_dict(json.loads(value)) def serialized_on_wire(message: Message) -> bool: """ - True if this message was or should be serialized on the wire. This can - be used to detect presence (e.g. optional wrapper message) and is used - internally during parsing/serialization. + If this message was or should be serialized on the wire. 
This can be used to detect + presence (e.g. optional wrapper message) and is used internally during + parsing/serialization. + + Returns + -------- + :class:`bool` + Whether this message was or should be serialized on the wire. """ return message._serialized_on_wire -def which_one_of(message: Message, group_name: str) -> Tuple[str, Any]: - """Return the name and value of a message's one-of field group.""" +def which_one_of(message: Message, group_name: str) -> Tuple[str, Optional[Any]]: + """ + Return the name and value of a message's one-of field group. + + Returns + -------- + Tuple[:class:`str`, Any] + The field name and the value for that field. + """ field_name = message._group_current.get(group_name) if not field_name: - return ("", None) - return (field_name, getattr(message, field_name)) + return "", None + return field_name, getattr(message, field_name) # Circular import workaround: google.protobuf depends on base classes defined above. from .lib.google.protobuf import ( # noqa - Duration, - Timestamp, BoolValue, BytesValue, DoubleValue, + Duration, FloatValue, Int32Value, Int64Value, StringValue, + Timestamp, UInt32Value, UInt64Value, ) @@ -1060,8 +1182,8 @@ def delta_to_json(delta: timedelta) -> str: parts = str(delta.total_seconds()).split(".") if len(parts) > 1: while len(parts[1]) not in [3, 6, 9]: - parts[1] = parts[1] + "0" - return ".".join(parts) + "s" + parts[1] = f"{parts[1]}0" + return f"{'.'.join(parts)}s" class _Timestamp(Timestamp): @@ -1077,15 +1199,15 @@ def timestamp_to_json(dt: datetime) -> str: if (nanos % 1e9) == 0: # If there are 0 fractional digits, the fractional # point '.' should be omitted when serializing. - return result + "Z" + return f"{result}Z" if (nanos % 1e6) == 0: # Serialize 3 fractional digits. - return result + ".%03dZ" % (nanos / 1e6) + return f"{result}.{int(nanos // 1e6) :03d}Z" if (nanos % 1e3) == 0: # Serialize 6 fractional digits. - return result + ".%06dZ" % (nanos / 1e3) + return f"{result}.{int(nanos // 1e3) :06d}Z" # Serialize 9 fractional digits. - return result + ".%09dZ" % nanos + return f"{result}.{nanos:09d}" class _WrappedMessage(Message): From 86d7c3037f182f0675df84f479cc417d26a50be5 Mon Sep 17 00:00:00 2001 From: James <50501825+Gobot1234@users.noreply.github.com> Date: Tue, 20 Oct 2020 17:19:29 +0100 Subject: [PATCH 12/46] Add __bool__ to special members --- docs/api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/api.rst b/docs/api.rst index ceae2d200..9f99b51d8 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -12,7 +12,7 @@ Message .. autoclass:: betterproto.Message :members: - :special-members: __bytes__ + :special-members: __bytes__, __bool__ .. 
autofunction:: betterproto.serialized_on_wire From 5c8e926c980b40a0cfd6a7000dac8fce70bdb96c Mon Sep 17 00:00:00 2001 From: James <50501825+Gobot1234@users.noreply.github.com> Date: Tue, 20 Oct 2020 20:26:17 +0100 Subject: [PATCH 13/46] Update __init__.py --- src/betterproto/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/betterproto/__init__.py b/src/betterproto/__init__.py index 60c7f06f9..f1295955c 100644 --- a/src/betterproto/__init__.py +++ b/src/betterproto/__init__.py @@ -611,7 +611,7 @@ def __setattr__(self, attr: str, value: Any) -> None: super().__setattr__(attr, value) def __bool__(self) -> bool: - """Whether or not the Message has had fields set over its lifetime.""" + """Whether or not the Message has any fields that are non-default.""" if not self._serialized_on_wire: return False for field in dataclasses.fields(self): From f10bec475ca839557712688bac599e2fed8e47c6 Mon Sep 17 00:00:00 2001 From: James <50501825+Gobot1234@users.noreply.github.com> Date: Tue, 27 Oct 2020 21:03:54 +0000 Subject: [PATCH 14/46] Simplify bool --- src/betterproto/__init__.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/src/betterproto/__init__.py b/src/betterproto/__init__.py index f1295955c..df33157b3 100644 --- a/src/betterproto/__init__.py +++ b/src/betterproto/__init__.py @@ -612,13 +612,11 @@ def __setattr__(self, attr: str, value: Any) -> None: def __bool__(self) -> bool: """Whether or not the Message has any fields that are non-default.""" - if not self._serialized_on_wire: - return False - for field in dataclasses.fields(self): - value = self.__raw_get(field.name) - if self._get_field_default(field.name) != value: - return True - return False + return any( + self.__raw_get(field_name) + not in (PLACEHOLDER, self._get_field_default(field_name)) + for field_name in self._betterproto.meta_by_field_name + ) @property def _betterproto(self) -> ProtoClassMetadata: From e0eb2915ecc7abb7491dfddb976bafb35b2f1e85 Mon Sep 17 00:00:00 2001 From: James <50501825+Gobot1234@users.noreply.github.com> Date: Sat, 7 Nov 2020 00:57:24 +0000 Subject: [PATCH 15/46] Fix some typos --- src/betterproto/__main__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/betterproto/__main__.py b/src/betterproto/__main__.py index 425341b66..4bfbc3497 100644 --- a/src/betterproto/__main__.py +++ b/src/betterproto/__main__.py @@ -56,7 +56,7 @@ def compile_files(*files: Path, output_dir: Path) -> None: if proc.returncode != 0: failed_files = "\n".join(f" - {file}" for file in files) - err( + return err( f"{'Protoc' if USE_PROTOC else 'GRPC'} failed to generate outputs for:\n\n" f"{failed_files}\n\nSee the output for the issue:\n{stderr}" ) @@ -65,8 +65,8 @@ def compile_files(*files: Path, output_dir: Path) -> None: out(f"VERBOSE: {stdout}") out( - f"Finished generating output for {len(files)}, compiled output should be in " - f"{output_dir.as_posix()}" + f"Finished generating output for {len(files)} files, compiled output should be " + f"in {output_dir.as_posix()}" ) From e04fcb64246b8cee45e409a7ab3c5e2e14c98bce Mon Sep 17 00:00:00 2001 From: nat Date: Tue, 24 Nov 2020 19:32:06 +0100 Subject: [PATCH 16/46] Tweak __bool__ docstring --- src/betterproto/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/betterproto/__init__.py b/src/betterproto/__init__.py index df33157b3..12469c25b 100644 --- a/src/betterproto/__init__.py +++ b/src/betterproto/__init__.py @@ -611,7 +611,7 @@ def __setattr__(self, attr: str, 
value: Any) -> None: super().__setattr__(attr, value) def __bool__(self) -> bool: - """Whether or not the Message has any fields that are non-default.""" + """True if the Message has any fields with non-default values.""" return any( self.__raw_get(field_name) not in (PLACEHOLDER, self._get_field_default(field_name)) From 53b2bca3fc4e3a16931327ee6c67d1174fa5bf95 Mon Sep 17 00:00:00 2001 From: Nat Noordanus Date: Mon, 19 Oct 2020 18:31:37 +0200 Subject: [PATCH 17/46] Sort the list of sources in generated file headers --- src/betterproto/lib/google/protobuf/__init__.py | 2 +- src/betterproto/plugin/models.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/betterproto/lib/google/protobuf/__init__.py b/src/betterproto/lib/google/protobuf/__init__.py index 6dd90c49c..b361c04ee 100644 --- a/src/betterproto/lib/google/protobuf/__init__.py +++ b/src/betterproto/lib/google/protobuf/__init__.py @@ -1,5 +1,5 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! -# sources: google/protobuf/timestamp.proto, google/protobuf/field_mask.proto, google/protobuf/source_context.proto, google/protobuf/any.proto, google/protobuf/type.proto, google/protobuf/api.proto, google/protobuf/duration.proto, google/protobuf/struct.proto, google/protobuf/wrappers.proto, google/protobuf/empty.proto, google/protobuf/descriptor.proto +# sources: google/protobuf/any.proto, google/protobuf/api.proto, google/protobuf/descriptor.proto, google/protobuf/duration.proto, google/protobuf/empty.proto, google/protobuf/field_mask.proto, google/protobuf/source_context.proto, google/protobuf/struct.proto, google/protobuf/timestamp.proto, google/protobuf/type.proto, google/protobuf/wrappers.proto # plugin: python-betterproto import warnings from dataclasses import dataclass diff --git a/src/betterproto/plugin/models.py b/src/betterproto/plugin/models.py index a08457fc6..deb39a30a 100644 --- a/src/betterproto/plugin/models.py +++ b/src/betterproto/plugin/models.py @@ -251,15 +251,15 @@ def package(self) -> str: return self.package_proto_obj.package @property - def input_filenames(self) -> List[str]: + def input_filenames(self) -> Iterator[str]: """Names of the input files used to build this output. Returns ------- - List[str] + Iterator[str] Names of the input files used to build this output. 
""" - return [f.name for f in self.input_files] + return sorted(f.name for f in self.input_files) @property def python_module_imports(self) -> Set[str]: From 9c4e8d8939f995f00d762007033108c2921c10df Mon Sep 17 00:00:00 2001 From: James Date: Mon, 19 Oct 2020 18:39:22 +0100 Subject: [PATCH 18/46] Implement command line interface --- src/betterproto/__main__.py | 132 ++++++++++++++++++++++++++++++++++++ 1 file changed, 132 insertions(+) create mode 100644 src/betterproto/__main__.py diff --git a/src/betterproto/__main__.py b/src/betterproto/__main__.py new file mode 100644 index 000000000..425341b66 --- /dev/null +++ b/src/betterproto/__main__.py @@ -0,0 +1,132 @@ +import subprocess +import sys +from functools import partial +from pathlib import Path +from typing import Tuple + +import black +import click + +DEFAULT_OUT = Path.cwd() / "betterproto_out" +VERBOSE = False +try: + import grpc +except ImportError: + USE_PROTOC = True +else: + USE_PROTOC = False + + +out = partial(click.secho, bold=True, err=True) +err = partial(click.secho, fg="red", err=True) + + +def recursive_file_finder(directory: Path) -> Tuple[Path, ...]: + files = set() + for path in directory.iterdir(): + if path.is_file() and path.name.endswith(".proto"): + files.add(path) + elif path.is_dir(): + files.update(recursive_file_finder(path)) + + return tuple(files) + + +def compile_files(*files: Path, output_dir: Path) -> None: + files = [file.as_posix() for file in files] + command = [ + f"--python_betterproto_out={output_dir.as_posix()}", + "-I", + output_dir.parent.as_posix(), + *files, + ] + if USE_PROTOC: + command.insert(0, "protoc") + else: + command.insert(0, "grpc.tools.protoc") + command.insert(0, "-m") + command.insert(0, sys.executable) + + proc = subprocess.Popen( + args=command, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + stdout, stderr = proc.communicate() + stdout = stdout.decode() + stderr = stderr.decode() + + if proc.returncode != 0: + failed_files = "\n".join(f" - {file}" for file in files) + err( + f"{'Protoc' if USE_PROTOC else 'GRPC'} failed to generate outputs for:\n\n" + f"{failed_files}\n\nSee the output for the issue:\n{stderr}" + ) + + if VERBOSE: + out(f"VERBOSE: {stdout}") + + out( + f"Finished generating output for {len(files)}, compiled output should be in " + f"{output_dir.as_posix()}" + ) + + +@click.group() +@click.pass_context +def main(ctx: click.Context): + """The main entry point to all things betterproto""" + if ctx.invoked_subcommand is None: + click.echo(ctx.get_help()) + + +@main.command() +@click.option( + "-v", + "--verbose", + is_flag=True, +) +@click.option( + "-p", + "--protoc", + is_flag=True, + help="Whether or not to use protoc or GRPC to compile the protobufs", + default=USE_PROTOC, +) +@click.option( + "-o", + "--output", + help="The output directory", + default=DEFAULT_OUT.name, + is_eager=True, +) +@click.argument( + "src", + type=click.Path( + exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True + ), + is_eager=True, +) +def compile(verbose: bool, protoc: bool, output: str, src: str): + """The recommended way to compile your protobuf files.""" + directory = Path.cwd().joinpath(src) + files = recursive_file_finder(directory) if directory.is_dir() else (directory,) + if not files: + return out("No files found to compile") + + output = Path.cwd().joinpath(output) + output.mkdir(exist_ok=True) + + # Update constants/flags + globs = globals() + globs["VERBOSE"] = verbose + + return compile_files(*files, output_dir=output) + + +# Decorators 
aren't handled very well +main: click.Group +compile: click.Command + + +if __name__ == "__main__": + black.patch_click() + main() From 2e9ec7a2dce8bee970446db1840e237b219962e4 Mon Sep 17 00:00:00 2001 From: James <50501825+Gobot1234@users.noreply.github.com> Date: Sat, 7 Nov 2020 00:57:24 +0000 Subject: [PATCH 19/46] Fix some typos --- src/betterproto/__main__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/betterproto/__main__.py b/src/betterproto/__main__.py index 425341b66..4bfbc3497 100644 --- a/src/betterproto/__main__.py +++ b/src/betterproto/__main__.py @@ -56,7 +56,7 @@ def compile_files(*files: Path, output_dir: Path) -> None: if proc.returncode != 0: failed_files = "\n".join(f" - {file}" for file in files) - err( + return err( f"{'Protoc' if USE_PROTOC else 'GRPC'} failed to generate outputs for:\n\n" f"{failed_files}\n\nSee the output for the issue:\n{stderr}" ) @@ -65,8 +65,8 @@ def compile_files(*files: Path, output_dir: Path) -> None: out(f"VERBOSE: {stdout}") out( - f"Finished generating output for {len(files)}, compiled output should be in " - f"{output_dir.as_posix()}" + f"Finished generating output for {len(files)} files, compiled output should be " + f"in {output_dir.as_posix()}" ) From 4fde7b1b2afd681cbb1d418e5e99ea44b86a302a Mon Sep 17 00:00:00 2001 From: James Date: Mon, 19 Oct 2020 18:39:22 +0100 Subject: [PATCH 20/46] Implement command line interface --- src/betterproto/__main__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/betterproto/__main__.py b/src/betterproto/__main__.py index 4bfbc3497..01713ccd2 100644 --- a/src/betterproto/__main__.py +++ b/src/betterproto/__main__.py @@ -65,8 +65,8 @@ def compile_files(*files: Path, output_dir: Path) -> None: out(f"VERBOSE: {stdout}") out( - f"Finished generating output for {len(files)} files, compiled output should be " - f"in {output_dir.as_posix()}" + f"Finished generating output for {len(files)}, compiled output should be in " + f"{output_dir.as_posix()}" ) From ee40943fc3f2321df2e2fed8caaaaba690b7864e Mon Sep 17 00:00:00 2001 From: James <50501825+Gobot1234@users.noreply.github.com> Date: Sat, 7 Nov 2020 00:57:24 +0000 Subject: [PATCH 21/46] Fix some typos --- src/betterproto/__main__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/betterproto/__main__.py b/src/betterproto/__main__.py index 01713ccd2..4bfbc3497 100644 --- a/src/betterproto/__main__.py +++ b/src/betterproto/__main__.py @@ -65,8 +65,8 @@ def compile_files(*files: Path, output_dir: Path) -> None: out(f"VERBOSE: {stdout}") out( - f"Finished generating output for {len(files)}, compiled output should be in " - f"{output_dir.as_posix()}" + f"Finished generating output for {len(files)} files, compiled output should be " + f"in {output_dir.as_posix()}" ) From e6d1eaab64c9dce171185dc630c270a0954a1bd4 Mon Sep 17 00:00:00 2001 From: gobot1234 Date: Sun, 17 Jan 2021 17:16:02 +0000 Subject: [PATCH 22/46] Initial update --- pyproject.toml | 1 + src/betterproto/__main__.py | 130 +----------------------- src/betterproto/plugin/__init__.py | 45 +++++++++ src/betterproto/plugin/__main__.py | 1 - src/betterproto/plugin/cli/__init__.py | 24 +++++ src/betterproto/plugin/cli/commands.py | 133 +++++++++++++++++++++++++ src/betterproto/plugin/cli/errors.py | 28 ++++++ src/betterproto/plugin/cli/runner.py | 66 ++++++++++++ src/betterproto/plugin/cli/utils.py | 68 +++++++++++++ src/betterproto/plugin/compiler.py | 18 +--- src/betterproto/plugin/main.py | 56 +++-------- 
src/betterproto/plugin/models.py | 43 +++----- src/betterproto/plugin/parser.py | 34 +++---- 13 files changed, 415 insertions(+), 232 deletions(-) create mode 100644 src/betterproto/plugin/cli/__init__.py create mode 100644 src/betterproto/plugin/cli/commands.py create mode 100644 src/betterproto/plugin/cli/errors.py create mode 100644 src/betterproto/plugin/cli/runner.py create mode 100644 src/betterproto/plugin/cli/utils.py diff --git a/pyproject.toml b/pyproject.toml index e84aac404..d8b926aa2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,6 +37,7 @@ sphinx-rtd-theme = "0.5.0" asv = "^0.4.2" [tool.poetry.scripts] +betterproto = "betterproto:__main__.main" protoc-gen-python_betterproto = "betterproto.plugin:main" [tool.poetry.extras] diff --git a/src/betterproto/__main__.py b/src/betterproto/__main__.py index 4bfbc3497..245612cce 100644 --- a/src/betterproto/__main__.py +++ b/src/betterproto/__main__.py @@ -1,132 +1,4 @@ -import subprocess -import sys -from functools import partial -from pathlib import Path -from typing import Tuple - -import black -import click - -DEFAULT_OUT = Path.cwd() / "betterproto_out" -VERBOSE = False -try: - import grpc -except ImportError: - USE_PROTOC = True -else: - USE_PROTOC = False - - -out = partial(click.secho, bold=True, err=True) -err = partial(click.secho, fg="red", err=True) - - -def recursive_file_finder(directory: Path) -> Tuple[Path, ...]: - files = set() - for path in directory.iterdir(): - if path.is_file() and path.name.endswith(".proto"): - files.add(path) - elif path.is_dir(): - files.update(recursive_file_finder(path)) - - return tuple(files) - - -def compile_files(*files: Path, output_dir: Path) -> None: - files = [file.as_posix() for file in files] - command = [ - f"--python_betterproto_out={output_dir.as_posix()}", - "-I", - output_dir.parent.as_posix(), - *files, - ] - if USE_PROTOC: - command.insert(0, "protoc") - else: - command.insert(0, "grpc.tools.protoc") - command.insert(0, "-m") - command.insert(0, sys.executable) - - proc = subprocess.Popen( - args=command, stdout=subprocess.PIPE, stderr=subprocess.PIPE - ) - stdout, stderr = proc.communicate() - stdout = stdout.decode() - stderr = stderr.decode() - - if proc.returncode != 0: - failed_files = "\n".join(f" - {file}" for file in files) - return err( - f"{'Protoc' if USE_PROTOC else 'GRPC'} failed to generate outputs for:\n\n" - f"{failed_files}\n\nSee the output for the issue:\n{stderr}" - ) - - if VERBOSE: - out(f"VERBOSE: {stdout}") - - out( - f"Finished generating output for {len(files)} files, compiled output should be " - f"in {output_dir.as_posix()}" - ) - - -@click.group() -@click.pass_context -def main(ctx: click.Context): - """The main entry point to all things betterproto""" - if ctx.invoked_subcommand is None: - click.echo(ctx.get_help()) - - -@main.command() -@click.option( - "-v", - "--verbose", - is_flag=True, -) -@click.option( - "-p", - "--protoc", - is_flag=True, - help="Whether or not to use protoc or GRPC to compile the protobufs", - default=USE_PROTOC, -) -@click.option( - "-o", - "--output", - help="The output directory", - default=DEFAULT_OUT.name, - is_eager=True, -) -@click.argument( - "src", - type=click.Path( - exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True - ), - is_eager=True, -) -def compile(verbose: bool, protoc: bool, output: str, src: str): - """The recommended way to compile your protobuf files.""" - directory = Path.cwd().joinpath(src) - files = recursive_file_finder(directory) if directory.is_dir() else 
(directory,) - if not files: - return out("No files found to compile") - - output = Path.cwd().joinpath(output) - output.mkdir(exist_ok=True) - - # Update constants/flags - globs = globals() - globs["VERBOSE"] = verbose - - return compile_files(*files, output_dir=output) - - -# Decorators aren't handled very well -main: click.Group -compile: click.Command - +from .plugin.cli import main if __name__ == "__main__": - black.patch_click() main() diff --git a/src/betterproto/plugin/__init__.py b/src/betterproto/plugin/__init__.py index c28a133f2..13a1319e9 100644 --- a/src/betterproto/plugin/__init__.py +++ b/src/betterproto/plugin/__init__.py @@ -1 +1,46 @@ +import sys +from distutils import sysconfig +from pathlib import Path +from types import TracebackType +from typing import Type +import traceback + +IMPORT_ERROR_MESSAGE = ( + "Unable to import `{0.name}` from betterproto plugin! Please ensure that you've installed " + 'betterproto as `pip install "betterproto[compiler]"` so that compiler dependencies are ' + "included." +) + +STDLIB_MODULES = [ + p.with_suffix("").name + for p in Path(sysconfig.get_python_lib(standard_lib=True)).iterdir() +] + + +def import_exception_hook( + type: Type[BaseException], value: ImportError, tb: TracebackType +) -> None: + """Set an exception hook to automatically print: + + "Unable to import `x` from betterproto plugin! Please ensure that you've installed + betterproto as `pip install "betterproto[compiler]"` so that compiler dependencies are + included." + + if the module imported is not found and the exception is raised in this sub module + """ + module = list(traceback.walk_tb(tb))[-1][0].f_globals.get("__name__", "__main__") + if ( + not module.startswith(__name__) + or not isinstance(value, ImportError) + or value.name in STDLIB_MODULES + or value.name.startswith("betterproto") + ): + return sys.__excepthook__(type, value, tb) + + print(f"\033[31m{IMPORT_ERROR_MESSAGE.format(value)}\033[0m", file=sys.stderr) + exit(1) + + +sys.excepthook = import_exception_hook + from .main import main diff --git a/src/betterproto/plugin/__main__.py b/src/betterproto/plugin/__main__.py index bd95daead..5d6a8109e 100644 --- a/src/betterproto/plugin/__main__.py +++ b/src/betterproto/plugin/__main__.py @@ -1,4 +1,3 @@ from .main import main - main() diff --git a/src/betterproto/plugin/cli/__init__.py b/src/betterproto/plugin/cli/__init__.py new file mode 100644 index 000000000..1526c8f68 --- /dev/null +++ b/src/betterproto/plugin/cli/__init__.py @@ -0,0 +1,24 @@ +import asyncio +import os +import platform +from pathlib import Path +from typing import Any, Dict + +from black import DEFAULT_LINE_LENGTH as DEFAULT_LINE_LENGTH # noqa + +try: + import grpc +except ImportError: + USE_PROTOC = True +else: + USE_PROTOC = False + +DEFAULT_OUT = Path.cwd() / "betterproto_out" +VERBOSE = False +ENV: Dict[str, Any] = dict(os.environ) + +from .commands import main +from .runner import compile_protobufs + +if platform.system() == "Windows": + asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy()) diff --git a/src/betterproto/plugin/cli/commands.py b/src/betterproto/plugin/cli/commands.py new file mode 100644 index 000000000..67b045b77 --- /dev/null +++ b/src/betterproto/plugin/cli/commands.py @@ -0,0 +1,133 @@ +import os +import sys +from pathlib import Path +import logging +import click +import rich +from rich.progress import Progress +from rich.syntax import Syntax + +from src.betterproto.plugin.cli import DEFAULT_LINE_LENGTH, DEFAULT_OUT, ENV, USE_PROTOC, VERBOSE 
+from src.betterproto.plugin.cli.errors import ProtobufSyntaxError +from src.betterproto.plugin.cli.runner import compile_protobufs +from src.betterproto.plugin.cli.utils import recursive_file_finder, run_sync +from src.betterproto.plugin.models import monkey_patch_oneof_index + +monkey_patch_oneof_index() + +logger = logging.getLogger('asyncio') +logger.setLevel(logging.DEBUG) +handler = logging.FileHandler(filename='out.log', encoding='utf-8', mode='w') +handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s')) +logger.addHandler(handler) + + +@click.group(context_settings={"help_option_names": ["-h", "--help"]}) +@click.pass_context +def main(ctx: click.Context) -> None: + """The main entry point to all things betterproto""" + if ctx.invoked_subcommand is None: + click.echo(ctx.get_help()) + + +@main.command(context_settings={"help_option_names": ["-h", "--help"]}) +@click.option( + "-v", + "--verbose", + is_flag=True, + default=VERBOSE, +) +@click.option( + "-p", + "--protoc", + is_flag=True, + help="Whether or not to use protoc to compile the protobufs if this is false it will attempt to use grpc instead", + default=USE_PROTOC, +) +@click.option( + "-l", + "--line-length", + type=int, + default=DEFAULT_LINE_LENGTH, +) +@click.option( + "--generate-services", + help="Whether or not to generate servicer stubs", + is_flag=True, + default=True, +) +@click.option( + "-o", + "--output", + help="The output directory", + type=click.Path(file_okay=False, dir_okay=True, allow_dash=True), + default=DEFAULT_OUT.name, + is_eager=True, +) +@click.argument( + "src", + type=click.Path(exists=True, file_okay=True, dir_okay=True, allow_dash=True), + is_eager=True, +) +@run_sync +async def compile( + verbose: bool, + protoc: bool, + line_length: int, + generate_services: bool, + output: str, + src: str, +) -> None: + """The recommended way to compile your protobuf files.""" + + directory = (Path.cwd() / src).resolve() + files = recursive_file_finder(directory) if directory.is_dir() else (directory,) + if not files: + return rich.print("[bold]No files found to compile") + + output = Path.cwd() / output + output.mkdir(exist_ok=True) + + ENV["VERBOSE"] = str(int(verbose)) + ENV["GENERATE_SERVICES"] = str(int(generate_services)) + ENV["USE_PROTOC"] = str(int(protoc and USE_PROTOC)) + ENV["LINE_LENGTH"] = str(line_length) + ENV["USING_BETTERPROTO_CLI"] = str(1) + + try: + await compile_protobufs(*files, output=output) + except ProtobufSyntaxError as exc: + error = Syntax.from_path(str(exc.file).strip(), line_numbers=True, line_range=(0, exc.lineno)) + return rich.print(f"Syntax Error in protobuf file {str(exc.file).strip()}:\n", error, f"{' ' * (exc.offset + 3)}^\n", exc.msg) + except SyntaxError: + failed_files = "\n".join(f" - {file}" for file in files) + return rich.print( + f"[red]{'Protoc' if ENV['USE_PROTOC'] else 'GRPC'} failed to generate outputs for:\n\n" + f"{failed_files}\n\nSee the output for the issue:\n{exc.stderr}", + file=sys.stderr, + ) + + rich.print( + f"[bold]Finished generating output for {len(files)} files, compiled output should be in {output.as_posix()}" + ) + +""" +async def run_cli(port: int) -> None: + + with Progress(transient=True) as progress: # TODO reading and compiling stuff + compiling_progress_bar = progress.add_task( + "[green]Compiling protobufs...", total=total + ) + + async for message in service.get_currently_compiling(): + progress.tasks[0].description = ( + f"[green]Compiling protobufs...\n" + f"Currently compiling 
{message.type.name.lower()}: {message.name}" + ) + progress.update(compiling_progress_bar, advance=1) + rich.print(f"[bold][green]Finished compiling output should be at {round(3)}")""" + +if __name__ == '__main__': + os.getcwd = lambda: "/Users/gobot1234/PycharmProjects/betterproto/tests" + sys.argv = "betterproto compile /Users/gobot1234/PycharmProjects/betterproto/tests/inputs/bool".split() + main() diff --git a/src/betterproto/plugin/cli/errors.py b/src/betterproto/plugin/cli/errors.py new file mode 100644 index 000000000..b5cee0482 --- /dev/null +++ b/src/betterproto/plugin/cli/errors.py @@ -0,0 +1,28 @@ +from pathlib import Path + + +class CLIError(Exception): + """The base class for all exceptions when compiling a file""" + + +class CompilerError(CLIError): + ... + + +class ProtobufSyntaxError(SyntaxError, CompilerError): + """ + Attributes + ---------- + msg: :class:`str` + The message given by protoc e.g. "Expected top-level statement (e.g. "message")." + file: :class:`.Path` + The file that had the syntax error. + lineno: :class:`int` + The line number on which the syntax error occurs. + offset: :class:`int` + The offset along the :attr:`lineno` that the syntax error occurs. + """ + def __init__(self, msg: str, file: Path, lineno: int, offset: int): + text = file.read_text().splitlines()[lineno - 1] + super().__init__(msg, (str(file), lineno, offset, text)) + self.file = file diff --git a/src/betterproto/plugin/cli/runner.py b/src/betterproto/plugin/cli/runner.py new file mode 100644 index 000000000..33496edfe --- /dev/null +++ b/src/betterproto/plugin/cli/runner.py @@ -0,0 +1,66 @@ +import asyncio +import re +from pathlib import Path +from typing import Tuple + +from . import ENV +from .errors import ProtobufSyntaxError, CLIError +from ...lib.google.protobuf.compiler import CodeGeneratorRequest +from ...plugin.parser import generate_code + + +async def compile_protobufs( + *files: Path, output: Path, implementation: str = "betterproto_" +) -> Tuple[str, str]: + """ + A programmatic way to compile protobufs. + + Parameters + ---------- + *files + output + + Returns + ------- + Tuple[:class:`str`, :class:`str`] + A tuple of the ``stdout`` and ``stderr`` from the invocation of protoc. + """ + from . 
import utils # circular import + + command = utils.generate_command(*files, output=output, implementation=implementation) + + process = await asyncio.create_subprocess_shell( + command, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + stdin=asyncio.subprocess.PIPE, + env=ENV, + cwd=Path.cwd() + ) + + if implementation == "betterproto_": + data = await process.stderr.read() # we put code on stderr so we can actually read it + # thank you google :))))) + + request = CodeGeneratorRequest().parse(data) + if request._unknown_fields: + match = re.match(r"(?P.+):(?P\d+):(?P\d+):(?P.*)", data.decode()) + # we had a parsing error + for file in files: + if file.as_posix().endswith(match["filename"]): + raise ProtobufSyntaxError(match["message"].strip(), file, int(match["lineno"]), int(match["offset"])) + raise ProtobufSyntaxError + # Generate code + response = await utils.to_thread(generate_code, request) + for file in response.file: + (output / file.name).resolve().write_text(file.content) + + stdout, stderr = await process.communicate() + + else: + stdout, stderr = await process.communicate() + + if process.returncode != 0: + raise CLIError(stderr.decode()) # bad + + return stdout.decode(), stderr.decode() diff --git a/src/betterproto/plugin/cli/utils.py b/src/betterproto/plugin/cli/utils.py new file mode 100644 index 000000000..e60090727 --- /dev/null +++ b/src/betterproto/plugin/cli/utils.py @@ -0,0 +1,68 @@ +import asyncio +import functools +import sys +from pathlib import Path +from typing import Awaitable, Callable, Tuple, TypeVar, Any + +from . import ENV + +T = TypeVar("T") + + +def recursive_file_finder(directory: Path) -> Tuple[Path, ...]: + files = set() + for path in directory.iterdir(): + if path.is_file() and path.name.endswith(".proto"): + files.add(path) + elif path.is_dir(): + files.update(recursive_file_finder(path)) + + return tuple(files) + + +def generate_command( + *files: Path, output: Path, implementation: str = "betterproto_" +) -> str: + cwd = Path.cwd() + files = [file.relative_to(cwd).as_posix() for file in files] + command = [ + f"--python_{implementation}out={output.as_posix()}", + "-I", + ".", + *files, + ] + if ENV["USE_PROTOC"]: + command.insert(0, "protoc") + else: + command.insert(0, "grpc.tools.protoc") + command.insert(0, "-m") + command.insert(0, sys.executable) + + return " ".join(command) + + +def run_sync(func: Callable[..., Awaitable[T]]) -> Callable[..., T]: + @functools.wraps(func) + def wrapper(*args: Any, **kwargs: Any) -> T: + coro = func(*args, **kwargs) + + if hasattr(asyncio, "run"): + return asyncio.run(coro, debug=True) + + loop = asyncio.new_event_loop() + try: + return loop.run_until_complete(coro) + finally: + loop.close() + + return wrapper + + +if sys.version_info[:2] >= (3, 9): + from asyncio import to_thread +else: + async def to_thread(func: Callable[..., T], *args: Any, **kwargs: Any) -> T: + loop = asyncio.get_event_loop() + # no context vars + partial = functools.partial(func, *args, **kwargs) + return await loop.run_in_executor(None, partial) diff --git a/src/betterproto/plugin/compiler.py b/src/betterproto/plugin/compiler.py index 617a65046..2cba70321 100644 --- a/src/betterproto/plugin/compiler.py +++ b/src/betterproto/plugin/compiler.py @@ -1,19 +1,7 @@ import os.path -try: - # betterproto[compiler] specific dependencies - import black - import jinja2 -except ImportError as err: - print( - "\033[31m" - f"Unable to import `{err.name}` from betterproto plugin! 
" - "Please ensure that you've installed betterproto as " - '`pip install "betterproto[compiler]"` so that compiler dependencies ' - "are included." - "\033[0m" - ) - raise SystemExit(1) +import black +import jinja2 from .models import OutputTemplate @@ -33,5 +21,5 @@ def outputfile_compiler(output_file: OutputTemplate) -> str: return black.format_str( template.render(output_file=output_file), - mode=black.FileMode(target_versions={black.TargetVersion.PY37}), + mode=black.Mode(target_versions={black.TargetVersion.PY37}), ) diff --git a/src/betterproto/plugin/main.py b/src/betterproto/plugin/main.py index e0b2557d2..3672c2455 100755 --- a/src/betterproto/plugin/main.py +++ b/src/betterproto/plugin/main.py @@ -3,13 +3,9 @@ import os import sys -from betterproto.lib.google.protobuf.compiler import ( - CodeGeneratorRequest, - CodeGeneratorResponse, -) - -from betterproto.plugin.parser import generate_code -from betterproto.plugin.models import monkey_patch_oneof_index +from ..lib.google.protobuf.compiler import CodeGeneratorRequest +from .models import monkey_patch_oneof_index +from .parser import generate_code def main() -> None: @@ -17,40 +13,20 @@ def main() -> None: # Read request message from stdin data = sys.stdin.buffer.read() - # Apply Work around for proto2/3 difference in protoc messages - monkey_patch_oneof_index() - - # Parse request - request = CodeGeneratorRequest() - request.parse(data) - - dump_file = os.getenv("BETTERPROTO_DUMP") - if dump_file: - dump_request(dump_file, request) - - # Create response - response = CodeGeneratorResponse() + if os.getenv("USING_BETTERPROTO_CLI"): + # Write the data to stderr for cli + sys.stderr.buffer.write(data) # need to figure out how to potentially lock this + sys.stdout.buffer.write(b"") + return + else: + # Apply Work around for proto2/3 difference in protoc messages + monkey_patch_oneof_index() - # Generate code - generate_code(request, response) + # Parse request + request = CodeGeneratorRequest().parse(data) - # Serialise response message - output = response.SerializeToString() + # Generate code + response = generate_code(request) # Write to stdout - sys.stdout.buffer.write(output) - - -def dump_request(dump_file: str, request: CodeGeneratorRequest) -> None: - """ - For developers: Supports running plugin.py standalone so its possible to debug it. - Run protoc (or generate.py) with BETTERPROTO_DUMP="yourfile.bin" to write the request to a file. - Then run plugin.py from your IDE in debugging mode, and redirect stdin to the file. - """ - with open(str(dump_file), "wb") as fh: - sys.stderr.write(f"\033[31mWriting input from protoc to: {dump_file}\033[0m\n") - fh.write(request.SerializeToString()) - - -if __name__ == "__main__": - main() + sys.stdout.buffer.write(bytes(response)) diff --git a/src/betterproto/plugin/models.py b/src/betterproto/plugin/models.py index deb39a30a..d759a6bb0 100644 --- a/src/betterproto/plugin/models.py +++ b/src/betterproto/plugin/models.py @@ -29,38 +29,13 @@ reference to `A` to `B`'s `fields` attribute. 
""" - -import betterproto -from betterproto import which_one_of -from betterproto.casing import sanitize_name -from betterproto.compile.importing import ( - get_type_reference, - parse_source_type_name, -) -from betterproto.compile.naming import ( - pythonize_class_name, - pythonize_field_name, - pythonize_method_name, -) -from betterproto.lib.google.protobuf import ( - DescriptorProto, - EnumDescriptorProto, - FileDescriptorProto, - MethodDescriptorProto, - Field, - FieldDescriptorProto, - FieldDescriptorProtoType, - FieldDescriptorProtoLabel, -) -from betterproto.lib.google.protobuf.compiler import CodeGeneratorRequest - - import re import textwrap from dataclasses import dataclass, field from typing import Dict, Iterator, List, Optional, Set, Text, Type, Union -import sys +import betterproto +from .. import Message, which_one_of from ..casing import sanitize_name from ..compile.importing import get_type_reference, parse_source_type_name from ..compile.naming import ( @@ -68,7 +43,17 @@ pythonize_field_name, pythonize_method_name, ) - +from ..lib.google.protobuf import ( + DescriptorProto, + EnumDescriptorProto, + Field, + FieldDescriptorProto, + FieldDescriptorProtoLabel, + FieldDescriptorProtoType, + FileDescriptorProto, + MethodDescriptorProto, +) +from ..lib.google.protobuf.compiler import CodeGeneratorRequest # Create a unique placeholder to deal with # https://stackoverflow.com/questions/51575931/class-inheritance-in-python-3-7-dataclasses @@ -168,7 +153,7 @@ class ProtoContentBase: source_file: FileDescriptorProto path: List[int] comment_indent: int = 4 - parent: Union["betterproto.Message", "OutputTemplate"] + parent: Union["Message", "OutputTemplate"] def __post_init__(self) -> None: """Checks that no fake default fields were left as placeholders.""" diff --git a/src/betterproto/plugin/parser.py b/src/betterproto/plugin/parser.py index 4be99773c..fcd468a39 100644 --- a/src/betterproto/plugin/parser.py +++ b/src/betterproto/plugin/parser.py @@ -1,19 +1,19 @@ -from betterproto.lib.google.protobuf import ( +import itertools +import pathlib +import sys +from typing import Iterator, List, Set, Tuple, Union + +from ..lib.google.protobuf import ( DescriptorProto, EnumDescriptorProto, - FieldDescriptorProto, FileDescriptorProto, ServiceDescriptorProto, ) -from betterproto.lib.google.protobuf.compiler import ( +from ..lib.google.protobuf.compiler import ( CodeGeneratorRequest, CodeGeneratorResponse, CodeGeneratorResponseFile, ) -import itertools -import pathlib -import sys -from typing import Iterator, List, Tuple, TYPE_CHECKING, Union from .compiler import outputfile_compiler from .models import ( EnumDefinitionCompiler, @@ -29,17 +29,14 @@ is_oneof, ) -if TYPE_CHECKING: - from google.protobuf.descriptor import Descriptor - def traverse( - proto_file: FieldDescriptorProto, -) -> "itertools.chain[Tuple[Union[str, EnumDescriptorProto], List[int]]]": + proto_file: FileDescriptorProto, +) -> "itertools.chain[Tuple[Union[DescriptorProto, EnumDescriptorProto], List[int]]]": # Todo: Keep information about nested hierarchy def _traverse( - path: List[int], items: List["Descriptor"], prefix="" - ) -> Iterator[Tuple[Union[str, EnumDescriptorProto], List[int]]]: + path: List[int], items: List[Union[DescriptorProto, EnumDescriptorProto]], prefix="" + ) -> Iterator[Tuple[Union[DescriptorProto, EnumDescriptorProto], List[int]]]: for i, item in enumerate(items): # Adjust the name since we flatten the hierarchy. 
# Todo: don't change the name, but include full name in returned tuple @@ -60,9 +57,8 @@ def _traverse( ) -def generate_code( - request: CodeGeneratorRequest, response: CodeGeneratorResponse -) -> None: +def generate_code(request: CodeGeneratorRequest) -> CodeGeneratorResponse: + response = CodeGeneratorResponse() plugin_options = request.parameter.split(",") if request.parameter else [] request_data = PluginRequestCompiler(plugin_request_obj=request) @@ -133,9 +129,11 @@ def generate_code( for output_package_name in sorted(output_paths.union(init_files)): print(f"Writing {output_package_name}", file=sys.stderr) + return response + def read_protobuf_type( - item: DescriptorProto, + item: Union[DescriptorProto, EnumDescriptorProto], path: List[int], source_file: "FileDescriptorProto", output_package: OutputTemplate, From 231dc058c78f43f80aa3177e0f18393665c3e07b Mon Sep 17 00:00:00 2001 From: gobot1234 Date: Mon, 18 Jan 2021 09:50:43 +0000 Subject: [PATCH 23/46] Write files concurrently and general improvements --- src/betterproto/plugin/cli/commands.py | 13 +++--- src/betterproto/plugin/cli/runner.py | 65 +++++++++++++++++++------- src/betterproto/plugin/parser.py | 6 ++- 3 files changed, 58 insertions(+), 26 deletions(-) diff --git a/src/betterproto/plugin/cli/commands.py b/src/betterproto/plugin/cli/commands.py index 67b045b77..94258510a 100644 --- a/src/betterproto/plugin/cli/commands.py +++ b/src/betterproto/plugin/cli/commands.py @@ -7,11 +7,10 @@ from rich.progress import Progress from rich.syntax import Syntax -from src.betterproto.plugin.cli import DEFAULT_LINE_LENGTH, DEFAULT_OUT, ENV, USE_PROTOC, VERBOSE -from src.betterproto.plugin.cli.errors import ProtobufSyntaxError -from src.betterproto.plugin.cli.runner import compile_protobufs -from src.betterproto.plugin.cli.utils import recursive_file_finder, run_sync -from src.betterproto.plugin.models import monkey_patch_oneof_index +from . import DEFAULT_LINE_LENGTH, DEFAULT_OUT, ENV, USE_PROTOC, VERBOSE, utils +from .errors import ProtobufSyntaxError +from .runner import compile_protobufs +from ..models import monkey_patch_oneof_index monkey_patch_oneof_index() @@ -69,7 +68,7 @@ def main(ctx: click.Context) -> None: type=click.Path(exists=True, file_okay=True, dir_okay=True, allow_dash=True), is_eager=True, ) -@run_sync +@utils.run_sync async def compile( verbose: bool, protoc: bool, @@ -81,7 +80,7 @@ async def compile( """The recommended way to compile your protobuf files.""" directory = (Path.cwd() / src).resolve() - files = recursive_file_finder(directory) if directory.is_dir() else (directory,) + files = utils.recursive_file_finder(directory) if directory.is_dir() else (directory,) if not files: return rich.print("[bold]No files found to compile") diff --git a/src/betterproto/plugin/cli/runner.py b/src/betterproto/plugin/cli/runner.py index 33496edfe..eb5fd4943 100644 --- a/src/betterproto/plugin/cli/runner.py +++ b/src/betterproto/plugin/cli/runner.py @@ -1,12 +1,22 @@ import asyncio +import functools import re from pathlib import Path from typing import Tuple +from concurrent.futures import ProcessPoolExecutor -from . import ENV +from . 
import ENV, utils from .errors import ProtobufSyntaxError, CLIError -from ...lib.google.protobuf.compiler import CodeGeneratorRequest -from ...plugin.parser import generate_code +from ...lib.google.protobuf.compiler import ( + CodeGeneratorRequest, + CodeGeneratorResponseFile, +) +from ..parser import generate_code + + +def write_file(output: Path, file: CodeGeneratorResponseFile) -> None: + path = (output / file.name).resolve() + path.write_text(file.content) async def compile_protobufs( @@ -17,17 +27,19 @@ async def compile_protobufs( Parameters ---------- - *files - output + *files: :class:`.Path` + The locations of the protobuf files to be generated. + output: :class:`.Path` + The output directory. Returns ------- Tuple[:class:`str`, :class:`str`] A tuple of the ``stdout`` and ``stderr`` from the invocation of protoc. """ - from . import utils # circular import - - command = utils.generate_command(*files, output=output, implementation=implementation) + command = utils.generate_command( + *files, output=output, implementation=implementation + ) process = await asyncio.create_subprocess_shell( command, @@ -35,30 +47,47 @@ async def compile_protobufs( stderr=asyncio.subprocess.PIPE, stdin=asyncio.subprocess.PIPE, env=ENV, - cwd=Path.cwd() + cwd=Path.cwd(), ) if implementation == "betterproto_": - data = await process.stderr.read() # we put code on stderr so we can actually read it - # thank you google :))))) + data = await process.stderr.read() + # we put code on stderr so we can actually read it thank you google :))))) request = CodeGeneratorRequest().parse(data) + if request._unknown_fields: - match = re.match(r"(?P.+):(?P\d+):(?P\d+):(?P.*)", data.decode()) + match = re.match( + r"(?P.+):(?P\d+):(?P\d+):(?P.*)", + data.decode(), + ) # we had a parsing error for file in files: if file.as_posix().endswith(match["filename"]): - raise ProtobufSyntaxError(match["message"].strip(), file, int(match["lineno"]), int(match["offset"])) + raise ProtobufSyntaxError( + match["message"].strip(), + file, + int(match["lineno"]), + int(match["offset"]), + ) raise ProtobufSyntaxError + # Generate code response = await utils.to_thread(generate_code, request) - for file in response.file: - (output / file.name).resolve().write_text(file.content) - stdout, stderr = await process.communicate() + with ProcessPoolExecutor(max_workers=4) as process_pool: + # write multiple files concurrently + loop = asyncio.get_event_loop() + await asyncio.gather( + *( + loop.run_in_executor( + process_pool, functools.partial(write_file, output, file) + ) + for file in response.file + ) + ) - else: - stdout, stderr = await process.communicate() + stdout, stderr = await process.communicate() if process.returncode != 0: raise CLIError(stderr.decode()) # bad diff --git a/src/betterproto/plugin/parser.py b/src/betterproto/plugin/parser.py index fcd468a39..0ebec381f 100644 --- a/src/betterproto/plugin/parser.py +++ b/src/betterproto/plugin/parser.py @@ -57,7 +57,11 @@ def _traverse( ) -def generate_code(request: CodeGeneratorRequest) -> CodeGeneratorResponse: +def generate_code( + request: CodeGeneratorRequest, + # *, + # generate_services: bool = False, # TODO **KWARGS support for custom options +) -> CodeGeneratorResponse: response = CodeGeneratorResponse() plugin_options = request.parameter.split(",") if request.parameter else [] From 1f5df3d8bd785ba5b8d4b343f652111030fc16a5 Mon Sep 17 00:00:00 2001 From: gobot1234 Date: Tue, 19 Jan 2021 18:42:52 +0000 Subject: [PATCH 24/46] Fix up everything --- pyproject.toml | 2 + 
src/betterproto/_types.py | 1 + src/betterproto/plugin/__init__.py | 2 +- src/betterproto/plugin/cli/__init__.py | 5 +- src/betterproto/plugin/cli/commands.py | 82 ++++++++++------- src/betterproto/plugin/cli/runner.py | 65 +++++++++----- src/betterproto/plugin/cli/utils.py | 48 +++++----- src/betterproto/plugin/compiler.py | 4 +- src/betterproto/plugin/main.py | 16 +++- src/betterproto/plugin/models.py | 1 + src/betterproto/plugin/parser.py | 59 ++++++++----- tests/generate.py | 118 ++++++++++++------------- tests/util.py | 33 +------ 13 files changed, 236 insertions(+), 200 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index d8b926aa2..cb98a0222 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,8 @@ tox = "^3.15.1" sphinx = "3.1.2" sphinx-rtd-theme = "0.5.0" asv = "^0.4.2" +click = "^7.1.2" +rich = "^9.8.2" [tool.poetry.scripts] betterproto = "betterproto:__main__.main" diff --git a/src/betterproto/_types.py b/src/betterproto/_types.py index 26b734406..7b4e9b1a8 100644 --- a/src/betterproto/_types.py +++ b/src/betterproto/_types.py @@ -2,6 +2,7 @@ if TYPE_CHECKING: from grpclib._typing import IProtoMessage + from . import Message # Bound type variable to allow methods to return `self` of subclasses diff --git a/src/betterproto/plugin/__init__.py b/src/betterproto/plugin/__init__.py index 13a1319e9..737522047 100644 --- a/src/betterproto/plugin/__init__.py +++ b/src/betterproto/plugin/__init__.py @@ -1,9 +1,9 @@ import sys +import traceback from distutils import sysconfig from pathlib import Path from types import TracebackType from typing import Type -import traceback IMPORT_ERROR_MESSAGE = ( "Unable to import `{0.name}` from betterproto plugin! Please ensure that you've installed " diff --git a/src/betterproto/plugin/cli/__init__.py b/src/betterproto/plugin/cli/__init__.py index 1526c8f68..c97e59a68 100644 --- a/src/betterproto/plugin/cli/__init__.py +++ b/src/betterproto/plugin/cli/__init__.py @@ -4,10 +4,8 @@ from pathlib import Path from typing import Any, Dict -from black import DEFAULT_LINE_LENGTH as DEFAULT_LINE_LENGTH # noqa - try: - import grpc + import grpc_tools.protoc except ImportError: USE_PROTOC = True else: @@ -16,6 +14,7 @@ DEFAULT_OUT = Path.cwd() / "betterproto_out" VERBOSE = False ENV: Dict[str, Any] = dict(os.environ) +from black import DEFAULT_LINE_LENGTH as DEFAULT_LINE_LENGTH # noqa from .commands import main from .runner import compile_protobufs diff --git a/src/betterproto/plugin/cli/commands.py b/src/betterproto/plugin/cli/commands.py index 94258510a..f4e4a385e 100644 --- a/src/betterproto/plugin/cli/commands.py +++ b/src/betterproto/plugin/cli/commands.py @@ -1,32 +1,35 @@ import os import sys from pathlib import Path -import logging +from typing import Tuple + import click import rich from rich.progress import Progress from rich.syntax import Syntax -from . 
import DEFAULT_LINE_LENGTH, DEFAULT_OUT, ENV, USE_PROTOC, VERBOSE, utils -from .errors import ProtobufSyntaxError -from .runner import compile_protobufs +from betterproto.plugin.cli import ( + DEFAULT_LINE_LENGTH, + DEFAULT_OUT, + ENV, + USE_PROTOC, + VERBOSE, + utils, +) + from ..models import monkey_patch_oneof_index +from .errors import CLIError, ProtobufSyntaxError +from .runner import compile_protobufs monkey_patch_oneof_index() -logger = logging.getLogger('asyncio') -logger.setLevel(logging.DEBUG) -handler = logging.FileHandler(filename='out.log', encoding='utf-8', mode='w') -handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s')) -logger.addHandler(handler) - @click.group(context_settings={"help_option_names": ["-h", "--help"]}) @click.pass_context def main(ctx: click.Context) -> None: """The main entry point to all things betterproto""" if ctx.invoked_subcommand is None: - click.echo(ctx.get_help()) + rich.print(ctx.get_help()) @main.command(context_settings={"help_option_names": ["-h", "--help"]}) @@ -40,7 +43,8 @@ def main(ctx: click.Context) -> None: "-p", "--protoc", is_flag=True, - help="Whether or not to use protoc to compile the protobufs if this is false it will attempt to use grpc instead", + help="Whether or not to use protoc to compile the protobufs if this is false" + "it will attempt to use grpc instead", default=USE_PROTOC, ) @click.option( @@ -61,12 +65,12 @@ def main(ctx: click.Context) -> None: help="The output directory", type=click.Path(file_okay=False, dir_okay=True, allow_dash=True), default=DEFAULT_OUT.name, - is_eager=True, ) @click.argument( "src", type=click.Path(exists=True, file_okay=True, dir_okay=True, allow_dash=True), is_eager=True, + nargs=-1, ) @utils.run_sync async def compile( @@ -75,41 +79,59 @@ async def compile( line_length: int, generate_services: bool, output: str, - src: str, + src: Tuple[str, ...], ) -> None: """The recommended way to compile your protobuf files.""" + if len(src) != 1: + return rich.print( + "[red]Currently can't handle more than one source this is just for a " + "nicer invocation of help" + ) - directory = (Path.cwd() / src).resolve() - files = utils.recursive_file_finder(directory) if directory.is_dir() else (directory,) + files = utils.get_files(src[0]) if not files: return rich.print("[bold]No files found to compile") output = Path.cwd() / output output.mkdir(exist_ok=True) - ENV["VERBOSE"] = str(int(verbose)) - ENV["GENERATE_SERVICES"] = str(int(generate_services)) - ENV["USE_PROTOC"] = str(int(protoc and USE_PROTOC)) - ENV["LINE_LENGTH"] = str(line_length) - ENV["USING_BETTERPROTO_CLI"] = str(1) + ENV["USING_BETTERPROTO_CLI"] = "true" try: - await compile_protobufs(*files, output=output) + await compile_protobufs( + *files, + output=output, + verbose=verbose, + use_protoc=protoc, + generate_services=generate_services, + line_length=line_length, + ) except ProtobufSyntaxError as exc: - error = Syntax.from_path(str(exc.file).strip(), line_numbers=True, line_range=(0, exc.lineno)) - return rich.print(f"Syntax Error in protobuf file {str(exc.file).strip()}:\n", error, f"{' ' * (exc.offset + 3)}^\n", exc.msg) - except SyntaxError: + error = Syntax( + exc.file.read_text(), + lexer_name="proto", + line_numbers=True, + line_range=(max(exc.lineno - 5, 0), exc.lineno), + ) + # I'd like to switch to .from_path but it appears to be bugged and doesnt pick up syntax + return rich.print( + f"[red]File {str(exc.file).strip()}:\n", + error, + f"[red]{' ' * (exc.offset + 3)}^\n" + f"[red]SyntaxError: 
{exc.msg}", + ) + except CLIError as exc: failed_files = "\n".join(f" - {file}" for file in files) return rich.print( - f"[red]{'Protoc' if ENV['USE_PROTOC'] else 'GRPC'} failed to generate outputs for:\n\n" - f"{failed_files}\n\nSee the output for the issue:\n{exc.stderr}", - file=sys.stderr, + f"[red]{'Protoc' if protoc else 'GRPC'} failed to generate outputs for:\n\n" + f"{failed_files}\n\nSee the output for the issue:\n{exc.args[0]}[red]", ) rich.print( - f"[bold]Finished generating output for {len(files)} files, compiled output should be in {output.as_posix()}" + f"[bold]Finished generating output for {len(files)} files, output is in [link]{output.as_posix()}" ) + """ async def run_cli(port: int) -> None: @@ -126,7 +148,7 @@ async def run_cli(port: int) -> None: progress.update(compiling_progress_bar, advance=1) rich.print(f"[bold][green]Finished compiling output should be at {round(3)}")""" -if __name__ == '__main__': +if __name__ == "__main__": os.getcwd = lambda: "/Users/gobot1234/PycharmProjects/betterproto/tests" sys.argv = "betterproto compile /Users/gobot1234/PycharmProjects/betterproto/tests/inputs/bool".split() main() diff --git a/src/betterproto/plugin/cli/runner.py b/src/betterproto/plugin/cli/runner.py index eb5fd4943..78592e688 100644 --- a/src/betterproto/plugin/cli/runner.py +++ b/src/betterproto/plugin/cli/runner.py @@ -1,17 +1,17 @@ import asyncio import functools import re -from pathlib import Path -from typing import Tuple from concurrent.futures import ProcessPoolExecutor +from pathlib import Path +from typing import Any, NoReturn, Tuple -from . import ENV, utils -from .errors import ProtobufSyntaxError, CLIError from ...lib.google.protobuf.compiler import ( CodeGeneratorRequest, CodeGeneratorResponseFile, ) from ..parser import generate_code +from . import ENV, USE_PROTOC, utils +from .errors import CLIError, ProtobufSyntaxError def write_file(output: Path, file: CodeGeneratorResponseFile) -> None: @@ -19,8 +19,31 @@ def write_file(output: Path, file: CodeGeneratorResponseFile) -> None: path.write_text(file.content) +def handle_error(data: str, files: Tuple[Path, ...]) -> NoReturn: + match = re.match( + r"(?P.+):(?P\d+):(?P\d+):(?P.*)", + data, + ) + if match is None: + raise CLIError(data.strip()) + + for file in files: + if file.as_posix().endswith(match["filename"]): + raise ProtobufSyntaxError( + match["message"].strip(), + file, + int(match["lineno"]), + int(match["offset"]), + ) + raise ProtobufSyntaxError + + async def compile_protobufs( - *files: Path, output: Path, implementation: str = "betterproto_" + *files: Path, + output: Path, + use_protoc: bool = USE_PROTOC, + implementation: str = "betterproto_", + **kwargs: Any, ) -> Tuple[str, str]: """ A programmatic way to compile protobufs. @@ -31,6 +54,8 @@ async def compile_protobufs( The locations of the protobuf files to be generated. output: :class:`.Path` The output directory. + **kwargs: + The **kwargs to pass to generate_code. Returns ------- @@ -38,7 +63,7 @@ async def compile_protobufs( A tuple of the ``stdout`` and ``stderr`` from the invocation of protoc. 
""" command = utils.generate_command( - *files, output=output, implementation=implementation + *files, output=output, use_protoc=use_protoc, implementation=implementation ) process = await asyncio.create_subprocess_shell( @@ -47,33 +72,27 @@ async def compile_protobufs( stderr=asyncio.subprocess.PIPE, stdin=asyncio.subprocess.PIPE, env=ENV, - cwd=Path.cwd(), ) if implementation == "betterproto_": data = await process.stderr.read() # we put code on stderr so we can actually read it thank you google :))))) - request = CodeGeneratorRequest().parse(data) + try: + request = CodeGeneratorRequest().parse(data) + except Exception: + handle_error(data.decode(), files) if request._unknown_fields: - match = re.match( - r"(?P.+):(?P\d+):(?P\d+):(?P.*)", - data.decode(), - ) - # we had a parsing error - for file in files: - if file.as_posix().endswith(match["filename"]): - raise ProtobufSyntaxError( - match["message"].strip(), - file, - int(match["lineno"]), - int(match["offset"]), - ) - raise ProtobufSyntaxError + try: + handle_error(data.decode(), files) + except UnicodeError: + raise CLIError( + 'Try running "poetry generate_lib" to try and fix this, if that doesn\'t work protoc broke' + ) # Generate code - response = await utils.to_thread(generate_code, request) + response = await utils.to_thread(generate_code, request, **kwargs) with ProcessPoolExecutor(max_workers=4) as process_pool: # write multiple files concurrently diff --git a/src/betterproto/plugin/cli/utils.py b/src/betterproto/plugin/cli/utils.py index e60090727..3017a0df1 100644 --- a/src/betterproto/plugin/cli/utils.py +++ b/src/betterproto/plugin/cli/utils.py @@ -1,53 +1,57 @@ import asyncio import functools +import platform import sys from pathlib import Path -from typing import Awaitable, Callable, Tuple, TypeVar, Any +from typing import Any, Awaitable, Callable, List, TypeVar -from . import ENV +from . 
import USE_PROTOC T = TypeVar("T") -def recursive_file_finder(directory: Path) -> Tuple[Path, ...]: - files = set() - for path in directory.iterdir(): - if path.is_file() and path.name.endswith(".proto"): - files.add(path) - elif path.is_dir(): - files.update(recursive_file_finder(path)) - - return tuple(files) +def get_files(src: str) -> List[Path]: + """Return a list of files ready for :func:`generate_command`""" + path = Path(src) + if not path.is_absolute(): + path = (Path.cwd() / src).resolve() + if path.is_dir(): + return [p for p in path.iterdir() if p.suffix == ".proto"] + return [path] def generate_command( - *files: Path, output: Path, implementation: str = "betterproto_" + *files: Path, output: Path, use_protoc: bool = USE_PROTOC, implementation: str = "betterproto_" ) -> str: - cwd = Path.cwd() - files = [file.relative_to(cwd).as_posix() for file in files] + command = [ + f"--proto_path={files[0].parent.as_posix()}", f"--python_{implementation}out={output.as_posix()}", - "-I", - ".", - *files, + *[file.as_posix() for file in files], ] - if ENV["USE_PROTOC"]: + if use_protoc: command.insert(0, "protoc") else: - command.insert(0, "grpc.tools.protoc") - command.insert(0, "-m") - command.insert(0, sys.executable) + command = [ + sys.executable, + "-m", + "grpc_tools.protoc", + *command, + ] return " ".join(command) def run_sync(func: Callable[..., Awaitable[T]]) -> Callable[..., T]: + if platform.system() == "Windows": + asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy()) + @functools.wraps(func) def wrapper(*args: Any, **kwargs: Any) -> T: coro = func(*args, **kwargs) if hasattr(asyncio, "run"): - return asyncio.run(coro, debug=True) + return asyncio.run(coro) loop = asyncio.new_event_loop() try: diff --git a/src/betterproto/plugin/compiler.py b/src/betterproto/plugin/compiler.py index 2cba70321..2d99008c3 100644 --- a/src/betterproto/plugin/compiler.py +++ b/src/betterproto/plugin/compiler.py @@ -6,7 +6,7 @@ from .models import OutputTemplate -def outputfile_compiler(output_file: OutputTemplate) -> str: +def outputfile_compiler(output_file: OutputTemplate, line_length: int = black.DEFAULT_LINE_LENGTH) -> str: templates_folder = os.path.abspath( os.path.join(os.path.dirname(__file__), "..", "templates") @@ -21,5 +21,5 @@ def outputfile_compiler(output_file: OutputTemplate) -> str: return black.format_str( template.render(output_file=output_file), - mode=black.Mode(target_versions={black.TargetVersion.PY37}), + mode=black.Mode(line_length=line_length, target_versions={black.TargetVersion.PY37}), ) diff --git a/src/betterproto/plugin/main.py b/src/betterproto/plugin/main.py index 3672c2455..006f28504 100755 --- a/src/betterproto/plugin/main.py +++ b/src/betterproto/plugin/main.py @@ -3,6 +3,8 @@ import os import sys +import rich + from ..lib.google.protobuf.compiler import CodeGeneratorRequest from .models import monkey_patch_oneof_index from .parser import generate_code @@ -13,11 +15,10 @@ def main() -> None: # Read request message from stdin data = sys.stdin.buffer.read() - if os.getenv("USING_BETTERPROTO_CLI"): + if os.getenv("USING_BETTERPROTO_CLI") == "true": # Write the data to stderr for cli sys.stderr.buffer.write(data) # need to figure out how to potentially lock this sys.stdout.buffer.write(b"") - return else: # Apply Work around for proto2/3 difference in protoc messages monkey_patch_oneof_index() @@ -25,8 +26,15 @@ def main() -> None: # Parse request request = CodeGeneratorRequest().parse(data) + rich.print( + "Direct invocation of the protoc plugin 
is depreciated over using the CLI\n" + "To do so you just need to type:\n" + f"betterproto compile {' '.join(request.file_to_generate)}", + file=sys.stderr + ) + # Generate code response = generate_code(request) - # Write to stdout - sys.stdout.buffer.write(bytes(response)) + # Write to stdout + sys.stdout.buffer.write(bytes(response)) diff --git a/src/betterproto/plugin/models.py b/src/betterproto/plugin/models.py index d759a6bb0..6973241a2 100644 --- a/src/betterproto/plugin/models.py +++ b/src/betterproto/plugin/models.py @@ -35,6 +35,7 @@ from typing import Dict, Iterator, List, Optional, Set, Text, Type, Union import betterproto + from .. import Message, which_one_of from ..casing import sanitize_name from ..compile.importing import get_type_reference, parse_source_type_name diff --git a/src/betterproto/plugin/parser.py b/src/betterproto/plugin/parser.py index 0ebec381f..ae5eab4ec 100644 --- a/src/betterproto/plugin/parser.py +++ b/src/betterproto/plugin/parser.py @@ -1,8 +1,9 @@ import itertools import pathlib -import sys from typing import Iterator, List, Set, Tuple, Union +import black + from ..lib.google.protobuf import ( DescriptorProto, EnumDescriptorProto, @@ -35,7 +36,9 @@ def traverse( ) -> "itertools.chain[Tuple[Union[DescriptorProto, EnumDescriptorProto], List[int]]]": # Todo: Keep information about nested hierarchy def _traverse( - path: List[int], items: List[Union[DescriptorProto, EnumDescriptorProto]], prefix="" + path: List[int], + items: List[Union[DescriptorProto, EnumDescriptorProto]], + prefix: str = "", ) -> Iterator[Tuple[Union[DescriptorProto, EnumDescriptorProto], List[int]]]: for i, item in enumerate(items): # Adjust the name since we flatten the hierarchy. @@ -59,21 +62,39 @@ def _traverse( def generate_code( request: CodeGeneratorRequest, - # *, - # generate_services: bool = False, # TODO **KWARGS support for custom options + *, + include_google: bool = False, + line_length: int = black.DEFAULT_LINE_LENGTH, + generate_services: bool = True, + separate_files: bool = False, + show_info: bool = False, + verbose: bool = False, ) -> CodeGeneratorResponse: + """Generate the protobuf response file for a given request. 
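
A hedged sketch of driving generate_code by hand, for example from a CodeGeneratorRequest previously dumped to disk while debugging ("request.bin" is an invented placeholder path, not something the patch creates):

    from pathlib import Path

    from betterproto.lib.google.protobuf.compiler import CodeGeneratorRequest
    from betterproto.plugin.parser import generate_code

    request = CodeGeneratorRequest().parse(Path("request.bin").read_bytes())
    response = generate_code(request, generate_services=False)
    for file in response.file:
        print(file.name)  # generated .py modules plus package __init__.py entries
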
+ + Parameters + ---------- + request + include_google + line_length + generate_services + separate_files + show_info + verbose + + Returns + ------- + :class:`.CodeGeneratorResponse` + """ response = CodeGeneratorResponse() plugin_options = request.parameter.split(",") if request.parameter else [] + include_google = "INCLUDE_GOOGLE" in plugin_options or include_google request_data = PluginRequestCompiler(plugin_request_obj=request) # Gather output packages for proto_file in request.proto_file: - if ( - proto_file.package == "google.protobuf" - and "INCLUDE_GOOGLE" not in plugin_options - ): - # If not INCLUDE_GOOGLE, - # skip re-compiling Google's well-known types + if proto_file.package == "google.protobuf" and include_google: + # If not INCLUDE_GOOGLE skip re-compiling Google's well-known types continue output_package_name = proto_file.package @@ -99,10 +120,11 @@ def generate_code( ) # Read Services - for output_package_name, output_package in request_data.output_packages.items(): - for proto_input_file in output_package.input_files: - for index, service in enumerate(proto_input_file.service): - read_protobuf_service(service, index, output_package) + if generate_services: + for output_package_name, output_package in request_data.output_packages.items(): + for proto_input_file in output_package.input_files: + for index, service in enumerate(proto_input_file.service): + read_protobuf_service(service, index, output_package) # Generate output files output_paths: Set[pathlib.Path] = set() @@ -116,23 +138,18 @@ def generate_code( CodeGeneratorResponseFile( name=str(output_path), # Render and then format the output file - content=outputfile_compiler(output_file=output_package), + content=outputfile_compiler(output_file=output_package, line_length=line_length), ) ) # Make each output directory a package with __init__ file init_files = { - directory.joinpath("__init__.py") - for path in output_paths - for directory in path.parents + directory / "__init__.py" for path in output_paths for directory in path.parents } - output_paths for init_file in init_files: response.file.append(CodeGeneratorResponseFile(name=str(init_file))) - for output_package_name in sorted(output_paths.union(init_files)): - print(f"Writing {output_package_name}", file=sys.stderr) - return response diff --git a/tests/generate.py b/tests/generate.py index 6795ae6f6..216819e40 100755 --- a/tests/generate.py +++ b/tests/generate.py @@ -1,18 +1,20 @@ #!/usr/bin/env python import asyncio import os -from pathlib import Path -import platform import shutil import sys -from typing import Set +from pathlib import Path +from typing import Optional, Set, Tuple + +import click +import rich +from betterproto.plugin.cli import compile_protobufs, utils from tests.util import ( get_directories, inputs_path, output_path_betterproto, output_path_reference, - protoc, ) # Force pure-python implementation instead of C++, otherwise imports @@ -20,7 +22,7 @@ os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python" -def clear_directory(dir_path: Path): +def clear_directory(dir_path: Path) -> None: for file_or_directory in dir_path.glob("*"): if file_or_directory.is_dir(): shutil.rmtree(file_or_directory) @@ -28,7 +30,7 @@ def clear_directory(dir_path: Path): file_or_directory.unlink() -async def generate(whitelist: Set[str], verbose: bool): +async def generate(whitelist: Set[str], verbose: bool) -> None: test_case_names = set(get_directories(inputs_path)) - {"__pycache__"} path_whitelist = set() @@ -41,7 +43,7 @@ async def generate(whitelist: 
Set[str], verbose: bool): generation_tasks = [] for test_case_name in sorted(test_case_names): - test_case_input_path = inputs_path.joinpath(test_case_name).resolve() + test_case_input_path = (inputs_path / test_case_name).resolve() if ( whitelist and str(test_case_input_path) not in path_whitelist @@ -53,30 +55,30 @@ async def generate(whitelist: Set[str], verbose: bool): ) failed_test_cases = [] - # Wait for all subprocs and match any failures to names to report - for test_case_name, result in zip( + # Wait for processes before match any failures to names to report + for test_case_name, exception in zip( sorted(test_case_names), await asyncio.gather(*generation_tasks) ): - if result != 0: + if exception is not None: failed_test_cases.append(test_case_name) if failed_test_cases: - sys.stderr.write( - "\n\033[31;1;4mFailed to generate the following test cases:\033[0m\n" + rich.print( + "[red bold]\nFailed to generate the following test cases:", + *(f"[red]- {failed_test_case}" for failed_test_case in failed_test_cases), + sep="\n", ) - for failed_test_case in failed_test_cases: - sys.stderr.write(f"- {failed_test_case}\n") async def generate_test_case_output( test_case_input_path: Path, test_case_name: str, verbose: bool -) -> int: +) -> Optional[Exception]: """ Returns the max of the subprocess return values """ - test_case_output_path_reference = output_path_reference.joinpath(test_case_name) - test_case_output_path_betterproto = output_path_betterproto.joinpath(test_case_name) + test_case_output_path_reference = output_path_reference / test_case_name + test_case_output_path_betterproto = output_path_betterproto / test_case_name os.makedirs(test_case_output_path_reference, exist_ok=True) os.makedirs(test_case_output_path_betterproto, exist_ok=True) @@ -84,63 +86,53 @@ async def generate_test_case_output( clear_directory(test_case_output_path_reference) clear_directory(test_case_output_path_betterproto) - ( - (ref_out, ref_err, ref_code), - (plg_out, plg_err, plg_code), - ) = await asyncio.gather( - protoc(test_case_input_path, test_case_output_path_reference, True), - protoc(test_case_input_path, test_case_output_path_betterproto, False), - ) + files = list(test_case_input_path.glob("*.proto")) + try: + ((ref_out, ref_err), (plg_out, plg_err),) = await asyncio.gather( + compile_protobufs( + *files, output=test_case_output_path_reference, implementation="" + ), + compile_protobufs(*files, output=test_case_output_path_betterproto), + ) + except Exception as exc: + return exc - message = f"Generated output for {test_case_name!r}" + message = f"[red][bold]Generated output for {test_case_name!r}" + rich.print(message) if verbose: - print(f"\033[31;1;4m{message}\033[0m") if ref_out: - sys.stdout.buffer.write(ref_out) + rich.print(f"[bold red]{ref_out}") if ref_err: - sys.stderr.buffer.write(ref_err) + rich.print(f"[bold red]{ref_err}", file=sys.stderr) if plg_out: - sys.stdout.buffer.write(plg_out) + rich.print(f"[bold red]{plg_out}") if plg_err: - sys.stderr.buffer.write(plg_err) + rich.print(f"[bold red]{plg_err}", file=sys.stderr) sys.stdout.buffer.flush() sys.stderr.buffer.flush() - else: - print(message) - - return max(ref_code, plg_code) - - -HELP = "\n".join( - ( - "Usage: python generate.py [-h] [-v] [DIRECTORIES or NAMES]", - "Generate python classes for standard tests.", - "", - "DIRECTORIES One or more relative or absolute directories of test-cases to generate classes for.", - " python generate.py inputs/bool inputs/double inputs/enum", - "", - "NAMES One or more test-case names to 
generate classes for.", - " python generate.py bool double enums", - ) -) -def main(): - if set(sys.argv).intersection({"-h", "--help"}): - print(HELP) - return - if sys.argv[1:2] == ["-v"]: - verbose = True - whitelist = set(sys.argv[2:]) - else: - verbose = False - whitelist = set(sys.argv[1:]) - - if platform.system() == "Windows": - asyncio.set_event_loop(asyncio.ProactorEventLoop()) - - asyncio.get_event_loop().run_until_complete(generate(whitelist, verbose)) +@click.command( + help="Generate python classes for standard tests.", + context_settings={"help_option_names": ["-h", "--help"]}, +) +@click.option("-v", "--verbose", is_flag=True, default=False) +@click.argument("directories", nargs=-1) +@utils.run_sync +async def main(verbose: bool, directories: Tuple[str, ...]): + """ + Parameters + ---------- + verbose: + Whether or not to run the plugin in verbose mode. + directories: + One or more relative or absolute directories or test-case names test-cases to generate classes for. e.g. + ``inputs/bool inputs/double inputs/enum`` or ``bool double enum`` + """ + await generate(set(directories), verbose) if __name__ == "__main__": + sys.argv = "generate.py".split() + os.getcwd = lambda: "/Users/gobot1234/PycharmProjects/betterproto/tests" main() diff --git a/tests/util.py b/tests/util.py index 6c631417c..c96f66f04 100644 --- a/tests/util.py +++ b/tests/util.py @@ -1,11 +1,9 @@ -import asyncio import importlib import os import pathlib -import sys from pathlib import Path from types import ModuleType -from typing import Callable, Generator, Optional, Union +from typing import Callable, Generator, Optional os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python" @@ -15,38 +13,11 @@ output_path_betterproto = root_path.joinpath("output_betterproto") -def get_files(path, suffix: str) -> Generator[str, None, None]: - for r, dirs, files in os.walk(path): - for filename in [f for f in files if f.endswith(suffix)]: - yield os.path.join(r, filename) - - -def get_directories(path): +def get_directories(path: str) -> Generator[str, None, None]: for root, directories, files in os.walk(path): yield from directories -async def protoc( - path: Union[str, Path], output_dir: Union[str, Path], reference: bool = False -): - path: Path = Path(path).resolve() - output_dir: Path = Path(output_dir).resolve() - python_out_option: str = "python_betterproto_out" if not reference else "python_out" - command = [ - sys.executable, - "-m", - "grpc.tools.protoc", - f"--proto_path={path.as_posix()}", - f"--{python_out_option}={output_dir.as_posix()}", - *[p.as_posix() for p in path.glob("*.proto")], - ] - proc = await asyncio.create_subprocess_exec( - *command, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE - ) - stdout, stderr = await proc.communicate() - return stdout, stderr, proc.returncode - - def get_test_case_json_data(test_case_name: str, json_file_name: Optional[str] = None): test_data_file_name = json_file_name or f"{test_case_name}.json" test_data_file_path = inputs_path.joinpath(test_case_name, test_data_file_name) From b908c58ce6910044f740765975acf0c5d8a2c2b7 Mon Sep 17 00:00:00 2001 From: gobot1234 Date: Thu, 21 Jan 2021 21:38:59 +0000 Subject: [PATCH 25/46] Improve stuff a lot --- pyproject.toml | 28 ++-- src/betterproto/__main__.py | 4 +- src/betterproto/plugin/cli/__init__.py | 8 +- src/betterproto/plugin/cli/commands.py | 196 ++++++++++--------------- src/betterproto/plugin/cli/runner.py | 45 +++--- src/betterproto/plugin/cli/utils.py | 61 ++++++-- src/betterproto/plugin/parser.py | 
2 + 7 files changed, 169 insertions(+), 175 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index cb98a0222..b32afa898 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,10 +36,11 @@ sphinx = "3.1.2" sphinx-rtd-theme = "0.5.0" asv = "^0.4.2" click = "^7.1.2" +typer = "^0.3.2" rich = "^9.8.2" [tool.poetry.scripts] -betterproto = "betterproto:__main__.main" +betterproto = "betterproto:__main__.app" protoc-gen-python_betterproto = "betterproto.plugin:main" [tool.poetry.extras] @@ -47,23 +48,14 @@ compiler = ["black", "jinja2"] [tool.poe.tasks] # Dev workflow tasks -generate = { script = "tests.generate:main", help = "Generate test cases (do this once before running test)" } -test = { cmd = "pytest --cov src", help = "Run tests" } -types = { cmd = "mypy src --ignore-missing-imports", help = "Check types with mypy" } -format = { cmd = "black . --exclude tests/output_", help = "Apply black formatting to source code" } -clean = { cmd = "rm -rf .coverage .mypy_cache .pytest_cache dist betterproto.egg-info **/__pycache__ tests/output_*", help = "Clean out generated files from the workspace" } -docs = { cmd = "sphinx-build docs docs/build", help = "Build the sphinx docs"} -bench = { shell = "asv run master^! && asv run HEAD^! && asv compare master HEAD", help = "Benchmark current commit vs. master branch"} - -generate_lib.cmd = """ -protoc - --plugin=protoc-gen-custom=src/betterproto/plugin/main.py - --custom_opt=INCLUDE_GOOGLE - --custom_out=src/betterproto/lib - -I /usr/local/include/ - /usr/local/include/google/protobuf/**/*.proto -""" -generate_lib.help = "Regenerate the types in betterproto.lib.google" +generate = { script = "tests.generate:main", help = "Generate test cases (do this once before running test)" } +test = { cmd = "pytest --cov src", help = "Run tests" } +types = { cmd = "mypy src --ignore-missing-imports", help = "Check types with mypy" } +format = { cmd = "black . --exclude tests/output_", help = "Apply black formatting to source code" } +clean = { cmd = "rm -rf .coverage .mypy_cache .pytest_cache dist betterproto.egg-info **/__pycache__ tests/output_*", help = "Clean out generated files from the workspace" } +docs = { cmd = "sphinx-build docs docs/build", help = "Build the sphinx docs"} +bench = { shell = "asv run master^! && asv run HEAD^! && asv compare master HEAD", help = "Benchmark current commit vs. 
master branch"} +generate_lib = { cmd = "betterproto compile /usr/local/bin/include/google/protobuf --output=src/betterproto/lib", help = "Regenerate the types in betterproto.lib.google"} # CI tasks full-test = { shell = "poe generate && tox", help = "Run tests with multiple pythons" } diff --git a/src/betterproto/__main__.py b/src/betterproto/__main__.py index 245612cce..9f733a0d7 100644 --- a/src/betterproto/__main__.py +++ b/src/betterproto/__main__.py @@ -1,4 +1,4 @@ -from .plugin.cli import main +from .plugin.cli import app if __name__ == "__main__": - main() + app() diff --git a/src/betterproto/plugin/cli/__init__.py b/src/betterproto/plugin/cli/__init__.py index c97e59a68..dbdb74518 100644 --- a/src/betterproto/plugin/cli/__init__.py +++ b/src/betterproto/plugin/cli/__init__.py @@ -1,8 +1,5 @@ import asyncio -import os import platform -from pathlib import Path -from typing import Any, Dict try: import grpc_tools.protoc @@ -11,12 +8,11 @@ else: USE_PROTOC = False -DEFAULT_OUT = Path.cwd() / "betterproto_out" +DEFAULT_OUT = "betterproto_out" VERBOSE = False -ENV: Dict[str, Any] = dict(os.environ) from black import DEFAULT_LINE_LENGTH as DEFAULT_LINE_LENGTH # noqa -from .commands import main +from .commands import app from .runner import compile_protobufs if platform.system() == "Windows": diff --git a/src/betterproto/plugin/cli/commands.py b/src/betterproto/plugin/cli/commands.py index f4e4a385e..68dace310 100644 --- a/src/betterproto/plugin/cli/commands.py +++ b/src/betterproto/plugin/cli/commands.py @@ -1,9 +1,8 @@ import os -import sys from pathlib import Path -from typing import Tuple +from typing import List -import click +import typer import rich from rich.progress import Progress from rich.syntax import Syntax @@ -11,144 +10,105 @@ from betterproto.plugin.cli import ( DEFAULT_LINE_LENGTH, DEFAULT_OUT, - ENV, USE_PROTOC, VERBOSE, utils, ) -from ..models import monkey_patch_oneof_index -from .errors import CLIError, ProtobufSyntaxError -from .runner import compile_protobufs +from betterproto.plugin.models import monkey_patch_oneof_index +from betterproto.plugin.cli.errors import CLIError, ProtobufSyntaxError +from betterproto.plugin.cli.runner import compile_protobufs monkey_patch_oneof_index() +app = typer.Typer() +compile_app = typer.Typer() +app.add_typer(compile_app, name="compile") -@click.group(context_settings={"help_option_names": ["-h", "--help"]}) -@click.pass_context -def main(ctx: click.Context) -> None: +@app.callback(context_settings={"help_option_names": ["-h", "--help"]}) +def callback(ctx: typer.Context) -> None: """The main entry point to all things betterproto""" if ctx.invoked_subcommand is None: rich.print(ctx.get_help()) -@main.command(context_settings={"help_option_names": ["-h", "--help"]}) -@click.option( - "-v", - "--verbose", - is_flag=True, - default=VERBOSE, -) -@click.option( - "-p", - "--protoc", - is_flag=True, - help="Whether or not to use protoc to compile the protobufs if this is false" - "it will attempt to use grpc instead", - default=USE_PROTOC, -) -@click.option( - "-l", - "--line-length", - type=int, - default=DEFAULT_LINE_LENGTH, -) -@click.option( - "--generate-services", - help="Whether or not to generate servicer stubs", - is_flag=True, - default=True, -) -@click.option( - "-o", - "--output", - help="The output directory", - type=click.Path(file_okay=False, dir_okay=True, allow_dash=True), - default=DEFAULT_OUT.name, -) -@click.argument( - "src", - type=click.Path(exists=True, file_okay=True, dir_okay=True, allow_dash=True), - 
is_eager=True, - nargs=-1, -) @utils.run_sync +@app.command(context_settings={"help_option_names": ["-h", "--help"]}) async def compile( - verbose: bool, - protoc: bool, - line_length: int, - generate_services: bool, - output: str, - src: Tuple[str, ...], + verbose: bool = typer.Option(VERBOSE, "-v", "--verbose"), + protoc: bool = typer.Option( + USE_PROTOC, + "-p", + "--protoc", + help="Whether or not to use protoc to compile the protobufs if this is false " + "it will attempt to use grpc instead", + ), + line_length: int = typer.Option(DEFAULT_LINE_LENGTH, "-l", "--line-length"), + generate_services: bool = typer.Option( + True, help="Whether or not to generate servicer stubs" + ), + output: str = typer.Option( + DEFAULT_OUT, + "-o", + "--output", + help="The name of the output directory", + file_okay=False, + allow_dash=True, + ), + paths: List[Path] = typer.Argument( + ..., exists=True, allow_dash=True, resolve_path=True + ), ) -> None: """The recommended way to compile your protobuf files.""" - if len(src) != 1: - return rich.print( - "[red]Currently can't handle more than one source this is just for a " - "nicer invocation of help" - ) + if not paths: + return rich.print("[bold]No files provided") - files = utils.get_files(src[0]) + files = utils.get_files(paths) if not files: return rich.print("[bold]No files found to compile") - output = Path.cwd() / output - output.mkdir(exist_ok=True) - - ENV["USING_BETTERPROTO_CLI"] = "true" - - try: - await compile_protobufs( - *files, - output=output, - verbose=verbose, - use_protoc=protoc, - generate_services=generate_services, - line_length=line_length, - ) - except ProtobufSyntaxError as exc: - error = Syntax( - exc.file.read_text(), - lexer_name="proto", - line_numbers=True, - line_range=(max(exc.lineno - 5, 0), exc.lineno), - ) - # I'd like to switch to .from_path but it appears to be bugged and doesnt pick up syntax - return rich.print( - f"[red]File {str(exc.file).strip()}:\n", - error, - f"[red]{' ' * (exc.offset + 3)}^\n" - f"[red]SyntaxError: {exc.msg}", - ) - except CLIError as exc: - failed_files = "\n".join(f" - {file}" for file in files) - return rich.print( - f"[red]{'Protoc' if protoc else 'GRPC'} failed to generate outputs for:\n\n" - f"{failed_files}\n\nSee the output for the issue:\n{exc.args[0]}[red]", - ) - - rich.print( - f"[bold]Finished generating output for {len(files)} files, output is in [link]{output.as_posix()}" - ) - - -""" -async def run_cli(port: int) -> None: - - with Progress(transient=True) as progress: # TODO reading and compiling stuff - compiling_progress_bar = progress.add_task( - "[green]Compiling protobufs...", total=total - ) + for output_path, protos in files.items(): + try: + output = Path.cwd() / output_path.name if output == DEFAULT_OUT else output + output.mkdir(exist_ok=True) + await compile_protobufs( + *protos, + output=output, + verbose=verbose, + use_protoc=protoc, + generate_services=generate_services, + line_length=line_length, + from_cli=True, + ) + except ProtobufSyntaxError as exc: + error = Syntax( + exc.file.read_text(), + "proto", + line_numbers=True, + line_range=(max(exc.lineno - 5, 0), exc.lineno), + ) + # I'd like to switch to .from_path but it appears to be bugged and doesnt pick lexer_name + rich.print( + f"[red]File {str(exc.file).strip()}:\n", + error, + f"{' ' * (exc.offset + 3)}^\n" f"SyntaxError: {exc.msg}[red]", + ) + except CLIError as exc: + failed_files = "\n".join(f" - {file}" for file in protos) + rich.print( + f"[red]{'Protoc' if protoc else 'GRPC'} failed to 
generate outputs for:\n\n" + f"{failed_files}\n\nSee the output for the issue:\n{exc.args[0]}[red]", + ) - async for message in service.get_currently_compiling(): - progress.tasks[0].description = ( - f"[green]Compiling protobufs...\n" - f"Currently compiling {message.type.name.lower()}: {message.name}" + else: + rich.print( + f"[bold green]Finished generating output for {len(protos)} files, output is in {output.as_posix()}" ) - progress.update(compiling_progress_bar, advance=1) - rich.print(f"[bold][green]Finished compiling output should be at {round(3)}")""" + if __name__ == "__main__": - os.getcwd = lambda: "/Users/gobot1234/PycharmProjects/betterproto/tests" - sys.argv = "betterproto compile /Users/gobot1234/PycharmProjects/betterproto/tests/inputs/bool".split() - main() + os.getcwd = lambda: "/Users/gobot1234/PycharmProjects/betterproto" + # sys.argv = "betterproto compile --output=src/betterproto/lib".split() + compile( + output=Path("src/betterproto/lib").resolve(), paths=[Path("/usr/local/bin/include/google/protobuf")] + ) diff --git a/src/betterproto/plugin/cli/runner.py b/src/betterproto/plugin/cli/runner.py index 78592e688..0cc4cd70e 100644 --- a/src/betterproto/plugin/cli/runner.py +++ b/src/betterproto/plugin/cli/runner.py @@ -10,7 +10,7 @@ CodeGeneratorResponseFile, ) from ..parser import generate_code -from . import ENV, USE_PROTOC, utils +from . import USE_PROTOC, utils from .errors import CLIError, ProtobufSyntaxError @@ -21,7 +21,7 @@ def write_file(output: Path, file: CodeGeneratorResponseFile) -> None: def handle_error(data: str, files: Tuple[Path, ...]) -> NoReturn: match = re.match( - r"(?P.+):(?P\d+):(?P\d+):(?P.*)", + r"^(?P.+):(?P\d+):(?P\d+):(?P.*)", data, ) if match is None: @@ -55,7 +55,7 @@ async def compile_protobufs( output: :class:`.Path` The output directory. **kwargs: - The **kwargs to pass to generate_code. + Any keyword arguments to pass to generate_code. 
Returns ------- @@ -70,22 +70,22 @@ async def compile_protobufs( command, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, - stdin=asyncio.subprocess.PIPE, - env=ENV, + env={"USING_BETTERPROTO_CLI": str(kwargs.get("from_cli", False)).lower()}, ) + stdout, stderr = await process.communicate() + if implementation == "betterproto_": - data = await process.stderr.read() # we put code on stderr so we can actually read it thank you google :))))) try: - request = CodeGeneratorRequest().parse(data) + request = CodeGeneratorRequest().parse(stderr) except Exception: - handle_error(data.decode(), files) + handle_error(stderr.decode(), files) if request._unknown_fields: try: - handle_error(data.decode(), files) + handle_error(stderr.decode(), files) except UnicodeError: raise CLIError( 'Try running "poetry generate_lib" to try and fix this, if that doesn\'t work protoc broke' @@ -94,21 +94,26 @@ async def compile_protobufs( # Generate code response = await utils.to_thread(generate_code, request, **kwargs) - with ProcessPoolExecutor(max_workers=4) as process_pool: - # write multiple files concurrently + if len(response.files) > 1: loop = asyncio.get_event_loop() - await asyncio.gather( - *( - loop.run_in_executor( - process_pool, functools.partial(write_file, output, file) + with ProcessPoolExecutor(max_workers=4) as process_pool: + # write multiple files concurrently + await asyncio.gather( + *( + loop.run_in_executor( + process_pool, functools.partial(write_file, output, file) + ) + for file in response.file ) - for file in response.file ) - ) - stdout, stderr = await process.communicate() + else: + await utils.to_thread(write_file, output, response.file[0]) + + elif stderr: + handle_error(stderr.decode(), files) - if process.returncode != 0: - raise CLIError(stderr.decode()) # bad + elif process.returncode != 0: + raise CLIError(stderr.decode()) return stdout.decode(), stderr.decode() diff --git a/src/betterproto/plugin/cli/utils.py b/src/betterproto/plugin/cli/utils.py index 3017a0df1..070a4ad51 100644 --- a/src/betterproto/plugin/cli/utils.py +++ b/src/betterproto/plugin/cli/utils.py @@ -1,27 +1,60 @@ import asyncio import functools +import os import platform import sys +from collections import defaultdict from pathlib import Path -from typing import Any, Awaitable, Callable, List, TypeVar +from typing import Awaitable, Callable, TypeVar, Any, List, Set -from . import USE_PROTOC +# from . 
import USE_PROTOC +USE_PROTOC = True T = TypeVar("T") - -def get_files(src: str) -> List[Path]: +INCLUDE = ( + "any.proto", + "api.proto", + "compiler/plugin.proto", + "descriptor.proto", + "duration.proto", + "empty.proto", + "field_mask.proto", + "source_context.proto", + "struct.proto", + "timestamp.proto", + "type.proto", + "wrappers.proto", +) + + +def get_files(paths: List[Path]) -> "defaultdict[Path, Set[Path]]": """Return a list of files ready for :func:`generate_command`""" - path = Path(src) - if not path.is_absolute(): - path = (Path.cwd() / src).resolve() - if path.is_dir(): - return [p for p in path.iterdir() if p.suffix == ".proto"] - return [path] + # TODO create a default dict of parent to paths + # recurse up folder to file first folder without a .proto + # return highest directory as first value in list + + new_paths: "defaultdict[Path, Set[Path]]" = defaultdict(set) + for path in paths: + if not path.is_absolute(): + path = (Path.cwd() / path).resolve() + if str(path).startswith("/usr") and "include/google/protobuf" in str(path): + # TODO make this better for windows systems and being in different places in usr/ + # TODO make this actually work :) --plugin=protoc-gen-custom=src/betterproto/plugin/main.py + new_paths[path].update(path / proto for proto in INCLUDE) + elif path.is_dir(): + new_paths[path].update(path.glob("*.proto")) + else: + new_paths[path.parent].add(path) + + return new_paths def generate_command( - *files: Path, output: Path, use_protoc: bool = USE_PROTOC, implementation: str = "betterproto_" + *files: Path, + output: Path, + use_protoc: bool = USE_PROTOC, + implementation: str = "betterproto_", ) -> str: command = [ @@ -65,8 +98,14 @@ def wrapper(*args: Any, **kwargs: Any) -> T: if sys.version_info[:2] >= (3, 9): from asyncio import to_thread else: + async def to_thread(func: Callable[..., T], *args: Any, **kwargs: Any) -> T: loop = asyncio.get_event_loop() # no context vars partial = functools.partial(func, *args, **kwargs) return await loop.run_in_executor(None, partial) + + +if __name__ == '__main__': + os.getcwd = lambda: "/Users/gobot1234/PycharmProjects/betterproto/tests/inputs" + print(get_files(("bool", "bool/bool.proto", "casing/casing.proto"))) diff --git a/src/betterproto/plugin/parser.py b/src/betterproto/plugin/parser.py index ae5eab4ec..3bb053438 100644 --- a/src/betterproto/plugin/parser.py +++ b/src/betterproto/plugin/parser.py @@ -69,6 +69,7 @@ def generate_code( separate_files: bool = False, show_info: bool = False, verbose: bool = False, + from_cli: bool = False, ) -> CodeGeneratorResponse: """Generate the protobuf response file for a given request. 
@@ -81,6 +82,7 @@ def generate_code( separate_files show_info verbose + from_cli Returns ------- From 8b4380fcb0f31200afd51e67cc0dc7b202a8650a Mon Sep 17 00:00:00 2001 From: gobot1234 Date: Fri, 22 Jan 2021 11:16:00 +0000 Subject: [PATCH 26/46] Final stuff to make it work --- src/betterproto/plugin/cli/commands.py | 41 +++++----- src/betterproto/plugin/cli/runner.py | 10 ++- src/betterproto/plugin/models.py | 2 +- src/betterproto/plugin/parser.py | 106 ++++++++++++++++--------- 4 files changed, 96 insertions(+), 63 deletions(-) diff --git a/src/betterproto/plugin/cli/commands.py b/src/betterproto/plugin/cli/commands.py index 68dace310..1f403b24f 100644 --- a/src/betterproto/plugin/cli/commands.py +++ b/src/betterproto/plugin/cli/commands.py @@ -1,10 +1,8 @@ -import os from pathlib import Path from typing import List import typer import rich -from rich.progress import Progress from rich.syntax import Syntax from betterproto.plugin.cli import ( @@ -14,7 +12,6 @@ VERBOSE, utils, ) - from betterproto.plugin.models import monkey_patch_oneof_index from betterproto.plugin.cli.errors import CLIError, ProtobufSyntaxError from betterproto.plugin.cli.runner import compile_protobufs @@ -32,10 +29,12 @@ def callback(ctx: typer.Context) -> None: rich.print(ctx.get_help()) +@compile_app.command(context_settings={"help_option_names": ["-h", "--help"]}) @utils.run_sync -@app.command(context_settings={"help_option_names": ["-h", "--help"]}) async def compile( - verbose: bool = typer.Option(VERBOSE, "-v", "--verbose"), + verbose: bool = typer.Option( + VERBOSE, "-v", "--verbose", help="Whether or not to be verbose" + ), protoc: bool = typer.Option( USE_PROTOC, "-p", @@ -47,30 +46,34 @@ async def compile( generate_services: bool = typer.Option( True, help="Whether or not to generate servicer stubs" ), - output: str = typer.Option( + output: Path = typer.Option( DEFAULT_OUT, - "-o", - "--output", help="The name of the output directory", file_okay=False, allow_dash=True, ), paths: List[Path] = typer.Argument( - ..., exists=True, allow_dash=True, resolve_path=True + ..., + help="The protobuf files to compile", + exists=True, + allow_dash=True, + resolve_path=True, ), ) -> None: """The recommended way to compile your protobuf files.""" - if not paths: - return rich.print("[bold]No files provided") - files = utils.get_files(paths) + if not files: return rich.print("[bold]No files found to compile") for output_path, protos in files.items(): try: - output = Path.cwd() / output_path.name if output == DEFAULT_OUT else output - output.mkdir(exist_ok=True) + output = ( + (Path(output_path.parent.name) / output_path.name).resolve() + if output.name == DEFAULT_OUT + else output + ) + output.mkdir(exist_ok=True, parents=True) await compile_protobufs( *protos, output=output, @@ -91,7 +94,7 @@ async def compile( rich.print( f"[red]File {str(exc.file).strip()}:\n", error, - f"{' ' * (exc.offset + 3)}^\n" f"SyntaxError: {exc.msg}[red]", + f"{' ' * (exc.offset + 3)}^\nSyntaxError: {exc.msg}[red]", ) except CLIError as exc: failed_files = "\n".join(f" - {file}" for file in protos) @@ -104,11 +107,3 @@ async def compile( rich.print( f"[bold green]Finished generating output for {len(protos)} files, output is in {output.as_posix()}" ) - - -if __name__ == "__main__": - os.getcwd = lambda: "/Users/gobot1234/PycharmProjects/betterproto" - # sys.argv = "betterproto compile --output=src/betterproto/lib".split() - compile( - output=Path("src/betterproto/lib").resolve(), paths=[Path("/usr/local/bin/include/google/protobuf")] - ) diff 
--git a/src/betterproto/plugin/cli/runner.py b/src/betterproto/plugin/cli/runner.py index 0cc4cd70e..aa91f0715 100644 --- a/src/betterproto/plugin/cli/runner.py +++ b/src/betterproto/plugin/cli/runner.py @@ -1,5 +1,6 @@ import asyncio import functools +import os import re from concurrent.futures import ProcessPoolExecutor from pathlib import Path @@ -65,12 +66,13 @@ async def compile_protobufs( command = utils.generate_command( *files, output=output, use_protoc=use_protoc, implementation=implementation ) - + env = {"USING_BETTERPROTO_CLI": str(kwargs.get("from_cli", False)).lower()} + env.update(os.environ) process = await asyncio.create_subprocess_shell( command, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, - env={"USING_BETTERPROTO_CLI": str(kwargs.get("from_cli", False)).lower()}, + env=env, ) stdout, stderr = await process.communicate() @@ -94,7 +96,7 @@ async def compile_protobufs( # Generate code response = await utils.to_thread(generate_code, request, **kwargs) - if len(response.files) > 1: + if len(response.file) > 1: loop = asyncio.get_event_loop() with ProcessPoolExecutor(max_workers=4) as process_pool: # write multiple files concurrently @@ -110,6 +112,8 @@ async def compile_protobufs( else: await utils.to_thread(write_file, output, response.file[0]) + stderr = b"" + elif stderr: handle_error(stderr.decode(), files) diff --git a/src/betterproto/plugin/models.py b/src/betterproto/plugin/models.py index 6973241a2..84d08311c 100644 --- a/src/betterproto/plugin/models.py +++ b/src/betterproto/plugin/models.py @@ -217,7 +217,7 @@ class OutputTemplate: parent_request: PluginRequestCompiler package_proto_obj: FileDescriptorProto - input_files: List[str] = field(default_factory=list) + input_files: List[FileDescriptorProto] = field(default_factory=list) imports: Set[str] = field(default_factory=set) datetime_imports: Set[str] = field(default_factory=set) typing_imports: Set[str] = field(default_factory=set) diff --git a/src/betterproto/plugin/parser.py b/src/betterproto/plugin/parser.py index 3bb053438..c1c7baec4 100644 --- a/src/betterproto/plugin/parser.py +++ b/src/betterproto/plugin/parser.py @@ -3,6 +3,8 @@ from typing import Iterator, List, Set, Tuple, Union import black +import rich +from rich.progress import Progress from ..lib.google.protobuf import ( DescriptorProto, @@ -94,55 +96,87 @@ def generate_code( request_data = PluginRequestCompiler(plugin_request_obj=request) # Gather output packages - for proto_file in request.proto_file: - if proto_file.package == "google.protobuf" and include_google: - # If not INCLUDE_GOOGLE skip re-compiling Google's well-known types - continue - - output_package_name = proto_file.package - if output_package_name not in request_data.output_packages: - # Create a new output if there is no output for this package - request_data.output_packages[output_package_name] = OutputTemplate( - parent_request=request_data, package_proto_obj=proto_file - ) - # Add this input file to the output corresponding to this package - request_data.output_packages[output_package_name].input_files.append(proto_file) + + with Progress(transient=True) as progress: + reading_progress_bar = progress.add_task( + "[green]Reading protobuf files...", total=len(request.proto_file) + ) + for proto_file in request.proto_file: + if proto_file.package == "google.protobuf" and include_google: + # If not INCLUDE_GOOGLE skip re-compiling Google's well-known types + continue + + output_package_name = proto_file.package + if output_package_name not in 
request_data.output_packages: + # Create a new output if there is no output for this package + request_data.output_packages[output_package_name] = OutputTemplate( + parent_request=request_data, package_proto_obj=proto_file + ) + # Add this input file to the output corresponding to this package + request_data.output_packages[output_package_name].input_files.append(proto_file) + if from_cli: + progress.update(reading_progress_bar, advance=1) # Read Messages and Enums # We need to read Messages before Services in so that we can # get the references to input/output messages for each service - for output_package_name, output_package in request_data.output_packages.items(): - for proto_input_file in output_package.input_files: - for item, path in traverse(proto_input_file): - read_protobuf_type( - source_file=proto_input_file, - item=item, - path=path, - output_package=output_package, - ) + with Progress(transient=True) as progress: + parsing_progress_bar = progress.add_task( + "[green]Parsing protobuf enums and messages...", total=sum( + len(message.package_proto_obj.enum_type) + + len(message.package_proto_obj.message_type) + for message in request_data.output_packages.values() + ) + ) + for output_package_name, output_package in request_data.output_packages.items(): + for proto_input_file in output_package.input_files: + for item, path in traverse(proto_input_file): + read_protobuf_type( + item=item, + path=path, + source_file=proto_input_file, + output_package=output_package, + ) + if from_cli: + progress.update(parsing_progress_bar, advance=1) # Read Services if generate_services: - for output_package_name, output_package in request_data.output_packages.items(): - for proto_input_file in output_package.input_files: - for index, service in enumerate(proto_input_file.service): - read_protobuf_service(service, index, output_package) + with Progress(transient=True) as progress: + parsing_progress_bar = progress.add_task( + "[green]Parsing protobuf services...", total=sum( + len(message.package_proto_obj.service) + for message in request_data.output_packages.values() + ) + ) + for output_package_name, output_package in request_data.output_packages.items(): + for proto_input_file in output_package.input_files: + for index, service in enumerate(proto_input_file.service): + read_protobuf_service(service, index, output_package) + if from_cli: + progress.update(parsing_progress_bar, advance=1) # Generate output files output_paths: Set[pathlib.Path] = set() - for output_package_name, output_package in request_data.output_packages.items(): + with Progress(transient=True) as progress: + compiling_progress_bar = progress.add_task( + "[green]Compiling protobuf files...", total=len(request_data.output_packages) + ) + for output_package_name, output_package in request_data.output_packages.items(): - # Add files to the response object - output_path = pathlib.Path(*output_package_name.split("."), "__init__.py") - output_paths.add(output_path) + # Add files to the response object + output_path = pathlib.Path(*output_package_name.split("."), "__init__.py") + output_paths.add(output_path) - response.file.append( - CodeGeneratorResponseFile( - name=str(output_path), - # Render and then format the output file - content=outputfile_compiler(output_file=output_package, line_length=line_length), + response.file.append( + CodeGeneratorResponseFile( + name=str(output_path), + # Render and then format the output file + content=outputfile_compiler(output_file=output_package, line_length=line_length), + ) ) - ) + if 
from_cli: + progress.update(compiling_progress_bar, advance=1) # Make each output directory a package with __init__ file init_files = { From 16af49958f3cc58eea128e17790877a09c30207c Mon Sep 17 00:00:00 2001 From: gobot1234 Date: Fri, 22 Jan 2021 18:41:34 +0000 Subject: [PATCH 27/46] More stuff --- pyproject.toml | 2 +- src/betterproto/__main__.py | 4 +-- src/betterproto/plugin/__init__.py | 3 +- src/betterproto/plugin/cli/commands.py | 28 +++++++++------- src/betterproto/plugin/cli/runner.py | 9 ++--- src/betterproto/plugin/models.py | 5 +-- src/betterproto/plugin/parser.py | 6 ++-- tests/generate.py | 46 ++++++++++++-------------- tests/util.py | 2 +- 9 files changed, 50 insertions(+), 55 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index b32afa898..ab029ff69 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,7 +40,7 @@ typer = "^0.3.2" rich = "^9.8.2" [tool.poetry.scripts] -betterproto = "betterproto:__main__.app" +betterproto = "betterproto:__main__.main" protoc-gen-python_betterproto = "betterproto.plugin:main" [tool.poetry.extras] diff --git a/src/betterproto/__main__.py b/src/betterproto/__main__.py index 9f733a0d7..f9538be69 100644 --- a/src/betterproto/__main__.py +++ b/src/betterproto/__main__.py @@ -1,4 +1,4 @@ -from .plugin.cli import app +from .plugin.cli import app as main if __name__ == "__main__": - app() + main() diff --git a/src/betterproto/plugin/__init__.py b/src/betterproto/plugin/__init__.py index 737522047..c7f6541ca 100644 --- a/src/betterproto/plugin/__init__.py +++ b/src/betterproto/plugin/__init__.py @@ -1,6 +1,5 @@ import sys import traceback -from distutils import sysconfig from pathlib import Path from types import TracebackType from typing import Type @@ -13,7 +12,7 @@ STDLIB_MODULES = [ p.with_suffix("").name - for p in Path(sysconfig.get_python_lib(standard_lib=True)).iterdir() + for p in Path(traceback.__file__).parent.iterdir() if p.suffix == ".py" or p.is_dir() ] diff --git a/src/betterproto/plugin/cli/commands.py b/src/betterproto/plugin/cli/commands.py index 1f403b24f..d7d8871fd 100644 --- a/src/betterproto/plugin/cli/commands.py +++ b/src/betterproto/plugin/cli/commands.py @@ -5,31 +5,29 @@ import rich from rich.syntax import Syntax -from betterproto.plugin.cli import ( +from ..models import monkey_patch_oneof_index +from . 
import ( DEFAULT_LINE_LENGTH, DEFAULT_OUT, USE_PROTOC, VERBOSE, utils, ) -from betterproto.plugin.models import monkey_patch_oneof_index -from betterproto.plugin.cli.errors import CLIError, ProtobufSyntaxError -from betterproto.plugin.cli.runner import compile_protobufs +from .errors import CLIError, ProtobufSyntaxError +from .runner import compile_protobufs monkey_patch_oneof_index() app = typer.Typer() -compile_app = typer.Typer() -app.add_typer(compile_app, name="compile") @app.callback(context_settings={"help_option_names": ["-h", "--help"]}) def callback(ctx: typer.Context) -> None: - """The main entry point to all things betterproto""" + """The callback for all things betterproto""" if ctx.invoked_subcommand is None: rich.print(ctx.get_help()) -@compile_app.command(context_settings={"help_option_names": ["-h", "--help"]}) +@app.command(context_settings={"help_option_names": ["-h", "--help"]}) @utils.run_sync async def compile( verbose: bool = typer.Option( @@ -42,7 +40,12 @@ async def compile( help="Whether or not to use protoc to compile the protobufs if this is false " "it will attempt to use grpc instead", ), - line_length: int = typer.Option(DEFAULT_LINE_LENGTH, "-l", "--line-length"), + line_length: int = typer.Option( + DEFAULT_LINE_LENGTH, + "-l", + "--line-length", + help="The line length to format with", + ), generate_services: bool = typer.Option( True, help="Whether or not to generate servicer stubs" ), @@ -57,7 +60,7 @@ async def compile( help="The protobuf files to compile", exists=True, allow_dash=True, - resolve_path=True, + readable=False, ), ) -> None: """The recommended way to compile your protobuf files.""" @@ -90,7 +93,7 @@ async def compile( line_numbers=True, line_range=(max(exc.lineno - 5, 0), exc.lineno), ) - # I'd like to switch to .from_path but it appears to be bugged and doesnt pick lexer_name + # TODO switch to .from_path but it appears to be bugged and doesnt pick lexer_name rich.print( f"[red]File {str(exc.file).strip()}:\n", error, @@ -105,5 +108,6 @@ async def compile( else: rich.print( - f"[bold green]Finished generating output for {len(protos)} files, output is in {output.as_posix()}" + f"[bold green]Finished generating output for {len(protos)} files, " + f"output is in {output.as_posix()}" ) diff --git a/src/betterproto/plugin/cli/runner.py b/src/betterproto/plugin/cli/runner.py index aa91f0715..e00beeaa1 100644 --- a/src/betterproto/plugin/cli/runner.py +++ b/src/betterproto/plugin/cli/runner.py @@ -86,19 +86,14 @@ async def compile_protobufs( handle_error(stderr.decode(), files) if request._unknown_fields: - try: - handle_error(stderr.decode(), files) - except UnicodeError: - raise CLIError( - 'Try running "poetry generate_lib" to try and fix this, if that doesn\'t work protoc broke' - ) + handle_error(stderr.decode(), files) # Generate code response = await utils.to_thread(generate_code, request, **kwargs) if len(response.file) > 1: loop = asyncio.get_event_loop() - with ProcessPoolExecutor(max_workers=4) as process_pool: + with ProcessPoolExecutor() as process_pool: # write multiple files concurrently await asyncio.gather( *( diff --git a/src/betterproto/plugin/models.py b/src/betterproto/plugin/models.py index 84d08311c..c39f8f575 100644 --- a/src/betterproto/plugin/models.py +++ b/src/betterproto/plugin/models.py @@ -53,6 +53,7 @@ FieldDescriptorProtoType, FileDescriptorProto, MethodDescriptorProto, + ServiceDescriptorProto, ) from ..lib.google.protobuf.compiler import CodeGeneratorRequest @@ -467,7 +468,7 @@ def py_type(self) -> str: 
source_type=self.proto_obj.type_name, ) else: - raise NotImplementedError(f"Unknown type {field.type}") + raise NotImplementedError(f"Unknown type {self.proto_obj.type}") @property def annotation(self) -> str: @@ -576,7 +577,7 @@ def default_value_string(self) -> str: @dataclass class ServiceCompiler(ProtoContentBase): parent: OutputTemplate = PLACEHOLDER - proto_obj: DescriptorProto = PLACEHOLDER + proto_obj: ServiceDescriptorProto = PLACEHOLDER path: List[int] = PLACEHOLDER methods: List["ServiceMethodCompiler"] = field(default_factory=list) diff --git a/src/betterproto/plugin/parser.py b/src/betterproto/plugin/parser.py index c1c7baec4..1a565a1ed 100644 --- a/src/betterproto/plugin/parser.py +++ b/src/betterproto/plugin/parser.py @@ -3,7 +3,6 @@ from typing import Iterator, List, Set, Tuple, Union import black -import rich from rich.progress import Progress from ..lib.google.protobuf import ( @@ -69,7 +68,6 @@ def generate_code( line_length: int = black.DEFAULT_LINE_LENGTH, generate_services: bool = True, separate_files: bool = False, - show_info: bool = False, verbose: bool = False, from_cli: bool = False, ) -> CodeGeneratorResponse: @@ -82,14 +80,14 @@ def generate_code( line_length generate_services separate_files - show_info verbose from_cli Returns ------- :class:`.CodeGeneratorResponse` - """ + """ # TODO + response = CodeGeneratorResponse() plugin_options = request.parameter.split(",") if request.parameter else [] include_google = "INCLUDE_GOOGLE" in plugin_options or include_google diff --git a/tests/generate.py b/tests/generate.py index 216819e40..c3bb233f4 100755 --- a/tests/generate.py +++ b/tests/generate.py @@ -4,9 +4,9 @@ import shutil import sys from pathlib import Path -from typing import Optional, Set, Tuple +from typing import Optional, Set, List -import click +import typer import rich from betterproto.plugin.cli import compile_protobufs, utils @@ -20,6 +20,7 @@ # Force pure-python implementation instead of C++, otherwise imports # break things because we can't properly reset the symbol database. 
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python" +app = typer.Typer() def clear_directory(dir_path: Path) -> None: @@ -30,7 +31,7 @@ def clear_directory(dir_path: Path) -> None: file_or_directory.unlink() -async def generate(whitelist: Set[str], verbose: bool) -> None: +async def generate(whitelist: Set[Path], verbose: bool) -> None: test_case_names = set(get_directories(inputs_path)) - {"__pycache__"} path_whitelist = set() @@ -59,7 +60,7 @@ async def generate(whitelist: Set[str], verbose: bool) -> None: for test_case_name, exception in zip( sorted(test_case_names), await asyncio.gather(*generation_tasks) ): - if exception is not None: + if exception is not None: # TODO this broke failed_test_cases.append(test_case_name) if failed_test_cases: @@ -92,7 +93,9 @@ async def generate_test_case_output( compile_protobufs( *files, output=test_case_output_path_reference, implementation="" ), - compile_protobufs(*files, output=test_case_output_path_betterproto), + compile_protobufs( + *files, output=test_case_output_path_betterproto, from_cli=True + ), ) except Exception as exc: return exc @@ -112,27 +115,22 @@ async def generate_test_case_output( sys.stderr.buffer.flush() -@click.command( - help="Generate python classes for standard tests.", - context_settings={"help_option_names": ["-h", "--help"]}, -) -@click.option("-v", "--verbose", is_flag=True, default=False) -@click.argument("directories", nargs=-1) +@app.command(context_settings={"help_option_names": ["-h", "--help"]},) @utils.run_sync -async def main(verbose: bool, directories: Tuple[str, ...]): - """ - Parameters - ---------- - verbose: - Whether or not to run the plugin in verbose mode. - directories: - One or more relative or absolute directories or test-case names test-cases to generate classes for. e.g. - ``inputs/bool inputs/double inputs/enum`` or ``bool double enum`` - """ +async def main( + verbose: bool = typer.Option( + False, help="Whether or not to run the plugin in verbose mode." + ), + directories: List[Path] = typer.Option( + (), + help="One or more relative or absolute directories or test-case names " + "test-cases to generate classes for. e.g. 
``inputs/bool inputs/double " + "inputs/enum`` or ``bool double enum``" + ) +) -> None: + """Generate python classes for standard tests.""" await generate(set(directories), verbose) if __name__ == "__main__": - sys.argv = "generate.py".split() - os.getcwd = lambda: "/Users/gobot1234/PycharmProjects/betterproto/tests" - main() + app() diff --git a/tests/util.py b/tests/util.py index c96f66f04..d62018db0 100644 --- a/tests/util.py +++ b/tests/util.py @@ -13,7 +13,7 @@ output_path_betterproto = root_path.joinpath("output_betterproto") -def get_directories(path: str) -> Generator[str, None, None]: +def get_directories(path: Path) -> Generator[str, None, None]: for root, directories, files in os.walk(path): yield from directories From 7faec49b61dcae4a9fb428652412123f2595e4ec Mon Sep 17 00:00:00 2001 From: gobot1234 Date: Sat, 23 Jan 2021 18:52:08 +0000 Subject: [PATCH 28/46] Fix some weird bugs --- pyproject.toml | 3 +- src/betterproto/plugin/cli/__init__.py | 1 - src/betterproto/plugin/cli/commands.py | 11 ++--- src/betterproto/plugin/cli/runner.py | 61 ++++++++++++++------------ src/betterproto/plugin/cli/utils.py | 9 +--- src/betterproto/plugin/main.py | 2 +- tests/generate.py | 38 +++++++++------- 7 files changed, 63 insertions(+), 62 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index ab029ff69..d71bfda2d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,6 @@ tox = "^3.15.1" sphinx = "3.1.2" sphinx-rtd-theme = "0.5.0" asv = "^0.4.2" -click = "^7.1.2" typer = "^0.3.2" rich = "^9.8.2" @@ -44,7 +43,7 @@ betterproto = "betterproto:__main__.main" protoc-gen-python_betterproto = "betterproto.plugin:main" [tool.poetry.extras] -compiler = ["black", "jinja2"] +compiler = ["black", "jinja2", "typer", "rich"] [tool.poe.tasks] # Dev workflow tasks diff --git a/src/betterproto/plugin/cli/__init__.py b/src/betterproto/plugin/cli/__init__.py index dbdb74518..c5508b617 100644 --- a/src/betterproto/plugin/cli/__init__.py +++ b/src/betterproto/plugin/cli/__init__.py @@ -8,7 +8,6 @@ else: USE_PROTOC = False -DEFAULT_OUT = "betterproto_out" VERBOSE = False from black import DEFAULT_LINE_LENGTH as DEFAULT_LINE_LENGTH # noqa diff --git a/src/betterproto/plugin/cli/commands.py b/src/betterproto/plugin/cli/commands.py index d7d8871fd..6131bfe71 100644 --- a/src/betterproto/plugin/cli/commands.py +++ b/src/betterproto/plugin/cli/commands.py @@ -1,5 +1,5 @@ from pathlib import Path -from typing import List +from typing import List, Optional import typer import rich @@ -8,7 +8,6 @@ from ..models import monkey_patch_oneof_index from . 
import ( DEFAULT_LINE_LENGTH, - DEFAULT_OUT, USE_PROTOC, VERBOSE, utils, @@ -49,8 +48,8 @@ async def compile( generate_services: bool = typer.Option( True, help="Whether or not to generate servicer stubs" ), - output: Path = typer.Option( - DEFAULT_OUT, + output: Optional[Path] = typer.Option( + None, help="The name of the output directory", file_okay=False, allow_dash=True, @@ -72,9 +71,7 @@ async def compile( for output_path, protos in files.items(): try: output = ( - (Path(output_path.parent.name) / output_path.name).resolve() - if output.name == DEFAULT_OUT - else output + output or (Path(output_path.parent.name) / output_path.name).resolve() ) output.mkdir(exist_ok=True, parents=True) await compile_protobufs( diff --git a/src/betterproto/plugin/cli/runner.py b/src/betterproto/plugin/cli/runner.py index e00beeaa1..9acbcaa42 100644 --- a/src/betterproto/plugin/cli/runner.py +++ b/src/betterproto/plugin/cli/runner.py @@ -3,8 +3,7 @@ import os import re from concurrent.futures import ProcessPoolExecutor -from pathlib import Path -from typing import Any, NoReturn, Tuple +from typing import TYPE_CHECKING, Any, NoReturn, Tuple from ...lib.google.protobuf.compiler import ( CodeGeneratorRequest, @@ -14,13 +13,25 @@ from . import USE_PROTOC, utils from .errors import CLIError, ProtobufSyntaxError +if TYPE_CHECKING: + from pathlib import Path -def write_file(output: Path, file: CodeGeneratorResponseFile) -> None: +DEFAULT_IMPLEMENTATION = "betterproto_" + + +def write_file(output: "Path", file: CodeGeneratorResponseFile) -> None: path = (output / file.name).resolve() - path.write_text(file.content) + if file.content.__class__ is object: + return + try: + path.write_text(file.content) + except FileNotFoundError: + # something weird happens here with files that use google imports + # the google files seem to get included in the generated code but with no code? + pass -def handle_error(data: str, files: Tuple[Path, ...]) -> NoReturn: +def handle_error(data: str, files: Tuple["Path", ...]) -> NoReturn: match = re.match( r"^(?P.+):(?P\d+):(?P\d+):(?P.*)", data, @@ -40,10 +51,10 @@ def handle_error(data: str, files: Tuple[Path, ...]) -> NoReturn: async def compile_protobufs( - *files: Path, - output: Path, + *files: "Path", + output: "Path", use_protoc: bool = USE_PROTOC, - implementation: str = "betterproto_", + use_betterproto: bool = True, **kwargs: Any, ) -> Tuple[str, str]: """ @@ -63,21 +74,21 @@ async def compile_protobufs( Tuple[:class:`str`, :class:`str`] A tuple of the ``stdout`` and ``stderr`` from the invocation of protoc. 
""" + implementation = DEFAULT_IMPLEMENTATION if use_betterproto else "" command = utils.generate_command( *files, output=output, use_protoc=use_protoc, implementation=implementation ) - env = {"USING_BETTERPROTO_CLI": str(kwargs.get("from_cli", False)).lower()} - env.update(os.environ) + process = await asyncio.create_subprocess_shell( command, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, - env=env, + env={"USING_BETTERPROTO_CLI": str(kwargs.get("from_cli", False)), **os.environ}, ) stdout, stderr = await process.communicate() - if implementation == "betterproto_": + if use_betterproto: # we put code on stderr so we can actually read it thank you google :))))) try: @@ -91,28 +102,24 @@ async def compile_protobufs( # Generate code response = await utils.to_thread(generate_code, request, **kwargs) - if len(response.file) > 1: - loop = asyncio.get_event_loop() - with ProcessPoolExecutor() as process_pool: - # write multiple files concurrently - await asyncio.gather( - *( - loop.run_in_executor( - process_pool, functools.partial(write_file, output, file) - ) - for file in response.file + loop = asyncio.get_event_loop() + with ProcessPoolExecutor() as process_pool: + # write multiple files concurrently + await asyncio.gather( + *( + loop.run_in_executor( + process_pool, functools.partial(write_file, output, file) ) + for file in response.file ) - - else: - await utils.to_thread(write_file, output, response.file[0]) + ) stderr = b"" - elif stderr: + if stderr: handle_error(stderr.decode(), files) - elif process.returncode != 0: + if process.returncode != 0: raise CLIError(stderr.decode()) return stdout.decode(), stderr.decode() diff --git a/src/betterproto/plugin/cli/utils.py b/src/betterproto/plugin/cli/utils.py index 070a4ad51..b6af68874 100644 --- a/src/betterproto/plugin/cli/utils.py +++ b/src/betterproto/plugin/cli/utils.py @@ -7,8 +7,7 @@ from pathlib import Path from typing import Awaitable, Callable, TypeVar, Any, List, Set -# from . import USE_PROTOC -USE_PROTOC = True +from . 
import USE_PROTOC T = TypeVar("T") @@ -30,17 +29,12 @@ def get_files(paths: List[Path]) -> "defaultdict[Path, Set[Path]]": """Return a list of files ready for :func:`generate_command`""" - # TODO create a default dict of parent to paths - # recurse up folder to file first folder without a .proto - # return highest directory as first value in list new_paths: "defaultdict[Path, Set[Path]]" = defaultdict(set) for path in paths: if not path.is_absolute(): path = (Path.cwd() / path).resolve() if str(path).startswith("/usr") and "include/google/protobuf" in str(path): - # TODO make this better for windows systems and being in different places in usr/ - # TODO make this actually work :) --plugin=protoc-gen-custom=src/betterproto/plugin/main.py new_paths[path].update(path / proto for proto in INCLUDE) elif path.is_dir(): new_paths[path].update(path.glob("*.proto")) @@ -56,6 +50,7 @@ def generate_command( use_protoc: bool = USE_PROTOC, implementation: str = "betterproto_", ) -> str: + # TODO make this actually work :) --plugin=protoc-gen-custom=src/betterproto/plugin/main.py command = [ f"--proto_path={files[0].parent.as_posix()}", diff --git a/src/betterproto/plugin/main.py b/src/betterproto/plugin/main.py index 006f28504..9fcd3e44e 100755 --- a/src/betterproto/plugin/main.py +++ b/src/betterproto/plugin/main.py @@ -15,7 +15,7 @@ def main() -> None: # Read request message from stdin data = sys.stdin.buffer.read() - if os.getenv("USING_BETTERPROTO_CLI") == "true": + if os.getenv("USING_BETTERPROTO_CLI") == "True": # Write the data to stderr for cli sys.stderr.buffer.write(data) # need to figure out how to potentially lock this sys.stdout.buffer.write(b"") diff --git a/tests/generate.py b/tests/generate.py index c3bb233f4..d7897aeba 100755 --- a/tests/generate.py +++ b/tests/generate.py @@ -61,6 +61,8 @@ async def generate(whitelist: Set[Path], verbose: bool) -> None: sorted(test_case_names), await asyncio.gather(*generation_tasks) ): if exception is not None: # TODO this broke + import traceback + traceback.print_exception(exception.__class__, exception, exception.__traceback__) failed_test_cases.append(test_case_name) if failed_test_cases: @@ -89,9 +91,9 @@ async def generate_test_case_output( files = list(test_case_input_path.glob("*.proto")) try: - ((ref_out, ref_err), (plg_out, plg_err),) = await asyncio.gather( + (ref_out, ref_err), (plg_out, plg_err) = await asyncio.gather( compile_protobufs( - *files, output=test_case_output_path_reference, implementation="" + *files, output=test_case_output_path_reference, use_betterproto=False ), compile_protobufs( *files, output=test_case_output_path_betterproto, from_cli=True @@ -100,36 +102,38 @@ async def generate_test_case_output( except Exception as exc: return exc - message = f"[red][bold]Generated output for {test_case_name!r}" - rich.print(message) + rich.print(f"[bold red]Generated output for {test_case_name!r}") if verbose: if ref_out: - rich.print(f"[bold red]{ref_out}") + rich.print(f"[red]{ref_out}") if ref_err: - rich.print(f"[bold red]{ref_err}", file=sys.stderr) + rich.print(f"[red]{ref_err}", file=sys.stderr) if plg_out: - rich.print(f"[bold red]{plg_out}") + rich.print(f"[red]{plg_out}") if plg_err: - rich.print(f"[bold red]{plg_err}", file=sys.stderr) - sys.stdout.buffer.flush() - sys.stderr.buffer.flush() + rich.print(f"[red]{plg_err}", file=sys.stderr) + sys.stdout.flush() + sys.stderr.flush() -@app.command(context_settings={"help_option_names": ["-h", "--help"]},) +@app.command(context_settings={"help_option_names": ["-h", 
"--help"]}) @utils.run_sync async def main( verbose: bool = typer.Option( - False, help="Whether or not to run the plugin in verbose mode." + False, + "-v", + "--verbose", + help="Whether or not to run the plugin in verbose mode.", ), - directories: List[Path] = typer.Option( - (), + directories: Optional[List[Path]] = typer.Argument( + None, help="One or more relative or absolute directories or test-case names " "test-cases to generate classes for. e.g. ``inputs/bool inputs/double " - "inputs/enum`` or ``bool double enum``" - ) + "inputs/enum`` or ``bool double enum``", + ), ) -> None: """Generate python classes for standard tests.""" - await generate(set(directories), verbose) + await generate(set(directories or ()), verbose) if __name__ == "__main__": From 305b7dfe7818883a3c39aed93d68ab09be088971 Mon Sep 17 00:00:00 2001 From: gobot1234 Date: Tue, 26 Jan 2021 17:18:00 +0000 Subject: [PATCH 29/46] Boring stuff --- README.md | 18 +- docs/quick-start.rst | 12 +- poetry.lock | 1252 ++++++++++++++------------ pyproject.toml | 4 +- src/betterproto/plugin/__init__.py | 10 +- src/betterproto/plugin/cli/errors.py | 1 + src/betterproto/plugin/cli/utils.py | 2 +- src/betterproto/plugin/compiler.py | 8 +- src/betterproto/plugin/main.py | 2 +- src/betterproto/plugin/parser.py | 26 +- tests/generate.py | 5 +- 11 files changed, 730 insertions(+), 610 deletions(-) diff --git a/README.md b/README.md index 5e9d9c345..8ca590912 100644 --- a/README.md +++ b/README.md @@ -74,14 +74,7 @@ You can run the following to invoke protoc directly: ```sh mkdir lib -protoc -I . --python_betterproto_out=lib example.proto -``` - -or run the following to invoke protoc via grpcio-tools: - -```sh -pip install grpcio-tools -python -m grpc_tools.protoc -I . --python_betterproto_out=lib example.proto +betterproto compile example.proto --output=lib ``` This will generate `lib/hello/__init__.py` which looks like: @@ -407,17 +400,12 @@ poe full-test Betterproto includes compiled versions for Google's well-known types at [betterproto/lib/google](betterproto/lib/google). Be sure to regenerate these files when modifying the plugin output format, and validate by running the tests. -Normally, the plugin does not compile any references to `google.protobuf`, since they are pre-compiled. To force compilation of `google.protobuf`, use the option `--custom_opt=INCLUDE_GOOGLE`. +Normally, the plugin does not compile any references to `google.protobuf`, since they are pre-compiled. To force compilation of `google.protobuf`, use the option `--custom_opt=INCLUDE_GOOGLE`. Assuming your `google.protobuf` source files (included with all releases of `protoc`) are located in `/usr/local/include`, you can regenerate them as follows: ```sh -protoc \ - --plugin=protoc-gen-custom=src/betterproto/plugin/main.py \ - --custom_opt=INCLUDE_GOOGLE \ - --custom_out=src/betterproto/lib \ - -I /usr/local/include/ \ - /usr/local/include/google/protobuf/*.proto +betterproto compile /usr/local/include/google/protobuf/*.proto --output=src/betterproto/lib ``` ### TODO diff --git a/docs/quick-start.rst b/docs/quick-start.rst index c731ca20e..b22bd7fa8 100644 --- a/docs/quick-start.rst +++ b/docs/quick-start.rst @@ -42,20 +42,10 @@ Given you installed the compiler and have a proto file, e.g ``example.proto``: To compile the proto you would run the following: -You can run the following to invoke protoc directly: - .. code-block:: sh mkdir hello - protoc -I . --python_betterproto_out=lib example.proto - -or run the following to invoke protoc via grpcio-tools: - -.. 
code-block:: sh - - pip install grpcio-tools - python -m grpc_tools.protoc -I . --python_betterproto_out=lib example.proto - + betterproto compile example.proto --output=lib This will generate ``lib/__init__.py`` which looks like: diff --git a/poetry.lock b/poetry.lock index 402646a2e..368be9fc3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,26 +1,26 @@ [[package]] -category = "dev" -description = "A configurable sidebar-enabled Sphinx theme" name = "alabaster" +version = "0.7.12" +description = "A configurable sidebar-enabled Sphinx theme" +category = "dev" optional = false python-versions = "*" -version = "0.7.12" [[package]] -category = "main" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." name = "appdirs" +version = "1.4.4" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "main" optional = false python-versions = "*" -version = "1.4.4" [[package]] -category = "dev" -description = "Airspeed Velocity: A simple Python history benchmarking tool" name = "asv" +version = "0.4.2" +description = "Airspeed Velocity: A simple Python history benchmarking tool" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "0.4.2" [package.dependencies] six = ">=1.4" @@ -29,59 +29,58 @@ six = ">=1.4" hg = ["python-hglib (>=1.5)"] [[package]] -category = "dev" -description = "Atomic file writes." -marker = "sys_platform == \"win32\"" name = "atomicwrites" +version = "1.4.0" +description = "Atomic file writes." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.4.0" [[package]] -category = "dev" -description = "Classes Without Boilerplate" name = "attrs" +version = "20.3.0" +description = "Classes Without Boilerplate" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "20.2.0" [package.extras] -dev = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "sphinx-rtd-theme", "pre-commit"] -docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] -tests_no_zope = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "furo", "sphinx", "pre-commit"] +docs = ["furo", "sphinx", "zope.interface"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] [[package]] -category = "dev" -description = "Internationalization utilities" name = "babel" +version = "2.9.0" +description = "Internationalization utilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.8.0" [package.dependencies] pytz = ">=2015.7" [[package]] -category = "main" -description = "Backport of Python 3.7's datetime.fromisoformat" -marker = "python_version < \"3.7\"" name = "backports-datetime-fromisoformat" +version = "1.0.0" +description = "Backport of Python 3.7's datetime.fromisoformat" +category = "main" optional = false python-versions = "*" -version = "1.0.0" [[package]] -category = "main" -description = "The uncompromising code formatter." 
name = "black" +version = "20.8b1" +description = "The uncompromising code formatter." +category = "main" optional = false python-versions = ">=3.6" -version = "20.8b1" [package.dependencies] appdirs = "*" click = ">=7.1.2" +dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} mypy-extensions = ">=0.4.3" pathspec = ">=0.6,<1" regex = ">=2020.1.8" @@ -89,32 +88,28 @@ toml = ">=0.10.1" typed-ast = ">=1.4.0" typing-extensions = ">=3.7.4" -[package.dependencies.dataclasses] -python = "<3.7" -version = ">=0.6" - [package.extras] colorama = ["colorama (>=0.4.3)"] d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] -category = "dev" -description = "A thin, practical wrapper around terminal coloring, styling, and positioning" name = "blessings" +version = "1.7" +description = "A thin, practical wrapper around terminal coloring, styling, and positioning" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.7" [package.dependencies] six = "*" [[package]] -category = "dev" -description = "Fancy Interface to the Python Interpreter" name = "bpython" +version = "0.19" +description = "Fancy Interface to the Python Interpreter" +category = "dev" optional = false python-versions = "*" -version = "0.19" [package.dependencies] curtsies = ">=0.1.18" @@ -129,228 +124,243 @@ urwid = ["urwid"] watch = ["watchdog"] [[package]] -category = "dev" -description = "Python package for providing Mozilla's CA Bundle." name = "certifi" +version = "2020.12.5" +description = "Python package for providing Mozilla's CA Bundle." +category = "dev" optional = false python-versions = "*" -version = "2020.6.20" [[package]] -category = "dev" -description = "Universal encoding detector for Python 2 and 3" name = "chardet" +version = "4.0.0" +description = "Universal encoding detector for Python 2 and 3" +category = "dev" optional = false -python-versions = "*" -version = "3.0.4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] -category = "main" -description = "Composable command line interface toolkit" name = "click" +version = "7.1.2" +description = "Composable command line interface toolkit" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "7.1.2" [[package]] -category = "dev" -description = "Cross-platform colored terminal text." -marker = "sys_platform == \"win32\" or platform_system == \"Windows\"" name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.4.4" [[package]] -category = "dev" -description = "Code coverage measurement for Python" +name = "commonmark" +version = "0.9.1" +description = "Python parser for the CommonMark Markdown spec" +category = "main" +optional = true +python-versions = "*" + +[package.extras] +test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] + +[[package]] name = "coverage" +version = "5.4" +description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" -version = "5.3" [package.extras] toml = ["toml"] [[package]] -category = "dev" -description = "Curses-like terminal wrapper, with colored strings!" name = "curtsies" +version = "0.3.5" +description = "Curses-like terminal wrapper, with colored strings!" 
+category = "dev" optional = false -python-versions = "*" -version = "0.3.4" +python-versions = ">=3.6" [package.dependencies] blessings = ">=1.5" -wcwidth = ">=0.1.4" +cwcwidth = "*" + +[[package]] +name = "cwcwidth" +version = "0.1.1" +description = "Python bindings for wc(s)width" +category = "dev" +optional = false +python-versions = ">= 3.6" [[package]] -category = "main" -description = "A backport of the dataclasses module for Python 3.6" -marker = "python_version >= \"3.6\" and python_version < \"3.7\" or python_version < \"3.7\"" name = "dataclasses" +version = "0.7" +description = "A backport of the dataclasses module for Python 3.6" +category = "main" optional = false python-versions = ">=3.6, <3.7" -version = "0.7" [[package]] -category = "dev" -description = "Distribution utilities" name = "distlib" +version = "0.3.1" +description = "Distribution utilities" +category = "dev" optional = false python-versions = "*" -version = "0.3.1" [[package]] -category = "dev" -description = "Docutils -- Python Documentation Utilities" name = "docutils" +version = "0.16" +description = "Docutils -- Python Documentation Utilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.16" [[package]] -category = "dev" -description = "A platform independent file lock." name = "filelock" +version = "3.0.12" +description = "A platform independent file lock." +category = "dev" optional = false python-versions = "*" -version = "3.0.12" [[package]] -category = "dev" -description = "Lightweight in-process concurrent programming" name = "greenlet" +version = "1.0.0" +description = "Lightweight in-process concurrent programming" +category = "dev" optional = false -python-versions = "*" -version = "0.4.17" +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" + +[package.extras] +docs = ["sphinx"] [[package]] -category = "dev" -description = "HTTP/2-based RPC framework" name = "grpcio" +version = "1.35.0" +description = "HTTP/2-based RPC framework" +category = "dev" optional = false python-versions = "*" -version = "1.32.0" [package.dependencies] six = ">=1.5.2" [package.extras] -protobuf = ["grpcio-tools (>=1.32.0)"] +protobuf = ["grpcio-tools (>=1.35.0)"] [[package]] -category = "dev" -description = "Protobuf code generator for gRPC" name = "grpcio-tools" +version = "1.35.0" +description = "Protobuf code generator for gRPC" +category = "dev" optional = false python-versions = "*" -version = "1.32.0" [package.dependencies] -grpcio = ">=1.32.0" +grpcio = ">=1.35.0" protobuf = ">=3.5.0.post1,<4.0dev" [[package]] -category = "main" -description = "Pure-Python gRPC implementation for asyncio" name = "grpclib" +version = "0.4.1" +description = "Pure-Python gRPC implementation for asyncio" +category = "main" optional = false python-versions = ">=3.6" -version = "0.4.1" [package.dependencies] +dataclasses = {version = "*", markers = "python_version < \"3.7\""} h2 = ">=3.1.0,<5" multidict = "*" -[package.dependencies.dataclasses] -python = "<3.7" -version = "*" - [[package]] -category = "main" -description = "HTTP/2 State-Machine based protocol implementation" name = "h2" +version = "3.2.0" +description = "HTTP/2 State-Machine based protocol implementation" +category = "main" optional = false python-versions = "*" -version = "3.2.0" [package.dependencies] hpack = ">=3.0,<4" hyperframe = ">=5.2.0,<6" [[package]] -category = "main" -description = "Pure-Python HPACK header compression" name = "hpack" +version = "3.0.0" +description = 
"Pure-Python HPACK header compression" +category = "main" optional = false python-versions = "*" -version = "3.0.0" [[package]] -category = "main" -description = "HTTP/2 framing layer for Python" name = "hyperframe" +version = "5.2.0" +description = "HTTP/2 framing layer for Python" +category = "main" optional = false python-versions = "*" -version = "5.2.0" [[package]] -category = "dev" -description = "Internationalized Domain Names in Applications (IDNA)" name = "idna" +version = "2.10" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.10" [[package]] -category = "dev" -description = "Getting image size from png/jpeg/jpeg2000/gif file" name = "imagesize" +version = "1.2.0" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.2.0" [[package]] -category = "dev" -description = "Read metadata from Python packages" -marker = "python_version < \"3.8\"" name = "importlib-metadata" +version = "3.4.0" +description = "Read metadata from Python packages" +category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -version = "2.0.0" +python-versions = ">=3.6" [package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["sphinx", "rst.linker"] -testing = ["packaging", "pep517", "importlib-resources (>=1.3)"] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] [[package]] -category = "dev" -description = "Read resources from Python packages" -marker = "python_version < \"3.7\"" name = "importlib-resources" +version = "5.1.0" +description = "Read resources from Python packages" +category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -version = "3.0.0" +python-versions = ">=3.6" [package.dependencies] -[package.dependencies.zipp] -python = "<3.8" -version = ">=0.4" +zipp = {version = ">=0.4", markers = "python_version < \"3.8\""} [package.extras] -docs = ["sphinx", "rst.linker", "jaraco.packaging"] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "pytest-black (>=0.3.7)", "pytest-mypy"] [[package]] -category = "main" -description = "A very fast and expressive template engine." name = "jinja2" +version = "2.11.2" +description = "A very fast and expressive template engine." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "2.11.2" [package.dependencies] MarkupSafe = ">=0.23" @@ -359,36 +369,36 @@ MarkupSafe = ">=0.23" i18n = ["Babel (>=0.8)"] [[package]] -category = "main" -description = "Safely add untrusted strings to HTML/XML markup." name = "markupsafe" +version = "1.1.1" +description = "Safely add untrusted strings to HTML/XML markup." 
+category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" -version = "1.1.1" [[package]] -category = "dev" -description = "More routines for operating on iterables, beyond itertools" name = "more-itertools" +version = "8.6.0" +description = "More routines for operating on iterables, beyond itertools" +category = "dev" optional = false python-versions = ">=3.5" -version = "8.5.0" [[package]] -category = "main" -description = "multidict implementation" name = "multidict" +version = "5.1.0" +description = "multidict implementation" +category = "main" optional = false -python-versions = ">=3.5" -version = "5.0.0" +python-versions = ">=3.6" [[package]] -category = "dev" -description = "Optional static typing for Python" name = "mypy" +version = "0.770" +description = "Optional static typing for Python" +category = "dev" optional = false python-versions = ">=3.5" -version = "0.770" [package.dependencies] mypy-extensions = ">=0.4.3,<0.5.0" @@ -399,138 +409,131 @@ typing-extensions = ">=3.7.4" dmypy = ["psutil (>=4.0)"] [[package]] -category = "main" -description = "Experimental type system extensions for programs checked with the mypy typechecker." name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "main" optional = false python-versions = "*" -version = "0.4.3" [[package]] -category = "dev" -description = "Core utilities for Python packages" name = "packaging" +version = "20.8" +description = "Core utilities for Python packages" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "20.4" [package.dependencies] pyparsing = ">=2.0.2" -six = "*" [[package]] -category = "dev" -description = "Bring colors to your terminal." name = "pastel" +version = "0.2.1" +description = "Bring colors to your terminal." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "0.2.1" [[package]] -category = "main" -description = "Utility library for gitignore style pattern matching of file paths." name = "pathspec" +version = "0.8.1" +description = "Utility library for gitignore style pattern matching of file paths." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.8.0" [[package]] -category = "dev" -description = "plugin and hook calling mechanisms for python" name = "pluggy" +version = "0.13.1" +description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "0.13.1" [package.dependencies] -[package.dependencies.importlib-metadata] -python = "<3.8" -version = ">=0.12" +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} [package.extras] dev = ["pre-commit", "tox"] [[package]] -category = "dev" -description = "A task runner that works well with poetry." name = "poethepoet" +version = "0.9.0" +description = "A task runner that works well with poetry." 
+category = "dev" optional = false python-versions = ">=3.6,<4.0" -version = "0.9.0" [package.dependencies] pastel = ">=0.2.0,<0.3.0" tomlkit = ">=0.6.0,<1.0.0" [[package]] -category = "dev" -description = "Protocol Buffers" name = "protobuf" +version = "3.14.0" +description = "Protocol Buffers" +category = "dev" optional = false python-versions = "*" -version = "3.13.0" [package.dependencies] -setuptools = "*" six = ">=1.9" [[package]] -category = "dev" -description = "library with cross-python path, ini-parsing, io, code, log facilities" name = "py" +version = "1.10.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.9.0" [[package]] -category = "dev" -description = "Pygments is a syntax highlighting package written in Python." name = "pygments" +version = "2.7.4" +description = "Pygments is a syntax highlighting package written in Python." +category = "main" optional = false python-versions = ">=3.5" -version = "2.7.1" [[package]] -category = "dev" -description = "Python parsing module" name = "pyparsing" +version = "2.4.7" +description = "Python parsing module" +category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -version = "2.4.7" [[package]] -category = "dev" -description = "pytest: simple powerful testing with Python" name = "pytest" +version = "5.4.3" +description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.5" -version = "5.4.3" [package.dependencies] -atomicwrites = ">=1.0" +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=17.4.0" -colorama = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} more-itertools = ">=4.0.0" packaging = "*" pluggy = ">=0.12,<1.0" py = ">=1.5.0" wcwidth = "*" -[package.dependencies.importlib-metadata] -python = "<3.8" -version = ">=0.12" - [package.extras] -checkqa-mypy = ["mypy (v0.761)"] +checkqa-mypy = ["mypy (==v0.761)"] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] [[package]] -category = "dev" -description = "Pytest support for asyncio." name = "pytest-asyncio" +version = "0.12.0" +description = "Pytest support for asyncio." +category = "dev" optional = false python-versions = ">= 3.5" -version = "0.12.0" [package.dependencies] pytest = ">=5.4.0" @@ -539,27 +542,27 @@ pytest = ">=5.4.0" testing = ["async_generator (>=1.3)", "coverage", "hypothesis (>=5.7.1)"] [[package]] -category = "dev" -description = "Pytest plugin for measuring coverage." name = "pytest-cov" +version = "2.11.1" +description = "Pytest plugin for measuring coverage." 
+category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "2.10.1" [package.dependencies] -coverage = ">=4.4" +coverage = ">=5.2.1" pytest = ">=4.6" [package.extras] -testing = ["fields", "hunter", "process-tests (2.0.2)", "six", "pytest-xdist", "virtualenv"] +testing = ["fields", "hunter", "process-tests (==2.0.2)", "six", "pytest-xdist", "virtualenv"] [[package]] -category = "dev" -description = "Thin-wrapper around the mock package for easier use with pytest" name = "pytest-mock" +version = "3.5.1" +description = "Thin-wrapper around the mock package for easier use with pytest" +category = "dev" optional = false python-versions = ">=3.5" -version = "3.3.1" [package.dependencies] pytest = ">=5.0" @@ -568,74 +571,91 @@ pytest = ">=5.0" dev = ["pre-commit", "tox", "pytest-asyncio"] [[package]] -category = "dev" -description = "World timezone definitions, modern and historical" name = "pytz" +version = "2020.5" +description = "World timezone definitions, modern and historical" +category = "dev" optional = false python-versions = "*" -version = "2020.1" [[package]] -category = "main" -description = "Alternative regular expression module, to replace re." name = "regex" +version = "2020.11.13" +description = "Alternative regular expression module, to replace re." +category = "main" optional = false python-versions = "*" -version = "2020.10.15" [[package]] -category = "dev" -description = "Python HTTP for Humans." name = "requests" +version = "2.25.1" +description = "Python HTTP for Humans." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "2.24.0" [package.dependencies] certifi = ">=2017.4.17" -chardet = ">=3.0.2,<4" +chardet = ">=3.0.2,<5" idna = ">=2.5,<3" -urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26" +urllib3 = ">=1.21.1,<1.27" [package.extras] security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] -socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"] +socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] + +[[package]] +name = "rich" +version = "9.9.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +category = "main" +optional = true +python-versions = ">=3.6,<4.0" + +[package.dependencies] +colorama = ">=0.4.0,<0.5.0" +commonmark = ">=0.9.0,<0.10.0" +dataclasses = {version = ">=0.7,<0.9", markers = "python_version >= \"3.6\" and python_version < \"3.7\""} +pygments = ">=2.6.0,<3.0.0" +typing-extensions = ">=3.7.4,<4.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] -category = "dev" -description = "Python 2 and 3 compatibility utilities" name = "six" +version = "1.15.0" +description = "Python 2 and 3 compatibility utilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -version = "1.15.0" [[package]] -category = "dev" -description = "This package provides 26 stemmers for 25 languages generated from Snowball algorithms." name = "snowballstemmer" +version = "2.1.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+category = "dev" optional = false python-versions = "*" -version = "2.0.0" [[package]] -category = "dev" -description = "Python documentation generator" name = "sphinx" +version = "3.1.2" +description = "Python documentation generator" +category = "dev" optional = false python-versions = ">=3.5" -version = "3.1.2" [package.dependencies] -Jinja2 = ">=2.3" -Pygments = ">=2.0" alabaster = ">=0.7,<0.8" babel = ">=1.3" -colorama = ">=0.3.5" +colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} docutils = ">=0.12" imagesize = "*" +Jinja2 = ">=2.3" packaging = "*" +Pygments = ">=2.0" requests = ">=2.5.0" -setuptools = "*" snowballstemmer = ">=1.1" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" @@ -650,12 +670,12 @@ lint = ["flake8 (>=3.5.0)", "flake8-import-order", "mypy (>=0.780)", "docutils-s test = ["pytest", "pytest-cov", "html5lib", "typed-ast", "cython"] [[package]] -category = "dev" -description = "Read the Docs theme for Sphinx" name = "sphinx-rtd-theme" +version = "0.5.0" +description = "Read the Docs theme for Sphinx" +category = "dev" optional = false python-versions = "*" -version = "0.5.0" [package.dependencies] sphinx = "*" @@ -664,103 +684,104 @@ sphinx = "*" dev = ["transifex-client", "sphinxcontrib-httpdomain", "bump2version"] [[package]] -category = "dev" -description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" name = "sphinxcontrib-applehelp" +version = "1.0.2" +description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +category = "dev" optional = false python-versions = ">=3.5" -version = "1.0.2" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] -category = "dev" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." +category = "dev" optional = false python-versions = ">=3.5" -version = "1.0.2" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] -category = "dev" -description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" name = "sphinxcontrib-htmlhelp" +version = "1.0.3" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "dev" optional = false python-versions = ">=3.5" -version = "1.0.3" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest", "html5lib"] [[package]] -category = "dev" -description = "A sphinx extension which renders display math in HTML via JavaScript" name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +category = "dev" optional = false python-versions = ">=3.5" -version = "1.0.1" [package.extras] test = ["pytest", "flake8", "mypy"] [[package]] -category = "dev" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +category = "dev" optional = false python-versions = ">=3.5" -version = "1.0.3" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] -category = "dev" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
name = "sphinxcontrib-serializinghtml" +version = "1.1.4" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." +category = "dev" optional = false python-versions = ">=3.5" -version = "1.1.4" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] -category = "main" -description = "Python Library for Tom's Obvious, Minimal Language" name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "main" optional = false -python-versions = "*" -version = "0.10.1" +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] -category = "dev" -description = "Style preserving TOML library" name = "tomlkit" +version = "0.7.0" +description = "Style preserving TOML library" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.7.0" [[package]] -category = "dev" -description = "tox is a generic virtualenv management and test command line tool" name = "tox" +version = "3.21.2" +description = "tox is a generic virtualenv management and test command line tool" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -version = "3.20.1" [package.dependencies] -colorama = ">=0.4.1" +colorama = {version = ">=0.4.1", markers = "platform_system == \"Windows\""} filelock = ">=3.0.0" +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} packaging = ">=14" pluggy = ">=0.12.0" py = ">=1.4.17" @@ -768,97 +789,103 @@ six = ">=1.14.0" toml = ">=0.9.4" virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,<20.0.3 || >20.0.3,<20.0.4 || >20.0.4,<20.0.5 || >20.0.5,<20.0.6 || >20.0.6,<20.0.7 || >20.0.7" -[package.dependencies.importlib-metadata] -python = "<3.8" -version = ">=0.12,<3" - [package.extras] docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"] -testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psutil (>=5.6.1)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "pytest-xdist (>=1.22.2)"] +testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "psutil (>=5.6.1)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "pytest-xdist (>=1.22.2)", "pathlib2 (>=2.3.3)"] [[package]] -category = "main" -description = "a fork of Python 2 and 3 ast modules with type comment support" name = "typed-ast" +version = "1.4.2" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "main" optional = false python-versions = "*" -version = "1.4.1" [[package]] +name = "typer" +version = "0.3.2" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
category = "main" -description = "Backported and Experimental Type Hints for Python 3.5+" +optional = true +python-versions = ">=3.6" + +[package.dependencies] +click = ">=7.1.1,<7.2.0" + +[package.extras] +test = ["pytest-xdist (>=1.32.0,<2.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "mypy (==0.782)", "black (>=19.10b0,<20.0b0)", "isort (>=5.0.6,<6.0.0)", "shellingham (>=1.3.0,<2.0.0)", "pytest (>=4.4.0,<5.4.0)", "pytest-cov (>=2.10.0,<3.0.0)", "coverage (>=5.2,<6.0)"] +all = ["colorama (>=0.4.3,<0.5.0)", "shellingham (>=1.3.0,<2.0.0)"] +dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)"] +doc = ["mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=5.4.0,<6.0.0)", "markdown-include (>=0.5.1,<0.6.0)"] + +[[package]] name = "typing-extensions" +version = "3.7.4.3" +description = "Backported and Experimental Type Hints for Python 3.5+" +category = "main" optional = false python-versions = "*" -version = "3.7.4.3" [[package]] -category = "dev" -description = "HTTP library with thread-safe connection pooling, file post, and more." name = "urllib3" +version = "1.26.2" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" -version = "1.25.10" [package.extras] brotli = ["brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0.14)", "ipaddress"] -socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] -category = "dev" -description = "Virtual Python Environment builder" name = "virtualenv" +version = "20.4.0" +description = "Virtual Python Environment builder" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -version = "20.0.35" [package.dependencies] appdirs = ">=1.4.3,<2" distlib = ">=0.3.1,<1" filelock = ">=3.0.0,<4" +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +importlib-resources = {version = ">=1.0", markers = "python_version < \"3.7\""} six = ">=1.9.0,<2" -[package.dependencies.importlib-metadata] -python = "<3.8" -version = ">=0.12,<3" - -[package.dependencies.importlib-resources] -python = "<3.7" -version = ">=1.0" - [package.extras] docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] -testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "pytest-xdist (>=1.31.0)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] [[package]] -category = "dev" -description = "Measures the displayed width of unicode strings in a terminal" name = "wcwidth" +version = "0.2.5" +description = "Measures the displayed width of unicode strings in a terminal" +category = "dev" optional = false python-versions = "*" -version = "0.2.5" [[package]] -category = "dev" -description = "Backport of pathlib-compatible object wrapper for zip files" -marker = "python_version < \"3.8\"" name = "zipp" +version = 
"3.4.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" optional = false python-versions = ">=3.6" -version = "3.3.1" [package.extras] docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=3.5,<3.7.3 || >3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "jaraco.test (>=3.2.0)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "jaraco.test (>=3.2.0)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [extras] -compiler = ["black", "jinja2"] +compiler = ["black", "jinja2", "typer", "rich"] [metadata] -content-hash = "e43baf152424b7496430ae5b5fdf2f0680fb6cc1c7abac9a0184dd851003762c" -lock-version = "1.0" +lock-version = "1.1" python-versions = "^3.6" +content-hash = "d6e514b0cf9eeb9f3cb06500a8500361c3759cb54aaf13593c1e51861e8e9e66" [metadata.files] alabaster = [ @@ -877,12 +904,12 @@ atomicwrites = [ {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, ] attrs = [ - {file = "attrs-20.2.0-py2.py3-none-any.whl", hash = "sha256:fce7fc47dfc976152e82d53ff92fa0407700c21acd20886a13777a0d20e655dc"}, - {file = "attrs-20.2.0.tar.gz", hash = "sha256:26b54ddbbb9ee1d34d5d3668dd37d6cf74990ab23c828c2888dccdceee395594"}, + {file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"}, + {file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"}, ] babel = [ - {file = "Babel-2.8.0-py2.py3-none-any.whl", hash = "sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4"}, - {file = "Babel-2.8.0.tar.gz", hash = "sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38"}, + {file = "Babel-2.9.0-py2.py3-none-any.whl", hash = "sha256:9d35c22fcc79893c3ecc85ac4a56cde1ecf3f19c540bba0922308a6c06ca6fa5"}, + {file = "Babel-2.9.0.tar.gz", hash = "sha256:da031ab54472314f210b0adcff1588ee5d1d1d0ba4dbd07b94dba82bde791e05"}, ] backports-datetime-fromisoformat = [ {file = "backports-datetime-fromisoformat-1.0.0.tar.gz", hash = "sha256:9577a2a9486cd7383a5f58b23bb8e81cf0821dbbc0eb7c87d3fa198c1df40f5c"}, @@ -900,12 +927,12 @@ bpython = [ {file = "bpython-0.19.tar.gz", hash = "sha256:476ce09a896c4d34bf5e56aca64650c56fdcfce45781a20dc1521221df8cc49c"}, ] certifi = [ - {file = "certifi-2020.6.20-py2.py3-none-any.whl", hash = "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41"}, - {file = "certifi-2020.6.20.tar.gz", hash = "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3"}, + {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, + {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, ] chardet = [ - {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"}, - {file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"}, + {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, + {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, 
] click = [ {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, @@ -915,45 +942,66 @@ colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] +commonmark = [ + {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, + {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, +] coverage = [ - {file = "coverage-5.3-cp27-cp27m-macosx_10_13_intel.whl", hash = "sha256:bd3166bb3b111e76a4f8e2980fa1addf2920a4ca9b2b8ca36a3bc3dedc618270"}, - {file = "coverage-5.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9342dd70a1e151684727c9c91ea003b2fb33523bf19385d4554f7897ca0141d4"}, - {file = "coverage-5.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:63808c30b41f3bbf65e29f7280bf793c79f54fb807057de7e5238ffc7cc4d7b9"}, - {file = "coverage-5.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:4d6a42744139a7fa5b46a264874a781e8694bb32f1d76d8137b68138686f1729"}, - {file = "coverage-5.3-cp27-cp27m-win32.whl", hash = "sha256:86e9f8cd4b0cdd57b4ae71a9c186717daa4c5a99f3238a8723f416256e0b064d"}, - {file = "coverage-5.3-cp27-cp27m-win_amd64.whl", hash = "sha256:7858847f2d84bf6e64c7f66498e851c54de8ea06a6f96a32a1d192d846734418"}, - {file = "coverage-5.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:530cc8aaf11cc2ac7430f3614b04645662ef20c348dce4167c22d99bec3480e9"}, - {file = "coverage-5.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:381ead10b9b9af5f64646cd27107fb27b614ee7040bb1226f9c07ba96625cbb5"}, - {file = "coverage-5.3-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:71b69bd716698fa62cd97137d6f2fdf49f534decb23a2c6fc80813e8b7be6822"}, - {file = "coverage-5.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:1d44bb3a652fed01f1f2c10d5477956116e9b391320c94d36c6bf13b088a1097"}, - {file = "coverage-5.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:1c6703094c81fa55b816f5ae542c6ffc625fec769f22b053adb42ad712d086c9"}, - {file = "coverage-5.3-cp35-cp35m-win32.whl", hash = "sha256:cedb2f9e1f990918ea061f28a0f0077a07702e3819602d3507e2ff98c8d20636"}, - {file = "coverage-5.3-cp35-cp35m-win_amd64.whl", hash = "sha256:7f43286f13d91a34fadf61ae252a51a130223c52bfefb50310d5b2deb062cf0f"}, - {file = "coverage-5.3-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:c851b35fc078389bc16b915a0a7c1d5923e12e2c5aeec58c52f4aa8085ac8237"}, - {file = "coverage-5.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:aac1ba0a253e17889550ddb1b60a2063f7474155465577caa2a3b131224cfd54"}, - {file = "coverage-5.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2b31f46bf7b31e6aa690d4c7a3d51bb262438c6dcb0d528adde446531d0d3bb7"}, - {file = "coverage-5.3-cp36-cp36m-win32.whl", hash = "sha256:c5f17ad25d2c1286436761b462e22b5020d83316f8e8fcb5deb2b3151f8f1d3a"}, - {file = "coverage-5.3-cp36-cp36m-win_amd64.whl", hash = "sha256:aef72eae10b5e3116bac6957de1df4d75909fc76d1499a53fb6387434b6bcd8d"}, - {file = "coverage-5.3-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:e8caf961e1b1a945db76f1b5fa9c91498d15f545ac0ababbe575cfab185d3bd8"}, - {file = "coverage-5.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:29a6272fec10623fcbe158fdf9abc7a5fa032048ac1d8631f14b50fbfc10d17f"}, - {file = "coverage-5.3-cp37-cp37m-manylinux1_x86_64.whl", hash = 
"sha256:2d43af2be93ffbad25dd959899b5b809618a496926146ce98ee0b23683f8c51c"}, - {file = "coverage-5.3-cp37-cp37m-win32.whl", hash = "sha256:c3888a051226e676e383de03bf49eb633cd39fc829516e5334e69b8d81aae751"}, - {file = "coverage-5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9669179786254a2e7e57f0ecf224e978471491d660aaca833f845b72a2df3709"}, - {file = "coverage-5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0203acd33d2298e19b57451ebb0bed0ab0c602e5cf5a818591b4918b1f97d516"}, - {file = "coverage-5.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:582ddfbe712025448206a5bc45855d16c2e491c2dd102ee9a2841418ac1c629f"}, - {file = "coverage-5.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:0f313707cdecd5cd3e217fc68c78a960b616604b559e9ea60cc16795c4304259"}, - {file = "coverage-5.3-cp38-cp38-win32.whl", hash = "sha256:78e93cc3571fd928a39c0b26767c986188a4118edc67bc0695bc7a284da22e82"}, - {file = "coverage-5.3-cp38-cp38-win_amd64.whl", hash = "sha256:8f264ba2701b8c9f815b272ad568d555ef98dfe1576802ab3149c3629a9f2221"}, - {file = "coverage-5.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:50691e744714856f03a86df3e2bff847c2acede4c191f9a1da38f088df342978"}, - {file = "coverage-5.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9361de40701666b034c59ad9e317bae95c973b9ff92513dd0eced11c6adf2e21"}, - {file = "coverage-5.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:c1b78fb9700fc961f53386ad2fd86d87091e06ede5d118b8a50dea285a071c24"}, - {file = "coverage-5.3-cp39-cp39-win32.whl", hash = "sha256:cb7df71de0af56000115eafd000b867d1261f786b5eebd88a0ca6360cccfaca7"}, - {file = "coverage-5.3-cp39-cp39-win_amd64.whl", hash = "sha256:47a11bdbd8ada9b7ee628596f9d97fbd3851bd9999d398e9436bd67376dbece7"}, - {file = "coverage-5.3.tar.gz", hash = "sha256:280baa8ec489c4f542f8940f9c4c2181f0306a8ee1a54eceba071a449fb870a0"}, + {file = "coverage-5.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:6d9c88b787638a451f41f97446a1c9fd416e669b4d9717ae4615bd29de1ac135"}, + {file = "coverage-5.4-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:66a5aae8233d766a877c5ef293ec5ab9520929c2578fd2069308a98b7374ea8c"}, + {file = "coverage-5.4-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9754a5c265f991317de2bac0c70a746efc2b695cf4d49f5d2cddeac36544fb44"}, + {file = "coverage-5.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:fbb17c0d0822684b7d6c09915677a32319f16ff1115df5ec05bdcaaee40b35f3"}, + {file = "coverage-5.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:b7f7421841f8db443855d2854e25914a79a1ff48ae92f70d0a5c2f8907ab98c9"}, + {file = "coverage-5.4-cp27-cp27m-win32.whl", hash = "sha256:4a780807e80479f281d47ee4af2eb2df3e4ccf4723484f77da0bb49d027e40a1"}, + {file = "coverage-5.4-cp27-cp27m-win_amd64.whl", hash = "sha256:87c4b38288f71acd2106f5d94f575bc2136ea2887fdb5dfe18003c881fa6b370"}, + {file = "coverage-5.4-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:c6809ebcbf6c1049002b9ac09c127ae43929042ec1f1dbd8bb1615f7cd9f70a0"}, + {file = "coverage-5.4-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ba7ca81b6d60a9f7a0b4b4e175dcc38e8fef4992673d9d6e6879fd6de00dd9b8"}, + {file = "coverage-5.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:89fc12c6371bf963809abc46cced4a01ca4f99cba17be5e7d416ed7ef1245d19"}, + {file = "coverage-5.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a8eb7785bd23565b542b01fb39115a975fefb4a82f23d407503eee2c0106247"}, + {file = "coverage-5.4-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:7e40d3f8eb472c1509b12ac2a7e24158ec352fc8567b77ab02c0db053927e339"}, + {file = 
"coverage-5.4-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:1ccae21a076d3d5f471700f6d30eb486da1626c380b23c70ae32ab823e453337"}, + {file = "coverage-5.4-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:755c56beeacac6a24c8e1074f89f34f4373abce8b662470d3aa719ae304931f3"}, + {file = "coverage-5.4-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:322549b880b2d746a7672bf6ff9ed3f895e9c9f108b714e7360292aa5c5d7cf4"}, + {file = "coverage-5.4-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:60a3307a84ec60578accd35d7f0c71a3a971430ed7eca6567399d2b50ef37b8c"}, + {file = "coverage-5.4-cp35-cp35m-win32.whl", hash = "sha256:1375bb8b88cb050a2d4e0da901001347a44302aeadb8ceb4b6e5aa373b8ea68f"}, + {file = "coverage-5.4-cp35-cp35m-win_amd64.whl", hash = "sha256:16baa799ec09cc0dcb43a10680573269d407c159325972dd7114ee7649e56c66"}, + {file = "coverage-5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2f2cf7a42d4b7654c9a67b9d091ec24374f7c58794858bff632a2039cb15984d"}, + {file = "coverage-5.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:b62046592b44263fa7570f1117d372ae3f310222af1fc1407416f037fb3af21b"}, + {file = "coverage-5.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:812eaf4939ef2284d29653bcfee9665f11f013724f07258928f849a2306ea9f9"}, + {file = "coverage-5.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:859f0add98707b182b4867359e12bde806b82483fb12a9ae868a77880fc3b7af"}, + {file = "coverage-5.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:04b14e45d6a8e159c9767ae57ecb34563ad93440fc1b26516a89ceb5b33c1ad5"}, + {file = "coverage-5.4-cp36-cp36m-win32.whl", hash = "sha256:ebfa374067af240d079ef97b8064478f3bf71038b78b017eb6ec93ede1b6bcec"}, + {file = "coverage-5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:84df004223fd0550d0ea7a37882e5c889f3c6d45535c639ce9802293b39cd5c9"}, + {file = "coverage-5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1b811662ecf72eb2d08872731636aee6559cae21862c36f74703be727b45df90"}, + {file = "coverage-5.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6b588b5cf51dc0fd1c9e19f622457cc74b7d26fe295432e434525f1c0fae02bc"}, + {file = "coverage-5.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:3fe50f1cac369b02d34ad904dfe0771acc483f82a1b54c5e93632916ba847b37"}, + {file = "coverage-5.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:32ab83016c24c5cf3db2943286b85b0a172dae08c58d0f53875235219b676409"}, + {file = "coverage-5.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:68fb816a5dd901c6aff352ce49e2a0ffadacdf9b6fae282a69e7a16a02dad5fb"}, + {file = "coverage-5.4-cp37-cp37m-win32.whl", hash = "sha256:a636160680c6e526b84f85d304e2f0bb4e94f8284dd765a1911de9a40450b10a"}, + {file = "coverage-5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:bb32ca14b4d04e172c541c69eec5f385f9a075b38fb22d765d8b0ce3af3a0c22"}, + {file = "coverage-5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4d7165a4e8f41eca6b990c12ee7f44fef3932fac48ca32cecb3a1b2223c21f"}, + {file = "coverage-5.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:a565f48c4aae72d1d3d3f8e8fb7218f5609c964e9c6f68604608e5958b9c60c3"}, + {file = "coverage-5.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fff1f3a586246110f34dc762098b5afd2de88de507559e63553d7da643053786"}, + {file = "coverage-5.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:a839e25f07e428a87d17d857d9935dd743130e77ff46524abb992b962eb2076c"}, + {file = "coverage-5.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:6625e52b6f346a283c3d563d1fd8bae8956daafc64bb5bbd2b8f8a07608e3994"}, + {file = "coverage-5.4-cp38-cp38-win32.whl", hash = 
"sha256:5bee3970617b3d74759b2d2df2f6a327d372f9732f9ccbf03fa591b5f7581e39"}, + {file = "coverage-5.4-cp38-cp38-win_amd64.whl", hash = "sha256:03ed2a641e412e42cc35c244508cf186015c217f0e4d496bf6d7078ebe837ae7"}, + {file = "coverage-5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:14a9f1887591684fb59fdba8feef7123a0da2424b0652e1b58dd5b9a7bb1188c"}, + {file = "coverage-5.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9564ac7eb1652c3701ac691ca72934dd3009997c81266807aef924012df2f4b3"}, + {file = "coverage-5.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:0f48fc7dc82ee14aeaedb986e175a429d24129b7eada1b7e94a864e4f0644dde"}, + {file = "coverage-5.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:107d327071061fd4f4a2587d14c389a27e4e5c93c7cba5f1f59987181903902f"}, + {file = "coverage-5.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:0cdde51bfcf6b6bd862ee9be324521ec619b20590787d1655d005c3fb175005f"}, + {file = "coverage-5.4-cp39-cp39-win32.whl", hash = "sha256:c67734cff78383a1f23ceba3b3239c7deefc62ac2b05fa6a47bcd565771e5880"}, + {file = "coverage-5.4-cp39-cp39-win_amd64.whl", hash = "sha256:c669b440ce46ae3abe9b2d44a913b5fd86bb19eb14a8701e88e3918902ecd345"}, + {file = "coverage-5.4-pp36-none-any.whl", hash = "sha256:c0ff1c1b4d13e2240821ef23c1efb1f009207cb3f56e16986f713c2b0e7cd37f"}, + {file = "coverage-5.4-pp37-none-any.whl", hash = "sha256:cd601187476c6bed26a0398353212684c427e10a903aeafa6da40c63309d438b"}, + {file = "coverage-5.4.tar.gz", hash = "sha256:6d2e262e5e8da6fa56e774fb8e2643417351427604c2b177f8e8c5f75fc928ca"}, ] curtsies = [ - {file = "curtsies-0.3.4-py2.py3-none-any.whl", hash = "sha256:068db8e5d8a2f23b765d648a66dfa9445cf2412177126ae946a7357ade992640"}, - {file = "curtsies-0.3.4.tar.gz", hash = "sha256:4ca543998d8bbba7185db099f8b7bb30baeb47426f7ac1a271e4d9ca8bbb2b05"}, + {file = "curtsies-0.3.5.tar.gz", hash = "sha256:a587ff3335667a32be7afed163f60a1c82c5d9c848d8297534a06fd29de20dbd"}, +] +cwcwidth = [ + {file = "cwcwidth-0.1.1.tar.gz", hash = "sha256:042cdf80d80a836935f700d8e1c34270f82a627fc07f7b5ec1e8cec486e1d755"}, ] dataclasses = [ {file = "dataclasses-0.7-py3-none-any.whl", hash = "sha256:3459118f7ede7c8bea0fe795bff7c6c2ce287d01dd226202f7c9ebc0610a7836"}, @@ -972,106 +1020,145 @@ filelock = [ {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"}, ] greenlet = [ - {file = "greenlet-0.4.17-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:75e4c27188f28149b74e7685809f9227410fd15432a4438fc48627f518577fa5"}, - {file = "greenlet-0.4.17-cp27-cp27m-win32.whl", hash = "sha256:3af587e9813f9bd8be9212722321a5e7be23b2bc37e6323a90e592ab0c2ef117"}, - {file = "greenlet-0.4.17-cp27-cp27m-win_amd64.whl", hash = "sha256:ccd62f09f90b2730150d82f2f2ffc34d73c6ce7eac234aed04d15dc8a3023994"}, - {file = "greenlet-0.4.17-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:13037e2d7ab2145300676852fa069235512fdeba4ed1e3bb4b0677a04223c525"}, - {file = "greenlet-0.4.17-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:e495096e3e2e8f7192afb6aaeba19babc4fb2bdf543d7b7fed59e00c1df7f170"}, - {file = "greenlet-0.4.17-cp35-cp35m-win32.whl", hash = "sha256:124a3ae41215f71dc91d1a3d45cbf2f84e46b543e5d60b99ecc20e24b4c8f272"}, - {file = "greenlet-0.4.17-cp35-cp35m-win_amd64.whl", hash = "sha256:5494e3baeacc371d988345fbf8aa4bd15555b3077c40afcf1994776bb6d77eaf"}, - {file = "greenlet-0.4.17-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:bee111161420f341a346731279dd976be161b465c1286f82cc0779baf7b729e8"}, - {file = "greenlet-0.4.17-cp36-cp36m-win32.whl", 
hash = "sha256:ac85db59aa43d78547f95fc7b6fd2913e02b9e9b09e2490dfb7bbdf47b2a4914"}, - {file = "greenlet-0.4.17-cp36-cp36m-win_amd64.whl", hash = "sha256:4481002118b2f1588fa3d821936ffdc03db80ef21186b62b90c18db4ba5e743b"}, - {file = "greenlet-0.4.17-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:be7a79988b8fdc5bbbeaed69e79cfb373da9759242f1565668be4fb7f3f37552"}, - {file = "greenlet-0.4.17-cp37-cp37m-win32.whl", hash = "sha256:97f2b01ab622a4aa4b3724a3e1fba66f47f054c434fbaa551833fa2b41e3db51"}, - {file = "greenlet-0.4.17-cp37-cp37m-win_amd64.whl", hash = "sha256:d3436110ca66fe3981031cc6aff8cc7a40d8411d173dde73ddaa5b8445385e2d"}, - {file = "greenlet-0.4.17-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:a34023b9eabb3525ee059f3bf33a417d2e437f7f17e341d334987d4091ae6072"}, - {file = "greenlet-0.4.17-cp38-cp38-win32.whl", hash = "sha256:e66a824f44892bc4ec66c58601a413419cafa9cec895e63d8da889c8a1a4fa4a"}, - {file = "greenlet-0.4.17-cp38-cp38-win_amd64.whl", hash = "sha256:47825c3a109f0331b1e54c1173d4e57fa000aa6c96756b62852bfa1af91cd652"}, - {file = "greenlet-0.4.17-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1023d7b43ca11264ab7052cb09f5635d4afdb43df55e0854498fc63070a0b206"}, - {file = "greenlet-0.4.17.tar.gz", hash = "sha256:41d8835c69a78de718e466dd0e6bfd4b46125f21a67c3ff6d76d8d8059868d6b"}, + {file = "greenlet-1.0.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:1d1d4473ecb1c1d31ce8fd8d91e4da1b1f64d425c1dc965edc4ed2a63cfa67b2"}, + {file = "greenlet-1.0.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:cfd06e0f0cc8db2a854137bd79154b61ecd940dce96fad0cba23fe31de0b793c"}, + {file = "greenlet-1.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:eb333b90036358a0e2c57373f72e7648d7207b76ef0bd00a4f7daad1f79f5203"}, + {file = "greenlet-1.0.0-cp27-cp27m-win32.whl", hash = "sha256:1a1ada42a1fd2607d232ae11a7b3195735edaa49ea787a6d9e6a53afaf6f3476"}, + {file = "greenlet-1.0.0-cp27-cp27m-win_amd64.whl", hash = "sha256:f6f65bf54215e4ebf6b01e4bb94c49180a589573df643735107056f7a910275b"}, + {file = "greenlet-1.0.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:f59eded163d9752fd49978e0bab7a1ff21b1b8d25c05f0995d140cc08ac83379"}, + {file = "greenlet-1.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:875d4c60a6299f55df1c3bb870ebe6dcb7db28c165ab9ea6cdc5d5af36bb33ce"}, + {file = "greenlet-1.0.0-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:1bb80c71de788b36cefb0c3bb6bfab306ba75073dbde2829c858dc3ad70f867c"}, + {file = "greenlet-1.0.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:b5f1b333015d53d4b381745f5de842f19fe59728b65f0fbb662dafbe2018c3a5"}, + {file = "greenlet-1.0.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:5352c15c1d91d22902582e891f27728d8dac3bd5e0ee565b6a9f575355e6d92f"}, + {file = "greenlet-1.0.0-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:2c65320774a8cd5fdb6e117c13afa91c4707548282464a18cf80243cf976b3e6"}, + {file = "greenlet-1.0.0-cp35-cp35m-manylinux2014_ppc64le.whl", hash = "sha256:111cfd92d78f2af0bc7317452bd93a477128af6327332ebf3c2be7df99566683"}, + {file = "greenlet-1.0.0-cp35-cp35m-win32.whl", hash = "sha256:cdb90267650c1edb54459cdb51dab865f6c6594c3a47ebd441bc493360c7af70"}, + {file = "greenlet-1.0.0-cp35-cp35m-win_amd64.whl", hash = "sha256:eac8803c9ad1817ce3d8d15d1bb82c2da3feda6bee1153eec5c58fa6e5d3f770"}, + {file = "greenlet-1.0.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:c93d1a71c3fe222308939b2e516c07f35a849c5047f0197442a4d6fbcb4128ee"}, + {file = "greenlet-1.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = 
"sha256:122c63ba795fdba4fc19c744df6277d9cfd913ed53d1a286f12189a0265316dd"}, + {file = "greenlet-1.0.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:c5b22b31c947ad8b6964d4ed66776bcae986f73669ba50620162ba7c832a6b6a"}, + {file = "greenlet-1.0.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:4365eccd68e72564c776418c53ce3c5af402bc526fe0653722bc89efd85bf12d"}, + {file = "greenlet-1.0.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:da7d09ad0f24270b20f77d56934e196e982af0d0a2446120cb772be4e060e1a2"}, + {file = "greenlet-1.0.0-cp36-cp36m-win32.whl", hash = "sha256:647ba1df86d025f5a34043451d7c4a9f05f240bee06277a524daad11f997d1e7"}, + {file = "greenlet-1.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:e6e9fdaf6c90d02b95e6b0709aeb1aba5affbbb9ccaea5502f8638e4323206be"}, + {file = "greenlet-1.0.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:62afad6e5fd70f34d773ffcbb7c22657e1d46d7fd7c95a43361de979f0a45aef"}, + {file = "greenlet-1.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d3789c1c394944084b5e57c192889985a9f23bd985f6d15728c745d380318128"}, + {file = "greenlet-1.0.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f5e2d36c86c7b03c94b8459c3bd2c9fe2c7dab4b258b8885617d44a22e453fb7"}, + {file = "greenlet-1.0.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:292e801fcb3a0b3a12d8c603c7cf340659ea27fd73c98683e75800d9fd8f704c"}, + {file = "greenlet-1.0.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:f3dc68272990849132d6698f7dc6df2ab62a88b0d36e54702a8fd16c0490e44f"}, + {file = "greenlet-1.0.0-cp37-cp37m-win32.whl", hash = "sha256:7cd5a237f241f2764324396e06298b5dee0df580cf06ef4ada0ff9bff851286c"}, + {file = "greenlet-1.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:0ddd77586553e3daf439aa88b6642c5f252f7ef79a39271c25b1d4bf1b7cbb85"}, + {file = "greenlet-1.0.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:90b6a25841488cf2cb1c8623a53e6879573010a669455046df5f029d93db51b7"}, + {file = "greenlet-1.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ed1d1351f05e795a527abc04a0d82e9aecd3bdf9f46662c36ff47b0b00ecaf06"}, + {file = "greenlet-1.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:94620ed996a7632723a424bccb84b07e7b861ab7bb06a5aeb041c111dd723d36"}, + {file = "greenlet-1.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f97d83049715fd9dec7911860ecf0e17b48d8725de01e45de07d8ac0bd5bc378"}, + {file = "greenlet-1.0.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:0a77691f0080c9da8dfc81e23f4e3cffa5accf0f5b56478951016d7cfead9196"}, + {file = "greenlet-1.0.0-cp38-cp38-win32.whl", hash = "sha256:e1128e022d8dce375362e063754e129750323b67454cac5600008aad9f54139e"}, + {file = "greenlet-1.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d4030b04061fdf4cbc446008e238e44936d77a04b2b32f804688ad64197953c"}, + {file = "greenlet-1.0.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:f8450d5ef759dbe59f84f2c9f77491bb3d3c44bc1a573746daf086e70b14c243"}, + {file = "greenlet-1.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:df8053867c831b2643b2c489fe1d62049a98566b1646b194cc815f13e27b90df"}, + {file = "greenlet-1.0.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:df3e83323268594fa9755480a442cabfe8d82b21aba815a71acf1bb6c1776218"}, + {file = "greenlet-1.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:181300f826625b7fd1182205b830642926f52bd8cdb08b34574c9d5b2b1813f7"}, + {file = "greenlet-1.0.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:58ca0f078d1c135ecf1879d50711f925ee238fe773dfe44e206d7d126f5bc664"}, + {file = 
"greenlet-1.0.0-cp39-cp39-win32.whl", hash = "sha256:5f297cb343114b33a13755032ecf7109b07b9a0020e841d1c3cedff6602cc139"}, + {file = "greenlet-1.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:5d69bbd9547d3bc49f8a545db7a0bd69f407badd2ff0f6e1a163680b5841d2b0"}, + {file = "greenlet-1.0.0.tar.gz", hash = "sha256:719e169c79255816cdcf6dccd9ed2d089a72a9f6c42273aae12d55e8d35bdcf8"}, ] grpcio = [ - {file = "grpcio-1.32.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3afb058b6929eba07dba9ae6c5b555aa1d88cb140187d78cc510bd72d0329f28"}, - {file = "grpcio-1.32.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:a8004b34f600a8a51785e46859cd88f3386ef67cccd1cfc7598e3d317608c643"}, - {file = "grpcio-1.32.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:e6786f6f7be0937614577edcab886ddce91b7c1ea972a07ef9972e9f9ecbbb78"}, - {file = "grpcio-1.32.0-cp27-cp27m-win32.whl", hash = "sha256:e467af6bb8f5843f5a441e124b43474715cfb3981264e7cd227343e826dcc3ce"}, - {file = "grpcio-1.32.0-cp27-cp27m-win_amd64.whl", hash = "sha256:1376a60f9bfce781b39973f100b5f67e657b5be479f2fd8a7d2a408fc61c085c"}, - {file = "grpcio-1.32.0-cp27-cp27mu-linux_armv7l.whl", hash = "sha256:ce617e1c4a39131f8527964ac9e700eb199484937d7a0b3e52655a3ba50d5fb9"}, - {file = "grpcio-1.32.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:99bac0e2c820bf446662365df65841f0c2a55b0e2c419db86eaf5d162ddae73e"}, - {file = "grpcio-1.32.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6d869a3e8e62562b48214de95e9231c97c53caa7172802236cd5d60140d7cddd"}, - {file = "grpcio-1.32.0-cp35-cp35m-linux_armv7l.whl", hash = "sha256:182c64ade34c341398bf71ec0975613970feb175090760ab4f51d1e9a5424f05"}, - {file = "grpcio-1.32.0-cp35-cp35m-macosx_10_7_intel.whl", hash = "sha256:9c0d8f2346c842088b8cbe3e14985b36e5191a34bf79279ba321a4bf69bd88b7"}, - {file = "grpcio-1.32.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:4775bc35af9cd3b5033700388deac2e1d611fa45f4a8dcb93667d94cb25f0444"}, - {file = "grpcio-1.32.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:be98e3198ec765d0a1e27f69d760f69374ded8a33b953dcfe790127731f7e690"}, - {file = "grpcio-1.32.0-cp35-cp35m-manylinux2014_i686.whl", hash = "sha256:378fe80ec5d9353548eb2a8a43ea03747a80f2e387c4f177f2b3ff6c7d898753"}, - {file = "grpcio-1.32.0-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:f7d508691301027033215d3662dab7e178f54d5cca2329f26a71ae175d94b83f"}, - {file = "grpcio-1.32.0-cp35-cp35m-win32.whl", hash = "sha256:25959a651420dd4a6fd7d3e8dee53f4f5fd8c56336a64963428e78b276389a59"}, - {file = "grpcio-1.32.0-cp35-cp35m-win_amd64.whl", hash = "sha256:ac7028d363d2395f3d755166d0161556a3f99500a5b44890421ccfaaf2aaeb08"}, - {file = "grpcio-1.32.0-cp36-cp36m-linux_armv7l.whl", hash = "sha256:c31e8a219650ddae1cd02f5a169e1bffe66a429a8255d3ab29e9363c73003b62"}, - {file = "grpcio-1.32.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e28e4c0d4231beda5dee94808e3a224d85cbaba3cfad05f2192e6f4ec5318053"}, - {file = "grpcio-1.32.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f03dfefa9075dd1c6c5cc27b1285c521434643b09338d8b29e1d6a27b386aa82"}, - {file = "grpcio-1.32.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:c4966d746dccb639ef93f13560acbe9630681c07f2b320b7ec03fe2c8f0a1f15"}, - {file = "grpcio-1.32.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:ec10d5f680b8e95a06f1367d73c5ddcc0ed04a3f38d6e4c9346988fb0cea2ffa"}, - {file = "grpcio-1.32.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:28677f057e2ef11501860a7bc15de12091d40b95dd0fddab3c37ff1542e6b216"}, - {file = "grpcio-1.32.0-cp36-cp36m-win32.whl", 
hash = "sha256:0f3f09269ffd3fded430cd89ba2397eabbf7e47be93983b25c187cdfebb302a7"}, - {file = "grpcio-1.32.0-cp36-cp36m-win_amd64.whl", hash = "sha256:4396b1d0f388ae875eaf6dc05cdcb612c950fd9355bc34d38b90aaa0665a0d4b"}, - {file = "grpcio-1.32.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1ada89326a364a299527c7962e5c362dbae58c67b283fe8383c4d952b26565d5"}, - {file = "grpcio-1.32.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:1d384a61f96a1fc6d5d3e0b62b0a859abc8d4c3f6d16daba51ebf253a3e7df5d"}, - {file = "grpcio-1.32.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:e811ce5c387256609d56559d944a974cc6934a8eea8c76e7c86ec388dc06192d"}, - {file = "grpcio-1.32.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:07b430fa68e5eecd78e2ad529ab80f6a234b55fc1b675fe47335ccbf64c6c6c8"}, - {file = "grpcio-1.32.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:0e3edd8cdb71809d2455b9dbff66b4dd3d36c321e64bfa047da5afdfb0db332b"}, - {file = "grpcio-1.32.0-cp37-cp37m-win32.whl", hash = "sha256:6f7947dad606c509d067e5b91a92b250aa0530162ab99e4737090f6b17eb12c4"}, - {file = "grpcio-1.32.0-cp37-cp37m-win_amd64.whl", hash = "sha256:7cda998b7b551503beefc38db9be18c878cfb1596e1418647687575cdefa9273"}, - {file = "grpcio-1.32.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c58825a3d8634cd634d8f869afddd4d5742bdb59d594aea4cea17b8f39269a55"}, - {file = "grpcio-1.32.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:ef9bd7fdfc0a063b4ed0efcab7906df5cae9bbcf79d05c583daa2eba56752b00"}, - {file = "grpcio-1.32.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:1ce6f5ff4f4a548c502d5237a071fa617115df58ea4b7bd41dac77c1ab126e9c"}, - {file = "grpcio-1.32.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:f12900be4c3fd2145ba94ab0d80b7c3d71c9e6414cfee2f31b1c20188b5c281f"}, - {file = "grpcio-1.32.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:f53f2dfc8ff9a58a993e414a016c8b21af333955ae83960454ad91798d467c7b"}, - {file = "grpcio-1.32.0-cp38-cp38-win32.whl", hash = "sha256:5bddf9d53c8df70061916c3bfd2f468ccf26c348bb0fb6211531d895ed5e4c72"}, - {file = "grpcio-1.32.0-cp38-cp38-win_amd64.whl", hash = "sha256:14c0f017bfebbc18139551111ac58ecbde11f4bc375b73a53af38927d60308b6"}, - {file = "grpcio-1.32.0.tar.gz", hash = "sha256:01d3046fe980be25796d368f8fc5ff34b7cf5e1444f3789a017a7fe794465639"}, + {file = "grpcio-1.35.0-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:95cc4d2067deced18dc807442cf8062a93389a86abf8d40741120054389d3f29"}, + {file = "grpcio-1.35.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:d186a0ce291f4386e28a7042ec31c85250b0c2e25d2794b87fa3c15ff473c46c"}, + {file = "grpcio-1.35.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:c8d0a6a58a42275c6cb616e7cb9f9fcf5eba1e809996546e561cd818b8f7cff7"}, + {file = "grpcio-1.35.0-cp27-cp27m-win32.whl", hash = "sha256:8d08f90d72a8e8d9af087476337da76d26749617b0a092caff4e684ce267af21"}, + {file = "grpcio-1.35.0-cp27-cp27m-win_amd64.whl", hash = "sha256:0072ec4563ab4268c4c32e936955085c2d41ea175b662363496daedd2273372c"}, + {file = "grpcio-1.35.0-cp27-cp27mu-linux_armv7l.whl", hash = "sha256:aca45d2ccb693c9227fbf21144891422a42dc4b76b52af8dd1d4e43afebe321d"}, + {file = "grpcio-1.35.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:87147b1b306c88fe7dca7e3dff8aefd1e63d6aed86e224f9374ddf283f17d7f1"}, + {file = "grpcio-1.35.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:22edfc278070d54f3ab7f741904e09155a272fe934e842babbf84476868a50de"}, + {file = "grpcio-1.35.0-cp35-cp35m-linux_armv7l.whl", hash = 
"sha256:f3654a52f72ba28953dbe2e93208099f4903f4b3c07dc7ff4db671c92968111d"}, + {file = "grpcio-1.35.0-cp35-cp35m-macosx_10_10_intel.whl", hash = "sha256:dc2589370ef84eb1cc53530070d658a7011d2ee65f18806581809c11cd016136"}, + {file = "grpcio-1.35.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:f0c27fd16582a303e5baf6cffd9345c9ac5f855d69a51232664a0b888a77ba80"}, + {file = "grpcio-1.35.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:b2985f73611b637271b00d9c4f177e65cc3193269bc9760f16262b1a12757265"}, + {file = "grpcio-1.35.0-cp35-cp35m-manylinux2014_i686.whl", hash = "sha256:acb489b7aafdcf960f1a0000a1f22b45e5b6ccdf8dba48f97617d627f4133195"}, + {file = "grpcio-1.35.0-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:16fd33030944672e49e0530dec2c60cd4089659ccdf327e99569b3b29246a0b6"}, + {file = "grpcio-1.35.0-cp35-cp35m-win32.whl", hash = "sha256:1757e81c09132851e85495b802fe4d4fbef3547e77fa422a62fb4f7d51785be0"}, + {file = "grpcio-1.35.0-cp35-cp35m-win_amd64.whl", hash = "sha256:35b72884e09cbc46c564091f4545a39fa66d132c5676d1a6e827517fff47f2c1"}, + {file = "grpcio-1.35.0-cp36-cp36m-linux_armv7l.whl", hash = "sha256:17940a7dc461066f28816df48be44f24d3b9f150db344308ee2aeae033e1af0b"}, + {file = "grpcio-1.35.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:75ea903edc42a8c6ec61dbc5f453febd79d8bdec0e1bad6df7088c34282e8c42"}, + {file = "grpcio-1.35.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:b180a3ec4a5d6f96d3840c83e5f8ab49afac9fa942921e361b451d7a024efb00"}, + {file = "grpcio-1.35.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:e163c27d2062cd3eb07057f23f8d1330925beaba16802312b51b4bad33d74098"}, + {file = "grpcio-1.35.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:764b50ba1a15a2074cdd1a841238f2dead0a06529c495a46821fae84cb9c7342"}, + {file = "grpcio-1.35.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:088c8bea0f6b596937fefacf2c8df97712e7a3dd49496975049cc95dbf02af1a"}, + {file = "grpcio-1.35.0-cp36-cp36m-win32.whl", hash = "sha256:1aa53f82362c7f2791fe0cdd9a3b3aec325c11d8f0dfde600f91907dfaa8546b"}, + {file = "grpcio-1.35.0-cp36-cp36m-win_amd64.whl", hash = "sha256:efb3d67405eb8030db6f27920b4be023fabfb5d4e09c34deab094a7c473a5472"}, + {file = "grpcio-1.35.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:44aaa6148d18a8e836f99dadcdec17b27bc7ec0995b2cc12c94e61826040ec90"}, + {file = "grpcio-1.35.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:18ad7644e23757420ea839ac476ef861e4f4841c8566269b7c91c100ca1943b3"}, + {file = "grpcio-1.35.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:859a0ceb23d7189362cc06fe7e906e9ed5c7a8f3ac960cc04ce13fe5847d0b62"}, + {file = "grpcio-1.35.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3e7d4428ed752fdfe2dddf2a404c93d3a2f62bf4b9109c0c10a850c698948891"}, + {file = "grpcio-1.35.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:a36151c335280b09afd5123f3b25085027ae2b10682087a4342fb6f635b928fb"}, + {file = "grpcio-1.35.0-cp37-cp37m-win32.whl", hash = "sha256:dfecb2acd3acb8bb50e9aa31472c6e57171d97c1098ee67cd283a6fe7d56a926"}, + {file = "grpcio-1.35.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e87e55fba98ebd7b4c614dcef9940dc2a7e057ad8bba5f91554934d47319a35b"}, + {file = "grpcio-1.35.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:da44bf613eed5d9e8df0785463e502a416de1be6e4ac31edbe99c9111abaed5f"}, + {file = "grpcio-1.35.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:9e503eaf853199804a954dc628c5207e67d6c7848dcba42a997fbe718618a2b1"}, + {file = "grpcio-1.35.0-cp38-cp38-manylinux2010_x86_64.whl", hash = 
"sha256:6ba3d7acf70acde9ce27e22921db921b84a71be578b32739536c32377b65041a"}, + {file = "grpcio-1.35.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:048c01d1eb5c2ae7cba2254b98938d2fc81f6dc10d172d9261d65266adb0fdb3"}, + {file = "grpcio-1.35.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:efd896e8ca7adb2654cf014479a5e1f74e4f776b6b2c0fbf95a6c92787a6631a"}, + {file = "grpcio-1.35.0-cp38-cp38-win32.whl", hash = "sha256:8a29a26b9f39701ce15aa1d5aa5e96e0b5f7028efe94f95341a4ed8dbe4bed78"}, + {file = "grpcio-1.35.0-cp38-cp38-win_amd64.whl", hash = "sha256:aea3d592a7ece84739b92d212cd16037c51d84a259414f64b51c14e946611f3d"}, + {file = "grpcio-1.35.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2f8e8d35d4799aa1627a212dbe8546594abf4064056415c31bd1b3b8f2a62027"}, + {file = "grpcio-1.35.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:9f0da13b215068e7434b161a35d0b4e92140ffcfa33ddda9c458199ea1d7ce45"}, + {file = "grpcio-1.35.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:7ae408780b79c9b9b91a2592abd1d7abecd05675d988ea75038580f420966b59"}, + {file = "grpcio-1.35.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:0f714e261e1d63615476cda4ee808a79cca62f8f09e2943c136c2f87ec5347b1"}, + {file = "grpcio-1.35.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:7ee7d54da9d176d3c9a0f47c04d7ff6fdc6ee1c17643caff8c33d6c8a70678a4"}, + {file = "grpcio-1.35.0-cp39-cp39-win32.whl", hash = "sha256:94c3b81089a86d3c5877d22b07ebc66b5ed1d84771e24b001844e29a5b6178dd"}, + {file = "grpcio-1.35.0-cp39-cp39-win_amd64.whl", hash = "sha256:399ee377b312ac652b07ef4365bbbba009da361fa7708c4d3d4ce383a1534ea7"}, + {file = "grpcio-1.35.0.tar.gz", hash = "sha256:7bd0ebbb14dde78bf66a1162efd29d3393e4e943952e2f339757aa48a184645c"}, ] grpcio-tools = [ - {file = "grpcio-tools-1.32.0.tar.gz", hash = "sha256:28547272c51e1d2d343685b9f531e85bb90ad7bd93e726ba646b5627173cbc47"}, - {file = "grpcio_tools-1.32.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6165dc7d424c3c58a54e9e47eacc7cc1513cd09c7c71ff5323e74ead5bb863f"}, - {file = "grpcio_tools-1.32.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:a7432b84d6f2f6260d5461eb2a8904db8cf24b663e0a1236375098c8e15c289c"}, - {file = "grpcio_tools-1.32.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4f61edfb0c07689a2835f15f4a25a781f058866cb4fea0bea391ae6deb74325f"}, - {file = "grpcio_tools-1.32.0-cp27-cp27m-win32.whl", hash = "sha256:a3524be59d4e6f8b089f7eaa128bc83e2375aac973f1bf0b568cd1c04c4df56e"}, - {file = "grpcio_tools-1.32.0-cp27-cp27m-win_amd64.whl", hash = "sha256:b31e7e909ba9efd8a08eb45665bf2f8326726da288d9e33555473e6b20596dbd"}, - {file = "grpcio_tools-1.32.0-cp27-cp27mu-linux_armv7l.whl", hash = "sha256:4e04d6a7c48adbdca64e9b67cc75e8294b3b37b1284dd2819183e38a4207aa39"}, - {file = "grpcio_tools-1.32.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:37acc75ec1dc836772496ef77170fab585e2517abdf1330c29e682eb50a6ce86"}, - {file = "grpcio_tools-1.32.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:7d5be0d06bf830efbf1867db7b01720e54a136454410270e896441ec56baba00"}, - {file = "grpcio_tools-1.32.0-cp35-cp35m-linux_armv7l.whl", hash = "sha256:6e26e8d0ef73c04dc1118513c06ff56bce36672c8e28410ae4f938c22002ba00"}, - {file = "grpcio_tools-1.32.0-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:3971dee0cf57dc3813f6f40724161341ec3b31137b026ae8d4db30c83afeb2a1"}, - {file = "grpcio_tools-1.32.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:83414dd919b692d92876db787b6fda709c226243c9bdb71b5025297a127f3be4"}, - {file = 
"grpcio_tools-1.32.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:9b5beb49002bb1f1c0641b55ddc2d1d92c7844fb42348e874146bf7667b6ca20"}, - {file = "grpcio_tools-1.32.0-cp35-cp35m-manylinux2014_i686.whl", hash = "sha256:7a18d6375efe075cc274fdfe004bee4530319a2dbb044eb7eb157c313fe88c97"}, - {file = "grpcio_tools-1.32.0-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:8147085f0a044ddc27c870feb8e82a25685f3fdf09184dba0f63fed720f12e93"}, - {file = "grpcio_tools-1.32.0-cp35-cp35m-win32.whl", hash = "sha256:e2a37e716ef6b5e81c44648648aae258b67b9ef19e0a472ec4080f5e384be386"}, - {file = "grpcio_tools-1.32.0-cp35-cp35m-win_amd64.whl", hash = "sha256:130c248d0d94473f3eb80d86bdae35a39eb20ab98fde6d227e7f7e053ccbba88"}, - {file = "grpcio_tools-1.32.0-cp36-cp36m-linux_armv7l.whl", hash = "sha256:11228fb5343c197e1f4376a966f6845ea270c794ec925260b8a27f6df5d90d04"}, - {file = "grpcio_tools-1.32.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:246caf8cdea97ff3710a810c55c9400e3aa7af1a5464a667d62184e38a58a031"}, - {file = "grpcio_tools-1.32.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:6aa6dd1d7e746c41803a209565d23e6027b0a5dd9b59596da37f99257cc58e65"}, - {file = "grpcio_tools-1.32.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:541a6b992aa417a6305c965bb6896aa1a1ca37d00a82d5438074b18db6a37aad"}, - {file = "grpcio_tools-1.32.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:e83146ef8f17e3a35fe77a438794f0a4a50ea11085194bfea1b419c1b342f7b1"}, - {file = "grpcio_tools-1.32.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:71c451240e66245125e504abee5acc7ab30da099d5c17596d43ecc66e6034e20"}, - {file = "grpcio_tools-1.32.0-cp36-cp36m-win32.whl", hash = "sha256:6155ed6fed3c9a41fd03156c31adb5012c2399992c929987d3fa8ff1cd3c7cd8"}, - {file = "grpcio_tools-1.32.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a137b6079c96f11f0854a4793910f76aa4a62283947311b6e5131369fa226b48"}, - {file = "grpcio_tools-1.32.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f5f381943081792d82fe34c5a649d98a6b91741c6d62cbca8914943b8d1a4e8b"}, - {file = "grpcio_tools-1.32.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:adeae62f3bd1c6839e3822620f7650d30adb7398170e3a0b45a0059f9fe631c8"}, - {file = "grpcio_tools-1.32.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:bcc62cb4a3c9a39fb9e349124018e7d7edf0f627592561410e28b590767b831f"}, - {file = "grpcio_tools-1.32.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:d11f432ed6fde059b33c514b64fcbf4527f56e03ff94f52f95121547c6945825"}, - {file = "grpcio_tools-1.32.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:708077380f458ef831e7da67f574abfb2fc6b6a24225c5976d92809b8930254c"}, - {file = "grpcio_tools-1.32.0-cp37-cp37m-win32.whl", hash = "sha256:de8ca90742bd41a19c1067fba6ffa13befd3ddb505d67eb297d6a418a5937a25"}, - {file = "grpcio_tools-1.32.0-cp37-cp37m-win_amd64.whl", hash = "sha256:632bba5853e955072392aac42fbca16daf65adfc0ec094fa840afbb83c78bee8"}, - {file = "grpcio_tools-1.32.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c2da2a4b2209156d0f88f91bd5d4650a9ed830acb6f685881a26d67d3f671361"}, - {file = "grpcio_tools-1.32.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d3d01ebc1526cc9cdc5e29d2196bae43d56d8ec545dd30fead8b8b3e0b126808"}, - {file = "grpcio_tools-1.32.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:fd059d37d9537fa1a89b1139f8cbed7530a5f81c8577560d3f7710fcec95efde"}, - {file = "grpcio_tools-1.32.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:524f0460a49a3248d1cb462d0904e783a75bb3cecdcaea520c3688c8bccd9f2f"}, - {file = 
"grpcio_tools-1.32.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:6327f2c6acca4eac1d5a8e1ee92282682b83069d53199ff8ce18906e912086ed"}, - {file = "grpcio_tools-1.32.0-cp38-cp38-win32.whl", hash = "sha256:07c1da5f1dbd4db664d416f68db6a92d5c88b4073ec6be41fcc7aa4d632f60a9"}, - {file = "grpcio_tools-1.32.0-cp38-cp38-win_amd64.whl", hash = "sha256:9b92f998ed1d01925160e47e9546c742aa0de49009f8fa3bb79420252d8a888d"}, + {file = "grpcio-tools-1.35.0.tar.gz", hash = "sha256:9e2a41cba9c5a20ae299d0fdd377fe231434fa04cbfbfb3807293c6ec10b03cf"}, + {file = "grpcio_tools-1.35.0-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:cfa49e6d62b313862a6007ae02016bd89a2fa184b0aab0d0e524cb24ecc2fdb4"}, + {file = "grpcio_tools-1.35.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:f66cd905ffcbe2294c9dee6d0de8064c3a49861a9b1770c18cb8a15be3bc0da5"}, + {file = "grpcio_tools-1.35.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:56663ba4a49ca585e4333dfebc5ed7e91ad3d75b838aced4f922fb4e365376cc"}, + {file = "grpcio_tools-1.35.0-cp27-cp27m-win32.whl", hash = "sha256:252bfaa0004d80d927a77998c8b3a81fb47620e41af1664bdba8837d722c4ead"}, + {file = "grpcio_tools-1.35.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ffa66fc4e80aff4f68599e786aa3295f4a0d6761ed63d75c32261f5de77aa0fd"}, + {file = "grpcio_tools-1.35.0-cp27-cp27mu-linux_armv7l.whl", hash = "sha256:3cea2d07343801cb2a0d2f71fe7d6d7ffa6fe8fc0e1f6243c6867d0bb04557a1"}, + {file = "grpcio_tools-1.35.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:15c37528936774d8f734d75540848134fb5710ca27801ce4ac73c8a6cca0494e"}, + {file = "grpcio_tools-1.35.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:f3861211a450a312b7645d4eaf16c78f1d9e896e58a8c3be871f5881362d3fee"}, + {file = "grpcio_tools-1.35.0-cp35-cp35m-linux_armv7l.whl", hash = "sha256:5fdb6a65f66ee6cdc49455ea03ca435ae86ef1869dc929a8652cc19b5f950d22"}, + {file = "grpcio_tools-1.35.0-cp35-cp35m-macosx_10_10_intel.whl", hash = "sha256:8bfd05f26af9ea069f2f3c48740a315470fc4a434189544fea3b3508b71be9a0"}, + {file = "grpcio_tools-1.35.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:f7074cfd79989424e4bd903ff5618c1420a7c81ad97836256f3927447b74c027"}, + {file = "grpcio_tools-1.35.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:adea0bd93978284f1590a3880d79621881f7029b2fac330f64f491af2b554707"}, + {file = "grpcio_tools-1.35.0-cp35-cp35m-manylinux2014_i686.whl", hash = "sha256:b23e0a64cdbf4c3bcdf8e6ad0cdd8b8a582a4c50d5ed4eddc4c81dc8d5ba0c60"}, + {file = "grpcio_tools-1.35.0-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:7cc68c42bcbebd76731686f22870930f110309e1e69244df428f8fb161b7645b"}, + {file = "grpcio_tools-1.35.0-cp35-cp35m-win32.whl", hash = "sha256:aa9cb65231a7efd77e83e149b1905335eda1bbadd301dd1bffcbfea69fd5bd56"}, + {file = "grpcio_tools-1.35.0-cp35-cp35m-win_amd64.whl", hash = "sha256:11e6dffd2e58737ade63a00a51da83b474b5740665914103f003049acff5be8e"}, + {file = "grpcio_tools-1.35.0-cp36-cp36m-linux_armv7l.whl", hash = "sha256:59d80997e780dc52911e263e30ca2334e7b3bd12c10dc81625dcc34273fa744b"}, + {file = "grpcio_tools-1.35.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:179b2eb274d8c29e1e18c21fb69c5101e3196617c7abb193a80e194ea9b274be"}, + {file = "grpcio_tools-1.35.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:1687b0033beff82ac35f14fbbd5e7eb0cab39e60f8be0a25a7f4ba92d66578c8"}, + {file = "grpcio_tools-1.35.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:9e956751b1b96ce343088550d155827f8312d85f09067f6ede0a4778273b787b"}, + {file = 
"grpcio_tools-1.35.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:4ca85f9deee58473c017ee62aaa8c12dfda223eeabed5dd013c009af275bc4f2"}, + {file = "grpcio_tools-1.35.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:853d030ff74ce90244bb77c5a8d5c2b2d84b24df477fc422d44fa81d512124d6"}, + {file = "grpcio_tools-1.35.0-cp36-cp36m-win32.whl", hash = "sha256:add160d4697a5366ee1420b59621bde69a3eaaba35170e60bd376f0ea6e24fe5"}, + {file = "grpcio_tools-1.35.0-cp36-cp36m-win_amd64.whl", hash = "sha256:dbaaad0132a9e70439e93d26611443ee3aaaa62547b7d18655ac754b4984ea25"}, + {file = "grpcio_tools-1.35.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:bbc6986e29ab3bb39db9a0e31cdbb0ced80cead2ef0453c40dfdfacbab505950"}, + {file = "grpcio_tools-1.35.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:8631df0e357b28da4ef617306a08f70c21cf85c049849f4a556b95069c146d61"}, + {file = "grpcio_tools-1.35.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:a6e2b07dbe25c6022eeae972b4eee2058836dea345a3253082524240a00daa9f"}, + {file = "grpcio_tools-1.35.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:30f83ccc6d09be07d7f15d05f29acd5017140f330ba3a218ae7b7e19db02bda6"}, + {file = "grpcio_tools-1.35.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:88184383f24af8f8cbbb4020846af53634d8632b486479a3b98ea29c1470372e"}, + {file = "grpcio_tools-1.35.0-cp37-cp37m-win32.whl", hash = "sha256:579cf4538d8ec25314c45ef84bb140fad8888446ed7a69913965fd7d9bc188d5"}, + {file = "grpcio_tools-1.35.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9203e0105db476131f32ff3c3213b5aa6b77b25553ffe0d09d973913b2320856"}, + {file = "grpcio_tools-1.35.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:c910dec8903fb9d16fd1b111de57401a46e4d5f74c6d009a12a945d696603eb0"}, + {file = "grpcio_tools-1.35.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:cc9bcd34a653c2353dd43fc395ceb560271551f2fae30bcafede2e4ad0c101c4"}, + {file = "grpcio_tools-1.35.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4241301b8e594c5c265f06c600b492372e867a4bb80dc205b545088c39e010d0"}, + {file = "grpcio_tools-1.35.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:8bae2611a8e09617922ff4cb11de6fd5f59b91c75a14a318c7d378f427584be1"}, + {file = "grpcio_tools-1.35.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:0d5028f548fa2b99494baf992dd0e23676361b1a217322be44f6c13b5133f6b3"}, + {file = "grpcio_tools-1.35.0-cp38-cp38-win32.whl", hash = "sha256:8d2c507c093a0ae3df62201ef92ceabcc34ac3f7e53026f12357f8c3641e809a"}, + {file = "grpcio_tools-1.35.0-cp38-cp38-win_amd64.whl", hash = "sha256:994adfe39a1755424e3c33c434786a9fa65090a50515303dfa8125cbec4a5940"}, + {file = "grpcio_tools-1.35.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:51bf36ae34f70a8d6ccee5d9d2e52a9e65251670b405f91b7b547a73788f90fb"}, + {file = "grpcio_tools-1.35.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:e00dc8d641001409963f78b0b8bf83834eb87c0090357ebc862f874dd0e6dbb5"}, + {file = "grpcio_tools-1.35.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:5f279dee8b77bf93996592ada3bf56ad44fa9b0e780099172f1a7093a506eb67"}, + {file = "grpcio_tools-1.35.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:713b496dd02fc868da0d59cc09536c62452d52035d0b694204d5054e75fe4929"}, + {file = "grpcio_tools-1.35.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:15fa3c66e6b0ba2e434eccf8cdbce68e4e37b5fe440dbeffb9efd599aa23910f"}, + {file = "grpcio_tools-1.35.0-cp39-cp39-win32.whl", hash = "sha256:ee0f750b5d8d628349e903438bb506196c4c5cee0007e81800d95cd0a2b23e6f"}, + {file = 
"grpcio_tools-1.35.0-cp39-cp39-win_amd64.whl", hash = "sha256:c8451c60e106310436c123f3243c115db21ccb957402edbe73b1bb68276e4aa4"}, ] grpclib = [ {file = "grpclib-0.4.1.tar.gz", hash = "sha256:8c0021cd038634c268249e4cd168d9f3570e66ceceec1c9416094b788ebc8372"}, @@ -1097,12 +1184,12 @@ imagesize = [ {file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"}, ] importlib-metadata = [ - {file = "importlib_metadata-2.0.0-py2.py3-none-any.whl", hash = "sha256:cefa1a2f919b866c5beb7c9f7b0ebb4061f30a8a9bf16d609b000e2dfaceb9c3"}, - {file = "importlib_metadata-2.0.0.tar.gz", hash = "sha256:77a540690e24b0305878c37ffd421785a6f7e53c8b5720d211b211de8d0e95da"}, + {file = "importlib_metadata-3.4.0-py3-none-any.whl", hash = "sha256:ace61d5fc652dc280e7b6b4ff732a9c2d40db2c0f92bc6cb74e07b73d53a1771"}, + {file = "importlib_metadata-3.4.0.tar.gz", hash = "sha256:fa5daa4477a7414ae34e95942e4dd07f62adf589143c875c133c1e53c4eff38d"}, ] importlib-resources = [ - {file = "importlib_resources-3.0.0-py2.py3-none-any.whl", hash = "sha256:d028f66b66c0d5732dae86ba4276999855e162a749c92620a38c1d779ed138a7"}, - {file = "importlib_resources-3.0.0.tar.gz", hash = "sha256:19f745a6eca188b490b1428c8d1d4a0d2368759f32370ea8fb89cad2ab1106c3"}, + {file = "importlib_resources-5.1.0-py3-none-any.whl", hash = "sha256:885b8eae589179f661c909d699a546cf10d83692553e34dca1bf5eb06f7f6217"}, + {file = "importlib_resources-5.1.0.tar.gz", hash = "sha256:bfdad047bce441405a49cf8eb48ddce5e56c696e185f59147a8b79e75e9e6380"}, ] jinja2 = [ {file = "Jinja2-2.11.2-py2.py3-none-any.whl", hash = "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"}, @@ -1144,43 +1231,47 @@ markupsafe = [ {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"}, ] more-itertools = [ - {file = "more-itertools-8.5.0.tar.gz", hash = "sha256:6f83822ae94818eae2612063a5101a7311e68ae8002005b5e05f03fd74a86a20"}, - {file = "more_itertools-8.5.0-py3-none-any.whl", hash = "sha256:9b30f12df9393f0d28af9210ff8efe48d10c94f73e5daf886f10c4b0b0b4f03c"}, + {file = "more-itertools-8.6.0.tar.gz", hash = "sha256:b3a9005928e5bed54076e6e549c792b306fddfe72b2d1d22dd63d42d5d3899cf"}, + {file = "more_itertools-8.6.0-py3-none-any.whl", hash = "sha256:8e1a2a43b2f2727425f2b5839587ae37093f19153dc26c0927d1048ff6557330"}, ] multidict = [ - {file = "multidict-5.0.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:11dcf2366da487d5b9de1d4b2055308c7ed9bde1a52973d07a89b42252af9ebe"}, - {file = "multidict-5.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:167bd8e6351b57525bbf2d524ca5a133834699a2fcb090aad0c330c6017f3f3e"}, - {file = "multidict-5.0.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:60af726c19a899ed49bbb276e062f08b80222cb6b9feda44b59a128b5ff52966"}, - {file = "multidict-5.0.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:32f0a904859a6274d7edcbb01752c8ae9c633fb7d1c131771ff5afd32eceee42"}, - {file = "multidict-5.0.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:7561a804093ea4c879e06b5d3d18a64a0bc21004bade3540a4b31342b528d326"}, - {file = "multidict-5.0.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:786ad04ad954afe9927a1b3049aa58722e182160fe2fcac7ad7f35c93595d4f6"}, - {file = "multidict-5.0.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:02b2ea2bb1277a970d238c5c783023790ca94d386c657aeeb165259950951cc6"}, - {file = "multidict-5.0.0-cp36-cp36m-win_amd64.whl", hash = 
"sha256:932964cf57c0e59d1f3fb63ff342440cf8aaa75bf0dbcbad902c084024975380"}, - {file = "multidict-5.0.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:c692087913e12b801a759e25a626c3d311f416252dfba2ecdfd254583427949f"}, - {file = "multidict-5.0.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:cda06c99cd6f4a36571bb38e560a6fcfb1f136521e57f612e0bc31957b1cd4bd"}, - {file = "multidict-5.0.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:84e4943d8725659942e7401bdf31780acde9cfdaf6fe977ff1449fffafcd93a9"}, - {file = "multidict-5.0.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:bbec545b8f82536bc50afa9abce832176ed250aa22bfff3e20b3463fb90b0b35"}, - {file = "multidict-5.0.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:c339b7d73c0ea5c551025617bb8aa1c00a0111187b6545f48836343e6cfbe6a0"}, - {file = "multidict-5.0.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:0ce1d956ecbf112d49915ebc2f29c03e35fe451fb5e9f491edf9a2f4395ee0af"}, - {file = "multidict-5.0.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:39713fa2c687e0d0e709ad751a8a709ac051fcdc7f2048f6fd09365dd03c83eb"}, - {file = "multidict-5.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:0ffdb4b897b15df798c0a5939a0323ccf703f2bae551dfab4eb1af7fbab38ead"}, - {file = "multidict-5.0.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:4ef76ce695da72e176f6a51867afb3bf300ce16ba2597824caaef625af5906a9"}, - {file = "multidict-5.0.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:711289412b78cf41a21457f4c806890466013d62bf4296bd3d71fad73ff8a581"}, - {file = "multidict-5.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2b0cfc33f53e5c8226f7d7c4e126fa0780f970ef1e96f7c6353da7d01eafe490"}, - {file = "multidict-5.0.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:28b5913e5b6fef273e5d4230b61f33c8a51c3ce5f44a88582dee6b5ca5c9977b"}, - {file = "multidict-5.0.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:a5eca9ee72b372199c2b76672145e47d3c829889eefa2037b1f3018f54e5f67d"}, - {file = "multidict-5.0.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:20eaf1c279c543e07c164e4ac02151488829177da06607efa7ccfecd71b21e79"}, - {file = "multidict-5.0.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ec8bc0ab00c76c4260a201eaa58812ea8b1b7fde0ecf5f9c9365a182bd4691ed"}, - {file = "multidict-5.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:aad240c1429e386af38a2d6761032f0bec5177fed7c5f582c835c99fff135b5c"}, - {file = "multidict-5.0.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:52b5b51281d760197ce3db063c166fdb626e01c8e428a325aa37198ce31c9565"}, - {file = "multidict-5.0.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:5263359a03368985b5296b7a73363d761a269848081879ba04a6e4bfd0cf4a78"}, - {file = "multidict-5.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:620c39b1270b68e194023ad471b6a54bdb517bb48515939c9829b56c783504a3"}, - {file = "multidict-5.0.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:2739d1d9237835122b27d88990849ecf41ef670e0fcb876159edd236ca9ef40f"}, - {file = "multidict-5.0.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:62f6e66931fb87e9016e7c1cc806ab4f3e39392fd502362df3cac888078b27cb"}, - {file = "multidict-5.0.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:5dd303b545b62f9d2b14f99fbdb84c109a20e64a57f6a192fe6aebcb6263b59d"}, - {file = "multidict-5.0.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:60b12d14bc122ba2dae1e4460a891b3a96e73d815b4365675f6ec0a1725416a5"}, - {file = "multidict-5.0.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:79dc3e6e7ce853fb7ed17c134e01fcb0d0c826b33201aa2a910fb27ed75c2eb9"}, - {file = "multidict-5.0.0.tar.gz", hash = "sha256:1b324444299c3a49b601b1bf621fc21704e29066f6ac2b7d7e4034a4a18662a1"}, + {file = "multidict-5.1.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224"}, + {file = "multidict-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26"}, + {file = "multidict-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6"}, + {file = "multidict-5.1.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37"}, + {file = "multidict-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5"}, + {file = "multidict-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632"}, + {file = "multidict-5.1.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635"}, + {file = 
"multidict-5.1.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea"}, + {file = "multidict-5.1.0-cp38-cp38-win32.whl", hash = "sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656"}, + {file = "multidict-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3"}, + {file = "multidict-5.1.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda"}, + {file = "multidict-5.1.0-cp39-cp39-win32.whl", hash = "sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80"}, + {file = "multidict-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359"}, + {file = "multidict-5.1.0.tar.gz", hash = "sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5"}, ] mypy = [ {file = "mypy-0.770-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:a34b577cdf6313bf24755f7a0e3f3c326d5c1f4fe7422d1d06498eb25ad0c600"}, @@ -1203,16 +1294,16 @@ mypy-extensions = [ {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] packaging = [ - {file = "packaging-20.4-py2.py3-none-any.whl", hash = "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"}, - {file = "packaging-20.4.tar.gz", hash = "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8"}, + {file = "packaging-20.8-py2.py3-none-any.whl", hash = "sha256:24e0da08660a87484d1602c30bb4902d74816b6985b93de36926f5bc95741858"}, + {file = "packaging-20.8.tar.gz", hash = "sha256:78598185a7008a470d64526a8059de9aaa449238f280fc9eb6b13ba6c4109093"}, ] pastel = [ {file = "pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364"}, {file = "pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d"}, ] pathspec = [ - {file = "pathspec-0.8.0-py2.py3-none-any.whl", hash = "sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0"}, - {file = "pathspec-0.8.0.tar.gz", hash = "sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061"}, + {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, + {file = "pathspec-0.8.1.tar.gz", hash = 
"sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, ] pluggy = [ {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, @@ -1223,32 +1314,32 @@ poethepoet = [ {file = "poethepoet-0.9.0.tar.gz", hash = "sha256:ab2263fd7be81d16d38a4b4fe42a055d992d04421e61cad36498b1e4bd8ee2a6"}, ] protobuf = [ - {file = "protobuf-3.13.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9c2e63c1743cba12737169c447374fab3dfeb18111a460a8c1a000e35836b18c"}, - {file = "protobuf-3.13.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1e834076dfef9e585815757a2c7e4560c7ccc5962b9d09f831214c693a91b463"}, - {file = "protobuf-3.13.0-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:df3932e1834a64b46ebc262e951cd82c3cf0fa936a154f0a42231140d8237060"}, - {file = "protobuf-3.13.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:8c35bcbed1c0d29b127c886790e9d37e845ffc2725cc1db4bd06d70f4e8359f4"}, - {file = "protobuf-3.13.0-cp35-cp35m-win32.whl", hash = "sha256:339c3a003e3c797bc84499fa32e0aac83c768e67b3de4a5d7a5a9aa3b0da634c"}, - {file = "protobuf-3.13.0-cp35-cp35m-win_amd64.whl", hash = "sha256:361acd76f0ad38c6e38f14d08775514fbd241316cce08deb2ce914c7dfa1184a"}, - {file = "protobuf-3.13.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9edfdc679a3669988ec55a989ff62449f670dfa7018df6ad7f04e8dbacb10630"}, - {file = "protobuf-3.13.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5db9d3e12b6ede5e601b8d8684a7f9d90581882925c96acf8495957b4f1b204b"}, - {file = "protobuf-3.13.0-cp36-cp36m-win32.whl", hash = "sha256:c8abd7605185836f6f11f97b21200f8a864f9cb078a193fe3c9e235711d3ff1e"}, - {file = "protobuf-3.13.0-cp36-cp36m-win_amd64.whl", hash = "sha256:4d1174c9ed303070ad59553f435846a2f877598f59f9afc1b89757bdf846f2a7"}, - {file = "protobuf-3.13.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0bba42f439bf45c0f600c3c5993666fcb88e8441d011fad80a11df6f324eef33"}, - {file = "protobuf-3.13.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:c0c5ab9c4b1eac0a9b838f1e46038c3175a95b0f2d944385884af72876bd6bc7"}, - {file = "protobuf-3.13.0-cp37-cp37m-win32.whl", hash = "sha256:f68eb9d03c7d84bd01c790948320b768de8559761897763731294e3bc316decb"}, - {file = "protobuf-3.13.0-cp37-cp37m-win_amd64.whl", hash = "sha256:91c2d897da84c62816e2f473ece60ebfeab024a16c1751aaf31100127ccd93ec"}, - {file = "protobuf-3.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3dee442884a18c16d023e52e32dd34a8930a889e511af493f6dc7d4d9bf12e4f"}, - {file = "protobuf-3.13.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:e7662437ca1e0c51b93cadb988f9b353fa6b8013c0385d63a70c8a77d84da5f9"}, - {file = "protobuf-3.13.0-py2.py3-none-any.whl", hash = "sha256:d69697acac76d9f250ab745b46c725edf3e98ac24763990b24d58c16c642947a"}, - {file = "protobuf-3.13.0.tar.gz", hash = "sha256:6a82e0c8bb2bf58f606040cc5814e07715b2094caeba281e2e7d0b0e2e397db5"}, + {file = "protobuf-3.14.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:629b03fd3caae7f815b0c66b41273f6b1900a579e2ccb41ef4493a4f5fb84f3a"}, + {file = "protobuf-3.14.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:5b7a637212cc9b2bcf85dd828b1178d19efdf74dbfe1ddf8cd1b8e01fdaaa7f5"}, + {file = "protobuf-3.14.0-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:43b554b9e73a07ba84ed6cf25db0ff88b1e06be610b37656e292e3cbb5437472"}, + {file = "protobuf-3.14.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:5e9806a43232a1fa0c9cf5da8dc06f6910d53e4390be1fa06f06454d888a9142"}, + {file = "protobuf-3.14.0-cp35-cp35m-win32.whl", hash = 
"sha256:1c51fda1bbc9634246e7be6016d860be01747354ed7015ebe38acf4452f470d2"}, + {file = "protobuf-3.14.0-cp35-cp35m-win_amd64.whl", hash = "sha256:4b74301b30513b1a7494d3055d95c714b560fbb630d8fb9956b6f27992c9f980"}, + {file = "protobuf-3.14.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:86a75477addde4918e9a1904e5c6af8d7b691f2a3f65587d73b16100fbe4c3b2"}, + {file = "protobuf-3.14.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ecc33531a213eee22ad60e0e2aaea6c8ba0021f0cce35dbf0ab03dee6e2a23a1"}, + {file = "protobuf-3.14.0-cp36-cp36m-win32.whl", hash = "sha256:72230ed56f026dd664c21d73c5db73ebba50d924d7ba6b7c0d81a121e390406e"}, + {file = "protobuf-3.14.0-cp36-cp36m-win_amd64.whl", hash = "sha256:0fc96785262042e4863b3f3b5c429d4636f10d90061e1840fce1baaf59b1a836"}, + {file = "protobuf-3.14.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4e75105c9dfe13719b7293f75bd53033108f4ba03d44e71db0ec2a0e8401eafd"}, + {file = "protobuf-3.14.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:2a7e2fe101a7ace75e9327b9c946d247749e564a267b0515cf41dfe450b69bac"}, + {file = "protobuf-3.14.0-cp37-cp37m-win32.whl", hash = "sha256:b0d5d35faeb07e22a1ddf8dce620860c8fe145426c02d1a0ae2688c6e8ede36d"}, + {file = "protobuf-3.14.0-cp37-cp37m-win_amd64.whl", hash = "sha256:8971c421dbd7aad930c9bd2694122f332350b6ccb5202a8b7b06f3f1a5c41ed5"}, + {file = "protobuf-3.14.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9616f0b65a30851e62f1713336c931fcd32c057202b7ff2cfbfca0fc7d5e3043"}, + {file = "protobuf-3.14.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:22bcd2e284b3b1d969c12e84dc9b9a71701ec82d8ce975fdda19712e1cfd4e00"}, + {file = "protobuf-3.14.0-py2.py3-none-any.whl", hash = "sha256:0e247612fadda953047f53301a7b0407cb0c3cb4ae25a6fde661597a04039b3c"}, + {file = "protobuf-3.14.0.tar.gz", hash = "sha256:1d63eb389347293d8915fb47bee0951c7b5dab522a4a60118b9a18f33e21f8ce"}, ] py = [ - {file = "py-1.9.0-py2.py3-none-any.whl", hash = "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2"}, - {file = "py-1.9.0.tar.gz", hash = "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342"}, + {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, + {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, ] pygments = [ - {file = "Pygments-2.7.1-py3-none-any.whl", hash = "sha256:307543fe65c0947b126e83dd5a61bd8acbd84abec11f43caebaf5534cbc17998"}, - {file = "Pygments-2.7.1.tar.gz", hash = "sha256:926c3f319eda178d1bd90851e4317e6d8cdb5e292a3386aac9bd75eca29cf9c7"}, + {file = "Pygments-2.7.4-py3-none-any.whl", hash = "sha256:bc9591213a8f0e0ca1a5e68a479b4887fdc3e75d0774e5c71c31920c427de435"}, + {file = "Pygments-2.7.4.tar.gz", hash = "sha256:df49d09b498e83c1a73128295860250b0b7edd4c723a32e9bc0d295c7c2ec337"}, ] pyparsing = [ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, @@ -1262,57 +1353,75 @@ pytest-asyncio = [ {file = "pytest-asyncio-0.12.0.tar.gz", hash = "sha256:475bd2f3dc0bc11d2463656b3cbaafdbec5a47b47508ea0b329ee693040eebd2"}, ] pytest-cov = [ - {file = "pytest-cov-2.10.1.tar.gz", hash = "sha256:47bd0ce14056fdd79f93e1713f88fad7bdcc583dcd7783da86ef2f085a0bb88e"}, - {file = "pytest_cov-2.10.1-py2.py3-none-any.whl", hash = "sha256:45ec2d5182f89a81fc3eb29e3d1ed3113b9e9a873bcddb2a71faaab066110191"}, + {file = "pytest-cov-2.11.1.tar.gz", hash = 
"sha256:359952d9d39b9f822d9d29324483e7ba04a3a17dd7d05aa6beb7ea01e359e5f7"}, + {file = "pytest_cov-2.11.1-py2.py3-none-any.whl", hash = "sha256:bdb9fdb0b85a7cc825269a4c56b48ccaa5c7e365054b6038772c32ddcdc969da"}, ] pytest-mock = [ - {file = "pytest-mock-3.3.1.tar.gz", hash = "sha256:a4d6d37329e4a893e77d9ffa89e838dd2b45d5dc099984cf03c703ac8411bb82"}, - {file = "pytest_mock-3.3.1-py3-none-any.whl", hash = "sha256:024e405ad382646318c4281948aadf6fe1135632bea9cc67366ea0c4098ef5f2"}, + {file = "pytest-mock-3.5.1.tar.gz", hash = "sha256:a1e2aba6af9560d313c642dae7e00a2a12b022b80301d9d7fc8ec6858e1dd9fc"}, + {file = "pytest_mock-3.5.1-py3-none-any.whl", hash = "sha256:379b391cfad22422ea2e252bdfc008edd08509029bcde3c25b2c0bd741e0424e"}, ] pytz = [ - {file = "pytz-2020.1-py2.py3-none-any.whl", hash = "sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed"}, - {file = "pytz-2020.1.tar.gz", hash = "sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048"}, + {file = "pytz-2020.5-py2.py3-none-any.whl", hash = "sha256:16962c5fb8db4a8f63a26646d8886e9d769b6c511543557bc84e9569fb9a9cb4"}, + {file = "pytz-2020.5.tar.gz", hash = "sha256:180befebb1927b16f6b57101720075a984c019ac16b1b7575673bea42c6c3da5"}, ] regex = [ - {file = "regex-2020.10.15-cp27-cp27m-win32.whl", hash = "sha256:e935a166a5f4c02afe3f7e4ce92ce5a786f75c6caa0c4ce09c922541d74b77e8"}, - {file = "regex-2020.10.15-cp27-cp27m-win_amd64.whl", hash = "sha256:d81be22d5d462b96a2aa5c512f741255ba182995efb0114e5a946fe254148df1"}, - {file = "regex-2020.10.15-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:6d4cdb6c20e752426b2e569128488c5046fb1b16b1beadaceea9815c36da0847"}, - {file = "regex-2020.10.15-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:25991861c6fef1e5fd0a01283cf5658c5e7f7aa644128e85243bc75304e91530"}, - {file = "regex-2020.10.15-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:6e9f72e0ee49f7d7be395bfa29e9533f0507a882e1e6bf302c0a204c65b742bf"}, - {file = "regex-2020.10.15-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:578ac6379e65eb8e6a85299b306c966c852712c834dc7eef0ba78d07a828f67b"}, - {file = "regex-2020.10.15-cp36-cp36m-win32.whl", hash = "sha256:65b6b018b07e9b3b6a05c2c3bb7710ed66132b4df41926c243887c4f1ff303d5"}, - {file = "regex-2020.10.15-cp36-cp36m-win_amd64.whl", hash = "sha256:2f60ba5c33f00ce9be29a140e6f812e39880df8ba9cb92ad333f0016dbc30306"}, - {file = "regex-2020.10.15-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:5d4a3221f37520bb337b64a0632716e61b26c8ae6aaffceeeb7ad69c009c404b"}, - {file = "regex-2020.10.15-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:26b85672275d8c7a9d4ff93dbc4954f5146efdb2ecec89ad1de49439984dea14"}, - {file = "regex-2020.10.15-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:828618f3c3439c5e6ef8621e7c885ca561bbaaba90ddbb6a7dfd9e1ec8341103"}, - {file = "regex-2020.10.15-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:aef23aed9d4017cc74d37f703d57ce254efb4c8a6a01905f40f539220348abf9"}, - {file = "regex-2020.10.15-cp37-cp37m-win32.whl", hash = "sha256:6c72adb85adecd4522a488a751e465842cdd2a5606b65464b9168bf029a54272"}, - {file = "regex-2020.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:ef3a55b16c6450574734db92e0a3aca283290889934a23f7498eaf417e3af9f0"}, - {file = "regex-2020.10.15-cp38-cp38-manylinux1_i686.whl", hash = "sha256:8958befc139ac4e3f16d44ec386c490ea2121ed8322f4956f83dd9cad8e9b922"}, - {file = "regex-2020.10.15-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:3dd952f3f8dc01b72c0cf05b3631e05c50ac65ddd2afdf26551638e97502107b"}, - {file = 
"regex-2020.10.15-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:608d6c05452c0e6cc49d4d7407b4767963f19c4d2230fa70b7201732eedc84f2"}, - {file = "regex-2020.10.15-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:02686a2f0b1a4be0facdd0d3ad4dc6c23acaa0f38fb5470d892ae88584ba705c"}, - {file = "regex-2020.10.15-cp38-cp38-win32.whl", hash = "sha256:137da580d1e6302484be3ef41d72cf5c3ad22a076070051b7449c0e13ab2c482"}, - {file = "regex-2020.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:20cdd7e1736f4f61a5161aa30d05ac108ab8efc3133df5eb70fe1e6a23ea1ca6"}, - {file = "regex-2020.10.15-cp39-cp39-manylinux1_i686.whl", hash = "sha256:85b733a1ef2b2e7001aff0e204a842f50ad699c061856a214e48cfb16ace7d0c"}, - {file = "regex-2020.10.15-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:af1f5e997dd1ee71fb6eb4a0fb6921bf7a778f4b62f1f7ef0d7445ecce9155d6"}, - {file = "regex-2020.10.15-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:b5eeaf4b5ef38fab225429478caf71f44d4a0b44d39a1aa4d4422cda23a9821b"}, - {file = "regex-2020.10.15-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:aeac7c9397480450016bc4a840eefbfa8ca68afc1e90648aa6efbfe699e5d3bb"}, - {file = "regex-2020.10.15-cp39-cp39-win32.whl", hash = "sha256:698f8a5a2815e1663d9895830a063098ae2f8f2655ae4fdc5dfa2b1f52b90087"}, - {file = "regex-2020.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:a51e51eecdac39a50ede4aeed86dbef4776e3b73347d31d6ad0bc9648ba36049"}, - {file = "regex-2020.10.15.tar.gz", hash = "sha256:d25f5cca0f3af6d425c9496953445bf5b288bb5b71afc2b8308ad194b714c159"}, + {file = "regex-2020.11.13-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8b882a78c320478b12ff024e81dc7d43c1462aa4a3341c754ee65d857a521f85"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a63f1a07932c9686d2d416fb295ec2c01ab246e89b4d58e5fa468089cab44b70"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:6e4b08c6f8daca7d8f07c8d24e4331ae7953333dbd09c648ed6ebd24db5a10ee"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bba349276b126947b014e50ab3316c027cac1495992f10e5682dc677b3dfa0c5"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:56e01daca75eae420bce184edd8bb341c8eebb19dd3bce7266332258f9fb9dd7"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:6a8ce43923c518c24a2579fda49f093f1397dad5d18346211e46f134fc624e31"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab79fcb02b930de09c76d024d279686ec5d532eb814fd0ed1e0051eb8bd2daa"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:9801c4c1d9ae6a70aeb2128e5b4b68c45d4f0af0d1535500884d644fa9b768c6"}, + {file = "regex-2020.11.13-cp36-cp36m-win32.whl", hash = "sha256:49cae022fa13f09be91b2c880e58e14b6da5d10639ed45ca69b85faf039f7a4e"}, + {file = "regex-2020.11.13-cp36-cp36m-win_amd64.whl", hash = "sha256:749078d1eb89484db5f34b4012092ad14b327944ee7f1c4f74d6279a6e4d1884"}, + {file = "regex-2020.11.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2f4007bff007c96a173e24dcda236e5e83bde4358a557f9ccf5e014439eae4b"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:38c8fd190db64f513fe4e1baa59fed086ae71fa45083b6936b52d34df8f86a88"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5862975b45d451b6db51c2e654990c1820523a5b07100fc6903e9c86575202a0"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_i686.whl", hash = 
"sha256:262c6825b309e6485ec2493ffc7e62a13cf13fb2a8b6d212f72bd53ad34118f1"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:bafb01b4688833e099d79e7efd23f99172f501a15c44f21ea2118681473fdba0"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:e32f5f3d1b1c663af7f9c4c1e72e6ffe9a78c03a31e149259f531e0fed826512"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3bddc701bdd1efa0d5264d2649588cbfda549b2899dc8d50417e47a82e1387ba"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:02951b7dacb123d8ea6da44fe45ddd084aa6777d4b2454fa0da61d569c6fa538"}, + {file = "regex-2020.11.13-cp37-cp37m-win32.whl", hash = "sha256:0d08e71e70c0237883d0bef12cad5145b84c3705e9c6a588b2a9c7080e5af2a4"}, + {file = "regex-2020.11.13-cp37-cp37m-win_amd64.whl", hash = "sha256:1fa7ee9c2a0e30405e21031d07d7ba8617bc590d391adfc2b7f1e8b99f46f444"}, + {file = "regex-2020.11.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:baf378ba6151f6e272824b86a774326f692bc2ef4cc5ce8d5bc76e38c813a55f"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e3faaf10a0d1e8e23a9b51d1900b72e1635c2d5b0e1bea1c18022486a8e2e52d"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2a11a3e90bd9901d70a5b31d7dd85114755a581a5da3fc996abfefa48aee78af"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1ebb090a426db66dd80df8ca85adc4abfcbad8a7c2e9a5ec7513ede522e0a8f"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:b2b1a5ddae3677d89b686e5c625fc5547c6e492bd755b520de5332773a8af06b"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2c99e97d388cd0a8d30f7c514d67887d8021541b875baf09791a3baad48bb4f8"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:c084582d4215593f2f1d28b65d2a2f3aceff8342aa85afd7be23a9cad74a0de5"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:a3d748383762e56337c39ab35c6ed4deb88df5326f97a38946ddd19028ecce6b"}, + {file = "regex-2020.11.13-cp38-cp38-win32.whl", hash = "sha256:7913bd25f4ab274ba37bc97ad0e21c31004224ccb02765ad984eef43e04acc6c"}, + {file = "regex-2020.11.13-cp38-cp38-win_amd64.whl", hash = "sha256:6c54ce4b5d61a7129bad5c5dc279e222afd00e721bf92f9ef09e4fae28755683"}, + {file = "regex-2020.11.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1862a9d9194fae76a7aaf0150d5f2a8ec1da89e8b55890b1786b8f88a0f619dc"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4902e6aa086cbb224241adbc2f06235927d5cdacffb2425c73e6570e8d862364"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7a25fcbeae08f96a754b45bdc050e1fb94b95cab046bf56b016c25e9ab127b3e"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:d2d8ce12b7c12c87e41123997ebaf1a5767a5be3ec545f64675388970f415e2e"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f7d29a6fc4760300f86ae329e3b6ca28ea9c20823df123a2ea8693e967b29917"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:717881211f46de3ab130b58ec0908267961fadc06e44f974466d1887f865bd5b"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3128e30d83f2e70b0bed9b2a34e92707d0877e460b402faca908c6667092ada9"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8f6a2229e8ad946e36815f2a03386bb8353d4bde368fdf8ca5f0cb97264d3b5c"}, 
+ {file = "regex-2020.11.13-cp39-cp39-win32.whl", hash = "sha256:f8f295db00ef5f8bae530fc39af0b40486ca6068733fb860b42115052206466f"}, + {file = "regex-2020.11.13-cp39-cp39-win_amd64.whl", hash = "sha256:a15f64ae3a027b64496a71ab1f722355e570c3fac5ba2801cafce846bf5af01d"}, + {file = "regex-2020.11.13.tar.gz", hash = "sha256:83d6b356e116ca119db8e7c6fc2983289d87b27b3fac238cfe5dca529d884562"}, ] requests = [ - {file = "requests-2.24.0-py2.py3-none-any.whl", hash = "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898"}, - {file = "requests-2.24.0.tar.gz", hash = "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b"}, + {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, + {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, +] +rich = [ + {file = "rich-9.9.0-py3-none-any.whl", hash = "sha256:d376396cb3793a042f6167cd613a31a370ea2c5ec1bbdf76a5c9e9c588ccff12"}, + {file = "rich-9.9.0.tar.gz", hash = "sha256:0bd8f42c3a03b7ef5e311d5e37f47bea9d268f541981c169072be5869c007957"}, ] six = [ {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, ] snowballstemmer = [ - {file = "snowballstemmer-2.0.0-py2.py3-none-any.whl", hash = "sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0"}, - {file = "snowballstemmer-2.0.0.tar.gz", hash = "sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52"}, + {file = "snowballstemmer-2.1.0-py2.py3-none-any.whl", hash = "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2"}, + {file = "snowballstemmer-2.1.0.tar.gz", hash = "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914"}, ] sphinx = [ {file = "Sphinx-3.1.2-py3-none-any.whl", hash = "sha256:97dbf2e31fc5684bb805104b8ad34434ed70e6c588f6896991b2fdfd2bef8c00"}, @@ -1347,39 +1456,52 @@ sphinxcontrib-serializinghtml = [ {file = "sphinxcontrib_serializinghtml-1.1.4-py2.py3-none-any.whl", hash = "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"}, ] toml = [ - {file = "toml-0.10.1-py2.py3-none-any.whl", hash = "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"}, - {file = "toml-0.10.1.tar.gz", hash = "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f"}, + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] tomlkit = [ {file = "tomlkit-0.7.0-py2.py3-none-any.whl", hash = "sha256:6babbd33b17d5c9691896b0e68159215a9387ebfa938aa3ac42f4a4beeb2b831"}, {file = "tomlkit-0.7.0.tar.gz", hash = "sha256:ac57f29693fab3e309ea789252fcce3061e19110085aa31af5446ca749325618"}, ] tox = [ - {file = "tox-3.20.1-py2.py3-none-any.whl", hash = "sha256:42ce19ce5dc2f6d6b1fdc5666c476e1f1e2897359b47e0aa3a5b774f335d57c2"}, - {file = "tox-3.20.1.tar.gz", hash = "sha256:4321052bfe28f9d85082341ca8e233e3ea901fdd14dab8a5d3fbd810269fbaf6"}, + {file = "tox-3.21.2-py2.py3-none-any.whl", hash = "sha256:0aa777ee466f2ef18e6f58428c793c32378779e0a321dbb8934848bc3e78998c"}, + {file = "tox-3.21.2.tar.gz", hash = "sha256:f501808381c01c6d7827c2f17328be59c0a715046e94605ddca15fb91e65827d"}, 
] typed-ast = [ - {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"}, - {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"}, - {file = "typed_ast-1.4.1-cp35-cp35m-win32.whl", hash = "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919"}, - {file = "typed_ast-1.4.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01"}, - {file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"}, - {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"}, - {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"}, - {file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"}, - {file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"}, - {file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"}, - {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"}, - {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"}, - {file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"}, - {file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"}, - {file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"}, - {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"}, - {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"}, - {file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"}, - {file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"}, - {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"}, - {file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"}, + {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70"}, + {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487"}, + {file = "typed_ast-1.4.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412"}, + {file = "typed_ast-1.4.2-cp35-cp35m-win32.whl", hash = "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400"}, + {file = 
"typed_ast-1.4.2-cp35-cp35m-win_amd64.whl", hash = "sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606"}, + {file = "typed_ast-1.4.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64"}, + {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07"}, + {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc"}, + {file = "typed_ast-1.4.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a"}, + {file = "typed_ast-1.4.2-cp36-cp36m-win32.whl", hash = "sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151"}, + {file = "typed_ast-1.4.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3"}, + {file = "typed_ast-1.4.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41"}, + {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f"}, + {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581"}, + {file = "typed_ast-1.4.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37"}, + {file = "typed_ast-1.4.2-cp37-cp37m-win32.whl", hash = "sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd"}, + {file = "typed_ast-1.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496"}, + {file = "typed_ast-1.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc"}, + {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10"}, + {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea"}, + {file = "typed_ast-1.4.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787"}, + {file = "typed_ast-1.4.2-cp38-cp38-win32.whl", hash = "sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2"}, + {file = "typed_ast-1.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937"}, + {file = "typed_ast-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1"}, + {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6"}, + {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166"}, + {file = "typed_ast-1.4.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d"}, + {file = "typed_ast-1.4.2-cp39-cp39-win32.whl", hash = "sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b"}, + {file = "typed_ast-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440"}, + {file = 
"typed_ast-1.4.2.tar.gz", hash = "sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a"}, +] +typer = [ + {file = "typer-0.3.2-py3-none-any.whl", hash = "sha256:ba58b920ce851b12a2d790143009fa00ac1d05b3ff3257061ff69dbdfc3d161b"}, + {file = "typer-0.3.2.tar.gz", hash = "sha256:5455d750122cff96745b0dec87368f56d023725a7ebc9d2e54dd23dc86816303"}, ] typing-extensions = [ {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"}, @@ -1387,18 +1509,18 @@ typing-extensions = [ {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, ] urllib3 = [ - {file = "urllib3-1.25.10-py2.py3-none-any.whl", hash = "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"}, - {file = "urllib3-1.25.10.tar.gz", hash = "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a"}, + {file = "urllib3-1.26.2-py2.py3-none-any.whl", hash = "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"}, + {file = "urllib3-1.26.2.tar.gz", hash = "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08"}, ] virtualenv = [ - {file = "virtualenv-20.0.35-py2.py3-none-any.whl", hash = "sha256:0ebc633426d7468664067309842c81edab11ae97fcaf27e8ad7f5748c89b431b"}, - {file = "virtualenv-20.0.35.tar.gz", hash = "sha256:2a72c80fa2ad8f4e2985c06e6fc12c3d60d060e410572f553c90619b0f6efaf3"}, + {file = "virtualenv-20.4.0-py2.py3-none-any.whl", hash = "sha256:227a8fed626f2f20a6cdb0870054989f82dd27b2560a911935ba905a2a5e0034"}, + {file = "virtualenv-20.4.0.tar.gz", hash = "sha256:219ee956e38b08e32d5639289aaa5bd190cfbe7dafcb8fa65407fca08e808f9c"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, ] zipp = [ - {file = "zipp-3.3.1-py3-none-any.whl", hash = "sha256:16522f69653f0d67be90e8baa4a46d66389145b734345d68a257da53df670903"}, - {file = "zipp-3.3.1.tar.gz", hash = "sha256:c1532a8030c32fd52ff6a288d855fe7adef5823ba1d26a29a68fd6314aa72baa"}, + {file = "zipp-3.4.0-py3-none-any.whl", hash = "sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108"}, + {file = "zipp-3.4.0.tar.gz", hash = "sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb"}, ] diff --git a/pyproject.toml b/pyproject.toml index d71bfda2d..24a6b3112 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,6 +18,8 @@ black = { version = ">=19.3b0", optional = true } dataclasses = { version = "^0.7", python = ">=3.6, <3.7" } grpclib = "^0.4.1" jinja2 = { version = "^2.11.2", optional = true } +typer = { version = "^0.3.2", optional = true } +rich = { version = "^9.8.2", optional = true } [tool.poetry.dev-dependencies] black = "^20.8b1" @@ -35,8 +37,6 @@ tox = "^3.15.1" sphinx = "3.1.2" sphinx-rtd-theme = "0.5.0" asv = "^0.4.2" -typer = "^0.3.2" -rich = "^9.8.2" [tool.poetry.scripts] betterproto = "betterproto:__main__.main" diff --git a/src/betterproto/plugin/__init__.py b/src/betterproto/plugin/__init__.py index c7f6541ca..cb47cca4e 100644 --- a/src/betterproto/plugin/__init__.py +++ b/src/betterproto/plugin/__init__.py @@ -10,10 +10,12 @@ "included." 
) -STDLIB_MODULES = [ - p.with_suffix("").name - for p in Path(traceback.__file__).parent.iterdir() if p.suffix == ".py" or p.is_dir() -] +STDLIB_MODULES = getattr(sys, "module_names", [ + p.with_suffix("").name + for p in Path(traceback.__file__).parent.iterdir() + if p.suffix == ".py" or p.is_dir() + ] +) def import_exception_hook( diff --git a/src/betterproto/plugin/cli/errors.py b/src/betterproto/plugin/cli/errors.py index b5cee0482..c513e6c76 100644 --- a/src/betterproto/plugin/cli/errors.py +++ b/src/betterproto/plugin/cli/errors.py @@ -22,6 +22,7 @@ class ProtobufSyntaxError(SyntaxError, CompilerError): offset: :class:`int` The offset along the :attr:`lineno` that the syntax error occurs. """ + def __init__(self, msg: str, file: Path, lineno: int, offset: int): text = file.read_text().splitlines()[lineno - 1] super().__init__(msg, (str(file), lineno, offset, text)) diff --git a/src/betterproto/plugin/cli/utils.py b/src/betterproto/plugin/cli/utils.py index b6af68874..b8cf28ce8 100644 --- a/src/betterproto/plugin/cli/utils.py +++ b/src/betterproto/plugin/cli/utils.py @@ -101,6 +101,6 @@ async def to_thread(func: Callable[..., T], *args: Any, **kwargs: Any) -> T: return await loop.run_in_executor(None, partial) -if __name__ == '__main__': +if __name__ == "__main__": os.getcwd = lambda: "/Users/gobot1234/PycharmProjects/betterproto/tests/inputs" print(get_files(("bool", "bool/bool.proto", "casing/casing.proto"))) diff --git a/src/betterproto/plugin/compiler.py b/src/betterproto/plugin/compiler.py index 2d99008c3..b666dda01 100644 --- a/src/betterproto/plugin/compiler.py +++ b/src/betterproto/plugin/compiler.py @@ -6,7 +6,9 @@ from .models import OutputTemplate -def outputfile_compiler(output_file: OutputTemplate, line_length: int = black.DEFAULT_LINE_LENGTH) -> str: +def outputfile_compiler( + output_file: OutputTemplate, line_length: int = black.DEFAULT_LINE_LENGTH +) -> str: templates_folder = os.path.abspath( os.path.join(os.path.dirname(__file__), "..", "templates") @@ -21,5 +23,7 @@ def outputfile_compiler(output_file: OutputTemplate, line_length: int = black.DE return black.format_str( template.render(output_file=output_file), - mode=black.Mode(line_length=line_length, target_versions={black.TargetVersion.PY37}), + mode=black.Mode( + line_length=line_length, target_versions={black.TargetVersion.PY37} + ), ) diff --git a/src/betterproto/plugin/main.py b/src/betterproto/plugin/main.py index 9fcd3e44e..c2d1a089e 100755 --- a/src/betterproto/plugin/main.py +++ b/src/betterproto/plugin/main.py @@ -30,7 +30,7 @@ def main() -> None: "Direct invocation of the protoc plugin is depreciated over using the CLI\n" "To do so you just need to type:\n" f"betterproto compile {' '.join(request.file_to_generate)}", - file=sys.stderr + file=sys.stderr, ) # Generate code diff --git a/src/betterproto/plugin/parser.py b/src/betterproto/plugin/parser.py index 1a565a1ed..713342fe1 100644 --- a/src/betterproto/plugin/parser.py +++ b/src/betterproto/plugin/parser.py @@ -111,7 +111,9 @@ def generate_code( parent_request=request_data, package_proto_obj=proto_file ) # Add this input file to the output corresponding to this package - request_data.output_packages[output_package_name].input_files.append(proto_file) + request_data.output_packages[output_package_name].input_files.append( + proto_file + ) if from_cli: progress.update(reading_progress_bar, advance=1) @@ -120,11 +122,12 @@ def generate_code( # get the references to input/output messages for each service with Progress(transient=True) as progress: 
parsing_progress_bar = progress.add_task( - "[green]Parsing protobuf enums and messages...", total=sum( + "[green]Parsing protobuf enums and messages...", + total=sum( len(message.package_proto_obj.enum_type) + len(message.package_proto_obj.message_type) for message in request_data.output_packages.values() - ) + ), ) for output_package_name, output_package in request_data.output_packages.items(): for proto_input_file in output_package.input_files: @@ -142,12 +145,16 @@ def generate_code( if generate_services: with Progress(transient=True) as progress: parsing_progress_bar = progress.add_task( - "[green]Parsing protobuf services...", total=sum( + "[green]Parsing protobuf services...", + total=sum( len(message.package_proto_obj.service) for message in request_data.output_packages.values() - ) + ), ) - for output_package_name, output_package in request_data.output_packages.items(): + for ( + output_package_name, + output_package, + ) in request_data.output_packages.items(): for proto_input_file in output_package.input_files: for index, service in enumerate(proto_input_file.service): read_protobuf_service(service, index, output_package) @@ -158,7 +165,8 @@ def generate_code( output_paths: Set[pathlib.Path] = set() with Progress(transient=True) as progress: compiling_progress_bar = progress.add_task( - "[green]Compiling protobuf files...", total=len(request_data.output_packages) + "[green]Compiling protobuf files...", + total=len(request_data.output_packages), ) for output_package_name, output_package in request_data.output_packages.items(): @@ -170,7 +178,9 @@ def generate_code( CodeGeneratorResponseFile( name=str(output_path), # Render and then format the output file - content=outputfile_compiler(output_file=output_package, line_length=line_length), + content=outputfile_compiler( + output_file=output_package, line_length=line_length + ), ) ) if from_cli: diff --git a/tests/generate.py b/tests/generate.py index d7897aeba..f7725cc55 100755 --- a/tests/generate.py +++ b/tests/generate.py @@ -62,7 +62,10 @@ async def generate(whitelist: Set[Path], verbose: bool) -> None: ): if exception is not None: # TODO this broke import traceback - traceback.print_exception(exception.__class__, exception, exception.__traceback__) + + traceback.print_exception( + exception.__class__, exception, exception.__traceback__ + ) failed_test_cases.append(test_case_name) if failed_test_cases: From 3553e0e772aef65ccde9dc003d9e8df42f165301 Mon Sep 17 00:00:00 2001 From: gobot1234 Date: Wed, 27 Jan 2021 12:28:43 +0000 Subject: [PATCH 30/46] Fix some bugs --- src/betterproto/plugin/cli/runner.py | 7 ++----- src/betterproto/plugin/cli/utils.py | 2 +- src/betterproto/plugin/parser.py | 4 ++-- 3 files changed, 5 insertions(+), 8 deletions(-) diff --git a/src/betterproto/plugin/cli/runner.py b/src/betterproto/plugin/cli/runner.py index 9acbcaa42..4cc487a47 100644 --- a/src/betterproto/plugin/cli/runner.py +++ b/src/betterproto/plugin/cli/runner.py @@ -21,13 +21,10 @@ def write_file(output: "Path", file: CodeGeneratorResponseFile) -> None: path = (output / file.name).resolve() - if file.content.__class__ is object: - return + path.parent.mkdir(parents=True, exist_ok=True) try: path.write_text(file.content) - except FileNotFoundError: - # something weird happens here with files that use google imports - # the google files seem to get included in the generated code but with no code? 
+ except TypeError: pass diff --git a/src/betterproto/plugin/cli/utils.py b/src/betterproto/plugin/cli/utils.py index b8cf28ce8..02a6619ba 100644 --- a/src/betterproto/plugin/cli/utils.py +++ b/src/betterproto/plugin/cli/utils.py @@ -55,7 +55,7 @@ def generate_command( command = [ f"--proto_path={files[0].parent.as_posix()}", f"--python_{implementation}out={output.as_posix()}", - *[file.as_posix() for file in files], + *[f'"{file.as_posix()}"' for file in files], # ensure paths with spaces in the name get parsed correctly ] if use_protoc: command.insert(0, "protoc") diff --git a/src/betterproto/plugin/parser.py b/src/betterproto/plugin/parser.py index 713342fe1..2540aed0a 100644 --- a/src/betterproto/plugin/parser.py +++ b/src/betterproto/plugin/parser.py @@ -100,8 +100,8 @@ def generate_code( "[green]Reading protobuf files...", total=len(request.proto_file) ) for proto_file in request.proto_file: - if proto_file.package == "google.protobuf" and include_google: - # If not INCLUDE_GOOGLE skip re-compiling Google's well-known types + if proto_file.package == "google.protobuf" and not include_google: + # If not include_google skip re-compiling Google's well-known types continue output_package_name = proto_file.package From 8eb7c90f506072e50908a5e7b698721e248c147f Mon Sep 17 00:00:00 2001 From: gobot1234 Date: Wed, 27 Jan 2021 12:30:31 +0000 Subject: [PATCH 31/46] Ensure paths are sorted --- src/betterproto/plugin/cli/utils.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/betterproto/plugin/cli/utils.py b/src/betterproto/plugin/cli/utils.py index 02a6619ba..f8e150045 100644 --- a/src/betterproto/plugin/cli/utils.py +++ b/src/betterproto/plugin/cli/utils.py @@ -37,7 +37,9 @@ def get_files(paths: List[Path]) -> "defaultdict[Path, Set[Path]]": if str(path).startswith("/usr") and "include/google/protobuf" in str(path): new_paths[path].update(path / proto for proto in INCLUDE) elif path.is_dir(): - new_paths[path].update(path.glob("*.proto")) + new_paths[path].update( + sorted(path.glob("*.proto")) + ) # ensure order for files when debugging compilation errors else: new_paths[path.parent].add(path) From 0c4277b72387c32880b3aa799c72294f6c27f59f Mon Sep 17 00:00:00 2001 From: gobot1234 Date: Sun, 31 Jan 2021 13:05:35 +0000 Subject: [PATCH 32/46] Redo error handling to be much more reliable --- src/betterproto/plugin/cli/commands.py | 16 ++--- src/betterproto/plugin/cli/errors.py | 24 +++++++ src/betterproto/plugin/cli/runner.py | 90 ++++++++++++++++++-------- src/betterproto/plugin/cli/utils.py | 9 +-- src/betterproto/plugin/main.py | 8 ++- 5 files changed, 104 insertions(+), 43 deletions(-) diff --git a/src/betterproto/plugin/cli/commands.py b/src/betterproto/plugin/cli/commands.py index 6131bfe71..018c25093 100644 --- a/src/betterproto/plugin/cli/commands.py +++ b/src/betterproto/plugin/cli/commands.py @@ -84,23 +84,21 @@ async def compile( from_cli=True, ) except ProtobufSyntaxError as exc: - error = Syntax( - exc.file.read_text(), - "proto", - line_numbers=True, - line_range=(max(exc.lineno - 5, 0), exc.lineno), - ) - # TODO switch to .from_path but it appears to be bugged and doesnt pick lexer_name rich.print( f"[red]File {str(exc.file).strip()}:\n", - error, + Syntax( + exc.file.read_text(), + "proto", + line_numbers=True, + line_range=(max(exc.lineno - 5, 0), exc.lineno), + ), # TODO switch to .from_path but it appears to be bugged and doesnt render properly f"{' ' * (exc.offset + 3)}^\nSyntaxError: {exc.msg}[red]", ) except CLIError as exc: failed_files = 
"\n".join(f" - {file}" for file in protos) rich.print( f"[red]{'Protoc' if protoc else 'GRPC'} failed to generate outputs for:\n\n" - f"{failed_files}\n\nSee the output for the issue:\n{exc.args[0]}[red]", + f"{failed_files}\n\nSee the output for the issue:\n{' '.join(exc.args)}[red]", ) else: diff --git a/src/betterproto/plugin/cli/errors.py b/src/betterproto/plugin/cli/errors.py index c513e6c76..858ba81fb 100644 --- a/src/betterproto/plugin/cli/errors.py +++ b/src/betterproto/plugin/cli/errors.py @@ -1,4 +1,6 @@ +from dataclasses import dataclass from pathlib import Path +from typing import Union class CLIError(Exception): @@ -27,3 +29,25 @@ def __init__(self, msg: str, file: Path, lineno: int, offset: int): text = file.read_text().splitlines()[lineno - 1] super().__init__(msg, (str(file), lineno, offset, text)) self.file = file + + +@dataclass +class UnusedImport(CLIError, ImportWarning): + """The warning emitted when an unused import is detected by protoc. + + Attributes + ---------- + msg: :class:`str` + The message given by protoc e.g. "Expected top-level statement (e.g. "message")." + file: :class:`.Path` + The file that had the warning issued for. + used_import: :class:`.Path` + The unused import file. + """ + + msg: str + file: Path + unused_import: Union[Path, str] + + def __str__(self): + return f"Import {self.unused_import} is unused in {self.file}" diff --git a/src/betterproto/plugin/cli/runner.py b/src/betterproto/plugin/cli/runner.py index 4cc487a47..9c0df5edb 100644 --- a/src/betterproto/plugin/cli/runner.py +++ b/src/betterproto/plugin/cli/runner.py @@ -2,6 +2,8 @@ import functools import os import re +import secrets +import warnings from concurrent.futures import ProcessPoolExecutor from typing import TYPE_CHECKING, Any, NoReturn, Tuple @@ -11,7 +13,7 @@ ) from ..parser import generate_code from . 
import USE_PROTOC, utils -from .errors import CLIError, ProtobufSyntaxError +from .errors import CompilerError, ProtobufSyntaxError, UnusedImport if TYPE_CHECKING: from pathlib import Path @@ -28,23 +30,41 @@ def write_file(output: "Path", file: CodeGeneratorResponseFile) -> None: pass -def handle_error(data: str, files: Tuple["Path", ...]) -> NoReturn: - match = re.match( - r"^(?P.+):(?P\d+):(?P\d+):(?P.*)", +def handle_error(data: bytes, files: Tuple["Path", ...]) -> NoReturn: + matches = re.finditer( + rb"^(?P.+):(?P\d+):(?P\d+): (?P.*)", data, ) - if match is None: - raise CLIError(data.strip()) - - for file in files: - if file.as_posix().endswith(match["filename"]): - raise ProtobufSyntaxError( - match["message"].strip(), - file, - int(match["lineno"]), - int(match["offset"]), - ) - raise ProtobufSyntaxError + if not matches: + raise CompilerError(data.decode().strip()) + + for match in matches: + file = utils.find(lambda f: f.as_posix().endswith(match["filename"].decode()), files) + + if match["message"].startswith(b"warning: "): + import_matches = list(re.finditer(rb"warning: Import (?P.+) is unused\.", match["message"])) + if import_matches: + for import_match in import_matches: + unused_import = utils.find( + lambda f: file.as_posix().endswith(import_match["unused_import"].decode()), + files + ) + if unused_import is None: + unused_import = import_match["unused_import"].decode() + warning = UnusedImport(match["message"].decode().strip(), file, unused_import) + else: + warning = Warning(match["message"].lstrip(b"warning: ").strip().decode()) + + warnings.simplefilter("once") + warnings.warn(warning) + continue + + raise ProtobufSyntaxError( + match["message"].decode().strip(), + file, + int(match["lineno"]), + int(match["offset"]), + ) async def compile_protobufs( @@ -76,25 +96,39 @@ async def compile_protobufs( *files, output=output, use_protoc=use_protoc, implementation=implementation ) + secret_word = secrets.token_hex(256) + process = await asyncio.create_subprocess_shell( command, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, - env={"USING_BETTERPROTO_CLI": str(kwargs.get("from_cli", False)), **os.environ}, + env={ + "USING_BETTERPROTO_CLI": str(kwargs.get("from_cli", False)), + "BETTERPROTO_STOP_KEYWORD": secret_word, + **os.environ, + }, ) - stdout, stderr = await process.communicate() - if use_betterproto: - # we put code on stderr so we can actually read it thank you google :))))) + stderr = await process.stderr.read() + if stderr.find(secret_word.encode()) == -1: + handle_error(stderr, files) try: - request = CodeGeneratorRequest().parse(stderr) - except Exception: - handle_error(stderr.decode(), files) + stderr, data = stderr.split(secret_word.encode()) + except TypeError: + return await compile_protobufs( + *files, + output=output, + use_protoc=use_protoc, + use_betterproto=use_betterproto, + **kwargs, + ) # you've exceptionally lucky - if request._unknown_fields: - handle_error(stderr.decode(), files) + if stderr: + handle_error(stderr, files) + + request = CodeGeneratorRequest().parse(data) # Generate code response = await utils.to_thread(generate_code, request, **kwargs) @@ -111,12 +145,12 @@ async def compile_protobufs( ) ) - stderr = b"" + stdout, stderr = await process.communicate() if stderr: - handle_error(stderr.decode(), files) + handle_error(stderr, files) if process.returncode != 0: - raise CLIError(stderr.decode()) + raise CompilerError(stderr.decode()) return stdout.decode(), stderr.decode() diff --git 
a/src/betterproto/plugin/cli/utils.py b/src/betterproto/plugin/cli/utils.py index f8e150045..968a1f186 100644 --- a/src/betterproto/plugin/cli/utils.py +++ b/src/betterproto/plugin/cli/utils.py @@ -5,7 +5,7 @@ import sys from collections import defaultdict from pathlib import Path -from typing import Awaitable, Callable, TypeVar, Any, List, Set +from typing import Awaitable, Callable, TypeVar, Any, List, Set, Iterable, Optional from . import USE_PROTOC @@ -103,6 +103,7 @@ async def to_thread(func: Callable[..., T], *args: Any, **kwargs: Any) -> T: return await loop.run_in_executor(None, partial) -if __name__ == "__main__": - os.getcwd = lambda: "/Users/gobot1234/PycharmProjects/betterproto/tests/inputs" - print(get_files(("bool", "bool/bool.proto", "casing/casing.proto"))) +def find(predicate: Callable[[T], bool], iterable: Iterable[T]) -> Optional[T]: + for i in iterable: + if predicate(i): + return i diff --git a/src/betterproto/plugin/main.py b/src/betterproto/plugin/main.py index c2d1a089e..9627aac16 100755 --- a/src/betterproto/plugin/main.py +++ b/src/betterproto/plugin/main.py @@ -1,7 +1,10 @@ #!/usr/bin/env python import os +import signal import sys +import time +from socket import socket import rich @@ -16,8 +19,9 @@ def main() -> None: data = sys.stdin.buffer.read() if os.getenv("USING_BETTERPROTO_CLI") == "True": - # Write the data to stderr for cli - sys.stderr.buffer.write(data) # need to figure out how to potentially lock this + sys.stderr.buffer.write(os.environ["BETTERPROTO_STOP_KEYWORD"].encode()) + sys.stderr.buffer.write(data) + sys.stdout.buffer.write(b"") else: # Apply Work around for proto2/3 difference in protoc messages From f807e395152cc2d83ceaf76bd069f2d799e6e4a3 Mon Sep 17 00:00:00 2001 From: gobot1234 Date: Sun, 31 Jan 2021 16:08:13 +0000 Subject: [PATCH 33/46] Finish up --- pyproject.toml | 10 +++- src/betterproto/plugin/__init__.py | 7 ++- src/betterproto/plugin/cli/commands.py | 83 +++++++++++++------------- src/betterproto/plugin/cli/runner.py | 64 ++++++++++++-------- src/betterproto/plugin/cli/utils.py | 8 +-- tests/generate.py | 37 ++++-------- 6 files changed, 113 insertions(+), 96 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 24a6b3112..de955c53f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,7 +54,15 @@ format = { cmd = "black . --exclude tests/output_", help = "Apply black clean = { cmd = "rm -rf .coverage .mypy_cache .pytest_cache dist betterproto.egg-info **/__pycache__ tests/output_*", help = "Clean out generated files from the workspace" } docs = { cmd = "sphinx-build docs docs/build", help = "Build the sphinx docs"} bench = { shell = "asv run master^! && asv run HEAD^! && asv compare master HEAD", help = "Benchmark current commit vs. 
master branch"} -generate_lib = { cmd = "betterproto compile /usr/local/bin/include/google/protobuf --output=src/betterproto/lib", help = "Regenerate the types in betterproto.lib.google"} +generate_lib.cmd = """ +protoc + --plugin=protoc-gen-custom=src/betterproto/plugin/main.py + --custom_opt=INCLUDE_GOOGLE + --custom_out=src/betterproto/lib + -I /usr/local/include/ + /usr/local/include/google/protobuf/**/*.proto +""" +generate_lib.help = "Regenerate the types in betterproto.lib.google" # CI tasks full-test = { shell = "poe generate && tox", help = "Run tests with multiple pythons" } diff --git a/src/betterproto/plugin/__init__.py b/src/betterproto/plugin/__init__.py index cb47cca4e..25d80288f 100644 --- a/src/betterproto/plugin/__init__.py +++ b/src/betterproto/plugin/__init__.py @@ -10,11 +10,14 @@ "included." ) -STDLIB_MODULES = getattr(sys, "module_names", [ +STDLIB_MODULES = getattr( + sys, + "module_names", + [ p.with_suffix("").name for p in Path(traceback.__file__).parent.iterdir() if p.suffix == ".py" or p.is_dir() - ] + ], ) diff --git a/src/betterproto/plugin/cli/commands.py b/src/betterproto/plugin/cli/commands.py index 018c25093..87814656c 100644 --- a/src/betterproto/plugin/cli/commands.py +++ b/src/betterproto/plugin/cli/commands.py @@ -1,18 +1,14 @@ +import sys from pathlib import Path from typing import List, Optional -import typer import rich +import typer from rich.syntax import Syntax from ..models import monkey_patch_oneof_index -from . import ( - DEFAULT_LINE_LENGTH, - USE_PROTOC, - VERBOSE, - utils, -) -from .errors import CLIError, ProtobufSyntaxError +from . import DEFAULT_LINE_LENGTH, USE_PROTOC, VERBOSE, utils +from .errors import CLIError, ProtobufSyntaxError, UnusedImport from .runner import compile_protobufs monkey_patch_oneof_index() @@ -69,40 +65,45 @@ async def compile( return rich.print("[bold]No files found to compile") for output_path, protos in files.items(): - try: - output = ( - output or (Path(output_path.parent.name) / output_path.name).resolve() - ) - output.mkdir(exist_ok=True, parents=True) - await compile_protobufs( - *protos, - output=output, - verbose=verbose, - use_protoc=protoc, - generate_services=generate_services, - line_length=line_length, - from_cli=True, - ) - except ProtobufSyntaxError as exc: - rich.print( - f"[red]File {str(exc.file).strip()}:\n", - Syntax( - exc.file.read_text(), - "proto", - line_numbers=True, - line_range=(max(exc.lineno - 5, 0), exc.lineno), - ), # TODO switch to .from_path but it appears to be bugged and doesnt render properly - f"{' ' * (exc.offset + 3)}^\nSyntaxError: {exc.msg}[red]", - ) - except CLIError as exc: - failed_files = "\n".join(f" - {file}" for file in protos) - rich.print( - f"[red]{'Protoc' if protoc else 'GRPC'} failed to generate outputs for:\n\n" - f"{failed_files}\n\nSee the output for the issue:\n{' '.join(exc.args)}[red]", - ) + output = output or (Path(output_path.parent.name) / output_path.name).resolve() + output.mkdir(exist_ok=True, parents=True) + + errors = await compile_protobufs( + *protos, + output=output, + verbose=verbose, + use_protoc=protoc, + generate_services=generate_services, + line_length=line_length, + from_cli=True, + ) + + for error in errors: + if isinstance(error, ProtobufSyntaxError): + rich.print( + f"[red]File {str(error.file).strip()}:\n", + Syntax( + error.file.read_text(), + "proto", + line_numbers=True, + line_range=(max(error.lineno - 5, 0), error.lineno), + ), # TODO switch to .from_path but it appears to be bugged and doesnt render properly + f"{' ' * 
(error.offset + 3)}^\nSyntaxError: {error.msg}[red]", + file=sys.stderr, + ) + elif isinstance(error, Warning): + rich.print(f"Warning: {error}", file=sys.stderr) + elif isinstance(error, CLIError): + failed_files = "\n".join(f" - {file}" for file in protos) + rich.print( + f"[red]{'Protoc' if protoc else 'GRPC'} failed to generate outputs for:\n\n" + f"{failed_files}\n\nSee the output for the issue:\n{' '.join(error.args)}[red]", + file=sys.stderr, + ) - else: + if not errors or all(isinstance(e, Warning) for e in errors): rich.print( - f"[bold green]Finished generating output for {len(protos)} files, " + f"[bold green]Finished generating output for " + f"{len(protos)} file{'s' if len(protos) != 1 else ''}, " f"output is in {output.as_posix()}" ) diff --git a/src/betterproto/plugin/cli/runner.py b/src/betterproto/plugin/cli/runner.py index 9c0df5edb..f5e980084 100644 --- a/src/betterproto/plugin/cli/runner.py +++ b/src/betterproto/plugin/cli/runner.py @@ -3,9 +3,8 @@ import os import re import secrets -import warnings from concurrent.futures import ProcessPoolExecutor -from typing import TYPE_CHECKING, Any, NoReturn, Tuple +from typing import TYPE_CHECKING, Any, List, Tuple, Optional from ...lib.google.protobuf.compiler import ( CodeGeneratorRequest, @@ -13,7 +12,7 @@ ) from ..parser import generate_code from . import USE_PROTOC, utils -from .errors import CompilerError, ProtobufSyntaxError, UnusedImport +from .errors import CLIError, CompilerError, ProtobufSyntaxError, UnusedImport if TYPE_CHECKING: from pathlib import Path @@ -30,42 +29,59 @@ def write_file(output: "Path", file: CodeGeneratorResponseFile) -> None: pass -def handle_error(data: bytes, files: Tuple["Path", ...]) -> NoReturn: +def handle_error(data: bytes, files: Tuple["Path", ...]) -> List[CLIError]: + errors = [] matches = re.finditer( rb"^(?P.+):(?P\d+):(?P\d+): (?P.*)", data, ) if not matches: - raise CompilerError(data.decode().strip()) + return [CompilerError(data.decode().strip())] for match in matches: - file = utils.find(lambda f: f.as_posix().endswith(match["filename"].decode()), files) + file = utils.find( + lambda f: f.as_posix().endswith(match["filename"].decode()), files + ) if match["message"].startswith(b"warning: "): - import_matches = list(re.finditer(rb"warning: Import (?P.+) is unused\.", match["message"])) + import_matches = list( + re.finditer( + rb"warning: Import (?P.+) is unused\.", + match["message"], + ) + ) if import_matches: for import_match in import_matches: unused_import = utils.find( - lambda f: file.as_posix().endswith(import_match["unused_import"].decode()), - files + lambda f: file.as_posix().endswith( + import_match["unused_import"].decode() + ), + files, ) if unused_import is None: unused_import = import_match["unused_import"].decode() - warning = UnusedImport(match["message"].decode().strip(), file, unused_import) + warning = UnusedImport( + match["message"].decode().strip(), file, unused_import + ) else: - warning = Warning(match["message"].lstrip(b"warning: ").strip().decode()) + warning = Warning( + match["message"].lstrip(b"warning: ").strip().decode() + ) - warnings.simplefilter("once") - warnings.warn(warning) + errors.append(warning) continue - raise ProtobufSyntaxError( - match["message"].decode().strip(), - file, - int(match["lineno"]), - int(match["offset"]), + errors.append( + ProtobufSyntaxError( + match["message"].decode().strip(), + file, + int(match["lineno"]), + int(match["offset"]), + ) ) + return errors + async def compile_protobufs( *files: "Path", @@ -73,7 +89,7 @@ 
async def compile_protobufs( use_protoc: bool = USE_PROTOC, use_betterproto: bool = True, **kwargs: Any, -) -> Tuple[str, str]: +) -> List[CLIError]: """ A programmatic way to compile protobufs. @@ -112,7 +128,7 @@ async def compile_protobufs( if use_betterproto: stderr = await process.stderr.read() if stderr.find(secret_word.encode()) == -1: - handle_error(stderr, files) + return handle_error(stderr, files) try: stderr, data = stderr.split(secret_word.encode()) @@ -126,7 +142,7 @@ async def compile_protobufs( ) # you've exceptionally lucky if stderr: - handle_error(stderr, files) + return handle_error(stderr, files) request = CodeGeneratorRequest().parse(data) @@ -148,9 +164,9 @@ async def compile_protobufs( stdout, stderr = await process.communicate() if stderr: - handle_error(stderr, files) + return handle_error(stderr, files) if process.returncode != 0: - raise CompilerError(stderr.decode()) + return [CompilerError(stderr.decode())] - return stdout.decode(), stderr.decode() + return [] \ No newline at end of file diff --git a/src/betterproto/plugin/cli/utils.py b/src/betterproto/plugin/cli/utils.py index 968a1f186..9dda32406 100644 --- a/src/betterproto/plugin/cli/utils.py +++ b/src/betterproto/plugin/cli/utils.py @@ -5,7 +5,7 @@ import sys from collections import defaultdict from pathlib import Path -from typing import Awaitable, Callable, TypeVar, Any, List, Set, Iterable, Optional +from typing import Any, Awaitable, Callable, Iterable, List, Optional, Set, TypeVar from . import USE_PROTOC @@ -52,12 +52,12 @@ def generate_command( use_protoc: bool = USE_PROTOC, implementation: str = "betterproto_", ) -> str: - # TODO make this actually work :) --plugin=protoc-gen-custom=src/betterproto/plugin/main.py - command = [ f"--proto_path={files[0].parent.as_posix()}", f"--python_{implementation}out={output.as_posix()}", - *[f'"{file.as_posix()}"' for file in files], # ensure paths with spaces in the name get parsed correctly + *[ + f'"{file.as_posix()}"' for file in files + ], # ensure paths with spaces in the name get parsed correctly ] if use_protoc: command.insert(0, "protoc") diff --git a/tests/generate.py b/tests/generate.py index f7725cc55..be6bec9d1 100755 --- a/tests/generate.py +++ b/tests/generate.py @@ -60,12 +60,7 @@ async def generate(whitelist: Set[Path], verbose: bool) -> None: for test_case_name, exception in zip( sorted(test_case_names), await asyncio.gather(*generation_tasks) ): - if exception is not None: # TODO this broke - import traceback - - traceback.print_exception( - exception.__class__, exception, exception.__traceback__ - ) + if exception is not None: failed_test_cases.append(test_case_name) if failed_test_cases: @@ -93,31 +88,25 @@ async def generate_test_case_output( clear_directory(test_case_output_path_betterproto) files = list(test_case_input_path.glob("*.proto")) - try: - (ref_out, ref_err), (plg_out, plg_err) = await asyncio.gather( - compile_protobufs( - *files, output=test_case_output_path_reference, use_betterproto=False - ), - compile_protobufs( - *files, output=test_case_output_path_betterproto, from_cli=True - ), - ) - except Exception as exc: - return exc + ref_errs, plg_errs = await asyncio.gather( + compile_protobufs( + *files, output=test_case_output_path_reference, use_betterproto=False + ), + compile_protobufs( + *files, output=test_case_output_path_betterproto, from_cli=True + ), + ) rich.print(f"[bold red]Generated output for {test_case_name!r}") if verbose: - if ref_out: - rich.print(f"[red]{ref_out}") - if ref_err: + for ref_err in ref_errs: 
            rich.print(f"[red]{ref_err}", file=sys.stderr)
-        if plg_out:
-            rich.print(f"[red]{plg_out}")
-        if plg_err:
+        for plg_err in plg_errs:
             rich.print(f"[red]{plg_err}", file=sys.stderr)
-    sys.stdout.flush()
     sys.stderr.flush()
 
+    return ref_errs or plg_errs or None
+
 
 @app.command(context_settings={"help_option_names": ["-h", "--help"]})
 @utils.run_sync

From 944a6bedc5c5f252f77f1e29af30948ffbedc374 Mon Sep 17 00:00:00 2001
From: gobot1234
Date: Mon, 1 Feb 2021 09:21:54 +0000
Subject: [PATCH 34/46] Update docs

---
 src/betterproto/plugin/cli/runner.py |  2 +-
 src/betterproto/plugin/parser.py     | 31 ++++++++++++++++------------
 2 files changed, 19 insertions(+), 14 deletions(-)

diff --git a/src/betterproto/plugin/cli/runner.py b/src/betterproto/plugin/cli/runner.py
index f5e980084..0ab439e03 100644
--- a/src/betterproto/plugin/cli/runner.py
+++ b/src/betterproto/plugin/cli/runner.py
@@ -139,7 +139,7 @@ async def compile_protobufs(
         use_protoc=use_protoc,
         use_betterproto=use_betterproto,
         **kwargs,
-    ) # you've exceptionally lucky
+    ) # you're exceptionally un/lucky
diff --git a/src/betterproto/plugin/parser.py b/src/betterproto/plugin/parser.py
index 2540aed0a..0ce1db979 100644
--- a/src/betterproto/plugin/parser.py
+++ b/src/betterproto/plugin/parser.py
@@ -67,7 +67,6 @@ def generate_code(
     include_google: bool = False,
     line_length: int = black.DEFAULT_LINE_LENGTH,
     generate_services: bool = True,
-    separate_files: bool = False,
     verbose: bool = False,
     from_cli: bool = False,
 ) -> CodeGeneratorResponse:
@@ -75,18 +74,24 @@
 
     Parameters
     ----------
-    request
-    include_google
-    line_length
-    generate_services
-    separate_files
-    verbose
-    from_cli
+    request: :class:`.CodeGeneratorRequest`
+        The request to generate the protobufs from.
+    include_google: :class:`bool`
+        Whether or not to include the google protobufs in the response files.
+    line_length: :class:`int`
+        The line length to pass to black for formatting.
+    generate_services: :class:`bool`
+        Whether or not to include services.
+    verbose: :class:`bool`
+        Whether or not to run the plugin in verbose mode.
+    from_cli: :class:`bool`
+        Whether or not the plugin is being run from the CLI.
 
     Returns
     -------
     :class:`.CodeGeneratorResponse`
-    """  # TODO
+        The response for the request.
+ """ response = CodeGeneratorResponse() plugin_options = request.parameter.split(",") if request.parameter else [] @@ -114,7 +119,7 @@ def generate_code( request_data.output_packages[output_package_name].input_files.append( proto_file ) - if from_cli: + if verbose or from_cli: progress.update(reading_progress_bar, advance=1) # Read Messages and Enums @@ -138,7 +143,7 @@ def generate_code( source_file=proto_input_file, output_package=output_package, ) - if from_cli: + if verbose or from_cli: progress.update(parsing_progress_bar, advance=1) # Read Services @@ -158,7 +163,7 @@ def generate_code( for proto_input_file in output_package.input_files: for index, service in enumerate(proto_input_file.service): read_protobuf_service(service, index, output_package) - if from_cli: + if verbose or from_cli: progress.update(parsing_progress_bar, advance=1) # Generate output files @@ -183,7 +188,7 @@ def generate_code( ), ) ) - if from_cli: + if verbose or from_cli: progress.update(compiling_progress_bar, advance=1) # Make each output directory a package with __init__ file From 5a1819ec72d2dc6d950081b21034e25593cb4156 Mon Sep 17 00:00:00 2001 From: gobot1234 Date: Mon, 1 Feb 2021 11:40:25 +0000 Subject: [PATCH 35/46] Update docs again --- README.md | 40 +++++++++++++++------------- docs/migrating.rst | 2 +- docs/quick-start.rst | 16 +++++------ src/betterproto/plugin/cli/runner.py | 2 +- 4 files changed, 29 insertions(+), 31 deletions(-) diff --git a/README.md b/README.md index 8ca590912..29ed71cfc 100644 --- a/README.md +++ b/README.md @@ -37,11 +37,12 @@ This project exists because I am unhappy with the state of the official Google p - Uses `SerializeToString()` rather than the built-in `__bytes__()` - Special wrapped types don't use Python's `None` - Timestamp/duration types don't use Python's built-in `datetime` module + This project is a reimplementation from the ground up focused on idiomatic modern Python to help fix some of the above. While it may not be a 1:1 drop-in replacement due to changed method names and call patterns, the wire format is identical. ## Installation -First, install the package. Note that the `[compiler]` feature flag tells it to install extra dependencies only needed by the `protoc` plugin: +First, install the package. Note that the `[compiler]` feature flag tells it to install extra dependencies only needed by the code generator: ```sh # Install both the library and compiler @@ -70,10 +71,9 @@ message Greeting { } ``` -You can run the following to invoke protoc directly: +To compile the protobuf you would run the following: ```sh -mkdir lib betterproto compile example.proto --output=lib ``` @@ -161,16 +161,13 @@ from grpclib.client import Channel async def main(): - channel = Channel(host="127.0.0.1", port=50051) - service = echo.EchoStub(channel) - response = await service.echo(value="hello", extra_times=1) - print(response) - - async for response in service.echo_stream(value="hello", extra_times=1): - print(response) - - # don't forget to close the channel when done! 
- channel.close() + async with Channel(host="127.0.0.1", port=50051) as channel: + service = echo.EchoStub(channel) + response = await service.echo(value="hello", extra_times=1) + print(response) + + async for response in service.echo_stream(value="hello", extra_times=1): + print(response) if __name__ == "__main__": @@ -233,23 +230,23 @@ You can use `betterproto.which_one_of(message, group_name)` to determine which o ```py >>> test = Test() >>> betterproto.which_one_of(test, "foo") -["", None] +("", None) >>> test.on = True >>> betterproto.which_one_of(test, "foo") -["on", True] +("on", True) # Setting one member of the group resets the others. >>> test.count = 57 >>> betterproto.which_one_of(test, "foo") -["count", 57] +("count", 57) >>> test.on False # Default (zero) values also work. >>> test.name = "" >>> betterproto.which_one_of(test, "foo") -["name", ""] +("name", "") >>> test.count 0 >>> test.on @@ -265,7 +262,7 @@ Again this is a little different than the official Google code generator: # New way (this project) >>> betterproto.which_one_of(message, "group") -["foo", "foo's value"] +("foo", "foo's value") ``` ### Well-Known Google Types @@ -405,7 +402,12 @@ Normally, the plugin does not compile any references to `google.protobuf`, since Assuming your `google.protobuf` source files (included with all releases of `protoc`) are located in `/usr/local/include`, you can regenerate them as follows: ```sh -betterproto compile /usr/local/include/google/protobuf/*.proto --output=src/betterproto/lib +protoc \ + --plugin=protoc-gen-custom=betterproto/plugin.py \ + --custom_opt=INCLUDE_GOOGLE \ + --custom_out=betterproto/lib \ + -I /usr/local/include/ \ + /usr/local/include/google/protobuf/*.proto ``` ### TODO diff --git a/docs/migrating.rst b/docs/migrating.rst index 0f18eac5f..f6680fb0c 100644 --- a/docs/migrating.rst +++ b/docs/migrating.rst @@ -4,7 +4,7 @@ Migrating Guide Google's protocolbuffers ------------------------ -betterproto has a mostly 1 to 1 drop in replacement for Google's protocolbuffers (after +betterproto has a mostly 1 to 1 drop in replacement for Google's protocol buffers (after regenerating your protobufs of course) although there are some minor differences. .. note:: diff --git a/docs/quick-start.rst b/docs/quick-start.rst index b22bd7fa8..44d8efbf1 100644 --- a/docs/quick-start.rst +++ b/docs/quick-start.rst @@ -40,11 +40,10 @@ Given you installed the compiler and have a proto file, e.g ``example.proto``: string message = 1; } -To compile the proto you would run the following: +To compile the protobuf you would run the following: .. code-block:: sh - mkdir hello betterproto compile example.proto --output=lib This will generate ``lib/__init__.py`` which looks like: @@ -131,16 +130,13 @@ The generated client can be used like so: async def main(): - channel = Channel(host="127.0.0.1", port=50051) - service = echo.EchoStub(channel) - response = await service.echo(value="hello", extra_times=1) - print(response) - - async for response in service.echo_stream(value="hello", extra_times=1): + async with Channel(host="127.0.0.1", port=50051) as channel: + service = echo.EchoStub(channel) + response = await service.echo(value="hello", extra_times=1) print(response) - # don't forget to close the channel when you're done! 
- channel.close() + async for response in service.echo_stream(value="hello", extra_times=1): + print(response) asyncio.run(main()) # python 3.7 only diff --git a/src/betterproto/plugin/cli/runner.py b/src/betterproto/plugin/cli/runner.py index 0ab439e03..81eec8cb7 100644 --- a/src/betterproto/plugin/cli/runner.py +++ b/src/betterproto/plugin/cli/runner.py @@ -169,4 +169,4 @@ async def compile_protobufs( if process.returncode != 0: return [CompilerError(stderr.decode())] - return [] \ No newline at end of file + return [] From f31eb9816f744a43d8872e05795399614deb2141 Mon Sep 17 00:00:00 2001 From: gobot1234 Date: Mon, 1 Feb 2021 11:42:47 +0000 Subject: [PATCH 36/46] Regen lock --- poetry.lock | 74 ++++++++++++++++++++++++++++++++++++----------------- 1 file changed, 51 insertions(+), 23 deletions(-) diff --git a/poetry.lock b/poetry.lock index 368be9fc3..c54a79611 100644 --- a/poetry.lock +++ b/poetry.lock @@ -191,7 +191,7 @@ cwcwidth = "*" [[package]] name = "cwcwidth" -version = "0.1.1" +version = "0.1.2" description = "Python bindings for wc(s)width" category = "dev" optional = false @@ -356,7 +356,7 @@ testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake [[package]] name = "jinja2" -version = "2.11.2" +version = "2.11.3" description = "A very fast and expressive template engine." category = "main" optional = false @@ -418,7 +418,7 @@ python-versions = "*" [[package]] name = "packaging" -version = "20.8" +version = "20.9" description = "Core utilities for Python packages" category = "dev" optional = false @@ -572,7 +572,7 @@ dev = ["pre-commit", "tox", "pytest-asyncio"] [[package]] name = "pytz" -version = "2020.5" +version = "2021.1" description = "World timezone definitions, modern and historical" category = "dev" optional = false @@ -606,7 +606,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] [[package]] name = "rich" -version = "9.9.0" +version = "9.10.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" category = "main" optional = true @@ -772,7 +772,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "tox" -version = "3.21.2" +version = "3.21.3" description = "tox is a generic virtualenv management and test command line tool" category = "dev" optional = false @@ -828,7 +828,7 @@ python-versions = "*" [[package]] name = "urllib3" -version = "1.26.2" +version = "1.26.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "dev" optional = false @@ -841,7 +841,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.4.0" +version = "20.4.2" description = "Virtual Python Environment builder" category = "dev" optional = false @@ -1001,7 +1001,35 @@ curtsies = [ {file = "curtsies-0.3.5.tar.gz", hash = "sha256:a587ff3335667a32be7afed163f60a1c82c5d9c848d8297534a06fd29de20dbd"}, ] cwcwidth = [ - {file = "cwcwidth-0.1.1.tar.gz", hash = "sha256:042cdf80d80a836935f700d8e1c34270f82a627fc07f7b5ec1e8cec486e1d755"}, + {file = "cwcwidth-0.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:043642a199463e29d682d5c1f70c9c7a84eddfafa4e08557d08f680d38b7822f"}, + {file = "cwcwidth-0.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:2c1e4014fb2607c36ffac47341ff7d696adfad1b2f39c565850bf0eb08952695"}, + {file = "cwcwidth-0.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4d98c16a97bc88d03acc8d1b895a492048efd43b0d687fa0f537ff5e753a4d1b"}, + {file = "cwcwidth-0.1.2-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:1b74b5fb63a1b2172c24d229a5317be08901f84b4692ca036de4e11219f72c17"}, + {file = "cwcwidth-0.1.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:14ff60534b2a58d2c8708fcd3c763809b9ecab0ed5992bf6676f7d1d8e4731ea"}, + {file = "cwcwidth-0.1.2-cp36-cp36m-win32.whl", hash = "sha256:86b81629e7c74830e90db6d058790d557500f3fc7ced31241a9717f1aeb1c7f1"}, + {file = "cwcwidth-0.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f77e297f7df7d797d50df58c612e1838023cd83f6dbfc1c6f8b146ef87fc9cfd"}, + {file = "cwcwidth-0.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ef66a357834110b9a17a481cfb4562996fc8e832c633c67cfb3617e17407a2ad"}, + {file = "cwcwidth-0.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:a412d5504b7808bc1256c06659475d763594dbfdbd1d8b54e85403424a18e025"}, + {file = "cwcwidth-0.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d0da4dca360c273e20c1ab031cb6f319baec60f9f70e6c7e7849c3d4d65ace34"}, + {file = "cwcwidth-0.1.2-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:9ae976f5911c05146d3e55dd60a9e87da694ea2fbda9314a40280ff27e53b8f2"}, + {file = "cwcwidth-0.1.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:57be6edc9273bf298d0c371f8cbab55d5a556135d1733db620f518a25aa00ecf"}, + {file = "cwcwidth-0.1.2-cp37-cp37m-win32.whl", hash = "sha256:e6ae7382cdbf1bcb8c7f30b04c70ce4a794b63145840ba7441b79f5a5f751cd7"}, + {file = "cwcwidth-0.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c58046a26719e3eef943b0eb09408d10fc896bda4b867c86b3642c8e5effffc2"}, + {file = "cwcwidth-0.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b1684ba81edfbcd083e864c68cd35b9baf165e6e4dc288e6441141abc6ce9667"}, + {file = "cwcwidth-0.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:f6a40d778a6b6838ff4288b1860f80a35481a9e14d1d20180f08cebf22c36370"}, + {file = "cwcwidth-0.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:bc3fd6a6ff517fed4a6e35ec8985940738284a625b916ccc0d60e4d103c28a27"}, + {file = "cwcwidth-0.1.2-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:1fdaca6c96b0f2f43267489153778fcfd763bcdfae99b41aff88bd1e823fb032"}, + {file = "cwcwidth-0.1.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:e81f5b0a972c828b2a30f101e398ddb96a5e70430752e2992a00fe26163038f5"}, + {file = "cwcwidth-0.1.2-cp38-cp38-win32.whl", hash = "sha256:86158fb6060fd2091754a4831eb311f679ae50100df2fe6ed45193e3103f9db1"}, + {file = "cwcwidth-0.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:0e453ea7282b209af887e275a3aea6efc1ce4295c0465d3598045473ea1b3ac8"}, + {file = 
"cwcwidth-0.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5854e7c62bd0cd3962eb6de662f8df30f82300f3f7d7a0b77b75cc6f2f294a2a"}, + {file = "cwcwidth-0.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9253d6ea2bf7eef90134ee58b771f4e9f5114d5f4108c26e3e7c1cefae11863b"}, + {file = "cwcwidth-0.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f8c9bfb9dc7b5ee742ab5394686c8b6db820b453e60f57a1cc35bdac25ede98a"}, + {file = "cwcwidth-0.1.2-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:afc4fa179c4ae769282bb5a0ee2ce3cee7a307a40f7e6ff5395e07ac824c2080"}, + {file = "cwcwidth-0.1.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:2722e94e1bf579a0856d399a6bdb6d4fb07dbd8a683838ca6e4e25e289a66a38"}, + {file = "cwcwidth-0.1.2-cp39-cp39-win32.whl", hash = "sha256:1a16afea349abba714cf9b6bb42e4a66d2b0917813d5ce6db1baed2f9e1557ed"}, + {file = "cwcwidth-0.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:d753e5bb7da61038c912dae91abcb02aeef7a8fa2c48ed9b2fb6739cc013d0ed"}, + {file = "cwcwidth-0.1.2.tar.gz", hash = "sha256:0c4d39d9d72183d228d8659f8efa751d57e4762491660e8ba13679b7f64fce58"}, ] dataclasses = [ {file = "dataclasses-0.7-py3-none-any.whl", hash = "sha256:3459118f7ede7c8bea0fe795bff7c6c2ce287d01dd226202f7c9ebc0610a7836"}, @@ -1192,8 +1220,8 @@ importlib-resources = [ {file = "importlib_resources-5.1.0.tar.gz", hash = "sha256:bfdad047bce441405a49cf8eb48ddce5e56c696e185f59147a8b79e75e9e6380"}, ] jinja2 = [ - {file = "Jinja2-2.11.2-py2.py3-none-any.whl", hash = "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"}, - {file = "Jinja2-2.11.2.tar.gz", hash = "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0"}, + {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, + {file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"}, ] markupsafe = [ {file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"}, @@ -1294,8 +1322,8 @@ mypy-extensions = [ {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] packaging = [ - {file = "packaging-20.8-py2.py3-none-any.whl", hash = "sha256:24e0da08660a87484d1602c30bb4902d74816b6985b93de36926f5bc95741858"}, - {file = "packaging-20.8.tar.gz", hash = "sha256:78598185a7008a470d64526a8059de9aaa449238f280fc9eb6b13ba6c4109093"}, + {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, + {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, ] pastel = [ {file = "pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364"}, @@ -1361,8 +1389,8 @@ pytest-mock = [ {file = "pytest_mock-3.5.1-py3-none-any.whl", hash = "sha256:379b391cfad22422ea2e252bdfc008edd08509029bcde3c25b2c0bd741e0424e"}, ] pytz = [ - {file = "pytz-2020.5-py2.py3-none-any.whl", hash = "sha256:16962c5fb8db4a8f63a26646d8886e9d769b6c511543557bc84e9569fb9a9cb4"}, - {file = "pytz-2020.5.tar.gz", hash = "sha256:180befebb1927b16f6b57101720075a984c019ac16b1b7575673bea42c6c3da5"}, + {file = "pytz-2021.1-py2.py3-none-any.whl", hash = "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"}, + {file = "pytz-2021.1.tar.gz", hash = 
"sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da"}, ] regex = [ {file = "regex-2020.11.13-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8b882a78c320478b12ff024e81dc7d43c1462aa4a3341c754ee65d857a521f85"}, @@ -1412,8 +1440,8 @@ requests = [ {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, ] rich = [ - {file = "rich-9.9.0-py3-none-any.whl", hash = "sha256:d376396cb3793a042f6167cd613a31a370ea2c5ec1bbdf76a5c9e9c588ccff12"}, - {file = "rich-9.9.0.tar.gz", hash = "sha256:0bd8f42c3a03b7ef5e311d5e37f47bea9d268f541981c169072be5869c007957"}, + {file = "rich-9.10.0-py3-none-any.whl", hash = "sha256:3070d53e3a93864de351c1091af1deb25f41e6051b33e485d4626b591c0cfdb3"}, + {file = "rich-9.10.0.tar.gz", hash = "sha256:e0f2db62a52536ee32f6f584a47536465872cae2b94887cf1f080fb9eaa13eb2"}, ] six = [ {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, @@ -1464,8 +1492,8 @@ tomlkit = [ {file = "tomlkit-0.7.0.tar.gz", hash = "sha256:ac57f29693fab3e309ea789252fcce3061e19110085aa31af5446ca749325618"}, ] tox = [ - {file = "tox-3.21.2-py2.py3-none-any.whl", hash = "sha256:0aa777ee466f2ef18e6f58428c793c32378779e0a321dbb8934848bc3e78998c"}, - {file = "tox-3.21.2.tar.gz", hash = "sha256:f501808381c01c6d7827c2f17328be59c0a715046e94605ddca15fb91e65827d"}, + {file = "tox-3.21.3-py2.py3-none-any.whl", hash = "sha256:76df3db6eee929bb62bdbacca5bb6bc840669d98e86a015b7a57b7df0a6eaf8b"}, + {file = "tox-3.21.3.tar.gz", hash = "sha256:854e6e4a71c614b488f81cb88df3b92edcb1a9ec43d4102e6289e9669bbf7f18"}, ] typed-ast = [ {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70"}, @@ -1509,12 +1537,12 @@ typing-extensions = [ {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, ] urllib3 = [ - {file = "urllib3-1.26.2-py2.py3-none-any.whl", hash = "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"}, - {file = "urllib3-1.26.2.tar.gz", hash = "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08"}, + {file = "urllib3-1.26.3-py2.py3-none-any.whl", hash = "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80"}, + {file = "urllib3-1.26.3.tar.gz", hash = "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73"}, ] virtualenv = [ - {file = "virtualenv-20.4.0-py2.py3-none-any.whl", hash = "sha256:227a8fed626f2f20a6cdb0870054989f82dd27b2560a911935ba905a2a5e0034"}, - {file = "virtualenv-20.4.0.tar.gz", hash = "sha256:219ee956e38b08e32d5639289aaa5bd190cfbe7dafcb8fa65407fca08e808f9c"}, + {file = "virtualenv-20.4.2-py2.py3-none-any.whl", hash = "sha256:2be72df684b74df0ea47679a7df93fd0e04e72520022c57b479d8f881485dbe3"}, + {file = "virtualenv-20.4.2.tar.gz", hash = "sha256:147b43894e51dd6bba882cf9c282447f780e2251cd35172403745fc381a0a80d"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, From 48a5e7c27f63c726fafc106e24a352bd3001dc41 Mon Sep 17 00:00:00 2001 From: gobot1234 Date: Sun, 21 Mar 2021 00:01:41 +0000 Subject: [PATCH 37/46] Cleanup code --- src/betterproto/plugin/__init__.py | 2 +- src/betterproto/plugin/cli/runner.py | 18 +++++++++--------- src/betterproto/plugin/cli/utils.py | 12 ------------ src/betterproto/plugin/main.py | 3 --- 4 files changed, 10 
insertions(+), 25 deletions(-)

diff --git a/src/betterproto/plugin/__init__.py b/src/betterproto/plugin/__init__.py
index 25d80288f..da00d3ff3 100644
--- a/src/betterproto/plugin/__init__.py
+++ b/src/betterproto/plugin/__init__.py
@@ -12,7 +12,7 @@
 
 STDLIB_MODULES = getattr(
     sys,
-    "module_names",
+    "builtin_module_names",
     [
         p.with_suffix("").name
         for p in Path(traceback.__file__).parent.iterdir()
diff --git a/src/betterproto/plugin/cli/runner.py b/src/betterproto/plugin/cli/runner.py
index 81eec8cb7..db6914f8c 100644
--- a/src/betterproto/plugin/cli/runner.py
+++ b/src/betterproto/plugin/cli/runner.py
@@ -4,11 +4,12 @@
 import re
 import secrets
 from concurrent.futures import ProcessPoolExecutor
-from typing import TYPE_CHECKING, Any, List, Tuple, Optional
+from typing import TYPE_CHECKING, Any, List, Tuple
 
 from ...lib.google.protobuf.compiler import (
     CodeGeneratorRequest,
     CodeGeneratorResponseFile,
+    CodeGeneratorResponse,
 )
 from ..parser import generate_code
 from . import USE_PROTOC, utils
@@ -23,10 +24,7 @@ def write_file(output: "Path", file: CodeGeneratorResponseFile) -> None:
     path = (output / file.name).resolve()
     path.parent.mkdir(parents=True, exist_ok=True)
-    try:
-        path.write_text(file.content)
-    except TypeError:
-        pass
+    path.write_text(file.content)
 
 
 def handle_error(data: bytes, files: Tuple["Path", ...]) -> List[CLIError]:
@@ -104,8 +102,8 @@ async def compile_protobufs(
 
     Returns
     -------
-    Tuple[:class:`str`, :class:`str`]
-        A tuple of the ``stdout`` and ``stderr`` from the invocation of protoc.
+    List[:class:`CLIError`]
+        A list of exceptions from protoc.
     """
     implementation = DEFAULT_IMPLEMENTATION if use_betterproto else ""
     command = utils.generate_command(
@@ -146,10 +144,12 @@ async def compile_protobufs(
 
     request = CodeGeneratorRequest().parse(data)
 
+    loop = asyncio.get_event_loop()
     # Generate code
-    response = await utils.to_thread(generate_code, request, **kwargs)
+    response: CodeGeneratorResponse = await loop.run_in_executor(
+        None, functools.partial(generate_code, request, **kwargs)
+    )
 
-    loop = asyncio.get_event_loop()
     with ProcessPoolExecutor() as process_pool:
         # write multiple files concurrently
         await asyncio.gather(
diff --git a/src/betterproto/plugin/cli/utils.py b/src/betterproto/plugin/cli/utils.py
index 9dda32406..a418556c0 100644
--- a/src/betterproto/plugin/cli/utils.py
+++ b/src/betterproto/plugin/cli/utils.py
@@ -1,6 +1,5 @@
 import asyncio
 import functools
-import os
 import platform
 import sys
 from collections import defaultdict
 from pathlib import Path
@@ -92,17 +91,6 @@ def wrapper(*args: Any, **kwargs: Any) -> T:
 
     return wrapper
 
 
-if sys.version_info[:2] >= (3, 9):
-    from asyncio import to_thread
-else:
-
-    async def to_thread(func: Callable[..., T], *args: Any, **kwargs: Any) -> T:
-        loop = asyncio.get_event_loop()
-        # no context vars
-        partial = functools.partial(func, *args, **kwargs)
-        return await loop.run_in_executor(None, partial)
-
-
 def find(predicate: Callable[[T], bool], iterable: Iterable[T]) -> Optional[T]:
     for i in iterable:
         if predicate(i):
diff --git a/src/betterproto/plugin/main.py b/src/betterproto/plugin/main.py
index 9627aac16..22838c6ef 100755
--- a/src/betterproto/plugin/main.py
+++ b/src/betterproto/plugin/main.py
@@ -1,10 +1,7 @@
 #!/usr/bin/env python
 
 import os
-import signal
 import sys
-import time
-from socket import socket
 
 import rich

From 8596a61c6a10381e0c807b0036cd4ded8e951c32 Mon Sep 17 00:00:00 2001
From: gobot1234
Date: Sun, 21 Mar 2021 00:01:52 +0000
Subject: [PATCH 38/46] Regen lock

---
 poetry.lock | 549
++++++++++++++++++++++++++-------------------------- 1 file changed, 277 insertions(+), 272 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0016caaec..2b56b8aac 100644 --- a/poetry.lock +++ b/poetry.lock @@ -168,7 +168,7 @@ test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] [[package]] name = "coverage" -version = "5.4" +version = "5.5" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -191,11 +191,11 @@ cwcwidth = "*" [[package]] name = "cwcwidth" -version = "0.1.2" +version = "0.1.4" description = "Python bindings for wc(s)width" category = "dev" optional = false -python-versions = ">= 3.6" +python-versions = "*" [[package]] name = "dataclasses" @@ -242,7 +242,7 @@ docs = ["sphinx"] [[package]] name = "grpcio" -version = "1.35.0" +version = "1.36.1" description = "HTTP/2-based RPC framework" category = "dev" optional = false @@ -252,18 +252,18 @@ python-versions = "*" six = ">=1.5.2" [package.extras] -protobuf = ["grpcio-tools (>=1.35.0)"] +protobuf = ["grpcio-tools (>=1.36.1)"] [[package]] name = "grpcio-tools" -version = "1.35.0" +version = "1.36.1" description = "Protobuf code generator for gRPC" category = "dev" optional = false python-versions = "*" [package.dependencies] -grpcio = ">=1.35.0" +grpcio = ">=1.36.1" protobuf = ">=3.5.0.post1,<4.0dev" [[package]] @@ -325,7 +325,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" -version = "3.4.0" +version = "3.7.3" description = "Read metadata from Python packages" category = "dev" optional = false @@ -341,7 +341,7 @@ testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake [[package]] name = "importlib-resources" -version = "5.1.0" +version = "5.1.2" description = "Read resources from Python packages" category = "dev" optional = false @@ -352,7 +352,7 @@ zipp = {version = ">=0.4", markers = "python_version < \"3.8\""} [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "pytest-black (>=0.3.7)", "pytest-mypy"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "pytest-black (>=0.3.7)", "pytest-mypy"] [[package]] name = "jinja2" @@ -378,7 +378,7 @@ python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" [[package]] name = "more-itertools" -version = "8.6.0" +version = "8.7.0" description = "More routines for operating on iterables, beyond itertools" category = "dev" optional = false @@ -459,7 +459,7 @@ dev = ["pre-commit", "tox"] [[package]] name = "poethepoet" -version = "0.9.0" +version = "0.10.0" description = "A task runner that works well with poetry." category = "dev" optional = false @@ -471,7 +471,7 @@ tomlkit = ">=0.6.0,<1.0.0" [[package]] name = "protobuf" -version = "3.14.0" +version = "3.15.6" description = "Protocol Buffers" category = "dev" optional = false @@ -490,7 +490,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.7.4" +version = "2.8.1" description = "Pygments is a syntax highlighting package written in Python." category = "main" optional = false @@ -580,7 +580,7 @@ python-versions = "*" [[package]] name = "regex" -version = "2020.11.13" +version = "2021.3.17" description = "Alternative regular expression module, to replace re." 
category = "main" optional = false @@ -606,7 +606,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] [[package]] name = "rich" -version = "9.10.0" +version = "9.13.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" category = "main" optional = true @@ -781,7 +781,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "tox" -version = "3.21.3" +version = "3.23.0" description = "tox is a generic virtualenv management and test command line tool" category = "dev" optional = false @@ -837,20 +837,20 @@ python-versions = "*" [[package]] name = "urllib3" -version = "1.26.3" +version = "1.26.4" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] -brotli = ["brotlipy (>=0.6.0)"] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotlipy (>=0.6.0)"] [[package]] name = "virtualenv" -version = "20.4.2" +version = "20.4.3" description = "Virtual Python Environment builder" category = "dev" optional = false @@ -878,15 +878,15 @@ python-versions = "*" [[package]] name = "zipp" -version = "3.4.0" +version = "3.4.1" description = "Backport of pathlib-compatible object wrapper for zip files" category = "dev" optional = false python-versions = ">=3.6" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "jaraco.test (>=3.2.0)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [extras] compiler = ["black", "jinja2", "typer", "rich"] @@ -956,89 +956,92 @@ commonmark = [ {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, ] coverage = [ - {file = "coverage-5.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:6d9c88b787638a451f41f97446a1c9fd416e669b4d9717ae4615bd29de1ac135"}, - {file = "coverage-5.4-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:66a5aae8233d766a877c5ef293ec5ab9520929c2578fd2069308a98b7374ea8c"}, - {file = "coverage-5.4-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9754a5c265f991317de2bac0c70a746efc2b695cf4d49f5d2cddeac36544fb44"}, - {file = "coverage-5.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:fbb17c0d0822684b7d6c09915677a32319f16ff1115df5ec05bdcaaee40b35f3"}, - {file = "coverage-5.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:b7f7421841f8db443855d2854e25914a79a1ff48ae92f70d0a5c2f8907ab98c9"}, - {file = "coverage-5.4-cp27-cp27m-win32.whl", hash = "sha256:4a780807e80479f281d47ee4af2eb2df3e4ccf4723484f77da0bb49d027e40a1"}, - {file = "coverage-5.4-cp27-cp27m-win_amd64.whl", hash = "sha256:87c4b38288f71acd2106f5d94f575bc2136ea2887fdb5dfe18003c881fa6b370"}, - {file = "coverage-5.4-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:c6809ebcbf6c1049002b9ac09c127ae43929042ec1f1dbd8bb1615f7cd9f70a0"}, - {file = "coverage-5.4-cp27-cp27mu-manylinux1_x86_64.whl", hash = 
"sha256:ba7ca81b6d60a9f7a0b4b4e175dcc38e8fef4992673d9d6e6879fd6de00dd9b8"}, - {file = "coverage-5.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:89fc12c6371bf963809abc46cced4a01ca4f99cba17be5e7d416ed7ef1245d19"}, - {file = "coverage-5.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a8eb7785bd23565b542b01fb39115a975fefb4a82f23d407503eee2c0106247"}, - {file = "coverage-5.4-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:7e40d3f8eb472c1509b12ac2a7e24158ec352fc8567b77ab02c0db053927e339"}, - {file = "coverage-5.4-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:1ccae21a076d3d5f471700f6d30eb486da1626c380b23c70ae32ab823e453337"}, - {file = "coverage-5.4-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:755c56beeacac6a24c8e1074f89f34f4373abce8b662470d3aa719ae304931f3"}, - {file = "coverage-5.4-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:322549b880b2d746a7672bf6ff9ed3f895e9c9f108b714e7360292aa5c5d7cf4"}, - {file = "coverage-5.4-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:60a3307a84ec60578accd35d7f0c71a3a971430ed7eca6567399d2b50ef37b8c"}, - {file = "coverage-5.4-cp35-cp35m-win32.whl", hash = "sha256:1375bb8b88cb050a2d4e0da901001347a44302aeadb8ceb4b6e5aa373b8ea68f"}, - {file = "coverage-5.4-cp35-cp35m-win_amd64.whl", hash = "sha256:16baa799ec09cc0dcb43a10680573269d407c159325972dd7114ee7649e56c66"}, - {file = "coverage-5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2f2cf7a42d4b7654c9a67b9d091ec24374f7c58794858bff632a2039cb15984d"}, - {file = "coverage-5.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:b62046592b44263fa7570f1117d372ae3f310222af1fc1407416f037fb3af21b"}, - {file = "coverage-5.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:812eaf4939ef2284d29653bcfee9665f11f013724f07258928f849a2306ea9f9"}, - {file = "coverage-5.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:859f0add98707b182b4867359e12bde806b82483fb12a9ae868a77880fc3b7af"}, - {file = "coverage-5.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:04b14e45d6a8e159c9767ae57ecb34563ad93440fc1b26516a89ceb5b33c1ad5"}, - {file = "coverage-5.4-cp36-cp36m-win32.whl", hash = "sha256:ebfa374067af240d079ef97b8064478f3bf71038b78b017eb6ec93ede1b6bcec"}, - {file = "coverage-5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:84df004223fd0550d0ea7a37882e5c889f3c6d45535c639ce9802293b39cd5c9"}, - {file = "coverage-5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1b811662ecf72eb2d08872731636aee6559cae21862c36f74703be727b45df90"}, - {file = "coverage-5.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6b588b5cf51dc0fd1c9e19f622457cc74b7d26fe295432e434525f1c0fae02bc"}, - {file = "coverage-5.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:3fe50f1cac369b02d34ad904dfe0771acc483f82a1b54c5e93632916ba847b37"}, - {file = "coverage-5.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:32ab83016c24c5cf3db2943286b85b0a172dae08c58d0f53875235219b676409"}, - {file = "coverage-5.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:68fb816a5dd901c6aff352ce49e2a0ffadacdf9b6fae282a69e7a16a02dad5fb"}, - {file = "coverage-5.4-cp37-cp37m-win32.whl", hash = "sha256:a636160680c6e526b84f85d304e2f0bb4e94f8284dd765a1911de9a40450b10a"}, - {file = "coverage-5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:bb32ca14b4d04e172c541c69eec5f385f9a075b38fb22d765d8b0ce3af3a0c22"}, - {file = "coverage-5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4d7165a4e8f41eca6b990c12ee7f44fef3932fac48ca32cecb3a1b2223c21f"}, - {file = "coverage-5.4-cp38-cp38-manylinux1_i686.whl", hash = 
"sha256:a565f48c4aae72d1d3d3f8e8fb7218f5609c964e9c6f68604608e5958b9c60c3"}, - {file = "coverage-5.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fff1f3a586246110f34dc762098b5afd2de88de507559e63553d7da643053786"}, - {file = "coverage-5.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:a839e25f07e428a87d17d857d9935dd743130e77ff46524abb992b962eb2076c"}, - {file = "coverage-5.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:6625e52b6f346a283c3d563d1fd8bae8956daafc64bb5bbd2b8f8a07608e3994"}, - {file = "coverage-5.4-cp38-cp38-win32.whl", hash = "sha256:5bee3970617b3d74759b2d2df2f6a327d372f9732f9ccbf03fa591b5f7581e39"}, - {file = "coverage-5.4-cp38-cp38-win_amd64.whl", hash = "sha256:03ed2a641e412e42cc35c244508cf186015c217f0e4d496bf6d7078ebe837ae7"}, - {file = "coverage-5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:14a9f1887591684fb59fdba8feef7123a0da2424b0652e1b58dd5b9a7bb1188c"}, - {file = "coverage-5.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9564ac7eb1652c3701ac691ca72934dd3009997c81266807aef924012df2f4b3"}, - {file = "coverage-5.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:0f48fc7dc82ee14aeaedb986e175a429d24129b7eada1b7e94a864e4f0644dde"}, - {file = "coverage-5.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:107d327071061fd4f4a2587d14c389a27e4e5c93c7cba5f1f59987181903902f"}, - {file = "coverage-5.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:0cdde51bfcf6b6bd862ee9be324521ec619b20590787d1655d005c3fb175005f"}, - {file = "coverage-5.4-cp39-cp39-win32.whl", hash = "sha256:c67734cff78383a1f23ceba3b3239c7deefc62ac2b05fa6a47bcd565771e5880"}, - {file = "coverage-5.4-cp39-cp39-win_amd64.whl", hash = "sha256:c669b440ce46ae3abe9b2d44a913b5fd86bb19eb14a8701e88e3918902ecd345"}, - {file = "coverage-5.4-pp36-none-any.whl", hash = "sha256:c0ff1c1b4d13e2240821ef23c1efb1f009207cb3f56e16986f713c2b0e7cd37f"}, - {file = "coverage-5.4-pp37-none-any.whl", hash = "sha256:cd601187476c6bed26a0398353212684c427e10a903aeafa6da40c63309d438b"}, - {file = "coverage-5.4.tar.gz", hash = "sha256:6d2e262e5e8da6fa56e774fb8e2643417351427604c2b177f8e8c5f75fc928ca"}, + {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, + {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, + {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, + {file = 
"coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, + {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, + {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, + {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, + {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, + {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, + {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, + {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, + {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, + {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, + {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, + {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, + {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, + {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash 
= "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, + {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, + {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, + {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, + {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, + {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, + {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, + {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, + {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, ] curtsies = [ {file = "curtsies-0.3.5.tar.gz", hash = "sha256:a587ff3335667a32be7afed163f60a1c82c5d9c848d8297534a06fd29de20dbd"}, ] cwcwidth = [ - {file = "cwcwidth-0.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:043642a199463e29d682d5c1f70c9c7a84eddfafa4e08557d08f680d38b7822f"}, - {file = "cwcwidth-0.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:2c1e4014fb2607c36ffac47341ff7d696adfad1b2f39c565850bf0eb08952695"}, - {file = "cwcwidth-0.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4d98c16a97bc88d03acc8d1b895a492048efd43b0d687fa0f537ff5e753a4d1b"}, - {file = "cwcwidth-0.1.2-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:1b74b5fb63a1b2172c24d229a5317be08901f84b4692ca036de4e11219f72c17"}, - {file = "cwcwidth-0.1.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:14ff60534b2a58d2c8708fcd3c763809b9ecab0ed5992bf6676f7d1d8e4731ea"}, - {file = "cwcwidth-0.1.2-cp36-cp36m-win32.whl", hash = "sha256:86b81629e7c74830e90db6d058790d557500f3fc7ced31241a9717f1aeb1c7f1"}, - {file = "cwcwidth-0.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f77e297f7df7d797d50df58c612e1838023cd83f6dbfc1c6f8b146ef87fc9cfd"}, - {file = "cwcwidth-0.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ef66a357834110b9a17a481cfb4562996fc8e832c633c67cfb3617e17407a2ad"}, - {file = 
"cwcwidth-0.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:a412d5504b7808bc1256c06659475d763594dbfdbd1d8b54e85403424a18e025"}, - {file = "cwcwidth-0.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d0da4dca360c273e20c1ab031cb6f319baec60f9f70e6c7e7849c3d4d65ace34"}, - {file = "cwcwidth-0.1.2-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:9ae976f5911c05146d3e55dd60a9e87da694ea2fbda9314a40280ff27e53b8f2"}, - {file = "cwcwidth-0.1.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:57be6edc9273bf298d0c371f8cbab55d5a556135d1733db620f518a25aa00ecf"}, - {file = "cwcwidth-0.1.2-cp37-cp37m-win32.whl", hash = "sha256:e6ae7382cdbf1bcb8c7f30b04c70ce4a794b63145840ba7441b79f5a5f751cd7"}, - {file = "cwcwidth-0.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c58046a26719e3eef943b0eb09408d10fc896bda4b867c86b3642c8e5effffc2"}, - {file = "cwcwidth-0.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b1684ba81edfbcd083e864c68cd35b9baf165e6e4dc288e6441141abc6ce9667"}, - {file = "cwcwidth-0.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:f6a40d778a6b6838ff4288b1860f80a35481a9e14d1d20180f08cebf22c36370"}, - {file = "cwcwidth-0.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:bc3fd6a6ff517fed4a6e35ec8985940738284a625b916ccc0d60e4d103c28a27"}, - {file = "cwcwidth-0.1.2-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:1fdaca6c96b0f2f43267489153778fcfd763bcdfae99b41aff88bd1e823fb032"}, - {file = "cwcwidth-0.1.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:e81f5b0a972c828b2a30f101e398ddb96a5e70430752e2992a00fe26163038f5"}, - {file = "cwcwidth-0.1.2-cp38-cp38-win32.whl", hash = "sha256:86158fb6060fd2091754a4831eb311f679ae50100df2fe6ed45193e3103f9db1"}, - {file = "cwcwidth-0.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:0e453ea7282b209af887e275a3aea6efc1ce4295c0465d3598045473ea1b3ac8"}, - {file = "cwcwidth-0.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5854e7c62bd0cd3962eb6de662f8df30f82300f3f7d7a0b77b75cc6f2f294a2a"}, - {file = "cwcwidth-0.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9253d6ea2bf7eef90134ee58b771f4e9f5114d5f4108c26e3e7c1cefae11863b"}, - {file = "cwcwidth-0.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f8c9bfb9dc7b5ee742ab5394686c8b6db820b453e60f57a1cc35bdac25ede98a"}, - {file = "cwcwidth-0.1.2-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:afc4fa179c4ae769282bb5a0ee2ce3cee7a307a40f7e6ff5395e07ac824c2080"}, - {file = "cwcwidth-0.1.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:2722e94e1bf579a0856d399a6bdb6d4fb07dbd8a683838ca6e4e25e289a66a38"}, - {file = "cwcwidth-0.1.2-cp39-cp39-win32.whl", hash = "sha256:1a16afea349abba714cf9b6bb42e4a66d2b0917813d5ce6db1baed2f9e1557ed"}, - {file = "cwcwidth-0.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:d753e5bb7da61038c912dae91abcb02aeef7a8fa2c48ed9b2fb6739cc013d0ed"}, - {file = "cwcwidth-0.1.2.tar.gz", hash = "sha256:0c4d39d9d72183d228d8659f8efa751d57e4762491660e8ba13679b7f64fce58"}, + {file = "cwcwidth-0.1.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0614e892110401284fec5850ee45846d5ff163654574d3df040f86f02ec05399"}, + {file = "cwcwidth-0.1.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:ffb278e25d3ff9789dca99dcb666469a390ff226b181f846cc8736f1554ff085"}, + {file = "cwcwidth-0.1.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:77281cd94e6d582f3459e1535305cb3ad0afd3fbed0bacbe2e84b7e5cb3e9123"}, + {file = "cwcwidth-0.1.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:3a93491f4cbe5fc821bae02ebcccfa5b9206f441fa3ef618dc6f62fdccde0f07"}, + {file = 
"cwcwidth-0.1.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:ede2a05f88e3ddc4be22591fd5c5491e8a94f6e7fd3c93a3a06164f4ce8690d0"}, + {file = "cwcwidth-0.1.4-cp36-cp36m-win32.whl", hash = "sha256:d76c3b5078355e78ca3aa0fd06939a9793f5a9f9bf4522738fff90fb58b47429"}, + {file = "cwcwidth-0.1.4-cp36-cp36m-win_amd64.whl", hash = "sha256:d5a487c6981bf157b67f83514a754df5e6713a9090df71558a2625788c4a448a"}, + {file = "cwcwidth-0.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d8f8464656b48549d2a8ac776eed5c6f10906ee2dcc3767ef8228cc582857f6d"}, + {file = "cwcwidth-0.1.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:a85539ec3b879177eb1715bda5bd2bb9753d84569f8717684f07016efb92a5c7"}, + {file = "cwcwidth-0.1.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:53ec58c6478af6062e979a89fc11ec6ca1e4254e93f3305ac62da28809745185"}, + {file = "cwcwidth-0.1.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:3bec2366e89de99c6ca8dcd1c92156d60efdbb47dc3a9cdb86d7064773c05d65"}, + {file = "cwcwidth-0.1.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:5a7da558423d32064bb8cabe461824543c6072ecdf2d0c2adf521dc63b3f0073"}, + {file = "cwcwidth-0.1.4-cp37-cp37m-win32.whl", hash = "sha256:ec9d57742804a975a75dc633ee3a0bb5bffe67dc897def6a3d84717805584dbd"}, + {file = "cwcwidth-0.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:9faa4adcdb0c74fb8350da8eee6f893dde5b9a0f817ee0b83607b8e0e4d12929"}, + {file = "cwcwidth-0.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9d0188488c55d947f71d48f47e7f8e4355d75a86afcc8932a500cd84e32e278"}, + {file = "cwcwidth-0.1.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:73d66da4f1807cc673cf924c9fd83f9f61465af13693f5ef2b5b4b9c32faa0c7"}, + {file = "cwcwidth-0.1.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ef08bc8af421e5991ff6c2e67124add008e73ed7fd4fb8767f44c07b789fe114"}, + {file = "cwcwidth-0.1.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:3011f108504e3ad6472f53df0b7a12b9a978e6e0e41bd841a768a6a5f678bc0e"}, + {file = "cwcwidth-0.1.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:55969b269a11f317c29b206e74cae02267af92a3a9a2fb86860a84f64366705a"}, + {file = "cwcwidth-0.1.4-cp38-cp38-win32.whl", hash = "sha256:51481cb731c6d9c46a5d751bafff03ea3072f856c590fe8d4a27a1d404bb20be"}, + {file = "cwcwidth-0.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:146069bc61cb5db11d3c037b057454d78ef2254932f4f4871ae355e0923ce8e7"}, + {file = "cwcwidth-0.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2fc0d1c4214f76ba7fec60aac6e1467588d865a0005ce9063c5471c57751f895"}, + {file = "cwcwidth-0.1.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:b1d75b2c9edc19a579dd5d92e93dc7087b6430a250928a06527aa6ebd627b06c"}, + {file = "cwcwidth-0.1.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:63190cb5b87a568ed89cfae3be203935a14dea0c10b116160a15031273771b44"}, + {file = "cwcwidth-0.1.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:99fb16a3b0258ee2fa952e7dab80b839b990aebdb96b98b648211a99e8a0c906"}, + {file = "cwcwidth-0.1.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:01b630049fdd8fc37f0e929d24012fee7855d8aa3f304c8a0c26caf2415c7d85"}, + {file = "cwcwidth-0.1.4-cp39-cp39-win32.whl", hash = "sha256:0e05498c57629bf6c8445b17b2e5a9ec26c0f97080cb7ae2602e14a5db67209b"}, + {file = "cwcwidth-0.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:7779cb2ccc04694f95134d3f660216f32be5de82101dcbd8f1c8f81ff748ae41"}, + {file = "cwcwidth-0.1.4.tar.gz", hash = "sha256:482a937891a6918667436e0a7041aab576c26e4bcbcdddd178ef79362fbcf9ab"}, ] dataclasses = [ {file = "dataclasses-0.7-py3-none-any.whl", hash = 
"sha256:3459118f7ede7c8bea0fe795bff7c6c2ce287d01dd226202f7c9ebc0610a7836"}, @@ -1102,100 +1105,100 @@ greenlet = [ {file = "greenlet-1.0.0.tar.gz", hash = "sha256:719e169c79255816cdcf6dccd9ed2d089a72a9f6c42273aae12d55e8d35bdcf8"}, ] grpcio = [ - {file = "grpcio-1.35.0-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:95cc4d2067deced18dc807442cf8062a93389a86abf8d40741120054389d3f29"}, - {file = "grpcio-1.35.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:d186a0ce291f4386e28a7042ec31c85250b0c2e25d2794b87fa3c15ff473c46c"}, - {file = "grpcio-1.35.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:c8d0a6a58a42275c6cb616e7cb9f9fcf5eba1e809996546e561cd818b8f7cff7"}, - {file = "grpcio-1.35.0-cp27-cp27m-win32.whl", hash = "sha256:8d08f90d72a8e8d9af087476337da76d26749617b0a092caff4e684ce267af21"}, - {file = "grpcio-1.35.0-cp27-cp27m-win_amd64.whl", hash = "sha256:0072ec4563ab4268c4c32e936955085c2d41ea175b662363496daedd2273372c"}, - {file = "grpcio-1.35.0-cp27-cp27mu-linux_armv7l.whl", hash = "sha256:aca45d2ccb693c9227fbf21144891422a42dc4b76b52af8dd1d4e43afebe321d"}, - {file = "grpcio-1.35.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:87147b1b306c88fe7dca7e3dff8aefd1e63d6aed86e224f9374ddf283f17d7f1"}, - {file = "grpcio-1.35.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:22edfc278070d54f3ab7f741904e09155a272fe934e842babbf84476868a50de"}, - {file = "grpcio-1.35.0-cp35-cp35m-linux_armv7l.whl", hash = "sha256:f3654a52f72ba28953dbe2e93208099f4903f4b3c07dc7ff4db671c92968111d"}, - {file = "grpcio-1.35.0-cp35-cp35m-macosx_10_10_intel.whl", hash = "sha256:dc2589370ef84eb1cc53530070d658a7011d2ee65f18806581809c11cd016136"}, - {file = "grpcio-1.35.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:f0c27fd16582a303e5baf6cffd9345c9ac5f855d69a51232664a0b888a77ba80"}, - {file = "grpcio-1.35.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:b2985f73611b637271b00d9c4f177e65cc3193269bc9760f16262b1a12757265"}, - {file = "grpcio-1.35.0-cp35-cp35m-manylinux2014_i686.whl", hash = "sha256:acb489b7aafdcf960f1a0000a1f22b45e5b6ccdf8dba48f97617d627f4133195"}, - {file = "grpcio-1.35.0-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:16fd33030944672e49e0530dec2c60cd4089659ccdf327e99569b3b29246a0b6"}, - {file = "grpcio-1.35.0-cp35-cp35m-win32.whl", hash = "sha256:1757e81c09132851e85495b802fe4d4fbef3547e77fa422a62fb4f7d51785be0"}, - {file = "grpcio-1.35.0-cp35-cp35m-win_amd64.whl", hash = "sha256:35b72884e09cbc46c564091f4545a39fa66d132c5676d1a6e827517fff47f2c1"}, - {file = "grpcio-1.35.0-cp36-cp36m-linux_armv7l.whl", hash = "sha256:17940a7dc461066f28816df48be44f24d3b9f150db344308ee2aeae033e1af0b"}, - {file = "grpcio-1.35.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:75ea903edc42a8c6ec61dbc5f453febd79d8bdec0e1bad6df7088c34282e8c42"}, - {file = "grpcio-1.35.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:b180a3ec4a5d6f96d3840c83e5f8ab49afac9fa942921e361b451d7a024efb00"}, - {file = "grpcio-1.35.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:e163c27d2062cd3eb07057f23f8d1330925beaba16802312b51b4bad33d74098"}, - {file = "grpcio-1.35.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:764b50ba1a15a2074cdd1a841238f2dead0a06529c495a46821fae84cb9c7342"}, - {file = "grpcio-1.35.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:088c8bea0f6b596937fefacf2c8df97712e7a3dd49496975049cc95dbf02af1a"}, - {file = "grpcio-1.35.0-cp36-cp36m-win32.whl", hash = "sha256:1aa53f82362c7f2791fe0cdd9a3b3aec325c11d8f0dfde600f91907dfaa8546b"}, - {file = "grpcio-1.35.0-cp36-cp36m-win_amd64.whl", hash 
= "sha256:efb3d67405eb8030db6f27920b4be023fabfb5d4e09c34deab094a7c473a5472"}, - {file = "grpcio-1.35.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:44aaa6148d18a8e836f99dadcdec17b27bc7ec0995b2cc12c94e61826040ec90"}, - {file = "grpcio-1.35.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:18ad7644e23757420ea839ac476ef861e4f4841c8566269b7c91c100ca1943b3"}, - {file = "grpcio-1.35.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:859a0ceb23d7189362cc06fe7e906e9ed5c7a8f3ac960cc04ce13fe5847d0b62"}, - {file = "grpcio-1.35.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3e7d4428ed752fdfe2dddf2a404c93d3a2f62bf4b9109c0c10a850c698948891"}, - {file = "grpcio-1.35.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:a36151c335280b09afd5123f3b25085027ae2b10682087a4342fb6f635b928fb"}, - {file = "grpcio-1.35.0-cp37-cp37m-win32.whl", hash = "sha256:dfecb2acd3acb8bb50e9aa31472c6e57171d97c1098ee67cd283a6fe7d56a926"}, - {file = "grpcio-1.35.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e87e55fba98ebd7b4c614dcef9940dc2a7e057ad8bba5f91554934d47319a35b"}, - {file = "grpcio-1.35.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:da44bf613eed5d9e8df0785463e502a416de1be6e4ac31edbe99c9111abaed5f"}, - {file = "grpcio-1.35.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:9e503eaf853199804a954dc628c5207e67d6c7848dcba42a997fbe718618a2b1"}, - {file = "grpcio-1.35.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:6ba3d7acf70acde9ce27e22921db921b84a71be578b32739536c32377b65041a"}, - {file = "grpcio-1.35.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:048c01d1eb5c2ae7cba2254b98938d2fc81f6dc10d172d9261d65266adb0fdb3"}, - {file = "grpcio-1.35.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:efd896e8ca7adb2654cf014479a5e1f74e4f776b6b2c0fbf95a6c92787a6631a"}, - {file = "grpcio-1.35.0-cp38-cp38-win32.whl", hash = "sha256:8a29a26b9f39701ce15aa1d5aa5e96e0b5f7028efe94f95341a4ed8dbe4bed78"}, - {file = "grpcio-1.35.0-cp38-cp38-win_amd64.whl", hash = "sha256:aea3d592a7ece84739b92d212cd16037c51d84a259414f64b51c14e946611f3d"}, - {file = "grpcio-1.35.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2f8e8d35d4799aa1627a212dbe8546594abf4064056415c31bd1b3b8f2a62027"}, - {file = "grpcio-1.35.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:9f0da13b215068e7434b161a35d0b4e92140ffcfa33ddda9c458199ea1d7ce45"}, - {file = "grpcio-1.35.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:7ae408780b79c9b9b91a2592abd1d7abecd05675d988ea75038580f420966b59"}, - {file = "grpcio-1.35.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:0f714e261e1d63615476cda4ee808a79cca62f8f09e2943c136c2f87ec5347b1"}, - {file = "grpcio-1.35.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:7ee7d54da9d176d3c9a0f47c04d7ff6fdc6ee1c17643caff8c33d6c8a70678a4"}, - {file = "grpcio-1.35.0-cp39-cp39-win32.whl", hash = "sha256:94c3b81089a86d3c5877d22b07ebc66b5ed1d84771e24b001844e29a5b6178dd"}, - {file = "grpcio-1.35.0-cp39-cp39-win_amd64.whl", hash = "sha256:399ee377b312ac652b07ef4365bbbba009da361fa7708c4d3d4ce383a1534ea7"}, - {file = "grpcio-1.35.0.tar.gz", hash = "sha256:7bd0ebbb14dde78bf66a1162efd29d3393e4e943952e2f339757aa48a184645c"}, + {file = "grpcio-1.36.1-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:e3a83c5db16f95daac1d96cf3c9018d765579b5a29bb336758d793028e729921"}, + {file = "grpcio-1.36.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c18739fecb90760b183bfcb4da1cf2c6bf57e38f7baa2c131d5f67d9a4c8365d"}, + {file = "grpcio-1.36.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = 
"sha256:f6efa62ca1fe02cd34ec35f53446f04a15fe2c886a4e825f5679936a573d2cbf"}, + {file = "grpcio-1.36.1-cp27-cp27m-win32.whl", hash = "sha256:9a18299827a70be0507f98a65393b1c7f6c004fe2ca995fe23ffac534dd187a7"}, + {file = "grpcio-1.36.1-cp27-cp27m-win_amd64.whl", hash = "sha256:8a89190de1985a54ef311650cf9687ffb81de038973fd32e452636ddae36b29f"}, + {file = "grpcio-1.36.1-cp27-cp27mu-linux_armv7l.whl", hash = "sha256:3e75643d21db7d68acd541d3fec66faaa8061d12b511e101b529ff12a276bb9b"}, + {file = "grpcio-1.36.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:3c5204e05e18268dd6a1099ca6c106fd9d00bcae1e37d5a5186094c55044c941"}, + {file = "grpcio-1.36.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:24d4c2c5e540e666c52225953d6813afc8ccf9bf46db6a72edd4e8d606656248"}, + {file = "grpcio-1.36.1-cp35-cp35m-linux_armv7l.whl", hash = "sha256:4dc7295dc9673f7af22c1e38c2a2c24ecbd6773a4c5ed5a46ed38ad4dcf2bf6c"}, + {file = "grpcio-1.36.1-cp35-cp35m-macosx_10_10_intel.whl", hash = "sha256:f241116d4bf1a8037ff87f16914b606390824e50902bdbfa2262e855fbf07fe5"}, + {file = "grpcio-1.36.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:1056b558acfd575d774644826df449e1402a03e456a3192fafb6b06d1069bf80"}, + {file = "grpcio-1.36.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:52ec563da45d06319224ebbda53501d25594de64ee1b2786e119ba4a2f1ce40c"}, + {file = "grpcio-1.36.1-cp35-cp35m-manylinux2014_i686.whl", hash = "sha256:7cbeac9bbe6a4a7fce4a89c892c249135dd9f5f5219ede157174c34a456188f0"}, + {file = "grpcio-1.36.1-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:2abaa9f0d83bd0b26f6d0d1fc4b97d73bde3ceac36ab857f70d3cabcf31c5c79"}, + {file = "grpcio-1.36.1-cp35-cp35m-win32.whl", hash = "sha256:02030e1afd3247f2b159df9dff959ec79dd4047b1c4dd4eec9e3d1642efbd504"}, + {file = "grpcio-1.36.1-cp35-cp35m-win_amd64.whl", hash = "sha256:eafafc7e040e36aa926edc731ab52c23465981888779ae64bfc8ad85888ed4f3"}, + {file = "grpcio-1.36.1-cp36-cp36m-linux_armv7l.whl", hash = "sha256:1030e74ddd0fa6e3bad7944f0c68cf1251b15bcd70641f0ad3858fdf2b8602a0"}, + {file = "grpcio-1.36.1-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:b003e24339030ed356f59505d1065b89e1f443ef41ce71ca9069be944c0d2e6b"}, + {file = "grpcio-1.36.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:76daa3c4d58fcf40f7969bdb4270335e96ee0382a050cadcd97d7332cd0251a3"}, + {file = "grpcio-1.36.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:f591597bb25eae0094ead5a965555e911453e5f35fdbdaa83be11ef107865697"}, + {file = "grpcio-1.36.1-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:cbd82c479338fc1c0e5c3db09752b61fe47d40c6e38e4be8657153712fa76674"}, + {file = "grpcio-1.36.1-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:7e32bc01dfaa7a51c547379644ea619a2161d6969affdac3bbd173478d26673d"}, + {file = "grpcio-1.36.1-cp36-cp36m-win32.whl", hash = "sha256:5378189fb897567f4929f75ab67a3e0da4f8967806246cb9cfa1fa06bfbdb0d5"}, + {file = "grpcio-1.36.1-cp36-cp36m-win_amd64.whl", hash = "sha256:3a6295aa692806218e97bb687a71cd768450ed99e2acddc488f18d738edef463"}, + {file = "grpcio-1.36.1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:6f6f8a8b57e40347d0bf32c2135037dae31d63d3b19007b4c426a11b76deaf65"}, + {file = "grpcio-1.36.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:4c05ed54b2a00df01e633bebec819b512bf0c60f8f5b3b36dd344dc673b02fea"}, + {file = "grpcio-1.36.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:e1b9e906aa6f7577016e86ed7f3a69cae7dab4e41356584dc7980f76ea65035f"}, + {file = "grpcio-1.36.1-cp37-cp37m-manylinux2014_i686.whl", hash = 
"sha256:a602d6b30760bbbb2fe776caaa914a0d404636cafc3f2322718bf8002d7b1e55"}, + {file = "grpcio-1.36.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:dee9971aef20fc09ed897420446c4d0926cd1d7630f343333288523ca5b44bb2"}, + {file = "grpcio-1.36.1-cp37-cp37m-win32.whl", hash = "sha256:ed16bfeda02268e75e038c58599d52afc7097d749916c079b26bc27a66900f7d"}, + {file = "grpcio-1.36.1-cp37-cp37m-win_amd64.whl", hash = "sha256:85a6035ae75ce964f78f19cf913938596ccf068b149fcd79f4371268bcb9aa7c"}, + {file = "grpcio-1.36.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:6b30682180053eebc87802c2f249d2f59b430e1a18e8808575dde0d22a968b2c"}, + {file = "grpcio-1.36.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:5e4920a8fb5d17b2c5ba980db0ac1c925bbee3e5d70e96da3ec4fb1c8600d68f"}, + {file = "grpcio-1.36.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:f7740d9d9451f3663df11b241ac05cafc0efaa052d2fdca6640c4d3748eaf6e2"}, + {file = "grpcio-1.36.1-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:20b7c4c5513e1135a2261e56830c0e710f205fee92019b92fe132d7f16a5cfd8"}, + {file = "grpcio-1.36.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:216fbd2a488e74c3b96e240e4054c85c4c99102a439bc9f556936991643f43bc"}, + {file = "grpcio-1.36.1-cp38-cp38-win32.whl", hash = "sha256:7863c2a140e829b1f4c6d67bf0bf15e5321ac4766d0a295e2682970d9dd4b091"}, + {file = "grpcio-1.36.1-cp38-cp38-win_amd64.whl", hash = "sha256:f214076eb13da9e65c1aa9877b51fca03f51a82bd8691358e1a1edd9ff341330"}, + {file = "grpcio-1.36.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:ec753c022b39656f88409fbf9f2d3b28497e3f17aa678f884d78776b41ebe6bd"}, + {file = "grpcio-1.36.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:0648a6d5d7ddcd9c8462d7d961660ee024dad6b88152ee3a521819e611830edf"}, + {file = "grpcio-1.36.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:45ea10dd133a43b10c0b4326834107ebccfee25dab59b312b78e018c2d72a1f0"}, + {file = "grpcio-1.36.1-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:bab743cdac1d6d8326c65d1d091d0740b39966dfab06519f74a03b3d128b8454"}, + {file = "grpcio-1.36.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:09af8ceb91860086216edc6e5ea15f9beb2cf81687faa43b7c03216f5b73e244"}, + {file = "grpcio-1.36.1-cp39-cp39-win32.whl", hash = "sha256:f3f70505207ee1cee65f60a799fd8e06e07861409aa0d55d834825a79b40c297"}, + {file = "grpcio-1.36.1-cp39-cp39-win_amd64.whl", hash = "sha256:f22c11772eff25ba1ca536e760b8c34ba56f2a9d66b6842cb11770a8f61f879d"}, + {file = "grpcio-1.36.1.tar.gz", hash = "sha256:a66ea59b20f3669df0f0c6a3bd57b985e5b2d1dcf3e4c29819bb8dc232d0fd38"}, ] grpcio-tools = [ - {file = "grpcio-tools-1.35.0.tar.gz", hash = "sha256:9e2a41cba9c5a20ae299d0fdd377fe231434fa04cbfbfb3807293c6ec10b03cf"}, - {file = "grpcio_tools-1.35.0-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:cfa49e6d62b313862a6007ae02016bd89a2fa184b0aab0d0e524cb24ecc2fdb4"}, - {file = "grpcio_tools-1.35.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:f66cd905ffcbe2294c9dee6d0de8064c3a49861a9b1770c18cb8a15be3bc0da5"}, - {file = "grpcio_tools-1.35.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:56663ba4a49ca585e4333dfebc5ed7e91ad3d75b838aced4f922fb4e365376cc"}, - {file = "grpcio_tools-1.35.0-cp27-cp27m-win32.whl", hash = "sha256:252bfaa0004d80d927a77998c8b3a81fb47620e41af1664bdba8837d722c4ead"}, - {file = "grpcio_tools-1.35.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ffa66fc4e80aff4f68599e786aa3295f4a0d6761ed63d75c32261f5de77aa0fd"}, - {file = "grpcio_tools-1.35.0-cp27-cp27mu-linux_armv7l.whl", hash = 
"sha256:3cea2d07343801cb2a0d2f71fe7d6d7ffa6fe8fc0e1f6243c6867d0bb04557a1"}, - {file = "grpcio_tools-1.35.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:15c37528936774d8f734d75540848134fb5710ca27801ce4ac73c8a6cca0494e"}, - {file = "grpcio_tools-1.35.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:f3861211a450a312b7645d4eaf16c78f1d9e896e58a8c3be871f5881362d3fee"}, - {file = "grpcio_tools-1.35.0-cp35-cp35m-linux_armv7l.whl", hash = "sha256:5fdb6a65f66ee6cdc49455ea03ca435ae86ef1869dc929a8652cc19b5f950d22"}, - {file = "grpcio_tools-1.35.0-cp35-cp35m-macosx_10_10_intel.whl", hash = "sha256:8bfd05f26af9ea069f2f3c48740a315470fc4a434189544fea3b3508b71be9a0"}, - {file = "grpcio_tools-1.35.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:f7074cfd79989424e4bd903ff5618c1420a7c81ad97836256f3927447b74c027"}, - {file = "grpcio_tools-1.35.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:adea0bd93978284f1590a3880d79621881f7029b2fac330f64f491af2b554707"}, - {file = "grpcio_tools-1.35.0-cp35-cp35m-manylinux2014_i686.whl", hash = "sha256:b23e0a64cdbf4c3bcdf8e6ad0cdd8b8a582a4c50d5ed4eddc4c81dc8d5ba0c60"}, - {file = "grpcio_tools-1.35.0-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:7cc68c42bcbebd76731686f22870930f110309e1e69244df428f8fb161b7645b"}, - {file = "grpcio_tools-1.35.0-cp35-cp35m-win32.whl", hash = "sha256:aa9cb65231a7efd77e83e149b1905335eda1bbadd301dd1bffcbfea69fd5bd56"}, - {file = "grpcio_tools-1.35.0-cp35-cp35m-win_amd64.whl", hash = "sha256:11e6dffd2e58737ade63a00a51da83b474b5740665914103f003049acff5be8e"}, - {file = "grpcio_tools-1.35.0-cp36-cp36m-linux_armv7l.whl", hash = "sha256:59d80997e780dc52911e263e30ca2334e7b3bd12c10dc81625dcc34273fa744b"}, - {file = "grpcio_tools-1.35.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:179b2eb274d8c29e1e18c21fb69c5101e3196617c7abb193a80e194ea9b274be"}, - {file = "grpcio_tools-1.35.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:1687b0033beff82ac35f14fbbd5e7eb0cab39e60f8be0a25a7f4ba92d66578c8"}, - {file = "grpcio_tools-1.35.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:9e956751b1b96ce343088550d155827f8312d85f09067f6ede0a4778273b787b"}, - {file = "grpcio_tools-1.35.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:4ca85f9deee58473c017ee62aaa8c12dfda223eeabed5dd013c009af275bc4f2"}, - {file = "grpcio_tools-1.35.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:853d030ff74ce90244bb77c5a8d5c2b2d84b24df477fc422d44fa81d512124d6"}, - {file = "grpcio_tools-1.35.0-cp36-cp36m-win32.whl", hash = "sha256:add160d4697a5366ee1420b59621bde69a3eaaba35170e60bd376f0ea6e24fe5"}, - {file = "grpcio_tools-1.35.0-cp36-cp36m-win_amd64.whl", hash = "sha256:dbaaad0132a9e70439e93d26611443ee3aaaa62547b7d18655ac754b4984ea25"}, - {file = "grpcio_tools-1.35.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:bbc6986e29ab3bb39db9a0e31cdbb0ced80cead2ef0453c40dfdfacbab505950"}, - {file = "grpcio_tools-1.35.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:8631df0e357b28da4ef617306a08f70c21cf85c049849f4a556b95069c146d61"}, - {file = "grpcio_tools-1.35.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:a6e2b07dbe25c6022eeae972b4eee2058836dea345a3253082524240a00daa9f"}, - {file = "grpcio_tools-1.35.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:30f83ccc6d09be07d7f15d05f29acd5017140f330ba3a218ae7b7e19db02bda6"}, - {file = "grpcio_tools-1.35.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:88184383f24af8f8cbbb4020846af53634d8632b486479a3b98ea29c1470372e"}, - {file = "grpcio_tools-1.35.0-cp37-cp37m-win32.whl", hash = 
"sha256:579cf4538d8ec25314c45ef84bb140fad8888446ed7a69913965fd7d9bc188d5"}, - {file = "grpcio_tools-1.35.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9203e0105db476131f32ff3c3213b5aa6b77b25553ffe0d09d973913b2320856"}, - {file = "grpcio_tools-1.35.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:c910dec8903fb9d16fd1b111de57401a46e4d5f74c6d009a12a945d696603eb0"}, - {file = "grpcio_tools-1.35.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:cc9bcd34a653c2353dd43fc395ceb560271551f2fae30bcafede2e4ad0c101c4"}, - {file = "grpcio_tools-1.35.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4241301b8e594c5c265f06c600b492372e867a4bb80dc205b545088c39e010d0"}, - {file = "grpcio_tools-1.35.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:8bae2611a8e09617922ff4cb11de6fd5f59b91c75a14a318c7d378f427584be1"}, - {file = "grpcio_tools-1.35.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:0d5028f548fa2b99494baf992dd0e23676361b1a217322be44f6c13b5133f6b3"}, - {file = "grpcio_tools-1.35.0-cp38-cp38-win32.whl", hash = "sha256:8d2c507c093a0ae3df62201ef92ceabcc34ac3f7e53026f12357f8c3641e809a"}, - {file = "grpcio_tools-1.35.0-cp38-cp38-win_amd64.whl", hash = "sha256:994adfe39a1755424e3c33c434786a9fa65090a50515303dfa8125cbec4a5940"}, - {file = "grpcio_tools-1.35.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:51bf36ae34f70a8d6ccee5d9d2e52a9e65251670b405f91b7b547a73788f90fb"}, - {file = "grpcio_tools-1.35.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:e00dc8d641001409963f78b0b8bf83834eb87c0090357ebc862f874dd0e6dbb5"}, - {file = "grpcio_tools-1.35.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:5f279dee8b77bf93996592ada3bf56ad44fa9b0e780099172f1a7093a506eb67"}, - {file = "grpcio_tools-1.35.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:713b496dd02fc868da0d59cc09536c62452d52035d0b694204d5054e75fe4929"}, - {file = "grpcio_tools-1.35.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:15fa3c66e6b0ba2e434eccf8cdbce68e4e37b5fe440dbeffb9efd599aa23910f"}, - {file = "grpcio_tools-1.35.0-cp39-cp39-win32.whl", hash = "sha256:ee0f750b5d8d628349e903438bb506196c4c5cee0007e81800d95cd0a2b23e6f"}, - {file = "grpcio_tools-1.35.0-cp39-cp39-win_amd64.whl", hash = "sha256:c8451c60e106310436c123f3243c115db21ccb957402edbe73b1bb68276e4aa4"}, + {file = "grpcio-tools-1.36.1.tar.gz", hash = "sha256:80ef584f7b917f575e4b8f2ec59cd4a4d98c2046e801a735f3136b05742a36a6"}, + {file = "grpcio_tools-1.36.1-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:ebbfdbff079bfc303a4e1d3da59302147d5cf4f1db2c412a074366149d93e89e"}, + {file = "grpcio_tools-1.36.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:d95dfefe156be02bcce4eb044ac7ff166c8a6c288d71bc3ed960d8b26bce2786"}, + {file = "grpcio_tools-1.36.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:7969915ac252d0e67f9cfd4f8b9d6bb546efc7b26bce34978a940e37ee4078d5"}, + {file = "grpcio_tools-1.36.1-cp27-cp27m-win32.whl", hash = "sha256:582b77e7a4905063d8071ac3685cefa38941799d5f4ea7b4519281a28cfc6752"}, + {file = "grpcio_tools-1.36.1-cp27-cp27m-win_amd64.whl", hash = "sha256:66d2a6237941199df0493e46b8a3123005b4dfde9af1b9572e8c54eb605a7567"}, + {file = "grpcio_tools-1.36.1-cp27-cp27mu-linux_armv7l.whl", hash = "sha256:702c3eb61a3cfddcaea04d2358c0390c2e189fe42b64a92239df8292366ab4df"}, + {file = "grpcio_tools-1.36.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ce621375bc7dfaeac93e23e202771a6e567a8ea7e9a7cc690b87d8b1950e3da6"}, + {file = "grpcio_tools-1.36.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = 
"sha256:ff1792b188183e977e2feccb1f3b3d4580f921df8f61385d7ae8eace10578a23"}, + {file = "grpcio_tools-1.36.1-cp35-cp35m-linux_armv7l.whl", hash = "sha256:a3a64797840fd4917ec98532d17b9b7c6a954dcfd7862657c750255556d369a5"}, + {file = "grpcio_tools-1.36.1-cp35-cp35m-macosx_10_10_intel.whl", hash = "sha256:0873697064cdbb116ba9f88ff524e13e9afd78bf7905ecd6a0f0f743bf40ca64"}, + {file = "grpcio_tools-1.36.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6df49b402f387decaaf57784c3e74bea6f34cf446cc45d4bf7b9adb34f97fb20"}, + {file = "grpcio_tools-1.36.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:aec997dafa90a29b399bdb23d968ab43da223faeac005d384a1194f43ee0f46e"}, + {file = "grpcio_tools-1.36.1-cp35-cp35m-manylinux2014_i686.whl", hash = "sha256:92336c60db1052c865ab7c9936680187d16d2f565c470ba03199e817120714e8"}, + {file = "grpcio_tools-1.36.1-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:bbe8da70ccbe21c72599eb8de5ad26bd053c01f4f03c48ea16323f96f1ec7095"}, + {file = "grpcio_tools-1.36.1-cp35-cp35m-win32.whl", hash = "sha256:96e1c0d267eb03b819a31bcf973579220ec3b8b53178952daa9e2f1ad696783f"}, + {file = "grpcio_tools-1.36.1-cp35-cp35m-win_amd64.whl", hash = "sha256:f4326b1a5352e85480629bf888b132f0aec79bb791d29cd3e2322586cd70433a"}, + {file = "grpcio_tools-1.36.1-cp36-cp36m-linux_armv7l.whl", hash = "sha256:f2befead0395e8aaab1e8f76825c8c9fa93d69249a513c26107a55183f91ccd9"}, + {file = "grpcio_tools-1.36.1-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:125859be6458e65e348c50ddb7a964ba48945d521af3f46ce35aca9a2b752296"}, + {file = "grpcio_tools-1.36.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:c669f1ee5642631ad93fa51d298306124d26bccc76ce63a3bc143ddcf01c58af"}, + {file = "grpcio_tools-1.36.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:693dc16a65b1766037fca8cddc173c0f45e79dd14e05d61128a30dbfd02f6503"}, + {file = "grpcio_tools-1.36.1-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:add07eb3c79478b003ac3af7b636275c37fa6bac56e6a29f79128bea09b37488"}, + {file = "grpcio_tools-1.36.1-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:747b547c487231a6325eda820d1d6a7c6080eda2cd1f68a7d4b2f8d9cc0a3e95"}, + {file = "grpcio_tools-1.36.1-cp36-cp36m-win32.whl", hash = "sha256:fd5eed43f5764209a95a58db82c064c1958525f30ad8ebb57df38dd2c9e86aa7"}, + {file = "grpcio_tools-1.36.1-cp36-cp36m-win_amd64.whl", hash = "sha256:bc6257b5533c66143f4f084aea3ae52c1c01f99997a8b81d2259d0cf083176b5"}, + {file = "grpcio_tools-1.36.1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:f35fad86d99743cc15fccf11ec74d8c9b76e997cd233dc1fd031457d3f0fd7fc"}, + {file = "grpcio_tools-1.36.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:dcdfe82237e7498eb49dd12751716c55d189a5e49b4bda0bb53f85acbe51bbb1"}, + {file = "grpcio_tools-1.36.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:d6b3c868c6ac15a0e288d3a5380ad5f01802cbbed8645333e496fa31ecea19af"}, + {file = "grpcio_tools-1.36.1-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:6898776449485feedb6d0fd98d3a36c8882c32a5603b86b2511e2557ee765d40"}, + {file = "grpcio_tools-1.36.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:6fee070c18be66a282ceb56245191dabf80986aee333e74d2fdea58118b452d4"}, + {file = "grpcio_tools-1.36.1-cp37-cp37m-win32.whl", hash = "sha256:55ed5c5de883defacd899123ba5a9f0077b7fb87d8f1778cb5996b4391604447"}, + {file = "grpcio_tools-1.36.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f74f0c3eedc0de72c402e82bb1199ffe5e359ccdac70bf789d65444042a84f42"}, + {file = "grpcio_tools-1.36.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = 
"sha256:93e3ba4696b69fc4356a0823ecddd8b29ebb1fba0571f27574b1182ef5c262f6"}, + {file = "grpcio_tools-1.36.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:e730845677e45c6829d212be6e4fb69768979c3b35b5884293be02a7f436e18c"}, + {file = "grpcio_tools-1.36.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:f7ba8d631f8f5c089958285545bd9e307fd752cdd1fa31515a51cfc1e04b833d"}, + {file = "grpcio_tools-1.36.1-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:af392594ba30b5ee470b7538cf792df970e2097edc299685d8e0d76b2b1bef7b"}, + {file = "grpcio_tools-1.36.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:facda541209a0b0edfccf6a5b18ce344c4e90bc8950c995482c85936a23ba939"}, + {file = "grpcio_tools-1.36.1-cp38-cp38-win32.whl", hash = "sha256:9fa491aaccd455e3aec35d12bcef5dce307c674f08e98bbbf33bf6774e6e2ec5"}, + {file = "grpcio_tools-1.36.1-cp38-cp38-win_amd64.whl", hash = "sha256:76900dde111192900c6eb5ed491cf0d8a13403e502c74859f2e2c3116842668a"}, + {file = "grpcio_tools-1.36.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0a7b85758e44d9585f27fc7692b58e63952a2e9130cfbbd16defda8c2ffbb2ad"}, + {file = "grpcio_tools-1.36.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:9b8556e2938ef9437ef07d028b46198f299533497df878f96785502e6f74250d"}, + {file = "grpcio_tools-1.36.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:cd44135fb8b45acc79424e7354bb4548911a6202ca2fac384574099f8d998f06"}, + {file = "grpcio_tools-1.36.1-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:c02b5b6d185b1af86342381ddd1ad3d0482c4116e203e52a7145636fb1b2ad12"}, + {file = "grpcio_tools-1.36.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:377cd9b8d2098d2ced48d3dee466fd73fb19128aa0edc6f1799077cf4dbda606"}, + {file = "grpcio_tools-1.36.1-cp39-cp39-win32.whl", hash = "sha256:120bad5a3f3288ae8acd07d839a13d7873304ae35a1d717033295e90ed9bd8ac"}, + {file = "grpcio_tools-1.36.1-cp39-cp39-win_amd64.whl", hash = "sha256:5cec989d219164312bdfa9389aedaea5887fb8133bb1a247fcde5901775b5427"}, ] grpclib = [ {file = "grpclib-0.4.1.tar.gz", hash = "sha256:8c0021cd038634c268249e4cd168d9f3570e66ceceec1c9416094b788ebc8372"}, @@ -1221,12 +1224,12 @@ imagesize = [ {file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"}, ] importlib-metadata = [ - {file = "importlib_metadata-3.4.0-py3-none-any.whl", hash = "sha256:ace61d5fc652dc280e7b6b4ff732a9c2d40db2c0f92bc6cb74e07b73d53a1771"}, - {file = "importlib_metadata-3.4.0.tar.gz", hash = "sha256:fa5daa4477a7414ae34e95942e4dd07f62adf589143c875c133c1e53c4eff38d"}, + {file = "importlib_metadata-3.7.3-py3-none-any.whl", hash = "sha256:b74159469b464a99cb8cc3e21973e4d96e05d3024d337313fedb618a6e86e6f4"}, + {file = "importlib_metadata-3.7.3.tar.gz", hash = "sha256:742add720a20d0467df2f444ae41704000f50e1234f46174b51f9c6031a1bd71"}, ] importlib-resources = [ - {file = "importlib_resources-5.1.0-py3-none-any.whl", hash = "sha256:885b8eae589179f661c909d699a546cf10d83692553e34dca1bf5eb06f7f6217"}, - {file = "importlib_resources-5.1.0.tar.gz", hash = "sha256:bfdad047bce441405a49cf8eb48ddce5e56c696e185f59147a8b79e75e9e6380"}, + {file = "importlib_resources-5.1.2-py3-none-any.whl", hash = "sha256:ebab3efe74d83b04d6bf5cd9a17f0c5c93e60fb60f30c90f56265fce4682a469"}, + {file = "importlib_resources-5.1.2.tar.gz", hash = "sha256:642586fc4740bd1cad7690f836b3321309402b20b332529f25617ff18e8e1370"}, ] jinja2 = [ {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, @@ -1287,8 +1290,8 @@ 
markupsafe = [ {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"}, ] more-itertools = [ - {file = "more-itertools-8.6.0.tar.gz", hash = "sha256:b3a9005928e5bed54076e6e549c792b306fddfe72b2d1d22dd63d42d5d3899cf"}, - {file = "more_itertools-8.6.0-py3-none-any.whl", hash = "sha256:8e1a2a43b2f2727425f2b5839587ae37093f19153dc26c0927d1048ff6557330"}, + {file = "more-itertools-8.7.0.tar.gz", hash = "sha256:c5d6da9ca3ff65220c3bfd2a8db06d698f05d4d2b9be57e1deb2be5a45019713"}, + {file = "more_itertools-8.7.0-py3-none-any.whl", hash = "sha256:5652a9ac72209ed7df8d9c15daf4e1aa0e3d2ccd3c87f8265a0673cd9cbc9ced"}, ] multidict = [ {file = "multidict-5.1.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f"}, @@ -1366,36 +1369,38 @@ pluggy = [ {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, ] poethepoet = [ - {file = "poethepoet-0.9.0-py3-none-any.whl", hash = "sha256:6b1df9a755c297d5b10749cd4713924055b41edfa62055770c8bd6b5da8e2c69"}, - {file = "poethepoet-0.9.0.tar.gz", hash = "sha256:ab2263fd7be81d16d38a4b4fe42a055d992d04421e61cad36498b1e4bd8ee2a6"}, + {file = "poethepoet-0.10.0-py3-none-any.whl", hash = "sha256:6fb3021603d4421c6fcc40072bbcf150a6c52ef70ff4d3be089b8b04e015ef5a"}, + {file = "poethepoet-0.10.0.tar.gz", hash = "sha256:70b97cb194b978dc464c70793e85e6f746cddf82b84a38bfb135946ad71ae19c"}, ] protobuf = [ - {file = "protobuf-3.14.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:629b03fd3caae7f815b0c66b41273f6b1900a579e2ccb41ef4493a4f5fb84f3a"}, - {file = "protobuf-3.14.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:5b7a637212cc9b2bcf85dd828b1178d19efdf74dbfe1ddf8cd1b8e01fdaaa7f5"}, - {file = "protobuf-3.14.0-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:43b554b9e73a07ba84ed6cf25db0ff88b1e06be610b37656e292e3cbb5437472"}, - {file = "protobuf-3.14.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:5e9806a43232a1fa0c9cf5da8dc06f6910d53e4390be1fa06f06454d888a9142"}, - {file = "protobuf-3.14.0-cp35-cp35m-win32.whl", hash = "sha256:1c51fda1bbc9634246e7be6016d860be01747354ed7015ebe38acf4452f470d2"}, - {file = "protobuf-3.14.0-cp35-cp35m-win_amd64.whl", hash = "sha256:4b74301b30513b1a7494d3055d95c714b560fbb630d8fb9956b6f27992c9f980"}, - {file = "protobuf-3.14.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:86a75477addde4918e9a1904e5c6af8d7b691f2a3f65587d73b16100fbe4c3b2"}, - {file = "protobuf-3.14.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ecc33531a213eee22ad60e0e2aaea6c8ba0021f0cce35dbf0ab03dee6e2a23a1"}, - {file = "protobuf-3.14.0-cp36-cp36m-win32.whl", hash = "sha256:72230ed56f026dd664c21d73c5db73ebba50d924d7ba6b7c0d81a121e390406e"}, - {file = "protobuf-3.14.0-cp36-cp36m-win_amd64.whl", hash = "sha256:0fc96785262042e4863b3f3b5c429d4636f10d90061e1840fce1baaf59b1a836"}, - {file = "protobuf-3.14.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4e75105c9dfe13719b7293f75bd53033108f4ba03d44e71db0ec2a0e8401eafd"}, - {file = "protobuf-3.14.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:2a7e2fe101a7ace75e9327b9c946d247749e564a267b0515cf41dfe450b69bac"}, - {file = "protobuf-3.14.0-cp37-cp37m-win32.whl", hash = "sha256:b0d5d35faeb07e22a1ddf8dce620860c8fe145426c02d1a0ae2688c6e8ede36d"}, - {file = "protobuf-3.14.0-cp37-cp37m-win_amd64.whl", hash = "sha256:8971c421dbd7aad930c9bd2694122f332350b6ccb5202a8b7b06f3f1a5c41ed5"}, - {file = "protobuf-3.14.0-cp38-cp38-macosx_10_9_x86_64.whl", 
hash = "sha256:9616f0b65a30851e62f1713336c931fcd32c057202b7ff2cfbfca0fc7d5e3043"}, - {file = "protobuf-3.14.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:22bcd2e284b3b1d969c12e84dc9b9a71701ec82d8ce975fdda19712e1cfd4e00"}, - {file = "protobuf-3.14.0-py2.py3-none-any.whl", hash = "sha256:0e247612fadda953047f53301a7b0407cb0c3cb4ae25a6fde661597a04039b3c"}, - {file = "protobuf-3.14.0.tar.gz", hash = "sha256:1d63eb389347293d8915fb47bee0951c7b5dab522a4a60118b9a18f33e21f8ce"}, + {file = "protobuf-3.15.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1771ef20e88759c4d81db213e89b7a1fc53937968e12af6603c658ee4bcbfa38"}, + {file = "protobuf-3.15.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1a66261a402d05c8ad8c1fde8631837307bf8d7e7740a4f3941fc3277c2e1528"}, + {file = "protobuf-3.15.6-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:eac23a3e56175b710f3da9a9e8e2aa571891fbec60e0c5a06db1c7b1613b5cfd"}, + {file = "protobuf-3.15.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ec220d90eda8bb7a7a1434a8aed4fe26d7e648c1a051c2885f3f5725b6aa71a"}, + {file = "protobuf-3.15.6-cp35-cp35m-win32.whl", hash = "sha256:88d8f21d1ac205eedb6dea943f8204ed08201b081dba2a966ab5612788b9bb1e"}, + {file = "protobuf-3.15.6-cp35-cp35m-win_amd64.whl", hash = "sha256:eaada29bbf087dea7d8bce4d1d604fc768749e8809e9c295922accd7c8fce4d5"}, + {file = "protobuf-3.15.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:256c0b2e338c1f3228d3280707606fe5531fde85ab9d704cde6fdeb55112531f"}, + {file = "protobuf-3.15.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:b9069e45b6e78412fba4a314ea38b4a478686060acf470d2b131b3a2c50484ec"}, + {file = "protobuf-3.15.6-cp36-cp36m-win32.whl", hash = "sha256:24f4697f57b8520c897a401b7f9a5ae45c369e22c572e305dfaf8053ecb49687"}, + {file = "protobuf-3.15.6-cp36-cp36m-win_amd64.whl", hash = "sha256:d9ed0955b794f1e5f367e27f8a8ff25501eabe34573f003f06639c366ca75f73"}, + {file = "protobuf-3.15.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:822ac7f87fc2fb9b24edd2db390538b60ef50256e421ca30d65250fad5a3d477"}, + {file = "protobuf-3.15.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:74ac159989e2b02d761188a2b6f4601ff5e494d9b9d863f5ad6e98e5e0c54328"}, + {file = "protobuf-3.15.6-cp37-cp37m-win32.whl", hash = "sha256:30fe4249a364576f9594180589c3f9c4771952014b5f77f0372923fc7bafbbe2"}, + {file = "protobuf-3.15.6-cp37-cp37m-win_amd64.whl", hash = "sha256:45a91fc6f9aa86d3effdeda6751882b02de628519ba06d7160daffde0c889ff8"}, + {file = "protobuf-3.15.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:83c7c7534f050cb25383bb817159416601d1cc46c40bc5e851ec8bbddfc34a2f"}, + {file = "protobuf-3.15.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:9ec20a6ded7d0888e767ad029dbb126e604e18db744ac0a428cf746e040ccecd"}, + {file = "protobuf-3.15.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0f2da2fcc4102b6c3b57f03c9d8d5e37c63f8bc74deaa6cb54e0cc4524a77247"}, + {file = "protobuf-3.15.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:70054ae1ce5dea7dec7357db931fcf487f40ea45b02cb719ee6af07eb1e906fb"}, + {file = "protobuf-3.15.6-py2.py3-none-any.whl", hash = "sha256:1655fc0ba7402560d749de13edbfca1ac45d1753d8f4e5292989f18f5a00c215"}, + {file = "protobuf-3.15.6.tar.gz", hash = "sha256:2b974519a2ae83aa1e31cff9018c70bbe0e303a46a598f982943c49ae1d4fcd3"}, ] py = [ {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, ] pygments = [ - {file = 
"Pygments-2.7.4-py3-none-any.whl", hash = "sha256:bc9591213a8f0e0ca1a5e68a479b4887fdc3e75d0774e5c71c31920c427de435"}, - {file = "Pygments-2.7.4.tar.gz", hash = "sha256:df49d09b498e83c1a73128295860250b0b7edd4c723a32e9bc0d295c7c2ec337"}, + {file = "Pygments-2.8.1-py3-none-any.whl", hash = "sha256:534ef71d539ae97d4c3a4cf7d6f110f214b0e687e92f9cb9d2a3b0d3101289c8"}, + {file = "Pygments-2.8.1.tar.gz", hash = "sha256:2656e1a6edcdabf4275f9a3640db59fd5de107d88e8663c5d4e9a0fa62f77f94"}, ] pyparsing = [ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, @@ -1421,55 +1426,55 @@ pytz = [ {file = "pytz-2021.1.tar.gz", hash = "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da"}, ] regex = [ - {file = "regex-2020.11.13-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8b882a78c320478b12ff024e81dc7d43c1462aa4a3341c754ee65d857a521f85"}, - {file = "regex-2020.11.13-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a63f1a07932c9686d2d416fb295ec2c01ab246e89b4d58e5fa468089cab44b70"}, - {file = "regex-2020.11.13-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:6e4b08c6f8daca7d8f07c8d24e4331ae7953333dbd09c648ed6ebd24db5a10ee"}, - {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bba349276b126947b014e50ab3316c027cac1495992f10e5682dc677b3dfa0c5"}, - {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:56e01daca75eae420bce184edd8bb341c8eebb19dd3bce7266332258f9fb9dd7"}, - {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:6a8ce43923c518c24a2579fda49f093f1397dad5d18346211e46f134fc624e31"}, - {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab79fcb02b930de09c76d024d279686ec5d532eb814fd0ed1e0051eb8bd2daa"}, - {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:9801c4c1d9ae6a70aeb2128e5b4b68c45d4f0af0d1535500884d644fa9b768c6"}, - {file = "regex-2020.11.13-cp36-cp36m-win32.whl", hash = "sha256:49cae022fa13f09be91b2c880e58e14b6da5d10639ed45ca69b85faf039f7a4e"}, - {file = "regex-2020.11.13-cp36-cp36m-win_amd64.whl", hash = "sha256:749078d1eb89484db5f34b4012092ad14b327944ee7f1c4f74d6279a6e4d1884"}, - {file = "regex-2020.11.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2f4007bff007c96a173e24dcda236e5e83bde4358a557f9ccf5e014439eae4b"}, - {file = "regex-2020.11.13-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:38c8fd190db64f513fe4e1baa59fed086ae71fa45083b6936b52d34df8f86a88"}, - {file = "regex-2020.11.13-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5862975b45d451b6db51c2e654990c1820523a5b07100fc6903e9c86575202a0"}, - {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:262c6825b309e6485ec2493ffc7e62a13cf13fb2a8b6d212f72bd53ad34118f1"}, - {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:bafb01b4688833e099d79e7efd23f99172f501a15c44f21ea2118681473fdba0"}, - {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:e32f5f3d1b1c663af7f9c4c1e72e6ffe9a78c03a31e149259f531e0fed826512"}, - {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3bddc701bdd1efa0d5264d2649588cbfda549b2899dc8d50417e47a82e1387ba"}, - {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:02951b7dacb123d8ea6da44fe45ddd084aa6777d4b2454fa0da61d569c6fa538"}, - {file = "regex-2020.11.13-cp37-cp37m-win32.whl", hash = "sha256:0d08e71e70c0237883d0bef12cad5145b84c3705e9c6a588b2a9c7080e5af2a4"}, - 
{file = "regex-2020.11.13-cp37-cp37m-win_amd64.whl", hash = "sha256:1fa7ee9c2a0e30405e21031d07d7ba8617bc590d391adfc2b7f1e8b99f46f444"}, - {file = "regex-2020.11.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:baf378ba6151f6e272824b86a774326f692bc2ef4cc5ce8d5bc76e38c813a55f"}, - {file = "regex-2020.11.13-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e3faaf10a0d1e8e23a9b51d1900b72e1635c2d5b0e1bea1c18022486a8e2e52d"}, - {file = "regex-2020.11.13-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2a11a3e90bd9901d70a5b31d7dd85114755a581a5da3fc996abfefa48aee78af"}, - {file = "regex-2020.11.13-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1ebb090a426db66dd80df8ca85adc4abfcbad8a7c2e9a5ec7513ede522e0a8f"}, - {file = "regex-2020.11.13-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:b2b1a5ddae3677d89b686e5c625fc5547c6e492bd755b520de5332773a8af06b"}, - {file = "regex-2020.11.13-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2c99e97d388cd0a8d30f7c514d67887d8021541b875baf09791a3baad48bb4f8"}, - {file = "regex-2020.11.13-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:c084582d4215593f2f1d28b65d2a2f3aceff8342aa85afd7be23a9cad74a0de5"}, - {file = "regex-2020.11.13-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:a3d748383762e56337c39ab35c6ed4deb88df5326f97a38946ddd19028ecce6b"}, - {file = "regex-2020.11.13-cp38-cp38-win32.whl", hash = "sha256:7913bd25f4ab274ba37bc97ad0e21c31004224ccb02765ad984eef43e04acc6c"}, - {file = "regex-2020.11.13-cp38-cp38-win_amd64.whl", hash = "sha256:6c54ce4b5d61a7129bad5c5dc279e222afd00e721bf92f9ef09e4fae28755683"}, - {file = "regex-2020.11.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1862a9d9194fae76a7aaf0150d5f2a8ec1da89e8b55890b1786b8f88a0f619dc"}, - {file = "regex-2020.11.13-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4902e6aa086cbb224241adbc2f06235927d5cdacffb2425c73e6570e8d862364"}, - {file = "regex-2020.11.13-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7a25fcbeae08f96a754b45bdc050e1fb94b95cab046bf56b016c25e9ab127b3e"}, - {file = "regex-2020.11.13-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:d2d8ce12b7c12c87e41123997ebaf1a5767a5be3ec545f64675388970f415e2e"}, - {file = "regex-2020.11.13-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f7d29a6fc4760300f86ae329e3b6ca28ea9c20823df123a2ea8693e967b29917"}, - {file = "regex-2020.11.13-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:717881211f46de3ab130b58ec0908267961fadc06e44f974466d1887f865bd5b"}, - {file = "regex-2020.11.13-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3128e30d83f2e70b0bed9b2a34e92707d0877e460b402faca908c6667092ada9"}, - {file = "regex-2020.11.13-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8f6a2229e8ad946e36815f2a03386bb8353d4bde368fdf8ca5f0cb97264d3b5c"}, - {file = "regex-2020.11.13-cp39-cp39-win32.whl", hash = "sha256:f8f295db00ef5f8bae530fc39af0b40486ca6068733fb860b42115052206466f"}, - {file = "regex-2020.11.13-cp39-cp39-win_amd64.whl", hash = "sha256:a15f64ae3a027b64496a71ab1f722355e570c3fac5ba2801cafce846bf5af01d"}, - {file = "regex-2020.11.13.tar.gz", hash = "sha256:83d6b356e116ca119db8e7c6fc2983289d87b27b3fac238cfe5dca529d884562"}, + {file = "regex-2021.3.17-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b97ec5d299c10d96617cc851b2e0f81ba5d9d6248413cd374ef7f3a8871ee4a6"}, + {file = "regex-2021.3.17-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:cb4ee827857a5ad9b8ae34d3c8cc51151cb4a3fe082c12ec20ec73e63cc7c6f0"}, + {file = "regex-2021.3.17-cp36-cp36m-manylinux1_x86_64.whl", hash = 
"sha256:633497504e2a485a70a3268d4fc403fe3063a50a50eed1039083e9471ad0101c"}, + {file = "regex-2021.3.17-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:a59a2ee329b3de764b21495d78c92ab00b4ea79acef0f7ae8c1067f773570afa"}, + {file = "regex-2021.3.17-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:f85d6f41e34f6a2d1607e312820971872944f1661a73d33e1e82d35ea3305e14"}, + {file = "regex-2021.3.17-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:4651f839dbde0816798e698626af6a2469eee6d9964824bb5386091255a1694f"}, + {file = "regex-2021.3.17-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:39c44532d0e4f1639a89e52355b949573e1e2c5116106a395642cbbae0ff9bcd"}, + {file = "regex-2021.3.17-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:3d9a7e215e02bd7646a91fb8bcba30bc55fd42a719d6b35cf80e5bae31d9134e"}, + {file = "regex-2021.3.17-cp36-cp36m-win32.whl", hash = "sha256:159fac1a4731409c830d32913f13f68346d6b8e39650ed5d704a9ce2f9ef9cb3"}, + {file = "regex-2021.3.17-cp36-cp36m-win_amd64.whl", hash = "sha256:13f50969028e81765ed2a1c5fcfdc246c245cf8d47986d5172e82ab1a0c42ee5"}, + {file = "regex-2021.3.17-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b9d8d286c53fe0cbc6d20bf3d583cabcd1499d89034524e3b94c93a5ab85ca90"}, + {file = "regex-2021.3.17-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:201e2619a77b21a7780580ab7b5ce43835e242d3e20fef50f66a8df0542e437f"}, + {file = "regex-2021.3.17-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d47d359545b0ccad29d572ecd52c9da945de7cd6cf9c0cfcb0269f76d3555689"}, + {file = "regex-2021.3.17-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:ea2f41445852c660ba7c3ebf7d70b3779b20d9ca8ba54485a17740db49f46932"}, + {file = "regex-2021.3.17-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:486a5f8e11e1f5bbfcad87f7c7745eb14796642323e7e1829a331f87a713daaa"}, + {file = "regex-2021.3.17-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:18e25e0afe1cf0f62781a150c1454b2113785401ba285c745acf10c8ca8917df"}, + {file = "regex-2021.3.17-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:a2ee026f4156789df8644d23ef423e6194fad0bc53575534101bb1de5d67e8ce"}, + {file = "regex-2021.3.17-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:4c0788010a93ace8a174d73e7c6c9d3e6e3b7ad99a453c8ee8c975ddd9965643"}, + {file = "regex-2021.3.17-cp37-cp37m-win32.whl", hash = "sha256:575a832e09d237ae5fedb825a7a5bc6a116090dd57d6417d4f3b75121c73e3be"}, + {file = "regex-2021.3.17-cp37-cp37m-win_amd64.whl", hash = "sha256:8e65e3e4c6feadf6770e2ad89ad3deb524bcb03d8dc679f381d0568c024e0deb"}, + {file = "regex-2021.3.17-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a0df9a0ad2aad49ea3c7f65edd2ffb3d5c59589b85992a6006354f6fb109bb18"}, + {file = "regex-2021.3.17-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b98bc9db003f1079caf07b610377ed1ac2e2c11acc2bea4892e28cc5b509d8d5"}, + {file = "regex-2021.3.17-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:808404898e9a765e4058bf3d7607d0629000e0a14a6782ccbb089296b76fa8fe"}, + {file = "regex-2021.3.17-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:5770a51180d85ea468234bc7987f5597803a4c3d7463e7323322fe4a1b181578"}, + {file = "regex-2021.3.17-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:976a54d44fd043d958a69b18705a910a8376196c6b6ee5f2596ffc11bff4420d"}, + {file = "regex-2021.3.17-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:63f3ca8451e5ff7133ffbec9eda641aeab2001be1a01878990f6c87e3c44b9d5"}, + {file = "regex-2021.3.17-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:bcd945175c29a672f13fce13a11893556cd440e37c1b643d6eeab1988c8b209c"}, + {file = 
"regex-2021.3.17-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:3d9356add82cff75413bec360c1eca3e58db4a9f5dafa1f19650958a81e3249d"}, + {file = "regex-2021.3.17-cp38-cp38-win32.whl", hash = "sha256:f5d0c921c99297354cecc5a416ee4280bd3f20fd81b9fb671ca6be71499c3fdf"}, + {file = "regex-2021.3.17-cp38-cp38-win_amd64.whl", hash = "sha256:14de88eda0976020528efc92d0a1f8830e2fb0de2ae6005a6fc4e062553031fa"}, + {file = "regex-2021.3.17-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4c2e364491406b7888c2ad4428245fc56c327e34a5dfe58fd40df272b3c3dab3"}, + {file = "regex-2021.3.17-cp39-cp39-manylinux1_i686.whl", hash = "sha256:8bd4f91f3fb1c9b1380d6894bd5b4a519409135bec14c0c80151e58394a4e88a"}, + {file = "regex-2021.3.17-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:882f53afe31ef0425b405a3f601c0009b44206ea7f55ee1c606aad3cc213a52c"}, + {file = "regex-2021.3.17-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:07ef35301b4484bce843831e7039a84e19d8d33b3f8b2f9aab86c376813d0139"}, + {file = "regex-2021.3.17-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:360a01b5fa2ad35b3113ae0c07fb544ad180603fa3b1f074f52d98c1096fa15e"}, + {file = "regex-2021.3.17-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:709f65bb2fa9825f09892617d01246002097f8f9b6dde8d1bb4083cf554701ba"}, + {file = "regex-2021.3.17-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:c66221e947d7207457f8b6f42b12f613b09efa9669f65a587a2a71f6a0e4d106"}, + {file = "regex-2021.3.17-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:c782da0e45aff131f0bed6e66fbcfa589ff2862fc719b83a88640daa01a5aff7"}, + {file = "regex-2021.3.17-cp39-cp39-win32.whl", hash = "sha256:dc9963aacb7da5177e40874585d7407c0f93fb9d7518ec58b86e562f633f36cd"}, + {file = "regex-2021.3.17-cp39-cp39-win_amd64.whl", hash = "sha256:a0d04128e005142260de3733591ddf476e4902c0c23c1af237d9acf3c96e1b38"}, + {file = "regex-2021.3.17.tar.gz", hash = "sha256:4b8a1fb724904139149a43e172850f35aa6ea97fb0545244dc0b805e0154ed68"}, ] requests = [ {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, ] rich = [ - {file = "rich-9.10.0-py3-none-any.whl", hash = "sha256:3070d53e3a93864de351c1091af1deb25f41e6051b33e485d4626b591c0cfdb3"}, - {file = "rich-9.10.0.tar.gz", hash = "sha256:e0f2db62a52536ee32f6f584a47536465872cae2b94887cf1f080fb9eaa13eb2"}, + {file = "rich-9.13.0-py3-none-any.whl", hash = "sha256:9004f6449c89abadf689dad6f92393e760b8c3a8a8c4ea6d8d474066307c0e66"}, + {file = "rich-9.13.0.tar.gz", hash = "sha256:d59e94a0e3e686f0d268fe5c7060baa1bd6744abca71b45351f5850a3aaa6764"}, ] six = [ {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, @@ -1520,8 +1525,8 @@ tomlkit = [ {file = "tomlkit-0.7.0.tar.gz", hash = "sha256:ac57f29693fab3e309ea789252fcce3061e19110085aa31af5446ca749325618"}, ] tox = [ - {file = "tox-3.21.3-py2.py3-none-any.whl", hash = "sha256:76df3db6eee929bb62bdbacca5bb6bc840669d98e86a015b7a57b7df0a6eaf8b"}, - {file = "tox-3.21.3.tar.gz", hash = "sha256:854e6e4a71c614b488f81cb88df3b92edcb1a9ec43d4102e6289e9669bbf7f18"}, + {file = "tox-3.23.0-py2.py3-none-any.whl", hash = "sha256:e007673f3595cede9b17a7c4962389e4305d4a3682a6c5a4159a1453b4f326aa"}, + {file = "tox-3.23.0.tar.gz", hash = "sha256:05a4dbd5e4d3d8269b72b55600f0b0303e2eb47ad5c6fe76d3576f4c58d93661"}, ] typed-ast = [ {file = 
"typed_ast-1.4.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70"}, @@ -1565,18 +1570,18 @@ typing-extensions = [ {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, ] urllib3 = [ - {file = "urllib3-1.26.3-py2.py3-none-any.whl", hash = "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80"}, - {file = "urllib3-1.26.3.tar.gz", hash = "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73"}, + {file = "urllib3-1.26.4-py2.py3-none-any.whl", hash = "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df"}, + {file = "urllib3-1.26.4.tar.gz", hash = "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937"}, ] virtualenv = [ - {file = "virtualenv-20.4.2-py2.py3-none-any.whl", hash = "sha256:2be72df684b74df0ea47679a7df93fd0e04e72520022c57b479d8f881485dbe3"}, - {file = "virtualenv-20.4.2.tar.gz", hash = "sha256:147b43894e51dd6bba882cf9c282447f780e2251cd35172403745fc381a0a80d"}, + {file = "virtualenv-20.4.3-py2.py3-none-any.whl", hash = "sha256:83f95875d382c7abafe06bd2a4cdd1b363e1bb77e02f155ebe8ac082a916b37c"}, + {file = "virtualenv-20.4.3.tar.gz", hash = "sha256:49ec4eb4c224c6f7dd81bb6d0a28a09ecae5894f4e593c89b0db0885f565a107"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, ] zipp = [ - {file = "zipp-3.4.0-py3-none-any.whl", hash = "sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108"}, - {file = "zipp-3.4.0.tar.gz", hash = "sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb"}, + {file = "zipp-3.4.1-py3-none-any.whl", hash = "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, + {file = "zipp-3.4.1.tar.gz", hash = "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76"}, ] From 07a2e9d058612fef675b5d1ffb2143efcc072be0 Mon Sep 17 00:00:00 2001 From: gobot1234 Date: Sun, 21 Mar 2021 00:03:18 +0000 Subject: [PATCH 39/46] Make the CLI better behaved --- src/betterproto/plugin/cli/commands.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/src/betterproto/plugin/cli/commands.py b/src/betterproto/plugin/cli/commands.py index 87814656c..f837c01d7 100644 --- a/src/betterproto/plugin/cli/commands.py +++ b/src/betterproto/plugin/cli/commands.py @@ -8,7 +8,7 @@ from ..models import monkey_patch_oneof_index from . 
import DEFAULT_LINE_LENGTH, USE_PROTOC, VERBOSE, utils -from .errors import CLIError, ProtobufSyntaxError, UnusedImport +from .errors import CLIError, ProtobufSyntaxError from .runner import compile_protobufs monkey_patch_oneof_index() @@ -101,9 +101,16 @@ async def compile( file=sys.stderr, ) - if not errors or all(isinstance(e, Warning) for e in errors): + has_warnings = all(isinstance(e, Warning) for e in errors) + if not errors or has_warnings: rich.print( f"[bold green]Finished generating output for " f"{len(protos)} file{'s' if len(protos) != 1 else ''}, " f"output is in {output.as_posix()}" ) + + if errors: + if not has_warnings: + exit(2) + exit(1) + exit(0) From 23747c581c549a967cc1ebe4c21d3c0040982753 Mon Sep 17 00:00:00 2001 From: James Date: Fri, 2 Apr 2021 14:45:06 +0100 Subject: [PATCH 40/46] Rebase stuff --- src/betterproto/plugin/main.py | 35 +++++++++++++------------------- src/betterproto/plugin/models.py | 8 +++----- tests/test_get_ref_type.py | 4 +--- tests/util.py | 11 +++++----- 4 files changed, 23 insertions(+), 35 deletions(-) diff --git a/src/betterproto/plugin/main.py b/src/betterproto/plugin/main.py index 22838c6ef..365169bde 100755 --- a/src/betterproto/plugin/main.py +++ b/src/betterproto/plugin/main.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -import os import sys import rich @@ -15,27 +14,21 @@ def main() -> None: # Read request message from stdin data = sys.stdin.buffer.read() - if os.getenv("USING_BETTERPROTO_CLI") == "True": - sys.stderr.buffer.write(os.environ["BETTERPROTO_STOP_KEYWORD"].encode()) - sys.stderr.buffer.write(data) + # Apply Work around for proto2/3 difference in protoc messages + monkey_patch_oneof_index() - sys.stdout.buffer.write(b"") - else: - # Apply Work around for proto2/3 difference in protoc messages - monkey_patch_oneof_index() + # Parse request + request = CodeGeneratorRequest().parse(data) - # Parse request - request = CodeGeneratorRequest().parse(data) + rich.print( + "Direct invocation of the protoc plugin is depreciated over using the CLI\n" + "To do so you just need to type:\n" + f"betterproto compile {' '.join(request.file_to_generate)}", + file=sys.stderr, + ) - rich.print( - "Direct invocation of the protoc plugin is depreciated over using the CLI\n" - "To do so you just need to type:\n" - f"betterproto compile {' '.join(request.file_to_generate)}", - file=sys.stderr, - ) + # Generate code + response = generate_code(request) - # Generate code - response = generate_code(request) - - # Write to stdout - sys.stdout.buffer.write(bytes(response)) + # Write to stdout + sys.stdout.buffer.write(bytes(response)) diff --git a/src/betterproto/plugin/models.py b/src/betterproto/plugin/models.py index dab6572ce..5c8b00ff2 100644 --- a/src/betterproto/plugin/models.py +++ b/src/betterproto/plugin/models.py @@ -32,11 +32,9 @@ import re import textwrap from dataclasses import dataclass, field -from typing import Dict, Iterable, Iterator, List, Optional, Set, Text, Type, Union -import sys - -import betterproto +from typing import Dict, Iterator, List, Optional, Set, Text, Type, Union +from .. 
import Message, which_one_of from ..casing import sanitize_name from ..compile.importing import get_type_reference, parse_source_type_name from ..compile.naming import ( @@ -155,7 +153,7 @@ class ProtoContentBase: source_file: FileDescriptorProto path: List[int] comment_indent: int = 4 - parent: Union["betterproto.Message", "OutputTemplate"] + parent: Union["Message", "OutputTemplate"] def __post_init__(self) -> None: """Checks that no fake default fields were left as placeholders.""" diff --git a/tests/test_get_ref_type.py b/tests/test_get_ref_type.py index cbee4caa5..fa2e77bf1 100644 --- a/tests/test_get_ref_type.py +++ b/tests/test_get_ref_type.py @@ -35,9 +35,7 @@ def test_reference_google_wellknown_types_non_wrappers( name = get_type_reference(package="", imports=imports, source_type=google_type) assert name == expected_name - assert imports.__contains__( - expected_import - ), f"{expected_import} not found in {imports}" + assert expected_import in imports, f"{expected_import} not found in {imports}" @pytest.mark.parametrize( diff --git a/tests/util.py b/tests/util.py index 4a0b67e9d..8469f6f94 100644 --- a/tests/util.py +++ b/tests/util.py @@ -3,7 +3,7 @@ import pathlib from pathlib import Path from types import ModuleType -from typing import Callable, Generator, List, Optional, Union +from typing import Callable, Generator, List, Optional os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python" @@ -24,10 +24,10 @@ def get_test_case_json_data(test_case_name: str, *json_file_names: str) -> List[ A list of all files found in "inputs_path/test_case_name" with names matching f"{test_case_name}.json" or f"{test_case_name}_*.json", OR given by json_file_names """ - test_case_dir = inputs_path.joinpath(test_case_name) + test_case_dir = inputs_path / test_case_name possible_file_paths = [ - *(test_case_dir.joinpath(json_file_name) for json_file_name in json_file_names), - test_case_dir.joinpath(f"{test_case_name}.json"), + *(test_case_dir / json_file_name for json_file_name in json_file_names), + test_case_dir / f"{test_case_name}.json", *test_case_dir.glob(f"{test_case_name}_*.json"), ] @@ -35,8 +35,7 @@ def get_test_case_json_data(test_case_name: str, *json_file_names: str) -> List[ for test_data_file_path in possible_file_paths: if not test_data_file_path.exists(): continue - with test_data_file_path.open("r") as fh: - result.append(fh.read()) + result.append(test_data_file_path.read_text()) return result From a72b907c1d78a6a562c3bb6f08fe51f2ad6032cb Mon Sep 17 00:00:00 2001 From: James Date: Sun, 18 Jul 2021 19:25:51 +0100 Subject: [PATCH 41/46] Pick this back up --- pyproject.toml | 10 +- src/betterproto/plugin/__init__.py | 15 +-- src/betterproto/plugin/cli/__init__.py | 13 --- src/betterproto/plugin/cli/commands.py | 21 ++-- src/betterproto/plugin/cli/errors.py | 53 ---------- src/betterproto/plugin/cli/runner.py | 138 +++++-------------------- src/betterproto/plugin/cli/utils.py | 60 ++--------- src/betterproto/plugin/main.py | 3 + 8 files changed, 59 insertions(+), 254 deletions(-) delete mode 100644 src/betterproto/plugin/cli/errors.py diff --git a/pyproject.toml b/pyproject.toml index 4e39f7daa..b31129a5e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,13 +38,14 @@ tox = "^3.15.1" sphinx = "3.1.2" sphinx-rtd-theme = "0.5.0" asv = "^0.4.2" +protobuf_parser = "*" [tool.poetry.scripts] betterproto = "betterproto:__main__.main" protoc-gen-python_betterproto = "betterproto.plugin:main" [tool.poetry.extras] -compiler = ["black", "jinja2", "typer", "rich"] +compiler = 
["black", "jinja2", "typer", "rich", "protobuf_parser"] [tool.poe.tasks] # Dev workflow tasks @@ -57,12 +58,7 @@ bench = { shell = "asv run master^! && asv run HEAD^! && asv compare maste clean = { cmd = "rm -rf .asv .coverage .mypy_cache .pytest_cache dist betterproto.egg-info **/__pycache__ tests/output_*", help = "Clean out generated files from the workspace" } generate_lib.cmd = """ -protoc - --plugin=protoc-gen-custom=src/betterproto/plugin/main.py - --custom_opt=INCLUDE_GOOGLE - --custom_out=src/betterproto/lib - -I /usr/local/include/ - /usr/local/include/google/protobuf/**/*.proto +betterproto compile /usr/local/include/google/protobuf/**/*.proto """ generate_lib.help = "Regenerate the types in betterproto.lib.google" diff --git a/src/betterproto/plugin/__init__.py b/src/betterproto/plugin/__init__.py index da00d3ff3..a8951c02c 100644 --- a/src/betterproto/plugin/__init__.py +++ b/src/betterproto/plugin/__init__.py @@ -5,16 +5,16 @@ from typing import Type IMPORT_ERROR_MESSAGE = ( - "Unable to import `{0.name}` from betterproto plugin! Please ensure that you've installed " - 'betterproto as `pip install "betterproto[compiler]"` so that compiler dependencies are ' - "included." + "Unable to import `{0.name}` from betterproto plugin! Please ensure that you've " + "installed betterproto as `pip install \"betterproto[compiler]\"` so that compiler " + "dependencies are included." ) STDLIB_MODULES = getattr( sys, "builtin_module_names", [ - p.with_suffix("").name + p.stem for p in Path(traceback.__file__).parent.iterdir() if p.suffix == ".py" or p.is_dir() ], @@ -32,7 +32,8 @@ def import_exception_hook( if the module imported is not found and the exception is raised in this sub module """ - module = list(traceback.walk_tb(tb))[-1][0].f_globals.get("__name__", "__main__") + bottom_frame = list(traceback.walk_tb(tb))[-1][0] + module = bottom_frame.f_globals.get("__name__", "__main__") if ( not module.startswith(__name__) or not isinstance(value, ImportError) @@ -45,6 +46,8 @@ def import_exception_hook( exit(1) -sys.excepthook = import_exception_hook +def install_exception_hook(): + sys.excepthook = import_exception_hook + from .main import main diff --git a/src/betterproto/plugin/cli/__init__.py b/src/betterproto/plugin/cli/__init__.py index c5508b617..117eed931 100644 --- a/src/betterproto/plugin/cli/__init__.py +++ b/src/betterproto/plugin/cli/__init__.py @@ -1,18 +1,5 @@ -import asyncio -import platform - -try: - import grpc_tools.protoc -except ImportError: - USE_PROTOC = True -else: - USE_PROTOC = False - VERBOSE = False from black import DEFAULT_LINE_LENGTH as DEFAULT_LINE_LENGTH # noqa from .commands import app from .runner import compile_protobufs - -if platform.system() == "Windows": - asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy()) diff --git a/src/betterproto/plugin/cli/commands.py b/src/betterproto/plugin/cli/commands.py index f837c01d7..e96f90310 100644 --- a/src/betterproto/plugin/cli/commands.py +++ b/src/betterproto/plugin/cli/commands.py @@ -2,13 +2,16 @@ from pathlib import Path from typing import List, Optional +from .. import install_exception_hook +install_exception_hook() + +import protobuf_parser import rich import typer from rich.syntax import Syntax from ..models import monkey_patch_oneof_index -from . import DEFAULT_LINE_LENGTH, USE_PROTOC, VERBOSE, utils -from .errors import CLIError, ProtobufSyntaxError +from . 
import DEFAULT_LINE_LENGTH, VERBOSE, utils from .runner import compile_protobufs monkey_patch_oneof_index() @@ -28,13 +31,6 @@ async def compile( verbose: bool = typer.Option( VERBOSE, "-v", "--verbose", help="Whether or not to be verbose" ), - protoc: bool = typer.Option( - USE_PROTOC, - "-p", - "--protoc", - help="Whether or not to use protoc to compile the protobufs if this is false " - "it will attempt to use grpc instead", - ), line_length: int = typer.Option( DEFAULT_LINE_LENGTH, "-l", @@ -72,14 +68,13 @@ async def compile( *protos, output=output, verbose=verbose, - use_protoc=protoc, generate_services=generate_services, line_length=line_length, from_cli=True, ) for error in errors: - if isinstance(error, ProtobufSyntaxError): + if isinstance(error, SyntaxError): rich.print( f"[red]File {str(error.file).strip()}:\n", Syntax( @@ -93,10 +88,10 @@ async def compile( ) elif isinstance(error, Warning): rich.print(f"Warning: {error}", file=sys.stderr) - elif isinstance(error, CLIError): + elif isinstance(error, protobuf_parser.Error): failed_files = "\n".join(f" - {file}" for file in protos) rich.print( - f"[red]{'Protoc' if protoc else 'GRPC'} failed to generate outputs for:\n\n" + f"[red]Protoc failed to generate outputs for:\n\n" f"{failed_files}\n\nSee the output for the issue:\n{' '.join(error.args)}[red]", file=sys.stderr, ) diff --git a/src/betterproto/plugin/cli/errors.py b/src/betterproto/plugin/cli/errors.py deleted file mode 100644 index 858ba81fb..000000000 --- a/src/betterproto/plugin/cli/errors.py +++ /dev/null @@ -1,53 +0,0 @@ -from dataclasses import dataclass -from pathlib import Path -from typing import Union - - -class CLIError(Exception): - """The base class for all exceptions when compiling a file""" - - -class CompilerError(CLIError): - ... - - -class ProtobufSyntaxError(SyntaxError, CompilerError): - """ - Attributes - ---------- - msg: :class:`str` - The message given by protoc e.g. "Expected top-level statement (e.g. "message")." - file: :class:`.Path` - The file that had the syntax error. - lineno: :class:`int` - The line number on which the syntax error occurs. - offset: :class:`int` - The offset along the :attr:`lineno` that the syntax error occurs. - """ - - def __init__(self, msg: str, file: Path, lineno: int, offset: int): - text = file.read_text().splitlines()[lineno - 1] - super().__init__(msg, (str(file), lineno, offset, text)) - self.file = file - - -@dataclass -class UnusedImport(CLIError, ImportWarning): - """The warning emitted when an unused import is detected by protoc. - - Attributes - ---------- - msg: :class:`str` - The message given by protoc e.g. "Expected top-level statement (e.g. "message")." - file: :class:`.Path` - The file that had the warning issued for. - used_import: :class:`.Path` - The unused import file. 
- """ - - msg: str - file: Path - unused_import: Union[Path, str] - - def __str__(self): - return f"Import {self.unused_import} is unused in {self.file}" diff --git a/src/betterproto/plugin/cli/runner.py b/src/betterproto/plugin/cli/runner.py index db6914f8c..7a7876d0e 100644 --- a/src/betterproto/plugin/cli/runner.py +++ b/src/betterproto/plugin/cli/runner.py @@ -1,25 +1,22 @@ import asyncio import functools -import os -import re -import secrets + from concurrent.futures import ProcessPoolExecutor -from typing import TYPE_CHECKING, Any, List, Tuple +from typing import TYPE_CHECKING, Any, List + +import protobuf_parser +from ...lib.google.protobuf import FileDescriptorProto from ...lib.google.protobuf.compiler import ( CodeGeneratorRequest, CodeGeneratorResponseFile, - CodeGeneratorResponse, ) from ..parser import generate_code -from . import USE_PROTOC, utils -from .errors import CLIError, CompilerError, ProtobufSyntaxError, UnusedImport +from . import utils if TYPE_CHECKING: from pathlib import Path -DEFAULT_IMPLEMENTATION = "betterproto_" - def write_file(output: "Path", file: CodeGeneratorResponseFile) -> None: path = (output / file.name).resolve() @@ -27,67 +24,12 @@ def write_file(output: "Path", file: CodeGeneratorResponseFile) -> None: path.write_text(file.content) -def handle_error(data: bytes, files: Tuple["Path", ...]) -> List[CLIError]: - errors = [] - matches = re.finditer( - rb"^(?P.+):(?P\d+):(?P\d+): (?P.*)", - data, - ) - if not matches: - return [CompilerError(data.decode().strip())] - - for match in matches: - file = utils.find( - lambda f: f.as_posix().endswith(match["filename"].decode()), files - ) - - if match["message"].startswith(b"warning: "): - import_matches = list( - re.finditer( - rb"warning: Import (?P.+) is unused\.", - match["message"], - ) - ) - if import_matches: - for import_match in import_matches: - unused_import = utils.find( - lambda f: file.as_posix().endswith( - import_match["unused_import"].decode() - ), - files, - ) - if unused_import is None: - unused_import = import_match["unused_import"].decode() - warning = UnusedImport( - match["message"].decode().strip(), file, unused_import - ) - else: - warning = Warning( - match["message"].lstrip(b"warning: ").strip().decode() - ) - - errors.append(warning) - continue - - errors.append( - ProtobufSyntaxError( - match["message"].decode().strip(), - file, - int(match["lineno"]), - int(match["offset"]), - ) - ) - - return errors - - async def compile_protobufs( *files: "Path", output: "Path", - use_protoc: bool = USE_PROTOC, use_betterproto: bool = True, **kwargs: Any, -) -> List[CLIError]: +) -> List[protobuf_parser.Error]: """ A programmatic way to compile protobufs. @@ -105,50 +47,20 @@ async def compile_protobufs( List[:class:`CLIError`] A of exceptions from protoc. 
""" - implementation = DEFAULT_IMPLEMENTATION if use_betterproto else "" - command = utils.generate_command( - *files, output=output, use_protoc=use_protoc, implementation=implementation - ) - - secret_word = secrets.token_hex(256) - - process = await asyncio.create_subprocess_shell( - command, - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.PIPE, - env={ - "USING_BETTERPROTO_CLI": str(kwargs.get("from_cli", False)), - "BETTERPROTO_STOP_KEYWORD": secret_word, - **os.environ, - }, - ) + loop = asyncio.get_event_loop() if use_betterproto: - stderr = await process.stderr.read() - if stderr.find(secret_word.encode()) == -1: - return handle_error(stderr, files) - - try: - stderr, data = stderr.split(secret_word.encode()) - except TypeError: - return await compile_protobufs( - *files, - output=output, - use_protoc=use_protoc, - use_betterproto=use_betterproto, - **kwargs, - ) # you've exceptionally un/lucky - - if stderr: - return handle_error(stderr, files) - - request = CodeGeneratorRequest().parse(data) + files, errors = await utils.to_thread(protobuf_parser.parse, *files) + if errors: + return errors + request = CodeGeneratorRequest( + proto_file=[ + FileDescriptorProto().parse(file) for file in files + ] + ) - loop = asyncio.get_event_loop() # Generate code - response: CodeGeneratorResponse = await loop.run_in_executor( - None, functools.partial(generate_code, request, **kwargs) - ) + response = await utils.to_thread(generate_code, request, **kwargs) with ProcessPoolExecutor() as process_pool: # write multiple files concurrently @@ -161,12 +73,12 @@ async def compile_protobufs( ) ) - stdout, stderr = await process.communicate() - - if stderr: - return handle_error(stderr, files) - - if process.returncode != 0: - return [CompilerError(stderr.decode())] + else: + errors = await utils.to_thread( + protobuf_parser.run, + *(f'"{file.as_posix()}"' for file in files), + proto_path=files[0].parent.as_posix(), + python_out=output.as_posix() + ) - return [] + return errors diff --git a/src/betterproto/plugin/cli/utils.py b/src/betterproto/plugin/cli/utils.py index a418556c0..4830a41c6 100644 --- a/src/betterproto/plugin/cli/utils.py +++ b/src/betterproto/plugin/cli/utils.py @@ -1,30 +1,13 @@ import asyncio import functools -import platform import sys from collections import defaultdict from pathlib import Path from typing import Any, Awaitable, Callable, Iterable, List, Optional, Set, TypeVar -from . 
import USE_PROTOC T = TypeVar("T") -INCLUDE = ( - "any.proto", - "api.proto", - "compiler/plugin.proto", - "descriptor.proto", - "duration.proto", - "empty.proto", - "field_mask.proto", - "source_context.proto", - "struct.proto", - "timestamp.proto", - "type.proto", - "wrappers.proto", -) - def get_files(paths: List[Path]) -> "defaultdict[Path, Set[Path]]": """Return a list of files ready for :func:`generate_command`""" @@ -33,9 +16,8 @@ def get_files(paths: List[Path]) -> "defaultdict[Path, Set[Path]]": for path in paths: if not path.is_absolute(): path = (Path.cwd() / path).resolve() - if str(path).startswith("/usr") and "include/google/protobuf" in str(path): - new_paths[path].update(path / proto for proto in INCLUDE) - elif path.is_dir(): + + if path.is_dir(): new_paths[path].update( sorted(path.glob("*.proto")) ) # ensure order for files when debugging compilation errors @@ -45,36 +27,7 @@ def get_files(paths: List[Path]) -> "defaultdict[Path, Set[Path]]": return new_paths -def generate_command( - *files: Path, - output: Path, - use_protoc: bool = USE_PROTOC, - implementation: str = "betterproto_", -) -> str: - command = [ - f"--proto_path={files[0].parent.as_posix()}", - f"--python_{implementation}out={output.as_posix()}", - *[ - f'"{file.as_posix()}"' for file in files - ], # ensure paths with spaces in the name get parsed correctly - ] - if use_protoc: - command.insert(0, "protoc") - else: - command = [ - sys.executable, - "-m", - "grpc_tools.protoc", - *command, - ] - - return " ".join(command) - - def run_sync(func: Callable[..., Awaitable[T]]) -> Callable[..., T]: - if platform.system() == "Windows": - asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy()) - @functools.wraps(func) def wrapper(*args: Any, **kwargs: Any) -> T: coro = func(*args, **kwargs) @@ -95,3 +48,12 @@ def find(predicate: Callable[[T], bool], iterable: Iterable[T]) -> Optional[T]: for i in iterable: if predicate(i): return i + + +if sys.version_info >= (3, 9): + to_thread = asyncio.to_thread +else: + async def to_thread(func: Callable[..., T], *args: Any, **kwargs: Any) -> T: + loop = asyncio.get_running_loop() + func_call = functools.partial(func, *args, **kwargs) + return await loop.run_in_executor(None, func_call) diff --git a/src/betterproto/plugin/main.py b/src/betterproto/plugin/main.py index 365169bde..8fa811521 100755 --- a/src/betterproto/plugin/main.py +++ b/src/betterproto/plugin/main.py @@ -2,6 +2,9 @@ import sys +from . import install_exception_hook +install_exception_hook() + import rich from ..lib.google.protobuf.compiler import CodeGeneratorRequest From 329d25d9aa8e7b82ed4f3d9c8d14113f9e221bc1 Mon Sep 17 00:00:00 2001 From: James Date: Fri, 6 Aug 2021 15:15:14 +0100 Subject: [PATCH 42/46] More stuff --- src/betterproto/plugin/cli/commands.py | 17 +++++++++++------ src/betterproto/plugin/cli/runner.py | 23 +++++++---------------- 2 files changed, 18 insertions(+), 22 deletions(-) diff --git a/src/betterproto/plugin/cli/commands.py b/src/betterproto/plugin/cli/commands.py index e96f90310..0b64276fc 100644 --- a/src/betterproto/plugin/cli/commands.py +++ b/src/betterproto/plugin/cli/commands.py @@ -10,6 +10,7 @@ import typer from rich.syntax import Syntax +from ... import __version__ from ..models import monkey_patch_oneof_index from . 
import DEFAULT_LINE_LENGTH, VERBOSE, utils from .runner import compile_protobufs @@ -25,6 +26,11 @@ def callback(ctx: typer.Context) -> None: rich.print(ctx.get_help()) +@app.command() +def version(ctx: typer.Context) -> None: + rich.print("betterproto version:", __version__) + + @app.command(context_settings={"help_option_names": ["-h", "--help"]}) @utils.run_sync async def compile( @@ -74,15 +80,14 @@ async def compile( ) for error in errors: - if isinstance(error, SyntaxError): + if isinstance(error, protobuf_parser.SyntaxError): rich.print( f"[red]File {str(error.file).strip()}:\n", - Syntax( - error.file.read_text(), - "proto", + Syntax.from_path( + error.file, line_numbers=True, line_range=(max(error.lineno - 5, 0), error.lineno), - ), # TODO switch to .from_path but it appears to be bugged and doesnt render properly + ), f"{' ' * (error.offset + 3)}^\nSyntaxError: {error.msg}[red]", file=sys.stderr, ) @@ -92,7 +97,7 @@ async def compile( failed_files = "\n".join(f" - {file}" for file in protos) rich.print( f"[red]Protoc failed to generate outputs for:\n\n" - f"{failed_files}\n\nSee the output for the issue:\n{' '.join(error.args)}[red]", + f"{failed_files}\n\nSee the output for the issue:\n{error}[red]", file=sys.stderr, ) diff --git a/src/betterproto/plugin/cli/runner.py b/src/betterproto/plugin/cli/runner.py index 7a7876d0e..a33172876 100644 --- a/src/betterproto/plugin/cli/runner.py +++ b/src/betterproto/plugin/cli/runner.py @@ -1,7 +1,4 @@ import asyncio -import functools - -from concurrent.futures import ProcessPoolExecutor from typing import TYPE_CHECKING, Any, List import protobuf_parser @@ -35,17 +32,16 @@ async def compile_protobufs( Parameters ---------- - *files: :class:`.Path` + *files The locations of the protobuf files to be generated. - output: :class:`.Path` + output The output directory. **kwargs: Any keyword arguments to pass to generate_code. Returns ------- - List[:class:`CLIError`] - A of exceptions from protoc. + A of exceptions from protoc. """ loop = asyncio.get_event_loop() @@ -62,16 +58,11 @@ async def compile_protobufs( # Generate code response = await utils.to_thread(generate_code, request, **kwargs) - with ProcessPoolExecutor() as process_pool: - # write multiple files concurrently - await asyncio.gather( - *( - loop.run_in_executor( - process_pool, functools.partial(write_file, output, file) - ) - for file in response.file - ) + await asyncio.gather( + *( + utils.to_thread(write_file(output, file) for file in response.file) ) + ) else: errors = await utils.to_thread( From e595356a753e351621dfe6aca1a5b7ffd5bb53f2 Mon Sep 17 00:00:00 2001 From: James Hilton-Balfe <50501825+Gobot1234@users.noreply.github.com> Date: Mon, 7 Feb 2022 18:30:22 +0000 Subject: [PATCH 43/46] Revive this --- src/betterproto/plugin/exception_hook.py | 50 ++++++++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 src/betterproto/plugin/exception_hook.py diff --git a/src/betterproto/plugin/exception_hook.py b/src/betterproto/plugin/exception_hook.py new file mode 100644 index 000000000..ed2043b5d --- /dev/null +++ b/src/betterproto/plugin/exception_hook.py @@ -0,0 +1,50 @@ +import sys +import traceback +from pathlib import Path +from types import TracebackType +from typing import Type + +IMPORT_ERROR_MESSAGE = ( + "Unable to import `{0.name}` from betterproto plugin! Please ensure that you've " + 'installed betterproto as `pip install "betterproto[compiler]"` so that compiler ' + "dependencies are included." 
+) + +STDLIB_MODULES = getattr( + sys, + "builtin_module_names", + [ + p.stem + for p in Path(traceback.__file__).parent.iterdir() + if p.suffix == ".py" or p.is_dir() + ], +) + + +def import_exception_hook( + type: Type[BaseException], value: ImportError, tb: TracebackType +) -> None: + """Set an exception hook to automatically print: + + "Unable to import `x` from betterproto plugin! Please ensure that you've installed + betterproto as `pip install "betterproto[compiler]"` so that compiler dependencies are + included." + + if the module imported is not found and the exception is raised in this sub module + """ + bottom_frame = list(traceback.walk_tb(tb))[-1][0] + module = bottom_frame.f_globals.get("__name__", "__main__") + if ( + not module.startswith(__name__) + or not isinstance(value, ImportError) + or value.name in STDLIB_MODULES + or (value.name or "").startswith("betterproto") + ): + return sys.__excepthook__(type, value, tb) + + print(f"\033[31m{IMPORT_ERROR_MESSAGE.format(value)}\033[0m", file=sys.stderr) + exit(1) + + +def install_exception_hook(): + sys.excepthook = import_exception_hook From 66f96c4494066a19ea795882007db655afe85d49 Mon Sep 17 00:00:00 2001 From: James Hilton-Balfe <50501825+Gobot1234@users.noreply.github.com> Date: Mon, 7 Feb 2022 18:30:35 +0000 Subject: [PATCH 44/46] Revive this --- pyproject.toml | 2 +- src/betterproto/__init__.py | 4 +- src/betterproto/__main__.py | 3 ++ src/betterproto/plugin/__init__.py | 53 -------------------------- src/betterproto/plugin/__main__.py | 3 +- src/betterproto/plugin/cli/__init__.py | 7 ++-- src/betterproto/plugin/cli/commands.py | 3 -- src/betterproto/plugin/cli/runner.py | 14 ++----- src/betterproto/plugin/cli/utils.py | 12 ++++-- src/betterproto/plugin/compiler.py | 8 ++-- src/betterproto/plugin/main.py | 1 + src/betterproto/plugin/models.py | 11 ++---- src/betterproto/plugin/parser.py | 33 +++++++--------- tests/generate.py | 4 +- tests/util.py | 12 +++--- 15 files changed, 54 insertions(+), 116 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index d914e9719..f32c34890 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,7 +38,7 @@ sphinx = "3.1.2" sphinx-rtd-theme = "0.5.0" tomlkit = "^0.7.0" tox = "^3.15.1" -protobuf_parser = "*" +protobuf_parser = "1.0.0" [tool.poetry.scripts] betterproto = "betterproto:__main__.main" diff --git a/src/betterproto/__init__.py b/src/betterproto/__init__.py index d5465791b..e06a9ec4b 100644 --- a/src/betterproto/__init__.py +++ b/src/betterproto/__init__.py @@ -9,7 +9,6 @@ from abc import ABC from base64 import b64decode, b64encode from datetime import datetime, timedelta, timezone -from dateutil.parser import isoparse from typing import ( Any, Callable, @@ -25,12 +24,13 @@ get_type_hints, ) +from dateutil.parser import isoparse + from ._types import T from ._version import __version__ from .casing import camel_case, safe_snake_case, snake_case from .grpc.grpclib_client import ServiceStub - # Proto 3 data types TYPE_ENUM = "enum" TYPE_BOOL = "bool" diff --git a/src/betterproto/__main__.py b/src/betterproto/__main__.py index f9538be69..d225536f5 100644 --- a/src/betterproto/__main__.py +++ b/src/betterproto/__main__.py @@ -1,3 +1,6 @@ +from .plugin.exception_hook import install_exception_hook + +install_exception_hook() from .plugin.cli import app as main if __name__ == "__main__": diff --git a/src/betterproto/plugin/__init__.py b/src/betterproto/plugin/__init__.py index a8951c02c..e69de29bb 100644 --- a/src/betterproto/plugin/__init__.py +++ 
b/src/betterproto/plugin/__init__.py @@ -1,53 +0,0 @@ -import sys -import traceback -from pathlib import Path -from types import TracebackType -from typing import Type - -IMPORT_ERROR_MESSAGE = ( - "Unable to import `{0.name}` from betterproto plugin! Please ensure that you've " - "installed betterproto as `pip install \"betterproto[compiler]\"` so that compiler " - "dependencies are included." -) - -STDLIB_MODULES = getattr( - sys, - "builtin_module_names", - [ - p.stem - for p in Path(traceback.__file__).parent.iterdir() - if p.suffix == ".py" or p.is_dir() - ], -) - - -def import_exception_hook( - type: Type[BaseException], value: ImportError, tb: TracebackType -) -> None: - """Set an exception hook to automatically print: - - "Unable to import `x` from betterproto plugin! Please ensure that you've installed - betterproto as `pip install "betterproto[compiler]"` so that compiler dependencies are - included." - - if the module imported is not found and the exception is raised in this sub module - """ - bottom_frame = list(traceback.walk_tb(tb))[-1][0] - module = bottom_frame.f_globals.get("__name__", "__main__") - if ( - not module.startswith(__name__) - or not isinstance(value, ImportError) - or value.name in STDLIB_MODULES - or value.name.startswith("betterproto") - ): - return sys.__excepthook__(type, value, tb) - - print(f"\033[31m{IMPORT_ERROR_MESSAGE.format(value)}\033[0m", file=sys.stderr) - exit(1) - - -def install_exception_hook(): - sys.excepthook = import_exception_hook - - -from .main import main diff --git a/src/betterproto/plugin/__main__.py b/src/betterproto/plugin/__main__.py index 5d6a8109e..40e2b013f 100644 --- a/src/betterproto/plugin/__main__.py +++ b/src/betterproto/plugin/__main__.py @@ -1,3 +1,4 @@ from .main import main -main() +if __name__ == "__main__": + main() diff --git a/src/betterproto/plugin/cli/__init__.py b/src/betterproto/plugin/cli/__init__.py index 117eed931..f32d6de6b 100644 --- a/src/betterproto/plugin/cli/__init__.py +++ b/src/betterproto/plugin/cli/__init__.py @@ -1,5 +1,6 @@ VERBOSE = False -from black import DEFAULT_LINE_LENGTH as DEFAULT_LINE_LENGTH # noqa -from .commands import app -from .runner import compile_protobufs +from black.const import DEFAULT_LINE_LENGTH as DEFAULT_LINE_LENGTH + +from .commands import app as app +from .runner import compile_protobufs as compile_protobufs diff --git a/src/betterproto/plugin/cli/commands.py b/src/betterproto/plugin/cli/commands.py index 0b64276fc..36e829006 100644 --- a/src/betterproto/plugin/cli/commands.py +++ b/src/betterproto/plugin/cli/commands.py @@ -2,9 +2,6 @@ from pathlib import Path from typing import List, Optional -from .. import install_exception_hook -install_exception_hook() - import protobuf_parser import rich import typer diff --git a/src/betterproto/plugin/cli/runner.py b/src/betterproto/plugin/cli/runner.py index a33172876..5c720ffce 100644 --- a/src/betterproto/plugin/cli/runner.py +++ b/src/betterproto/plugin/cli/runner.py @@ -43,25 +43,19 @@ async def compile_protobufs( ------- A of exceptions from protoc. 
""" - loop = asyncio.get_event_loop() - if use_betterproto: - files, errors = await utils.to_thread(protobuf_parser.parse, *files) + proto_files, errors = await utils.to_thread(protobuf_parser.parse, *files) if errors: return errors request = CodeGeneratorRequest( - proto_file=[ - FileDescriptorProto().parse(file) for file in files - ] + proto_file=[FileDescriptorProto().parse(file) for file in proto_files] ) # Generate code response = await utils.to_thread(generate_code, request, **kwargs) await asyncio.gather( - *( - utils.to_thread(write_file(output, file) for file in response.file) - ) + *(utils.to_thread(write_file(output, file) for file in response.file)) ) else: @@ -69,7 +63,7 @@ async def compile_protobufs( protobuf_parser.run, *(f'"{file.as_posix()}"' for file in files), proto_path=files[0].parent.as_posix(), - python_out=output.as_posix() + python_out=output.as_posix(), ) return errors diff --git a/src/betterproto/plugin/cli/utils.py b/src/betterproto/plugin/cli/utils.py index 4830a41c6..f02e9bd2d 100644 --- a/src/betterproto/plugin/cli/utils.py +++ b/src/betterproto/plugin/cli/utils.py @@ -1,15 +1,18 @@ import asyncio import functools import sys +from collections.abc import Mapping from collections import defaultdict from pathlib import Path from typing import Any, Awaitable, Callable, Iterable, List, Optional, Set, TypeVar +from typing_extensions import ParamSpec T = TypeVar("T") +P = ParamSpec("P") -def get_files(paths: List[Path]) -> "defaultdict[Path, Set[Path]]": +def get_files(paths: List[Path]) -> "Mapping[Path, Set[Path]]": """Return a list of files ready for :func:`generate_command`""" new_paths: "defaultdict[Path, Set[Path]]" = defaultdict(set) @@ -24,12 +27,12 @@ def get_files(paths: List[Path]) -> "defaultdict[Path, Set[Path]]": else: new_paths[path.parent].add(path) - return new_paths + return dict(new_paths) -def run_sync(func: Callable[..., Awaitable[T]]) -> Callable[..., T]: +def run_sync(func: Callable[P, Awaitable[T]]) -> Callable[P, T]: @functools.wraps(func) - def wrapper(*args: Any, **kwargs: Any) -> T: + def wrapper(*args: P.args, **kwargs: P.kwargs) -> T: coro = func(*args, **kwargs) if hasattr(asyncio, "run"): @@ -53,6 +56,7 @@ def find(predicate: Callable[[T], bool], iterable: Iterable[T]) -> Optional[T]: if sys.version_info >= (3, 9): to_thread = asyncio.to_thread else: + async def to_thread(func: Callable[..., T], *args: Any, **kwargs: Any) -> T: loop = asyncio.get_running_loop() func_call = functools.partial(func, *args, **kwargs) diff --git a/src/betterproto/plugin/compiler.py b/src/betterproto/plugin/compiler.py index b666dda01..fe0fdc073 100644 --- a/src/betterproto/plugin/compiler.py +++ b/src/betterproto/plugin/compiler.py @@ -2,12 +2,14 @@ import black import jinja2 +from black.const import DEFAULT_LINE_LENGTH +from black.mode import Mode, TargetVersion from .models import OutputTemplate def outputfile_compiler( - output_file: OutputTemplate, line_length: int = black.DEFAULT_LINE_LENGTH + output_file: OutputTemplate, line_length: int = DEFAULT_LINE_LENGTH ) -> str: templates_folder = os.path.abspath( @@ -23,7 +25,5 @@ def outputfile_compiler( return black.format_str( template.render(output_file=output_file), - mode=black.Mode( - line_length=line_length, target_versions={black.TargetVersion.PY37} - ), + mode=Mode(line_length=line_length, target_versions={TargetVersion.PY37}), ) diff --git a/src/betterproto/plugin/main.py b/src/betterproto/plugin/main.py index 27e0a9123..d19266cdd 100755 --- a/src/betterproto/plugin/main.py +++ 
b/src/betterproto/plugin/main.py @@ -3,6 +3,7 @@ import sys from .exception_hook import install_exception_hook + install_exception_hook() import rich diff --git a/src/betterproto/plugin/models.py b/src/betterproto/plugin/models.py index 11490b8ba..19b1f64f5 100644 --- a/src/betterproto/plugin/models.py +++ b/src/betterproto/plugin/models.py @@ -29,18 +29,11 @@ reference to `A` to `B`'s `fields` attribute. """ -import re -import textwrap -from dataclasses import dataclass, field -from typing import Dict, Iterator, List, Optional, Set, Text, Type, Union - -from .. import Message, which_one_of - import builtins import re import textwrap from dataclasses import dataclass, field -from typing import Dict, Iterable, Iterator, List, Optional, Set, Type, Union +from typing import Dict, Iterator, List, Optional, Set, Type, Union import betterproto from betterproto import which_one_of @@ -63,6 +56,7 @@ ) from betterproto.lib.google.protobuf.compiler import CodeGeneratorRequest +from .. import Message, which_one_of from ..casing import sanitize_name from ..compile.importing import get_type_reference, parse_source_type_name from ..compile.naming import ( @@ -82,6 +76,7 @@ ServiceDescriptorProto, ) from ..lib.google.protobuf.compiler import CodeGeneratorRequest + # Create a unique placeholder to deal with # https://stackoverflow.com/questions/51575931/class-inheritance-in-python-3-7-dataclasses PLACEHOLDER = object() diff --git a/src/betterproto/plugin/parser.py b/src/betterproto/plugin/parser.py index 4aebcf907..caea11d3f 100644 --- a/src/betterproto/plugin/parser.py +++ b/src/betterproto/plugin/parser.py @@ -1,8 +1,8 @@ import itertools import pathlib -from typing import Iterator, List, Set, Tuple, Union +from typing import Iterator, List, Sequence, Set, Tuple, TypeAlias, Union -import black +from black.const import DEFAULT_LINE_LENGTH from rich.progress import Progress from ..lib.google.protobuf import ( @@ -17,10 +17,6 @@ CodeGeneratorResponseFeature, CodeGeneratorResponseFile, ) -import itertools -import pathlib -import sys -from typing import Iterator, List, Set, Tuple, TYPE_CHECKING, Union from .compiler import outputfile_compiler from .models import ( EnumDefinitionCompiler, @@ -36,18 +32,18 @@ is_oneof, ) +TraverseType: TypeAlias = ( + "Tuple[Union[DescriptorProto, EnumDescriptorProto], List[int]]" +) + -def traverse( - proto_file: FileDescriptorProto, -) -> "itertools.chain[Tuple[Union[DescriptorProto, EnumDescriptorProto], List[int]]]": +def traverse(proto_file: FileDescriptorProto) -> "itertools.chain[TraverseType]": # Todo: Keep information about nested hierarchy def _traverse( - path: List[int], items: List["EnumDescriptorProto"], prefix="" - ) -> Iterator[Tuple[Union[str, EnumDescriptorProto], List[int]]]: path: List[int], - items: List[Union[DescriptorProto, EnumDescriptorProto]], + items: Sequence[Union[DescriptorProto, EnumDescriptorProto]], prefix: str = "", - ) -> Iterator[Tuple[Union[DescriptorProto, EnumDescriptorProto], List[int]]]: + ) -> Iterator[TraverseType]: for i, item in enumerate(items): # Adjust the name since we flatten the hierarchy. 
# Todo: don't change the name, but include full name in returned tuple @@ -60,8 +56,7 @@ def _traverse( yield enum, path + [i, 4] if item.nested_type: - for n, p in _traverse(path + [i, 3], item.nested_type, next_prefix): - yield n, p + yield from _traverse(path + [i, 3], item.nested_type, next_prefix) return itertools.chain( _traverse([5], proto_file.enum_type), _traverse([4], proto_file.message_type) @@ -72,7 +67,7 @@ def generate_code( request: CodeGeneratorRequest, *, include_google: bool = False, - line_length: int = black.DEFAULT_LINE_LENGTH, + line_length: int = DEFAULT_LINE_LENGTH, generate_services: bool = True, verbose: bool = False, from_cli: bool = False, @@ -191,7 +186,9 @@ def generate_code( CodeGeneratorResponseFile( name=str(output_path), # Render and then format the output file - content=outputfile_compiler(output_file=output_package), + content=outputfile_compiler( + output_file=output_package, line_length=line_length + ), ) ) if verbose or from_cli: @@ -207,8 +204,6 @@ def generate_code( return response - return response - def read_protobuf_type( item: Union[DescriptorProto, EnumDescriptorProto], diff --git a/tests/generate.py b/tests/generate.py index 9765982de..35b81111f 100755 --- a/tests/generate.py +++ b/tests/generate.py @@ -4,10 +4,10 @@ import shutil import sys from pathlib import Path -from typing import Optional, Set, List +from typing import List, Optional, Set -import typer import rich +import typer from betterproto.plugin.cli import compile_protobufs, utils from tests.util import ( diff --git a/tests/util.py b/tests/util.py index 8017609c1..20c848385 100644 --- a/tests/util.py +++ b/tests/util.py @@ -1,13 +1,12 @@ -import importlib import asyncio -from dataclasses import dataclass +import importlib import os import pathlib -from pathlib import Path import sys +from dataclasses import dataclass +from pathlib import Path from types import ModuleType from typing import Callable, Dict, Generator, List, Optional, Tuple, Union -from typing import Callable, Generator, List, Optional os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python" @@ -22,7 +21,6 @@ def get_directories(path: Path) -> Generator[str, None, None]: yield from directories -def get_test_case_json_data(test_case_name: str, *json_file_names: str) -> List[str]: async def protoc( path: Union[str, Path], output_dir: Union[str, Path], reference: bool = False ): @@ -76,7 +74,9 @@ def get_test_case_json_data( continue result.append( TestCaseJsonFile( - test_data_file_path.read_text(), test_case_name, test_data_file_path.name.split(".")[0] + test_data_file_path.read_text(), + test_case_name, + test_data_file_path.name.split(".")[0], ) ) From 8f1746fcfd85a00c3bfc37bf07cbc1b11aaa68e4 Mon Sep 17 00:00:00 2001 From: James Hilton-Balfe <50501825+Gobot1234@users.noreply.github.com> Date: Tue, 15 Feb 2022 15:14:39 +0000 Subject: [PATCH 45/46] Add NoopProgress --- src/betterproto/plugin/parser.py | 34 ++++++++++++++++++++------------ 1 file changed, 21 insertions(+), 13 deletions(-) diff --git a/src/betterproto/plugin/parser.py b/src/betterproto/plugin/parser.py index caea11d3f..68dc6af1d 100644 --- a/src/betterproto/plugin/parser.py +++ b/src/betterproto/plugin/parser.py @@ -1,6 +1,7 @@ import itertools import pathlib -from typing import Iterator, List, Sequence, Set, Tuple, TypeAlias, Union +from contextlib import AbstractContextManager +from typing import Any, Iterator, List, Sequence, Set, Tuple, TypeAlias, Union from black.const import DEFAULT_LINE_LENGTH from rich.progress import Progress @@ -63,6 
+64,17 @@ def _traverse( ) +class NoopProgress(AbstractContextManager): + def add_task(self, *args: Any, **kwargs: Any) -> None: + ... + + def update(self, *args: Any, **kwargs: Any) -> None: + ... + + def __exit__(self, *args: Any) -> None: + ... + + def generate_code( request: CodeGeneratorRequest, *, @@ -103,7 +115,7 @@ def generate_code( request_data = PluginRequestCompiler(plugin_request_obj=request) # Gather output packages - with Progress(transient=True) as progress: + with Progress(transient=True) if from_cli else NoopProgress() as progress: reading_progress_bar = progress.add_task( "[green]Reading protobuf files...", total=len(request.proto_file) ) @@ -122,13 +134,12 @@ def generate_code( request_data.output_packages[output_package_name].input_files.append( proto_file ) - if verbose or from_cli: - progress.update(reading_progress_bar, advance=1) + progress.update(reading_progress_bar, advance=1) # Read Messages and Enums # We need to read Messages before Services in so that we can # get the references to input/output messages for each service - with Progress(transient=True) as progress: + with Progress(transient=True) if from_cli else NoopProgress() as progress: parsing_progress_bar = progress.add_task( "[green]Parsing protobuf enums and messages...", total=sum( @@ -146,12 +157,11 @@ def generate_code( path=path, output_package=output_package, ) - if verbose or from_cli: - progress.update(parsing_progress_bar, advance=1) + progress.update(parsing_progress_bar, advance=1) # Read Services if generate_services: - with Progress(transient=True) as progress: + with Progress(transient=True) if from_cli else NoopProgress() as progress: parsing_progress_bar = progress.add_task( "[green]Parsing protobuf services...", total=sum( @@ -166,12 +176,11 @@ def generate_code( for proto_input_file in output_package.input_files: for index, service in enumerate(proto_input_file.service): read_protobuf_service(service, index, output_package) - if verbose or from_cli: - progress.update(parsing_progress_bar, advance=1) + progress.update(parsing_progress_bar, advance=1) # Generate output files output_paths: Set[pathlib.Path] = set() - with Progress(transient=True) as progress: + with Progress(transient=True) if from_cli else NoopProgress() as progress: compiling_progress_bar = progress.add_task( "[green]Compiling protobuf files...", total=len(request_data.output_packages), @@ -191,8 +200,7 @@ def generate_code( ), ) ) - if verbose or from_cli: - progress.update(compiling_progress_bar, advance=1) + progress.update(compiling_progress_bar, advance=1) # Make each output directory a package with __init__ file init_files = { From 61f0335aabd312c9e50d34123d206ab394bf7680 Mon Sep 17 00:00:00 2001 From: James Hilton-Balfe <50501825+Gobot1234@users.noreply.github.com> Date: Thu, 17 Feb 2022 00:50:34 +0000 Subject: [PATCH 46/46] Make stuff work slightly more --- pyproject.toml | 4 +- src/betterproto/plugin/cli/commands.py | 58 +++++++++++++------------- src/betterproto/plugin/cli/runner.py | 20 ++++----- 3 files changed, 42 insertions(+), 40 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index f32c34890..69bcf7409 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,7 @@ dataclasses = { version = "^0.7", python = ">=3.6, <3.7" } grpclib = "^0.4.1" jinja2 = { version = ">=2.11.2", optional = true } typer = { version = "^0.3.2", optional = true } -rich = { version = "^9.8.2", optional = true } +rich = { version = "^11.2.0", optional = true } python-dateutil = "^2.8" [tool.poetry.dev-dependencies] 
@@ -38,7 +38,7 @@ sphinx = "3.1.2" sphinx-rtd-theme = "0.5.0" tomlkit = "^0.7.0" tox = "^3.15.1" -protobuf_parser = "1.0.0" +# protobuf_parser = "1.0.0" [tool.poetry.scripts] betterproto = "betterproto:__main__.main" diff --git a/src/betterproto/plugin/cli/commands.py b/src/betterproto/plugin/cli/commands.py index 36e829006..f3e652934 100644 --- a/src/betterproto/plugin/cli/commands.py +++ b/src/betterproto/plugin/cli/commands.py @@ -67,7 +67,7 @@ async def compile( output = output or (Path(output_path.parent.name) / output_path.name).resolve() output.mkdir(exist_ok=True, parents=True) - errors = await compile_protobufs( + results = await compile_protobufs( *protos, output=output, verbose=verbose, @@ -76,38 +76,40 @@ async def compile( from_cli=True, ) - for error in errors: - if isinstance(error, protobuf_parser.SyntaxError): - rich.print( - f"[red]File {str(error.file).strip()}:\n", - Syntax.from_path( - error.file, - line_numbers=True, - line_range=(max(error.lineno - 5, 0), error.lineno), - ), - f"{' ' * (error.offset + 3)}^\nSyntaxError: {error.msg}[red]", - file=sys.stderr, - ) - elif isinstance(error, Warning): - rich.print(f"Warning: {error}", file=sys.stderr) - elif isinstance(error, protobuf_parser.Error): - failed_files = "\n".join(f" - {file}" for file in protos) - rich.print( - f"[red]Protoc failed to generate outputs for:\n\n" - f"{failed_files}\n\nSee the output for the issue:\n{error}[red]", - file=sys.stderr, - ) + for result in results: + for error in result.errors: + if error.message.startswith("Syntax error"): + rich.print( + f"[red]File {str(result.file)}:\n", + Syntax.from_path( + str(result.file), + line_numbers=True, + line_range=(max(error.line - 5, 0), error.line), + ), + f"{' ' * (error.column + 3)}^\nSyntaxError: {error.message}[red]", + file=sys.stderr, + ) + elif isinstance(error, protobuf_parser.Warning): + rich.print(f"Warning: {error}", file=sys.stderr) + else: + failed_files = "\n".join(f" - {file}" for file in protos) + rich.print( + f"[red]Protoc failed to generate outputs for:\n\n" + f"{failed_files}\n\nSee the output for the issue:\n{error}[red]", + file=sys.stderr, + ) - has_warnings = all(isinstance(e, Warning) for e in errors) - if not errors or has_warnings: + # has_warnings = all(isinstance(e, Warning) for e in errors) + # if not errors or has_warnings: + if True: rich.print( f"[bold green]Finished generating output for " f"{len(protos)} file{'s' if len(protos) != 1 else ''}, " f"output is in {output.as_posix()}" ) - if errors: - if not has_warnings: - exit(2) - exit(1) + # if errors: + # if not has_warnings: + # exit(2) + # exit(1) exit(0) diff --git a/src/betterproto/plugin/cli/runner.py b/src/betterproto/plugin/cli/runner.py index 5c720ffce..6c10eac68 100644 --- a/src/betterproto/plugin/cli/runner.py +++ b/src/betterproto/plugin/cli/runner.py @@ -1,5 +1,5 @@ import asyncio -from typing import TYPE_CHECKING, Any, List +from typing import TYPE_CHECKING, Any, Sequence import protobuf_parser @@ -16,7 +16,7 @@ def write_file(output: "Path", file: CodeGeneratorResponseFile) -> None: - path = (output / file.name).resolve() + path = output.joinpath(file.name).resolve() path.parent.mkdir(parents=True, exist_ok=True) path.write_text(file.content) @@ -26,7 +26,7 @@ async def compile_protobufs( output: "Path", use_betterproto: bool = True, **kwargs: Any, -) -> List[protobuf_parser.Error]: +) -> Sequence[protobuf_parser.ParseResult["Path"]]: """ A programmatic way to compile protobufs. 
@@ -44,20 +44,20 @@ async def compile_protobufs( A of exceptions from protoc. """ if use_betterproto: - proto_files, errors = await utils.to_thread(protobuf_parser.parse, *files) - if errors: - return errors + results = await utils.to_thread(protobuf_parser.parse, *files) request = CodeGeneratorRequest( - proto_file=[FileDescriptorProto().parse(file) for file in proto_files] + proto_file=[ + FileDescriptorProto().parse(result.parsed) for result in results + ] ) # Generate code response = await utils.to_thread(generate_code, request, **kwargs) await asyncio.gather( - *(utils.to_thread(write_file(output, file) for file in response.file)) + *(utils.to_thread(write_file, output, file) for file in response.file) ) - + return results else: errors = await utils.to_thread( protobuf_parser.run, @@ -66,4 +66,4 @@ async def compile_protobufs( python_out=output.as_posix(), ) - return errors + return []
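
A minimal sketch tying together the two changes above, for readers skimming the diffs: the NoopProgress null object from the parser.py hunk lets the same progress-reporting code run with or without rich, and the corrected asyncio.gather call in runner.py schedules one to_thread task per generated file instead of passing a single generator expression into to_thread. Everything below is illustrative rather than the plugin's actual code: GeneratedFile stands in for CodeGeneratorResponseFile, write_outputs stands in for the write loop in compile_protobufs, and the standard-library asyncio.to_thread (Python 3.9+) stands in for the project's utils.to_thread helper, which is assumed to play the same role.

import asyncio
from contextlib import AbstractContextManager
from dataclasses import dataclass
from pathlib import Path
from typing import Any, Sequence


class NoopProgress(AbstractContextManager):
    # Null-object stand-in mirroring the class added in the parser.py hunk above:
    # it accepts the same calls the plugin makes on rich.progress.Progress and ignores them.
    def add_task(self, *args: Any, **kwargs: Any) -> None:
        ...

    def update(self, *args: Any, **kwargs: Any) -> None:
        ...

    def __exit__(self, *args: Any) -> None:
        ...


@dataclass
class GeneratedFile:
    # Simplified stand-in for CodeGeneratorResponseFile: a relative name plus rendered content.
    name: str
    content: str


def write_file(output: Path, file: GeneratedFile) -> None:
    # Same shape as the patched helper: resolve under the output dir, create parents, write text.
    path = output.joinpath(file.name).resolve()
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(file.content)


async def write_outputs(
    output: Path, files: Sequence[GeneratedFile], from_cli: bool = False
) -> None:
    # Only pull in a real rich progress bar when running from the CLI, as generate_code now does.
    if from_cli:
        from rich.progress import Progress

        progress_cm = Progress(transient=True)
    else:
        progress_cm = NoopProgress()

    with progress_cm as progress:
        task = progress.add_task("[green]Writing files...", total=len(files))
        # One to_thread task per file, unpacked into gather -- the corrected call shape from
        # runner.py: the callable and its arguments are handed to to_thread, rather than a
        # generator expression, so each write actually runs in a worker thread.
        await asyncio.gather(
            *(asyncio.to_thread(write_file, output, file) for file in files)
        )
        progress.update(task, advance=len(files))


if __name__ == "__main__":
    demo_files = [
        GeneratedFile("pkg/__init__.py", "# generated\n"),
        GeneratedFile("pkg/models.py", "# generated models\n"),
    ]
    asyncio.run(write_outputs(Path("demo_out"), demo_files))

Run with the defaults this writes the demo files under ./demo_out without importing rich at all; from_cli=True exercises the rich-backed branch. asyncio.to_thread needs Python 3.9+, so treat it as a placeholder for whatever utils.to_thread does on the interpreter versions the project supports.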