diff --git a/graphql_compiler/compiler/blocks.py b/graphql_compiler/compiler/blocks.py index cdda7d3b4..e91e06c56 100644 --- a/graphql_compiler/compiler/blocks.py +++ b/graphql_compiler/compiler/blocks.py @@ -357,7 +357,7 @@ def __init__( self.direction = direction self.edge_name = edge_name self.depth = depth - # Denotes whether the traversal is occuring after a prior @optional traversal + # Denotes whether the traversal is occurring after a prior @optional traversal self.within_optional_scope = within_optional_scope self.validate() diff --git a/graphql_compiler/compiler/emit_sql.py b/graphql_compiler/compiler/emit_sql.py index 1a706b80b..157027c9e 100644 --- a/graphql_compiler/compiler/emit_sql.py +++ b/graphql_compiler/compiler/emit_sql.py @@ -809,7 +809,7 @@ def __init__(self, sql_schema_info: SQLAlchemySchemaInfo, ir: IrAndMetadata): self._relocate(ir.query_metadata_table.root_location) # Mapping aliases to one of the column used to join into them. We use this column - # to check for LEFT JOIN misses, since it helps us distinguish actuall NULL values + # to check for LEFT JOIN misses, since it helps us distinguish actual NULL values # from values that are NULL because of a LEFT JOIN miss. self._came_from: Dict[Union[Alias, ColumnRouter], Column] = {} diff --git a/graphql_compiler/compiler/ir_lowering_common/common.py b/graphql_compiler/compiler/ir_lowering_common/common.py index d70d0ac90..a8704f653 100644 --- a/graphql_compiler/compiler/ir_lowering_common/common.py +++ b/graphql_compiler/compiler/ir_lowering_common/common.py @@ -266,14 +266,14 @@ def extract_folds_from_ir_blocks( def extract_optional_location_root_info( ir_blocks: List[BasicBlock], ) -> Tuple[List[Location], Dict[Location, Tuple[Location, ...]]]: - """Construct a mapping from locations within @optional to their correspoding optional Traverse. + """Construct a mapping from locations within @optional to their corresponding optional Traverse. 
Args: ir_blocks: list of IR blocks to extract optional data from Returns: tuple (complex_optional_roots, location_to_optional_roots): - complex_optional_roots: list of @optional locations (location immmediately preceding + complex_optional_roots: list of @optional locations (location immediately preceding an @optional Traverse) that expand vertex fields location_to_optional_roots: dict mapping from location -> optional_roots where location is within some number of @optionals and optional_roots is a tuple @@ -362,7 +362,7 @@ def extract_simple_optional_location_info( Args: ir_blocks: list of IR blocks to extract optional data from - complex_optional_roots: list of @optional locations (location immmediately preceding + complex_optional_roots: list of @optional locations (location immediately preceding an @optional traverse) that expand vertex fields location_to_optional_roots: dict mapping from location -> optional_roots where location is within some number of @optionals and optional_roots is a tuple @@ -371,7 +371,7 @@ def extract_simple_optional_location_info( Returns: dict mapping from simple_optional_root_location -> dict containing keys - - 'inner_location': Location object correspoding to the unique MarkLocation present within + - 'inner_location': Location object corresponding to the unique MarkLocation present within a simple optional (one that does not expand vertex fields) scope - 'edge_field': string representing the optional edge being traversed where simple_optional_root_to_inner_location is the location preceding the @optional scope diff --git a/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py b/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py index 6d41c723c..5a12424d1 100644 --- a/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py +++ b/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py @@ -315,7 +315,7 @@ def to_gremlin(self): class GremlinFoldedLocalField(LocalField): - """A Gremlin-specific LocalField 
expressionto be used only within @fold scopes.""" + """A Gremlin-specific LocalField expression to be used only within @fold scopes.""" def get_local_object_gremlin_name(self): """Return the Gremlin name of the local object whose field is being produced.""" diff --git a/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py b/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py index 04dc3f42d..18a8e370c 100644 --- a/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py +++ b/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py @@ -326,7 +326,7 @@ def _translate_equivalent_locations( def lower_folded_coerce_types_into_filter_blocks( folded_ir_blocks: List[BasicBlock], ) -> List[BasicBlock]: - """Lower CoerceType blocks into "INSTANCEOF" Filter blocks. Indended for folded IR blocks.""" + """Lower CoerceType blocks into "INSTANCEOF" Filter blocks. Intended for folded IR blocks.""" new_folded_ir_blocks: List[BasicBlock] = [] for block in folded_ir_blocks: new_block: BasicBlock diff --git a/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py b/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py index 32d0029f9..0278f1f0e 100644 --- a/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py +++ b/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py @@ -37,7 +37,7 @@ def _prune_traverse_using_omitted_locations( Args: match_traversal: list of MatchStep objects to be pruned omitted_locations: subset of complex_optional_roots to be omitted - complex_optional_roots: list of all @optional locations (location immmediately preceding + complex_optional_roots: list of all @optional locations (location immediately preceding an @optional traverse) that expand vertex fields location_to_optional_roots: dict mapping from location -> optional_roots where location is within some number of @optionals and optional_roots is a list @@ -64,7 +64,7 @@ def _prune_traverse_using_omitted_locations( ) ) elif 
optional_root_location in omitted_locations: - # Add filter to indicate that the omitted edge(s) shoud not exist + # Add filter to indicate that the omitted edge(s) should not exist field_name = step.root_block.get_field_name() # HACK(predrag): Plumbing in the type here is tricky and generally not worth it, @@ -208,7 +208,7 @@ def _get_present_locations(match_traversals): if not present_non_optional_locations.issubset(present_locations): raise AssertionError( "present_non_optional_locations {} was not a subset of " - "present_locations {}. THis hould never happen.".format( + "present_locations {}. This should never happen.".format( present_non_optional_locations, present_locations ) ) @@ -354,7 +354,7 @@ def _apply_filters_to_first_location_occurrence( new_match_traversal = [] newly_filtered_locations = set() for match_step in match_traversal: - # Apply all filters for a location to the first occurence of that location + # Apply all filters for a location to the first occurrence of that location current_location = match_step.as_block.location if current_location in newly_filtered_locations: @@ -394,8 +394,8 @@ def _apply_filters_to_first_location_occurrence( def collect_filters_to_first_location_occurrence(compound_match_query): """Collect all filters for a particular location to the first instance of the location. - Adding edge field non-exsistence filters in `_prune_traverse_using_omitted_locations` may - result in filters being applied to locations after their first occurence. + Adding edge field non-existence filters in `_prune_traverse_using_omitted_locations` may + result in filters being applied to locations after their first occurrence. OrientDB does not resolve this behavior correctly. Therefore, for each MatchQuery, we collect all the filters for each location in a list. 
For each location, we make a conjunction of the filter list (`_predicate_list_to_where_block`) and apply @@ -566,7 +566,7 @@ def _lower_non_existent_context_field_filters(match_traversals, visitor_fn): The `visitor_fn` implements these behaviors (see `_update_context_field_expression`). Args: - match_traversals: list of match traversal enitities to be lowered + match_traversals: list of match traversal entities to be lowered visitor_fn: visit_and_update function for lowering expressions in given match traversal Returns: @@ -592,7 +592,7 @@ def _lower_non_existent_context_field_filters(match_traversals, visitor_fn): def lower_context_field_expressions(compound_match_query): - """Lower Expressons involving non-existent ContextFields.""" + """Lower Expressions involving non-existent ContextFields.""" if len(compound_match_query.match_queries) == 0: raise AssertionError( "Received CompoundMatchQuery {} with no MatchQuery objects.".format( @@ -601,7 +601,7 @@ def lower_context_field_expressions(compound_match_query): ) elif len(compound_match_query.match_queries) == 1: # All ContextFields exist if there is only one MatchQuery - # becuase none of the traverses were omitted, and all locations exist (are defined). + # because none of the traverses were omitted, and all locations exist (are defined). return compound_match_query else: new_match_queries = [] diff --git a/graphql_compiler/compiler/ir_lowering_match/utils.py b/graphql_compiler/compiler/ir_lowering_match/utils.py index 75861c9d7..6d97525e5 100644 --- a/graphql_compiler/compiler/ir_lowering_match/utils.py +++ b/graphql_compiler/compiler/ir_lowering_match/utils.py @@ -171,7 +171,7 @@ def _filter_orientdb_simple_optional_edge( """Return an Expression that is False for rows that don't follow the @optional specification. OrientDB does not filter correctly within optionals. Namely, a result where the optional edge - DOES EXIST will be returned regardless of whether the inner filter is satisfed. 
+ DOES EXIST will be returned regardless of whether the inner filter is satisfied. To mitigate this, we add a final filter to reject such results. A valid result must satisfy either of the following: - The location within the optional exists (the filter will have been applied in this case) @@ -245,7 +245,7 @@ def construct_where_filter_predicate( are folded or optional). simple_optional_root_info: dict mapping from simple_optional_root_location -> dict containing keys - - 'inner_location': Location object correspoding to the + - 'inner_location': Location object corresponding to the unique MarkLocation present within a simple @optional (one that does not expand vertex fields) scope @@ -287,7 +287,7 @@ class OptionalTraversalTree(object): def __init__(self, complex_optional_roots: List[Location]): """Initialize empty tree of optional root Locations (elements of complex_optional_roots). - This object construst a tree of complex optional roots. These are locations preceding an + This object constructs a tree of complex optional roots. These are locations preceding an @optional traverse that expand vertex fields within. Simple @optional traverses i.e. ones that do not expand vertex fields within them are excluded. @@ -393,7 +393,7 @@ def construct_optional_traversal_tree( """Return a tree of complex optional root locations. 
Args: - complex_optional_roots: list of @optional locations (location immmediately preceding + complex_optional_roots: list of @optional locations (location immediately preceding an @optional Traverse) that expand vertex fields location_to_optional_roots: dict mapping from location -> optional_roots where location is within some number of @optionals and optional_roots is a list diff --git a/graphql_compiler/compiler/sqlalchemy_extensions.py b/graphql_compiler/compiler/sqlalchemy_extensions.py index 098914326..37826a50d 100644 --- a/graphql_compiler/compiler/sqlalchemy_extensions.py +++ b/graphql_compiler/compiler/sqlalchemy_extensions.py @@ -121,5 +121,5 @@ def bind_parameters_to_query_string( def materialize_result_proxy(result: sqlalchemy.engine.result.ResultProxy) -> List[Dict[str, Any]]: - """Drain the results from a result proxy into a list of dicts represenation.""" + """Drain the results from a result proxy into a list of dicts representation.""" return [dict(row) for row in result] diff --git a/graphql_compiler/macros/__init__.py b/graphql_compiler/macros/__init__.py index 19fd76fe8..79349b4d4 100644 --- a/graphql_compiler/macros/__init__.py +++ b/graphql_compiler/macros/__init__.py @@ -117,7 +117,7 @@ def register_macro_edge(macro_registry, macro_edge_graphql, macro_edge_args): requires in order to function. """ # The below function will validate that the macro edge in question is valid in isolation, - # when considered only against the macro-less schema. After geting this result, + # when considered only against the macro-less schema. After getting this result, # we simply need to check the macro edge descriptor against other artifacts in the macro system # that might also cause conflicts. 
macro_descriptor = make_macro_edge_descriptor( diff --git a/graphql_compiler/macros/macro_edge/expansion.py b/graphql_compiler/macros/macro_edge/expansion.py index 88e531cc3..93ab87dff 100644 --- a/graphql_compiler/macros/macro_edge/expansion.py +++ b/graphql_compiler/macros/macro_edge/expansion.py @@ -141,7 +141,7 @@ def _expand_specific_macro_edge(subclass_sets, target_class_name, macro_ast, sel to be added somewhere in the same scope but before the replacement_selection_ast. - sibling_suffix_selections: list of GraphQL AST objects describing the selections to be added somewhere in the same scope but after the replacement_selection_ast. - Since the replacemet_selection_ast is a vertex field, and vertex fields always + Since the replacement_selection_ast is a vertex field, and vertex fields always go after property fields, these selections are all vertex fields. """ replacement_selection_ast = None diff --git a/graphql_compiler/macros/macro_edge/name_generation.py b/graphql_compiler/macros/macro_edge/name_generation.py index cc7d9d50f..932e8e514 100644 --- a/graphql_compiler/macros/macro_edge/name_generation.py +++ b/graphql_compiler/macros/macro_edge/name_generation.py @@ -9,7 +9,7 @@ def generate_disambiguations(existing_names, new_names): Args: existing_names: set of strings, the names that are already taken - new_names: set of strings, the names that might coincide with exisitng names + new_names: set of strings, the names that might coincide with existing names Returns: dict mapping the new names to other unique names not present in existing_names diff --git a/graphql_compiler/schema/schema_info.py b/graphql_compiler/schema/schema_info.py index 70a1b6883..1a2d8e312 100644 --- a/graphql_compiler/schema/schema_info.py +++ b/graphql_compiler/schema/schema_info.py @@ -162,12 +162,13 @@ class SQLSchemaInfo(BackendSpecificSchemaInfo): # Specifying the dialect for which we are compiling # e.g. 
sqlalchemy.dialects.mssql.dialect() dialect: Dialect + # dict mapping every GraphQL object type or interface type name in the schema to # a sqlalchemy table. # Column types that do not exist for this dialect are not allowed. # All tables are expected to have primary keys. vertex_name_to_table: Dict[str, sqlalchemy.Table] # dict mapping every GraphQL object type or interface type name in the schema to # dict mapping every vertex field name at that type to a JoinDescriptor. # The tables the join is to be performed on are not specified. diff --git a/graphql_compiler/schema/typedefs.py b/graphql_compiler/schema/typedefs.py index d00ad7426..10304b190 100644 --- a/graphql_compiler/schema/typedefs.py +++ b/graphql_compiler/schema/typedefs.py @@ -19,7 +19,7 @@ # Dict of GraphQL type name -> (Dict of field name on that type -> the desired type of the field) ClassToFieldTypeOverridesType = Dict[str, Dict[str, GraphQLSchemaFieldType]] -# The type of the type equivalence hints object, which defines which GraphQL intefaces and object +# The type of the type equivalence hints object, which defines which GraphQL interfaces and object # types should be considered equivalent to which union types. This is our workaround for the lack # of interface-interface and object-object inheritance. 
TypeEquivalenceHintsType = Dict[Union[GraphQLInterfaceType, GraphQLObjectType], GraphQLUnionType] diff --git a/graphql_compiler/schema_generation/graphql_schema.py b/graphql_compiler/schema_generation/graphql_schema.py index f94c6ee07..90855c95e 100644 --- a/graphql_compiler/schema_generation/graphql_schema.py +++ b/graphql_compiler/schema_generation/graphql_schema.py @@ -60,7 +60,7 @@ def _get_inherited_field_types(class_to_field_type_overrides, schema_graph): return inherited_field_type_overrides -def _validate_overriden_fields_are_not_defined_in_superclasses( +def _validate_overridden_fields_are_not_defined_in_superclasses( class_to_field_type_overrides, schema_graph ): """Assert that the fields we want to override are not defined in superclasses.""" @@ -275,7 +275,7 @@ def get_graphql_schema_from_schema_graph( if hidden_classes is None: hidden_classes = set() - _validate_overriden_fields_are_not_defined_in_superclasses( + _validate_overridden_fields_are_not_defined_in_superclasses( class_to_field_type_overrides, schema_graph ) diff --git a/graphql_compiler/tests/integration_tests/integration_backend_config.py b/graphql_compiler/tests/integration_tests/integration_backend_config.py index 732ad0dfe..b1ff6ffb2 100644 --- a/graphql_compiler/tests/integration_tests/integration_backend_config.py +++ b/graphql_compiler/tests/integration_tests/integration_backend_config.py @@ -45,7 +45,7 @@ pwd="Root-secure1", ) -# delimeters must be URL escaped +# delimiters must be URL escaped escaped_pyodbc_parameter_string = quote_plus(pyodbc_parameter_string) SQL_BACKEND_TO_CONNECTION_STRING = { diff --git a/graphql_compiler/tests/schema_generation_tests/test_orientdb_schema_generation.py b/graphql_compiler/tests/schema_generation_tests/test_orientdb_schema_generation.py index 6d5867e4a..07e051ec5 100644 --- a/graphql_compiler/tests/schema_generation_tests/test_orientdb_schema_generation.py +++ b/graphql_compiler/tests/schema_generation_tests/test_orientdb_schema_generation.py @@ 
-440,12 +440,12 @@ def test_include_non_graph_classes_in_graphql_schema(self) -> None: ARBITRARY_CONCRETE_NON_GRAPH_CLASS, ] - vertex_clases = [ + vertex_classes = [ ARBITRARY_CONCRETE_VERTEX_CLASS, BASE_VERTEX, ] - schema_data = non_graph_classes_to_include + non_graph_classes_to_ignore + vertex_clases + schema_data = non_graph_classes_to_include + non_graph_classes_to_ignore + vertex_classes names_of_non_graph_classes_to_ignore = { non_graph_class["name"] for non_graph_class in non_graph_classes_to_ignore diff --git a/graphql_compiler/tests/snapshot_tests/test_cost_estimation.py b/graphql_compiler/tests/snapshot_tests/test_cost_estimation.py index d1a704c09..26fe4d42f 100644 --- a/graphql_compiler/tests/snapshot_tests/test_cost_estimation.py +++ b/graphql_compiler/tests/snapshot_tests/test_cost_estimation.py @@ -549,7 +549,7 @@ def test_recurse_and_traverse(self) -> None: # For each Animal, we expect 11.0 / 7.0 "child" Animals. Since recurse first explores # depth=0, we add 1 to account for the parent. At the moment, we don't account for depths - # greater than 1, so we exepct 11.0 / 7.0 + 1 total children, each of which has 13.0 / 7.0 + # greater than 1, so we expect 11.0 / 7.0 + 1 total children, each of which has 13.0 / 7.0 # Animal_BornAt edges. 
expected_cardinality_estimate = 7.0 * (11.0 / 7.0 + 1) * (13.0 / 7.0) self.assertAlmostEqual(expected_cardinality_estimate, cardinality_estimate) diff --git a/graphql_compiler/tests/snapshot_tests/test_orientdb_match_query.py b/graphql_compiler/tests/snapshot_tests/test_orientdb_match_query.py index ee73e05c6..dc10a3cea 100644 --- a/graphql_compiler/tests/snapshot_tests/test_orientdb_match_query.py +++ b/graphql_compiler/tests/snapshot_tests/test_orientdb_match_query.py @@ -40,7 +40,7 @@ def execute_graphql( client: OrientDB, sample_parameters: Dict[str, Any], ) -> FrozenSet[Tuple[FrozenSet[Tuple[str, Any]], int]]: - """Compile the GraphQL query to MATCH, execute it agains the test_db, and return the results.""" + """Compile GraphQL query to MATCH, execute it against the test_db, and return the results.""" schema_based_type_equivalence_hints: Dict[ Union[GraphQLInterfaceType, GraphQLObjectType], GraphQLUnionType ] = {} diff --git a/graphql_compiler/tests/test_helpers.py b/graphql_compiler/tests/test_helpers.py index 3efdcee93..a29cda137 100644 --- a/graphql_compiler/tests/test_helpers.py +++ b/graphql_compiler/tests/test_helpers.py @@ -480,7 +480,7 @@ def transform(emitted_output: str) -> str: def _get_mismatch_message( expected_blocks: List[BasicBlock], received_blocks: List[BasicBlock] ) -> str: - """Create a well-formated error message indicating that two lists of blocks are mismatched.""" + """Create a well-formatted error message indicating that two lists of blocks are mismatched.""" pretty_expected = pformat(expected_blocks) pretty_received = pformat(received_blocks) return "{}\n\n!=\n\n{}".format(pretty_expected, pretty_received) diff --git a/graphql_compiler/tests/test_ir_generation_errors.py b/graphql_compiler/tests/test_ir_generation_errors.py index faaf93ed3..ff3b239b5 100644 --- a/graphql_compiler/tests/test_ir_generation_errors.py +++ b/graphql_compiler/tests/test_ir_generation_errors.py @@ -1281,7 +1281,7 @@ def test_invalid_edge_degree_queries(self) 
-> None: graphql_to_ir(self.schema, invalid_graphql) def test_missing_directives_in_schema(self) -> None: - """Ensure that validators properly identifiy missing directives in the schema. + """Ensure that validators properly identify missing directives in the schema. The schema should contain all directives that are supported by the graphql compiler, even if they might not be used in the query. Hence we raise an error when the following