 from collections import defaultdict
 from inspect import getmembers
 from itertools import zip_longest
-from ordered_set import OrderedSet
 from deepdiff.helper import (strings, bytes_type, numbers, uuids, datetimes, ListItemRemovedOrAdded, notpresent,
                              IndexedHash, unprocessed, add_to_frozen_set, basic_types,
                              convert_item_or_items_into_set_else_none, get_type,
                              convert_item_or_items_into_compiled_regexes_else_none,
                              type_is_subclass_of_type_group, type_in_type_group, get_doc,
                              number_to_string, datetime_normalize, KEY_TO_VAL_STR, booleans,
-                             np_ndarray, np_floating, get_numpy_ndarray_rows, OrderedSetPlus, RepeatedTimer,
+                             np_ndarray, np_floating, get_numpy_ndarray_rows, RepeatedTimer,
                              TEXT_VIEW, TREE_VIEW, DELTA_VIEW, detailed__dict__, add_root_to_paths,
                              np, get_truncate_datetime, dict_, CannotCompare, ENUM_INCLUDE_KEYS,
-                             PydanticBaseModel, Opcode,)
+                             PydanticBaseModel, Opcode, SortedSet)
 from deepdiff.serialization import SerializationMixin
 from deepdiff.distance import DistanceMixin
 from deepdiff.model import (
     RemapDict, ResultDict, TextResult, TreeResult, DiffLevel,
     DictRelationship, AttributeRelationship, REPORT_KEYS,
     SubscriptableIterableRelationship, NonSubscriptableIterableRelationship,
-    SetRelationship, NumpyArrayRelationship, CUSTOM_FIELD, PrettyOrderedSet,
+    SetRelationship, NumpyArrayRelationship, CUSTOM_FIELD,
     FORCE_DEFAULT,
 )
 from deepdiff.deephash import DeepHash, combine_hashes_lists
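Side note on the container swap (not part of the commit itself): OrderedSet preserved insertion order, while the SortedSet now pulled in from deepdiff.helper keeps its elements in sorted order. A minimal sketch of the behavioral difference, assuming SortedSet behaves like sortedcontainers.SortedSet (sorted iteration, standard set operators, pop() from the end):

# Illustrative only; assumes SortedSet behaves like sortedcontainers.SortedSet.
from sortedcontainers import SortedSet

keys = SortedSet(['b', 'a', 'c'])
print(list(keys))                    # ['a', 'b', 'c'] -- iteration is sorted, not insertion order
print(keys & SortedSet(['c', 'd']))  # SortedSet(['c']) -- the usual set operators still apply
print(keys.pop())                    # 'c' -- pop() removes the largest element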
@@ -567,27 +566,26 @@ def _diff_dict(
         rel_class = DictRelationship
 
         if self.ignore_private_variables:
-            t1_keys = OrderedSet([key for key in t1 if not (isinstance(key, str) and key.startswith('__'))])
-            t2_keys = OrderedSet([key for key in t2 if not (isinstance(key, str) and key.startswith('__'))])
+            t1_keys = SortedSet([key for key in t1 if not (isinstance(key, str) and key.startswith('__'))])
+            t2_keys = SortedSet([key for key in t2 if not (isinstance(key, str) and key.startswith('__'))])
         else:
-            t1_keys = OrderedSet(t1.keys())
-            t2_keys = OrderedSet(t2.keys())
+            t1_keys = SortedSet(t1.keys())
+            t2_keys = SortedSet(t2.keys())
         if self.ignore_string_type_changes or self.ignore_numeric_type_changes or self.ignore_string_case:
             t1_clean_to_keys = self._get_clean_to_keys_mapping(keys=t1_keys, level=level)
             t2_clean_to_keys = self._get_clean_to_keys_mapping(keys=t2_keys, level=level)
-            t1_keys = OrderedSet(t1_clean_to_keys.keys())
-            t2_keys = OrderedSet(t2_clean_to_keys.keys())
+            t1_keys = SortedSet(t1_clean_to_keys.keys())
+            t2_keys = SortedSet(t2_clean_to_keys.keys())
         else:
             t1_clean_to_keys = t2_clean_to_keys = None
 
-        t_keys_intersect = t2_keys.intersection(t1_keys)
-
+        t_keys_intersect = t2_keys & t1_keys
+        t_keys_union = t2_keys | t1_keys
         t_keys_added = t2_keys - t_keys_intersect
         t_keys_removed = t1_keys - t_keys_intersect
 
         if self.threshold_to_diff_deeper:
-            len_keys_changed = (len(t_keys_added) + len(t_keys_removed))
-            if len_keys_changed and len(t_keys_intersect) / len_keys_changed < self.threshold_to_diff_deeper:
+            if len(t_keys_union) and len(t_keys_intersect) / len(t_keys_union) < self.threshold_to_diff_deeper:
                 self._report_result('values_changed', level, local_tree=local_tree)
                 return
 
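The hunk above also changes the threshold_to_diff_deeper test: the key intersection is now compared against the size of the key union instead of the count of added plus removed keys. A worked example with hypothetical key sets (not taken from the commit):

# Hypothetical key sets, to show how the two ratios differ.
t1_keys = {'a', 'b', 'c'}
t2_keys = {'a', 'b', 'd'}

t_keys_intersect = t2_keys & t1_keys         # {'a', 'b'}
t_keys_union = t2_keys | t1_keys             # {'a', 'b', 'c', 'd'}
t_keys_added = t2_keys - t_keys_intersect    # {'d'}
t_keys_removed = t1_keys - t_keys_intersect  # {'c'}

old_ratio = len(t_keys_intersect) / (len(t_keys_added) + len(t_keys_removed))  # 2 / 2 = 1.0
new_ratio = len(t_keys_intersect) / len(t_keys_union)                          # 2 / 4 = 0.5
# With a threshold of, say, 0.33, neither ratio falls below it here, so these dicts
# would still be diffed key by key rather than reported as a single values_changed.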
@@ -1142,7 +1140,7 @@ def _get_most_in_common_pairs_in_iterables(
         # It also includes a "max" key that is just the value of the biggest current distance in the
         # most_in_common_pairs dictionary.
         def defaultdict_orderedset():
-            return defaultdict(OrderedSetPlus)
+            return defaultdict(SortedSet)
         most_in_common_pairs = defaultdict(defaultdict_orderedset)
         pairs = dict_()
@@ -1185,7 +1183,7 @@ def defaultdict_orderedset():
                 pairs_of_item[_distance].add(removed_hash)
         used_to_hashes = set()
 
-        distances_to_from_hashes = defaultdict(OrderedSetPlus)
+        distances_to_from_hashes = defaultdict(SortedSet)
         for from_hash, distances_to_to_hashes in most_in_common_pairs.items():
             # del distances_to_to_hashes['max']
             for dist in distances_to_to_hashes:
@@ -1194,11 +1192,11 @@ def defaultdict_orderedset():
         for dist in sorted(distances_to_from_hashes.keys()):
             from_hashes = distances_to_from_hashes[dist]
             while from_hashes:
-                from_hash = from_hashes.lpop()
+                from_hash = from_hashes.pop()
                 if from_hash not in used_to_hashes:
                     to_hashes = most_in_common_pairs[from_hash][dist]
                     while to_hashes:
-                        to_hash = to_hashes.lpop()
+                        to_hash = to_hashes.pop()
                         if to_hash not in used_to_hashes:
                             used_to_hashes.add(from_hash)
                             used_to_hashes.add(to_hash)
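One behavioral note on the lpop() → pop() change in this hunk: OrderedSetPlus.lpop() removed the first element in insertion order, whereas pop() on a sorted set (with no index) removes the last, i.e. largest, element, so the hash buckets are now drained in reverse sorted order. A small sketch, again assuming sortedcontainers-style semantics:

# Illustrative only; assumes SortedSet behaves like sortedcontainers.SortedSet.
from sortedcontainers import SortedSet

from_hashes = SortedSet(['hash_a', 'hash_c', 'hash_b'])
while from_hashes:
    print(from_hashes.pop())  # 'hash_c', then 'hash_b', then 'hash_a' -- largest first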
@@ -1217,8 +1215,8 @@ def _diff_iterable_with_deephash(self, level, parents_ids, _original_type=None,
 
         full_t1_hashtable = self._create_hashtable(level, 't1')
         full_t2_hashtable = self._create_hashtable(level, 't2')
-        t1_hashes = OrderedSetPlus(full_t1_hashtable.keys())
-        t2_hashes = OrderedSetPlus(full_t2_hashtable.keys())
+        t1_hashes = SortedSet(full_t1_hashtable.keys())
+        t2_hashes = SortedSet(full_t2_hashtable.keys())
         hashes_added = t2_hashes - t1_hashes
         hashes_removed = t1_hashes - t2_hashes
 
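For reference, hashes_added and hashes_removed above are plain set differences over the keys of the two hash tables; a minimal sketch with made-up hash values:

# Made-up hash strings, to illustrate the two difference operations above.
from sortedcontainers import SortedSet  # stand-in for deepdiff.helper's SortedSet

t1_hashes = SortedSet(['h1', 'h2', 'h3'])
t2_hashes = SortedSet(['h2', 'h3', 'h4'])

hashes_added = t2_hashes - t1_hashes    # SortedSet(['h4']) -- only present in t2
hashes_removed = t1_hashes - t2_hashes  # SortedSet(['h1']) -- only present in t1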
@@ -1630,7 +1628,7 @@ def _diff(self, level, parents_ids=frozenset(), _original_type=None, local_tree=
         elif isinstance(level.t1, tuple):
             self._diff_tuple(level, parents_ids, local_tree=local_tree)
 
-        elif isinstance(level.t1, (set, frozenset, OrderedSet)):
+        elif isinstance(level.t1, (set, frozenset, SortedSet)):
             self._diff_set(level, local_tree=local_tree)
 
         elif isinstance(level.t1, np_ndarray):
@@ -1752,19 +1750,19 @@ def affected_paths(self):
             'iterable_item_added': {'root[3][1]': 4},
             'values_changed': {'root[2]': {'new_value': 4, 'old_value': 2}}}
         >>> ddiff.affected_paths
-        OrderedSet(['root[3][1]', 'root[4]', 'root[5]', 'root[6]', 'root[2]'])
+        SortedSet(['root[3][1]', 'root[4]', 'root[5]', 'root[6]', 'root[2]'])
         >>> ddiff.affected_root_keys
-        OrderedSet([3, 4, 5, 6, 2])
+        SortedSet([3, 4, 5, 6, 2])
 
         """
-        result = OrderedSet()
+        result = SortedSet()
         for key in REPORT_KEYS:
             value = self.get(key)
             if value:
-                if isinstance(value, PrettyOrderedSet):
+                if isinstance(value, SortedSet):
                     result |= value
                 else:
-                    result |= OrderedSet(value.keys())
+                    result |= SortedSet(value.keys())
         return result
 
     @property
@@ -1784,18 +1782,18 @@ def affected_root_keys(self):
             'iterable_item_added': {'root[3][1]': 4},
             'values_changed': {'root[2]': {'new_value': 4, 'old_value': 2}}}
         >>> ddiff.affected_paths
-        OrderedSet(['root[3][1]', 'root[4]', 'root[5]', 'root[6]', 'root[2]'])
+        SortedSet(['root[3][1]', 'root[4]', 'root[5]', 'root[6]', 'root[2]'])
         >>> ddiff.affected_root_keys
-        OrderedSet([3, 4, 5, 6, 2])
+        SortedSet([3, 4, 5, 6, 2])
         """
-        result = OrderedSet()
+        result = SortedSet()
         for key in REPORT_KEYS:
             value = self.tree.get(key)
             if value:
-                if isinstance(value, PrettyOrderedSet):
-                    result |= OrderedSet([i.get_root_key() for i in value])
+                if isinstance(value, SortedSet):
+                    result |= SortedSet([i.get_root_key() for i in value])
                 else:
-                    result |= OrderedSet([i.get_root_key() for i in value.keys()])
+                    result |= SortedSet([i.get_root_key() for i in value.keys()])
         return result
 
 
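A short usage sketch of the two properties touched above, reusing the data from their docstrings; the exact repr of the returned set depends on the SortedSet implementation in use:

# Usage sketch; mirrors the docstring example shown in the hunks above.
from deepdiff import DeepDiff

t1 = {1: 1, 2: 2, 3: [3], 4: 4}
t2 = {1: 1, 2: 4, 3: [3, 4], 5: 5, 6: 6}
ddiff = DeepDiff(t1, t2)

print(ddiff.affected_paths)      # every reported path, e.g. 'root[2]', 'root[4]', ...
print(ddiff.affected_root_keys)  # only the top-level keys of those paths, e.g. 2, 3, 4, 5, 6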