@@ -80,12 +80,9 @@ class DistributedArray:
8080        Axis along which distribution occurs. Defaults to ``0``. 
8181    local_shapes : :obj:`list`, optional 
8282        List of tuples or integers representing local shapes at each rank. 
83- <<<<<<< HEAD 
8483    mask : :obj:`list`, optional 
8584        Mask defining subsets of ranks to consider when performing 'global' 
8685        operations on the distributed array such as dot product or norm. 
87- ======= 
88- >>>>>>> 57b793e8ce4c150d90866d1e41c0bd9e88cae985 
8986    engine : :obj:`str`, optional 
9087        Engine used to store array (``numpy`` or ``cupy``) 
9188    dtype : :obj:`str`, optional 
@@ -96,10 +93,7 @@ def __init__(self, global_shape: Union[Tuple, Integral],
9693                 base_comm: Optional[MPI.Comm] = MPI.COMM_WORLD,
9794                 partition: Partition = Partition.SCATTER, axis: int = 0,
9895                 local_shapes: Optional[List[Union[Tuple, Integral]]] = None,
99- <<<<<<< HEAD
10096                 mask: Optional[List[Integral]] = None,
101- =======
102- >>>>>>> 57b793e8ce4c150d90866d1e41c0bd9e88cae985
10397                 engine: Optional[str] = "numpy",
10498                 dtype: Optional[DTypeLike] = np.float64):
10599        if isinstance(global_shape, Integral):
@@ -115,11 +109,8 @@ def __init__(self, global_shape: Union[Tuple, Integral],
115109        self._base_comm = base_comm
116110        self._partition = partition
117111        self._axis = axis
118- <<<<<<< HEAD
119112        self._mask = mask
120113        self._sub_comm = base_comm if mask is None else base_comm.Split(color=mask[base_comm.rank], key=base_comm.rank)
121- =======
122- >>>>>>> 57b793e8ce4c150d90866d1e41c0bd9e88cae985
123114
124115        local_shapes = local_shapes if local_shapes is None else [_value_or_sized_to_tuple(local_shape) for local_shape in local_shapes]
125116        self._check_local_shapes(local_shapes)
0 commit comments