
Commit c10abf5

MAINT: Cut down on "future" dispatching and "weak" DType paths
1 parent df8b85d commit c10abf5

2 files changed: +25 -126 lines


numpy/core/src/umath/dispatching.c

Lines changed: 16 additions & 104 deletions
@@ -233,8 +233,7 @@ resolve_implementation_info(PyUFuncObject *ufunc,
         if (!matches) {
             continue;
         }
-        // TODO: Right now, there should only be a single match possible, so
-        //       this can go into the NEXT pr.
+
         /* The resolver matches, but we have to check if it is better */
         if (best_dtypes != NULL) {
             int current_best = -1; /* -1 neither, 0 current best, 1 new */
@@ -250,10 +249,6 @@ resolve_implementation_info(PyUFuncObject *ufunc,
             for (Py_ssize_t i = 0; i < nargs; i++) {
                 int best;

-                /* Whether this (normally output) dtype was specified at all */
-                int is_not_specified = (
-                        op_dtypes[i] == (PyArray_DTypeMeta *)Py_None);
-
                 PyObject *prev_dtype = PyTuple_GET_ITEM(best_dtypes, i);
                 PyObject *new_dtype = PyTuple_GET_ITEM(curr_dtypes, i);

@@ -262,54 +257,24 @@ resolve_implementation_info(PyUFuncObject *ufunc,
                     continue;
                 }
                 /*
-                 * TODO: The `abstract` paths and all subclass checks are not
-                 *       used right now. These will be used when user DTypes
-                 *       and promoters are used. Especially, the abstract
-                 *       paths will become important when value-based promotion
-                 *       is removed from NumPy.
+                 * TODO: Even if the input is not specified, if we have
+                 *       abstract DTypes and one is a subclass of the other,
+                 *       the subclass should be considered a better match
+                 *       (subclasses are always more specific).
                  */
-                if (is_not_specified) {
-                    /*
-                     * When DType is completely unspecified, prefer abstract
-                     * over concrete, assuming it will resolve.
-                     * Furthermore, we cannot decide which abstract/None
-                     * is "better", only concrete ones which are subclasses
-                     * of Abstract ones are defined as worse.
-                     */
-                    int prev_is_concrete = 0, new_is_concrete = 0;
-                    if ((prev_dtype != Py_None) &&
-                            (!((PyArray_DTypeMeta *)prev_dtype)->abstract)) {
-                        prev_is_concrete = 1;
-                    }
-                    if ((new_dtype != Py_None) &&
-                            (!((PyArray_DTypeMeta *)new_dtype)->abstract)) {
-                        new_is_concrete = 1;
-                    }
-                    if (prev_is_concrete == new_is_concrete) {
-                        best = -1;
-                    }
-                    else if (prev_is_concrete) {
-                        unambiguously_equally_good = 0;
-                        best = 1;
-                    }
-                    else {
-                        unambiguously_equally_good = 0;
-                        best = 0;
-                    }
-                }
-                /* If either is None, the other is strictly more specific */
-                else if (prev_dtype == Py_None) {
+                /* If either is None, the other is strictly more specific */
+                if (prev_dtype == Py_None) {
                     unambiguously_equally_good = 0;
                     best = 1;
                 }
                 else if (new_dtype == Py_None) {
                     unambiguously_equally_good = 0;
                     best = 0;
                 }
-                /*
-                 * If both are concrete and not identical, this is
-                 * ambiguous.
-                 */
+                /*
+                 * If both are concrete and not identical, this is
+                 * ambiguous.
+                 */
                 else if (!((PyArray_DTypeMeta *)prev_dtype)->abstract &&
                          !((PyArray_DTypeMeta *)new_dtype)->abstract) {
                     /*
@@ -319,68 +284,15 @@ resolve_implementation_info(PyUFuncObject *ufunc,
                      */
                     best = -1;
                 }
-                else if (!((PyArray_DTypeMeta *)prev_dtype)->abstract) {
-                    /* old is not abstract, so better (both not possible) */
-                    unambiguously_equally_good = 0;
-                    best = 0;
-                }
-                else if (!((PyArray_DTypeMeta *)new_dtype)->abstract) {
-                    /* new is not abstract, so better (both not possible) */
-                    unambiguously_equally_good = 0;
-                    best = 1;
-                }
                 /*
-                 * Both are abstract DTypes, there is a clear order if
-                 * one of them is a subclass of the other.
-                 * If this fails, reject it completely (could be changed).
-                 * The case that it is the same dtype is already caught.
+                 * TODO: Unreachable, but we will need logic for abstract
+                 *       DTypes to decide if one is a subclass of the other
+                 *       (And their subclass relation is well defined.)
                  */
                 else {
-                    /* Note the identity check above, so this true subclass */
-                    int new_is_subclass = PyObject_IsSubclass(
-                            new_dtype, prev_dtype);
-                    if (new_is_subclass < 0) {
-                        return -1;
-                    }
-                    /*
-                     * Could optimize this away if above is True, but this
-                     * catches inconsistent definitions of subclassing.
-                     */
-                    int prev_is_subclass = PyObject_IsSubclass(
-                            prev_dtype, new_dtype);
-                    if (prev_is_subclass < 0) {
-                        return -1;
-                    }
-                    if (prev_is_subclass && new_is_subclass) {
-                        /* should not happen unless they are identical */
-                        PyErr_SetString(PyExc_RuntimeError,
-                                "inconsistent subclassing of DTypes; if "
-                                "this happens, two dtypes claim to be a "
-                                "superclass of the other one.");
-                        return -1;
-                    }
-                    if (!prev_is_subclass && !new_is_subclass) {
-                        /* Neither is more precise than the other one */
-                        PyErr_SetString(PyExc_TypeError,
-                                "inconsistent type resolution hierarchy; "
-                                "DTypes of two matching loops do not have "
-                                "a clear hierarchy defined. Diamond shape "
-                                "inheritance is unsupported for use with "
-                                "UFunc type resolution. (You may resolve "
-                                "this by inserting an additional common "
-                                "subclass). This limitation may be "
-                                "partially resolved in the future.");
-                        return -1;
-                    }
-                    if (new_is_subclass) {
-                        unambiguously_equally_good = 0;
-                        best = 1;
-                    }
-                    else {
-                        unambiguously_equally_good = 0;
-                        best = 2;
-                    }
+                    assert(0);
                 }
+
                 if ((current_best != -1) && (current_best != best)) {
                     /*
                      * We need a clear best, this could be tricky, unless
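
The removed `else` branch implemented exactly this kind of ordering; after this commit it only hits `assert(0)`, and the new TODO notes that subclass logic will be needed again once abstract DTypes participate in dispatching. Below is a minimal sketch of such an ordering check. It is hypothetical, not part of this commit or of NumPy; it loosely follows the removed code and uses only CPython's `PyObject_IsSubclass` (the helper name and return convention are made up for illustration).

#include <Python.h>

/*
 * Hypothetical helper: given two distinct DType classes that both match,
 * decide which one is the more specific (better) match.
 * Returns 1 if `new_dtype` wins, 0 if `prev_dtype` wins, and -1 on error
 * or when no clear ordering exists.
 */
static int
pick_more_specific_dtype(PyObject *prev_dtype, PyObject *new_dtype)
{
    /* The caller is assumed to have handled the identical case already. */
    int new_is_subclass = PyObject_IsSubclass(new_dtype, prev_dtype);
    if (new_is_subclass < 0) {
        return -1;
    }
    /* Check the reverse direction as well, to catch inconsistent hierarchies. */
    int prev_is_subclass = PyObject_IsSubclass(prev_dtype, new_dtype);
    if (prev_is_subclass < 0) {
        return -1;
    }
    if (prev_is_subclass && new_is_subclass) {
        /* Each claims to be a superclass of the other: inconsistent. */
        PyErr_SetString(PyExc_RuntimeError,
                "inconsistent subclassing of DTypes");
        return -1;
    }
    if (!prev_is_subclass && !new_is_subclass) {
        /* Neither is more specific than the other; the match is ambiguous. */
        PyErr_SetString(PyExc_TypeError,
                "no clear hierarchy between the matching DType loops");
        return -1;
    }
    /* A true subclass is the more specific match, so it wins. */
    return new_is_subclass ? 1 : 0;
}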

numpy/core/src/umath/ufunc_object.c

Lines changed: 9 additions & 22 deletions
@@ -965,30 +965,17 @@ convert_ufunc_arguments(PyUFuncObject *ufunc,
             all_scalar = NPY_FALSE;
             continue;
         }
-        // TODO: Refactor this into a helper function!
-        // We probably need to move the special object array-conversion till
-        // After the promotion :(. If this is NOT a forced-legacy promotion?!
-        // We may need to convert "twice" (but not more than we currently do)
         /*
-         * Special case if it was a Python scalar, to allow "weak" promotion,
-         * so replace the arrays DType with the Python scalar DType.
+         * TODO: we need to special case scalars here, if the input is a
+         *       Python int, float, or complex, we have to use the "weak"
+         *       DTypes: `PyArray_PyIntAbstractDType`, etc.
+         *       This is to allow e.g. `float32(1.) + 1` to return `float32`.
+         *       The correct array dtype can only be found after promotion for
+         *       such a "weak scalar". We could avoid conversion here, but
+         *       must convert it for use in the legacy promotion.
+         *       There is still a small chance that this logic can instead
+         *       happen inside the Python operators.
          */
-        if ((PyObject *)out_op[i] != obj) {
-            PyArray_DTypeMeta *scalar_DType = NULL;
-            if (PyLong_CheckExact(obj)) {
-                scalar_DType = &PyArray_PyIntAbstractDType;
-            }
-            else if (PyFloat_CheckExact(obj)) {
-                scalar_DType = &PyArray_PyFloatAbstractDType;
-            }
-            else if (PyComplex_CheckExact(obj)) {
-                scalar_DType = &PyArray_PyComplexAbstractDType;
-            }
-            if (scalar_DType != NULL) {
-                Py_INCREF(scalar_DType);
-                Py_SETREF(out_op_DTypes[i], scalar_DType);
-            }
-        }
     }
     if (all_legacy && (!all_scalar && any_scalar)) {
         *force_legacy_promotion = should_use_min_scalar(nin, out_op, 0, NULL);
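
The removed block (which the old TODO already asked to be factored into a helper) mapped exact Python `int`, `float`, and `complex` scalars to the abstract "weak" DTypes named in the new comment. A minimal sketch of such a helper follows; it is hypothetical and not added by this commit, it assumes it would live next to this code in NumPy's umath sources where `PyArray_DTypeMeta` and the `PyArray_Py*AbstractDType` internals are visible, and the function name is made up.

/*
 * Hypothetical helper: return the "weak" abstract DType for an exact
 * Python int, float, or complex scalar, or NULL for anything else.
 */
static PyArray_DTypeMeta *
scalar_to_weak_dtype(PyObject *obj)
{
    if (PyLong_CheckExact(obj)) {
        return &PyArray_PyIntAbstractDType;
    }
    if (PyFloat_CheckExact(obj)) {
        return &PyArray_PyFloatAbstractDType;
    }
    if (PyComplex_CheckExact(obj)) {
        return &PyArray_PyComplexAbstractDType;
    }
    return NULL;
}

As in the removed code, a caller would Py_INCREF the returned DType and Py_SETREF it into out_op_DTypes[i], so that promotion can later pick the concrete array dtype (e.g. so that `float32(1.) + 1` stays `float32`).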
