-
Notifications
You must be signed in to change notification settings - Fork 25.6k
ES|QL Inference runner refactoring #131986
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
e60510b
879f570
aa77791
ad782b9
7863781
fd81884
c6c7aac
29c33f2
273006c
a324b32
d77125a
15ec18d
ef60c09
db8cdb2
44f35fc
6ae9a03
c4ae133
933f484
8c93015
57fe411
a570854
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -188,9 +188,9 @@ public class Analyzer extends ParameterizedRuleExecutor<LogicalPlan, AnalyzerCon | |
Limiter.ONCE, | ||
new ResolveTable(), | ||
new ResolveEnrich(), | ||
new ResolveInference(), | ||
new ResolveLookupTables(), | ||
new ResolveFunctions(), | ||
new ResolveInference(), | ||
new DateMillisToNanosInEsRelation(IMPLICIT_CASTING_DATE_AND_DATE_NANOS.isEnabled()) | ||
), | ||
new Batch<>( | ||
|
@@ -414,34 +414,6 @@ private static NamedExpression createEnrichFieldExpression( | |
} | ||
} | ||
|
||
private static class ResolveInference extends ParameterizedAnalyzerRule<InferencePlan<?>, AnalyzerContext> { | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Is there any difference between this implementation and the new one you added? So this diff here looks like it was entirely unnecessary. Maybe the one thing that we needed in There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. There is no major difference, but the new method is easier to extend since I can now chain more transforms. return plan.transformDown(InferencePlan.class, p -> resolveInferencePlan(p, context)); will become, when implementing the resolution of return plan.transformDown(InferencePlan.class, p -> resolveInferencePlan(p, context))
.transformExpressionsOnly(InferenceFunction.class, f -> resolveInferenceFunction(f, context)); This is the whole point of this PR: to anticipate the changes that are required in the existing framework and to isolate these changes, so we can focus on testing potential regressions. BTW, I am 100% sure that we will have to change the order of the resolution and that I will use this change to finish the implementation of TEXT_EMBEDDING. |
||
@Override
protected LogicalPlan rule(InferencePlan<?> plan, AnalyzerContext context) {
    // By the time this rule runs the inference id expression must already be
    // resolved and foldable to a constant.
    assert plan.inferenceId().resolved() && plan.inferenceId().foldable();

    // Fold the inference id expression down to its string value.
    String inferenceId = BytesRefs.toString(plan.inferenceId().fold(FoldContext.small()));
    ResolvedInference resolvedInference = context.inferenceResolution().getResolvedInference(inferenceId);

    if (resolvedInference != null && resolvedInference.taskType() == plan.taskType()) {
        // The endpoint exists and its task type matches the plan's: nothing to change.
        return plan;
    } else if (resolvedInference != null) {
        // The endpoint exists but its task type is incompatible with this command:
        // attach a descriptive resolution error to the plan.
        String error = "cannot use inference endpoint ["
            + inferenceId
            + "] with task type ["
            + resolvedInference.taskType()
            + "] within a "
            + plan.nodeName()
            + " command. Only inference endpoints with the task type ["
            + plan.taskType()
            + "] are supported.";
        return plan.withInferenceResolutionError(inferenceId, error);
    } else {
        // Unknown endpoint: propagate the error reported by the inference resolution.
        String error = context.inferenceResolution().getError(inferenceId);
        return plan.withInferenceResolutionError(inferenceId, error);
    }
}
} | ||
|
||
private static class ResolveLookupTables extends ParameterizedAnalyzerRule<Lookup, AnalyzerContext> { | ||
|
||
@Override | ||
|
@@ -1335,6 +1307,41 @@ public static org.elasticsearch.xpack.esql.core.expression.function.Function res | |
} | ||
} | ||
|
||
/**
 * Validates every {@code InferencePlan} node in the logical plan against the
 * inference endpoints gathered in the {@code AnalyzerContext}'s inference
 * resolution. A node whose endpoint is unknown, or whose endpoint's task type
 * does not match the one the command requires, is replaced by the same node
 * carrying an inference resolution error.
 *
 * NOTE(review): implemented as a plain {@code ParameterizedRule} over the whole
 * plan (rather than a per-node analyzer rule) so the traversal can later be
 * chained with additional transforms — e.g. resolving inference functions.
 */
private static class ResolveInference extends ParameterizedRule<LogicalPlan, LogicalPlan, AnalyzerContext> {

    @Override
    public LogicalPlan apply(LogicalPlan plan, AnalyzerContext context) {
        // Walk the tree top-down and validate each inference plan node.
        return plan.transformDown(InferencePlan.class, p -> resolveInferencePlan(p, context));
    }

    private LogicalPlan resolveInferencePlan(InferencePlan<?> plan, AnalyzerContext context) {
        // The inference id expression must already be resolved and foldable
        // to a constant before this rule runs.
        assert plan.inferenceId().resolved() && plan.inferenceId().foldable();

        // Fold the inference id expression down to its string value.
        String inferenceId = BytesRefs.toString(plan.inferenceId().fold(FoldContext.small()));
        ResolvedInference resolvedInference = context.inferenceResolution().getResolvedInference(inferenceId);

        if (resolvedInference == null) {
            // Unknown endpoint: propagate the error reported by the inference resolution.
            String error = context.inferenceResolution().getError(inferenceId);
            return plan.withInferenceResolutionError(inferenceId, error);
        }

        if (resolvedInference.taskType() != plan.taskType()) {
            // The endpoint exists but its task type is incompatible with this
            // command: attach a descriptive resolution error to the plan.
            String error = "cannot use inference endpoint ["
                + inferenceId
                + "] with task type ["
                + resolvedInference.taskType()
                + "] within a "
                + plan.nodeName()
                + " command. Only inference endpoints with the task type ["
                + plan.taskType()
                + "] are supported.";
            return plan.withInferenceResolutionError(inferenceId, error);
        }

        // Endpoint found and task type matches: the node is valid as-is.
        return plan;
    }
}
|
||
private static class AddImplicitLimit extends ParameterizedRule<LogicalPlan, LogicalPlan, AnalyzerContext> { | ||
@Override | ||
public LogicalPlan apply(LogicalPlan logicalPlan, AnalyzerContext context) { | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
ℹ️ Inference resolution is moved after function resolution, so we can resolve inference ids used in functions like
TEXT_EMBEDDING.