Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .github/workflows/build-common.yml
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,7 @@ jobs:
set +e
grep '^ implementation(".*:.*:[0-9].*")\|^ api(".*:.*:[0-9].*")' \
--include=\*.kts \
--exclude-dir=quarkus\*-plugin \
-r instrumentation \
| grep -v testing/build.gradle.kts \
| grep -v com.azure:azure-core-tracing-opentelemetry \
Expand Down
3 changes: 1 addition & 2 deletions .github/workflows/codeql.yml
Original file line number Diff line number Diff line change
Expand Up @@ -75,8 +75,7 @@ jobs:
# --no-build-cache is required for codeql to analyze all modules
# --no-daemon is required for codeql to observe the compilation
# (see https://docs.github.com/en/code-security/codeql-cli/getting-started-with-the-codeql-cli/preparing-your-code-for-codeql-analysis#specifying-build-commands)
# quarkus tasks are disabled because they often cause the build to fail (see https://github.com/open-telemetry/opentelemetry-java-instrumentation/issues/13284)
run: ./gradlew assemble -x javadoc -x :instrumentation:quarkus-resteasy-reactive:quarkus3-testing:quarkusGenerateCodeDev -x :instrumentation:quarkus-resteasy-reactive:quarkus2-testing:quarkusGenerateCodeDev --no-build-cache --no-daemon
run: ./gradlew assemble -x javadoc --no-build-cache --no-daemon

- name: Perform CodeQL analysis
uses: github/codeql-action/analyze@3c3833e0f8c1c83d449a7478aa59c036a9165498 # v3.29.11
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
// Build script for a small helper Gradle plugin used by the Quarkus 2 instrumentation tests.
plugins {
    // Provides the `gradlePlugin` extension and plugin-marker publication support.
    `java-gradle-plugin`
}

repositories {
    mavenCentral()
    // quarkus-gradle-model artifacts are resolved from the Gradle Plugin Portal as well.
    gradlePluginPortal()
}

dependencies {
    implementation(gradleApi())
    // Pinned to the last Quarkus 2.x line; this plugin exists specifically to expose
    // Quarkus 2 Gradle tooling classes (see ConditionalDependenciesEnabler) to the build.
    implementation("io.quarkus:quarkus-gradle-model:2.16.7.Final")
}

gradlePlugin {
    plugins {
        create("quarkus2Plugin") {
            // Plugin id referenced (with `apply false`) from the consuming build script.
            id = "io.opentelemetry.instrumentation.quarkus2"
            implementationClass = "io.opentelemetry.instrumentation.quarkus2plugin.Quarkus2Plugin"
        }
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/

package io.opentelemetry.instrumentation.quarkus2plugin;

import org.gradle.api.Plugin;
import org.gradle.api.Project;

/**
 * Marker plugin class required by the {@code java-gradle-plugin} registration.
 *
 * <p>This plugin is never actually applied: consuming builds declare it with {@code apply false}
 * solely to put the plugin's classpath (the Quarkus 2 Gradle tooling classes bundled alongside
 * this class) on the build-script classpath, and then use those classes directly.
 */
@SuppressWarnings("unused") // instantiated reflectively by Gradle via the plugin descriptor
public class Quarkus2Plugin implements Plugin<Project> {

    @Override
    public void apply(Project project) {
        // Fail fast if someone applies the plugin for real instead of using `apply false`;
        // we use this plugin with apply false and call its classes directly from the build script.
        throw new IllegalStateException("this plugin is not meant to be applied");
    }
}

Original file line number Diff line number Diff line change
@@ -0,0 +1,245 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/

// Includes work from:
/*
* Copyright Quarkus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package io.quarkus.gradle.dependency;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.gradle.api.Project;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.Dependency;
import org.gradle.api.artifacts.ModuleVersionIdentifier;
import org.gradle.api.artifacts.ResolvedArtifact;

import io.quarkus.gradle.tooling.dependency.DependencyUtils;
import io.quarkus.gradle.tooling.dependency.ExtensionDependency;
import io.quarkus.maven.dependency.ArtifactCoords;
import io.quarkus.maven.dependency.ArtifactKey;
import io.quarkus.maven.dependency.GACT;
import io.quarkus.runtime.LaunchMode;

/**
 * Resolves Quarkus "conditional dependencies": extension dependencies that are only activated
 * when all of their declared dependency conditions are already present in the dependency graph.
 *
 * <p>Vendored/adapted from the Quarkus 2 Gradle plugin. The constructor runs the whole
 * fixed-point resolution eagerly; afterwards {@link #getAllExtensions()} exposes every extension
 * (unconditional or successfully activated conditional) discovered in the graph.
 *
 * <p>Not thread-safe: all state is mutated single-threaded during construction.
 */
public class ConditionalDependenciesEnabler {

    /**
     * Links dependencies to extensions: for each queued conditional dependency (keyed by
     * group/artifact), the set of extensions that declared it and are waiting on its activation.
     */
    private final Map<GACT, Set<ExtensionDependency>> featureVariants = new HashMap<>();
    /**
     * Despite its name, only contains extensions which have no conditional dependencies, or have
     * resolved their conditional dependencies.
     */
    private final Map<ModuleVersionIdentifier, ExtensionDependency> allExtensions = new HashMap<>();
    private final Project project;
    // Platform (BOM) configuration whose constraints/excludes are copied onto each
    // detached resolution configuration so conditional deps resolve to consistent versions.
    private final Configuration enforcedPlatforms;
    // Every artifact (extension or not) seen in the graph so far; used to evaluate conditions.
    private final Set<ArtifactKey> existingArtifacts = new HashSet<>();
    // Work queue of conditional dependencies whose conditions are not (yet) satisfied.
    private final List<Dependency> unsatisfiedConditionalDeps = new ArrayList<>();

    /**
     * Eagerly computes the set of enabled extensions for the given launch mode.
     *
     * @param project the Gradle project whose dependency graph is inspected
     * @param mode launch mode selecting which base runtime configuration to resolve
     * @param platforms configuration holding the enforced platforms (BOMs) to honor
     */
    public ConditionalDependenciesEnabler(Project project, LaunchMode mode,
            Configuration platforms) {
        this.project = project;
        this.enforcedPlatforms = platforms;

        // Get runtimeClasspath (quarkusProdBaseRuntimeClasspathConfiguration to be exact)
        Configuration baseRuntimeConfig = project.getConfigurations()
                .getByName(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(mode));

        if (!baseRuntimeConfig.getIncoming().getDependencies().isEmpty()) {
            // Gather all extensions from the full resolved dependency tree
            collectConditionalDependencies(baseRuntimeConfig.getResolvedConfiguration().getResolvedArtifacts());
            // If there are any extensions which had unresolved conditional dependencies:
            // iterate to a fixed point — activating one conditional dependency can add artifacts
            // that satisfy the conditions of another.
            while (!unsatisfiedConditionalDeps.isEmpty()) {
                boolean satisfiedConditionalDeps = false;
                final int originalUnsatisfiedCount = unsatisfiedConditionalDeps.size();
                int i = 0;
                // Go through each unsatisfied/unresolved dependency once:
                while (i < unsatisfiedConditionalDeps.size()) {
                    final Dependency conditionalDep = unsatisfiedConditionalDeps.get(i);
                    // Try to resolve it with the latest evolved graph available
                    if (resolveConditionalDependency(conditionalDep)) {
                        // Mark the resolution as a success so we know the graph evolved
                        satisfiedConditionalDeps = true;
                        unsatisfiedConditionalDeps.remove(i);
                    } else {
                        // No resolution (yet) or graph evolution; move on to the next
                        ++i;
                    }
                }
                // If we didn't resolve any dependencies and the graph did not evolve, give up.
                if (!satisfiedConditionalDeps && unsatisfiedConditionalDeps.size() == originalUnsatisfiedCount) {
                    break;
                }
            }
            // Drop intermediate bookkeeping; only allExtensions is needed after construction.
            reset();
        }

    }

    /** Returns every discovered extension: unconditional ones plus activated conditional ones. */
    public Collection<ExtensionDependency> getAllExtensions() {
        return allExtensions.values();
    }

    // Clears all working state except allExtensions, which is the constructor's output.
    private void reset() {
        featureVariants.clear();
        existingArtifacts.clear();
        unsatisfiedConditionalDeps.clear();
    }

    /**
     * Seeds the working state from the resolved base runtime classpath: records every artifact,
     * registers every extension, and queues conditional dependencies not yet in the graph.
     */
    private void collectConditionalDependencies(Set<ResolvedArtifact> runtimeArtifacts) {
        // For every artifact in the dependency graph:
        for (ResolvedArtifact artifact : runtimeArtifacts) {
            // Add to master list of artifacts:
            existingArtifacts.add(getKey(artifact));
            ExtensionDependency extension = DependencyUtils.getExtensionInfoOrNull(project, artifact);
            // If this artifact represents an extension:
            if (extension != null) {
                // Add to master list of accepted extensions:
                allExtensions.put(extension.getExtensionId(), extension);
                for (Dependency conditionalDep : extension.getConditionalDependencies()) {
                    // If the dependency is not present yet in the graph, queue it for resolution later
                    if (!exists(conditionalDep)) {
                        queueConditionalDependency(extension, conditionalDep);
                    }
                }
            }
        }
    }

    /**
     * Attempts to activate one queued conditional dependency against the current graph.
     *
     * @return {@code true} if the dependency's conditions were satisfied and it (plus its own
     *     transitive artifacts and extensions) was merged into the working state
     */
    private boolean resolveConditionalDependency(Dependency conditionalDep) {

        // Resolve the conditional dependency in isolation (but under the enforced platforms).
        final Configuration conditionalDeps = createConditionalDependenciesConfiguration(project, conditionalDep);
        Set<ResolvedArtifact> resolvedArtifacts = conditionalDeps.getResolvedConfiguration().getResolvedArtifacts();

        boolean satisfied = false;
        // Resolved artifacts don't have great linking back to the original artifact, so I think
        // this loop is trying to find the artifact that represents the original conditional
        // dependency
        for (ResolvedArtifact artifact : resolvedArtifacts) {
            if (conditionalDep.getName().equals(artifact.getName())
                    && conditionalDep.getVersion().equals(artifact.getModuleVersion().getId().getVersion())
                    && artifact.getModuleVersion().getId().getGroup().equals(conditionalDep.getGroup())) {
                // Once the dependency is found, reload the extension info from within
                final ExtensionDependency extensionDependency = DependencyUtils.getExtensionInfoOrNull(project, artifact);
                // Now check if this conditional dependency is resolved given the latest graph evolution
                if (extensionDependency != null && (extensionDependency.getDependencyConditions().isEmpty()
                        || exist(extensionDependency.getDependencyConditions()))) {
                    satisfied = true;
                    enableConditionalDependency(extensionDependency.getExtensionId());
                    break;
                }
            }
        }

        // No resolution (yet); give up.
        if (!satisfied) {
            return false;
        }

        // The conditional dependency resolved! Let's now add all of /its/ dependencies
        for (ResolvedArtifact artifact : resolvedArtifacts) {
            // First add the artifact to the master list
            existingArtifacts.add(getKey(artifact));
            ExtensionDependency extensionDependency = DependencyUtils.getExtensionInfoOrNull(project, artifact);
            if (extensionDependency == null) {
                continue;
            }
            // If this artifact represents an extension, mark this one as a conditional extension
            extensionDependency.setConditional(true);
            // Add to the master list of accepted extensions
            allExtensions.put(extensionDependency.getExtensionId(), extensionDependency);
            for (Dependency cd : extensionDependency.getConditionalDependencies()) {
                // Add any unsatisfied/unresolved conditional dependencies of this dependency to the queue
                if (!exists(cd)) {
                    queueConditionalDependency(extensionDependency, cd);
                }
            }
        }
        return satisfied;
    }

    private void queueConditionalDependency(ExtensionDependency extension, Dependency conditionalDep) {
        // 1. Add to master list of unresolved/unsatisfied dependencies (only on first sighting,
        //    hence computeIfAbsent — the same conditional dep may be declared by many extensions)
        // 2. Add map entry to link dependency to extension
        featureVariants.computeIfAbsent(getFeatureKey(conditionalDep), k -> {
            unsatisfiedConditionalDeps.add(conditionalDep);
            return new HashSet<>();
        }).add(extension);
    }

    /**
     * Builds a throwaway configuration containing just {@code conditionalDep}, mirroring the
     * enforced-platform configuration's exclude rules and dependencies (BOMs) so the conditional
     * dependency resolves to versions consistent with the rest of the build.
     */
    private Configuration createConditionalDependenciesConfiguration(Project project, Dependency conditionalDep) {
        /*
        Upstream Quarkus wires the platforms via extendsFrom on the detached configuration:

        Configuration conditionalDepConfiguration = project.getConfigurations()
                .detachedConfiguration()
                .extendsFrom(enforcedPlatforms);

        Here the exclude rules and dependencies are copied over manually instead
        (NOTE(review): presumably to avoid an issue with extendsFrom on detached
        configurations in the Gradle version used — confirm before simplifying).
        */
        Configuration conditionalDepConfiguration = project.getConfigurations().detachedConfiguration();
        enforcedPlatforms.getExcludeRules().forEach(rule -> {
            conditionalDepConfiguration.exclude(Map.of(
                    "group", rule.getGroup(),
                    "module", rule.getModule()));
        });
        enforcedPlatforms.getAllDependencies().forEach(dependency -> {
            conditionalDepConfiguration.getDependencies().add(dependency);
        });
        conditionalDepConfiguration.getDependencies().add(conditionalDep);
        return conditionalDepConfiguration;
    }

    /**
     * Marks a conditional dependency as activated: removes it from the waiting map and lets every
     * extension that declared it import it as a real dependency.
     */
    private void enableConditionalDependency(ModuleVersionIdentifier dependency) {
        final Set<ExtensionDependency> extensions = featureVariants.remove(getFeatureKey(dependency));
        if (extensions == null) {
            return;
        }
        extensions.forEach(e -> e.importConditionalDependency(project.getDependencies(), dependency));
    }

    /** True if every listed dependency condition is already present in the graph. */
    private boolean exist(List<ArtifactKey> dependencies) {
        return existingArtifacts.containsAll(dependencies);
    }

    /** True if the given dependency is already present in the graph (as a plain jar, no classifier). */
    private boolean exists(Dependency dependency) {
        return existingArtifacts
                .contains(ArtifactKey.of(dependency.getGroup(), dependency.getName(), null, ArtifactCoords.TYPE_JAR));
    }

    /** True if the given extension is already present in the graph (as a plain jar, no classifier). */
    public boolean exists(ExtensionDependency dependency) {
        return existingArtifacts
                .contains(ArtifactKey.of(dependency.getGroup(), dependency.getName(), null, ArtifactCoords.TYPE_JAR));
    }

    // featureVariants keys ignore classifier/type: group + name only.
    private static GACT getFeatureKey(ModuleVersionIdentifier version) {
        return new GACT(version.getGroup(), version.getName());
    }

    private static GACT getFeatureKey(Dependency version) {
        return new GACT(version.getGroup(), version.getName());
    }

    // existingArtifacts keys include classifier and type so condition checks are exact.
    private static ArtifactKey getKey(ResolvedArtifact a) {
        return ArtifactKey.of(a.getModuleVersion().getId().getGroup(), a.getName(), a.getClassifier(), a.getType());
    }
}
Loading