@@ -1,8 +1,13 @@
package alien4cloud.paas.yorc.modifier;

import alien4cloud.paas.wf.validation.WorkflowValidator;
import alien4cloud.tosca.context.ToscaContextual;
import lombok.extern.slf4j.Slf4j;
import static alien4cloud.utils.AlienUtils.safe;

import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import javax.annotation.Resource;

import org.alien4cloud.alm.deployment.configuration.flow.FlowExecutionContext;
import org.alien4cloud.alm.deployment.configuration.flow.TopologyModifierSupport;
import org.alien4cloud.tosca.editor.operations.workflow.ConnectStepFromOperation;
@@ -14,15 +19,11 @@
import org.alien4cloud.tosca.model.templates.Topology;
import org.alien4cloud.tosca.model.workflow.Workflow;
import org.alien4cloud.tosca.model.workflow.WorkflowStep;
import org.alien4cloud.tosca.utils.TopologyNavigationUtil;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import static alien4cloud.utils.AlienUtils.safe;
import alien4cloud.paas.wf.validation.WorkflowValidator;
import alien4cloud.tosca.context.ToscaContextual;
import lombok.extern.slf4j.Slf4j;

/**
 * A {@code BlockStorageComputeWFModifier} is a Topology modifier that explores a {@link Topology} to swap Compute and BlockStorage
@@ -60,7 +61,7 @@ private void doProcess(Topology topology, FlowExecutionContext context) {
Workflow installWF = topology.getWorkflows().get("install");
Workflow uninstallWF = topology.getWorkflows().get("uninstall");

Set<NodeTemplate> bsSet = TopologyNavigationUtil.getNodesOfType(topology, TOSCA_NODES_BLOCK_STORAGE, true);
Set<NodeTemplate> bsSet = this.getNodesOfType(context, topology, TOSCA_NODES_BLOCK_STORAGE, true);

// Let's process all BS
bsSet.forEach(bs -> safe(bs.getRelationships()).forEach((rn, rt) -> {
@@ -101,27 +101,27 @@ protected void doProcess(Topology topology, FlowExecutionContext context) {

// replace all yorc.nodes.slurm.ContainerJobUnit by
// yorc.nodes.slurm.SingularityJob
Set<NodeTemplate> containerJobUnitNodes = TopologyNavigationUtil.getNodesOfType(topology,
Set<NodeTemplate> containerJobUnitNodes = this.getNodesOfType(context, topology,
SLURM_TYPES_CONTAINER_JOB_UNIT, false);
containerJobUnitNodes.forEach(nodeTemplate -> transformContainerJobUnit(csar, topology, context, nodeTemplate));

// replace all yorc.nodes.slurm.ContainerRuntime by
// yorc.nodes.slurm.SingularityJob if not already hosted on a ContainerJobUnit
Set<NodeTemplate> containerRuntimeNodes = TopologyNavigationUtil.getNodesOfType(topology,
Set<NodeTemplate> containerRuntimeNodes = this.getNodesOfType(context, topology,
SLURM_TYPES_CONTAINER_RUNTIME, false);
containerRuntimeNodes.forEach(nodeTemplate -> transformContainerRuntime(csar, topology, context, nodeTemplate));

// replace all tosca.nodes.Container.Application.DockerContainer by
// yorc.nodes.slurm.SingularityJob if hosted on a ContainerRuntime transformed
// into a yorc.nodes.slurm.SingularityJob
Set<NodeTemplate> containerNodes = TopologyNavigationUtil.getNodesOfType(topology,
Set<NodeTemplate> containerNodes = this.getNodesOfType(context, topology,
A4C_TYPES_APPLICATION_DOCKER_CONTAINER, true);
containerNodes.forEach(
nodeTemplate -> transformContainer(csar, topology, context, functionEvaluatorContext, nodeTemplate));

// for each volume node, populate the 'volumes' property of the corresponding
// deployment resource
Set<NodeTemplate> volumeNodes = TopologyNavigationUtil.getNodesOfType(topology,
Set<NodeTemplate> volumeNodes = this.getNodesOfType(context, topology,
SLURM_TYPES_HOST_TO_CONTAINER_VOLUME, true);
volumeNodes.forEach(nodeTemplate -> transformContainerVolume(csar, topology, context, nodeTemplate));

@@ -131,15 +131,30 @@ protected void doProcess(Topology topology, FlowExecutionContext context) {
safe(replacementMap.keySet()).forEach(nodeName -> removeNode(csar, topology, nodeName));
}

protected void linkDependsOn(Csar csar, FlowExecutionContext context, Topology topology,
protected void linkDependsOn(Csar csar, FlowExecutionContext context,
Topology topology,
Map<String, Set<String>> containersDependencies, Map<String, NodeTemplate> replacementMap) {
containersDependencies.forEach((source, targets) -> {
boolean sourceReplaced = true;
boolean targetReplaced = true;
NodeTemplate sourceNode = replacementMap.get(source);
safe(targets).forEach(target -> {
if (sourceNode == null) {
// not replaced in this modifier
sourceReplaced = false;
sourceNode = topology.getNodeTemplates().get(source);
}
for (String target : targets) {
NodeTemplate targetNode = replacementMap.get(target);
addRelationshipTemplate(csar, topology, sourceNode, targetNode.getName(),
if (targetNode == null) {
// not replaced in this modifier
targetReplaced = false;
targetNode = topology.getNodeTemplates().get(target);
}
if (sourceReplaced || targetReplaced) {
addRelationshipTemplate(csar, topology, sourceNode, targetNode.getName(),
NormativeRelationshipConstants.DEPENDS_ON, "dependency", "feature");
});
}
}
});
}

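
The interleaved old and new lines in the hunk above are hard to follow, so here is the post-change linkDependsOn body reconstructed from the added lines only (names and helper calls exactly as they appear in the diff; a reading aid, not an authoritative copy of the file). A likely reason the inner safe(targets).forEach(...) lambda became a plain for loop, inferred rather than stated in the PR, is that the new sourceReplaced/targetReplaced flags are locals mutated after initialization, which a lambda could not capture because captured locals must be effectively final.

protected void linkDependsOn(Csar csar, FlowExecutionContext context,
        Topology topology,
        Map<String, Set<String>> containersDependencies, Map<String, NodeTemplate> replacementMap) {
    containersDependencies.forEach((source, targets) -> {
        boolean sourceReplaced = true;
        boolean targetReplaced = true;
        NodeTemplate sourceNode = replacementMap.get(source);
        if (sourceNode == null) {
            // not replaced in this modifier: fall back to the original node template
            sourceReplaced = false;
            sourceNode = topology.getNodeTemplates().get(source);
        }
        for (String target : targets) {
            NodeTemplate targetNode = replacementMap.get(target);
            if (targetNode == null) {
                // not replaced in this modifier: fall back to the original node template
                targetReplaced = false;
                targetNode = topology.getNodeTemplates().get(target);
            }
            // only add the dependsOn relationship when at least one end was swapped by this modifier
            if (sourceReplaced || targetReplaced) {
                addRelationshipTemplate(csar, topology, sourceNode, targetNode.getName(),
                        NormativeRelationshipConstants.DEPENDS_ON, "dependency", "feature");
            }
        }
    });
}
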
@@ -171,7 +186,7 @@ protected String getTargetJobTypeVersion() {
*/
protected void transformContainerJobUnit(Csar csar, Topology topology, FlowExecutionContext context,
NodeTemplate nodeTemplate) {
NodeTemplate singularityNode = addNodeTemplate(csar, topology, nodeTemplate.getName() + "_Singularity",
NodeTemplate singularityNode = addNodeTemplate(context, csar, topology, nodeTemplate.getName() + "_Singularity",
getTargetJobType(), getTargetJobTypeVersion());
addToReplacementMap(context, nodeTemplate, singularityNode);
setNodeTagValue(singularityNode, A4C_D2S_MODIFIER_TAG + "_created_from", nodeTemplate.getName());
@@ -340,13 +355,12 @@ private void transformContainer(Csar csar, Topology topology, FlowExecutionConte
Set<NodeTemplate> dependents = TopologyNavigationUtil.getSourceNodesByRelationshipType(topology, nodeTemplate,
NormativeRelationshipConstants.DEPENDS_ON);
dependents.forEach(sourceNode -> {
Set<String> d = containersDependencies.get(sourceNode.getName());
if (d == null) {
d = Sets.newHashSet();
containersDependencies.put(sourceNode.getName(), d);
}
d.add(nodeTemplate.getName());
containersDependencies.computeIfAbsent(sourceNode.getName(), k-> Sets.newHashSet()).add(nodeTemplate.getName());
});
Set<NodeTemplate> dependsOn =TopologyNavigationUtil.getTargetNodes(topology, nodeTemplate, "dependency");
for (NodeTemplate targetNode : dependsOn) {
containersDependencies.computeIfAbsent(nodeTemplate.getName(), k-> Sets.newHashSet()).add(targetNode.getName());
}
}

protected void transformContainerOperation(Csar csar, FlowExecutionContext context,
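
The dependency bookkeeping in transformContainer now uses Map.computeIfAbsent instead of the get/null-check/put sequence. A minimal, self-contained sketch of the equivalence, with hypothetical node names and Guava's Sets as used in the diff:

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import com.google.common.collect.Sets;

public class ComputeIfAbsentSketch {
    public static void main(String[] args) {
        Map<String, Set<String>> containersDependencies = new HashMap<>();

        // Before: look up the bucket, create and register it if missing, then add
        Set<String> d = containersDependencies.get("sourceNode");
        if (d == null) {
            d = Sets.newHashSet();
            containersDependencies.put("sourceNode", d);
        }
        d.add("targetNode");

        // After: computeIfAbsent creates and registers the bucket on first use,
        // then returns it so the target can be added in one statement
        containersDependencies.computeIfAbsent("otherSource", k -> Sets.newHashSet()).add("targetNode");

        System.out.println(containersDependencies);
    }
}

The new code also records forward dependencies through TopologyNavigationUtil.getTargetNodes with the same computeIfAbsent pattern.
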
@@ -1,9 +1,14 @@
package alien4cloud.paas.yorc.modifier;

import alien4cloud.paas.wf.validation.WorkflowValidator;
import alien4cloud.tosca.context.ToscaContextual;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.inject.Inject;

import org.alien4cloud.alm.deployment.configuration.flow.FlowExecutionContext;
import org.alien4cloud.alm.deployment.configuration.flow.TopologyModifierSupport;
import org.alien4cloud.tosca.catalog.index.IToscaTypeSearchService;
@@ -14,11 +19,12 @@
import org.alien4cloud.tosca.model.templates.Topology;
import org.alien4cloud.tosca.model.types.NodeType;
import org.alien4cloud.tosca.normative.constants.NormativeRelationshipConstants;
import org.alien4cloud.tosca.utils.TopologyNavigationUtil;
import org.springframework.stereotype.Component;

import javax.inject.Inject;
import java.util.*;
import alien4cloud.paas.wf.validation.WorkflowValidator;
import alien4cloud.tosca.context.ToscaContextual;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;

/**
* Modifies an OpenStack topology to add a new Floating IP Node template
@@ -50,7 +56,7 @@ private void doProcess(Topology topology, FlowExecutionContext context) {

Csar csar = new Csar(topology.getArchiveName(), topology.getArchiveVersion());

Set<NodeTemplate> publicNetworksNodes = TopologyNavigationUtil.getNodesOfType(topology, "yorc.nodes.openstack.PublicNetwork", false);
Set<NodeTemplate> publicNetworksNodes = this.getNodesOfType(context, topology, "yorc.nodes.openstack.PublicNetwork", false);

String fipConnectivityCap = "yorc.capabilities.openstack.FIPConnectivity";
String fipNodeType = "yorc.nodes.openstack.FloatingIP";
@@ -61,7 +67,7 @@ private void doProcess(Topology topology, FlowExecutionContext context) {
publicNetworksNodes.forEach(networkNodeTemplate -> {
final AbstractPropertyValue networkName = networkNodeTemplate.getProperties().get("floating_network_name");

// For each Node Template requiring a connection to this Public
// For each Node Template requiring a connection to this Public
// Network, creating a new Floating IP Node Template
for (NodeTemplate nodeTemplate : new ArrayList<>(topology.getNodeTemplates().values())) {

@@ -73,7 +79,7 @@ private void doProcess(Topology topology, FlowExecutionContext context) {

Map<String, AbstractPropertyValue> properties = new LinkedHashMap<>();
properties.put("floating_network_name", networkName);

Map<String, Capability> capabilities = new LinkedHashMap<>();
Capability connectionCap = new Capability();
connectionCap.setType(fipConnectivityCap);
@@ -86,10 +92,10 @@ private void doProcess(Topology topology, FlowExecutionContext context) {
}

// Creating a new Floating IP Node Template that will be
// associated to this Node Template requiring a
// associated to this Node Template requiring a
// connection to the Public Network
String fipName = "FIP" + nodeTemplate.getName();
NodeTemplate fipNodeTemplate = addNodeTemplate(
NodeTemplate fipNodeTemplate = addNodeTemplate(context,
csar,
topology,
fipName,
@@ -131,12 +137,12 @@ private void doProcess(Topology topology, FlowExecutionContext context) {
networkNodeTemplate.getName());
});

// Removing Public Network nodes for which a new Floating IP Node
// Removing Public Network nodes for which a new Floating IP Node
// template was created
nodesToRemove.forEach(pnn -> removeNode(topology, pnn));
nodesToRemove.forEach(pnn -> removeNode(context, topology, pnn));

// Creating a relationship between each new Floating IP Node Template
// and the Source Node Template having a connectivity requirement
// and the Source Node Template having a connectivity requirement
relationshipsToAdd.forEach( rel -> addRelationshipTemplate(
csar,
topology,
50 changes: 29 additions & 21 deletions src/main/java/alien4cloud/paas/yorc/modifier/GangjaModifier.java
@@ -1,20 +1,27 @@
package alien4cloud.paas.yorc.modifier;

import alien4cloud.deployment.ArtifactProcessorService;
import alien4cloud.paas.wf.TopologyContext;
import alien4cloud.paas.wf.WorkflowSimplifyService;
import alien4cloud.paas.wf.WorkflowsBuilderService;
import alien4cloud.paas.wf.validation.WorkflowValidator;
import alien4cloud.tosca.context.ToscaContext;
import alien4cloud.tosca.context.ToscaContextual;
import alien4cloud.tosca.parser.ToscaParser;
import alien4cloud.utils.CloneUtil;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.annotation.PostConstruct;
import javax.inject.Inject;

import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import lombok.extern.slf4j.Slf4j;

import org.alien4cloud.alm.deployment.configuration.flow.FlowExecutionContext;
import org.alien4cloud.alm.deployment.configuration.flow.TopologyModifierSupport;
import org.alien4cloud.tosca.model.definitions.*;
import org.alien4cloud.tosca.model.definitions.AbstractPropertyValue;
import org.alien4cloud.tosca.model.definitions.ComplexPropertyValue;
import org.alien4cloud.tosca.model.definitions.DeploymentArtifact;
import org.alien4cloud.tosca.model.definitions.IValue;
import org.alien4cloud.tosca.model.definitions.PropertyDefinition;
import org.alien4cloud.tosca.model.definitions.ScalarPropertyValue;
import org.alien4cloud.tosca.model.templates.NodeTemplate;
import org.alien4cloud.tosca.model.templates.RelationshipTemplate;
import org.alien4cloud.tosca.model.templates.ServiceNodeTemplate;
@@ -24,15 +31,16 @@
import org.alien4cloud.tosca.utils.TopologyNavigationUtil;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;
import javax.inject.Inject;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import alien4cloud.deployment.ArtifactProcessorService;
import alien4cloud.paas.wf.TopologyContext;
import alien4cloud.paas.wf.WorkflowSimplifyService;
import alien4cloud.paas.wf.WorkflowsBuilderService;
import alien4cloud.paas.wf.validation.WorkflowValidator;
import alien4cloud.tosca.context.ToscaContext;
import alien4cloud.tosca.context.ToscaContextual;
import alien4cloud.tosca.parser.ToscaParser;
import alien4cloud.utils.CloneUtil;
import lombok.extern.slf4j.Slf4j;

@Slf4j
@Component("gangja-resolver-modifier")
@@ -99,7 +107,7 @@ protected void doProcess(Topology topology, FlowExecutionContext context) {
log.debug("ARM processing topology");
}

Set<NodeTemplate> nodes = TopologyNavigationUtil.getNodesOfType(topology, NODE_TYPE_TO_EXPORE, true, false);
Set<NodeTemplate> nodes = this.getNodesOfType(context, topology, NODE_TYPE_TO_EXPORE, true, false);
nodes.stream().forEach(nodeTemplate -> {
// check if node has org.alien4cloud.artifacts.GangjaConfig artefacts
if (hasGangjaFile(nodeTemplate)) {
@@ -1,9 +1,14 @@
package alien4cloud.paas.yorc.modifier;

import alien4cloud.paas.wf.validation.WorkflowValidator;
import alien4cloud.tosca.context.ToscaContextual;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.inject.Inject;

import org.alien4cloud.alm.deployment.configuration.flow.FlowExecutionContext;
import org.alien4cloud.alm.deployment.configuration.flow.TopologyModifierSupport;
import org.alien4cloud.tosca.catalog.index.IToscaTypeSearchService;
@@ -14,11 +19,12 @@
import org.alien4cloud.tosca.model.templates.NodeTemplate;
import org.alien4cloud.tosca.model.templates.Topology;
import org.alien4cloud.tosca.model.types.NodeType;
import org.alien4cloud.tosca.utils.TopologyNavigationUtil;
import org.springframework.stereotype.Component;

import javax.inject.Inject;
import java.util.*;
import alien4cloud.paas.wf.validation.WorkflowValidator;
import alien4cloud.tosca.context.ToscaContextual;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;

@Slf4j
@Component(value = GoogleAddressTopologyModifier.YORC_GOOGLE_ADDRESS_MODIFIER_TAG)
@@ -44,7 +50,7 @@ public void process(Topology topology, FlowExecutionContext context) {
private void doProcess(Topology topology, FlowExecutionContext context) {
Csar csar = new Csar(topology.getArchiveName(), topology.getArchiveVersion());

Set<NodeTemplate> publicNetworksNodes = TopologyNavigationUtil.getNodesOfType(topology, "yorc.nodes.google.PublicNetwork", false);
Set<NodeTemplate> publicNetworksNodes = this.getNodesOfType(context, topology, "yorc.nodes.google.PublicNetwork", false);
String assignableCap = "yorc.capabilities.Assignable";

String addressTypeName = "yorc.nodes.google.Address";
@@ -97,7 +103,7 @@ private void doProcess(Topology topology, FlowExecutionContext context) {
// Creating a new Address Node Template that will be
// associated to this Node Template requiring an assignment
String name = nodeTemplate.getName() + "_address";
NodeTemplate addressNodeTemplate = addNodeTemplate(
NodeTemplate addressNodeTemplate = addNodeTemplate(context,
csar,
topology,
name,
@@ -133,7 +139,7 @@ private void doProcess(Topology topology, FlowExecutionContext context) {

// Removing Public Network nodes for which a new Address Node
// template was created
nodesToRemove.forEach(pnn -> removeNode(topology, pnn));
nodesToRemove.forEach(pnn -> removeNode(context, topology, pnn));

// Creating a relationship between each new Google Address Node Template
// and the Source Node Template having an assignment requirement