diff --git a/assemblies/developer-lightspeed-guide/assembly_configuring-openshift-ai.adoc b/assemblies/developer-lightspeed-guide/assembly_configuring-openshift-ai.adoc
new file mode 100644
index 00000000..fa30a331
--- /dev/null
+++ b/assemblies/developer-lightspeed-guide/assembly_configuring-openshift-ai.adoc
@@ -0,0 +1,27 @@
+:_newdoc-version: 2.18.3
+:_template-generated: 2025-04-08
+
+ifdef::context[:parent-context-of-configuring-openshift-ai: {context}]
+
+:_mod-docs-content-type: ASSEMBLY
+
+ifndef::context[]
+[id="configuring-openshift-ai"]
+endif::[]
+ifdef::context[]
+[id="configuring-openshift-ai_{context}"]
+endif::[]
+= Configuring {ocp-short} AI
+:context: configuring-openshift-ai
+
+To serve a large language model from {ocp-short} AI, you must create a data science cluster, configure the LLM serving runtime, and create an accelerator profile.
+
+include::topics/developer-lightspeed/proc_creating-datascience-cluster.adoc[leveloffset=+1]
+
+include::topics/developer-lightspeed/proc_configuring-llm-serving-runtime.adoc[leveloffset=+1]
+
+include::topics/developer-lightspeed/proc_creating-accelerator-profile.adoc[leveloffset=+1]
+
+
+ifdef::parent-context-of-configuring-openshift-ai[:context: {parent-context-of-configuring-openshift-ai}]
+ifndef::parent-context-of-configuring-openshift-ai[:!context:]
\ No newline at end of file
diff --git a/assemblies/developer-lightspeed-guide/assembly_configuring_llm.adoc b/assemblies/developer-lightspeed-guide/assembly_configuring_llm.adoc
new file mode 100644
index 00000000..1c047c46
--- /dev/null
+++ b/assemblies/developer-lightspeed-guide/assembly_configuring_llm.adoc
@@ -0,0 +1,43 @@
+:_newdoc-version: 2.18.3
+:_template-generated: 2025-04-08
+
+ifdef::context[:parent-context-of-configuring-llm: {context}]
+
+:_mod-docs-content-type: ASSEMBLY
+
+ifndef::context[]
+[id="configuring-llm"]
+endif::[]
+ifdef::context[]
+[id="configuring-llm_{context}"]
+endif::[]
+= Configuring large language models for analysis
+:context: configuring-llm
+
+{mta-dl-plugin} works with large language models (LLMs) run in different environments to support analyzing Java applications in a wide range of scenarios. You can choose an LLM from well-known providers, local models that you run from Ollama or Podman Desktop, and OpenAI API-compatible models that are available as Model-as-a-Service deployments.
+
+The result of an analysis performed by {mta-dl-plugin} depends on the parameter configuration of the LLM that you choose. To use {mta-dl-plugin} for analysis, you must deploy your LLM and then configure mandatory settings (for example, API key and secret) and other parameters for your LLM.
+
+You can run an LLM from the following providers:
+
+* OpenAI
+* Azure OpenAI
+* Google Gemini
+* Amazon Bedrock
+* Deepseek
+* OpenShift AI
+
+include::topics/developer-lightspeed/con_model-as-a-service.adoc[leveloffset=+1]
+
+include::assembly_maas-oc-install-config.adoc[leveloffset=+1]
+
+include::assembly_configuring-openshift-ai.adoc[leveloffset=+1]
+
+include::assembly_connecting-openshift-ai-llm.adoc[leveloffset=+1]
+
+include::assembly_preparing-llm-analysis.adoc[leveloffset=+1]
+
+include::topics/developer-lightspeed/proc_configuring-llm-podman-desktop.adoc[leveloffset=+1]
+
+ifdef::parent-context-of-configuring-llm[:context: {parent-context-of-configuring-llm}]
+ifndef::parent-context-of-configuring-llm[:!context:]
\ No newline at end of file
diff --git a/assemblies/developer-lightspeed-guide/assembly_connecting-openshift-ai-llm.adoc b/assemblies/developer-lightspeed-guide/assembly_connecting-openshift-ai-llm.adoc
new file mode 100644
index 00000000..e2c05364
--- /dev/null
+++ b/assemblies/developer-lightspeed-guide/assembly_connecting-openshift-ai-llm.adoc
@@ -0,0 +1,27 @@
+:_newdoc-version: 2.18.3
+:_template-generated: 2025-04-08
+
+ifdef::context[:parent-context-of-connecting-openshift-ai-llm: {context}]
+
+:_mod-docs-content-type: ASSEMBLY
+
+ifndef::context[]
+[id="connecting-openshift-ai-llm"]
+endif::[]
+ifdef::context[]
+[id="connecting-openshift-ai-llm_{context}"]
+endif::[]
+= Connecting {ocp-short} AI with the large language model
+:context: connecting-openshift-ai-llm
+
+After you upload the large language model to your Amazon S3 bucket, you can connect {ocp-short} AI with the model by adding a data connection, deploying the model, and exporting the authentication token and SSL certificate.
+
+include::topics/developer-lightspeed/proc_adding-data-connection.adoc[leveloffset=+1]
+
+include::topics/developer-lightspeed/proc_deploying-the-model.adoc[leveloffset=+1]
+
+include::topics/developer-lightspeed/proc_export-token-certificate.adoc[leveloffset=+1]
+
+
+ifdef::parent-context-of-connecting-openshift-ai-llm[:context: {parent-context-of-connecting-openshift-ai-llm}]
+ifndef::parent-context-of-connecting-openshift-ai-llm[:!context:]
\ No newline at end of file
diff --git a/assemblies/developer-lightspeed-guide/assembly_maas-oc-install-config.adoc b/assemblies/developer-lightspeed-guide/assembly_maas-oc-install-config.adoc
new file mode 100644
index 00000000..f8296c7c
--- /dev/null
+++ b/assemblies/developer-lightspeed-guide/assembly_maas-oc-install-config.adoc
@@ -0,0 +1,30 @@
+:_newdoc-version: 2.18.3
+:_template-generated: 2025-04-08
+
+ifdef::context[:parent-context-of-maas-oc-install-config: {context}]
+
+:_mod-docs-content-type: ASSEMBLY
+
+ifndef::context[]
+[id="maas-oc-install-config"]
+endif::[]
+ifdef::context[]
+[id="maas-oc-install-config_{context}"]
+endif::[]
+= Installing and configuring {ocp-short} cluster
+:context: maas-oc-install-config
+
+Before you can configure {ocp-short} AI, you must install an {ocp-short} cluster and configure cluster resources: an identity provider, the required operators, a GPU machine set, and GPU node auto scaling.
+
+include::topics/developer-lightspeed/proc_install-oc-cluster.adoc[leveloffset=+1]
+
+include::topics/developer-lightspeed/proc_creating-identity-provider.adoc[leveloffset=+1]
+
+include::topics/developer-lightspeed/proc_configuring-operators.adoc[leveloffset=+1]
+
+include::topics/developer-lightspeed/proc_creating-gpu-machine-set.adoc[leveloffset=+1]
+
+include::topics/developer-lightspeed/proc_configuring-node-auto-scaling.adoc[leveloffset=+1]
+
+ifdef::parent-context-of-maas-oc-install-config[:context: {parent-context-of-maas-oc-install-config}]
+ifndef::parent-context-of-maas-oc-install-config[:!context:]
\ No newline at end of file
diff --git a/assemblies/developer-lightspeed-guide/assembly_preparing-llm-analysis.adoc b/assemblies/developer-lightspeed-guide/assembly_preparing-llm-analysis.adoc
new file mode 100644
index 00000000..22b2f2ff
--- /dev/null
+++ b/assemblies/developer-lightspeed-guide/assembly_preparing-llm-analysis.adoc
@@ -0,0 +1,24 @@
+:_newdoc-version: 2.18.3
+:_template-generated: 2025-04-08
+
+ifdef::context[:parent-context-of-preparing-llm-analysis: {context}]
+
+:_mod-docs-content-type: ASSEMBLY
+
+ifndef::context[]
+[id="preparing-llm-analysis"]
+endif::[]
+ifdef::context[]
+[id="preparing-llm-analysis_{context}"]
+endif::[]
+= Preparing the large language model for analysis
+:context: preparing-llm-analysis
+
+To prepare the large language model for analysis, download the SSL certificate for the model and configure the OpenAI API key.
+
+include::topics/developer-lightspeed/proc_downloading-certificate.adoc[leveloffset=+1]
+
+include::topics/developer-lightspeed/proc_configuring-openai-api-key.adoc[leveloffset=+1]
+
+ifdef::parent-context-of-preparing-llm-analysis[:context: {parent-context-of-preparing-llm-analysis}]
+ifndef::parent-context-of-preparing-llm-analysis[:!context:]
\ No newline at end of file
diff --git a/assemblies/developer-lightspeed-guide/topics b/assemblies/developer-lightspeed-guide/topics
new file mode 120000
index 00000000..9c652798
--- /dev/null
+++ b/assemblies/developer-lightspeed-guide/topics
@@ -0,0 +1 @@
+../../topics/
\ No newline at end of file
diff --git a/docs/developer-lightspeed-guide/assemblies b/docs/developer-lightspeed-guide/assemblies
new file mode 120000
index 00000000..51bb5102
--- /dev/null
+++ b/docs/developer-lightspeed-guide/assemblies
@@ -0,0 +1 @@
+../../assemblies/
\ No newline at end of file
diff --git a/docs/developer-lightspeed-guide/master-docinfo.xml b/docs/developer-lightspeed-guide/master-docinfo.xml
new file mode 100644
index 00000000..3a8b94c3
--- /dev/null
+++ b/docs/developer-lightspeed-guide/master-docinfo.xml
@@ -0,0 +1,11 @@
+
+Developer Lightspeed Guide
+{DocInfoProductName}
+{DocInfoProductNumber}
+Using the {ProductName} Developer Lightspeed to modernize your applications
+
+You can use {ProductFullName} Developer Lightspeed for application modernization in your organization by running Artificial Intelligence-driven static code analysis for Java applications.
+
+
+ Red Hat Customer Content Services
+
+
diff --git a/docs/developer-lightspeed-guide/master.adoc b/docs/developer-lightspeed-guide/master.adoc
new file mode 100644
index 00000000..980f47f9
--- /dev/null
+++ b/docs/developer-lightspeed-guide/master.adoc
@@ -0,0 +1,28 @@
+:mta:
+include::topics/templates/document-attributes.adoc[]
+:_mod-docs-content-type: ASSEMBLY
+[id="mta-developer-lightspeed"]
+= MTA Developer Lightspeed Guide
+
+:toc:
+:toclevels: 4
+:numbered:
+:imagesdir: topics/images
+:context: mta-developer-lightspeed
+:mta-developer-lightspeed:
+
+//Inclusive language statement
+include::topics/making-open-source-more-inclusive.adoc[]
+
+
+
+
+
+
+
+
+
+
+include::assemblies/developer-lightspeed-guide/assembly_configuring_llm.adoc[leveloffset=+1]
+
+:!mta-developer-lightspeed:
diff --git a/docs/developer-lightspeed-guide/topics b/docs/developer-lightspeed-guide/topics
new file mode 120000
index 00000000..cd717d99
--- /dev/null
+++ b/docs/developer-lightspeed-guide/topics
@@ -0,0 +1 @@
+../topics/
\ No newline at end of file
diff --git a/docs/topics/developer-lightspeed/con_model-as-a-service.adoc b/docs/topics/developer-lightspeed/con_model-as-a-service.adoc
new file mode 100644
index 00000000..e0887054
--- /dev/null
+++ b/docs/topics/developer-lightspeed/con_model-as-a-service.adoc
@@ -0,0 +1,19 @@
+:_newdoc-version: 2.15.0
+:_template-generated: 2024-2-21
+
+:_mod-docs-content-type: CONCEPT
+
+[id="model-as-a-service_{context}"]
+= Deploying an LLM as a scalable service
+
+[role="_abstract"]
+{mta-dl-plugin} also supports large language models (LLMs) that are deployed as a scalable service on {ocp-full} clusters. These deployments, called model-as-a-service (MaaS), provide you with greater control to optimize resources such as compute, cluster nodes, and auto-scaling Graphical Processing Units (GPUs) while enabling you to leverage artificial intelligence to perform operations at a large scale.
+
+
+The workflow for configuring an LLM on {ocp-short} AI can be broadly divided into the following parts:
+
+* Installing and configuring resources: from creating an {ocp} cluster to configuring node auto scaling
+* Configuring OpenShift AI: from creating a data science project to creating an accelerator profile
+* Connecting OpenShift AI with the LLM: from uploading a model to exporting tokens and SSL certificate for the LLM
+* Preparing the LLM for analysis: from downloading the CA certificates to updating the `provider.settings` file.
+//* Configuring monitoring and alerting for the storage resource: creating a ConfigMap for monitoring storage and an alerting configuration file.
diff --git a/docs/topics/developer-lightspeed/con_model-as-a-service.html b/docs/topics/developer-lightspeed/con_model-as-a-service.html
new file mode 100644
index 00000000..799aa3ec
--- /dev/null
+++ b/docs/topics/developer-lightspeed/con_model-as-a-service.html
@@ -0,0 +1,450 @@
+
+
+
+
+
+
+
+Archetype inheritance
+
+
+
+
+
+
+Archetype inheritance
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/topics/developer-lightspeed/proc_adding-data-connection.adoc b/docs/topics/developer-lightspeed/proc_adding-data-connection.adoc
new file mode 100644
index 00000000..9b464143
--- /dev/null
+++ b/docs/topics/developer-lightspeed/proc_adding-data-connection.adoc
@@ -0,0 +1,16 @@
+:_newdoc-version: 2.15.0
+:_template-generated: 2024-2-21
+:_mod-docs-content-type: PROCEDURE
+
+[id="adding-data-connection_{context}"]
+= Adding a data connection
+
+[role="_abstract"]
+This procedure describes how to add a data connection in {ocp-short} AI.
+
+.Prerequisites
+
+
+
+.Procedure
+
diff --git a/docs/topics/developer-lightspeed/proc_configuring-llm-podman-desktop.adoc b/docs/topics/developer-lightspeed/proc_configuring-llm-podman-desktop.adoc
new file mode 100644
index 00000000..c5b2fef2
--- /dev/null
+++ b/docs/topics/developer-lightspeed/proc_configuring-llm-podman-desktop.adoc
@@ -0,0 +1,21 @@
+:_newdoc-version: 2.15.0
+:_template-generated: 2024-2-21
+:_mod-docs-content-type: PROCEDURE
+
+[id="configuring-llm-podman_{context}"]
+= Configuring the LLM in Podman Desktop
+
+[role="_abstract"]
+
+You can run a local large language model through the Podman AI Lab extension in Podman Desktop. This example describes how to configure {mta-dl-plugin} for a model deployed this way.
+
+
+Provider settings configuration is mandatory for all large language models (LLMs), no matter how you deployed the model. You must configure an API key for the LLM, the base URL, and other parameters to connect your LLM with {mta-dl-plugin} in the `provider-settings.json` file.
+
+.Prerequisites
+
+* You configured an API key for your LLM.
+
+.Procedure
+
+. .
diff --git a/docs/topics/developer-lightspeed/proc_configuring-llm-serving-runtime.adoc b/docs/topics/developer-lightspeed/proc_configuring-llm-serving-runtime.adoc
new file mode 100644
index 00000000..191b513f
--- /dev/null
+++ b/docs/topics/developer-lightspeed/proc_configuring-llm-serving-runtime.adoc
@@ -0,0 +1,16 @@
+:_newdoc-version: 2.15.0
+:_template-generated: 2024-2-21
+:_mod-docs-content-type: PROCEDURE
+
+[id="configuring-llm-serving-runtime_{context}"]
+= Configuring the LLM serving runtime
+
+[role="_abstract"]
+This procedure describes how to configure the serving runtime for the large language model in {ocp-short} AI.
+
+.Prerequisites
+
+
+
+.Procedure
+
diff --git a/docs/topics/developer-lightspeed/proc_configuring-node-auto-scaling.adoc b/docs/topics/developer-lightspeed/proc_configuring-node-auto-scaling.adoc
new file mode 100644
index 00000000..e2f00b3d
--- /dev/null
+++ b/docs/topics/developer-lightspeed/proc_configuring-node-auto-scaling.adoc
@@ -0,0 +1,16 @@
+:_newdoc-version: 2.15.0
+:_template-generated: 2024-2-21
+:_mod-docs-content-type: PROCEDURE
+
+[id="configuring-node-auto-scaling_{context}"]
+= Configuring GPU node auto scaling
+
+[role="_abstract"]
+This procedure describes how to configure auto scaling for GPU nodes in the cluster.
+
+.Prerequisites
+
+
+
+.Procedure
+
diff --git a/docs/topics/developer-lightspeed/proc_configuring-openai-api-key.adoc b/docs/topics/developer-lightspeed/proc_configuring-openai-api-key.adoc
new file mode 100644
index 00000000..72241934
--- /dev/null
+++ b/docs/topics/developer-lightspeed/proc_configuring-openai-api-key.adoc
@@ -0,0 +1,16 @@
+:_newdoc-version: 2.15.0
+:_template-generated: 2024-2-21
+:_mod-docs-content-type: PROCEDURE
+
+[id="configuring-openai-api-key_{context}"]
+= Configuring the OpenAI API key
+
+[role="_abstract"]
+This procedure describes how to configure the OpenAI API key for the large language model.
+
+.Prerequisites
+
+
+
+.Procedure
+
diff --git a/docs/topics/developer-lightspeed/proc_configuring-operators.adoc b/docs/topics/developer-lightspeed/proc_configuring-operators.adoc
new file mode 100644
index 00000000..fa3eedd9
--- /dev/null
+++ b/docs/topics/developer-lightspeed/proc_configuring-operators.adoc
@@ -0,0 +1,16 @@
+:_newdoc-version: 2.15.0
+:_template-generated: 2024-2-21
+:_mod-docs-content-type: PROCEDURE
+
+[id="configuring-operators_{context}"]
+= Configuring operators for {ocp-short} AI
+
+[role="_abstract"]
+This procedure describes how to configure the operators that are required for {ocp-short} AI.
+
+.Prerequisites
+
+
+
+.Procedure
+
diff --git a/docs/topics/developer-lightspeed/proc_creating-accelerator-profile.adoc b/docs/topics/developer-lightspeed/proc_creating-accelerator-profile.adoc
new file mode 100644
index 00000000..c04f500f
--- /dev/null
+++ b/docs/topics/developer-lightspeed/proc_creating-accelerator-profile.adoc
@@ -0,0 +1,16 @@
+:_newdoc-version: 2.15.0
+:_template-generated: 2024-2-21
+:_mod-docs-content-type: PROCEDURE
+
+[id="creating-accelerator-profile_{context}"]
+= Creating an accelerator profile
+
+[role="_abstract"]
+This procedure describes how to create an accelerator profile in {ocp-short} AI.
+
+.Prerequisites
+
+
+
+.Procedure
+
diff --git a/docs/topics/developer-lightspeed/proc_creating-datascience-cluster.adoc b/docs/topics/developer-lightspeed/proc_creating-datascience-cluster.adoc
new file mode 100644
index 00000000..7c184d31
--- /dev/null
+++ b/docs/topics/developer-lightspeed/proc_creating-datascience-cluster.adoc
@@ -0,0 +1,16 @@
+:_newdoc-version: 2.15.0
+:_template-generated: 2024-2-21
+:_mod-docs-content-type: PROCEDURE
+
+[id="creating-datascience-cluster_{context}"]
+= Creating a DataScience project cluster
+
+[role="_abstract"]
+This procedure describes how to create a data science project cluster in {ocp-short} AI.
+
+.Prerequisites
+
+
+
+.Procedure
+
diff --git a/docs/topics/developer-lightspeed/proc_creating-gpu-machine-set.adoc b/docs/topics/developer-lightspeed/proc_creating-gpu-machine-set.adoc
new file mode 100644
index 00000000..f9cd87dd
--- /dev/null
+++ b/docs/topics/developer-lightspeed/proc_creating-gpu-machine-set.adoc
@@ -0,0 +1,16 @@
+:_newdoc-version: 2.15.0
+:_template-generated: 2024-2-21
+:_mod-docs-content-type: PROCEDURE
+
+[id="creating-gpu-machine-set_{context}"]
+= Creating a GPU machine set
+
+[role="_abstract"]
+This procedure describes how to create a machine set for GPU nodes in the cluster.
+
+.Prerequisites
+
+
+
+.Procedure
+
diff --git a/docs/topics/developer-lightspeed/proc_creating-identity-provider.adoc b/docs/topics/developer-lightspeed/proc_creating-identity-provider.adoc
new file mode 100644
index 00000000..79eeed63
--- /dev/null
+++ b/docs/topics/developer-lightspeed/proc_creating-identity-provider.adoc
@@ -0,0 +1,16 @@
+:_newdoc-version: 2.15.0
+:_template-generated: 2024-2-21
+:_mod-docs-content-type: PROCEDURE
+
+[id="creating-identity-provider_{context}"]
+= Creating an identity provider
+
+[role="_abstract"]
+This procedure describes how to create an identity provider for the cluster.
+
+.Prerequisites
+
+
+
+.Procedure
+
diff --git a/docs/topics/developer-lightspeed/proc_deploying-the-model.adoc b/docs/topics/developer-lightspeed/proc_deploying-the-model.adoc
new file mode 100644
index 00000000..51e84122
--- /dev/null
+++ b/docs/topics/developer-lightspeed/proc_deploying-the-model.adoc
@@ -0,0 +1,16 @@
+:_newdoc-version: 2.15.0
+:_template-generated: 2024-2-21
+:_mod-docs-content-type: PROCEDURE
+
+[id="deploying-the-model_{context}"]
+= Deploying the LLM
+
+[role="_abstract"]
+This procedure describes how to deploy the large language model in {ocp-short} AI.
+
+.Prerequisites
+
+
+
+.Procedure
+
diff --git a/docs/topics/developer-lightspeed/proc_downloading-certificate.adoc b/docs/topics/developer-lightspeed/proc_downloading-certificate.adoc
new file mode 100644
index 00000000..505378b4
--- /dev/null
+++ b/docs/topics/developer-lightspeed/proc_downloading-certificate.adoc
@@ -0,0 +1,16 @@
+:_newdoc-version: 2.15.0
+:_template-generated: 2024-2-21
+:_mod-docs-content-type: PROCEDURE
+
+[id="downloading-certificate_{context}"]
+= Downloading SSL certificate for the LLM
+
+[role="_abstract"]
+This procedure describes how to download the SSL certificate for the large language model.
+
+.Prerequisites
+
+
+
+.Procedure
+
diff --git a/docs/topics/developer-lightspeed/proc_export-token-certificate.adoc b/docs/topics/developer-lightspeed/proc_export-token-certificate.adoc
new file mode 100644
index 00000000..a0fe70a4
--- /dev/null
+++ b/docs/topics/developer-lightspeed/proc_export-token-certificate.adoc
@@ -0,0 +1,16 @@
+:_newdoc-version: 2.15.0
+:_template-generated: 2024-2-21
+:_mod-docs-content-type: PROCEDURE
+
+[id="export-token-certificate_{context}"]
+= Exporting the authentication token and SSL certificate
+
+[role="_abstract"]
+This procedure describes how to export the authentication token and SSL certificate for the large language model.
+
+.Prerequisites
+
+
+
+.Procedure
+
diff --git a/docs/topics/developer-lightspeed/proc_install-oc-cluster.adoc b/docs/topics/developer-lightspeed/proc_install-oc-cluster.adoc
new file mode 100644
index 00000000..d364fb93
--- /dev/null
+++ b/docs/topics/developer-lightspeed/proc_install-oc-cluster.adoc
@@ -0,0 +1,16 @@
+:_newdoc-version: 2.15.0
+:_template-generated: 2024-2-21
+:_mod-docs-content-type: PROCEDURE
+
+[id="install-oc-cluster_{context}"]
+= Installing an {ocp-short} cluster
+
+[role="_abstract"]
+This procedure describes how to install an {ocp-short} cluster.
+
+.Prerequisites
+
+
+
+.Procedure
+
diff --git a/docs/topics/templates/document-attributes.adoc b/docs/topics/templates/document-attributes.adoc
index 67cef78d..8ca87e93 100644
--- a/docs/topics/templates/document-attributes.adoc
+++ b/docs/topics/templates/document-attributes.adoc
@@ -28,6 +28,7 @@ ifdef::mta[]
:WebConsoleBookName: {WebNameTitle} Guide
:ProductVersion: 7.3.1
:PluginName: MTA plugin
+:mta-dl-plugin: MTA with Developer Lightspeed
// :MavenProductVersion: 7.0.0.GA-redhat-00001
:ProductDistributionVersion: 7.3.1.GA-redhat
:ProductDistribution: mta-7.3.1.GA-cli-offline.zip