diff --git a/Authentication/CreateDevAADApp.ps1 b/Authentication/CreateDevAADApp.ps1
index 895abc4..14dc400 100644
--- a/Authentication/CreateDevAADApp.ps1
+++ b/Authentication/CreateDevAADApp.ps1
@@ -41,8 +41,7 @@ if (-not $applicationName) {
if (-not $workloadName) {
$workloadName = Read-Host "Enter your workload name"
}
-while (-not ($workloadName -match "^Org\.[^.]+$"))
-{
+while (-not ($workloadName -match "^Org\.[^.]+$")) {
$workloadName = Read-Host "Workload name must start with Org. and contain only 2 segments!. please re-enter your workload name"
}
if (-not $tenantId) {
@@ -74,172 +73,172 @@ $randomString = -join ((65..90) + (97..122) | Get-Random -Count $length | ForEac
$applicationIdUri = "api://localdevinstance/" + $tenantId + "/" + $workloadName + "/" + $randomString
$application = @{
- displayName = $applicationName
- signInAudience = "AzureADMultipleOrgs"
- optionalClaims = @{
+ displayName = $applicationName
+ signInAudience = "AzureADMultipleOrgs"
+ optionalClaims = @{
accessToken = @(
@{
essential = $false
- name = "idtyp"
+ name = "idtyp"
}
)
}
- spa = @{
+ spa = @{
redirectUris = @(
$redirectUri
)
}
- identifierUris = @($applicationIdUri)
- api = @{
- oauth2PermissionScopes = @( # Scopes
+ identifierUris = @($applicationIdUri)
+ api = @{
+ oauth2PermissionScopes = @( # Scopes
@{
adminConsentDisplayName = "FabricWorkloadControl"
adminConsentDescription = "FabricWorkloadControl"
- value = "FabricWorkloadControl"
- id = $FabricWorkloadControlGuid
- isEnabled = $true
- type = "User"
+ value = "FabricWorkloadControl"
+ id = $FabricWorkloadControlGuid
+ isEnabled = $true
+ type = "User"
},
@{
adminConsentDisplayName = "Item1.Read.All"
adminConsentDescription = "Item1.Read.All"
- value = "Item1.Read.All"
- id = $Item1ReadAllGuid
- isEnabled = $true
- type = "User"
+ value = "Item1.Read.All"
+ id = $Item1ReadAllGuid
+ isEnabled = $true
+ type = "User"
},
@{
adminConsentDisplayName = "Item1.ReadWrite.All"
adminConsentDescription = "Item1.ReadWrite.All"
- value = "Item1.ReadWrite.All"
- id = $Item1ReadWriteAllGuid
- isEnabled = $true
- type = "User"
+ value = "Item1.ReadWrite.All"
+ id = $Item1ReadWriteAllGuid
+ isEnabled = $true
+ type = "User"
},
@{
adminConsentDisplayName = "FabricLakehouse.Read.All"
adminConsentDescription = "FabricLakehouse.Read.All"
- value = "FabricLakehouse.Read.All"
- id = $FabricLakehouseReadAllGuid
- isEnabled = $true
- type = "User"
+ value = "FabricLakehouse.Read.All"
+ id = $FabricLakehouseReadAllGuid
+ isEnabled = $true
+ type = "User"
},
@{
adminConsentDisplayName = "FabricLakehouse.ReadWrite.All"
adminConsentDescription = "FabricLakehouse.ReadWrite.All"
- value = "FabricLakehouse.ReadWrite.All"
- id = $FabricLakehouseReadWriteAllGuid
- isEnabled = $true
- type = "User"
+ value = "FabricLakehouse.ReadWrite.All"
+ id = $FabricLakehouseReadWriteAllGuid
+ isEnabled = $true
+ type = "User"
},
@{
adminConsentDisplayName = "KQLDatabase.ReadWrite.All"
adminConsentDescription = "KQLDatabase.ReadWrite.All"
- value = "KQLDatabase.ReadWrite.All"
- id = $KQLDatabaseReadWriteAllGuid
- isEnabled = $true
- type = "User"
+ value = "KQLDatabase.ReadWrite.All"
+ id = $KQLDatabaseReadWriteAllGuid
+ isEnabled = $true
+ type = "User"
},
@{
adminConsentDisplayName = "FabricEventhouse.Read.All"
adminConsentDescription = "FabricEventhouse.Read.All"
- value = "FabricEventhouse.Read.All"
- id = $FabricEventhouseReadAllGuid
- isEnabled = $true
- type = "User"
+ value = "FabricEventhouse.Read.All"
+ id = $FabricEventhouseReadAllGuid
+ isEnabled = $true
+ type = "User"
}
)
preAuthorizedApplications = @( # Preauthorize
@{
- appId = "871c010f-5e61-4fb1-83ac-98610a7e9110"
+ appId = "871c010f-5e61-4fb1-83ac-98610a7e9110"
delegatedPermissionIds = @(
$Item1ReadAllGuid, $Item1ReadWriteAllGuid, $FabricLakehouseReadAllGuid, $FabricLakehouseReadWriteAllGuid, $KQLDatabaseReadWriteAllGuid, $FabricEventhouseReadAllGuid
)
},
- @{
- appId = "00000009-0000-0000-c000-000000000000"
+ @{
+ appId = "00000009-0000-0000-c000-000000000000"
delegatedPermissionIds = @(
$FabricWorkloadControlGuid
)
},
@{
- appId = "d2450708-699c-41e3-8077-b0c8341509aa"
+ appId = "d2450708-699c-41e3-8077-b0c8341509aa"
delegatedPermissionIds = @(
$FabricWorkloadControlGuid
)
}
)
}
- requiredResourceAccess = @( # API Permissions
- @{
- resourceAppId = "e406a681-f3d4-42a8-90b6-c2b029497af1" # Azure Storage
- resourceAccess = @(
- @{
- id = "03e0da56-190b-40ad-a80c-ea378c433f7f" # user_impersonation
- type = "Scope"
- }
- )
- },
- @{
- resourceAppId = "2746ea77-4702-4b45-80ca-3c97e680e8b7" # Azure Data Explorer
- resourceAccess = @(
- @{
- id = "00d678f0-da44-4b12-a6d6-c98bcfd1c5fe" # user_impersonation
- type = "Scope"
- }
- )
- },
- @{
- resourceAppId = "00000003-0000-0000-c000-000000000000" # Graph
- resourceAccess = @(
- @{
- id = "e1fe6dd8-ba31-4d61-89e7-88639da4683d" # User.Read
- type = "Scope"
- }
- )
- },
- @{
- resourceAppId = "00000009-0000-0000-c000-000000000000" # PBI Service
- resourceAccess = @(
- @{
- id = "7ba630b9-8110-4e27-8d17-81e5f2218787" # Fabric.Extend
- type = "Scope"
- },
- @{
- id = "b2f1b2fa-f35c-407c-979c-a858a808ba85" # Workspace.Read.All
- type = "Scope"
- },
- @{
- id = "caf40b1a-f10e-4da1-86e4-5fda17eb2b07" # Item.Execute.ALL
- type = "Scope"
- },
- @{
- id = "d2bc95fc-440e-4b0e-bafd-97182de7aef5" # Item.Read.All
- type = "Scope"
- },
- @{
- id = "7a27a256-301d-4359-b77b-c2b759d2e362" # Item.ReadWrite.All
- type = "Scope"
- },
- @{
- id = "02e8d710-956c-4760-b996-2e83935c2cf5" # Item.Reshare.All
- type = "Scope"
- },
- @{
- id = "13060bfd-9305-4ec6-8388-8916580f4fa9" # Lakehouse.Read.All
- type = "Scope"
- },
- @{
- id = "cd1718e4-3e09-4381-a6e1-183e245f8613" # Eventhouse.Read.All
- type = "Scope"
- },
- @{
- id = "726667b1-01a6-4be4-b04c-e95eae4023a8" # KQLDatabase.ReadWrite.All
- type = "Scope"
- }
- )
- }
- )
+ requiredResourceAccess = @( # API Permissions
+ @{
+ resourceAppId = "e406a681-f3d4-42a8-90b6-c2b029497af1" # Azure Storage
+ resourceAccess = @(
+ @{
+ id = "03e0da56-190b-40ad-a80c-ea378c433f7f" # user_impersonation
+ type = "Scope"
+ }
+ )
+ },
+ @{
+ resourceAppId = "2746ea77-4702-4b45-80ca-3c97e680e8b7" # Azure Data Explorer
+ resourceAccess = @(
+ @{
+ id = "00d678f0-da44-4b12-a6d6-c98bcfd1c5fe" # user_impersonation
+ type = "Scope"
+ }
+ )
+ },
+ @{
+ resourceAppId = "00000003-0000-0000-c000-000000000000" # Graph
+ resourceAccess = @(
+ @{
+ id = "e1fe6dd8-ba31-4d61-89e7-88639da4683d" # User.Read
+ type = "Scope"
+ }
+ )
+ },
+ @{
+ resourceAppId = "00000009-0000-0000-c000-000000000000" # PBI Service
+ resourceAccess = @(
+ @{
+ id = "7ba630b9-8110-4e27-8d17-81e5f2218787" # Fabric.Extend
+ type = "Scope"
+ },
+ @{
+ id = "b2f1b2fa-f35c-407c-979c-a858a808ba85" # Workspace.Read.All
+ type = "Scope"
+ },
+ @{
+ id = "caf40b1a-f10e-4da1-86e4-5fda17eb2b07" # Item.Execute.ALL
+ type = "Scope"
+ },
+ @{
+ id = "d2bc95fc-440e-4b0e-bafd-97182de7aef5" # Item.Read.All
+ type = "Scope"
+ },
+ @{
+ id = "7a27a256-301d-4359-b77b-c2b759d2e362" # Item.ReadWrite.All
+ type = "Scope"
+ },
+ @{
+ id = "02e8d710-956c-4760-b996-2e83935c2cf5" # Item.Reshare.All
+ type = "Scope"
+ },
+ @{
+ id = "13060bfd-9305-4ec6-8388-8916580f4fa9" # Lakehouse.Read.All
+ type = "Scope"
+ },
+ @{
+ id = "cd1718e4-3e09-4381-a6e1-183e245f8613" # Eventhouse.Read.All
+ type = "Scope"
+ },
+ @{
+ id = "726667b1-01a6-4be4-b04c-e95eae4023a8" # KQLDatabase.ReadWrite.All
+ type = "Scope"
+ }
+ )
+ }
+ )
}
# Convert to valid json format (escape the '"')
@@ -268,11 +267,11 @@ $startUtcDateTimeString = $startUtcDateTimeString.ToString('u') -replace ' ', 'T
$endUtcDateTimeString = $endUtcDateTimeString.ToString('u') -replace ' ', 'T'
$passwordCreds = @{
- passwordCredential = @{
- displayName = "SampleSecret"
- endDateTime = $endUtcDateTimeString
- startDateTime = $startUtcDateTimeString
- }
+ passwordCredential = @{
+ displayName = "SampleSecret"
+ endDateTime = $endUtcDateTimeString
+ startDateTime = $startUtcDateTimeString
+ }
}
# Convert to valid json format (escape the '"')
diff --git a/Backend/Fabric_Extension_BE_Boilerplate.sln b/Backend/dotnet/Fabric_Extension_BE_Boilerplate.sln
similarity index 100%
rename from Backend/Fabric_Extension_BE_Boilerplate.sln
rename to Backend/dotnet/Fabric_Extension_BE_Boilerplate.sln
diff --git a/Backend/src/.config/dotnet-tools.json b/Backend/dotnet/src/.config/dotnet-tools.json
similarity index 100%
rename from Backend/src/.config/dotnet-tools.json
rename to Backend/dotnet/src/.config/dotnet-tools.json
diff --git a/Backend/src/Config/workload-dev-mode.json b/Backend/dotnet/src/Config/workload-dev-mode.json
similarity index 100%
rename from Backend/src/Config/workload-dev-mode.json
rename to Backend/dotnet/src/Config/workload-dev-mode.json
diff --git a/Backend/src/Constants/ApiConstants.cs b/Backend/dotnet/src/Constants/ApiConstants.cs
similarity index 100%
rename from Backend/src/Constants/ApiConstants.cs
rename to Backend/dotnet/src/Constants/ApiConstants.cs
diff --git a/Backend/src/Constants/EnvironmentConstants.cs b/Backend/dotnet/src/Constants/EnvironmentConstants.cs
similarity index 99%
rename from Backend/src/Constants/EnvironmentConstants.cs
rename to Backend/dotnet/src/Constants/EnvironmentConstants.cs
index 1a50e71..baf737f 100644
--- a/Backend/src/Constants/EnvironmentConstants.cs
+++ b/Backend/dotnet/src/Constants/EnvironmentConstants.cs
@@ -25,4 +25,4 @@ public static class EnvironmentConstants
public const string FabricApiBaseUrl = "https://api.fabric.microsoft.com";
}
-}
+}
\ No newline at end of file
diff --git a/Backend/src/Constants/ErrorCodes.cs b/Backend/dotnet/src/Constants/ErrorCodes.cs
similarity index 100%
rename from Backend/src/Constants/ErrorCodes.cs
rename to Backend/dotnet/src/Constants/ErrorCodes.cs
diff --git a/Backend/src/Constants/FabricScopes.cs b/Backend/dotnet/src/Constants/FabricScopes.cs
similarity index 100%
rename from Backend/src/Constants/FabricScopes.cs
rename to Backend/dotnet/src/Constants/FabricScopes.cs
diff --git a/Backend/src/Constants/HttpConstants.cs b/Backend/dotnet/src/Constants/HttpConstants.cs
similarity index 100%
rename from Backend/src/Constants/HttpConstants.cs
rename to Backend/dotnet/src/Constants/HttpConstants.cs
diff --git a/Backend/src/Constants/OneLakeConstants.cs b/Backend/dotnet/src/Constants/OneLakeConstants.cs
similarity index 100%
rename from Backend/src/Constants/OneLakeConstants.cs
rename to Backend/dotnet/src/Constants/OneLakeConstants.cs
diff --git a/Backend/src/Constants/WorkloadConstants.cs b/Backend/dotnet/src/Constants/WorkloadConstants.cs
similarity index 100%
rename from Backend/src/Constants/WorkloadConstants.cs
rename to Backend/dotnet/src/Constants/WorkloadConstants.cs
diff --git a/Backend/src/Constants/WorkloadScopes.cs b/Backend/dotnet/src/Constants/WorkloadScopes.cs
similarity index 100%
rename from Backend/src/Constants/WorkloadScopes.cs
rename to Backend/dotnet/src/Constants/WorkloadScopes.cs
diff --git a/Backend/src/Contracts/AuthorizationContext.cs b/Backend/dotnet/src/Contracts/AuthorizationContext.cs
similarity index 100%
rename from Backend/src/Contracts/AuthorizationContext.cs
rename to Backend/dotnet/src/Contracts/AuthorizationContext.cs
diff --git a/Backend/src/Contracts/CommonItemMetadata.cs b/Backend/dotnet/src/Contracts/CommonItemMetadata.cs
similarity index 100%
rename from Backend/src/Contracts/CommonItemMetadata.cs
rename to Backend/dotnet/src/Contracts/CommonItemMetadata.cs
diff --git a/Backend/src/Contracts/FabricAPI/Workload/CreateItemPayload.cs b/Backend/dotnet/src/Contracts/FabricAPI/Workload/CreateItemPayload.cs
similarity index 100%
rename from Backend/src/Contracts/FabricAPI/Workload/CreateItemPayload.cs
rename to Backend/dotnet/src/Contracts/FabricAPI/Workload/CreateItemPayload.cs
diff --git a/Backend/src/Contracts/FabricAPI/Workload/GenerateServerStub.cmd b/Backend/dotnet/src/Contracts/FabricAPI/Workload/GenerateServerStub.cmd
similarity index 100%
rename from Backend/src/Contracts/FabricAPI/Workload/GenerateServerStub.cmd
rename to Backend/dotnet/src/Contracts/FabricAPI/Workload/GenerateServerStub.cmd
diff --git a/Backend/src/Contracts/FabricAPI/Workload/ItemPayload.cs b/Backend/dotnet/src/Contracts/FabricAPI/Workload/ItemPayload.cs
similarity index 100%
rename from Backend/src/Contracts/FabricAPI/Workload/ItemPayload.cs
rename to Backend/dotnet/src/Contracts/FabricAPI/Workload/ItemPayload.cs
diff --git a/Backend/src/Contracts/FabricAPI/Workload/UpdateItemPayload.cs b/Backend/dotnet/src/Contracts/FabricAPI/Workload/UpdateItemPayload.cs
similarity index 100%
rename from Backend/src/Contracts/FabricAPI/Workload/UpdateItemPayload.cs
rename to Backend/dotnet/src/Contracts/FabricAPI/Workload/UpdateItemPayload.cs
diff --git a/Backend/src/Contracts/FabricAPI/Workload/WorkloadAPI_Generated.cs b/Backend/dotnet/src/Contracts/FabricAPI/Workload/WorkloadAPI_Generated.cs
similarity index 99%
rename from Backend/src/Contracts/FabricAPI/Workload/WorkloadAPI_Generated.cs
rename to Backend/dotnet/src/Contracts/FabricAPI/Workload/WorkloadAPI_Generated.cs
index 0f532f8..d5b153b 100644
--- a/Backend/src/Contracts/FabricAPI/Workload/WorkloadAPI_Generated.cs
+++ b/Backend/dotnet/src/Contracts/FabricAPI/Workload/WorkloadAPI_Generated.cs
@@ -431,7 +431,7 @@ public interface IEndpointResolutionController
///
    /// <br/>To resolve an endpoint, Fabric will send a POST request with the required context properties in the request body. The response will contain the resolved URL and its TTL, which indicates how long the URL is considered valid.
///
-    /// <br/>For a sample implementation and usage examples, please refer to the [Endpoint Resolution Sample Code](https://github.com/microsoft/Microsoft-Fabric-workload-development-sample/blob/main/Backend/src/Controllers/EndpointResolutionControllerImpl.cs).
+    /// <br/>For a sample implementation and usage examples, please refer to the [Endpoint Resolution Sample Code](https://github.com/microsoft/Microsoft-Fabric-workload-development-sample/blob/main/Backend/dotnet/src/Controllers/EndpointResolutionControllerImpl.cs).
///
/// Endpoint resolution request payload
@@ -464,7 +464,7 @@ public EndpointResolutionController(IEndpointResolutionController implementation
///
    /// <br/>To resolve an endpoint, Fabric will send a POST request with the required context properties in the request body. The response will contain the resolved URL and its TTL, which indicates how long the URL is considered valid.
///
-    /// <br/>For a sample implementation and usage examples, please refer to the [Endpoint Resolution Sample Code](https://github.com/microsoft/Microsoft-Fabric-workload-development-sample/blob/main/Backend/src/Controllers/EndpointResolutionControllerImpl.cs).
+    /// <br/>For a sample implementation and usage examples, please refer to the [Endpoint Resolution Sample Code](https://github.com/microsoft/Microsoft-Fabric-workload-development-sample/blob/main/Backend/dotnet/src/Controllers/EndpointResolutionControllerImpl.cs).
///
/// Endpoint resolution request payload
/// Endpoint resolution response
@@ -912,10 +912,10 @@ public partial class EndpointResolutionResponse
}
-#pragma warning restore 108
-#pragma warning restore 114
-#pragma warning restore 472
-#pragma warning restore 612
+#pragma warning restore 108
+#pragma warning restore 114
+#pragma warning restore 472
+#pragma warning restore 612
#pragma warning restore 1573
#pragma warning restore 1591
#pragma warning restore 8073
diff --git a/Backend/src/Contracts/FabricAPI/Workload/nswag.json b/Backend/dotnet/src/Contracts/FabricAPI/Workload/nswag.json
similarity index 100%
rename from Backend/src/Contracts/FabricAPI/Workload/nswag.json
rename to Backend/dotnet/src/Contracts/FabricAPI/Workload/nswag.json
diff --git a/Backend/src/Contracts/FabricAPI/Workload/swagger.json b/Backend/dotnet/src/Contracts/FabricAPI/Workload/swagger.json
similarity index 99%
rename from Backend/src/Contracts/FabricAPI/Workload/swagger.json
rename to Backend/dotnet/src/Contracts/FabricAPI/Workload/swagger.json
index 36ddf47..1786d7e 100644
--- a/Backend/src/Contracts/FabricAPI/Workload/swagger.json
+++ b/Backend/dotnet/src/Contracts/FabricAPI/Workload/swagger.json
@@ -928,7 +928,7 @@
"/resolve-api-path-placeholder": {
"post": {
"summary": "Resolve an endpoint for a given service called by Microsoft Fabric",
- "description": "Resolves the endpoint for a given service called by Microsoft Fabric based on the tenant's region and workspace region. Fabric provides a set of context properties and returns the appropriate service endpoint URL and its time-to-live (TTL).\n\nThe Endpoint Resolution API is crucial for services that require dynamic endpoint determination based on operational context. This allows for optimized routing and regional compliance.\n\nTo resolve an endpoint, Fabric will send a POST request with the required context properties in the request body. The response will contain the resolved URL and its TTL, which indicates how long the URL is considered valid.\n\nFor a sample implementation and usage examples, please refer to the [Endpoint Resolution Sample Code](https://github.com/microsoft/Microsoft-Fabric-workload-development-sample/blob/main/Backend/src/Controllers/EndpointResolutionControllerImpl.cs).",
+ "description": "Resolves the endpoint for a given service called by Microsoft Fabric based on the tenant's region and workspace region. Fabric provides a set of context properties and returns the appropriate service endpoint URL and its time-to-live (TTL).\n\nThe Endpoint Resolution API is crucial for services that require dynamic endpoint determination based on operational context. This allows for optimized routing and regional compliance.\n\nTo resolve an endpoint, Fabric will send a POST request with the required context properties in the request body. The response will contain the resolved URL and its TTL, which indicates how long the URL is considered valid.\n\nFor a sample implementation and usage examples, please refer to the [Endpoint Resolution Sample Code](https://github.com/microsoft/Microsoft-Fabric-workload-development-sample/blob/main/Backend/dotnet/src/Controllers/EndpointResolutionControllerImpl.cs).",
"operationId": "EndpointResolution_Resolve",
"tags": [ "EndpointResolution" ],
"consumes": [ "application/json" ],
diff --git a/Backend/src/Contracts/FabricItem.cs b/Backend/dotnet/src/Contracts/FabricItem.cs
similarity index 100%
rename from Backend/src/Contracts/FabricItem.cs
rename to Backend/dotnet/src/Contracts/FabricItem.cs
diff --git a/Backend/src/Contracts/Item1Metadata.cs b/Backend/dotnet/src/Contracts/Item1Metadata.cs
similarity index 100%
rename from Backend/src/Contracts/Item1Metadata.cs
rename to Backend/dotnet/src/Contracts/Item1Metadata.cs
diff --git a/Backend/src/Contracts/ItemJobMetadata.cs b/Backend/dotnet/src/Contracts/ItemJobMetadata.cs
similarity index 100%
rename from Backend/src/Contracts/ItemJobMetadata.cs
rename to Backend/dotnet/src/Contracts/ItemJobMetadata.cs
diff --git a/Backend/src/Contracts/ItemMetadata.cs b/Backend/dotnet/src/Contracts/ItemMetadata.cs
similarity index 100%
rename from Backend/src/Contracts/ItemMetadata.cs
rename to Backend/dotnet/src/Contracts/ItemMetadata.cs
diff --git a/Backend/src/Contracts/ItemReference.cs b/Backend/dotnet/src/Contracts/ItemReference.cs
similarity index 100%
rename from Backend/src/Contracts/ItemReference.cs
rename to Backend/dotnet/src/Contracts/ItemReference.cs
diff --git a/Backend/src/Contracts/KustoItems/EventhouseItem.cs b/Backend/dotnet/src/Contracts/KustoItems/EventhouseItem.cs
similarity index 100%
rename from Backend/src/Contracts/KustoItems/EventhouseItem.cs
rename to Backend/dotnet/src/Contracts/KustoItems/EventhouseItem.cs
diff --git a/Backend/src/Contracts/KustoItems/QueryKqlDatabaseRequest.cs b/Backend/dotnet/src/Contracts/KustoItems/QueryKqlDatabaseRequest.cs
similarity index 100%
rename from Backend/src/Contracts/KustoItems/QueryKqlDatabaseRequest.cs
rename to Backend/dotnet/src/Contracts/KustoItems/QueryKqlDatabaseRequest.cs
diff --git a/Backend/src/Contracts/LakehouseFile.cs b/Backend/dotnet/src/Contracts/LakehouseFile.cs
similarity index 100%
rename from Backend/src/Contracts/LakehouseFile.cs
rename to Backend/dotnet/src/Contracts/LakehouseFile.cs
diff --git a/Backend/src/Contracts/LakehouseTable.cs b/Backend/dotnet/src/Contracts/LakehouseTable.cs
similarity index 100%
rename from Backend/src/Contracts/LakehouseTable.cs
rename to Backend/dotnet/src/Contracts/LakehouseTable.cs
diff --git a/Backend/src/Contracts/OneLakeAPI/OneLakeFolder.cs b/Backend/dotnet/src/Contracts/OneLakeAPI/OneLakeFolder.cs
similarity index 100%
rename from Backend/src/Contracts/OneLakeAPI/OneLakeFolder.cs
rename to Backend/dotnet/src/Contracts/OneLakeAPI/OneLakeFolder.cs
diff --git a/Backend/src/Contracts/OneLakePathContainer.cs b/Backend/dotnet/src/Contracts/OneLakePathContainer.cs
similarity index 100%
rename from Backend/src/Contracts/OneLakePathContainer.cs
rename to Backend/dotnet/src/Contracts/OneLakePathContainer.cs
diff --git a/Backend/src/Contracts/ResolvePermissionsResponse.cs b/Backend/dotnet/src/Contracts/ResolvePermissionsResponse.cs
similarity index 100%
rename from Backend/src/Contracts/ResolvePermissionsResponse.cs
rename to Backend/dotnet/src/Contracts/ResolvePermissionsResponse.cs
diff --git a/Backend/src/Contracts/SubjectAndAppToken.cs b/Backend/dotnet/src/Contracts/SubjectAndAppToken.cs
similarity index 100%
rename from Backend/src/Contracts/SubjectAndAppToken.cs
rename to Backend/dotnet/src/Contracts/SubjectAndAppToken.cs
diff --git a/Backend/src/Contracts/WorkloadJobTypes.cs b/Backend/dotnet/src/Contracts/WorkloadJobTypes.cs
similarity index 100%
rename from Backend/src/Contracts/WorkloadJobTypes.cs
rename to Backend/dotnet/src/Contracts/WorkloadJobTypes.cs
diff --git a/Backend/src/Contracts/WriteToLakeHouseFileRequest.cs b/Backend/dotnet/src/Contracts/WriteToLakeHouseFileRequest.cs
similarity index 100%
rename from Backend/src/Contracts/WriteToLakeHouseFileRequest.cs
rename to Backend/dotnet/src/Contracts/WriteToLakeHouseFileRequest.cs
diff --git a/Backend/src/Controllers/EndpointResolutionControllerImpl.cs b/Backend/dotnet/src/Controllers/EndpointResolutionControllerImpl.cs
similarity index 100%
rename from Backend/src/Controllers/EndpointResolutionControllerImpl.cs
rename to Backend/dotnet/src/Controllers/EndpointResolutionControllerImpl.cs
diff --git a/Backend/src/Controllers/EventhouseController.cs b/Backend/dotnet/src/Controllers/EventhouseController.cs
similarity index 99%
rename from Backend/src/Controllers/EventhouseController.cs
rename to Backend/dotnet/src/Controllers/EventhouseController.cs
index d4d91c1..4a70622 100644
--- a/Backend/src/Controllers/EventhouseController.cs
+++ b/Backend/dotnet/src/Controllers/EventhouseController.cs
@@ -14,6 +14,7 @@ namespace Boilerplate.Controllers
public class EventhouseController : ControllerBase
{
        private static readonly IList<string> EventhubFabricScopes = new[] { $"{EnvironmentConstants.FabricBackendResourceId}/Eventhouse.Read.All" };
+
private readonly ILogger _logger;
private readonly IHttpContextAccessor _httpContextAccessor;
private readonly IAuthenticationService _authenticationService;
@@ -46,6 +47,7 @@ IHttpClientService httpClientService
        public async Task<IActionResult> GetEventhouse(Guid workspaceId, Guid eventhouseId)
{
_logger.LogInformation("GetEventhouse: get eventhouse '{0}' in workspace '{1}'", eventhouseId, workspaceId);
+
var authorizationContext = await _authenticationService.AuthenticateDataPlaneCall(_httpContextAccessor.HttpContext, allowedScopes: new string[] {WorkloadScopes.FabricEventhouseReadAll});
var token = await _authenticationService.GetAccessTokenOnBehalfOf(authorizationContext, EventhubFabricScopes);
@@ -59,6 +61,7 @@ public async Task<IActionResult> GetEventhouse(Guid workspaceId, Guid eventhouseId)
}
            var eventhouse = await response.Content.ReadAsAsync<EventhouseItem>();
return Ok(eventhouse);
+
}
}
}
\ No newline at end of file
diff --git a/Backend/src/Controllers/FabricExtensionController.cs b/Backend/dotnet/src/Controllers/FabricExtensionController.cs
similarity index 100%
rename from Backend/src/Controllers/FabricExtensionController.cs
rename to Backend/dotnet/src/Controllers/FabricExtensionController.cs
diff --git a/Backend/src/Controllers/ItemLifecycleControllerImpl.cs b/Backend/dotnet/src/Controllers/ItemLifecycleControllerImpl.cs
similarity index 100%
rename from Backend/src/Controllers/ItemLifecycleControllerImpl.cs
rename to Backend/dotnet/src/Controllers/ItemLifecycleControllerImpl.cs
diff --git a/Backend/src/Controllers/JobsControllerImpl.cs b/Backend/dotnet/src/Controllers/JobsControllerImpl.cs
similarity index 100%
rename from Backend/src/Controllers/JobsControllerImpl.cs
rename to Backend/dotnet/src/Controllers/JobsControllerImpl.cs
diff --git a/Backend/src/Controllers/KqlDatabaseController.cs b/Backend/dotnet/src/Controllers/KqlDatabaseController.cs
similarity index 100%
rename from Backend/src/Controllers/KqlDatabaseController.cs
rename to Backend/dotnet/src/Controllers/KqlDatabaseController.cs
diff --git a/Backend/src/Controllers/LakeHouseController.cs b/Backend/dotnet/src/Controllers/LakeHouseController.cs
similarity index 100%
rename from Backend/src/Controllers/LakeHouseController.cs
rename to Backend/dotnet/src/Controllers/LakeHouseController.cs
diff --git a/Backend/src/Controllers/OneLakeController.cs b/Backend/dotnet/src/Controllers/OneLakeController.cs
similarity index 100%
rename from Backend/src/Controllers/OneLakeController.cs
rename to Backend/dotnet/src/Controllers/OneLakeController.cs
diff --git a/Backend/src/Exceptions/AuthenticationException.cs b/Backend/dotnet/src/Exceptions/AuthenticationException.cs
similarity index 100%
rename from Backend/src/Exceptions/AuthenticationException.cs
rename to Backend/dotnet/src/Exceptions/AuthenticationException.cs
diff --git a/Backend/src/Exceptions/AuthenticationUIRequiredException.cs b/Backend/dotnet/src/Exceptions/AuthenticationUIRequiredException.cs
similarity index 100%
rename from Backend/src/Exceptions/AuthenticationUIRequiredException.cs
rename to Backend/dotnet/src/Exceptions/AuthenticationUIRequiredException.cs
diff --git a/Backend/src/Exceptions/DoubledOperandsOverflowException.cs b/Backend/dotnet/src/Exceptions/DoubledOperandsOverflowException.cs
similarity index 100%
rename from Backend/src/Exceptions/DoubledOperandsOverflowException.cs
rename to Backend/dotnet/src/Exceptions/DoubledOperandsOverflowException.cs
diff --git a/Backend/src/Exceptions/HttpResponseExceptionFilter.cs b/Backend/dotnet/src/Exceptions/HttpResponseExceptionFilter.cs
similarity index 100%
rename from Backend/src/Exceptions/HttpResponseExceptionFilter.cs
rename to Backend/dotnet/src/Exceptions/HttpResponseExceptionFilter.cs
diff --git a/Backend/src/Exceptions/InternalErrorException.cs b/Backend/dotnet/src/Exceptions/InternalErrorException.cs
similarity index 100%
rename from Backend/src/Exceptions/InternalErrorException.cs
rename to Backend/dotnet/src/Exceptions/InternalErrorException.cs
diff --git a/Backend/src/Exceptions/InvalidItemPayloadException.cs b/Backend/dotnet/src/Exceptions/InvalidItemPayloadException.cs
similarity index 100%
rename from Backend/src/Exceptions/InvalidItemPayloadException.cs
rename to Backend/dotnet/src/Exceptions/InvalidItemPayloadException.cs
diff --git a/Backend/src/Exceptions/InvalidRelativePathException.cs b/Backend/dotnet/src/Exceptions/InvalidRelativePathException.cs
similarity index 100%
rename from Backend/src/Exceptions/InvalidRelativePathException.cs
rename to Backend/dotnet/src/Exceptions/InvalidRelativePathException.cs
diff --git a/Backend/src/Exceptions/InvariantViolationException.cs b/Backend/dotnet/src/Exceptions/InvariantViolationException.cs
similarity index 100%
rename from Backend/src/Exceptions/InvariantViolationException.cs
rename to Backend/dotnet/src/Exceptions/InvariantViolationException.cs
diff --git a/Backend/src/Exceptions/ItemMetadataNotFoundException.cs b/Backend/dotnet/src/Exceptions/ItemMetadataNotFoundException.cs
similarity index 100%
rename from Backend/src/Exceptions/ItemMetadataNotFoundException.cs
rename to Backend/dotnet/src/Exceptions/ItemMetadataNotFoundException.cs
diff --git a/Backend/src/Exceptions/KustoDataException.cs b/Backend/dotnet/src/Exceptions/KustoDataException.cs
similarity index 100%
rename from Backend/src/Exceptions/KustoDataException.cs
rename to Backend/dotnet/src/Exceptions/KustoDataException.cs
diff --git a/Backend/src/Exceptions/TooManyRequestsException.cs b/Backend/dotnet/src/Exceptions/TooManyRequestsException.cs
similarity index 100%
rename from Backend/src/Exceptions/TooManyRequestsException.cs
rename to Backend/dotnet/src/Exceptions/TooManyRequestsException.cs
diff --git a/Backend/src/Exceptions/UnauthorizedException.cs b/Backend/dotnet/src/Exceptions/UnauthorizedException.cs
similarity index 100%
rename from Backend/src/Exceptions/UnauthorizedException.cs
rename to Backend/dotnet/src/Exceptions/UnauthorizedException.cs
diff --git a/Backend/src/Exceptions/UnexpectedItemTypeException.cs b/Backend/dotnet/src/Exceptions/UnexpectedItemTypeException.cs
similarity index 100%
rename from Backend/src/Exceptions/UnexpectedItemTypeException.cs
rename to Backend/dotnet/src/Exceptions/UnexpectedItemTypeException.cs
diff --git a/Backend/src/Exceptions/WorkloadExceptionBase.cs b/Backend/dotnet/src/Exceptions/WorkloadExceptionBase.cs
similarity index 100%
rename from Backend/src/Exceptions/WorkloadExceptionBase.cs
rename to Backend/dotnet/src/Exceptions/WorkloadExceptionBase.cs
diff --git a/Backend/src/FabricBackendExtension.cs b/Backend/dotnet/src/FabricBackendExtension.cs
similarity index 100%
rename from Backend/src/FabricBackendExtension.cs
rename to Backend/dotnet/src/FabricBackendExtension.cs
diff --git a/Backend/dotnet/src/Fabric_Extension_BE_Boilerplate.csproj b/Backend/dotnet/src/Fabric_Extension_BE_Boilerplate.csproj
new file mode 100644
index 0000000..7687a37
--- /dev/null
+++ b/Backend/dotnet/src/Fabric_Extension_BE_Boilerplate.csproj
@@ -0,0 +1,49 @@
+
+
+
+ net8.0
+ true
+ PreBuild
+ true
+ true
+
+
+
+ Packages\manifest\ManifestPackageRelease.nuspec
+
+
+
+ Packages\manifest\ManifestPackageDebug.nuspec
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ powershell.exe
+ pwsh
+ $(ProjectDir)..\..\..\tools\validation
+ $(ProjectDir)Packages\manifest
+ $(ProjectDir)appsettings.json
+
+
+
+
+
+
diff --git a/Backend/src/Items/IItem.cs b/Backend/dotnet/src/Items/IItem.cs
similarity index 100%
rename from Backend/src/Items/IItem.cs
rename to Backend/dotnet/src/Items/IItem.cs
diff --git a/Backend/src/Items/IItem1.cs b/Backend/dotnet/src/Items/IItem1.cs
similarity index 100%
rename from Backend/src/Items/IItem1.cs
rename to Backend/dotnet/src/Items/IItem1.cs
diff --git a/Backend/src/Items/Item1.cs b/Backend/dotnet/src/Items/Item1.cs
similarity index 100%
rename from Backend/src/Items/Item1.cs
rename to Backend/dotnet/src/Items/Item1.cs
diff --git a/Backend/src/Items/ItemBase.cs b/Backend/dotnet/src/Items/ItemBase.cs
similarity index 100%
rename from Backend/src/Items/ItemBase.cs
rename to Backend/dotnet/src/Items/ItemBase.cs
diff --git a/Backend/src/Packages/manifest/Item1.xml b/Backend/dotnet/src/Packages/manifest/Item1.xml
similarity index 100%
rename from Backend/src/Packages/manifest/Item1.xml
rename to Backend/dotnet/src/Packages/manifest/Item1.xml
diff --git a/Backend/dotnet/src/Packages/manifest/ManifestPackageDebug.nuspec b/Backend/dotnet/src/Packages/manifest/ManifestPackageDebug.nuspec
new file mode 100644
index 0000000..833538f
--- /dev/null
+++ b/Backend/dotnet/src/Packages/manifest/ManifestPackageDebug.nuspec
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="utf-8"?>
+<package>
+  <metadata>
+    <id>ManifestPackage</id>
+    <version>1.0.0</version>
+    <authors>author</authors>
+    <owners>owner</owners>
+    <description>A NuGet package that holds the workload and frontend manifests.</description>
+    <requireLicenseAcceptance>false</requireLicenseAcceptance>
+  </metadata>
+  <files>
+    <file src="WorkloadManifest.xml" target="BE\WorkloadManifest.xml" />
+    <file src="Item1.xml" target="BE\Item1.xml" />
+    <file src="..\..\..\..\..\Frontend\Package\*" target="FE" />
+    <file src="..\..\..\..\..\Frontend\Package\assets\**" target="FE\assets" />
+  </files>
+</package>
diff --git a/Backend/src/Packages/manifest/ManifestPackageRelease.nuspec b/Backend/dotnet/src/Packages/manifest/ManifestPackageRelease.nuspec
similarity index 81%
rename from Backend/src/Packages/manifest/ManifestPackageRelease.nuspec
rename to Backend/dotnet/src/Packages/manifest/ManifestPackageRelease.nuspec
index 9f9a2e5..3207b57 100644
--- a/Backend/src/Packages/manifest/ManifestPackageRelease.nuspec
+++ b/Backend/dotnet/src/Packages/manifest/ManifestPackageRelease.nuspec
@@ -13,7 +13,7 @@
-    <file src="..\..\..\..\Frontend\Package\*" target="FE" />
-    <file src="..\..\..\..\Frontend\Package\assets\**" target="FE\assets" />
+    <file src="..\..\..\..\..\Frontend\Package\*" target="FE" />
+    <file src="..\..\..\..\..\Frontend\Package\assets\**" target="FE\assets" />
diff --git a/Backend/src/Packages/manifest/WorkloadManifest.xml b/Backend/dotnet/src/Packages/manifest/WorkloadManifest.xml
similarity index 97%
rename from Backend/src/Packages/manifest/WorkloadManifest.xml
rename to Backend/dotnet/src/Packages/manifest/WorkloadManifest.xml
index 68e1d68..f1cca4a 100644
--- a/Backend/src/Packages/manifest/WorkloadManifest.xml
+++ b/Backend/dotnet/src/Packages/manifest/WorkloadManifest.xml
@@ -25,4 +25,4 @@
-
+
\ No newline at end of file
diff --git a/Backend/src/Program.cs b/Backend/dotnet/src/Program.cs
similarity index 100%
rename from Backend/src/Program.cs
rename to Backend/dotnet/src/Program.cs
diff --git a/Backend/src/Properties/launchSettings.json b/Backend/dotnet/src/Properties/launchSettings.json
similarity index 100%
rename from Backend/src/Properties/launchSettings.json
rename to Backend/dotnet/src/Properties/launchSettings.json
diff --git a/Backend/src/Services/AuthenticationService.cs b/Backend/dotnet/src/Services/AuthenticationService.cs
similarity index 100%
rename from Backend/src/Services/AuthenticationService.cs
rename to Backend/dotnet/src/Services/AuthenticationService.cs
diff --git a/Backend/src/Services/AuthorizationHandler.cs b/Backend/dotnet/src/Services/AuthorizationHandler.cs
similarity index 100%
rename from Backend/src/Services/AuthorizationHandler.cs
rename to Backend/dotnet/src/Services/AuthorizationHandler.cs
diff --git a/Backend/src/Services/ConfigurationService.cs b/Backend/dotnet/src/Services/ConfigurationService.cs
similarity index 100%
rename from Backend/src/Services/ConfigurationService.cs
rename to Backend/dotnet/src/Services/ConfigurationService.cs
diff --git a/Backend/src/Services/HttpClientService.cs b/Backend/dotnet/src/Services/HttpClientService.cs
similarity index 100%
rename from Backend/src/Services/HttpClientService.cs
rename to Backend/dotnet/src/Services/HttpClientService.cs
diff --git a/Backend/src/Services/IAuthenticationService.cs b/Backend/dotnet/src/Services/IAuthenticationService.cs
similarity index 100%
rename from Backend/src/Services/IAuthenticationService.cs
rename to Backend/dotnet/src/Services/IAuthenticationService.cs
diff --git a/Backend/src/Services/IAuthorizationHandler.cs b/Backend/dotnet/src/Services/IAuthorizationHandler.cs
similarity index 100%
rename from Backend/src/Services/IAuthorizationHandler.cs
rename to Backend/dotnet/src/Services/IAuthorizationHandler.cs
diff --git a/Backend/src/Services/IConfigurationService.cs b/Backend/dotnet/src/Services/IConfigurationService.cs
similarity index 100%
rename from Backend/src/Services/IConfigurationService.cs
rename to Backend/dotnet/src/Services/IConfigurationService.cs
diff --git a/Backend/src/Services/IHttpClientService.cs b/Backend/dotnet/src/Services/IHttpClientService.cs
similarity index 100%
rename from Backend/src/Services/IHttpClientService.cs
rename to Backend/dotnet/src/Services/IHttpClientService.cs
diff --git a/Backend/src/Services/IItemFactory.cs b/Backend/dotnet/src/Services/IItemFactory.cs
similarity index 100%
rename from Backend/src/Services/IItemFactory.cs
rename to Backend/dotnet/src/Services/IItemFactory.cs
diff --git a/Backend/src/Services/IItemMetadataStore.cs b/Backend/dotnet/src/Services/IItemMetadataStore.cs
similarity index 100%
rename from Backend/src/Services/IItemMetadataStore.cs
rename to Backend/dotnet/src/Services/IItemMetadataStore.cs
diff --git a/Backend/src/Services/ILakeHouseClientService.cs b/Backend/dotnet/src/Services/ILakeHouseClientService.cs
similarity index 100%
rename from Backend/src/Services/ILakeHouseClientService.cs
rename to Backend/dotnet/src/Services/ILakeHouseClientService.cs
diff --git a/Backend/src/Services/IOneLakeClientService.cs b/Backend/dotnet/src/Services/IOneLakeClientService.cs
similarity index 100%
rename from Backend/src/Services/IOneLakeClientService.cs
rename to Backend/dotnet/src/Services/IOneLakeClientService.cs
diff --git a/Backend/src/Services/ItemFactory.cs b/Backend/dotnet/src/Services/ItemFactory.cs
similarity index 100%
rename from Backend/src/Services/ItemFactory.cs
rename to Backend/dotnet/src/Services/ItemFactory.cs
diff --git a/Backend/src/Services/ItemMetadataStore.cs b/Backend/dotnet/src/Services/ItemMetadataStore.cs
similarity index 93%
rename from Backend/src/Services/ItemMetadataStore.cs
rename to Backend/dotnet/src/Services/ItemMetadataStore.cs
index fed6a52..c222c5f 100644
--- a/Backend/src/Services/ItemMetadataStore.cs
+++ b/Backend/dotnet/src/Services/ItemMetadataStore.cs
@@ -8,7 +8,6 @@
using Fabric_Extension_BE_Boilerplate.Utils;
using Microsoft.Extensions.Logging;
using System;
-using System.Collections.Concurrent;
using System.IO;
using System.Text.Json;
using System.Text.Json.Serialization;
@@ -25,7 +24,7 @@ public class ItemMetadataStore : IItemMetadataStore
{
private const string CommonItemMetadataFilename = "common.metadata.json";
private const string TypeSpecificMetadataFilename = "item.metadata.json";
-        private static readonly ConcurrentDictionary<string, SemaphoreSlim> semaphores = new ConcurrentDictionary<string, SemaphoreSlim>();
+ private static readonly SemaphoreSlim MetadataSemaphore = new SemaphoreSlim(1, 1);
        private static readonly JsonConverter Item1OperatorConverter = new UnknownAsDefaultEnumConverter<Item1Operator>();
private static readonly JsonSerializerOptions ContentSerializationOptions = new JsonSerializerOptions
@@ -103,8 +102,7 @@ public bool ExistsJob(Guid tenantObjectId, Guid itemObjectId, Guid jobInstanceId
        private async Task StoreFile<TContent>(string directoryPath, string filename, TContent content)
{
var filePath = GetSubDirectoryFullPath(directoryPath, filename);
- var semaphore = semaphores.GetOrAdd(filePath, _ => new SemaphoreSlim(1, 1));
- await semaphore.WaitAsync();
+ await MetadataSemaphore.WaitAsync();
try
{
var serializedContent = JsonSerializer.Serialize(content, ContentSerializationOptions);
@@ -112,15 +110,14 @@ private async Task StoreFile<TContent>(string directoryPath, string filename, TContent content)
}
finally
{
- semaphore.Release();
+ MetadataSemaphore.Release();
}
}
        private async Task<TContent> LoadFile<TContent>(string directoryPath, string filename)
{
var filePath = GetSubDirectoryFullPath(directoryPath, filename);
- var semaphore = semaphores.GetOrAdd(filePath, _ => new SemaphoreSlim(1, 1));
- await semaphore.WaitAsync();
+ await MetadataSemaphore.WaitAsync();
try
{
var content = await File.ReadAllTextAsync(filePath);
@@ -128,7 +125,7 @@ private async Task<TContent> LoadFile<TContent>(string directoryPath, string filename)
}
finally
{
- semaphore.Release();
+ MetadataSemaphore.Release();
}
}
diff --git a/Backend/src/Services/LakeHouseClientService.cs b/Backend/dotnet/src/Services/LakeHouseClientService.cs
similarity index 100%
rename from Backend/src/Services/LakeHouseClientService.cs
rename to Backend/dotnet/src/Services/LakeHouseClientService.cs
diff --git a/Backend/src/Services/OneLakeClientService.cs b/Backend/dotnet/src/Services/OneLakeClientService.cs
similarity index 100%
rename from Backend/src/Services/OneLakeClientService.cs
rename to Backend/dotnet/src/Services/OneLakeClientService.cs
diff --git a/Backend/src/Startup.cs b/Backend/dotnet/src/Startup.cs
similarity index 100%
rename from Backend/src/Startup.cs
rename to Backend/dotnet/src/Startup.cs
diff --git a/Backend/src/Utils/ClaimsExtensions.cs b/Backend/dotnet/src/Utils/ClaimsExtensions.cs
similarity index 100%
rename from Backend/src/Utils/ClaimsExtensions.cs
rename to Backend/dotnet/src/Utils/ClaimsExtensions.cs
diff --git a/Backend/src/Utils/Ensure.cs b/Backend/dotnet/src/Utils/Ensure.cs
similarity index 100%
rename from Backend/src/Utils/Ensure.cs
rename to Backend/dotnet/src/Utils/Ensure.cs
diff --git a/Backend/src/Utils/IEnumerableExtensions.cs b/Backend/dotnet/src/Utils/IEnumerableExtensions.cs
similarity index 100%
rename from Backend/src/Utils/IEnumerableExtensions.cs
rename to Backend/dotnet/src/Utils/IEnumerableExtensions.cs
diff --git a/Backend/src/Utils/RequestLoggingFilter.cs b/Backend/dotnet/src/Utils/RequestLoggingFilter.cs
similarity index 100%
rename from Backend/src/Utils/RequestLoggingFilter.cs
rename to Backend/dotnet/src/Utils/RequestLoggingFilter.cs
diff --git a/Backend/src/Utils/UnknownAsDefaultEnumConverter.cs b/Backend/dotnet/src/Utils/UnknownAsDefaultEnumConverter.cs
similarity index 100%
rename from Backend/src/Utils/UnknownAsDefaultEnumConverter.cs
rename to Backend/dotnet/src/Utils/UnknownAsDefaultEnumConverter.cs
diff --git a/Backend/src/appsettings.Development.json b/Backend/dotnet/src/appsettings.Development.json
similarity index 100%
rename from Backend/src/appsettings.Development.json
rename to Backend/dotnet/src/appsettings.Development.json
diff --git a/Backend/src/appsettings.json b/Backend/dotnet/src/appsettings.json
similarity index 99%
rename from Backend/src/appsettings.json
rename to Backend/dotnet/src/appsettings.json
index e22acd7..6a123b3 100644
--- a/Backend/src/appsettings.json
+++ b/Backend/dotnet/src/appsettings.json
@@ -31,4 +31,4 @@
}
}
}
-}
+}
\ No newline at end of file
diff --git a/Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/ControllerTestsBase.cs b/Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/ControllerTestsBase.cs
similarity index 100%
rename from Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/ControllerTestsBase.cs
rename to Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/ControllerTestsBase.cs
diff --git a/Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/EndpointResolutionControllerTests.cs b/Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/EndpointResolutionControllerTests.cs
similarity index 100%
rename from Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/EndpointResolutionControllerTests.cs
rename to Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/EndpointResolutionControllerTests.cs
diff --git a/Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/ExceptionFilterTests.cs b/Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/ExceptionFilterTests.cs
similarity index 100%
rename from Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/ExceptionFilterTests.cs
rename to Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/ExceptionFilterTests.cs
diff --git a/Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/FabricExtensionControllerTests.cs b/Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/FabricExtensionControllerTests.cs
similarity index 100%
rename from Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/FabricExtensionControllerTests.cs
rename to Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/FabricExtensionControllerTests.cs
diff --git a/Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/LakeHouseControllerTests.cs b/Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/LakeHouseControllerTests.cs
similarity index 100%
rename from Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/LakeHouseControllerTests.cs
rename to Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/LakeHouseControllerTests.cs
diff --git a/Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/ManifestPackageReleaseTests.cs b/Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/ManifestPackageReleaseTests.cs
similarity index 91%
rename from Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/ManifestPackageReleaseTests.cs
rename to Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/ManifestPackageReleaseTests.cs
index f086688..3f98b1d 100644
--- a/Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/ManifestPackageReleaseTests.cs
+++ b/Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/ManifestPackageReleaseTests.cs
@@ -14,8 +14,8 @@ public void Parse_ValidNuspecFile_ReturnsCorrectFilesAndTargets()
{
{ @"WorkloadManifest.xml", @"BE\WorkloadManifest.xml" },
{ @"Item1.xml", @"BE\Item1.xml" },
- { @"..\..\..\..\Frontend\Package\*", @"FE" },
- { @"..\..\..\..\Frontend\Package\assets\**", @"FE\assets" }
+ { @"..\..\..\..\..\Frontend\Package\*", @"FE" },
+ { @"..\..\..\..\..\Frontend\Package\assets\**", @"FE\assets" }
};
var result = ParseNuspecFile(nuspecFilePath);
diff --git a/Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/OneLakeControllerTests.cs b/Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/OneLakeControllerTests.cs
similarity index 100%
rename from Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/OneLakeControllerTests.cs
rename to Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Controllers/OneLakeControllerTests.cs
diff --git a/Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/CustomWebApplicationFactory .cs b/Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/CustomWebApplicationFactory .cs
similarity index 100%
rename from Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/CustomWebApplicationFactory .cs
rename to Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/CustomWebApplicationFactory .cs
diff --git a/Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Fabric_Extension_BE_Boilerplate_UnitTests.csproj b/Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Fabric_Extension_BE_Boilerplate_UnitTests.csproj
similarity index 100%
rename from Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Fabric_Extension_BE_Boilerplate_UnitTests.csproj
rename to Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Fabric_Extension_BE_Boilerplate_UnitTests.csproj
diff --git a/Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Items/Item1Tests.cs b/Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Items/Item1Tests.cs
similarity index 100%
rename from Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Items/Item1Tests.cs
rename to Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Items/Item1Tests.cs
diff --git a/Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Properties/launchSettings.json b/Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Properties/launchSettings.json
new file mode 100644
index 0000000..deb12f7
--- /dev/null
+++ b/Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Properties/launchSettings.json
@@ -0,0 +1,22 @@
+{
+ "profiles": {
+ "Fabric_Extension_BE_Boilerplate_UnitTests": {
+ "commandName": "Project",
+ "launchBrowser": true,
+ "environmentVariables": {
+ "ASPNETCORE_ENVIRONMENT": "Development"
+ },
+ "applicationUrl": "https://localhost:54848;http://localhost:54849"
+ },
+ "WSL": {
+ "commandName": "WSL2",
+ "launchBrowser": true,
+ "launchUrl": "https://localhost:54848",
+ "environmentVariables": {
+ "ASPNETCORE_ENVIRONMENT": "Development",
+ "ASPNETCORE_URLS": "https://localhost:54848;http://localhost:54849"
+ },
+ "distributionName": ""
+ }
+ }
+}
\ No newline at end of file
diff --git a/Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/SubjectAndAppTokenTests.cs b/Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/SubjectAndAppTokenTests.cs
similarity index 100%
rename from Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/SubjectAndAppTokenTests.cs
rename to Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/SubjectAndAppTokenTests.cs
diff --git a/Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Utils/UnknownAsDefaultEnumConverterTests.cs b/Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Utils/UnknownAsDefaultEnumConverterTests.cs
similarity index 100%
rename from Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Utils/UnknownAsDefaultEnumConverterTests.cs
rename to Backend/dotnet/test/Fabric_Extension_BE_Boilerplate_UnitTests/Utils/UnknownAsDefaultEnumConverterTests.cs
diff --git a/Backend/python/.flake8 b/Backend/python/.flake8
new file mode 100644
index 0000000..9e008c5
--- /dev/null
+++ b/Backend/python/.flake8
@@ -0,0 +1,3 @@
+[flake8]
+max-line-length = 88
+exclude = .git,__pycache__,__init__.py,.mypy_cache,.pytest_cache,.venv
diff --git a/Backend/python/.gitignore b/Backend/python/.gitignore
new file mode 100644
index 0000000..6325be2
--- /dev/null
+++ b/Backend/python/.gitignore
@@ -0,0 +1,209 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller - but exclude package manifests
+/*.manifest
+/build/*.manifest
+/dist/*.manifest
+*.spec
+
+# Package manifest files - DO NOT IGNORE
+!src/Packages/
+!src/Packages/**/*.manifest
+!src/Packages/**/manifest
+!src/Packages/**/MANIFEST
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+junit.xml
+.mutmut-cache
+
+# Translations
+*.mo
+*.pot
+
+# FastAPI specific
+*.log
+local_settings.py
+instance/
+.webassets-cache
+
+# Database
+*.db
+*.sqlite3
+*.sqlite3-journal
+
+# Jupyter Notebook
+.ipynb_checkpoints
+*.ipynb
+
+# IPython
+profile_default/
+ipython_config.py
+
+# Environments
+.env
+.env.*
+!.env.example
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+virtualenv/
+
+# IDE - VSCode
+.vscode/*
+!.vscode/settings.json.example
+!.vscode/tasks.json.example
+!.vscode/launch.json.example
+!.vscode/extensions.json
+*.code-workspace
+
+# IDE - PyCharm
+.idea/
+*.iml
+*.iws
+*.ipr
+
+# IDE - Spyder
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# OS generated files
+.DS_Store
+.DS_Store?
+._*
+.Spotlight-V100
+.Trashes
+ehthumbs.db
+Thumbs.db
+*~
+
+# Backup files
+*.bak
+*.backup
+*.old
+*.orig
+*.~*
+
+# Temporary files
+*.tmp
+*.temp
+*.swp
+*.swo
+
+# Logs
+logs/
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+# Security - never commit these
+*.pem
+*.key
+*.crt
+*.p12
+*.pfx
+secrets/
+credentials/
+
+# Local development
+.local/
+local_*.py
+!local_*.py.example
+
+# Production artifacts
+*.pid
+*.seed
+*.pid.lock
+
+# Docker
+.dockerignore
+docker-compose.override.yml
+
+# Terraform
+*.tfstate
+*.tfstate.*
+.terraform/
+
+# Documentation builds
+docs/_build/
+docs/api/
+site/
+
+# Performance profiling
+*.prof
+*.lprof
+
+# FastAPI specific - OpenAPI schema cache
+openapi.json
+!docs/openapi.json
+
+# Alembic - local database migrations
+alembic.ini.local
+
+# Project specific - add your own
+# /path/to/specific/ignore
\ No newline at end of file
diff --git a/Backend/python/Dockerfile b/Backend/python/Dockerfile
new file mode 100644
index 0000000..42cd87b
--- /dev/null
+++ b/Backend/python/Dockerfile
@@ -0,0 +1,52 @@
+# Use Microsoft-approved Python image based on CBL-Mariner
+FROM mcr.microsoft.com/cbl-mariner/base/python:3 AS builder
+
+WORKDIR /app
+
+# Install system dependencies if needed
+RUN tdnf update -y && \
+ tdnf install -y gcc build-essential && \
+ tdnf clean all
+
+# Copy requirements first for better caching
+COPY requirements.txt .
+RUN pip install --no-cache-dir --upgrade pip && \
+ pip install --no-cache-dir -r requirements.txt
+
+# Copy application code
+COPY src/ ./src/
+
+# Final production image
+FROM mcr.microsoft.com/cbl-mariner/base/python:3 AS production
+
+WORKDIR /app
+
+# Install only production dependencies
+COPY requirements.txt .
+RUN pip install --no-cache-dir --upgrade pip && \
+ pip install --no-cache-dir -r requirements.txt
+
+# Copy application files
+COPY src/ ./src/
+
+# Install shadow-utils for useradd command, then create non-root user
+RUN tdnf install -y shadow-utils && \
+ tdnf clean all && \
+ useradd -m -u 1000 appuser && \
+ chown -R appuser:appuser /app
+
+USER appuser
+
+# Set environment variables
+ENV PYTHON_ENVIRONMENT=Production
+ENV PYTHONPATH=/app
+
+# Health check
+HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
+ CMD python3 -c "import urllib.request; urllib.request.urlopen('http://localhost:5000/health').read()" || exit 1
+
+# Expose port
+EXPOSE 5000
+
+# Run the application
+CMD ["python3", "src/main.py"]
\ No newline at end of file
diff --git a/Backend/python/README.md b/Backend/python/README.md
new file mode 100644
index 0000000..0540d20
--- /dev/null
+++ b/Backend/python/README.md
@@ -0,0 +1,482 @@
+# Microsoft Fabric Workload - Python FastAPI Backend
+
+A comprehensive Python FastAPI backend implementation for Microsoft Fabric workload development, providing calculator workload functionality with full integration to the Fabric ecosystem.
+
+## 🚀 Overview
+
+This backend service implements the Microsoft Fabric Workload APIs using FastAPI, demonstrating how to build production-ready workloads that integrate seamlessly with the Fabric platform. The calculator workload showcases:
+
+- **Item Lifecycle Management**: Create, read, update, and delete workload items
+- **Job Execution**: Support for various job types (instant, scheduled, long-running)
+- **OneLake Integration**: Direct integration with Fabric's data lake for file operations
+- **Authentication & Authorization**: Microsoft Entra ID integration with proper token validation
+- **Lakehouse Operations**: Create and manage calculations stored as text and parquet files
+
+## 📋 Prerequisites
+
+- **Python**: 3.11+ (recommended)
+- **Microsoft Fabric Tenant**: Access to Microsoft Fabric workspace
+- **Azure AD Application**: Registered application with proper permissions
+- **Development Environment**: Windows 10/11, Linux, or macOS
+- **Optional**: Docker and Docker Compose for containerized deployment
+
+## 🛠️ Installation & Setup
+
+### 1. Clone and Navigate
+
+```bash
+git clone https://github.com/microsoft/Microsoft-Fabric-workload-development-sample.git
+cd Microsoft-Fabric-workload-development-sample/Backend/python
+```
+
+### 2. Create Virtual Environment
+
+```bash
+# Create virtual environment
+python -m venv venv
+
+# Activate virtual environment
+# Windows
+venv\Scripts\activate
+# Linux/macOS
+source venv/bin/activate
+```
+
+### 3. Install Dependencies
+
+```bash
+# Install production dependencies
+pip install -r requirements.txt
+
+# Install development dependencies (optional)
+pip install -r tests/requirements-test.txt
+```
+
+### 4. Configuration Setup
+
+#### Environment Variables
+Create a `.env` file in the root directory:
+
+```env
+# Application Configuration
+PYTHON_ENVIRONMENT=Development
+DEBUG=false
+
+# Azure AD Configuration
+PUBLISHER_TENANT_ID=your-tenant-id
+CLIENT_ID=your-client-id
+CLIENT_SECRET=your-client-secret
+AUDIENCE=your-audience
+
+# Server Configuration
+HOST=0.0.0.0
+PORT=5000
+WORKERS=1
+
+# SSL Configuration (Production)
+SSL_KEYFILE=/path/to/private.key
+SSL_CERTFILE=/path/to/certificate.crt
+
+# Logging Configuration
+LOG_LEVEL=Information
+```
+
+#### Configuration Files
+The application uses JSON configuration files in the [`src/`](src/) directory:
+
+- [`appsettings.json`](src/appsettings.json) - Base configuration
+- [`appsettings.Development.json`](src/appsettings.Development.json) - Development overrides
+
+**Important**: Update the authentication values in your configuration:
+
+```json
+{
+ "PublisherTenantId": "your-tenant-id",
+ "ClientId": "your-client-id",
+ "ClientSecret": "your-client-secret",
+ "Audience": "your-audience"
+}
+```
+
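+The sketch below shows one way such layered configuration can be resolved at startup. It is illustrative only: the file names and environment variables match those above, but the helper and its precedence rules are assumptions, not the actual `ConfigurationService` logic.
+
+```python
+import json
+import os
+
+# Environment variables (see .env above) that override the JSON setting keys.
+ENV_OVERRIDES = {
+    "PublisherTenantId": "PUBLISHER_TENANT_ID",
+    "ClientId": "CLIENT_ID",
+    "ClientSecret": "CLIENT_SECRET",
+    "Audience": "AUDIENCE",
+}
+
+
+def load_settings(src_dir: str = "src") -> dict:
+    """Load appsettings.json, apply environment-specific overrides, then env vars."""
+    with open(os.path.join(src_dir, "appsettings.json")) as f:
+        settings = json.load(f)
+
+    # Overlay appsettings.<Environment>.json when it exists.
+    env = os.getenv("PYTHON_ENVIRONMENT", "Development")
+    override_path = os.path.join(src_dir, f"appsettings.{env}.json")
+    if os.path.exists(override_path):
+        with open(override_path) as f:
+            settings.update(json.load(f))
+
+    # Environment variables take the highest precedence.
+    for key, var in ENV_OVERRIDES.items():
+        if os.getenv(var):
+            settings[key] = os.environ[var]
+    return settings
+```
+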
+## 🚀 Running the Application
+
+### Development Mode
+
+```bash
+# Method 1: Using Python directly
+cd src
+python main.py
+
+# Method 2: Using uvicorn with module path
+PYTHONPATH=src uvicorn fabric_api.main:app --host 0.0.0.0 --port 5000 --reload
+
+# Method 3: Using the simplified uvicorn command (run from the src directory)
+uvicorn main:app --host 0.0.0.0 --port 5000 --reload
+```
+
+### Production Mode
+
+```bash
+# Set production environment
+export PYTHON_ENVIRONMENT=Production
+
+# Run with optimized settings
+python src/main.py
+```
+
+### Docker Deployment
+
+```bash
+# Build and run with Docker Compose
+docker-compose up --build
+
+# Or build and run manually
+docker build -t fabric-python-backend .
+docker run -p 5000:5000 fabric-python-backend
+```
+
+## 📚 API Documentation
+
+Once running, access the interactive API documentation:
+
+- **OpenAPI/Swagger UI**: http://localhost:5000/docs
+- **ReDoc**: http://localhost:5000/redoc
+- **OpenAPI Schema**: http://localhost:5000/openapi.json
+
+### Core API Endpoints
+
+#### Item Lifecycle Management
+- `POST /workspaces/{workspaceId}/items/{itemType}/{itemId}` - Create item
+- `PATCH /workspaces/{workspaceId}/items/{itemType}/{itemId}` - Update item
+- `DELETE /workspaces/{workspaceId}/items/{itemType}/{itemId}` - Delete item
+- `GET /workspaces/{workspaceId}/items/{itemType}/{itemId}/payload` - Get item payload
+
+#### Job Management
+- `POST /workspaces/{workspaceId}/items/{itemType}/{itemId}/jobTypes/{jobType}/instances/{jobInstanceId}` - Create job instance
+- `GET /workspaces/{workspaceId}/items/{itemType}/{itemId}/jobTypes/{jobType}/instances/{jobInstanceId}` - Get job status
+- `POST /workspaces/{workspaceId}/items/{itemType}/{itemId}/jobTypes/{jobType}/instances/{jobInstanceId}/cancel` - Cancel job
+
+#### Endpoint Resolution
+- `POST /resolve-api-path-placeholder` - Resolve service endpoints
+
+#### Extension APIs
+- `GET /api/calculateText` - Calculate text operations
+- `GET /api/getItems` - Get workspace items
+- `POST /api/writeToLakehouseFile` - Write to lakehouse files
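+
+As a concrete illustration, the sketch below shows what a Fabric-style call to the create-item endpoint looks like from the outside. The header names and the dual-token format come from the bundled `openapi.yaml`; the base URL, item type, IDs, and tokens are all placeholders:
+
+```python
+import uuid
+
+import httpx
+
+BASE_URL = "http://localhost:5000"  # local development server (placeholder)
+workspace_id = str(uuid.uuid4())    # placeholder workspace ID
+item_id = str(uuid.uuid4())         # placeholder item ID
+
+headers = {
+    # Dual-token header format defined in openapi.yaml
+    "Authorization": 'SubjectAndAppToken1.0 subjectToken="<delegated token>", appToken="<S2S token>"',
+    "ActivityId": str(uuid.uuid4()),
+    "RequestId": str(uuid.uuid4()),
+    "x-ms-client-tenant-id": "<tenant-id>",
+}
+
+response = httpx.post(
+    f"{BASE_URL}/workspaces/{workspace_id}/items/SampleItemType/{item_id}",
+    headers=headers,
+    json={"displayName": "My calculator", "description": "demo", "creationPayload": {}},
+)
+print(response.status_code, response.text)
+```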
+
+## 🏗️ Architecture
+
+### Directory Structure
+
+```
+python/
+├── src/
+│   ├── constants/           # Application constants
+│   ├── core/                # Core services and DI
+│   ├── exceptions/          # Custom exceptions
+│   ├── fabric_api/          # Generated API models and controllers
+│   ├── impl/                # Implementation controllers
+│   ├── items/               # Item domain models
+│   ├── middleware/          # FastAPI middleware
+│   ├── models/              # Data models
+│   ├── services/            # Business logic services
+│   ├── Packages/            # Fabric manifest packages
+│   ├── appsettings.json     # Configuration
+│   └── main.py              # Application entry point
+├── tests/                   # Test suites
+├── tools/                   # Development tools
+├── docker-compose.yaml      # Docker composition
+├── Dockerfile               # Container definition
+├── requirements.txt         # Python dependencies
+└── README.md                # This file
+```
+
+### Key Components
+
+#### Services (Singleton Pattern)
+- **[`AuthenticationService`](src/services/authentication.py)**: Token validation and user authentication
+- **[`AuthorizationService`](src/services/authorization.py)**: Permission validation
+- **[`ConfigurationService`](src/services/configuration_service.py)**: Configuration management
+- **[`ItemMetadataStore`](src/services/item_metadata_store.py)**: Item metadata persistence
+- **[`OneLakeClientService`](src/services/onelake_client_service.py)**: OneLake file operations
+- **[`LakehouseClientService`](src/services/lakehouse_client_service.py)**: Lakehouse integration
+
+#### Controllers
+- **[`ItemLifecycleController`](src/fabric_api/impl/item_lifecycle_controller.py)**: Handles item CRUD operations
+- **[`JobsController`](src/fabric_api/impl/jobs_controller.py)**: Manages job execution
+- **[`FabricExtensionController`](src/impl/fabric_extension_controller.py)**: Custom workload APIs
+
+#### Models
+- **[`Item1`](src/items/item1.py)**: Calculator workload item implementation
+- **[`BaseItem`](src/items/base_item.py)**: Abstract base for all items
+- **API Models**: Generated from OpenAPI specification
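+
+A minimal sketch of the singleton pattern these services follow, assuming a cached factory function (the real wiring lives under `src/core/`; `get_configuration_service` here is purely illustrative):
+
+```python
+from functools import lru_cache
+
+
+class ConfigurationService:
+    """Loads settings once and serves them to the rest of the app."""
+
+    def __init__(self) -> None:
+        self.settings = {"Audience": "your-audience"}  # placeholder load
+
+
+@lru_cache(maxsize=1)
+def get_configuration_service() -> ConfigurationService:
+    # lru_cache guarantees a single shared instance per process
+    return ConfigurationService()
+```
+
+With FastAPI, such a factory can be plugged into routes via `Depends(get_configuration_service)`, which keeps the singleton behavior without global state.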
+
+## 🧪 Testing
+
+### Running Tests
+
+```bash
+# Run all tests
+python run_tests.py
+
+# Run specific test types
+python run_tests.py unit # Unit tests only
+python run_tests.py integration # Integration tests
+python run_tests.py controllers # Controller tests
+python run_tests.py api # API endpoint tests
+python run_tests.py coverage # With coverage report
+
+# Run specific test patterns
+python run_tests.py specific test_item_lifecycle
+
+# Advanced testing
+python run_tests.py parallel # Parallel execution
+python run_tests.py debug # Debug mode
+python run_tests.py watch # Watch mode (requires pytest-watch)
+```
+
+### Test Structure
+
+```
+tests/
+├── unit/ # Unit tests
+│ ├── api/ # API layer tests
+│ ├── controllers/ # Controller tests
+│ └── services/ # Service tests
+├── integration/ # Integration tests
+├── conftest.py # Test configuration
+├── requirements-test.txt # Test dependencies
+└── test_helpers.py # Test utilities
+```
+
+### Test Markers
+
+Use pytest markers to organize tests:
+- `@pytest.mark.unit` - Unit tests
+- `@pytest.mark.integration` - Integration tests
+- `@pytest.mark.controllers` - Controller tests
+- `@pytest.mark.api` - API tests
+- `@pytest.mark.slow` - Slow running tests
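+
+An illustrative marked test (not taken from the suite) that `python run_tests.py unit` would pick up:
+
+```python
+import pytest
+
+
+@pytest.mark.unit
+def test_operands_are_doubled():
+    # Placeholder logic standing in for a real calculator assertion
+    operands = [1, 2, 3]
+    assert [x * 2 for x in operands] == [2, 4, 6]
+```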
+
+## 🔐 Security & Authentication
+
+### Microsoft Entra ID Integration
+The application uses Microsoft Entra ID for authentication with the following flow:
+
+1. **Subject Token**: User authentication token from Fabric
+2. **App Token**: Service-to-service authentication
+3. **Token Validation**: Validates both tokens against configured parameters
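+
+The dual-token header format is specified in `openapi.yaml` as `SubjectAndAppToken1.0 subjectToken="...", appToken="..."`. A minimal sketch of splitting that header into its two tokens (the real `AuthenticationService` also validates signatures, audience, and claims):
+
+```python
+import re
+
+HEADER_PATTERN = re.compile(
+    r'SubjectAndAppToken1\.0 subjectToken="(?P<subject>[^"]*)", appToken="(?P<app>[^"]+)"'
+)
+
+
+def split_subject_and_app_token(authorization: str) -> tuple[str, str]:
+    """Extract the delegated (subject) and service-to-service (app) tokens."""
+    match = HEADER_PATTERN.fullmatch(authorization.strip())
+    if match is None:
+        raise ValueError("Malformed SubjectAndAppToken authorization header")
+    return match.group("subject"), match.group("app")
+```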
+
+### Required Configuration
+```json
+{
+ "PublisherTenantId": "your-tenant-id",
+ "ClientId": "your-registered-app-id",
+ "ClientSecret": "your-app-secret",
+ "Audience": "your-fabric-audience"
+}
+```
+
+### Security Headers
+The application includes security middleware:
+- CORS configuration
+- Trusted host validation
+- GZip compression
+- Request/response logging
+
+## 📊 Database & Storage
+
+### Metadata Storage
+- **File-based**: JSON files for item metadata
+- **Location**: Platform-specific directories
+ - Windows: `%APPDATA%\Microsoft_Fabric_Python_Backend\`
+ - macOS: `~/Library/Application Support/Microsoft_Fabric_Python_Backend/`
+ - Linux: `~/.config/Microsoft_Fabric_Python_Backend/`
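+
+A sketch of resolving that directory (hypothetical helper; the actual logic belongs to `ItemMetadataStore`):
+
+```python
+import os
+import sys
+from pathlib import Path
+
+APP_DIR_NAME = "Microsoft_Fabric_Python_Backend"
+
+
+def metadata_root() -> Path:
+    """Return the platform-specific metadata directory listed above."""
+    if sys.platform == "win32":
+        return Path(os.environ["APPDATA"]) / APP_DIR_NAME
+    if sys.platform == "darwin":
+        return Path.home() / "Library" / "Application Support" / APP_DIR_NAME
+    return Path.home() / ".config" / APP_DIR_NAME
+```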
+
+### OneLake Integration
+- Direct file operations in Fabric's data lake
+- Support for text and parquet file formats
+- Lakehouse table integration
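+
+OneLake exposes an ADLS Gen2-style DFS endpoint (`https://onelake.dfs.fabric.microsoft.com`, see `src/constants/environment_constants.py`). The sketch below shows the generic create/append/flush sequence of that API for writing a small text file; token acquisition is omitted and the path segments are placeholders, so treat it as an outline rather than the `OneLakeClientService` implementation:
+
+```python
+import httpx
+
+ONELAKE_DFS_BASE_URL = "https://onelake.dfs.fabric.microsoft.com"
+
+
+def write_text_file(token: str, workspace: str, file_path: str, content: str) -> None:
+    """Create-append-flush pattern of the ADLS Gen2 Path API."""
+    url = f"{ONELAKE_DFS_BASE_URL}/{workspace}/{file_path}"
+    headers = {"Authorization": f"Bearer {token}"}
+    data = content.encode("utf-8")
+    httpx.put(url, params={"resource": "file"}, headers=headers).raise_for_status()
+    httpx.patch(url, params={"action": "append", "position": "0"},
+                headers=headers, content=data).raise_for_status()
+    httpx.patch(url, params={"action": "flush", "position": str(len(data))},
+                headers=headers).raise_for_status()
+```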
+
+## 🚢 Deployment
+
+### Local Development
+```bash
+# Install dependencies
+pip install -r requirements.txt
+
+# Configure environment
+cp .env.example .env
+# Edit .env with your settings
+
+# Run development server
+python src/main.py
+```
+
+### Docker Deployment
+```bash
+# Using Docker Compose
+docker-compose up --build
+
+# Using Docker directly
+docker build -t fabric-backend .
+docker run -p 5000:5000 \
+ -e PUBLISHER_TENANT_ID=your-tenant-id \
+ -e CLIENT_ID=your-client-id \
+ -e CLIENT_SECRET=your-client-secret \
+ fabric-backend
+```
+
+### Production Deployment
+See [`deployment.md`](deployment.md) for detailed production deployment instructions including:
+- Environment variable configuration
+- SSL certificate setup
+- Kubernetes deployment
+- Load balancer configuration
+- Monitoring setup
+
+## 🔧 Configuration
+
+### Application Settings
+
+| Setting | Description | Default |
+|---------|-------------|---------|
+| `PYTHON_ENVIRONMENT` | Environment mode | `Development` |
+| `HOST` | Server host | `0.0.0.0` |
+| `PORT` | Server port | `5000` |
+| `WORKERS` | Worker processes | `1` |
+| `DEBUG` | Debug mode | `false` |
+| `LOG_LEVEL` | Logging level | `Information` |
+
+### Authentication Settings
+
+| Setting | Description | Required |
+|---------|-------------|----------|
+| `PUBLISHER_TENANT_ID` | Azure AD Tenant ID | Yes |
+| `CLIENT_ID` | Application Client ID | Yes |
+| `CLIENT_SECRET` | Application Secret | Yes |
+| `AUDIENCE` | Token audience | Yes |
+
+## 🔍 Monitoring & Logging
+
+### Health Checks
+- **Health endpoint**: `GET /health`
+- **Readiness endpoint**: `GET /ready`
+- **Metrics**: Available through application logs
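+
+A minimal sketch of what such probe endpoints look like in FastAPI (illustrative; the actual routes live in the application code):
+
+```python
+from fastapi import FastAPI
+
+app = FastAPI()
+
+
+@app.get("/health")
+async def health() -> dict[str, str]:
+    """Liveness probe: the process is up and serving requests."""
+    return {"status": "healthy"}
+
+
+@app.get("/ready")
+async def ready() -> dict[str, str]:
+    """Readiness probe: dependencies are initialized and traffic can flow."""
+    return {"status": "ready"}
+```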
+
+### Logging Configuration
+```json
+{
+ "Logging": {
+ "LogLevel": {
+ "Default": "Information",
+ "Microsoft": "Warning",
+ "Microsoft.Hosting.Lifetime": "Information"
+ }
+ }
+}
+```
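+
+The level names follow .NET conventions (`Information`, `Warning`, and so on). Below is an assumed mapping onto Python's `logging` levels, mirroring the translation the backend has to perform internally:
+
+```python
+import logging
+
+DOTNET_TO_PYTHON_LEVELS = {
+    "Trace": logging.DEBUG,
+    "Debug": logging.DEBUG,
+    "Information": logging.INFO,
+    "Warning": logging.WARNING,
+    "Error": logging.ERROR,
+    "Critical": logging.CRITICAL,
+}
+
+logging.basicConfig(level=DOTNET_TO_PYTHON_LEVELS["Information"])
+```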
+
+### Log Locations
+- **Development**: Console output
+- **Production**: File-based logging in platform-specific directories
+
+## 🛠️ Development Tools
+
+### Code Quality
+```bash
+# Format code
+black src/ tests/
+
+# Sort imports
+isort src/ tests/
+
+# Lint code
+flake8 src/ tests/
+```
+
+### Manifest Generation
+```bash
+# Generate workload manifest
+python tools/manifest_package_generator.py
+```
+
+## 📝 Contributing
+
+### Development Setup
+1. Fork the repository
+2. Create a feature branch
+3. Make your changes
+4. Add tests for new functionality
+5. Ensure all tests pass
+6. Submit a pull request
+
+### Code Style
+- Use [Black](https://black.readthedocs.io/) for code formatting
+- Follow [PEP 8](https://www.python.org/dev/peps/pep-0008/) guidelines
+- Add type hints for all functions
+- Include docstrings for public APIs
+
+## 🐛 Troubleshooting
+
+### Common Issues
+
+**Port Already in Use**
+```bash
+# Find process using port 5000
+lsof -i :5000 # macOS/Linux
+netstat -ano | findstr :5000 # Windows
+
+# Kill the process and restart
+```
+
+**Authentication Errors**
+- Verify `PUBLISHER_TENANT_ID`, `CLIENT_ID`, `CLIENT_SECRET`, and `AUDIENCE` values
+- Check Azure AD application registration
+- Ensure proper scopes are configured
+
+**Module Import Errors**
+```bash
+# Ensure PYTHONPATH is set correctly
+export PYTHONPATH=src:$PYTHONPATH
+
+# Or use the provided test runner
+python run_tests.py
+```
+
+**Configuration Issues**
+- Verify [`appsettings.json`](src/appsettings.json) is properly configured
+- Check environment variables are set correctly
+- Ensure file permissions for metadata storage
+
+### Debug Mode
+```bash
+# Run with debug logging
+DEBUG=true python src/main.py
+
+# Run tests with debug output
+python run_tests.py debug
+```
+
+## 📄 License
+
+This project is licensed under the MIT License - see the [LICENSE](../LICENSE) file for details.
+
+## 🤝 Support
+
+- **Documentation**: [Microsoft Fabric Workload Development](https://docs.microsoft.com/en-us/fabric/workload-development-kit/)
+- **Issues**: Report issues in the GitHub repository
+- **Community**: Join the Microsoft Fabric community discussions
+
+## 📊 Project Status
+
+- ✅ **Core APIs**: Item lifecycle, job management, endpoint resolution
+- ✅ **Authentication**: Microsoft Entra ID integration
+- ✅ **Storage**: OneLake and lakehouse integration
+- ✅ **Testing**: Comprehensive test suite with 80%+ coverage
+- ✅ **Documentation**: Full API documentation and deployment guides
+- ✅ **Docker**: Container support for easy deployment
+
+---
+
+**Built with ❤️ using FastAPI for Microsoft Fabric**
diff --git a/Backend/python/deployment.md b/Backend/python/deployment.md
new file mode 100644
index 0000000..0c03fcd
--- /dev/null
+++ b/Backend/python/deployment.md
@@ -0,0 +1,128 @@
+# Microsoft Fabric Python Backend - Deployment Guide
+
+## Environment Configuration
+
+### Development
+
+```bash
+# No environment variables needed; appsettings.json defaults apply
+python src/main.py
+```
+
+### Production
+
+```bash
+# Required environment variables
+export PYTHON_ENVIRONMENT=Production
+export PUBLISHER_TENANT_ID=your-tenant-id
+export CLIENT_ID=your-client-id
+export CLIENT_SECRET=your-client-secret
+export AUDIENCE=your-audience
+
+# Optional SSL for HTTPS
+export SSL_KEYFILE=/path/to/private.key
+export SSL_CERTFILE=/path/to/certificate.crt
+
+# Run the application
+python src/main.py
+```
+
+## Docker Deployment
+
+```dockerfile
+FROM python:3.11-slim
+
+WORKDIR /app
+
+# Install dependencies
+COPY requirements.txt .
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Copy application
+COPY src/ ./src/
+
+# Set production environment
+ENV PYTHON_ENVIRONMENT=Production
+
+# Health check (python:3.11-slim does not include curl, so probe with the stdlib)
+HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
+ CMD python -c "import urllib.request; urllib.request.urlopen('http://localhost:5000/health').read()" || exit 1
+
+# Run as non-root user
+RUN useradd -m -u 1000 appuser && chown -R appuser:appuser /app
+USER appuser
+
+CMD ["python", "src/main.py"]
+```
+
+## Kubernetes Deployment
+
+```yaml
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: fabric-python-backend
+spec:
+ replicas: 3
+ selector:
+ matchLabels:
+ app: fabric-python-backend
+ template:
+ metadata:
+ labels:
+ app: fabric-python-backend
+ spec:
+ containers:
+ - name: app
+ image: your-registry/fabric-python-backend:latest
+ ports:
+ - containerPort: 5000
+ env:
+ - name: PYTHON_ENVIRONMENT
+ value: "Production"
+ - name: PUBLISHER_TENANT_ID
+ valueFrom:
+ secretKeyRef:
+ name: fabric-secrets
+ key: publisher-tenant-id
+ - name: CLIENT_ID
+ valueFrom:
+ secretKeyRef:
+ name: fabric-secrets
+ key: client-id
+ - name: CLIENT_SECRET
+ valueFrom:
+ secretKeyRef:
+ name: fabric-secrets
+ key: client-secret
+ - name: AUDIENCE
+ valueFrom:
+ secretKeyRef:
+ name: fabric-secrets
+ key: audience
+ livenessProbe:
+ httpGet:
+ path: /health
+ port: 5000
+ initialDelaySeconds: 30
+ periodSeconds: 10
+ readinessProbe:
+ httpGet:
+ path: /ready
+ port: 5000
+ initialDelaySeconds: 5
+ periodSeconds: 5
+ resources:
+ requests:
+ memory: "256Mi"
+ cpu: "250m"
+ limits:
+ memory: "512Mi"
+ cpu: "500m"
+```
+
+## Monitoring
+
+- Health endpoint: `GET /health`
+- Readiness endpoint: `GET /ready`
+- Metrics: Check logs for request processing times
+- Logs: Located in platform-specific directories
+ - Windows: `%APPDATA%\Microsoft_Fabric_Python_Backend\logs`
+ - macOS: `~/Library/Application Support/Microsoft_Fabric_Python_Backend/logs`
+ - Linux: `~/.config/Microsoft_Fabric_Python_Backend/logs`
+
\ No newline at end of file
diff --git a/Backend/python/docker-compose.yaml b/Backend/python/docker-compose.yaml
new file mode 100644
index 0000000..9ef505e
--- /dev/null
+++ b/Backend/python/docker-compose.yaml
@@ -0,0 +1,24 @@
+version: '3.8'
+services:
+ fabric-backend:
+ build:
+ context: .
+ target: production
+ dockerfile: Dockerfile
+ ports:
+ - "5000:5000"
+ environment:
+ - PYTHON_ENVIRONMENT=Development
+ - PUBLISHER_TENANT_ID=${PUBLISHER_TENANT_ID}
+ - CLIENT_ID=${CLIENT_ID}
+ - CLIENT_SECRET=${CLIENT_SECRET}
+ - AUDIENCE=${AUDIENCE}
+ volumes:
+ - ./src/appsettings.json:/app/src/appsettings.json:ro
+ - ./src/appsettings.Development.json:/app/src/appsettings.Development.json:ro
+ healthcheck:
+ test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:5000/health').read()"]
+ interval: 30s
+ timeout: 10s
+ retries: 3
+ start_period: 40s
\ No newline at end of file
diff --git a/Backend/python/openapi.yaml b/Backend/python/openapi.yaml
new file mode 100644
index 0000000..6701a72
--- /dev/null
+++ b/Backend/python/openapi.yaml
@@ -0,0 +1,1055 @@
+openapi: 3.0.1
+info:
+ description: APIs to be implemented by workloads for integration with Microsoft
+ Fabric
+ title: Workload REST APIs
+ version: v1
+servers:
+- url: https://workload.contoso.com/workload-api-path-placeholder
+tags: []
+paths:
+ /workspaces/{workspaceId}/items/{itemType}/{itemId}:
+ delete:
+ description: "Upon item deletion Fabric performs basic validations and calls\
+ \ this API to notify the workload. The workload is expected to delete the\
+ \ item metadata and free resources. \n\nThis API should accept SubjectAndApp\
+ \ authentication. However, the subject token may be unavailable in some cases.\n\
+ \n## Permissions\n\nPermissions are checked by Microsoft Fabric."
+ operationId: ItemLifecycle_DeleteItem
+ parameters:
+ - description: The workspace ID.
+ in: path
+ name: workspaceId
+ required: true
+ schema:
+ format: uuid
+ type: string
+ - description: The item type.
+ in: path
+ name: itemType
+ required: true
+ schema:
+ type: string
+ - description: The item ID.
+ in: path
+ name: itemId
+ required: true
+ schema:
+ format: uuid
+ type: string
+ - description: A unique ID for correlating the request with your system when
+ a user interacts with your workload.
+ in: header
+ name: ActivityId
+ required: true
+ schema:
+ type: string
+ - description: A globally unique ID that helps Fabric correlate your request
+ with our logs. Provide this ID when reporting an issue.
+ in: header
+ name: RequestId
+ required: true
+ schema:
+ type: string
+ - description: "A dual token authorization header that allows the workload to\
+ \ validate the request origin, provide user context, and call other services.\
+ \ This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"\
+ delegated token\", appToken=\"S2S token\"`."
+ in: header
+ name: Authorization
+ required: true
+ schema:
+ type: string
+ - description: The tenant ID of the client making the request.
+ in: header
+ name: x-ms-client-tenant-id
+ required: true
+ schema:
+ type: string
+ responses:
+ "200":
+ content: {}
+ description: Successfully deleted.
+ default:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ description: "For error conditions the workload should return an appropriate\
+ \ HTTP error status code (4xx, 5xx) with detailed error information in\
+ \ the response body."
+ summary: Called by Microsoft Fabric for deleting an existing item.
+ tags:
+ - ItemLifecycle
+ patch:
+ description: |-
+ Upon item update Fabric performs basic validations and calls this API to notify the workload. The workload is expected to perform required validations, store the item metadata, allocate and/or free resources, and update the Fabric item metadata cache with item relations and ETag.
+
+ This API should accept SubjectAndApp authentication.
+
+ ## Permissions
+
+ Permissions are checked by Microsoft Fabric.
+ operationId: ItemLifecycle_UpdateItem
+ parameters:
+ - description: The workspace ID.
+ in: path
+ name: workspaceId
+ required: true
+ schema:
+ format: uuid
+ type: string
+ - description: The item type.
+ in: path
+ name: itemType
+ required: true
+ schema:
+ type: string
+ - description: The item ID.
+ in: path
+ name: itemId
+ required: true
+ schema:
+ format: uuid
+ type: string
+ - description: A unique ID for correlating the request with your system when
+ a user interacts with your workload.
+ in: header
+ name: ActivityId
+ required: true
+ schema:
+ type: string
+ - description: A globally unique ID that helps Fabric correlate your request
+ with our logs. Provide this ID when reporting an issue.
+ in: header
+ name: RequestId
+ required: true
+ schema:
+ type: string
+ - description: "A dual token authorization header that allows the workload to\
+ \ validate the request origin, provide user context, and call other services.\
+ \ This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"\
+ delegated token\", appToken=\"S2S token\"`."
+ in: header
+ name: Authorization
+ required: true
+ schema:
+ type: string
+ - description: The tenant ID of the client making the request.
+ in: header
+ name: x-ms-client-tenant-id
+ required: true
+ schema:
+ type: string
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/UpdateItemRequest'
+ description: The item update request.
+ required: true
+ responses:
+ "200":
+ content: {}
+ description: Successfully updated.
+ default:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ description: "For error conditions the workload should return an appropriate\
+ \ HTTP error status code (4xx, 5xx) with detailed error information in\
+ \ the response body."
+ summary: Called by Microsoft Fabric for updating an existing item.
+ tags:
+ - ItemLifecycle
+ x-codegen-request-body-name: updateItemRequest
+ post:
+ description: |-
+ Upon item creation Fabric performs basic validations, creates the item in a provisioning state and calls this API to notify the workload. The workload is expected to perform required validations, store the item metadata, allocate required resources, and update the Fabric item metadata cache with item relations and ETag.
+
+ This API should accept SubjectAndApp authentication.
+
+ ## Permissions
+
+ Permissions are checked by Microsoft Fabric.
+ operationId: ItemLifecycle_CreateItem
+ parameters:
+ - description: The workspace ID.
+ in: path
+ name: workspaceId
+ required: true
+ schema:
+ format: uuid
+ type: string
+ - description: The item type.
+ in: path
+ name: itemType
+ required: true
+ schema:
+ type: string
+ - description: The item ID.
+ in: path
+ name: itemId
+ required: true
+ schema:
+ format: uuid
+ type: string
+ - description: A unique ID for correlating the request with your system when
+ a user interacts with your workload.
+ in: header
+ name: ActivityId
+ required: true
+ schema:
+ type: string
+ - description: A globally unique ID that helps Fabric correlate your request
+ with our logs. Provide this ID when reporting an issue.
+ in: header
+ name: RequestId
+ required: true
+ schema:
+ type: string
+ - description: "A dual token authorization header that allows the workload to\
+ \ validate the request origin, provide user context, and call other services.\
+ \ This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"\
+ delegated token\", appToken=\"S2S token\"`."
+ in: header
+ name: Authorization
+ required: true
+ schema:
+ type: string
+ - description: The tenant ID of the client making the request.
+ in: header
+ name: x-ms-client-tenant-id
+ required: true
+ schema:
+ type: string
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/CreateItemRequest'
+ description: The item creation request.
+ required: true
+ responses:
+ "200":
+ content: {}
+ description: Successfully created.
+ default:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ description: "For error conditions the workload should return an appropriate\
+ \ HTTP error status code (4xx, 5xx) with detailed error information in\
+ \ the response body."
+ summary: Called by Microsoft Fabric for creating a new item.
+ tags:
+ - ItemLifecycle
+ x-codegen-request-body-name: createItemRequest
+ /workspaces/{workspaceId}/items/{itemType}/{itemId}/payload:
+ get:
+ description: |-
+ When the item editor <iframe> requests an item, Fabric performs basic validations and calls this API to retrieve the payload from the workload.
+
+ This API accepts SubjectAndApp authentication.
+
+ ## Permissions
+
+ Permissions are checked by Microsoft Fabric.
+ operationId: ItemLifecycle_GetItemPayload
+ parameters:
+ - description: The workspace ID.
+ in: path
+ name: workspaceId
+ required: true
+ schema:
+ format: uuid
+ type: string
+ - description: The item type.
+ in: path
+ name: itemType
+ required: true
+ schema:
+ type: string
+ - description: The item ID.
+ in: path
+ name: itemId
+ required: true
+ schema:
+ format: uuid
+ type: string
+ - description: A unique ID for correlating the request with your system when
+ a user interacts with your workload.
+ in: header
+ name: ActivityId
+ required: true
+ schema:
+ type: string
+ - description: A globally unique ID that helps Fabric correlate your request
+ with our logs. Provide this ID when reporting an issue.
+ in: header
+ name: RequestId
+ required: true
+ schema:
+ type: string
+ - description: "A dual token authorization header that allows the workload to\
+ \ validate the request origin, provide user context, and call other services.\
+ \ This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"\
+ delegated token\", appToken=\"S2S token\"`."
+ in: header
+ name: Authorization
+ required: true
+ schema:
+ type: string
+ - description: The tenant ID of the client making the request.
+ in: header
+ name: x-ms-client-tenant-id
+ required: true
+ schema:
+ type: string
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/GetItemPayloadResponse'
+ description: Completed successfully.
+ default:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ description: "For error conditions the workload should return an appropriate\
+ \ HTTP error status code (4xx, 5xx) with detailed error information in\
+ \ the response body."
+ summary: Called by Microsoft Fabric for retrieving the workload payload for
+ an item.
+ tags:
+ - ItemLifecycle
+ /workspaces/{workspaceId}/items/{itemType}/{itemId}/jobTypes/{jobType}/instances/{jobInstanceId}:
+ get:
+ description: |-
+ Fabric performs basic validations and calls this API to retrieve the item job instance state in the workload.
+
+ This API should accept SubjectAndApp authentication.
+
+ ## Permissions
+
+ Permissions are checked by Microsoft Fabric.
+ operationId: Jobs_GetItemJobInstanceState
+ parameters:
+ - description: The workspace ID.
+ in: path
+ name: workspaceId
+ required: true
+ schema:
+ format: uuid
+ type: string
+ - description: The item type.
+ in: path
+ name: itemType
+ required: true
+ schema:
+ type: string
+ - description: The item ID.
+ in: path
+ name: itemId
+ required: true
+ schema:
+ format: uuid
+ type: string
+ - description: The job type.
+ in: path
+ name: jobType
+ required: true
+ schema:
+ type: string
+ - description: The job instance ID.
+ in: path
+ name: jobInstanceId
+ required: true
+ schema:
+ format: uuid
+ type: string
+ - description: A unique ID for correlating the request with your system when
+ a user interacts with your workload.
+ in: header
+ name: ActivityId
+ required: true
+ schema:
+ type: string
+ - description: A globally unique ID that helps Fabric correlate your request
+ with our logs. Provide this ID when reporting an issue.
+ in: header
+ name: RequestId
+ required: true
+ schema:
+ type: string
+ - description: "A dual token authorization header that allows the workload to\
+ \ validate the request origin, provide user context, and call other services.\
+ \ This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"\
+ delegated token\", appToken=\"S2S token\"`."
+ in: header
+ name: Authorization
+ required: true
+ schema:
+ type: string
+ - description: The tenant ID of the client making the request.
+ in: header
+ name: x-ms-client-tenant-id
+ required: true
+ schema:
+ type: string
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ItemJobInstanceState'
+ description: Completed successfully.
+ default:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ description: "For error conditions the workload should return an appropriate\
+ \ HTTP error status code (4xx, 5xx) with detailed error information in\
+ \ the response body."
+ summary: Called by Microsoft Fabric for retrieving a job instance state.
+ tags:
+ - Jobs
+ post:
+ description: |-
+ Fabric performs basic validations and calls this API to start a new instance of the job in the workload.
+
+ This API should accept SubjectAndApp authentication.
+
+ ## Permissions
+
+ Permissions are checked by Microsoft Fabric.
+ operationId: Jobs_CreateItemJobInstance
+ parameters:
+ - description: The workspace ID.
+ in: path
+ name: workspaceId
+ required: true
+ schema:
+ format: uuid
+ type: string
+ - description: The item type.
+ in: path
+ name: itemType
+ required: true
+ schema:
+ type: string
+ - description: The item ID.
+ in: path
+ name: itemId
+ required: true
+ schema:
+ format: uuid
+ type: string
+ - description: The job type.
+ in: path
+ name: jobType
+ required: true
+ schema:
+ type: string
+ - description: The job instance ID.
+ in: path
+ name: jobInstanceId
+ required: true
+ schema:
+ format: uuid
+ type: string
+ - description: A unique ID for correlating the request with your system when
+ a user interacts with your workload.
+ in: header
+ name: ActivityId
+ required: true
+ schema:
+ type: string
+ - description: A globally unique ID that helps Fabric correlate your request
+ with our logs. Provide this ID when reporting an issue.
+ in: header
+ name: RequestId
+ required: true
+ schema:
+ type: string
+ - description: "A dual token authorization header that allows the workload to\
+ \ validate the request origin, provide user context, and call other services.\
+ \ This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"\
+ delegated token\", appToken=\"S2S token\"`."
+ in: header
+ name: Authorization
+ required: true
+ schema:
+ type: string
+ - description: The tenant ID of the client making the request.
+ in: header
+ name: x-ms-client-tenant-id
+ required: true
+ schema:
+ type: string
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/CreateItemJobInstanceRequest'
+ description: The job instance properties.
+ required: true
+ responses:
+ "202":
+ content: {}
+ description: Successfully scheduled.
+ default:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ description: "For error conditions the workload should return an appropriate\
+ \ HTTP error status code (4xx, 5xx) with detailed error information in\
+ \ the response body."
+ summary: Called by Microsoft Fabric for starting a new job instance.
+ tags:
+ - Jobs
+ x-codegen-request-body-name: createItemJobInstanceRequest
+ /workspaces/{workspaceId}/items/{itemType}/{itemId}/jobTypes/{jobType}/instances/{jobInstanceId}/cancel:
+ post:
+ description: |-
+ Fabric performs basic validations and calls this API to cancel an item job instance in the workload.
+
+ This API should accept SubjectAndApp authentication.
+
+ ## Permissions
+
+ Permissions are checked by Microsoft Fabric.
+ operationId: Jobs_CancelItemJobInstance
+ parameters:
+ - description: The workspace ID.
+ in: path
+ name: workspaceId
+ required: true
+ schema:
+ format: uuid
+ type: string
+ - description: The item type.
+ in: path
+ name: itemType
+ required: true
+ schema:
+ type: string
+ - description: The item ID.
+ in: path
+ name: itemId
+ required: true
+ schema:
+ format: uuid
+ type: string
+ - description: The job type.
+ in: path
+ name: jobType
+ required: true
+ schema:
+ type: string
+ - description: The job instance ID.
+ in: path
+ name: jobInstanceId
+ required: true
+ schema:
+ format: uuid
+ type: string
+ - description: A unique ID for correlating the request with your system when
+ a user interacts with your workload.
+ in: header
+ name: ActivityId
+ required: true
+ schema:
+ type: string
+ - description: A globally unique ID that helps Fabric correlate your request
+ with our logs. Provide this ID when reporting an issue.
+ in: header
+ name: RequestId
+ required: true
+ schema:
+ type: string
+ - description: "A dual token authorization header that allows the workload to\
+ \ validate the request origin, provide user context, and call other services.\
+ \ This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"\
+ delegated token\", appToken=\"S2S token\"`."
+ in: header
+ name: Authorization
+ required: true
+ schema:
+ type: string
+ - description: The tenant ID of the client making the request.
+ in: header
+ name: x-ms-client-tenant-id
+ required: true
+ schema:
+ type: string
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ItemJobInstanceState'
+ description: Completed successfully.
+ default:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ description: "For error conditions the workload should return an appropriate\
+ \ HTTP error status code (4xx, 5xx) with detailed error information in\
+ \ the response body."
+ summary: Called by Microsoft Fabric for cancelling a job instance.
+ tags:
+ - Jobs
+ /resolve-api-path-placeholder:
+ post:
+ description: |-
+ Resolves the endpoint for a given service called by Microsoft Fabric based on the tenant's region and workspace region. Fabric provides a set of context properties and returns the appropriate service endpoint URL and its time-to-live (TTL).
+
+ The Endpoint Resolution API is crucial for services that require dynamic endpoint determination based on operational context. This allows for optimized routing and regional compliance.
+
+ To resolve an endpoint, Fabric will send a POST request with the required context properties in the request body. The response will contain the resolved URL and its TTL, which indicates how long the URL is considered valid.
+
+ For a sample implementation and usage examples, please refer to the [Endpoint Resolution Sample Code](https://github.com/microsoft/Microsoft-Fabric-workload-development-sample/blob/main/Backend/src/Controllers/EndpointResolutionControllerImpl.cs).
+ operationId: EndpointResolution_Resolve
+ parameters:
+ - description: A unique ID for correlating the request with your system when
+ a user interacts with your workload.
+ in: header
+ name: ActivityId
+ required: true
+ schema:
+ type: string
+ - description: A globally unique ID that helps Fabric correlate your request
+ with our logs. Provide this ID when reporting an issue.
+ in: header
+ name: RequestId
+ required: true
+ schema:
+ type: string
+ - description: "A dual token authorization header that allows the workload to\
+ \ validate the request origin, provide user context, and call other services.\
+ \ This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"\
+ delegated token\", appToken=\"S2S token\"`."
+ in: header
+ name: Authorization
+ required: true
+ schema:
+ type: string
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/EndpointResolutionRequest'
+ description: Endpoint resolution request payload
+ required: true
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/EndpointResolutionResponse'
+ description: Endpoint resolution response
+ default:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ description: "For error conditions the workload should return an appropriate\
+ \ HTTP error status code (4xx, 5xx) with detailed error information in\
+ \ the response body."
+ summary: Resolve an endpoint for a given service called by Microsoft Fabric
+ tags:
+ - EndpointResolution
+ x-codegen-request-body-name: body
+components:
+ responses:
+ StandardErrorResponse:
+ content:
+ '*/*':
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ description: "For error conditions the workload should return an appropriate\
+ \ HTTP error status code (4xx, 5xx) with detailed error information in the\
+ \ response body."
+ schemas:
+ ErrorSource:
+ description: The source of the error.
+ enum:
+ - System
+ - User
+ - External
+ title: ErrorSource
+ type: string
+ JobInvokeType:
+ description: The job invoke type.
+ enum:
+ - UnknownFutureValue
+ - Scheduled
+ - Manual
+ title: JobInvokeType
+ type: string
+ JobInstanceStatus:
+ description: The status of an item job instance.
+ enum:
+ - NotStarted
+ - InProgress
+ - Completed
+ - Failed
+ - Cancelled
+ title: JobInstanceStatus
+ type: string
+ NameValuePair:
+ additionalProperties: false
+ description: A name-value pair.
+ example:
+ name: name
+ value: value
+ properties:
+ name:
+ description: The name.
+ readOnly: true
+ title: name
+ type: string
+ value:
+ description: The value.
+ readOnly: true
+ title: value
+ type: string
+ required:
+ - name
+ - value
+ title: NameValuePair
+ type: object
+ ErrorBasicInformation:
+ additionalProperties: false
+ description: The basic error information
+ properties:
+ errorCode:
+ description: The error code.
+ readOnly: true
+ title: errorCode
+ type: string
+ message:
+ description: The error message.
+ readOnly: true
+ title: message
+ type: string
+ messageParameters:
+ description: A list of parameters for formatting a localized message.
+ items:
+ readOnly: true
+ type: string
+ readOnly: true
+ title: messageParameters
+ type: array
+ required:
+ - errorCode
+ - message
+ title: ErrorBasicInformation
+ type: object
+ ErrorExtendedInformation:
+ allOf:
+ - $ref: '#/components/schemas/ErrorBasicInformation'
+ description: The extended error information.
+ example:
+ additionalParameters:
+ - name: name
+ value: value
+ - name: name
+ value: value
+ properties:
+ additionalParameters:
+ description: A list of additional parameters specific to the error.
+ items:
+ $ref: '#/components/schemas/NameValuePair'
+ readOnly: true
+ title: additionalParameters
+ type: array
+ title: ErrorExtendedInformation
+ ErrorDetails:
+ allOf:
+ - $ref: '#/components/schemas/ErrorBasicInformation'
+ description: The error details.
+ example:
+ isPermanent: true
+ moreDetails:
+ - additionalParameters:
+ - name: name
+ value: value
+ - name: name
+ value: value
+ - additionalParameters:
+ - name: name
+ value: value
+ - name: name
+ value: value
+ source: System
+ properties:
+ source:
+ $ref: '#/components/schemas/ErrorSource'
+ isPermanent:
+ description: Indicates whether the error is permanent or the operation can
+ be retried.
+ readOnly: true
+ title: isPermanent
+ type: boolean
+ moreDetails:
+ description: A list of additional error details.
+ items:
+ $ref: '#/components/schemas/ErrorExtendedInformation'
+ readOnly: true
+ title: moreDetails
+ type: array
+ required:
+ - source
+ title: ErrorDetails
+ ErrorResponse:
+ allOf:
+ - $ref: '#/components/schemas/ErrorDetails'
+ description: The error response.
+ example:
+ isPermanent: true
+ moreDetails:
+ - additionalParameters:
+ - name: name
+ value: value
+ - name: name
+ value: value
+ - additionalParameters:
+ - name: name
+ value: value
+ - name: name
+ value: value
+ source: System
+ title: ErrorResponse
+ CreateItemPayload:
+ additionalProperties: true
+ description: Creation payload specific to the workload and item type. Fabric
+ forwards this payload as-is to your backend system without storing or examining
+ its contents.
+ title: creationPayload
+ type: object
+ UpdateItemPayload:
+ additionalProperties: true
+ description: Update payload specific to the workload and item type.
+ title: updatePayload
+ type: object
+ ItemPayload:
+ additionalProperties: true
+ description: Item payload specific to the workload and item type.
+ title: itemPayload
+ type: object
+ CreateItemJobInstancePayload:
+ additionalProperties: true
+ description: "Job instance creation payload specific to the workload, item and\
+ \ job type."
+ title: creationPayload
+ type: object
+ CreateItemRequest:
+ additionalProperties: false
+ description: Create item request content.
+ example:
+ displayName: displayName
+ description: description
+ creationPayload:
+ key: ""
+ properties:
+ displayName:
+ description: The item display name. Cannot exceed 256 characters.
+ maxLength: 256
+ title: displayName
+ type: string
+ description:
+ description: The item description.
+ title: description
+ type: string
+ creationPayload:
+ additionalProperties: true
+ description: Creation payload specific to the workload and item type. Fabric
+ forwards this payload as-is to your backend system without storing or
+ examining its contents.
+ title: creationPayload
+ type: object
+ required:
+ - displayName
+ title: CreateItemRequest
+ type: object
+ UpdateItemRequest:
+ additionalProperties: false
+ description: "Update item request content. This content has 'PATCH' semantics:\
+ \ for properties which are not provided (null) the item current values should\
+ \ be preserved."
+ example:
+ updatePayload:
+ key: ""
+ displayName: displayName
+ description: description
+ properties:
+ displayName:
+ description: The item display name. Cannot exceed 256 characters.
+ maxLength: 256
+ title: displayName
+ type: string
+ description:
+ description: The item description.
+ title: description
+ type: string
+ updatePayload:
+ additionalProperties: true
+ description: Update payload specific to the workload and item type.
+ title: updatePayload
+ type: object
+ title: UpdateItemRequest
+ type: object
+ GetItemPayloadResponse:
+ additionalProperties: false
+ description: The item payload for the item editor.
+ example:
+ itemPayload:
+ key: ""
+ properties:
+ itemPayload:
+ additionalProperties: true
+ description: Item payload specific to the workload and item type.
+ title: itemPayload
+ type: object
+ title: GetItemPayloadResponse
+ type: object
+ CreateItemJobInstanceRequest:
+ additionalProperties: false
+ description: The parameters controlling the job instance execution.
+ example:
+ invokeType: UnknownFutureValue
+ creationPayload:
+ key: ""
+ properties:
+ invokeType:
+ $ref: '#/components/schemas/JobInvokeType'
+ creationPayload:
+ additionalProperties: true
+ description: "Job instance creation payload specific to the workload, item\
+ \ and job type."
+ title: creationPayload
+ type: object
+ required:
+ - invokeType
+ title: CreateItemJobInstanceRequest
+ type: object
+ ItemJobInstanceState:
+ additionalProperties: false
+ description: The job instance state.
+ example:
+ endTimeUtc: 2000-01-23T04:56:07.000+00:00
+ startTimeUtc: 2000-01-23T04:56:07.000+00:00
+ status: NotStarted
+ errorDetails:
+ isPermanent: true
+ moreDetails:
+ - additionalParameters:
+ - name: name
+ value: value
+ - name: name
+ value: value
+ - additionalParameters:
+ - name: name
+ value: value
+ - name: name
+ value: value
+ source: System
+ properties:
+ status:
+ $ref: '#/components/schemas/JobInstanceStatus'
+ startTimeUtc:
+ description: The job instance start time.
+ format: date-time
+ readOnly: true
+ title: startTimeUtc
+ type: string
+ endTimeUtc:
+ description: The job instance end time.
+ format: date-time
+ readOnly: true
+ title: endTimeUtc
+ type: string
+ errorDetails:
+ $ref: '#/components/schemas/ErrorDetails'
+ required:
+ - status
+ title: ItemJobInstanceState
+ type: object
+ EndpointResolutionContextPropertyName:
+ description: The name of the property for endpoint resolution context.
+ enum:
+ - EndpointName
+ - TenantRegion
+ - WorkspaceRegion
+ - TenantId
+ title: EndpointResolutionContextPropertyName
+ type: string
+ EndpointResolutionContextProperty:
+ additionalProperties: false
+ description: Defines a context property used in endpoint resolution. This property
+ must be specified in the workload manifest to ensure correct endpoint determination
+ by Fabric.
+ example:
+ name: EndpointName
+ value: value
+ properties:
+ name:
+ $ref: '#/components/schemas/EndpointResolutionContextPropertyName'
+ value:
+ description: The value of the context property.
+ title: value
+ type: string
+ required:
+ - name
+ - value
+ title: EndpointResolutionContextProperty
+ type: object
+ EndpointResolutionRequest:
+ additionalProperties: false
+ example:
+ context:
+ - name: EndpointName
+ value: value
+ - name: EndpointName
+ value: value
+ properties:
+ context:
+ description: Array of context properties for endpoint resolution.
+ items:
+ $ref: '#/components/schemas/EndpointResolutionContextProperty'
+ title: context
+ type: array
+ required:
+ - context
+ title: EndpointResolutionRequest
+ type: object
+ EndpointResolutionResponse:
+ additionalProperties: false
+ example:
+ ttlInMinutes: 0
+ url: url
+ properties:
+ url:
+ description: The resolved URL of the service.
+ title: url
+ type: string
+ ttlInMinutes:
+ description: Time to live in minutes for the resolved URL.
+ format: int32
+ title: ttlInMinutes
+ type: integer
+ required:
+ - ttlInMinutes
+ - url
+ title: EndpointResolutionResponse
+ type: object
+x-original-swagger-version: "2.0"
diff --git a/Backend/python/pyproject.toml b/Backend/python/pyproject.toml
new file mode 100644
index 0000000..c2826df
--- /dev/null
+++ b/Backend/python/pyproject.toml
@@ -0,0 +1,30 @@
+[build-system]
+requires = ["setuptools", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[tool.black]
+line-length = 88
+exclude = '''
+(
+ /(
+ \.eggs # exclude a few common directories in the
+ | \.git # root of the project
+ | \.hg
+ | \.mypy_cache
+ | \.tox
+ | \.venv
+ | _build
+ | buck-out
+ | build
+ | dist
+ )/
+)
+'''
+
+[tool.isort]
+profile = "black"
+skip = [
+ '.eggs', '.git', '.hg', '.mypy_cache', '.nox', '.pants.d', '.tox',
+ '.venv', '_build', 'buck-out', 'build', 'dist', 'node_modules', 'venv',
+]
+skip_gitignore = true
diff --git a/Backend/python/pytest.ini b/Backend/python/pytest.ini
new file mode 100644
index 0000000..3fad876
--- /dev/null
+++ b/Backend/python/pytest.ini
@@ -0,0 +1,64 @@
+[pytest]
+minversion = 7.0
+testpaths = tests
+python_files = test_*.py
+python_classes = Test*
+python_functions = test_*
+asyncio_mode = auto
+pythonpath = src
+
+# Coverage settings - removed from addopts to make them optional
+addopts =
+ -v
+ --tb=short
+ --strict-markers
+
+# Markers
+markers =
+ unit: Unit tests
+ integration: Integration tests
+ slow: Slow running tests
+ smoke: Smoke tests for CI/CD
+ controllers: Controller layer tests
+ services: Service layer tests
+ api: API endpoint tests
+ models: Model and domain entity tests
+
+filterwarnings =
+ ignore::pytest.PytestUnknownMarkWarning
+ ignore::PendingDeprecationWarning
+
+# Logging
+log_cli = true
+log_cli_level = INFO
+log_cli_format = %(asctime)s [%(levelname)8s] %(message)s
+log_cli_date_format = %Y-%m-%d %H:%M:%S
+
+# Timeout
+timeout = 300
+
+# Coverage settings (when running with --cov)
+[coverage:run]
+source = src
+omit =
+ */tests/*
+ */test_*
+ */__pycache__/*
+ */.venv/*
+ */venv/*
+ */migrations/*
+ */config/*
+
+[coverage:report]
+exclude_lines =
+ pragma: no cover
+ def __repr__
+ raise AssertionError
+ raise NotImplementedError
+ if __name__ == .__main__.:
+ if TYPE_CHECKING:
+ @abstractmethod
+fail_under = 80
+
+[coverage:html]
+directory = htmlcov
\ No newline at end of file
diff --git a/Backend/python/requirements.txt b/Backend/python/requirements.txt
new file mode 100644
index 0000000..037cac6
Binary files /dev/null and b/Backend/python/requirements.txt differ
diff --git a/Backend/python/run_tests.py b/Backend/python/run_tests.py
new file mode 100644
index 0000000..60ecbdf
--- /dev/null
+++ b/Backend/python/run_tests.py
@@ -0,0 +1,274 @@
+#!/usr/bin/env python3
+"""
+Cross-platform test runner for the Python Backend.
+Works on Windows, Linux, and macOS without modification.
+"""
+
+import sys
+import subprocess
+import os
+import argparse
+from pathlib import Path
+
+
+class Colors:
+ """ANSI color codes that work cross-platform."""
+ RED = '\033[91m'
+ GREEN = '\033[92m'
+ YELLOW = '\033[93m'
+ BLUE = '\033[94m'
+ RESET = '\033[0m'
+
+ @staticmethod
+ def disable():
+ """Disable colors for environments that don't support them."""
+ Colors.RED = ''
+ Colors.GREEN = ''
+ Colors.YELLOW = ''
+ Colors.BLUE = ''
+ Colors.RESET = ''
+
+
+# Check if the terminal supports colors
+if not sys.stdout.isatty() or os.environ.get('NO_COLOR'):
+ Colors.disable()
+# Windows color support
+elif sys.platform == 'win32':
+ try:
+ import colorama
+ colorama.init()
+ except ImportError:
+ # If colorama is not available, disable colors on Windows
+ if os.environ.get('TERM') is None:
+ Colors.disable()
+
+
+def print_colored(message, color=Colors.RESET):
+ """Print a colored message."""
+ print(f"{color}{message}{Colors.RESET}")
+
+
+def setup_environment():
+ """Setup the environment for running tests."""
+ # Get the root directory
+ root_dir = Path(__file__).parent.absolute()
+ src_dir = root_dir / "src"
+
+ # Set PYTHONPATH to include src directory
+ env = os.environ.copy()
+
+ if 'PYTHONPATH' in env:
+ env['PYTHONPATH'] = f"{src_dir}{os.pathsep}{env['PYTHONPATH']}"
+ else:
+ env['PYTHONPATH'] = str(src_dir)
+
+ # Also add to sys.path for this process
+ if str(src_dir) not in sys.path:
+ sys.path.insert(0, str(src_dir))
+
+ return env
+
+
+def check_virtual_env():
+ """Check if a virtual environment is activated."""
+ if not os.environ.get('VIRTUAL_ENV'):
+ print_colored("Warning: Virtual environment not activated", Colors.YELLOW)
+ print_colored("Please activate your virtual environment before running tests.", Colors.YELLOW)
+
+ # Provide platform-specific instructions
+ if sys.platform.startswith('win'):
+ print(" Windows: .\\venv\\Scripts\\activate")
+ else:
+ print(" Linux/Mac: source venv/bin/activate")
+
+ response = input("\nContinue anyway? (y/N): ").lower()
+ if response != 'y':
+ sys.exit(1)
+
+
+def install_dependencies():
+ """Install test dependencies."""
+ print_colored("Installing test dependencies...", Colors.GREEN)
+ requirements_path = Path(__file__).parent / "tests" / "requirements-test.txt"
+
+ if not requirements_path.exists():
+ print_colored(f"Error: {requirements_path} not found!", Colors.RED)
+ sys.exit(1)
+
+ # Also install main requirements
+ main_requirements = Path(__file__).parent / "requirements.txt"
+ if main_requirements.exists():
+ print_colored("Installing main dependencies...", Colors.GREEN)
+ result = subprocess.run(
+ [sys.executable, "-m", "pip", "install", "-r", str(main_requirements)],
+ capture_output=True,
+ text=True
+ )
+ if result.returncode != 0:
+ print_colored("Failed to install main dependencies:", Colors.RED)
+ print(result.stderr)
+ sys.exit(1)
+
+ result = subprocess.run(
+ [sys.executable, "-m", "pip", "install", "-r", str(requirements_path)],
+ capture_output=True,
+ text=True
+ )
+
+ if result.returncode != 0:
+ print_colored("Failed to install test dependencies:", Colors.RED)
+ print(result.stderr)
+ sys.exit(1)
+
+
+def run_tests(test_type, specific_test=None):
+ """Run the tests based on the specified type."""
+ # Setup environment
+ env = setup_environment()
+
+ # Build pytest command
+ cmd = [sys.executable, "-m", "pytest"]
+
+ # Add common options
+ cmd.extend(["-v", "--tb=short"])
+
+ cmd.extend([
+ "-W", "ignore::pytest.PytestUnknownMarkWarning",
+ "-W", "ignore::PendingDeprecationWarning"
+ ])
+
+ if test_type == "unit":
+ print_colored("Running unit tests...", Colors.GREEN)
+ cmd.extend(["tests/", "-m", "unit"])
+
+ elif test_type == "integration":
+ print_colored("Running integration tests...", Colors.GREEN)
+ cmd.extend(["tests/", "-m", "integration"])
+
+ elif test_type == "controllers":
+ print_colored("Running controller tests...", Colors.GREEN)
+ cmd.extend(["tests/", "-m", "controllers"])
+
+ elif test_type == "api":
+ print_colored("Running API tests...", Colors.GREEN)
+ cmd.extend(["tests/", "-m", "api"])
+
+ elif test_type == "services":
+ print_colored("Running service tests...", Colors.GREEN)
+ cmd.extend(["tests/", "-m", "services"])
+
+ elif test_type == "models":
+ print_colored("Running model/domain entity tests...", Colors.GREEN)
+ cmd.extend(["tests/", "-m", "models"])
+
+ elif test_type == "coverage":
+ print_colored("Running tests with coverage...", Colors.GREEN)
+ cmd.extend(["tests/", "--cov=src", "--cov-report=html", "--cov-report=term"])
+
+ elif test_type == "specific":
+ if not specific_test:
+ print_colored("Please specify a test file or pattern", Colors.RED)
+ print("Usage: python run_tests.py specific ")
+ sys.exit(1)
+ print_colored(f"Running specific tests: {specific_test}", Colors.GREEN)
+ cmd.extend(["tests/", "-k", specific_test, "-vv"])
+
+ elif test_type == "parallel":
+ print_colored("Running tests in parallel...", Colors.GREEN)
+ cmd.extend(["tests/", "-n", "auto"])
+
+ elif test_type == "watch":
+ print_colored("Running tests in watch mode...", Colors.GREEN)
+ # Use pytest-watch if available
+ result = subprocess.run([sys.executable, "-m", "pip", "show", "pytest-watch"],
+ capture_output=True, text=True)
+ if result.returncode == 0:
+ cmd = [sys.executable, "-m", "pytest_watch", "tests/", "--", "-v"]
+ else:
+ print_colored("pytest-watch not installed. Install it with: pip install pytest-watch", Colors.YELLOW)
+ print_colored("Running normal test mode instead.", Colors.YELLOW)
+ cmd.append("tests/")
+
+ elif test_type == "debug":
+ print_colored("Running tests with debugging output...", Colors.GREEN)
+ cmd.extend(["tests/", "-vv", "-s", "--tb=long"])
+
+ else:
+ print_colored("Running all tests...", Colors.GREEN)
+ cmd.append("tests/")
+
+ # Run the tests with the modified environment
+ print_colored(f"Executing: {' '.join(cmd)}", Colors.BLUE)
+ print_colored(f"PYTHONPATH: {env.get('PYTHONPATH', 'Not set')}", Colors.BLUE)
+
+ result = subprocess.run(cmd, env=env)
+
+ if result.returncode == 0:
+ print_colored("\n✓ Tests passed successfully!", Colors.GREEN)
+
+ if test_type == "coverage":
+ coverage_path = Path(__file__).parent / "htmlcov" / "index.html"
+ print_colored(f"\nCoverage report generated at: {coverage_path}", Colors.BLUE)
+ else:
+ print_colored("\n✗ Tests failed!", Colors.RED)
+ sys.exit(1)
+
+
+def main():
+ """Main entry point."""
+ parser = argparse.ArgumentParser(
+ description="Run tests for the Python Backend",
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ epilog="""
+Examples:
+ python run_tests.py # Run all tests
+ python run_tests.py unit # Run unit tests only
+ python run_tests.py integration # Run integration tests only
+ python run_tests.py controllers # Run controller tests only
+ python run_tests.py api # Run API tests only
+ python run_tests.py services # Run service tests only
+ python run_tests.py models # Run model/domain entity tests only
+ python run_tests.py coverage # Run with coverage report
+ python run_tests.py specific test_item_lifecycle # Run specific tests
+ python run_tests.py debug # Run with debugging output
+ """
+ )
+
+ parser.add_argument(
+ 'type',
+ nargs='?',
+ default='all',
+ choices=['all', 'unit', 'integration', 'controllers', 'api', 'services', 'models', 'coverage', 'specific', 'parallel', 'watch', 'debug'],
+ help='Type of tests to run'
+ )
+
+ parser.add_argument(
+ 'specific_test',
+ nargs='?',
+ help='Specific test pattern (only used with "specific" type)'
+ )
+
+ parser.add_argument(
+ '--no-deps',
+ action='store_true',
+ help='Skip installing test dependencies'
+ )
+
+ args = parser.parse_args()
+
+ print_colored("Python Backend Test Runner", Colors.GREEN)
+ print("=" * 40)
+
+ # Check virtual environment
+ check_virtual_env()
+
+ # Install dependencies unless skipped
+ if not args.no_deps:
+ install_dependencies()
+
+ # Run tests
+ run_tests(args.type, args.specific_test)
+
+
+if __name__ == "__main__":
+ main()
\ No newline at end of file
diff --git a/Backend/python/setup.cfg b/Backend/python/setup.cfg
new file mode 100644
index 0000000..11e8590
--- /dev/null
+++ b/Backend/python/setup.cfg
@@ -0,0 +1,21 @@
+[metadata]
+name = fabric_api
+version = v1
+description = APIs to be implemented by workloads for integration with Microsoft Fabric
+long_description = file: README.md
+keywords = OpenAPI Workload REST APIs
+python_requires = >= 3.11
+classifiers =
+ Operating System :: OS Independent
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 3.11
+ Programming Language :: Python :: 3.12
+
+[options]
+install_requires = fastapi[all]
+setup_requires = setuptools
+package_dir = =src
+packages = find_namespace:
+
+[options.packages.find]
+where = src
\ No newline at end of file
diff --git a/Backend/src/Packages/manifest/CommonTypesDefinitions.xsd b/Backend/python/src/Packages/manifest/CommonTypesDefinitions.xsd
similarity index 100%
rename from Backend/src/Packages/manifest/CommonTypesDefinitions.xsd
rename to Backend/python/src/Packages/manifest/CommonTypesDefinitions.xsd
diff --git a/Backend/python/src/Packages/manifest/Item1.xml b/Backend/python/src/Packages/manifest/Item1.xml
new file mode 100644
index 0000000..b02caca
--- /dev/null
+++ b/Backend/python/src/Packages/manifest/Item1.xml
@@ -0,0 +1,21 @@
+
+
+ -
+
+
+
+ PerItem
+ PerItem
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Backend/src/Packages/manifest/ItemDefinition.xsd b/Backend/python/src/Packages/manifest/ItemDefinition.xsd
similarity index 100%
rename from Backend/src/Packages/manifest/ItemDefinition.xsd
rename to Backend/python/src/Packages/manifest/ItemDefinition.xsd
diff --git a/Backend/src/Packages/manifest/ManifestPackageDebug.nuspec b/Backend/python/src/Packages/manifest/ManifestPackageDebug.nuspec
similarity index 100%
rename from Backend/src/Packages/manifest/ManifestPackageDebug.nuspec
rename to Backend/python/src/Packages/manifest/ManifestPackageDebug.nuspec
diff --git a/Backend/python/src/Packages/manifest/ManifestPackageRelease.nuspec b/Backend/python/src/Packages/manifest/ManifestPackageRelease.nuspec
new file mode 100644
index 0000000..a163f34
--- /dev/null
+++ b/Backend/python/src/Packages/manifest/ManifestPackageRelease.nuspec
@@ -0,0 +1,24 @@
+
+
+
+
+ ManifestPackageRelease
+ 1.0.0
+ author
+ owner
+ A NuGet package ready for uploading to fabric.
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Backend/src/Packages/manifest/WorkloadDefinition.xsd b/Backend/python/src/Packages/manifest/WorkloadDefinition.xsd
similarity index 100%
rename from Backend/src/Packages/manifest/WorkloadDefinition.xsd
rename to Backend/python/src/Packages/manifest/WorkloadDefinition.xsd
diff --git a/Backend/python/src/Packages/manifest/WorkloadManifest.xml b/Backend/python/src/Packages/manifest/WorkloadManifest.xml
new file mode 100644
index 0000000..f1cca4a
--- /dev/null
+++ b/Backend/python/src/Packages/manifest/WorkloadManifest.xml
@@ -0,0 +1,28 @@
+
+
+
+ 1.0.0
+
+
+ Public
+
+ 00000000-0000-0000-0000-000000000000
+ http://localhost
+ api://localdevinstance/tenantID/Org.WorkloadSample
+
+
+
+ Workload
+ https://be.endpointurl.net/workload
+ false
+
+
+ Frontend
+ https://fe.endpointurl.net
+ false
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Backend/python/src/appsettings.Development.json b/Backend/python/src/appsettings.Development.json
new file mode 100644
index 0000000..070a0d1
--- /dev/null
+++ b/Backend/python/src/appsettings.Development.json
@@ -0,0 +1,5 @@
+{
+ "Logging": {
+ "LogLevel": "Information"
+ }
+}
\ No newline at end of file
diff --git a/Backend/python/src/appsettings.json b/Backend/python/src/appsettings.json
new file mode 100644
index 0000000..a54403a
--- /dev/null
+++ b/Backend/python/src/appsettings.json
@@ -0,0 +1,32 @@
+{
+ "PublisherTenantId": "",
+ "ClientId": "",
+ "ClientSecret": "",
+ "Audience": "",
+ "Application": {
+ "Name": "Microsoft Fabric Python Backend",
+ "Environment": "Development",
+ "Debug": false
+ },
+ "Server": {
+ "Host": "0.0.0.0",
+ "Port": 5000,
+ "Workers": 1,
+ "ShutdownTimeout": 3,
+ "ForceShutdownTimeout": 5
+ },
+ "Security": {
+ "AllowedHosts": ["*"],
+ "CorsOrigins": ["*"]
+ },
+ "Logging": {
+ "LogLevel": "Information"
+ },
+ "Storage": {
+ "Metadata": {
+ "JobsDirectory": "jobs",
+ "CommonMetadataFile": "common_metadata.json",
+ "TypeSpecificMetadataFile": "type_specific_metadata.json"
+ }
+ }
+}
\ No newline at end of file
diff --git a/Backend/python/src/constants/__init__.py b/Backend/python/src/constants/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Backend/python/src/constants/api_constants.py b/Backend/python/src/constants/api_constants.py
new file mode 100644
index 0000000..7949022
--- /dev/null
+++ b/Backend/python/src/constants/api_constants.py
@@ -0,0 +1,5 @@
+from constants.environment_constants import EnvironmentConstants
+class ApiConstants:
+    """API constants."""
+ WORKLOAD_CONTROL_API_BASE_URL = f"{EnvironmentConstants.FABRIC_API_BASE_URL}/v1/workload-control"
+ DEFAULT_OPENID_CONFIG_ENDPOINT = f"{EnvironmentConstants.AAD_INSTANCE_URL}/common/.well-known/openid-configuration"
\ No newline at end of file
diff --git a/Backend/python/src/constants/environment_constants.py b/Backend/python/src/constants/environment_constants.py
new file mode 100644
index 0000000..482ae76
--- /dev/null
+++ b/Backend/python/src/constants/environment_constants.py
@@ -0,0 +1,10 @@
+class EnvironmentConstants:
+ """Environment constants."""
+
+ FABRIC_BACKEND_RESOURCE_ID = "https://analysis.windows.net/powerbi/api"
+ AAD_INSTANCE_URL = "https://login.microsoftonline.com"
+ ONELAKE_DFS_BASE_URL = "https://onelake.dfs.fabric.microsoft.com"
+ FABRIC_API_BASE_URL = "https://api.fabric.microsoft.com"
+
+ FABRIC_BACKEND_APP_ID = "00000009-0000-0000-c000-000000000000"
+ FABRIC_CLIENT_FOR_WORKLOADS_APP_ID = "d2450708-699c-41e3-8077-b0c8341509aa"
\ No newline at end of file
diff --git a/Backend/python/src/constants/error_codes.py b/Backend/python/src/constants/error_codes.py
new file mode 100644
index 0000000..3d88a74
--- /dev/null
+++ b/Backend/python/src/constants/error_codes.py
@@ -0,0 +1,27 @@
+class ErrorCodes:
+ """Error codes for different types of errors."""
+
+ INTERNAL_ERROR = "InternalError"
+ INVALID_REQUEST = "InvalidRequest"
+ INVALID_PARAMETER = "InvalidParameter"
+
+ class Authentication:
+ AUTH_UI_REQUIRED = "AuthUIRequired"
+ AUTH_ERROR = "AuthError"
+
+ class Security:
+ ACCESS_DENIED = "AccessDenied"
+
+ class ItemPayload:
+ INVALID_ITEM_PAYLOAD = "InvalidItemPayload"
+ MISSING_LAKEHOUSE_REFERENCE = "MissingLakehouseReference"
+
+ class RateLimiting:
+ TOO_MANY_REQUESTS = "TooManyRequests"
+
+ class Item:
+ ITEM_METADATA_NOT_FOUND = "ItemMetadataNotFound"
+ DOUBLED_OPERANDS_OVERFLOW = "DoubledOperandsOverflow"
+
+ class Kusto:
+ KUSTO_DATA_EXCEPTION = "KustoDataException"
\ No newline at end of file
diff --git a/Backend/python/src/constants/http_constants.py b/Backend/python/src/constants/http_constants.py
new file mode 100644
index 0000000..f1a19b4
--- /dev/null
+++ b/Backend/python/src/constants/http_constants.py
@@ -0,0 +1,12 @@
+class HttpHeaders:
+    """HTTP header name constants used by the workload."""
+ AUTHORIZATION = "Authorization"
+ X_MS_CLIENT_TENANT_ID = "x-ms-client-tenant-id"
+ X_MS_CLIENT_REQUEST_ID = "x-ms-client-request-id"
+ ACTIVITY_ID = "activity-id"
+ REQUEST_ID = "request-id"
+
+class AuthorizationSchemes:
+    """HTTP authorization scheme constants used by the workload."""
+ BEARER = "Bearer"
+
\ No newline at end of file
diff --git a/Backend/python/src/constants/item1_field_names.py b/Backend/python/src/constants/item1_field_names.py
new file mode 100644
index 0000000..6db2cb6
--- /dev/null
+++ b/Backend/python/src/constants/item1_field_names.py
@@ -0,0 +1,16 @@
+class Item1FieldNames:
+ """Constants for Item1 metadata field names."""
+ # Payload structure fields
+ PAYLOAD_METADATA = "item1Metadata" # The key in the payload containing Item1 metadata
+
+ # JSON/Client-side field names (camelCase)
+ LAKEHOUSE_FIELD = "lakehouse"
+ OPERAND1_FIELD = "operand1"
+ OPERAND2_FIELD = "operand2"
+ OPERATOR_FIELD = "operator"
+ USE_ONELAKE_FIELD = "useOneLake"
+ RESULT_LOCATION_FIELD = "lastCalculationResultLocation"
+
+ # Nested field names
+ LAKEHOUSE_WORKSPACE_ID_FIELD = "workspaceId"
+ LAKEHOUSE_ID_FIELD = "id"
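+
+    # Illustrative payload shape these field names address (hypothetical
+    # values; the authoritative schema is the item's create/update payload):
+    #   {
+    #     "item1Metadata": {
+    #       "lakehouse": {"workspaceId": "<guid>", "id": "<guid>"},
+    #       "operand1": 1,
+    #       "operand2": 2,
+    #       "operator": "Add",
+    #       "useOneLake": false,
+    #       "lastCalculationResultLocation": "<path>"
+    #     }
+    #   }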
\ No newline at end of file
diff --git a/Backend/python/src/constants/job_types.py b/Backend/python/src/constants/job_types.py
new file mode 100644
index 0000000..6aa7b9c
--- /dev/null
+++ b/Backend/python/src/constants/job_types.py
@@ -0,0 +1,12 @@
+"""
+Constants for job types used by the workload.
+"""
+from constants.workload_constants import WorkloadConstants
+
+class Item1JobType:
+ """Job types for Item1."""
+ SCHEDULED_JOB = f"{WorkloadConstants.ItemTypes.ITEM1}.ScheduledJob"
+ CALCULATE_AS_TEXT = f"{WorkloadConstants.ItemTypes.ITEM1}.CalculateAsText"
+ CALCULATE_AS_PARQUET = f"{WorkloadConstants.ItemTypes.ITEM1}.CalculateAsParquet"
+ LONG_RUNNING_CALCULATE_AS_TEXT = f"{WorkloadConstants.ItemTypes.ITEM1}.LongRunningCalculateAsText"
+ INSTANT_JOB = f"{WorkloadConstants.ItemTypes.ITEM1}.InstantJob"
\ No newline at end of file
diff --git a/Backend/python/src/constants/onelake_constants.py b/Backend/python/src/constants/onelake_constants.py
new file mode 100644
index 0000000..dd8ac70
--- /dev/null
+++ b/Backend/python/src/constants/onelake_constants.py
@@ -0,0 +1,4 @@
+class OneLakeConstants:
+ """Constants for OneLake integration."""
+
+ ONELAKE_SCOPES = ["https://storage.azure.com/.default"]
\ No newline at end of file
diff --git a/Backend/python/src/constants/workload_constants.py b/Backend/python/src/constants/workload_constants.py
new file mode 100644
index 0000000..e6144a4
--- /dev/null
+++ b/Backend/python/src/constants/workload_constants.py
@@ -0,0 +1,10 @@
+class WorkloadConstants:
+ """Constants for the workload."""
+ WORKLOAD_NAME = "Org.WorkloadSample"
+
+ class ItemTypes:
+ """Nested class containing item type constants."""
+ ITEM1 = None # placeholder, will be set after class definition
+
+WorkloadConstants.ItemTypes.ITEM1 = f"{WorkloadConstants.WORKLOAD_NAME}.SampleWorkloadItem"
+
\ No newline at end of file
diff --git a/Backend/python/src/constants/workload_scopes.py b/Backend/python/src/constants/workload_scopes.py
new file mode 100644
index 0000000..dbc0ec4
--- /dev/null
+++ b/Backend/python/src/constants/workload_scopes.py
@@ -0,0 +1,13 @@
+from constants.environment_constants import EnvironmentConstants
+class WorkloadScopes:
+ """Constants for OAuth scopes used in the workload."""
+ FABRIC_BACKEND_RESOURCE_ID = EnvironmentConstants.FABRIC_BACKEND_RESOURCE_ID
+ # Item1 scopes
+ ITEM1_READ_WRITE_ALL = "Item1.ReadWrite.All"
+ ITEM1_READ_ALL = "Item1.Read.All"
+
+ # Lakehouse scopes
+ FABRIC_LAKEHOUSE_READ_ALL = "FabricLakehouse.Read.All"
+ FABRIC_LAKEHOUSE_READ_WRITE_ALL = "FabricLakehouse.ReadWrite.All"
+
+ FABRIC_WORKLOAD_CONTROL = "FabricWorkloadControl"
\ No newline at end of file
diff --git a/Backend/python/src/core/dependencies.py b/Backend/python/src/core/dependencies.py
new file mode 100644
index 0000000..a199c5a
--- /dev/null
+++ b/Backend/python/src/core/dependencies.py
@@ -0,0 +1,23 @@
+"""
+FastAPI dependency injection helpers.
+These provide clean interfaces for injecting services into controllers.
+"""
+from typing import Annotated
+from fastapi import Depends
+
+from services.authentication import AuthenticationService, get_authentication_service
+from services.authorization import AuthorizationHandler, get_authorization_service
+from services.item_factory import ItemFactory, get_item_factory
+from services.item_metadata_store import ItemMetadataStore, get_item_metadata_store
+from services.http_client import HttpClientService, get_http_client_service
+from services.lakehouse_client_service import LakehouseClientService, get_lakehouse_client_service
+from services.onelake_client_service import OneLakeClientService, get_onelake_client_service
+
+# Type aliases for cleaner dependency injection
+AuthServiceDep = Annotated[AuthenticationService, Depends(get_authentication_service)]
+AuthHandlerDep = Annotated[AuthorizationHandler, Depends(get_authorization_service)]
+ItemFactoryDep = Annotated[ItemFactory, Depends(get_item_factory)]
+ItemMetadataStoreDep = Annotated[ItemMetadataStore, Depends(get_item_metadata_store)]
+HttpClientDep = Annotated[HttpClientService, Depends(get_http_client_service)]
+LakehouseClientDep = Annotated[LakehouseClientService, Depends(get_lakehouse_client_service)]
+OneLakeClientDep = Annotated[OneLakeClientService, Depends(get_onelake_client_service)]
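+
+# Usage sketch (hypothetical route; the router and endpoint names are
+# illustrative, not part of this module):
+#
+#   from fastapi import APIRouter
+#   from core.dependencies import AuthServiceDep
+#
+#   router = APIRouter()
+#
+#   @router.get("/whoami")
+#   async def whoami(auth_service: AuthServiceDep):
+#       # FastAPI resolves the Annotated alias via get_authentication_service()
+#       return {"service": type(auth_service).__name__}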
\ No newline at end of file
diff --git a/Backend/python/src/core/service_initializer.py b/Backend/python/src/core/service_initializer.py
new file mode 100644
index 0000000..a186c02
--- /dev/null
+++ b/Backend/python/src/core/service_initializer.py
@@ -0,0 +1,146 @@
+import asyncio
+import logging
+from typing import TypeVar
+
+from services.configuration_service import get_configuration_service
+from core.service_registry import get_service_registry
+from services.open_id_connect_configuration import OpenIdConnectConfigurationManager, get_openid_manager_service
+from services.authentication import AuthenticationService
+from services.authorization import AuthorizationHandler
+from services.http_client import HttpClientService
+from services.item_factory import ItemFactory
+from services.item_metadata_store import ItemMetadataStore
+from services.lakehouse_client_service import LakehouseClientService
+from services.onelake_client_service import OneLakeClientService
+
+logger = logging.getLogger(__name__)
+
+T = TypeVar('T')
+
+class ServiceInitializer:
+ """Handles initialization of all application services with optimized parallel loading."""
+
+ def __init__(self):
+ self.registry = get_service_registry()
+ self._initialization_lock = asyncio.Lock()
+
+ async def initialize_all_services(self) -> None:
+ """
+ Initialize all services with parallel execution where possible.
+ This should be called once at application startup.
+ """
+ async with self._initialization_lock:
+ if self.registry.is_initialized:
+ logger.warning("Services already initialized, skipping...")
+ return
+
+ logger.info("Starting service initialization...")
+
+ try:
+ # 0. Initialize ConfigurationService first (all other services may depend on it)
+ config_service = get_configuration_service()
+ logger.info(f"Configuration loaded for environment: {config_service.get_environment()}")
+
+ # 1. Initialize services with no dependencies in parallel
+ logger.info("Initializing independent services...")
+ independent_tasks = [
+ self._initialize_openid_manager(),
+ self._initialize_http_client(),
+ self._initialize_item_metadata_store(),
+ ]
+
+ # Execute independent initializations in parallel
+ results = await asyncio.gather(*independent_tasks, return_exceptions=True)
+
+ # Check for any initialization errors
+ for i, result in enumerate(results):
+ if isinstance(result, Exception):
+ logger.error(f"Failed to initialize service at index {i}: {result}")
+ raise result
+
+ # 2. Initialize services that depend on OpenID manager
+ await self._initialize_authentication_service()
+
+ # 3. Initialize remaining services in parallel
+ logger.info("Initializing dependent services...")
+ dependent_tasks = [
+ self._initialize_authorization_handler(),
+ self._initialize_item_factory(),
+ self._initialize_lakehouse_client(),
+ self._initialize_onelake_client(),
+ ]
+
+ results = await asyncio.gather(*dependent_tasks, return_exceptions=True)
+
+ # Check for any initialization errors
+ for i, result in enumerate(results):
+ if isinstance(result, Exception):
+ logger.error(f"Failed to initialize dependent service at index {i}: {result}")
+ raise result
+
+ self.registry.mark_initialized()
+ logger.info("All services initialized successfully!")
+
+ except Exception as e:
+ logger.error(f"Failed to initialize services: {str(e)}")
+ self.registry.clear()
+ raise
+
+ async def _initialize_openid_manager(self) -> None:
+ """Initialize OpenID Connect Configuration Manager."""
+ logger.info("Initializing OpenID Connect Configuration Manager...")
+ openid_manager = await get_openid_manager_service()
+ self.registry.register(OpenIdConnectConfigurationManager, openid_manager)
+
+ async def _initialize_http_client(self) -> None:
+ """Initialize HTTP Client Service."""
+ logger.info("Initializing HTTP Client Service...")
+ http_client = HttpClientService()
+ self.registry.register(HttpClientService, http_client)
+
+ async def _initialize_item_metadata_store(self) -> None:
+ """Initialize Item Metadata Store."""
+ logger.info("Initializing Item Metadata Store...")
+ metadata_store = ItemMetadataStore()
+ self.registry.register(ItemMetadataStore, metadata_store)
+
+ async def _initialize_authentication_service(self) -> None:
+ """Initialize Authentication Service."""
+ logger.info("Initializing Authentication Service...")
+ openid_manager = self.registry.get(OpenIdConnectConfigurationManager)
+ auth_service = AuthenticationService(openid_manager=openid_manager)
+ self.registry.register(AuthenticationService, auth_service)
+
+ async def _initialize_authorization_handler(self) -> None:
+ """Initialize Authorization Handler."""
+ logger.info("Initializing Authorization Handler...")
+ auth_handler = AuthorizationHandler()
+ self.registry.register(AuthorizationHandler, auth_handler)
+
+ async def _initialize_item_factory(self) -> None:
+ """Initialize Item Factory."""
+ logger.info("Initializing Item Factory...")
+ item_factory = ItemFactory()
+ self.registry.register(ItemFactory, item_factory)
+
+ async def _initialize_lakehouse_client(self) -> None:
+ """Initialize Lakehouse Client Service."""
+ logger.info("Initializing Lakehouse Client Service...")
+ lakehouse_client = LakehouseClientService()
+ self.registry.register(LakehouseClientService, lakehouse_client)
+
+ async def _initialize_onelake_client(self) -> None:
+ """Initialize OneLake Client Service."""
+ logger.info("Initializing OneLake Client Service...")
+ onelake_client = OneLakeClientService()
+ self.registry.register(OneLakeClientService, onelake_client)
+
+def get_service_initializer() -> ServiceInitializer:
+ """Get the singleton ServiceInitializer instance."""
+ registry = get_service_registry()
+ if not registry.has(ServiceInitializer):
+ initializer = ServiceInitializer()
+ registry.register(ServiceInitializer, initializer)
+ logger.debug("ServiceInitializer registered in ServiceRegistry")
+ return registry.get(ServiceInitializer)
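+
+# Usage sketch (hypothetical FastAPI lifespan wiring; assumes the app module
+# imports get_service_initializer and get_service_registry):
+#
+#   from contextlib import asynccontextmanager
+#   from fastapi import FastAPI
+#
+#   @asynccontextmanager
+#   async def lifespan(app: FastAPI):
+#       await get_service_initializer().initialize_all_services()
+#       yield
+#       await get_service_registry().cleanup()
+#
+#   app = FastAPI(lifespan=lifespan)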
\ No newline at end of file
diff --git a/Backend/python/src/core/service_registry.py b/Backend/python/src/core/service_registry.py
new file mode 100644
index 0000000..b11e670
--- /dev/null
+++ b/Backend/python/src/core/service_registry.py
@@ -0,0 +1,179 @@
+import asyncio
+import logging
+from typing import Dict, List, Type, TypeVar, Optional, Callable, Any
+from threading import Lock
+import inspect
+
+logger = logging.getLogger(__name__)
+
+T = TypeVar('T')
+
+class ServiceRegistry:
+ """
+ Thread-safe service registry for managing singleton instances.
+ Supports both sync and async cleanup methods.
+ """
+
+ _instance: Optional['ServiceRegistry'] = None
+ _lock = Lock()
+
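+    # Double-checked locking: the first (unlocked) check keeps the hot path
+    # cheap; the second check inside the lock prevents two threads that both
+    # passed the first check from racing to create separate instances.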
+ def __new__(cls) -> 'ServiceRegistry':
+ if cls._instance is None:
+ with cls._lock:
+ if cls._instance is None:
+ cls._instance = super().__new__(cls)
+ return cls._instance
+
+ def __init__(self):
+ # Only initialize once
+ if not hasattr(self, '_initialized_once'):
+ self._services = {}
+ self._factories = {}
+ self._cleanup_handlers = []
+ self._initialized = False
+ self._is_cleaning_up = False
+ self._initialized_once = True
+
+ def register_factory(self, service_type: Type[T], factory: Callable[[], T]) -> None:
+ """Register a factory function for lazy service creation."""
+ self._factories[service_type] = factory
+ logger.debug(f"Registered factory for {service_type.__name__}")
+
+ def register(self, service_type: Type[T], instance: T) -> None:
+ """Register a service instance directly."""
+ self._services[service_type] = instance
+ logger.debug(f"Registered instance for {service_type.__name__}")
+
+ # Auto-register cleanup handlers in priority order
+ if hasattr(instance, 'dispose_async') and callable(getattr(instance, 'dispose_async')):
+ self._cleanup_handlers.append((service_type.__name__, instance))
+ elif hasattr(instance, 'close') and callable(getattr(instance, 'close')):
+ self._cleanup_handlers.append((service_type.__name__, instance))
+
+ def get(self, service_type: Type[T]) -> T:
+ """Get a service instance. Creates it using factory if not exists."""
+ if service_type in self._services:
+ return self._services[service_type]
+
+ if service_type in self._factories:
+ instance = self._factories[service_type]()
+ self._services[service_type] = instance
+ logger.info(f"Created service instance: {service_type.__name__}")
+ # Auto-register cleanup
+ if hasattr(instance, 'dispose_async') and callable(getattr(instance, 'dispose_async')):
+ self._cleanup_handlers.append((service_type.__name__, instance))
+ elif hasattr(instance, 'close') and callable(getattr(instance, 'close')):
+ self._cleanup_handlers.append((service_type.__name__, instance))
+
+ return instance
+
+ raise KeyError(f"Service not registered: {service_type.__name__}")
+
+ def has(self, service_type: Type[T]) -> bool:
+ """Check if a service is registered."""
+ return service_type in self._services or service_type in self._factories
+
+ async def cleanup(self) -> None:
+ """
+ Cleanup all registered services that have cleanup methods.
+ Properly handles both sync and async cleanup methods.
+ """
+ if self._is_cleaning_up:
+ logger.debug("Cleanup already in progress, skipping...")
+ return
+
+ self._is_cleaning_up = True
+ try:
+ if not self._cleanup_handlers:
+ logger.info("No services to cleanup")
+ return
+
+ logger.info(f"Starting cleanup of {len(self._cleanup_handlers)} services...")
+
+ # Process in reverse order (LIFO)
+ for service_name, instance in reversed(self._cleanup_handlers):
+ try:
+ # Check for dispose_async first (preferred pattern)
+ if hasattr(instance, 'dispose_async'):
+ dispose_method = getattr(instance, 'dispose_async')
+ if inspect.iscoroutinefunction(dispose_method):
+ try:
+ await dispose_method()
+ logger.debug(f"Disposed {service_name} using dispose_async")
+ continue
+ except RuntimeError as e:
+ if "no running event loop" in str(e):
+ logger.warning(f"No event loop for {service_name}, skipping async cleanup")
+ continue
+ raise
+
+ # Fallback to close method
+ if hasattr(instance, 'close'):
+ close_method = getattr(instance, 'close')
+ if inspect.iscoroutinefunction(close_method):
+ try:
+ await close_method()
+ logger.debug(f"Cleaned up {service_name} using async close")
+ except RuntimeError as e:
+ if "no running event loop" in str(e):
+ logger.warning(f"No event loop for {service_name}, trying sync close")
+ continue
+ raise
+ else:
+ # Sync close method
+ close_method()
+ logger.debug(f"Cleaned up {service_name} using sync close")
+
+ except Exception as e:
+ logger.error(f"Error cleaning up {service_name}: {e}", exc_info=True)
+ # Continue with other services even if one fails
+
+ logger.info("Service cleanup complete")
+
+ finally:
+ # Always clear the state, even if cleanup failed
+ self._cleanup_handlers.clear()
+ self._services.clear()
+ self._initialized = False
+ self._is_cleaning_up = False
+
+ def clear(self) -> None:
+ """Clear all registered services synchronously (for emergency cleanup)."""
+ try:
+ # Try to cleanup sync services first
+ for service_name, instance in reversed(self._cleanup_handlers):
+ try:
+ if hasattr(instance, 'close'):
+ close_method = getattr(instance, 'close')
+ if not inspect.iscoroutinefunction(close_method):
+ close_method()
+ logger.debug(f"Sync cleanup of {service_name}")
+ except Exception as e:
+ logger.error(f"Error in sync cleanup of {service_name}: {e}")
+ except Exception as e:
+ logger.error(f"Error during sync cleanup: {e}")
+ finally:
+ # Always clear the registry
+ self._services.clear()
+ self._factories.clear()
+ self._cleanup_handlers.clear()
+ self._initialized = False
+ self._is_cleaning_up = False
+ logger.info("Service registry cleared")
+
+ @property
+ def is_initialized(self) -> bool:
+ """Check if the registry has been initialized."""
+ return self._initialized
+
+ def mark_initialized(self) -> None:
+ """Mark the registry as initialized."""
+ self._initialized = True
+
+ def get_all_services(self) -> List[str]:
+ """Get list of all registered service names."""
+ return [svc.__name__ for svc in self._services.keys()]
+
+def get_service_registry() -> ServiceRegistry:
+ """Get the singleton ServiceRegistry instance."""
+ return ServiceRegistry()
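+
+# Usage sketch (illustrative; SomeService stands in for any class with a
+# zero-argument constructor):
+#
+#   registry = get_service_registry()
+#   registry.register_factory(SomeService, SomeService)  # lazy: built on first get()
+#   service = registry.get(SomeService)                  # created and cached here
+#   assert registry.has(SomeService)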
\ No newline at end of file
diff --git a/Backend/python/src/exceptions/__init__.py b/Backend/python/src/exceptions/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Backend/python/src/exceptions/base_exception.py b/Backend/python/src/exceptions/base_exception.py
new file mode 100644
index 0000000..e7bef2b
--- /dev/null
+++ b/Backend/python/src/exceptions/base_exception.py
@@ -0,0 +1,69 @@
+from typing import List, Optional, Dict, Any, Tuple
+from fastapi import status
+from fastapi.responses import JSONResponse
+from fabric_api.models.error_source import ErrorSource
+from fabric_api.models.error_response import ErrorResponse
+from fabric_api.models.error_extended_information import ErrorExtendedInformation
+from fabric_api.models.name_value_pair import NameValuePair
+
+class WorkloadExceptionBase(Exception):
+ """Base class for workload exceptions."""
+
+ def __init__(
+ self,
+ http_status_code: int,
+ error_code: str,
+ message_template: str,
+ message_parameters: Optional[List[str]] = None,
+ error_source: ErrorSource = ErrorSource.SYSTEM,
+ is_permanent: bool = False
+ ):
+ self.http_status_code = http_status_code
+ self.error_code = error_code
+ self.message_template = message_template
+ self.message_parameters = message_parameters or []
+ self.error_source = error_source
+ self.is_permanent = is_permanent
+ self.details: List[ErrorExtendedInformation] = []
+
+ # Format the message with parameters
+ if message_parameters:
+ formatted_message = message_template.format(*message_parameters)
+ else:
+ formatted_message = message_template
+
+ super().__init__(formatted_message)
+
+ def with_detail(self, error_code: str, message_template: str, *parameters: Tuple[str, str]) -> 'WorkloadExceptionBase':
+ """Add detailed error information."""
+ parameter_values = [p[1] for p in parameters]
+
+ detail = ErrorExtendedInformation(
+ error_code=error_code,
+ message=message_template.format(*parameter_values),
+ message_parameters=parameter_values,
+ additional_parameters=[NameValuePair(name=p[0], value=p[1]) for p in parameters]
+ )
+
+ self.details.append(detail)
+ return self
+
+ def to_response(self) -> JSONResponse:
+ """Convert exception to FastAPI JSONResponse."""
+ response = ErrorResponse(
+ error_code=self.error_code,
+ message=str(self),
+ message_parameters=self.message_parameters if self.message_parameters else None,
+ source=self.error_source,
+ is_permanent=self.is_permanent,
+ more_details=self.details if self.details else None
+ )
+
+ return JSONResponse(
+ status_code=self.http_status_code,
+ content=response.model_dump(exclude_none=True)
+ )
+
+ def to_telemetry_string(self) -> str:
+ """Convert to string for telemetry purposes."""
+ return str(self)
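+
+# Usage sketch (illustrative values; concrete subclasses normally fix the
+# status code and error code, as in exceptions.py):
+#
+#   exc = WorkloadExceptionBase(
+#       http_status_code=status.HTTP_400_BAD_REQUEST,
+#       error_code="InvalidRequest",
+#       message_template="Field {0} is invalid",
+#       message_parameters=["operand1"],
+#   ).with_detail("InvalidField", "{0}", ("fieldName", "operand1"))
+#   response = exc.to_response()  # JSONResponse wrapping an ErrorResponse body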
\ No newline at end of file
diff --git a/Backend/python/src/exceptions/exceptions.py b/Backend/python/src/exceptions/exceptions.py
new file mode 100644
index 0000000..c72a5dc
--- /dev/null
+++ b/Backend/python/src/exceptions/exceptions.py
@@ -0,0 +1,237 @@
+from uuid import UUID
+from fastapi import status
+from typing import List, Optional
+
+from exceptions.base_exception import WorkloadExceptionBase
+from fabric_api.models.error_source import ErrorSource
+from constants.error_codes import ErrorCodes
+
+class InternalErrorException(WorkloadExceptionBase):
+ """Exception for internal errors."""
+
+ def __init__(self, message: str):
+ super().__init__(
+ http_status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ error_code=ErrorCodes.INTERNAL_ERROR,
+ message_template=message,
+ message_parameters=None,
+ error_source=ErrorSource.SYSTEM,
+ is_permanent=False
+ )
+ self.internal_message = message
+
+ def to_telemetry_string(self) -> str:
+ return self.internal_message
+
+class InvariantViolationException(InternalErrorException):
+ """Exception for invariant violations."""
+
+ def __init__(self, message: str):
+ super().__init__(message)
+
+ def to_telemetry_string(self) -> str:
+ return f"INVARIANT VIOLATION: {self.internal_message}"
+
+class InvalidRelativePathException(InternalErrorException):
+ """Exception for invalid relative paths."""
+
+ def __init__(self, relative_path: str):
+ super().__init__(f"The relative path is invalid: {relative_path}")
+
+class UnexpectedItemTypeException(InternalErrorException):
+ """Exception for unexpected item types."""
+
+ def __init__(self, message: str):
+ super().__init__(message)
+
+class UnauthorizedException(WorkloadExceptionBase):
+ """Exception for access denied situations."""
+
+ def __init__(self, message: str = "Access denied"):
+ super().__init__(
+ http_status_code=status.HTTP_403_FORBIDDEN,
+ error_code=ErrorCodes.Security.ACCESS_DENIED,
+ message_template=message,
+ message_parameters=None,
+ error_source=ErrorSource.USER,
+ is_permanent=True
+ )
+
+class AuthenticationException(WorkloadExceptionBase):
+ """Exception for authentication errors."""
+
+ def __init__(self, message: str):
+ super().__init__(
+ http_status_code=status.HTTP_401_UNAUTHORIZED,
+ error_code=ErrorCodes.Authentication.AUTH_ERROR,
+ message_template=message,
+ message_parameters=None,
+ error_source=ErrorSource.EXTERNAL,
+ is_permanent=False
+ )
+
+class AuthenticationUIRequiredException(WorkloadExceptionBase):
+ """Exception raised when UI authentication is required."""
+
+ ADDITIONAL_SCOPES_TO_CONSENT_NAME = "additionalScopesToConsent"
+ CLAIMS_FOR_CONDITIONAL_ACCESS_POLICY_NAME = "claimsForConditionalAccessPolicy"
+
+ def __init__(self, message: str):
+ super().__init__(
+ http_status_code=status.HTTP_401_UNAUTHORIZED,
+ error_code=ErrorCodes.Authentication.AUTH_UI_REQUIRED,
+ message_template=message,
+ message_parameters=None,
+ error_source=ErrorSource.SYSTEM,
+ is_permanent=False
+ )
+ # Initialize private attributes to store claims and scopes
+ self._claims_for_conditional_access = None
+ self._additional_scopes_to_consent = None
+
+ @property
+ def claims_for_conditional_access_policy(self) -> Optional[str]:
+ """Get claims for conditional access policy from Details, matching C# implementation."""
+ if self.details and len(self.details) > 0:
+ # Look for the claims in the first detail's additional parameters
+ for param in self.details[0].additional_parameters or []:
+ if param.name == self.CLAIMS_FOR_CONDITIONAL_ACCESS_POLICY_NAME:
+ return param.value
+ return self._claims_for_conditional_access
+
+ def add_claims_for_conditional_access(self, claims: str) -> 'AuthenticationUIRequiredException':
+ """Add claims for conditional access."""
+ self._claims_for_conditional_access = claims # Store the raw claims
+ self.with_detail(
+ "conditionalAccess",
+ "{0}",
+ (self.CLAIMS_FOR_CONDITIONAL_ACCESS_POLICY_NAME, claims)
+ )
+ return self
+
+ def add_scopes_to_consent(self, scopes: List[str]) -> 'AuthenticationUIRequiredException':
+ """Add scopes that need consent."""
+ self._additional_scopes_to_consent = scopes # Store the raw scopes list
+ self.with_detail(
+ "scopesToConsent",
+ "{0}",
+ (self.ADDITIONAL_SCOPES_TO_CONSENT_NAME, ", ".join(scopes))
+ )
+ return self
+
+ def to_www_authenticate_header(self) -> str:
+ """
+ Creates a WWW-Authenticate header value for this exception,
+ matching the C# AuthenticationService.AddBearerClaimToResponse logic.
+ """
+ header_parts = ["Bearer"]
+ error_description = str(self.message_template).replace('\r', ' ').replace('\n', ' ')
+
+ # Always include the authorization_uri for better client compatibility
+ header_parts.append('authorization_uri="https://login.microsoftonline.com/common/oauth2/authorize"')
+
+ if self._claims_for_conditional_access:
+ header_parts.append(f'error="invalid_token"')
+ header_parts.append(f'error_description="{error_description}"')
+ header_parts.append(f'claims="{self._claims_for_conditional_access}"')
+ elif self._additional_scopes_to_consent:
+ scopes_str = " ".join(self._additional_scopes_to_consent)
+ header_parts.append(f'error="insufficient_scope"')
+ header_parts.append(f'error_description="{error_description}"')
+ header_parts.append(f'scope="{scopes_str}"')
+ else:
+ header_parts.append(f'error="interaction_required"')
+ header_parts.append(f'error_description="{error_description}"')
+
+ return ", ".join(header_parts)
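+
+    # Example header produced for a missing-consent flow (illustrative):
+    #
+    #   AuthenticationUIRequiredException("Consent required") \
+    #       .add_scopes_to_consent(["Item1.Read.All"]) \
+    #       .to_www_authenticate_header()
+    #
+    # -> Bearer, authorization_uri="https://login.microsoftonline.com/common/oauth2/authorize",
+    #    error="insufficient_scope", error_description="Consent required", scope="Item1.Read.All"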
+
+class TooManyRequestsException(WorkloadExceptionBase):
+ """Exception for rate-limiting (429 Too Many Requests)."""
+
+ def __init__(self, message: str = "Too many requests"):
+ super().__init__(
+ http_status_code=status.HTTP_429_TOO_MANY_REQUESTS,
+ error_code=ErrorCodes.RateLimiting.TOO_MANY_REQUESTS,
+ message_template=message,
+ message_parameters=None,
+ error_source=ErrorSource.USER,
+ is_permanent=False
+ )
+
+class InvalidItemPayloadException(WorkloadExceptionBase):
+ """Exception for invalid item payloads."""
+
+ def __init__(self, item_type: str, item_id: str):
+ super().__init__(
+ http_status_code=status.HTTP_400_BAD_REQUEST,
+ error_code=ErrorCodes.ItemPayload.INVALID_ITEM_PAYLOAD,
+ message_template="{0} payload is invalid for id={1}. See MoreDetails for additional information.",
+ message_parameters=[item_type, item_id],
+ error_source=ErrorSource.USER,
+ is_permanent=True
+ )
+
+class DoubledOperandsOverflowException(WorkloadExceptionBase):
+ """Exception for overflow in doubled operands."""
+
+ def __init__(self, message_parameters: List[str]):
+ super().__init__(
+ http_status_code=status.HTTP_400_BAD_REQUEST,
+ error_code=ErrorCodes.Item.DOUBLED_OPERANDS_OVERFLOW,
+ message_template="{0} may lead to overflow",
+ message_parameters=message_parameters,
+ error_source=ErrorSource.USER,
+ is_permanent=False
+ )
+
+class ItemMetadataNotFoundException(WorkloadExceptionBase):
+ """Exception raised when an item's metadata cannot be found."""
+
+ def __init__(self, item_object_id: UUID):
+ super().__init__(
+ http_status_code=status.HTTP_404_NOT_FOUND,
+ error_code=ErrorCodes.Item.ITEM_METADATA_NOT_FOUND,
+ message_template="Item metadata file cannot be found. It is advised to delete this item and create a new item instead (ItemId: {0})",
+ message_parameters=[str(item_object_id)],
+ error_source=ErrorSource.SYSTEM,
+ is_permanent=True
+ )
+
+class InvalidParameterException(WorkloadExceptionBase):
+ """Exception for invalid parameters."""
+
+ def __init__(self, parameter_name: str, message: str):
+ super().__init__(
+ http_status_code=status.HTTP_400_BAD_REQUEST,
+ error_code=ErrorCodes.INVALID_PARAMETER,
+ message_template="Invalid parameter '{0}': {1}",
+ message_parameters=[parameter_name, message],
+ error_source=ErrorSource.USER,
+ is_permanent=True
+ )
+
+class KustoDataException(WorkloadExceptionBase):
+ """Exception for Kusto data errors."""
+
+ def __init__(self, message: str):
+ super().__init__(
+ http_status_code=status.HTTP_400_BAD_REQUEST,
+ error_code=ErrorCodes.Kusto.KUSTO_DATA_EXCEPTION,
+ message_template=message,
+ message_parameters=None,
+ error_source=ErrorSource.USER,
+ is_permanent=True
+ )
+
+class MissingLakehouseReferenceException(WorkloadExceptionBase):
+ """Exception raised when a lakehouse reference is required but missing."""
+
+ def __init__(self):
+ super().__init__(
+ http_status_code=status.HTTP_400_BAD_REQUEST,
+ error_code=ErrorCodes.INVALID_REQUEST,
+ message_template="Missing lakehouse reference",
+ message_parameters=None,
+ error_source=ErrorSource.USER,
+ is_permanent=True
+ )
diff --git a/Backend/python/src/fabric_api/apis/__init__.py b/Backend/python/src/fabric_api/apis/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Backend/python/src/fabric_api/apis/endpoint_resolution_api.py b/Backend/python/src/fabric_api/apis/endpoint_resolution_api.py
new file mode 100644
index 0000000..73dc326
--- /dev/null
+++ b/Backend/python/src/fabric_api/apis/endpoint_resolution_api.py
@@ -0,0 +1,59 @@
+# coding: utf-8
+
+from typing import Dict, List # noqa: F401
+import importlib
+import pkgutil
+
+from fabric_api.apis.endpoint_resolution_api_base import BaseEndpointResolutionApi
+import fabric_api.impl
+
+from fastapi import ( # noqa: F401
+ APIRouter,
+ Body,
+ Cookie,
+ Depends,
+ Form,
+ Header,
+ HTTPException,
+ Path,
+ Query,
+ Response,
+ Security,
+ status,
+)
+
+from fabric_api.models.extra_models import TokenModel # noqa: F401
+from pydantic import Field, StrictStr
+from typing_extensions import Annotated
+from fabric_api.models.endpoint_resolution_request import EndpointResolutionRequest
+from fabric_api.models.endpoint_resolution_response import EndpointResolutionResponse
+from fabric_api.models.error_response import ErrorResponse
+
+
+router = APIRouter()
+
+ns_pkg = fabric_api.impl
+for _, name, _ in pkgutil.iter_modules(ns_pkg.__path__, ns_pkg.__name__ + "."):
+ importlib.import_module(name)
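+# The loop above eagerly imports every module under fabric_api.impl so that
+# concrete controllers subclassing BaseEndpointResolutionApi register
+# themselves via __init_subclass__ before the first request is dispatched.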
+
+
+@router.post(
+ "/resolve-api-path-placeholder",
+ responses={
+ 200: {"model": EndpointResolutionResponse, "description": "Endpoint resolution response"},
+        "default": {"model": ErrorResponse, "description": "For error conditions the workload should return an appropriate HTTP error status code (4xx, 5xx) with detailed error information in the response body."},
+ },
+ tags=["EndpointResolution"],
+ summary="Resolve an endpoint for a given service called by Microsoft Fabric",
+ response_model_by_alias=True,
+)
+async def endpoint_resolution_resolve(
+ activity_id: Annotated[StrictStr, Field(description="A unique ID for correlating the request with your system when a user interacts with your workload.")] = Header(None, description="A unique ID for correlating the request with your system when a user interacts with your workload."),
+ request_id: Annotated[StrictStr, Field(description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue.")] = Header(None, description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue."),
+ authorization: Annotated[StrictStr, Field(description="A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`.")] = Header(None, description=r"A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`."),
+ body: Annotated[EndpointResolutionRequest, Field(description="Endpoint resolution request payload")] = Body(None, description="Endpoint resolution request payload"),
+) -> EndpointResolutionResponse:
+ """Resolves the endpoint for a given service called by Microsoft Fabric based on the tenant's region and workspace region. Fabric provides a set of context properties and returns the appropriate service endpoint URL and its time-to-live (TTL). The Endpoint Resolution API is crucial for services that require dynamic endpoint determination based on operational context. This allows for optimized routing and regional compliance. To resolve an endpoint, Fabric will send a POST request with the required context properties in the request body. The response will contain the resolved URL and its TTL, which indicates how long the URL is considered valid. For a sample implementation and usage examples, please refer to the [Endpoint Resolution Sample Code](https://github.com/microsoft/Microsoft-Fabric-workload-development-sample/blob/main/Backend/src/Controllers/EndpointResolutionControllerImpl.cs)."""
+ if not BaseEndpointResolutionApi.subclasses:
+ raise HTTPException(status_code=500, detail="Not implemented")
+ return await BaseEndpointResolutionApi.subclasses[0]().endpoint_resolution_resolve(activity_id, request_id, authorization, body)
diff --git a/Backend/python/src/fabric_api/apis/endpoint_resolution_api_base.py b/Backend/python/src/fabric_api/apis/endpoint_resolution_api_base.py
new file mode 100644
index 0000000..a3ab6bb
--- /dev/null
+++ b/Backend/python/src/fabric_api/apis/endpoint_resolution_api_base.py
@@ -0,0 +1,26 @@
+# coding: utf-8
+
+from typing import ClassVar, Dict, List, Tuple # noqa: F401
+
+from pydantic import Field, StrictStr
+from typing_extensions import Annotated
+from fabric_api.models.endpoint_resolution_request import EndpointResolutionRequest
+from fabric_api.models.endpoint_resolution_response import EndpointResolutionResponse
+from fabric_api.models.error_response import ErrorResponse
+
+
+class BaseEndpointResolutionApi:
+ subclasses: ClassVar[Tuple] = ()
+
+ def __init_subclass__(cls, **kwargs):
+ super().__init_subclass__(**kwargs)
+ BaseEndpointResolutionApi.subclasses = BaseEndpointResolutionApi.subclasses + (cls,)
+ async def endpoint_resolution_resolve(
+ self,
+ activity_id: Annotated[StrictStr, Field(description="A unique ID for correlating the request with your system when a user interacts with your workload.")],
+ request_id: Annotated[StrictStr, Field(description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue.")],
+ authorization: Annotated[StrictStr, Field(description="A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`.")],
+ body: Annotated[EndpointResolutionRequest, Field(description="Endpoint resolution request payload")],
+ ) -> EndpointResolutionResponse:
+ """Resolves the endpoint for a given service called by Microsoft Fabric based on the tenant's region and workspace region. Fabric provides a set of context properties and returns the appropriate service endpoint URL and its time-to-live (TTL). The Endpoint Resolution API is crucial for services that require dynamic endpoint determination based on operational context. This allows for optimized routing and regional compliance. To resolve an endpoint, Fabric will send a POST request with the required context properties in the request body. The response will contain the resolved URL and its TTL, which indicates how long the URL is considered valid. For a sample implementation and usage examples, please refer to the [Endpoint Resolution Sample Code](https://github.com/microsoft/Microsoft-Fabric-workload-development-sample/blob/main/Backend/src/Controllers/EndpointResolutionControllerImpl.cs)."""
+ ...
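+
+# Implementation sketch (hypothetical controller; registration happens
+# automatically through __init_subclass__, which appends each subclass to
+# BaseEndpointResolutionApi.subclasses so the router can dispatch to
+# subclasses[0]):
+#
+#   class EndpointResolutionController(BaseEndpointResolutionApi):
+#       async def endpoint_resolution_resolve(self, activity_id, request_id,
+#                                             authorization, body):
+#           # Build an EndpointResolutionResponse with the resolved URL and
+#           # its TTL; field names follow the generated model.
+#           ...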
diff --git a/Backend/python/src/fabric_api/apis/item_lifecycle_api.py b/Backend/python/src/fabric_api/apis/item_lifecycle_api.py
new file mode 100644
index 0000000..f72bd4b
--- /dev/null
+++ b/Backend/python/src/fabric_api/apis/item_lifecycle_api.py
@@ -0,0 +1,154 @@
+# coding: utf-8
+
+from typing import Dict, List # noqa: F401
+import importlib
+import pkgutil
+from uuid import UUID
+
+from fabric_api.apis.item_lifecycle_api_base import BaseItemLifecycleApi
+import fabric_api.impl
+
+from fastapi import ( # noqa: F401
+ APIRouter,
+ Body,
+ Cookie,
+ Depends,
+ Form,
+ Header,
+ HTTPException,
+ Path,
+ Query,
+ Response,
+ Security,
+ status,
+)
+
+from fabric_api.models.extra_models import TokenModel # noqa: F401
+from pydantic import Field, StrictStr
+from typing import Any
+from typing_extensions import Annotated
+from fabric_api.models.create_item_request import CreateItemRequest
+from fabric_api.models.error_response import ErrorResponse
+from fabric_api.models.get_item_payload_response import GetItemPayloadResponse
+from fabric_api.models.update_item_request import UpdateItemRequest
+
+
+router = APIRouter()
+
+ns_pkg = fabric_api.impl
+for _, name, _ in pkgutil.iter_modules(ns_pkg.__path__, ns_pkg.__name__ + "."):
+ importlib.import_module(name)
+
+
+@router.post(
+ "/workspaces/{workspaceId}/items/{itemType}/{itemId}",
+ responses={
+ 200: {"description": "Successfully created."},
+        "default": {"model": ErrorResponse, "description": "For error conditions the workload should return an appropriate HTTP error status code (4xx, 5xx) with detailed error information in the response body."},
+ },
+ tags=["ItemLifecycle"],
+ summary="Called by Microsoft Fabric for creating a new item.",
+ response_model_by_alias=True,
+)
+async def item_lifecycle_create_item(
+ workspaceId: Annotated[StrictStr, Field(description="The workspace ID.")] = Path(..., description="The workspace ID."),
+ itemType: Annotated[StrictStr, Field(description="The item type.")] = Path(..., description="The item type."),
+ itemId: Annotated[StrictStr, Field(description="The item ID.")] = Path(..., description="The item ID."),
+ activity_id: Annotated[StrictStr, Field(description="A unique ID for correlating the request with your system when a user interacts with your workload.")] = Header(None, description="A unique ID for correlating the request with your system when a user interacts with your workload."),
+ request_id: Annotated[StrictStr, Field(description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue.")] = Header(None, description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue."),
+ authorization: Annotated[StrictStr, Field(description="A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`.")] = Header(None, description=r"A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`."),
+ x_ms_client_tenant_id: Annotated[StrictStr, Field(description="The tenant ID of the client making the request.")] = Header(None, description="The tenant ID of the client making the request."),
+ create_item_request: Annotated[CreateItemRequest, Field(description="The item creation request.")] = Body(None, description="The item creation request."),
+) -> None:
+ """Upon item creation Fabric performs basic validations, creates the item in a provisioning state and calls this API to notify the workload. The workload is expected to perform required validations, store the item metadata, allocate required resources, and update the Fabric item metadata cache with item relations and ETag. This API should accept SubjectAndApp authentication. ## Permissions Permissions are checked by Microsoft Fabric."""
+ if not BaseItemLifecycleApi.subclasses:
+ raise HTTPException(status_code=500, detail="Not implemented")
+
+ workspaceId = UUID(workspaceId)
+ itemId = UUID(itemId)
+ return await BaseItemLifecycleApi.subclasses[0]().item_lifecycle_create_item(workspaceId, itemType, itemId, activity_id, request_id, authorization, x_ms_client_tenant_id, create_item_request)
+
+
+@router.delete(
+ "/workspaces/{workspaceId}/items/{itemType}/{itemId}",
+ responses={
+ 200: {"description": "Successfully deleted."},
+        "default": {"model": ErrorResponse, "description": "For error conditions the workload should return an appropriate HTTP error status code (4xx, 5xx) with detailed error information in the response body."},
+ },
+ tags=["ItemLifecycle"],
+ summary="Called by Microsoft Fabric for deleting an existing item.",
+ response_model_by_alias=True,
+)
+async def item_lifecycle_delete_item(
+ workspaceId: Annotated[StrictStr, Field(description="The workspace ID.")] = Path(..., description="The workspace ID."),
+ itemType: Annotated[StrictStr, Field(description="The item type.")] = Path(..., description="The item type."),
+ itemId: Annotated[StrictStr, Field(description="The item ID.")] = Path(..., description="The item ID."),
+ activity_id: Annotated[StrictStr, Field(description="A unique ID for correlating the request with your system when a user interacts with your workload.")] = Header(None, description="A unique ID for correlating the request with your system when a user interacts with your workload."),
+ request_id: Annotated[StrictStr, Field(description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue.")] = Header(None, description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue."),
+ authorization: Annotated[StrictStr, Field(description="A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`.")] = Header(None, description=r"A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`."),
+ x_ms_client_tenant_id: Annotated[StrictStr, Field(description="The tenant ID of the client making the request.")] = Header(None, description="The tenant ID of the client making the request."),
+) -> None:
+ """Upon item deletion Fabric performs basic validations and calls this API to notify the workload. The workload is expected to delete the item metadata and free resources. This API should accept SubjectAndApp authentication. However, the subject token may be unavailable in some cases. ## Permissions Permissions are checked by Microsoft Fabric."""
+ if not BaseItemLifecycleApi.subclasses:
+ raise HTTPException(status_code=500, detail="Not implemented")
+
+ workspaceId = UUID(workspaceId)
+ itemId = UUID(itemId)
+ return await BaseItemLifecycleApi.subclasses[0]().item_lifecycle_delete_item(workspaceId, itemType, itemId, activity_id, request_id, authorization, x_ms_client_tenant_id)
+
+
+@router.get(
+ "/workspaces/{workspaceId}/items/{itemType}/{itemId}/payload",
+ responses={
+ 200: {"model": GetItemPayloadResponse, "description": "Completed successfully."},
+        "default": {"model": ErrorResponse, "description": "For error conditions the workload should return an appropriate HTTP error status code (4xx, 5xx) with detailed error information in the response body."},
+ },
+ tags=["ItemLifecycle"],
+ summary="Called by Microsoft Fabric for retrieving the workload payload for an item.",
+ response_model_by_alias=True,
+)
+async def item_lifecycle_get_item_payload(
+ workspaceId: Annotated[StrictStr, Field(description="The workspace ID.")] = Path(..., description="The workspace ID."),
+ itemType: Annotated[StrictStr, Field(description="The item type.")] = Path(..., description="The item type."),
+ itemId: Annotated[StrictStr, Field(description="The item ID.")] = Path(..., description="The item ID."),
+ activity_id: Annotated[StrictStr, Field(description="A unique ID for correlating the request with your system when a user interacts with your workload.")] = Header(None, description="A unique ID for correlating the request with your system when a user interacts with your workload."),
+ request_id: Annotated[StrictStr, Field(description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue.")] = Header(None, description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue."),
+ authorization: Annotated[StrictStr, Field(description="A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`.")] = Header(None, description=r"A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`."),
+ x_ms_client_tenant_id: Annotated[StrictStr, Field(description="The tenant ID of the client making the request.")] = Header(None, description="The tenant ID of the client making the request."),
+) -> GetItemPayloadResponse:
+ """When the item editor <iframe> requests an item, Fabric performs basic validations and calls this API to retrieve the payload from the workload. This API accepts SubjectAndApp authentication. ## Permissions Permissions are checked by Microsoft Fabric."""
+ if not BaseItemLifecycleApi.subclasses:
+ raise HTTPException(status_code=500, detail="Not implemented")
+
+ workspaceId = UUID(workspaceId)
+ itemId = UUID(itemId)
+ return await BaseItemLifecycleApi.subclasses[0]().item_lifecycle_get_item_payload(workspaceId, itemType, itemId, activity_id, request_id, authorization, x_ms_client_tenant_id)
+
+
+@router.patch(
+ "/workspaces/{workspaceId}/items/{itemType}/{itemId}",
+ responses={
+ 200: {"description": "Successfully updated."},
+        "default": {"model": ErrorResponse, "description": "For error conditions the workload should return an appropriate HTTP error status code (4xx, 5xx) with detailed error information in the response body."},
+ },
+ tags=["ItemLifecycle"],
+ summary="Called by Microsoft Fabric for updating an existing item.",
+ response_model_by_alias=True,
+)
+async def item_lifecycle_update_item(
+ workspaceId: Annotated[StrictStr, Field(description="The workspace ID.")] = Path(..., description="The workspace ID."),
+ itemType: Annotated[StrictStr, Field(description="The item type.")] = Path(..., description="The item type."),
+ itemId: Annotated[StrictStr, Field(description="The item ID.")] = Path(..., description="The item ID."),
+ activity_id: Annotated[StrictStr, Field(description="A unique ID for correlating the request with your system when a user interacts with your workload.")] = Header(None, description="A unique ID for correlating the request with your system when a user interacts with your workload."),
+ request_id: Annotated[StrictStr, Field(description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue.")] = Header(None, description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue."),
+ authorization: Annotated[StrictStr, Field(description="A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`.")] = Header(None, description=r"A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`."),
+ x_ms_client_tenant_id: Annotated[StrictStr, Field(description="The tenant ID of the client making the request.")] = Header(None, description="The tenant ID of the client making the request."),
+ update_item_request: Annotated[UpdateItemRequest, Field(description="The item update request.")] = Body(None, description="The item update request."),
+) -> None:
+ """Upon item update Fabric performs basic validations and calls this API to notify the workload. The workload is expected to perform required validations, store the item metadata, allocate and/or free resources, and update the Fabric item metadata cache with item relations and ETag. This API should accept SubjectAndApp authentication. ## Permissions Permissions are checked by Microsoft Fabric."""
+ if not BaseItemLifecycleApi.subclasses:
+ raise HTTPException(status_code=500, detail="Not implemented")
+
+ workspaceId = UUID(workspaceId)
+ itemId = UUID(itemId)
+ return await BaseItemLifecycleApi.subclasses[0]().item_lifecycle_update_item(workspaceId, itemType, itemId, activity_id, request_id, authorization, x_ms_client_tenant_id, update_item_request)
diff --git a/Backend/python/src/fabric_api/apis/item_lifecycle_api_base.py b/Backend/python/src/fabric_api/apis/item_lifecycle_api_base.py
new file mode 100644
index 0000000..0fcd880
--- /dev/null
+++ b/Backend/python/src/fabric_api/apis/item_lifecycle_api_base.py
@@ -0,0 +1,75 @@
+# coding: utf-8
+
+from typing import ClassVar, Dict, List, Tuple # noqa: F401
+
+from pydantic import Field, StrictStr
+from typing import Any
+from typing_extensions import Annotated
+from fabric_api.models.create_item_request import CreateItemRequest
+from fabric_api.models.error_response import ErrorResponse
+from fabric_api.models.get_item_payload_response import GetItemPayloadResponse
+from fabric_api.models.update_item_request import UpdateItemRequest
+
+
+class BaseItemLifecycleApi:
+ subclasses: ClassVar[Tuple] = ()
+
+ def __init_subclass__(cls, **kwargs):
+ super().__init_subclass__(**kwargs)
+ BaseItemLifecycleApi.subclasses = BaseItemLifecycleApi.subclasses + (cls,)
+ async def item_lifecycle_create_item(
+ self,
+ workspaceId: Annotated[StrictStr, Field(description="The workspace ID.")],
+ itemType: Annotated[StrictStr, Field(description="The item type.")],
+ itemId: Annotated[StrictStr, Field(description="The item ID.")],
+ activity_id: Annotated[StrictStr, Field(description="A unique ID for correlating the request with your system when a user interacts with your workload.")],
+ request_id: Annotated[StrictStr, Field(description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue.")],
+ authorization: Annotated[StrictStr, Field(description="A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`.")],
+ x_ms_client_tenant_id: Annotated[StrictStr, Field(description="The tenant ID of the client making the request.")],
+ create_item_request: Annotated[CreateItemRequest, Field(description="The item creation request.")],
+ ) -> None:
+ """Upon item creation Fabric performs basic validations, creates the item in a provisioning state and calls this API to notify the workload. The workload is expected to perform required validations, store the item metadata, allocate required resources, and update the Fabric item metadata cache with item relations and ETag. This API should accept SubjectAndApp authentication. ## Permissions Permissions are checked by Microsoft Fabric."""
+ ...
+
+
+ async def item_lifecycle_delete_item(
+ self,
+ workspaceId: Annotated[StrictStr, Field(description="The workspace ID.")],
+ itemType: Annotated[StrictStr, Field(description="The item type.")],
+ itemId: Annotated[StrictStr, Field(description="The item ID.")],
+ activity_id: Annotated[StrictStr, Field(description="A unique ID for correlating the request with your system when a user interacts with your workload.")],
+ request_id: Annotated[StrictStr, Field(description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue.")],
+ authorization: Annotated[StrictStr, Field(description="A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`.")],
+ x_ms_client_tenant_id: Annotated[StrictStr, Field(description="The tenant ID of the client making the request.")],
+ ) -> None:
+ """Upon item deletion Fabric performs basic validations and calls this API to notify the workload. The workload is expected to delete the item metadata and free resources. This API should accept SubjectAndApp authentication. However, the subject token may be unavailable in some cases. ## Permissions Permissions are checked by Microsoft Fabric."""
+ ...
+
+
+ async def item_lifecycle_get_item_payload(
+ self,
+ workspaceId: Annotated[StrictStr, Field(description="The workspace ID.")],
+ itemType: Annotated[StrictStr, Field(description="The item type.")],
+ itemId: Annotated[StrictStr, Field(description="The item ID.")],
+ activity_id: Annotated[StrictStr, Field(description="A unique ID for correlating the request with your system when a user interacts with your workload.")],
+ request_id: Annotated[StrictStr, Field(description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue.")],
+ authorization: Annotated[StrictStr, Field(description="A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`.")],
+ x_ms_client_tenant_id: Annotated[StrictStr, Field(description="The tenant ID of the client making the request.")],
+ ) -> GetItemPayloadResponse:
+ """When the item editor <iframe> requests an item, Fabric performs basic validations and calls this API to retrieve the payload from the workload. This API accepts SubjectAndApp authentication. ## Permissions Permissions are checked by Microsoft Fabric."""
+ ...
+
+
+ async def item_lifecycle_update_item(
+ self,
+ workspaceId: Annotated[StrictStr, Field(description="The workspace ID.")],
+ itemType: Annotated[StrictStr, Field(description="The item type.")],
+ itemId: Annotated[StrictStr, Field(description="The item ID.")],
+ activity_id: Annotated[StrictStr, Field(description="A unique ID for correlating the request with your system when a user interacts with your workload.")],
+ request_id: Annotated[StrictStr, Field(description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue.")],
+ authorization: Annotated[StrictStr, Field(description="A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`.")],
+ x_ms_client_tenant_id: Annotated[StrictStr, Field(description="The tenant ID of the client making the request.")],
+ update_item_request: Annotated[UpdateItemRequest, Field(description="The item update request.")],
+ ) -> None:
+ """Upon item update Fabric performs basic validations and calls this API to notify the workload. The workload is expected to perform required validations, store the item metadata, allocate and/or free resources, and update the Fabric item metadata cache with item relations and ETag. This API should accept SubjectAndApp authentication. ## Permissions Permissions are checked by Microsoft Fabric."""
+ ...
diff --git a/Backend/python/src/fabric_api/apis/jobs_api.py b/Backend/python/src/fabric_api/apis/jobs_api.py
new file mode 100644
index 0000000..6e2408c
--- /dev/null
+++ b/Backend/python/src/fabric_api/apis/jobs_api.py
@@ -0,0 +1,164 @@
+# coding: utf-8
+
+from typing import Dict, List # noqa: F401
+import importlib
+import pkgutil
+import logging
+from uuid import UUID
+
+from fabric_api.apis.jobs_api_base import BaseJobsApi
+import fabric_api.impl
+
+from fastapi import ( # noqa: F401
+ APIRouter,
+ Body,
+ Cookie,
+ Depends,
+ Form,
+ Header,
+ HTTPException,
+ Path,
+ Query,
+ Response,
+ Security,
+ status,
+)
+
+from fabric_api.models.extra_models import TokenModel # noqa: F401
+from pydantic import Field, StrictStr
+from typing import Any
+from typing_extensions import Annotated
+from fabric_api.models.job_invoke_type import JobInvokeType
+from fabric_api.models.create_item_job_instance_request import CreateItemJobInstanceRequest
+from fabric_api.models.error_response import ErrorResponse
+from fabric_api.models.item_job_instance_state import ItemJobInstanceState
+
+from fastapi import Request
+
+logger = logging.getLogger(__name__)
+router = APIRouter()
+
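+# Import every module under fabric_api.impl so that the controller classes
+# defined there subclass BaseJobsApi and register themselves through
+# __init_subclass__ before the routes below dispatch to them.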
+ns_pkg = fabric_api.impl
+for _, name, _ in pkgutil.iter_modules(ns_pkg.__path__, ns_pkg.__name__ + "."):
+ importlib.import_module(name)
+
+
+@router.post(
+ "/workspaces/{workspaceId}/items/{itemType}/{itemId}/jobTypes/{jobType}/instances/{jobInstanceId}/cancel",
+ responses={
+ 200: {"model": ItemJobInstanceState, "description": "Completed successfully."},
+ 400: {"model": ErrorResponse, "description": "Bad request"},
+ 401: {"model": ErrorResponse, "description": "Unauthorized"},
+ 403: {"model": ErrorResponse, "description": "Forbidden"},
+ 500: {"model": ErrorResponse, "description": "Server error"},
+ },
+ tags=["Jobs"],
+ summary="Called by Microsoft Fabric for cancelling a job instance.",
+ response_model_by_alias=True,
+)
+async def jobs_cancel_item_job_instance(
+ workspaceId: Annotated[StrictStr, Field(description="The workspace ID.")] = Path(..., description="The workspace ID."),
+ itemType: Annotated[StrictStr, Field(description="The item type.")] = Path(..., description="The item type."),
+ itemId: Annotated[StrictStr, Field(description="The item ID.")] = Path(..., description="The item ID."),
+ jobType: Annotated[StrictStr, Field(description="The job type.")] = Path(..., description="The job type."),
+ jobInstanceId: Annotated[StrictStr, Field(description="The job instance ID.")] = Path(..., description="The job instance ID."),
+ activity_id: Annotated[StrictStr, Field(description="A unique ID for correlating the request with your system when a user interacts with your workload.")] = Header(None, description="A unique ID for correlating the request with your system when a user interacts with your workload."),
+ request_id: Annotated[StrictStr, Field(description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue.")] = Header(None, description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue."),
+ authorization: Annotated[StrictStr, Field(description="A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`.")] = Header(None, description=r"A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`."),
+ x_ms_client_tenant_id: Annotated[StrictStr, Field(description="The tenant ID of the client making the request.")] = Header(None, description="The tenant ID of the client making the request."),
+) -> ItemJobInstanceState:
+ """Fabric performs basic validations and calls this API to cancel an item job instance in the workload. This API should accept SubjectAndApp authentication. ## Permissions Permissions are checked by Microsoft Fabric."""
+ logger.info(f"Cancelling job instance: {workspaceId}/{itemType}/{itemId}/{jobType}/{jobInstanceId}")
+
+ if not BaseJobsApi.subclasses:
+ raise HTTPException(status_code=500, detail="Not implemented")
+
+ workspace_id = UUID(workspaceId)
+ item_id = UUID(itemId)
+ job_instance_id = UUID(jobInstanceId)
+
+ return await BaseJobsApi.subclasses[0]().jobs_cancel_item_job_instance(
+ workspace_id, itemType, item_id, jobType, job_instance_id,
+ activity_id, request_id, authorization, x_ms_client_tenant_id
+ )
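+
+# A minimal sketch of the request shape Fabric sends to the cancel route
+# above (host, IDs, and tokens are placeholders; header names are the ones
+# FastAPI derives from the parameters):
+#
+# import httpx
+# httpx.post(
+#     "http://localhost:5000/workspaces/<workspaceId>/items/<itemType>"
+#     "/<itemId>/jobTypes/<jobType>/instances/<jobInstanceId>/cancel",
+#     headers={
+#         "activity-id": "<activity ID>",
+#         "request-id": "<request ID>",
+#         "authorization": 'SubjectAndAppToken1.0 subjectToken="<delegated token>", appToken="<S2S token>"',
+#         "x-ms-client-tenant-id": "<tenant ID>",
+#     },
+# )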
+
+@router.post(
+ "/workspaces/{workspaceId}/items/{itemType}/{itemId}/jobTypes/{jobType}/instances/{jobInstanceId}",
+ responses={
+ 202: {"description": "Successfully scheduled."},
+ 400: {"model": ErrorResponse, "description": "Bad request"},
+ 401: {"model": ErrorResponse, "description": "Unauthorized"},
+ 403: {"model": ErrorResponse, "description": "Forbidden"},
+ 500: {"model": ErrorResponse, "description": "Server error"},
+ },
+ tags=["Jobs"],
+ summary="Called by Microsoft Fabric for starting a new job instance.",
+ response_model_by_alias=True,
+ status_code=status.HTTP_202_ACCEPTED,
+)
+async def jobs_create_item_job_instance(
+ workspaceId: Annotated[StrictStr, Field(description="The workspace ID.")] = Path(..., description="The workspace ID."),
+ itemType: Annotated[StrictStr, Field(description="The item type.")] = Path(..., description="The item type."),
+ itemId: Annotated[StrictStr, Field(description="The item ID.")] = Path(..., description="The item ID."),
+ jobType: Annotated[StrictStr, Field(description="The job type.")] = Path(..., description="The job type."),
+ jobInstanceId: Annotated[StrictStr, Field(description="The job instance ID.")] = Path(..., description="The job instance ID."),
+ activity_id: Annotated[StrictStr, Field(description="A unique ID for correlating the request with your system when a user interacts with your workload.")] = Header(None, description="A unique ID for correlating the request with your system when a user interacts with your workload."),
+ request_id: Annotated[StrictStr, Field(description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue.")] = Header(None, description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue."),
+ authorization: Annotated[StrictStr, Field(description="A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`.")] = Header(None, description=r"A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`."),
+ x_ms_client_tenant_id: Annotated[StrictStr, Field(description="The tenant ID of the client making the request.")] = Header(None, description="The tenant ID of the client making the request."),
+ create_item_job_instance_request: Annotated[CreateItemJobInstanceRequest, Field(description="The job instance properties.")] = Body(None, description="The job instance properties."),
+) -> None:
+ """Fabric performs basic validations and calls this API to start a new instance of the job in the workload. This API should accept SubjectAndApp authentication. ## Permissions Permissions are checked by Microsoft Fabric."""
+ logger.info(f"Creating job instance: {workspaceId}/{itemType}/{itemId}/{jobType}/{jobInstanceId}")
+ logger.info(f"Request body: {create_item_job_instance_request}")
+ if not BaseJobsApi.subclasses:
+ raise HTTPException(status_code=500, detail="Not implemented")
+
+ workspace_id = UUID(workspaceId)
+ item_id = UUID(itemId)
+ job_instance_id = UUID(jobInstanceId)
+
+ await BaseJobsApi.subclasses[0]().jobs_create_item_job_instance(
+ workspace_id, itemType, item_id, jobType, job_instance_id,
+ activity_id, request_id, authorization, x_ms_client_tenant_id,
+ create_item_job_instance_request
+ )
+ return None
+
+
+@router.get(
+ "/workspaces/{workspaceId}/items/{itemType}/{itemId}/jobTypes/{jobType}/instances/{jobInstanceId}",
+ responses={
+ 200: {"model": ItemJobInstanceState, "description": "Completed successfully."},
+ 400: {"model": ErrorResponse, "description": "Bad request"},
+ 401: {"model": ErrorResponse, "description": "Unauthorized"},
+ 403: {"model": ErrorResponse, "description": "Forbidden"},
+ 500: {"model": ErrorResponse, "description": "Server error"},
+ },
+ tags=["Jobs"],
+ summary="Called by Microsoft Fabric for retrieving a job instance state.",
+ response_model_by_alias=True,
+)
+async def jobs_get_item_job_instance_state(
+ workspaceId: Annotated[StrictStr, Field(description="The workspace ID.")] = Path(..., description="The workspace ID."),
+ itemType: Annotated[StrictStr, Field(description="The item type.")] = Path(..., description="The item type."),
+ itemId: Annotated[StrictStr, Field(description="The item ID.")] = Path(..., description="The item ID."),
+ jobType: Annotated[StrictStr, Field(description="The job type.")] = Path(..., description="The job type."),
+ jobInstanceId: Annotated[StrictStr, Field(description="The job instance ID.")] = Path(..., description="The job instance ID."),
+ activity_id: Annotated[StrictStr, Field(description="A unique ID for correlating the request with your system when a user interacts with your workload.")] = Header(None, description="A unique ID for correlating the request with your system when a user interacts with your workload."),
+ request_id: Annotated[StrictStr, Field(description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue.")] = Header(None, description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue."),
+ authorization: Annotated[StrictStr, Field(description="A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`.")] = Header(None, description=r"A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`."),
+ x_ms_client_tenant_id: Annotated[StrictStr, Field(description="The tenant ID of the client making the request.")] = Header(None, description="The tenant ID of the client making the request."),
+) -> ItemJobInstanceState:
+ """Fabric performs basic validations and calls this API to retrieve the item job instance state in the workload. This API should accept SubjectAndApp authentication. ## Permissions Permissions are checked by Microsoft Fabric."""
+ if not BaseJobsApi.subclasses:
+ raise HTTPException(status_code=500, detail="Not implemented")
+
+ workspace_id = UUID(workspaceId)
+ item_id = UUID(itemId)
+ job_instance_id = UUID(jobInstanceId)
+
+ return await BaseJobsApi.subclasses[0]().jobs_get_item_job_instance_state(
+ workspace_id, itemType, item_id, jobType, job_instance_id,
+ activity_id, request_id, authorization, x_ms_client_tenant_id
+ )
diff --git a/Backend/python/src/fabric_api/apis/jobs_api_base.py b/Backend/python/src/fabric_api/apis/jobs_api_base.py
new file mode 100644
index 0000000..405ca5b
--- /dev/null
+++ b/Backend/python/src/fabric_api/apis/jobs_api_base.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+
+from typing import ClassVar, Dict, List, Tuple # noqa: F401
+
+from pydantic import Field, StrictStr
+from typing import Any
+from typing_extensions import Annotated
+from fabric_api.models.create_item_job_instance_request import CreateItemJobInstanceRequest
+from fabric_api.models.error_response import ErrorResponse
+from fabric_api.models.item_job_instance_state import ItemJobInstanceState
+
+
+class BaseJobsApi:
+ subclasses: ClassVar[Tuple] = ()
+
+ def __init_subclass__(cls, **kwargs):
+ super().__init_subclass__(**kwargs)
+ BaseJobsApi.subclasses = BaseJobsApi.subclasses + (cls,)
+
+ async def jobs_cancel_item_job_instance(
+ self,
+ workspaceId: Annotated[StrictStr, Field(description="The workspace ID.")],
+ itemType: Annotated[StrictStr, Field(description="The item type.")],
+ itemId: Annotated[StrictStr, Field(description="The item ID.")],
+ jobType: Annotated[StrictStr, Field(description="The job type.")],
+ jobInstanceId: Annotated[StrictStr, Field(description="The job instance ID.")],
+ activity_id: Annotated[StrictStr, Field(description="A unique ID for correlating the request with your system when a user interacts with your workload.")],
+ request_id: Annotated[StrictStr, Field(description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue.")],
+ authorization: Annotated[StrictStr, Field(description="A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`.")],
+ x_ms_client_tenant_id: Annotated[StrictStr, Field(description="The tenant ID of the client making the request.")],
+ ) -> ItemJobInstanceState:
+ """Fabric performs basic validations and calls this API to cancel an item job instance in the workload. This API should accept SubjectAndApp authentication. ## Permissions Permissions are checked by Microsoft Fabric."""
+ ...
+
+
+ async def jobs_create_item_job_instance(
+ self,
+ workspaceId: Annotated[StrictStr, Field(description="The workspace ID.")],
+ itemType: Annotated[StrictStr, Field(description="The item type.")],
+ itemId: Annotated[StrictStr, Field(description="The item ID.")],
+ jobType: Annotated[StrictStr, Field(description="The job type.")],
+ jobInstanceId: Annotated[StrictStr, Field(description="The job instance ID.")],
+ activity_id: Annotated[StrictStr, Field(description="A unique ID for correlating the request with your system when a user interacts with your workload.")],
+ request_id: Annotated[StrictStr, Field(description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue.")],
+ authorization: Annotated[StrictStr, Field(description="A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`.")],
+ x_ms_client_tenant_id: Annotated[StrictStr, Field(description="The tenant ID of the client making the request.")],
+ create_item_job_instance_request: Annotated[CreateItemJobInstanceRequest, Field(description="The job instance properties.")],
+ ) -> None:
+ """Fabric performs basic validations and calls this API to start a new instance of the job in the workload. This API should accept SubjectAndApp authentication. ## Permissions Permissions are checked by Microsoft Fabric."""
+ ...
+
+
+ async def jobs_get_item_job_instance_state(
+ self,
+ workspaceId: Annotated[StrictStr, Field(description="The workspace ID.")],
+ itemType: Annotated[StrictStr, Field(description="The item type.")],
+ itemId: Annotated[StrictStr, Field(description="The item ID.")],
+ jobType: Annotated[StrictStr, Field(description="The job type.")],
+ jobInstanceId: Annotated[StrictStr, Field(description="The job instance ID.")],
+ activity_id: Annotated[StrictStr, Field(description="A unique ID for correlating the request with your system when a user interacts with your workload.")],
+ request_id: Annotated[StrictStr, Field(description="A globally unique ID that helps Fabric correlate your request with our logs. Provide this ID when reporting an issue.")],
+ authorization: Annotated[StrictStr, Field(description="A dual token authorization header that allows the workload to validate the request origin, provide user context, and call other services. This header has the following format: `SubjectAndAppToken1.0 subjectToken=\"delegated token\", appToken=\"S2S token\"`.")],
+ x_ms_client_tenant_id: Annotated[StrictStr, Field(description="The tenant ID of the client making the request.")],
+ ) -> ItemJobInstanceState:
+ """Fabric performs basic validations and calls this API to retrieve the item job instance state in the workload. This API should accept SubjectAndApp authentication. ## Permissions Permissions are checked by Microsoft Fabric."""
+ ...
diff --git a/Backend/python/src/fabric_api/impl/__init__.py b/Backend/python/src/fabric_api/impl/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Backend/python/src/fabric_api/impl/endpoint_resolution_controller.py b/Backend/python/src/fabric_api/impl/endpoint_resolution_controller.py
new file mode 100644
index 0000000..27c732b
--- /dev/null
+++ b/Backend/python/src/fabric_api/impl/endpoint_resolution_controller.py
@@ -0,0 +1,171 @@
+"""
+Endpoint Resolution Controller Implementation
+
+This controller handles endpoint resolution requests from Microsoft Fabric,
+determining the appropriate service endpoint URL based on the provided context.
+"""
+
+import logging
+import json
+
+from starlette.requests import Request
+from fastapi import HTTPException
+
+from fabric_api.apis.endpoint_resolution_api_base import BaseEndpointResolutionApi
+from fabric_api.models.endpoint_resolution_request import EndpointResolutionRequest
+from fabric_api.models.endpoint_resolution_response import EndpointResolutionResponse
+from fabric_api.models.endpoint_resolution_context_property import EndpointResolutionContextProperty
+
+from services.authentication import get_authentication_service
+from exceptions.exceptions import AuthenticationException
+
+logger = logging.getLogger(__name__)
+
+class EndpointResolutionController(BaseEndpointResolutionApi):
+ """
+ Implementation of the Endpoint Resolution API.
+
+ This controller resolves service endpoints for requests originating from Microsoft Fabric
+ based on resolution context properties such as tenant region and workspace region.
+ """
+
+ def __init__(self, request: Request):
+ """
+ Initialize the controller with the current request context.
+
+ Args:
+ request: The FastAPI/Starlette request object
+ """
+ self.request = request
+ self.logger = logging.getLogger(__name__)
+
+ async def endpoint_resolution_resolve(
+ self,
+ activity_id: str,
+ request_id: str,
+ authorization: str,
+ body: EndpointResolutionRequest
+ ) -> EndpointResolutionResponse:
+ """
+ Resolve an endpoint for a given service called by Microsoft Fabric.
+
+ This method determines the appropriate service endpoint URL based on the
+ provided context properties (e.g., tenant region, workspace region).
+
+ Args:
+ activity_id: A unique ID for correlating the request
+ request_id: A globally unique ID for request tracking
+ authorization: The authorization header containing SubjectAndApp tokens
+ body: The endpoint resolution request containing context properties
+
+ Returns:
+ EndpointResolutionResponse with the resolved URL and TTL
+
+ Raises:
+ HTTPException: If the request is invalid or authentication fails
+ """
+ self.logger.info(f"ResolveAsync: Attempting to resolve endpoint. Activity ID: {activity_id}, Request ID: {request_id}")
+
+ # Validate request body
+ if not body:
+ self.logger.error("ResolveAsync: The request cannot be null.")
+ raise HTTPException(status_code=400, detail="The request cannot be null.")
+
+ if not body.context:
+ self.logger.error("ResolveAsync: The resolution context is missing or empty.")
+ raise HTTPException(status_code=400, detail="The resolution context is missing or empty.")
+
+ try:
+ # Authenticate the call (without requiring subject token or tenant ID header)
+ auth_service = get_authentication_service()
+ auth_context = await auth_service.authenticate_control_plane_call(
+ authorization,
+ tenant_id=None,
+ require_subject_token=False,
+ require_tenant_id_header=False
+ )
+
+ # Log context properties for debugging
+ context_dict = {prop.name: prop.value for prop in body.context}
+ context_json = json.dumps(context_dict)
+ self.logger.info(f"Resolving endpoint with Context Properties: {context_json}")
+
+ # Resolve the endpoint URL based on the request
+ resolved_url = self._resolve_endpoint_url(body)
+
+ # Set TTL (time-to-live) in minutes - default to 60 minutes
+ ttl_in_minutes = 60
+
+ # Create and return the response
+ response = EndpointResolutionResponse(
+ url=resolved_url,
+ ttl_in_minutes=ttl_in_minutes
+ )
+
+ self.logger.info(f"Resolved endpoint URL: {response.url}")
+
+ return response
+
+ except AuthenticationException as e:
+ self.logger.error(f"Authentication failed: {str(e)}")
+ raise HTTPException(status_code=401, detail=str(e))
+ except Exception as e:
+ self.logger.error(f"Error resolving endpoint: {str(e)}", exc_info=True)
+ raise HTTPException(status_code=500, detail=f"Error resolving endpoint: {str(e)}")
+
+ def _resolve_endpoint_url(self, request: EndpointResolutionRequest) -> str:
+ """
+ Resolve the endpoint URL based on the request context.
+
+ This is a placeholder implementation that returns the workload's base URL.
+ In a production environment, this should implement actual endpoint resolution
+ logic based on the context properties (e.g., routing to different regions).
+
+ Args:
+ request: The endpoint resolution request containing context properties
+
+ Returns:
+ The resolved endpoint URL
+ """
+ # Extract context properties for potential routing logic
+ context_dict = {prop.name: prop.value for prop in request.context}
+
+ # Log the context for debugging
+ self.logger.debug(f"Endpoint resolution context: {context_dict}")
+
+ # Get endpoint name if provided
+ endpoint_name = context_dict.get("EndpointName")
+ tenant_region = context_dict.get("TenantRegion")
+ workspace_region = context_dict.get("WorkspaceRegion")
+ tenant_id = context_dict.get("TenantId")
+
+ self.logger.info(f"Resolving endpoint: name={endpoint_name}, tenant_region={tenant_region}, "
+ f"workspace_region={workspace_region}, tenant_id={tenant_id}")
+
+ # Build the base URL from the current request
+ # This ensures we return the correct scheme, host, and port
+ if hasattr(self.request, 'url'):
+ # Starlette Request object
+ base_url = f"{self.request.url.scheme}://{self.request.url.netloc}"
+ else:
+ # Fallback for testing or other contexts
+ base_url = "http://localhost:5000"
+ self.logger.warning("Request context not available, using fallback URL")
+
+ # Add the workload API base path
+ api_base_route = "/workload" # This should match your actual API base route
+ resolved_url = f"{base_url}{api_base_route}"
+
+ return resolved_url
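+
+ # One possible region-aware extension of the placeholder above, assuming a
+ # hypothetical region-to-URL mapping (names and URLs are illustrative):
+ #
+ # _REGION_ENDPOINTS = {
+ #     "westus": "https://workload-westus.contoso.com",
+ #     "northeurope": "https://workload-northeurope.contoso.com",
+ # }
+ # return _REGION_ENDPOINTS.get(workspace_region, base_url) + api_base_route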
+
+
+# Dependency injection for FastAPI
+async def get_endpoint_resolution_controller(request: Request) -> EndpointResolutionController:
+ """
+ FastAPI dependency to create an EndpointResolutionController instance.
+
+ This allows the controller to access the current request context.
+ """
+ return EndpointResolutionController(request)
diff --git a/Backend/python/src/fabric_api/impl/item_lifecycle_controller.py b/Backend/python/src/fabric_api/impl/item_lifecycle_controller.py
new file mode 100644
index 0000000..e687b2f
--- /dev/null
+++ b/Backend/python/src/fabric_api/impl/item_lifecycle_controller.py
@@ -0,0 +1,150 @@
+import logging
+from uuid import UUID
+
+from fabric_api.apis.item_lifecycle_api_base import BaseItemLifecycleApi
+from fabric_api.models.create_item_request import CreateItemRequest
+from fabric_api.models.update_item_request import UpdateItemRequest
+from fabric_api.models.get_item_payload_response import GetItemPayloadResponse
+
+from services.authentication import get_authentication_service, AuthenticationService
+from services.item_factory import get_item_factory, ItemFactory
+
+logger = logging.getLogger(__name__)
+
+class ItemLifecycleController(BaseItemLifecycleApi):
+ """Implementation of the Item Lifecycle API"""
+
+ async def item_lifecycle_create_item(
+ self,
+ workspaceId: UUID,
+ itemType: str,
+ itemId: UUID,
+ activity_id: str = None,
+ request_id: str = None,
+ authorization: str = None,
+ x_ms_client_tenant_id: str = None,
+ create_item_request: CreateItemRequest = None
+ ) -> None:
+ """
+ Called by Microsoft Fabric for creating a new item.
+
+ This endpoint is triggered when the frontend calls callItemCreate,
+ which happens during handleCreateSampleItem in SampleWorkloadCreateDialog.
+ """
+ logger.info(f"Creating item: {itemType} with ID {itemId} in workspace {workspaceId}")
+
+ logger.debug(f"Create item request: {create_item_request}")
+
+ # Get required services
+ auth_service = get_authentication_service()
+ item_factory = get_item_factory()
+
+ logger.debug(f"Authenticating control plane call for client tenant ID: {x_ms_client_tenant_id}")
+ # Authenticate the call
+ auth_context = await auth_service.authenticate_control_plane_call(
+ authorization,
+ x_ms_client_tenant_id
+ )
+
+ # Create the item
+ item = item_factory.create_item(itemType, auth_context)
+ await item.create(workspaceId, itemId, create_item_request)
+
+ logger.info(f"Successfully created item {itemId}")
+ return None
+
+ async def item_lifecycle_update_item(
+ self,
+ workspaceId: UUID,
+ itemType: str,
+ itemId: UUID,
+ activity_id: str = None,
+ request_id: str = None,
+ authorization: str = None,
+ x_ms_client_tenant_id: str = None,
+ update_item_request: UpdateItemRequest = None
+ ) -> None:
+ """Called by Microsoft Fabric for updating an existing item."""
+ logger.info(f"Updating item: {itemType} with ID {itemId} in workspace {workspaceId}")
+ logger.debug(f"Update item request: {update_item_request}")
+
+ auth_service = get_authentication_service()
+ item_factory = get_item_factory()
+
+ auth_context = await auth_service.authenticate_control_plane_call(
+ authorization,
+ x_ms_client_tenant_id
+ )
+
+ item = item_factory.create_item(itemType, auth_context)
+ await item.load(itemId)
+ await item.update(update_item_request)
+
+ logger.info(f"Successfully updated item {itemId}")
+ return None
+
+ async def item_lifecycle_delete_item(
+ self,
+ workspaceId: UUID,
+ itemType: str,
+ itemId: UUID,
+ activity_id: str = None,
+ request_id: str = None,
+ authorization: str = None,
+ x_ms_client_tenant_id: str = None
+ ) -> None:
+ """Called by Microsoft Fabric for deleting an existing item."""
+ logger.info(f"Deleting item: {itemType} with ID {itemId} in workspace {workspaceId}")
+
+ auth_service = get_authentication_service()
+ item_factory = get_item_factory()
+
+ auth_context = await auth_service.authenticate_control_plane_call(
+ authorization,
+ tenant_id=x_ms_client_tenant_id,
+ require_subject_token=False
+ )
+ if not auth_context.has_subject_context:
+ logger.warning(f"Subject token not provided for item deletion: {itemId}")
+
+ item = item_factory.create_item(itemType, auth_context)
+ await item.load(itemId)
+ await item.delete()
+
+ logger.info(f"Successfully deleted item {itemId}")
+ return None
+
+ async def item_lifecycle_get_item_payload(
+ self,
+ workspaceId: UUID,
+ itemType: str,
+ itemId: UUID,
+ activity_id: str = None,
+ request_id: str = None,
+ authorization: str = None,
+ x_ms_client_tenant_id: str = None
+ ) -> GetItemPayloadResponse:
+ """
+ Called by Microsoft Fabric for retrieving the workload payload for an item.
+
+ This endpoint is called when the editor loads via loadDataFromUrl.
+ """
+ logger.info(f"Getting payload for item: {itemType} with ID {itemId} in workspace {workspaceId}")
+
+ auth_service = get_authentication_service()
+ item_factory = get_item_factory()
+
+ auth_context = await auth_service.authenticate_control_plane_call(
+ authorization,
+ x_ms_client_tenant_id
+ )
+
+ item = item_factory.create_item(itemType, auth_context)
+ await item.load(itemId)
+ item_payload = await item.get_item_payload()
+
+ logger.debug(f"Retrieved payload for item {itemId}: {item_payload}")
+ return GetItemPayloadResponse(item_payload=item_payload)
\ No newline at end of file
diff --git a/Backend/python/src/fabric_api/impl/jobs_controller.py b/Backend/python/src/fabric_api/impl/jobs_controller.py
new file mode 100644
index 0000000..3fb56d0
--- /dev/null
+++ b/Backend/python/src/fabric_api/impl/jobs_controller.py
@@ -0,0 +1,224 @@
+import logging
+import asyncio
+from uuid import UUID
+from typing import Set
+from fabric_api.apis.jobs_api_base import BaseJobsApi
+from fabric_api.models.create_item_job_instance_request import CreateItemJobInstanceRequest
+from fabric_api.models.item_job_instance_state import ItemJobInstanceState
+from fabric_api.models.job_instance_status import JobInstanceStatus
+from fabric_api.models.job_invoke_type import JobInvokeType
+from fabric_api.models.error_details import ErrorDetails
+from fabric_api.models.error_source import ErrorSource
+
+from services.authentication import get_authentication_service
+from services.item_factory import get_item_factory
+
+logger = logging.getLogger(__name__)
+
+# Global set to track background tasks
+_background_tasks: Set[asyncio.Task] = set()
+
+class JobsController(BaseJobsApi):
+ """Implementation of the Jobs API for handling job lifecycle operations"""
+
+ async def jobs_create_item_job_instance(
+ self,
+ workspaceId: UUID,
+ itemType: str,
+ itemId: UUID,
+ jobType: str,
+ jobInstanceId: UUID,
+ activity_id: str = None,
+ request_id: str = None,
+ authorization: str = None,
+ x_ms_client_tenant_id: str = None,
+ create_item_job_instance_request: CreateItemJobInstanceRequest = None
+ ) -> None:
+ """Called by Microsoft Fabric for starting a new job instance."""
+ logger.info(f"Creating job instance: {jobType}/{jobInstanceId} for item {itemType}/{itemId}")
+
+ # Get required services
+ auth_service = get_authentication_service()
+ item_factory = get_item_factory()
+
+ try:
+ # Authenticate the call
+ auth_context = await auth_service.authenticate_control_plane_call(
+ authorization,
+ x_ms_client_tenant_id
+ )
+
+ # Create and load the item
+ item = item_factory.create_item(itemType, auth_context)
+ await item.load(itemId)
+
+ logger.info(f"Running job type: {jobType}")
+
+ # Start job execution in the background without awaiting it
+ task = asyncio.create_task(
+ self._execute_job_wrapper(
+ item,
+ jobType,
+ jobInstanceId,
+ create_item_job_instance_request.invoke_type if create_item_job_instance_request else None,
+ create_item_job_instance_request.creation_payload if create_item_job_instance_request else {}
+ ),
+ name=f"Job_{jobType}_{jobInstanceId}"
+ )
+
+ # Add to background tasks set to prevent garbage collection
+ _background_tasks.add(task)
+
+ # Remove from set when done
+ task.add_done_callback(_background_tasks.discard)
+
+ # Return 202 Accepted response (handled by FastAPI)
+ logger.info(f"Job {jobInstanceId} started successfully")
+ return None
+ except Exception as e:
+ logger.error(f"Error creating job instance: {str(e)}", exc_info=True)
+ raise
+
+ async def _execute_job_wrapper(self, item, job_type: str, job_instance_id: UUID,
+ invoke_type: JobInvokeType, creation_payload: dict):
+ """Wrapper for job execution with proper error handling"""
+ try:
+ await item.execute_job(job_type, job_instance_id, invoke_type, creation_payload)
+ logger.info(f"Job {job_instance_id} completed successfully")
+ except asyncio.CancelledError:
+ logger.warning(f"Job {job_instance_id} was cancelled during shutdown")
+ raise # Re-raise to properly handle cancellation
+ except Exception as e:
+ logger.error(f"Error during execution of job {job_instance_id} (type: {job_type}): {str(e)}", exc_info=True)
+ # Don't re-raise - this is a background task
+
+ async def jobs_get_item_job_instance_state(
+ self,
+ workspaceId: UUID,
+ itemType: str,
+ itemId: UUID,
+ jobType: str,
+ jobInstanceId: UUID,
+ activity_id: str = None,
+ request_id: str = None,
+ authorization: str = None,
+ x_ms_client_tenant_id: str = None
+ ) -> ItemJobInstanceState:
+ """Called by Microsoft Fabric for retrieving a job instance state."""
+ logger.info(f"Getting job instance state: {jobType}/{jobInstanceId} for item {itemType}/{itemId}")
+
+ # Get required services
+ auth_service = get_authentication_service()
+ item_factory = get_item_factory()
+
+ try:
+ # Authenticate the call
+ auth_context = await auth_service.authenticate_control_plane_call(
+ authorization,
+ x_ms_client_tenant_id
+ )
+
+ # Create and load the item
+ item = item_factory.create_item(itemType, auth_context)
+ await item.load(itemId)
+
+ # Check if item exists
+ if not item.item_object_id:
+ logger.error(f"Item {itemId} not found")
+ return ItemJobInstanceState(
+ status=JobInstanceStatus.FAILED,
+ error_details=ErrorDetails(
+ error_code="ItemNotFound",
+ message="Item not found.",
+ source=ErrorSource.SYSTEM
+ )
+ )
+
+ # Get job state
+ job_state = await item.get_job_state(jobType, jobInstanceId)
+ logger.info(f"Job {jobInstanceId} state: {job_state.status}")
+ return job_state
+ except Exception as e:
+ logger.error(f"Error getting job instance state: {str(e)}", exc_info=True)
+ raise
+
+ async def jobs_cancel_item_job_instance(
+ self,
+ workspaceId: UUID,
+ itemType: str,
+ itemId: UUID,
+ jobType: str,
+ jobInstanceId: UUID,
+ activity_id: str = None,
+ request_id: str = None,
+ authorization: str = None,
+ x_ms_client_tenant_id: str = None
+ ) -> ItemJobInstanceState:
+ """Called by Microsoft Fabric for cancelling a job instance."""
+ logger.info(f"Cancelling job instance: {jobType}/{jobInstanceId} for item {itemType}/{itemId}")
+
+ # Get required services
+ auth_service = get_authentication_service()
+ item_factory = get_item_factory()
+
+ try:
+ # Authenticate the call
+ auth_context = await auth_service.authenticate_control_plane_call(
+ authorization,
+ x_ms_client_tenant_id
+ )
+
+ # Create and load the item
+ item = item_factory.create_item(itemType, auth_context)
+ await item.load(itemId)
+
+ # Check if item exists
+ if not item.item_object_id:
+ logger.error(f"Item {itemId} not found")
+ return ItemJobInstanceState(
+ status=JobInstanceStatus.FAILED,
+ error_details=ErrorDetails(
+ error_code="ItemNotFound",
+ message="Item not found.",
+ source=ErrorSource.SYSTEM
+ )
+ )
+
+ # Cancel the job
+ logger.info(f"Cancelling job {jobType}/{jobInstanceId}")
+ await item.cancel_job(jobType, jobInstanceId)
+
+ # Return canceled state
+ return ItemJobInstanceState(
+ status=JobInstanceStatus.CANCELLED
+ )
+ except Exception as e:
+ logger.error(f"Error cancelling job instance: {str(e)}", exc_info=True)
+ raise
+
+async def cleanup_background_tasks(timeout: float = 3.0):
+ """Clean up any remaining background tasks during shutdown."""
+ if not _background_tasks:
+ return
+
+ pending_tasks = [task for task in _background_tasks if not task.done()]
+ if not pending_tasks:
+ _background_tasks.clear()
+ return
+
+ logger.info(f"Cancelling {len(pending_tasks)} pending background tasks...")
+
+ # Cancel all pending tasks
+ for task in pending_tasks:
+ task.cancel()
+
+ # Wait for cancellation with timeout
+ try:
+ await asyncio.wait_for(
+ asyncio.gather(*pending_tasks, return_exceptions=True),
+ timeout=timeout
+ )
+ except asyncio.TimeoutError:
+ logger.warning(f"Some tasks did not complete within {timeout}s timeout")
+
+ _background_tasks.clear()
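+
+# Shutdown wiring sketch: one way an application could invoke the cleanup
+# above, e.g. from a FastAPI lifespan handler (names are illustrative):
+#
+# from contextlib import asynccontextmanager
+# from fastapi import FastAPI
+#
+# @asynccontextmanager
+# async def lifespan(app: FastAPI):
+#     yield
+#     await cleanup_background_tasks()
+#
+# app = FastAPI(lifespan=lifespan)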
\ No newline at end of file
diff --git a/Backend/python/src/fabric_api/models/__init__.py b/Backend/python/src/fabric_api/models/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Backend/python/src/fabric_api/models/create_item_job_instance_request.py b/Backend/python/src/fabric_api/models/create_item_job_instance_request.py
new file mode 100644
index 0000000..474ef45
--- /dev/null
+++ b/Backend/python/src/fabric_api/models/create_item_job_instance_request.py
@@ -0,0 +1,93 @@
+# coding: utf-8
+
+"""
+ Workload REST APIs
+
+ APIs to be implemented by workloads for integration with Microsoft Fabric
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+
+
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing import Any, ClassVar, Dict, List, Optional
+from fabric_api.models.job_invoke_type import JobInvokeType
+try:
+ from typing import Self
+except ImportError:
+ from typing_extensions import Self
+
+class CreateItemJobInstanceRequest(BaseModel):
+ """
+ The parameters controlling the job instance execution.
+ """ # noqa: E501
+ invoke_type: JobInvokeType = Field(alias="invokeType")
+ creation_payload: Optional[Dict[str, Any]] = Field(default=None, description="Job instance creation payload specific to the workload, item and job type.", alias="creationPayload")
+ __properties: ClassVar[List[str]] = ["invokeType", "creationPayload"]
+
+ model_config = {
+ "populate_by_name": True,
+ "validate_assignment": True,
+ "protected_namespaces": (),
+ }
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of CreateItemJobInstanceRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude={
+ },
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Dict) -> Self:
+ """Create an instance of CreateItemJobInstanceRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "invokeType": obj.get("invokeType"),
+ "creationPayload": obj.get("creationPayload")
+ })
+ return _obj
+
+
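+# Round-trip sketch for this model ("Manual" is assumed to be a JobInvokeType
+# value; adjust to the generated enum):
+#
+# req = CreateItemJobInstanceRequest.from_json(
+#     '{"invokeType": "Manual", "creationPayload": {"rowCount": 100}}'
+# )
+# print(req.invoke_type)  # the JobInvokeType member parsed from the alias
+# print(req.to_dict())    # keys use the OpenAPI aliases: invokeType, creationPayload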
diff --git a/Backend/python/src/fabric_api/models/create_item_request.py b/Backend/python/src/fabric_api/models/create_item_request.py
new file mode 100644
index 0000000..c0b713e
--- /dev/null
+++ b/Backend/python/src/fabric_api/models/create_item_request.py
@@ -0,0 +1,95 @@
+# coding: utf-8
+
+"""
+ Workload REST APIs
+
+ APIs to be implemented by workloads for integration with Microsoft Fabric
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+
+
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing import Any, ClassVar, Dict, List, Optional
+from typing_extensions import Annotated
+try:
+ from typing import Self
+except ImportError:
+ from typing_extensions import Self
+
+class CreateItemRequest(BaseModel):
+ """
+ Create item request content.
+ """ # noqa: E501
+ display_name: Annotated[str, Field(strict=True, max_length=256)] = Field(description="The item display name. Cannot exceed 256 characters.", alias="displayName")
+ description: Optional[StrictStr] = Field(default=None, description="The item description.")
+ creation_payload: Optional[Dict[str, Any]] = Field(default=None, description="Creation payload specific to the workload and item type. Fabric forwards this payload as-is to your backend system without storing or examining its contents.", alias="creationPayload")
+ __properties: ClassVar[List[str]] = ["displayName", "description", "creationPayload"]
+
+ model_config = {
+ "populate_by_name": True,
+ "validate_assignment": True,
+ "protected_namespaces": (),
+ }
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of CreateItemRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude={
+ },
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Dict) -> Self:
+ """Create an instance of CreateItemRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "displayName": obj.get("displayName"),
+ "description": obj.get("description"),
+ "creationPayload": obj.get("creationPayload")
+ })
+ return _obj
+
+
diff --git a/Backend/python/src/fabric_api/models/endpoint_resolution_context_property.py b/Backend/python/src/fabric_api/models/endpoint_resolution_context_property.py
new file mode 100644
index 0000000..48bf709
--- /dev/null
+++ b/Backend/python/src/fabric_api/models/endpoint_resolution_context_property.py
@@ -0,0 +1,93 @@
+# coding: utf-8
+
+"""
+ Workload REST APIs
+
+ APIs to be implemented by workloads for integration with Microsoft Fabric
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+
+
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing import Any, ClassVar, Dict, List
+from fabric_api.models.endpoint_resolution_context_property_name import EndpointResolutionContextPropertyName
+try:
+ from typing import Self
+except ImportError:
+ from typing_extensions import Self
+
+class EndpointResolutionContextProperty(BaseModel):
+ """
+ Defines a context property used in endpoint resolution. This property must be specified in the workload manifest to ensure correct endpoint determination by Fabric.
+ """ # noqa: E501
+ name: EndpointResolutionContextPropertyName
+ value: StrictStr = Field(description="The value of the context property.")
+ __properties: ClassVar[List[str]] = ["name", "value"]
+
+ model_config = {
+ "populate_by_name": True,
+ "validate_assignment": True,
+ "protected_namespaces": (),
+ }
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of EndpointResolutionContextProperty from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude={
+ },
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Dict) -> Self:
+ """Create an instance of EndpointResolutionContextProperty from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "name": obj.get("name"),
+ "value": obj.get("value")
+ })
+ return _obj
+
+
diff --git a/Backend/python/src/fabric_api/models/endpoint_resolution_context_property_name.py b/Backend/python/src/fabric_api/models/endpoint_resolution_context_property_name.py
new file mode 100644
index 0000000..beacd05
--- /dev/null
+++ b/Backend/python/src/fabric_api/models/endpoint_resolution_context_property_name.py
@@ -0,0 +1,47 @@
+# coding: utf-8
+
+"""
+ Workload REST APIs
+
+ APIs to be implemented by workloads for integration with Microsoft Fabric
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import json
+import pprint
+import re # noqa: F401
+from enum import Enum
+
+
+
+try:
+ from typing import Self
+except ImportError:
+ from typing_extensions import Self
+
+
+class EndpointResolutionContextPropertyName(str, Enum):
+ """
+ The name of the property for endpoint resolution context.
+ """
+
+ """
+ allowed enum values
+ """
+ ENDPOINTNAME = 'EndpointName'
+ TENANTREGION = 'TenantRegion'
+ WORKSPACEREGION = 'WorkspaceRegion'
+ TENANTID = 'TenantId'
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of EndpointResolutionContextPropertyName from a JSON string"""
+ return cls(json.loads(json_str))
+
+
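+# Usage sketch: EndpointResolutionContextPropertyName.from_json('"TenantRegion"')
+# yields EndpointResolutionContextPropertyName.TENANTREGION.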
diff --git a/Backend/python/src/fabric_api/models/endpoint_resolution_request.py b/Backend/python/src/fabric_api/models/endpoint_resolution_request.py
new file mode 100644
index 0000000..3053b79
--- /dev/null
+++ b/Backend/python/src/fabric_api/models/endpoint_resolution_request.py
@@ -0,0 +1,98 @@
+# coding: utf-8
+
+"""
+ Workload REST APIs
+
+ APIs to be implemented by workloads for integration with Microsoft Fabric
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+
+
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing import Any, ClassVar, Dict, List
+from fabric_api.models.endpoint_resolution_context_property import EndpointResolutionContextProperty
+try:
+ from typing import Self
+except ImportError:
+ from typing_extensions import Self
+
+class EndpointResolutionRequest(BaseModel):
+ """
+ EndpointResolutionRequest
+ """ # noqa: E501
+ context: List[EndpointResolutionContextProperty] = Field(description="Array of context properties for endpoint resolution.")
+ __properties: ClassVar[List[str]] = ["context"]
+
+ model_config = {
+ "populate_by_name": True,
+ "validate_assignment": True,
+ "protected_namespaces": (),
+ }
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of EndpointResolutionRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude={
+ },
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of each item in context (list)
+ _items = []
+ if self.context:
+ for _item in self.context:
+ if _item:
+ _items.append(_item.to_dict())
+ _dict['context'] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Dict) -> Self:
+ """Create an instance of EndpointResolutionRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "context": [EndpointResolutionContextProperty.from_dict(_item) for _item in obj.get("context")] if obj.get("context") is not None else None
+ })
+ return _obj
+
+
diff --git a/Backend/python/src/fabric_api/models/endpoint_resolution_response.py b/Backend/python/src/fabric_api/models/endpoint_resolution_response.py
new file mode 100644
index 0000000..6d7d91b
--- /dev/null
+++ b/Backend/python/src/fabric_api/models/endpoint_resolution_response.py
@@ -0,0 +1,92 @@
+# coding: utf-8
+
+"""
+ Workload REST APIs
+
+ APIs to be implemented by workloads for integration with Microsoft Fabric
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+
+
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr
+from typing import Any, ClassVar, Dict, List
+try:
+ from typing import Self
+except ImportError:
+ from typing_extensions import Self
+
+class EndpointResolutionResponse(BaseModel):
+ """
+ EndpointResolutionResponse
+ """ # noqa: E501
+ url: StrictStr = Field(description="The resolved URL of the service.")
+ ttl_in_minutes: StrictInt = Field(description="Time to live in minutes for the resolved URL.", alias="ttlInMinutes")
+ __properties: ClassVar[List[str]] = ["url", "ttlInMinutes"]
+
+ model_config = {
+ "populate_by_name": True,
+ "validate_assignment": True,
+ "protected_namespaces": (),
+ }
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of EndpointResolutionResponse from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude={
+ },
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Dict) -> Self:
+ """Create an instance of EndpointResolutionResponse from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "url": obj.get("url"),
+ "ttlInMinutes": obj.get("ttlInMinutes")
+ })
+ return _obj
+
+
diff --git a/Backend/python/src/fabric_api/models/error_basic_information.py b/Backend/python/src/fabric_api/models/error_basic_information.py
new file mode 100644
index 0000000..76fdd10
--- /dev/null
+++ b/Backend/python/src/fabric_api/models/error_basic_information.py
@@ -0,0 +1,100 @@
+# coding: utf-8
+
+"""
+ Workload REST APIs
+
+ APIs to be implemented by workloads for integration with Microsoft Fabric
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+
+
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing import Any, ClassVar, Dict, List, Optional
+try:
+ from typing import Self
+except ImportError:
+ from typing_extensions import Self
+
+class ErrorBasicInformation(BaseModel):
+ """
+ The basic error information
+ """ # noqa: E501
+ error_code: StrictStr = Field(description="The error code.", alias="errorCode")
+ message: StrictStr = Field(description="The error message.")
+ message_parameters: Optional[List[StrictStr]] = Field(default=None, description="A list of parameters for formatting a localized message.", alias="messageParameters")
+ __properties: ClassVar[List[str]] = ["errorCode", "message", "messageParameters"]
+
+ model_config = {
+ "populate_by_name": True,
+ "validate_assignment": True,
+ "protected_namespaces": (),
+ }
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of ErrorBasicInformation from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+        * OpenAPI `readOnly` fields are excluded.
+ """
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude={
+ "error_code",
+ "message",
+ "message_parameters",
+ },
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Dict) -> Self:
+ """Create an instance of ErrorBasicInformation from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "errorCode": obj.get("errorCode"),
+ "message": obj.get("message"),
+ "messageParameters": obj.get("messageParameters")
+ })
+ return _obj
+
+
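One behavior of the generated to_dict above worth illustrating: the exclude set covers every property of this model, which the docstring attributes to OpenAPI readOnly fields, so to_dict() (and therefore to_json()) returns an empty payload even though model_dump does not. A small sketch under that assumption:

    from fabric_api.models.error_basic_information import ErrorBasicInformation

    err = ErrorBasicInformation(errorCode="ItemNotFound", message="The item does not exist.")
    print(err.model_dump(by_alias=True))
    # {'errorCode': 'ItemNotFound', 'message': 'The item does not exist.', 'messageParameters': None}
    print(err.to_dict())  # {} -- all fields are readOnly, hence excluded from the dump
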
diff --git a/Backend/python/src/fabric_api/models/error_details.py b/Backend/python/src/fabric_api/models/error_details.py
new file mode 100644
index 0000000..060c1f6
--- /dev/null
+++ b/Backend/python/src/fabric_api/models/error_details.py
@@ -0,0 +1,115 @@
+# coding: utf-8
+
+"""
+ Workload REST APIs
+
+ APIs to be implemented by workloads for integration with Microsoft Fabric
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+
+
+
+from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr
+from typing import Any, ClassVar, Dict, List, Optional
+from fabric_api.models.error_extended_information import ErrorExtendedInformation
+from fabric_api.models.error_source import ErrorSource
+try:
+ from typing import Self
+except ImportError:
+ from typing_extensions import Self
+
+class ErrorDetails(BaseModel):
+ """
+ The error details.
+ """ # noqa: E501
+ source: ErrorSource
+ is_permanent: Optional[StrictBool] = Field(default=None, description="Indicates whether the error is permanent or the operation can be retried.", alias="isPermanent")
+ more_details: Optional[List[ErrorExtendedInformation]] = Field(default=None, description="A list of additional error details.", alias="moreDetails")
+ error_code: StrictStr = Field(description="The error code.", alias="errorCode")
+ message: StrictStr = Field(description="The error message.")
+ message_parameters: Optional[List[StrictStr]] = Field(default=None, description="A list of parameters for formatting a localized message.", alias="messageParameters")
+ __properties: ClassVar[List[str]] = ["source", "isPermanent", "moreDetails", "errorCode", "message", "messageParameters"]
+
+ model_config = {
+ "populate_by_name": True,
+ "validate_assignment": True,
+ "protected_namespaces": (),
+ }
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of ErrorDetails from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+        * OpenAPI `readOnly` fields are excluded.
+ """
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude={
+ "is_permanent",
+ "more_details",
+ "error_code",
+ "message",
+ "message_parameters",
+ },
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of each item in more_details (list)
+ _items = []
+ if self.more_details:
+ for _item in self.more_details:
+ if _item:
+ _items.append(_item.to_dict())
+ _dict['moreDetails'] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Dict) -> Self:
+ """Create an instance of ErrorDetails from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "source": obj.get("source"),
+ "isPermanent": obj.get("isPermanent"),
+ "moreDetails": [ErrorExtendedInformation.from_dict(_item) for _item in obj.get("moreDetails")] if obj.get("moreDetails") is not None else None,
+ "errorCode": obj.get("errorCode"),
+ "message": obj.get("message"),
+ "messageParameters": obj.get("messageParameters")
+ })
+ return _obj
+
+
diff --git a/Backend/python/src/fabric_api/models/error_extended_information.py b/Backend/python/src/fabric_api/models/error_extended_information.py
new file mode 100644
index 0000000..e5a1882
--- /dev/null
+++ b/Backend/python/src/fabric_api/models/error_extended_information.py
@@ -0,0 +1,109 @@
+# coding: utf-8
+
+"""
+ Workload REST APIs
+
+ APIs to be implemented by workloads for integration with Microsoft Fabric
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+
+
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing import Any, ClassVar, Dict, List, Optional
+from fabric_api.models.name_value_pair import NameValuePair
+try:
+ from typing import Self
+except ImportError:
+ from typing_extensions import Self
+
+class ErrorExtendedInformation(BaseModel):
+ """
+ The extended error information.
+ """ # noqa: E501
+ additional_parameters: Optional[List[NameValuePair]] = Field(default=None, description="A list of additional parameters specific to the error.", alias="additionalParameters")
+ error_code: StrictStr = Field(description="The error code.", alias="errorCode")
+ message: StrictStr = Field(description="The error message.")
+ message_parameters: Optional[List[StrictStr]] = Field(default=None, description="A list of parameters for formatting a localized message.", alias="messageParameters")
+ __properties: ClassVar[List[str]] = ["additionalParameters", "errorCode", "message", "messageParameters"]
+
+ model_config = {
+ "populate_by_name": True,
+ "validate_assignment": True,
+ "protected_namespaces": (),
+ }
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of ErrorExtendedInformation from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+        * OpenAPI `readOnly` fields are excluded.
+ """
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude={
+ "additional_parameters",
+ "error_code",
+ "message",
+ "message_parameters",
+ },
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of each item in additional_parameters (list)
+ _items = []
+ if self.additional_parameters:
+ for _item in self.additional_parameters:
+ if _item:
+ _items.append(_item.to_dict())
+ _dict['additionalParameters'] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Dict) -> Self:
+ """Create an instance of ErrorExtendedInformation from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "additionalParameters": [NameValuePair.from_dict(_item) for _item in obj.get("additionalParameters")] if obj.get("additionalParameters") is not None else None,
+ "errorCode": obj.get("errorCode"),
+ "message": obj.get("message"),
+ "messageParameters": obj.get("messageParameters")
+ })
+ return _obj
+
+
diff --git a/Backend/python/src/fabric_api/models/error_response.py b/Backend/python/src/fabric_api/models/error_response.py
new file mode 100644
index 0000000..069d170
--- /dev/null
+++ b/Backend/python/src/fabric_api/models/error_response.py
@@ -0,0 +1,114 @@
+# coding: utf-8
+
+"""
+ Workload REST APIs
+
+ APIs to be implemented by workloads for integration with Microsoft Fabric
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+
+
+
+from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr
+from typing import Any, ClassVar, Dict, List, Optional
+from fabric_api.models.error_extended_information import ErrorExtendedInformation
+from fabric_api.models.error_source import ErrorSource
+try:
+ from typing import Self
+except ImportError:
+ from typing_extensions import Self
+
+class ErrorResponse(BaseModel):
+ """
+ The error response.
+ """ # noqa: E501
+ source: ErrorSource
+ is_permanent: Optional[StrictBool] = Field(default=None, description="Indicates whether the error is permanent or the operation can be retried.", alias="isPermanent")
+ more_details: Optional[List[ErrorExtendedInformation]] = Field(default=None, description="A list of additional error details.", alias="moreDetails")
+ error_code: StrictStr = Field(description="The error code.", alias="errorCode")
+ message: StrictStr = Field(description="The error message.")
+ message_parameters: Optional[List[StrictStr]] = Field(default=None, description="A list of parameters for formatting a localized message.", alias="messageParameters")
+ __properties: ClassVar[List[str]] = ["source", "isPermanent", "moreDetails", "errorCode", "message", "messageParameters"]
+
+ model_config = {
+ "populate_by_name": True,
+ "validate_assignment": True,
+ "protected_namespaces": (),
+ }
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of ErrorResponse from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+        * OpenAPI `readOnly` fields are excluded.
+ """
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude={
+ "is_permanent",
+ "more_details",
+ "error_code",
+ "message",
+ "message_parameters",
+ },
+ exclude_none=True,
+ )
+ _items = []
+ if self.more_details:
+ for _item in self.more_details:
+ if _item:
+ _items.append(_item.to_dict())
+ _dict['moreDetails'] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Dict) -> Self:
+ """Create an instance of ErrorResponse from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "source": obj.get("source"),
+ "isPermanent": obj.get("isPermanent"),
+ "moreDetails": [ErrorExtendedInformation.from_dict(_item) for _item in obj.get("moreDetails")] if obj.get("moreDetails") is not None else None,
+ "errorCode": obj.get("errorCode"),
+ "message": obj.get("message"),
+ "messageParameters": obj.get("messageParameters")
+ })
+ return _obj
+
+
diff --git a/Backend/python/src/fabric_api/models/error_source.py b/Backend/python/src/fabric_api/models/error_source.py
new file mode 100644
index 0000000..88d0a59
--- /dev/null
+++ b/Backend/python/src/fabric_api/models/error_source.py
@@ -0,0 +1,46 @@
+# coding: utf-8
+
+"""
+ Workload REST APIs
+
+ APIs to be implemented by workloads for integration with Microsoft Fabric
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import json
+import pprint
+import re # noqa: F401
+from enum import Enum
+
+
+
+try:
+ from typing import Self
+except ImportError:
+ from typing_extensions import Self
+
+
+class ErrorSource(str, Enum):
+ """
+ The source of the error.
+ """
+
+ """
+ allowed enum values
+ """
+ SYSTEM = 'System'
+ USER = 'User'
+ EXTERNAL = 'External'
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of ErrorSource from a JSON string"""
+ return cls(json.loads(json_str))
+
+
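A quick sketch of how the enum above round-trips through JSON; note that from_json takes a JSON string, so the value itself must carry quotes:

    from fabric_api.models.error_source import ErrorSource

    src = ErrorSource.from_json('"User"')  # json.loads strips the embedded quotes first
    assert src is ErrorSource.USER
    assert src == 'User'                   # str subclass, so it compares equal to its value
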
diff --git a/Backend/python/src/fabric_api/models/extra_models.py b/Backend/python/src/fabric_api/models/extra_models.py
new file mode 100644
index 0000000..a3a283f
--- /dev/null
+++ b/Backend/python/src/fabric_api/models/extra_models.py
@@ -0,0 +1,8 @@
+# coding: utf-8
+
+from pydantic import BaseModel
+
+class TokenModel(BaseModel):
+ """Defines a token model."""
+
+ sub: str
diff --git a/Backend/python/src/fabric_api/models/get_item_payload_response.py b/Backend/python/src/fabric_api/models/get_item_payload_response.py
new file mode 100644
index 0000000..a30c4c7
--- /dev/null
+++ b/Backend/python/src/fabric_api/models/get_item_payload_response.py
@@ -0,0 +1,90 @@
+# coding: utf-8
+
+"""
+ Workload REST APIs
+
+ APIs to be implemented by workloads for integration with Microsoft Fabric
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+
+
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing import Any, ClassVar, Dict, List, Optional
+try:
+ from typing import Self
+except ImportError:
+ from typing_extensions import Self
+
+class GetItemPayloadResponse(BaseModel):
+ """
+ The item payload for the item editor.
+ """ # noqa: E501
+ item_payload: Optional[Dict[str, Any]] = Field(default=None, description="Item payload specific to the workload and item type.", alias="itemPayload")
+ __properties: ClassVar[List[str]] = ["itemPayload"]
+
+ model_config = {
+ "populate_by_name": True,
+ "validate_assignment": True,
+ "protected_namespaces": (),
+ }
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of GetItemPayloadResponse from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude={
+ },
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Dict) -> Self:
+ """Create an instance of GetItemPayloadResponse from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "itemPayload": obj.get("itemPayload")
+ })
+ return _obj
+
+
diff --git a/Backend/python/src/fabric_api/models/item_job_instance_state.py b/Backend/python/src/fabric_api/models/item_job_instance_state.py
new file mode 100644
index 0000000..37ff22f
--- /dev/null
+++ b/Backend/python/src/fabric_api/models/item_job_instance_state.py
@@ -0,0 +1,104 @@
+# coding: utf-8
+
+"""
+ Workload REST APIs
+
+ APIs to be implemented by workloads for integration with Microsoft Fabric
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+
+
+
+from datetime import datetime
+from pydantic import BaseModel, ConfigDict, Field
+from typing import Any, ClassVar, Dict, List, Optional
+from fabric_api.models.error_details import ErrorDetails
+from fabric_api.models.job_instance_status import JobInstanceStatus
+try:
+ from typing import Self
+except ImportError:
+ from typing_extensions import Self
+
+class ItemJobInstanceState(BaseModel):
+ """
+ The job instance state.
+ """ # noqa: E501
+ status: JobInstanceStatus
+ start_time_utc: Optional[datetime] = Field(default=None, description="The job instance start time.", alias="startTimeUtc")
+ end_time_utc: Optional[datetime] = Field(default=None, description="The job instance end time.", alias="endTimeUtc")
+ error_details: Optional[ErrorDetails] = Field(default=None, alias="errorDetails")
+ __properties: ClassVar[List[str]] = ["status", "startTimeUtc", "endTimeUtc", "errorDetails"]
+
+ model_config = {
+ "populate_by_name": True,
+ "validate_assignment": True,
+ "protected_namespaces": (),
+ }
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of ItemJobInstanceState from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+        * OpenAPI `readOnly` fields are excluded.
+ """
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude={
+ "start_time_utc",
+ "end_time_utc",
+ },
+ exclude_none=True,
+ )
+ if self.error_details:
+ _dict['errorDetails'] = self.error_details.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Dict) -> Self:
+ """Create an instance of ItemJobInstanceState from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "status": obj.get("status"),
+ "startTimeUtc": obj.get("startTimeUtc"),
+ "endTimeUtc": obj.get("endTimeUtc"),
+ "errorDetails": ErrorDetails.from_dict(obj.get("errorDetails")) if obj.get("errorDetails") is not None else None
+ })
+ return _obj
+
+
diff --git a/Backend/python/src/fabric_api/models/job_instance_status.py b/Backend/python/src/fabric_api/models/job_instance_status.py
new file mode 100644
index 0000000..86c1afb
--- /dev/null
+++ b/Backend/python/src/fabric_api/models/job_instance_status.py
@@ -0,0 +1,48 @@
+# coding: utf-8
+
+"""
+ Workload REST APIs
+
+ APIs to be implemented by workloads for integration with Microsoft Fabric
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import json
+import pprint
+import re # noqa: F401
+from enum import Enum
+
+
+
+try:
+ from typing import Self
+except ImportError:
+ from typing_extensions import Self
+
+
+class JobInstanceStatus(str, Enum):
+ """
+ The status of an item job instance.
+ """
+
+ """
+ allowed enum values
+ """
+ NOTSTARTED = 'NotStarted'
+ INPROGRESS = 'InProgress'
+ COMPLETED = 'Completed'
+ FAILED = 'Failed'
+ CANCELLED = 'Cancelled'
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of JobInstanceStatus from a JSON string"""
+ return cls(json.loads(json_str))
+
+
diff --git a/Backend/python/src/fabric_api/models/job_invoke_type.py b/Backend/python/src/fabric_api/models/job_invoke_type.py
new file mode 100644
index 0000000..99a16e8
--- /dev/null
+++ b/Backend/python/src/fabric_api/models/job_invoke_type.py
@@ -0,0 +1,63 @@
+# coding: utf-8
+
+"""
+ Workload REST APIs
+
+ APIs to be implemented by workloads for integration with Microsoft Fabric
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import json
+import pprint
+import re # noqa: F401
+from enum import Enum
+from typing import Union, Any
+
+
+try:
+ from typing import Self
+except ImportError:
+ from typing_extensions import Self
+
+
+class JobInvokeType(str, Enum):
+ """
+ The job invoke type.
+ """
+
+ """
+ allowed enum values
+ """
+ UNKNOWNFUTUREVALUE = 'UnknownFutureValue'
+ SCHEDULED = 'Scheduled'
+ MANUAL = 'Manual'
+
+ @classmethod
+ def _missing_(cls, value):
+ if isinstance(value, int):
+ int_to_enum = {
+ 0: cls.UNKNOWNFUTUREVALUE,
+ 1: cls.SCHEDULED,
+ 2: cls.MANUAL
+ }
+ return int_to_enum.get(value)
+ return None
+
+ def __int__(self):
+ enum_to_int = {
+ self.UNKNOWNFUTUREVALUE: 0,
+ self.SCHEDULED: 1,
+ self.MANUAL: 2
+ }
+ return enum_to_int[self]
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of JobInvokeType from a JSON string"""
+ return cls(json.loads(json_str))
\ No newline at end of file
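Unlike the other enums in this diff, JobInvokeType also accepts the integer wire values through _missing_ and converts back via __int__; a minimal sketch:

    from fabric_api.models.job_invoke_type import JobInvokeType

    assert JobInvokeType(1) is JobInvokeType.SCHEDULED      # _missing_ maps 0/1/2 to members
    assert JobInvokeType('Manual') is JobInvokeType.MANUAL  # normal string construction
    assert int(JobInvokeType.MANUAL) == 2                   # __int__ reverses the mapping
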
diff --git a/Backend/python/src/fabric_api/models/name_value_pair.py b/Backend/python/src/fabric_api/models/name_value_pair.py
new file mode 100644
index 0000000..f36f8cb
--- /dev/null
+++ b/Backend/python/src/fabric_api/models/name_value_pair.py
@@ -0,0 +1,95 @@
+# coding: utf-8
+
+"""
+ Workload REST APIs
+
+ APIs to be implemented by workloads for integration with Microsoft Fabric
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+
+
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing import Any, ClassVar, Dict, List
+try:
+ from typing import Self
+except ImportError:
+ from typing_extensions import Self
+
+class NameValuePair(BaseModel):
+ """
+ A name-value pair.
+ """ # noqa: E501
+ name: StrictStr = Field(description="The name.")
+ value: StrictStr = Field(description="The value.")
+ __properties: ClassVar[List[str]] = ["name", "value"]
+
+ model_config = {
+ "populate_by_name": True,
+ "validate_assignment": True,
+ "protected_namespaces": (),
+ }
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of NameValuePair from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+        * OpenAPI `readOnly` fields are excluded.
+ """
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude={
+ "name",
+ "value",
+ },
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Dict) -> Self:
+ """Create an instance of NameValuePair from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "name": obj.get("name"),
+ "value": obj.get("value")
+ })
+ return _obj
+
+
diff --git a/Backend/python/src/fabric_api/models/update_item_request.py b/Backend/python/src/fabric_api/models/update_item_request.py
new file mode 100644
index 0000000..4292eb4
--- /dev/null
+++ b/Backend/python/src/fabric_api/models/update_item_request.py
@@ -0,0 +1,95 @@
+# coding: utf-8
+
+"""
+ Workload REST APIs
+
+ APIs to be implemented by workloads for integration with Microsoft Fabric
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+
+
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing import Any, ClassVar, Dict, List, Optional
+from typing_extensions import Annotated
+try:
+ from typing import Self
+except ImportError:
+ from typing_extensions import Self
+
+class UpdateItemRequest(BaseModel):
+ """
+    Update item request content. This content has 'PATCH' semantics: for properties which are not provided (null), the item's current values should be preserved.
+ """ # noqa: E501
+ display_name: Optional[Annotated[str, Field(strict=True, max_length=256)]] = Field(default=None, description="The item display name. Cannot exceed 256 characters.", alias="displayName")
+ description: Optional[StrictStr] = Field(default=None, description="The item description.")
+ update_payload: Optional[Dict[str, Any]] = Field(default=None, description="Update payload specific to the workload and item type.", alias="updatePayload")
+ __properties: ClassVar[List[str]] = ["displayName", "description", "updatePayload"]
+
+ model_config = {
+ "populate_by_name": True,
+ "validate_assignment": True,
+ "protected_namespaces": (),
+ }
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of UpdateItemRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude={
+ },
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Dict) -> Self:
+ """Create an instance of UpdateItemRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "displayName": obj.get("displayName"),
+ "description": obj.get("description"),
+ "updatePayload": obj.get("updatePayload")
+ })
+ return _obj
+
+
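Given the PATCH semantics described in the class docstring, fields left as None disappear from the serialized payload thanks to exclude_none=True, so omitted properties are never sent as explicit nulls. A small sketch:

    from fabric_api.models.update_item_request import UpdateItemRequest

    req = UpdateItemRequest(displayName="Renamed item")  # description and updatePayload stay None
    print(req.to_dict())  # {'displayName': 'Renamed item'} -- None fields are dropped
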
diff --git a/Backend/python/src/fabric_api/security_api.py b/Backend/python/src/fabric_api/security_api.py
new file mode 100644
index 0000000..c4fa794
--- /dev/null
+++ b/Backend/python/src/fabric_api/security_api.py
@@ -0,0 +1,20 @@
+# coding: utf-8
+
+from typing import List
+
+from fastapi import Depends, Security # noqa: F401
+from fastapi.openapi.models import OAuthFlowImplicit, OAuthFlows # noqa: F401
+from fastapi.security import ( # noqa: F401
+ HTTPAuthorizationCredentials,
+ HTTPBasic,
+ HTTPBasicCredentials,
+ HTTPBearer,
+ OAuth2,
+ OAuth2AuthorizationCodeBearer,
+ OAuth2PasswordBearer,
+ SecurityScopes,
+)
+from fastapi.security.api_key import APIKeyCookie, APIKeyHeader, APIKeyQuery # noqa: F401
+
+from fabric_api.models.extra_models import TokenModel
+
diff --git a/Backend/python/src/impl/__init__.py b/Backend/python/src/impl/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Backend/python/src/impl/fabric_extension_controller.py b/Backend/python/src/impl/fabric_extension_controller.py
new file mode 100644
index 0000000..27fca0b
--- /dev/null
+++ b/Backend/python/src/impl/fabric_extension_controller.py
@@ -0,0 +1,103 @@
+import logging
+from fastapi import APIRouter, Depends, Header, Path, HTTPException
+from typing import Optional, List, Dict, Any
+from uuid import UUID
+from fastapi.responses import JSONResponse, PlainTextResponse
+from services.authentication import AuthenticationService, get_authentication_service
+from services.item_factory import ItemFactory, get_item_factory
+from constants.workload_scopes import WorkloadScopes
+from models.item1_metadata import Item1Operator
+from constants.workload_constants import WorkloadConstants
+from services.authorization import get_authorization_service
+
+router = APIRouter(tags=["FabricExtension"])
+logger = logging.getLogger(__name__)
+
+@router.get("/item1SupportedOperators", response_model=List[str])
+async def get_item1_supported_operators(
+ authorization: Optional[str] = Header(None),
+ auth_service: AuthenticationService = Depends(get_authentication_service)
+):
+ """
+ Gets a list of arithmetic operators supported for Item1.
+ This endpoint is called by loadSupportedOperators in SampleWorkloadEditor.
+ """
+ logger.info("Getting supported operators for Item1")
+
+ # Authenticate the call
+    try:
+        await auth_service.authenticate_data_plane_call(
+ authorization,
+ allowed_scopes=[WorkloadScopes.ITEM1_READ_WRITE_ALL]
+ )
+
+ operators = [op.name.title() for op in Item1Operator if op != Item1Operator.UNDEFINED]
+ logger.info(f"Returning supported operators: {operators}")
+ return operators
+ except Exception as e:
+ logger.error(f"Error getting supported operators: {e}")
+ return JSONResponse(
+ status_code=500,
+            content={"message": f"Error getting supported operators: {str(e)}"}
+ )
+
+@router.post("/{workspace_id}/{item_id}/item1DoubleResult")
+async def item1_double_result(
+ workspace_id: UUID,
+ item_id: UUID,
+ authorization: Optional[str] = Header(None),
+ auth_service: AuthenticationService = Depends(get_authentication_service),
+ item_factory: ItemFactory = Depends(get_item_factory)
+):
+ """Doubles the result of the calculation for an instance of Item1."""
+ logger.info(f"Doubling result for Item1 {item_id}")
+
+ auth_context = await auth_service.authenticate_data_plane_call(
+ authorization,
+ allowed_scopes=[WorkloadScopes.ITEM1_READ_WRITE_ALL]
+ )
+ # Add this authorization check
+ auth_handler = get_authorization_service()
+ await auth_handler.validate_permissions(
+ auth_context,
+ workspace_id,
+ item_id,
+ ["Read", "Write"]
+ )
+
+ item = item_factory.create_item(WorkloadConstants.ItemTypes.ITEM1, auth_context)
+ await item.load(item_id)
+ operands = await item.double()
+
+ return {"Operand1": operands[0], "Operand2": operands[1]}
+
+@router.get("/{item_id}/getLastResult", response_class=PlainTextResponse)
+async def get_last_result(
+ item_id: UUID,
+ authorization: Optional[str] = Header(None),
+ auth_service: AuthenticationService = Depends(get_authentication_service),
+ item_factory: ItemFactory = Depends(get_item_factory)
+):
+ """Get the last calculation result for an Item1."""
+ logger.info(f"Getting last result for Item1 {item_id}")
+
+ auth_context = await auth_service.authenticate_data_plane_call(
+ authorization,
+ allowed_scopes=[WorkloadScopes.ITEM1_READ_WRITE_ALL]
+ )
+    logger.debug(f"getLastResult using tenant ID: {auth_context.tenant_object_id}")
+
+ try:
+ item = item_factory.create_item(WorkloadConstants.ItemTypes.ITEM1, auth_context)
+ await item.load(item_id)
+ result = await item.get_last_result()
+ return result
+ except ValueError:
+ logger.warning(f"Item {item_id} not found, returning empty result")
+ return ""
+ except Exception as e:
+ logger.error(f"Error getting last result: {e}")
+ return JSONResponse(
+ status_code=500,
+ content={"message": f"Error getting last result: {str(e)}"}
+ )
\ No newline at end of file
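A hypothetical sketch of exercising the route above with FastAPI's TestClient. The application factory is not part of this diff, so the wiring below is an assumption, and in a real test the authentication dependency would typically be overridden instead of passing a live bearer token:

    # Hypothetical test wiring; the real app factory is not shown in this diff.
    from fastapi import FastAPI
    from fastapi.testclient import TestClient
    from impl.fabric_extension_controller import router

    app = FastAPI()
    app.include_router(router)
    client = TestClient(app)

    resp = client.get(
        "/item1SupportedOperators",
        headers={"Authorization": "Bearer <token>"},  # placeholder; auth would normally be mocked
    )
    print(resp.status_code, resp.json())
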
diff --git a/Backend/python/src/impl/lakehouse_controller.py b/Backend/python/src/impl/lakehouse_controller.py
new file mode 100644
index 0000000..7bb03ad
--- /dev/null
+++ b/Backend/python/src/impl/lakehouse_controller.py
@@ -0,0 +1,143 @@
+import logging
+from fastapi import APIRouter, Depends, Header, Path, HTTPException, Query, Body, Response
+from typing import Optional, List, Dict, Any, Union
+from uuid import UUID
+
+from constants.onelake_constants import OneLakeConstants
+from constants.workload_scopes import WorkloadScopes
+from models.write_to_lakehouse_file_request import WriteToLakehouseFileRequest
+from models.lakehouse_table import LakehouseTable
+from services.authentication import AuthenticationService, get_authentication_service, AuthenticationUIRequiredException
+from services.onelake_client_service import OneLakeClientService, get_onelake_client_service
+from services.lakehouse_client_service import LakehouseClientService, get_lakehouse_client_service
+
+router = APIRouter(tags=["Lakehouse"])
+logger = logging.getLogger(__name__)
+
+@router.get("/getLakehouseFile")
+async def get_lakehouse_file(
+ source: str,
+ authorization: Optional[str] = Header(None),
+ auth_service: AuthenticationService = Depends(get_authentication_service),
+ onelake_service: OneLakeClientService = Depends(get_onelake_client_service)
+):
+ """
+ Retrieves a file from the OneLake storage.
+ """
+ auth_context = await auth_service.authenticate_data_plane_call(
+ authorization,
+ allowed_scopes=[WorkloadScopes.FABRIC_LAKEHOUSE_READ_ALL, WorkloadScopes.FABRIC_LAKEHOUSE_READ_WRITE_ALL]
+ )
+
+ # Attempt to get token with OneLake scopes
+ lakehouse_access_token = await auth_service.get_access_token_on_behalf_of(
+ auth_context, OneLakeConstants.ONELAKE_SCOPES
+ )
+
+ data = await onelake_service.get_onelake_file(lakehouse_access_token, source)
+
+ if not data:
+ logger.warning(f"GetOneLakeFile returned empty data for source: {source}")
+        # Return an explicit 204 No Content response for empty data
+        return Response(status_code=204)
+
+ logger.info(f"GetOneLakeFile succeeded for source: {source}")
+ return data
+
+@router.put("/writeToLakehouseFile")
+async def write_to_lakehouse_file(
+ request: WriteToLakehouseFileRequest,
+ authorization: Optional[str] = Header(None),
+ auth_service: AuthenticationService = Depends(get_authentication_service),
+ onelake_service: OneLakeClientService = Depends(get_onelake_client_service)
+):
+ """
+ Writes content to a file in the OneLake storage.
+ """
+ auth_context = await auth_service.authenticate_data_plane_call(
+ authorization,
+ allowed_scopes=[WorkloadScopes.FABRIC_LAKEHOUSE_READ_WRITE_ALL]
+ )
+
+ lakehouse_access_token = await auth_service.get_access_token_on_behalf_of(
+ auth_context, OneLakeConstants.ONELAKE_SCOPES
+ )
+
+ file_path = onelake_service.get_onelake_file_path(
+ request.workspace_id,
+ request.lakehouse_id,
+ request.file_name
+ )
+
+ file_exists = await onelake_service.check_if_file_exists(
+ lakehouse_access_token, file_path
+ )
+
+ if file_exists and not request.overwrite_if_exists:
+ # File exists, and overwrite is not allowed, return conflict
+ logger.error(f"WriteToOneLakeFile failed. The file already exists at filePath: {file_path}.")
+ raise HTTPException(status_code=409, detail="File already exists. Overwrite is not allowed.")
+
+ # Write content to file
+ await onelake_service.write_to_onelake_file(
+ lakehouse_access_token, file_path, request.content
+ )
+
+ logger.info(f"WriteToOneLakeFile succeeded for filePath: {file_path}")
+ return {"success": True}
+
+@router.get("/onelake/{workspace_id}/{lakehouse_id}/tables")
+async def get_tables(
+ workspace_id: UUID,
+ lakehouse_id: UUID,
+ authorization: Optional[str] = Header(None),
+ auth_service: AuthenticationService = Depends(get_authentication_service),
+ lakehouse_service: LakehouseClientService = Depends(get_lakehouse_client_service)
+):
+ """
+ Retrieves tables from a Lakehouse.
+ """
+ auth_context = await auth_service.authenticate_data_plane_call(
+ authorization,
+ allowed_scopes=[WorkloadScopes.FABRIC_LAKEHOUSE_READ_ALL, WorkloadScopes.FABRIC_LAKEHOUSE_READ_WRITE_ALL]
+ )
+
+ token = await auth_service.get_access_token_on_behalf_of(
+ auth_context, OneLakeConstants.ONELAKE_SCOPES
+ )
+
+ tables = await lakehouse_service.get_lakehouse_tables(token, workspace_id, lakehouse_id)
+
+ # Convert LakehouseTable objects to dictionaries for JSON serialization
+ result = []
+ for table in tables:
+ result.append({
+ "name": table.name,
+ "path": table.path,
+ "schema": table.schema_name
+ })
+
+ return result
+
+@router.get("/onelake/{workspace_id}/{lakehouse_id}/files")
+async def get_files(
+ workspace_id: UUID,
+ lakehouse_id: UUID,
+ authorization: Optional[str] = Header(None),
+ auth_service: AuthenticationService = Depends(get_authentication_service),
+ lakehouse_service: LakehouseClientService = Depends(get_lakehouse_client_service)
+):
+ """
+ Retrieves files from a Lakehouse.
+ """
+ auth_context = await auth_service.authenticate_data_plane_call(
+ authorization,
+ allowed_scopes=[WorkloadScopes.FABRIC_LAKEHOUSE_READ_ALL, WorkloadScopes.FABRIC_LAKEHOUSE_READ_WRITE_ALL]
+ )
+
+ token = await auth_service.get_access_token_on_behalf_of(
+ auth_context, OneLakeConstants.ONELAKE_SCOPES
+ )
+
+ files = await lakehouse_service.get_lakehouse_files(token, workspace_id, lakehouse_id)
+ return files
diff --git a/Backend/python/src/impl/onelake_controller.py b/Backend/python/src/impl/onelake_controller.py
new file mode 100644
index 0000000..ff42efd
--- /dev/null
+++ b/Backend/python/src/impl/onelake_controller.py
@@ -0,0 +1,55 @@
+import logging
+from fastapi import APIRouter, Depends, Request
+from typing import Optional
+from uuid import UUID
+
+from constants.onelake_constants import OneLakeConstants
+from constants.workload_scopes import WorkloadScopes
+from services.authentication import AuthenticationService, get_authentication_service
+from services.onelake_client_service import OneLakeClientService, get_onelake_client_service
+
+router = APIRouter(tags=["OneLake"])
+logger = logging.getLogger(__name__)
+
+@router.get("/{workspace_object_id}/{item_object_id}/isOneLakeSupported")
+async def is_onelake_supported(
+ workspace_object_id: UUID,
+ item_object_id: UUID,
+ request: Request,
+ auth_service: AuthenticationService = Depends(get_authentication_service),
+ onelake_service: OneLakeClientService = Depends(get_onelake_client_service)
+):
+ """
+ Returns a flag indicating whether OneLake storage is supported for this item.
+    OneLake is supported if the workload opts in via the "CreateOneLakeFoldersOnArtifactCreation" flag.
+
+ Returns:
+ bool: true if OneLake is supported for this item, false otherwise
+ """
+
+ # Extract authorization header from request
+ authorization = request.headers.get("authorization")
+
+ # Authenticate the data plane call with allowed scopes
+ auth_context = await auth_service.authenticate_data_plane_call(
+ authorization,
+ allowed_scopes=[WorkloadScopes.ITEM1_READ_WRITE_ALL]
+ )
+
+ # Get token for OneLake access
+ token = await auth_service.get_access_token_on_behalf_of(
+ auth_context,
+ OneLakeConstants.ONELAKE_SCOPES
+ )
+
+ # Get OneLake folder names
+ folder_names = await onelake_service.get_onelake_folder_names(
+ token,
+ workspace_object_id,
+ item_object_id
+ )
+
+ # OneLake is supported if there are any folders
+ is_supported = bool(folder_names)
+
+ return is_supported
diff --git a/Backend/python/src/items/__init__.py b/Backend/python/src/items/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Backend/python/src/items/base_item.py b/Backend/python/src/items/base_item.py
new file mode 100644
index 0000000..56a11e2
--- /dev/null
+++ b/Backend/python/src/items/base_item.py
@@ -0,0 +1,257 @@
+from abc import ABC, abstractmethod
+from typing import Dict, Any, Optional, TypeVar, Generic, Tuple
+from uuid import UUID
+import logging
+import datetime
+from exceptions.exceptions import ItemMetadataNotFoundException, InvariantViolationException, UnexpectedItemTypeException, InvalidItemPayloadException
+from models.authentication_models import AuthorizationContext
+from fabric_api.models.job_invoke_type import JobInvokeType
+from fabric_api.models.create_item_request import CreateItemRequest
+from fabric_api.models.update_item_request import UpdateItemRequest
+from fabric_api.models.item_job_instance_state import ItemJobInstanceState
+from fabric_api.models.job_instance_status import JobInstanceStatus
+from models.common_item_metadata import CommonItemMetadata
+from typing import Type
+
+# Define type variables for metadata
+TItemMetadata = TypeVar('TItemMetadata')
+TItemClientMetadata = TypeVar('TItemClientMetadata')
+
+class ItemBase(ABC, Generic[TItemMetadata, TItemClientMetadata]):
+ """
+ Base class for all items. This is a Python equivalent of ItemBase.
+ """
+
+ def __init__(self, auth_context: AuthorizationContext):
+ """Initialize a base item."""
+ from services.item_metadata_store import get_item_metadata_store
+ from services.onelake_client_service import get_onelake_client_service
+ from services.authentication import get_authentication_service
+
+ self.logger = logging.getLogger(f"{__name__}.{self.__class__.__name__}")
+
+ self.auth_context = auth_context
+
+ self.item_metadata_store = get_item_metadata_store()
+ self.authentication_service = get_authentication_service()
+ self.onelake_client_service = get_onelake_client_service()
+
+ self.tenant_object_id = None
+ self.workspace_object_id = None
+ self.item_object_id = None
+ self.display_name = None
+ self.description = None
+
+ def _ensure_not_null(self, obj: Any, name: str) -> Any:
+ if obj is None:
+ raise InvariantViolationException(f"Object reference must not be null: {name}")
+ return obj
+
+ def _ensure_condition(self, condition: bool, description: str) -> None:
+ if not condition:
+ raise InvariantViolationException(f"Condition violation detected: {description}")
+
+ @property
+ @abstractmethod
+ def item_type(self) -> str:
+ """Get the item type."""
+ pass
+
+ @abstractmethod
+ def get_metadata_class(self) -> Type[TItemMetadata]:
+ """Return the class type of the type-specific metadata."""
+ pass
+
+ async def load(self, item_id: UUID) -> None:
+ """Load an existing item or create a default one if not found."""
+ self.logger.info(f"Loading item {item_id}")
+ self.item_object_id = str(item_id)
+ tenant_object_id = self.auth_context.tenant_object_id
+
+ # Check if the item exists in storage
+ if not await self.item_metadata_store.exists(tenant_object_id, str(item_id)):
+ self.logger.error(f"Item {item_id} not found")
+ raise ItemMetadataNotFoundException(f"Item not found: {item_id}")
+
+ metadata_class = self.get_metadata_class()
+
+ item_metadata = await self.item_metadata_store.load(tenant_object_id,
+ str(item_id),
+ metadata_class)
+
+ self._ensure_not_null(item_metadata, "itemMetadata")
+ self._ensure_not_null(item_metadata.common_metadata, "itemMetadata.CommonMetadata")
+ self._ensure_not_null(item_metadata.type_specific_metadata, "itemMetadata.TypeSpecificMetadata")
+
+ common_metadata = item_metadata.common_metadata
+
+ if common_metadata.type != self.item_type:
+ self.logger.error(f"Unexpected item type '{common_metadata.type}'. Expected '{self.item_type}'")
+ raise UnexpectedItemTypeException(f"Unexpected item type '{common_metadata.type}'. Expected '{self.item_type}'")
+
+ self._ensure_condition(
+ str(common_metadata.tenant_object_id).lower() == str(tenant_object_id).lower(),
+ "TenantObjectId must match"
+ )
+ self._ensure_condition(
+ str(common_metadata.item_object_id) == str(item_id),
+ "ItemObjectId must match"
+ )
+
+ self.tenant_object_id = str(common_metadata.tenant_object_id)
+ self.workspace_object_id = str(common_metadata.workspace_object_id)
+ self.item_object_id = str(common_metadata.item_object_id)
+ self.display_name = common_metadata.display_name
+ self.description = common_metadata.description
+ self.set_type_specific_metadata(item_metadata.type_specific_metadata)
+ self.logger.info(f"Successfully loaded item {item_id}")
+
+
+ @abstractmethod
+ async def get_item_payload(self) -> Dict[str, Any]:
+ """Get the item payload."""
+ pass
+
+ async def create(self, workspace_id: UUID, item_id: UUID, create_request: CreateItemRequest) -> None:
+ """Create a new item."""
+ self.tenant_object_id = str(self.auth_context.tenant_object_id)
+ self.workspace_object_id = str(workspace_id)
+ self.item_object_id = str(item_id)
+ self.display_name = create_request.display_name
+ self.description = create_request.description
+
+ self.logger.info(f"Creating item {self.item_type} with ID {item_id} in workspace {workspace_id}")
+ self.logger.debug(f"Creation payload: {create_request.creation_payload}")
+
+ self.set_definition(create_request.creation_payload)
+ self.logger.debug(f"Creating item with tenant ID: {self.tenant_object_id}")
+ await self.save_changes()
+ self.logger.info(f"Successfully created item {item_id}")
+
+ async def update(self, update_request: UpdateItemRequest) -> None:
+ """Update an existing item."""
+ if not update_request:
+ self.logger.error(f"Invalid item payload for type {self.item_type}, item ID {self.item_object_id}")
+ raise InvalidItemPayloadException(self.item_type, self.item_object_id)
+
+ self.display_name = update_request.display_name
+ self.description = update_request.description
+
+ self.update_definition(update_request.update_payload)
+ await self.save_changes()
+ self.logger.info(f"Successfully updated item {self.item_object_id}")
+
+ async def delete(self) -> None:
+ """Delete an existing item."""
+ await self.item_metadata_store.delete(self.tenant_object_id, self.item_object_id)
+ self.logger.info(f"Successfully deleted item {self.item_object_id}")
+
+ @abstractmethod
+ def set_definition(self, payload: Dict[str, Any]) -> None:
+ """Set the item definition from a creation payload."""
+ pass
+
+ @abstractmethod
+ def update_definition(self, payload: Dict[str, Any]) -> None:
+ """Update the item definition from an update payload."""
+ pass
+
+ @abstractmethod
+ def get_type_specific_metadata(self) -> TItemMetadata:
+ """Get the type-specific metadata for this item."""
+ pass
+
+ @abstractmethod
+ def set_type_specific_metadata(self, metadata: TItemMetadata) -> None:
+ """Set the type-specific metadata for this item."""
+ pass
+
+ @abstractmethod
+ async def execute_job(self,
+ job_type: str,
+ job_instance_id: UUID,
+ invoke_type: JobInvokeType,
+ creation_payload: Dict[str, Any]) -> None:
+ """Execute a job for this item."""
+ pass
+
+ @abstractmethod
+ async def get_job_state(self, job_type: str, job_instance_id: UUID) -> ItemJobInstanceState:
+ """Get the state of a job instance."""
+ pass
+
+ async def cancel_job(self, job_type: str, job_instance_id: UUID) -> None:
+ """Cancel a job instance."""
+ # Import JobMetadata here to avoid circular imports
+ from models.job_metadata import JobMetadata
+
+ # Check if job metadata exists
+ job_metadata = None
+
+ if not await self.item_metadata_store.exists_job(self.tenant_object_id, self.item_object_id, str(job_instance_id)):
+ # Recreate missing job metadata
+ self.logger.warning(f"Recreating missing job {job_instance_id} metadata in tenant {self.tenant_object_id} item {self.item_object_id}")
+ # Create new JobMetadata instance
+ job_metadata = JobMetadata(
+ job_type=job_type,
+ job_instance_id=job_instance_id
+ )
+ else:
+ # Load existing job metadata
+ job_metadata = await self.item_metadata_store.load_job(self.tenant_object_id, self.item_object_id, str(job_instance_id))
+
+ # If already canceled, nothing to do
+ if job_metadata.is_canceled:
+ return
+
+ # Mark as canceled and set canceled time
+ job_metadata.canceled_time = datetime.datetime.now(datetime.timezone.utc)
+
+ # Update job metadata
+ await self.item_metadata_store.upsert_job(
+ self.tenant_object_id,
+ self.item_object_id,
+ str(job_instance_id),
+ job_metadata
+ )
+ self.logger.info(f"Canceled job {job_instance_id} for item {self.item_object_id}")
+
+ async def save_changes(self) -> None:
+ """Save changes to this item."""
+ self.logger.info(f"Saving item with tenant ID: {self.tenant_object_id}")
+ await self.store()
+ await self.allocate_and_free_resources()
+ await self.update_fabric()
+
+ async def store(self) -> None:
+ """Store the item metadata."""
+ self.logger.info(f"Storing item {self.item_object_id}")
+ common_metadata = CommonItemMetadata(
+ type=self.item_type,
+ tenant_object_id=self.tenant_object_id,
+ workspace_object_id=self.workspace_object_id,
+ item_object_id=self.item_object_id,
+ display_name=self.display_name,
+ description=self.description
+ )
+
+ type_specific_metadata = self.get_type_specific_metadata()
+
+ await self.item_metadata_store.upsert(
+ self.tenant_object_id,
+ self.item_object_id,
+ common_metadata,
+ type_specific_metadata
+ )
+
+ async def allocate_and_free_resources(self) -> None:
+ """Allocate and free resources as needed."""
+ pass
+
+ async def update_fabric(self) -> None:
+ """Notify Fabric of changes to this item."""
+ pass
+
+ def get_current_utc_time(self) -> str:
+ """Get the current UTC time as an ISO 8601 string."""
+ return datetime.datetime.now(datetime.timezone.utc).isoformat()
\ No newline at end of file
diff --git a/Backend/python/src/items/item1.py b/Backend/python/src/items/item1.py
new file mode 100644
index 0000000..804e8f8
--- /dev/null
+++ b/Backend/python/src/items/item1.py
@@ -0,0 +1,457 @@
+from datetime import datetime, timezone
+import json
+import logging
+import time
+import random
+import asyncio
+from typing import Dict, Any, Optional, List, Tuple, Type, Union
+from uuid import UUID
+
+from .base_item import ItemBase
+from exceptions.exceptions import AuthenticationUIRequiredException
+from models.authentication_models import AuthorizationContext
+from services.lakehouse_client_service import get_lakehouse_client_service
+from fabric_api.models.job_invoke_type import JobInvokeType
+from fabric_api.models.item_job_instance_state import ItemJobInstanceState
+from fabric_api.models.job_instance_status import JobInstanceStatus
+from constants.environment_constants import EnvironmentConstants
+from constants.onelake_constants import OneLakeConstants
+from constants.workload_constants import WorkloadConstants
+
+from models.item1_metadata import Item1Operator
+from constants.job_types import Item1JobType
+from models.job_metadata import JobMetadata
+from models.item_reference import ItemReference
+from models.item1_metadata import Item1Metadata
+from constants.item1_field_names import Item1FieldNames as Fields
+from exceptions.exceptions import DoubledOperandsOverflowException, ItemMetadataNotFoundException
+
+
+logger = logging.getLogger(__name__)
+
+
+class Item1(ItemBase[Dict[str, Any], Dict[str, Any]]):
+ # Static class variables
+ supported_operators = [op.value for op in Item1Operator if op != Item1Operator.UNDEFINED]
+ fabric_scopes = [f"{EnvironmentConstants.FABRIC_BACKEND_RESOURCE_ID}/Lakehouse.Read.All"]
+
+ def __init__(self, auth_context: AuthorizationContext):
+ """Initialize an Item1 instance."""
+ super().__init__(auth_context)
+
+ self._lakehouse_client_service = get_lakehouse_client_service()
+ self._metadata = Item1Metadata()
+
+ @property
+ def item_type(self) -> str:
+ return WorkloadConstants.ItemTypes.ITEM1
+
+ @property
+ def metadata(self) -> Item1Metadata:
+ if not self._metadata:
+ raise ValueError("The item object must be initialized before use")
+ return self._metadata
+
+ @property
+ def lakehouse(self) -> ItemReference:
+ return self.metadata.lakehouse
+
+ @property
+ def operand1(self) -> int:
+ return self.metadata.operand1
+
+ @property
+ def operand2(self) -> int:
+ return self.metadata.operand2
+
+ @property
+ def operator(self) -> str:
+ return self.metadata.operator
+
+ def get_metadata_class(self) -> Type[Item1Metadata]:
+ """Return the metadata class for Item1."""
+ return Item1Metadata
+
+ def is_valid_lakehouse(self) -> bool:
+ """
+ Check if the item has a valid lakehouse reference that can be used.
+
+ Returns:
+ bool: True if the lakehouse reference is valid and can be used, False otherwise.
+ """
+ return self._metadata.is_valid_lakehouse()
+
+ async def get_item_payload(self) -> Dict[str, Any]:
+ """Get the item payload."""
+
+ lakehouse_item = None
+ # Try to get lakehouse details if we have a valid lakehouse reference
+ if self.is_valid_lakehouse():
+ try:
+ token = await self.authentication_service.get_access_token_on_behalf_of(
+ self.auth_context,
+ self.fabric_scopes
+ )
+ lakehouse_item = await self._lakehouse_client_service.get_fabric_lakehouse(
+ token,
+ self.lakehouse.workspace_id,
+ self.lakehouse.id
+ )
+ except Exception as e:
+ self.logger.error(
+ f"Failed to retrieve FabricLakehouse for lakehouse: {self.lakehouse.id} "
+ f"in workspace: {self.lakehouse.workspace_id}. Error: {str(e)}"
+ )
+
+ client_metadata = self.metadata.to_client_metadata(lakehouse_item)
+ return {Fields.PAYLOAD_METADATA: client_metadata}
+
+ async def execute_job(self,
+ job_type: str,
+ job_instance_id: UUID,
+ invoke_type: JobInvokeType,
+ creation_payload: Dict[str, Any]) -> None:
+ """Execute a job for this item."""
+ if job_type.lower() == Item1JobType.INSTANT_JOB.lower():
+ self.logger.info(f"Instant Job {job_instance_id} executed.")
+ return
+
+ # Create job metadata using JobMetadata class
+ job_metadata = JobMetadata(
+ job_type=job_type,
+ job_instance_id=job_instance_id,
+ use_onelake=self._metadata.use_onelake
+ )
+
+ # Store initial job metadata
+ await self.item_metadata_store.upsert_job(
+ self.tenant_object_id,
+ self.item_object_id,
+ str(job_instance_id),
+ job_metadata
+ )
+
+ token = await self.authentication_service.get_access_token_on_behalf_of(
+ self.auth_context,
+ OneLakeConstants.ONELAKE_SCOPES
+ )
+
+ # Fetch operands and operator from metadata
+ op1 = self._metadata.operand1
+ op2 = self._metadata.operand2
+ calculation_operator = self._metadata.operator
+
+ # Perform calculation
+ result = self._calculate_result(op1, op2, calculation_operator)
+
+ # Simulate long running job if needed
+ if job_type.lower() == Item1JobType.LONG_RUNNING_CALCULATE_AS_TEXT.lower():
+ await asyncio.sleep(480) # 8 minutes
+
+ # Reload job metadata to check if it was cancelled
+ try:
+ job_metadata = await self.item_metadata_store.load_job(
+ self.tenant_object_id,
+ self.item_object_id,
+ str(job_instance_id)
+ )
+ except FileNotFoundError:
+ # Recreate missing job metadata
+ self.logger.warning(f"Recreating missing job {job_instance_id} metadata in tenant {self.tenant_object_id} item {self.item_object_id}")
+ await self.item_metadata_store.upsert_job(
+ self.tenant_object_id,
+ self.item_object_id,
+ str(job_instance_id),
+ job_metadata
+ )
+
+ # Only proceed if not canceled
+ if not job_metadata.is_canceled:
+ file_path = self._get_calculation_result_file_path(job_metadata)
+ await self.onelake_client_service.write_to_onelake_file(token, file_path, result)
+ self._metadata.last_calculation_result_location = file_path
+ await self.save_changes()
+ self.logger.info(f"Successfully saved result to OneLake at {file_path}")
+
+ async def get_job_state(self, job_type: str, job_instance_id: UUID) -> ItemJobInstanceState:
+ """Get the state of a job instance."""
+ # For instant jobs, always return completed status immediately
+ if job_type.lower() == Item1JobType.INSTANT_JOB.lower():
+ return ItemJobInstanceState(status=JobInstanceStatus.COMPLETED)
+
+ # Check if job metadata exists
+ if not await self.item_metadata_store.exists_job(self.tenant_object_id, self.item_object_id, str(job_instance_id)):
+ self.logger.error(
+ f"Job {job_instance_id} metadata does not exist in tenant {self.tenant_object_id} "
+ f"item {self.item_object_id}."
+ )
+ return ItemJobInstanceState(status=JobInstanceStatus.FAILED)
+
+        # Load job metadata (returns a JobMetadata object)
+ job_metadata = await self.item_metadata_store.load_job(
+ self.tenant_object_id,
+ self.item_object_id,
+ str(job_instance_id)
+ )
+
+ # Check if job was canceled
+ if job_metadata.is_canceled:
+ return ItemJobInstanceState(status=JobInstanceStatus.CANCELLED)
+
+ file_path = self._get_calculation_result_file_path(job_metadata)
+
+        # Check whether the result file already exists in OneLake
+ try:
+ token = await self.authentication_service.get_access_token_on_behalf_of(
+ self.auth_context,
+ OneLakeConstants.ONELAKE_SCOPES
+ )
+ file_exists = await self.onelake_client_service.check_if_file_exists(token, file_path)
+ return ItemJobInstanceState(status=JobInstanceStatus.COMPLETED) if file_exists else ItemJobInstanceState(status=JobInstanceStatus.INPROGRESS)
+ except Exception as token_ex:
+ self.logger.error(f"Error checking OneLake file existence: {str(token_ex)}")
+            # Don't fail the operation; report the job as still in progress
+ return ItemJobInstanceState(status=JobInstanceStatus.INPROGRESS)
+
+ def _get_calculation_result_file_path(self, job_metadata: Union[Dict[str, Any], JobMetadata]) -> str:
+ """Gets the path to the calculation result file in OneLake storage."""
+ # Handle both Dict and JobMetadata for backward compatibility
+ if isinstance(job_metadata, JobMetadata):
+ job_instance_id = job_metadata.job_instance_id
+ job_type = job_metadata.job_type
+ use_onelake = job_metadata.use_onelake
+ else:
+ # Dictionary-based job metadata
+ job_instance_id = job_metadata.get("job_instance_id")
+ job_type = job_metadata.get("job_type", "")
+ use_onelake = job_metadata.get("use_onelake", self.metadata.use_onelake)
+
+ if not job_instance_id:
+ error_msg = f"Cannot build calculation result file path: job_instance_id is missing in job metadata"
+ self.logger.error(error_msg)
+ raise ValueError(error_msg)
+
+        # TODO: Refactor to use job names from config.
+ type_to_filename = {
+ Item1JobType.SCHEDULED_JOB.lower(): f"CalculationResult_{job_instance_id}.txt",
+ Item1JobType.CALCULATE_AS_TEXT.lower(): f"CalculationResult_{job_instance_id}.txt",
+ Item1JobType.LONG_RUNNING_CALCULATE_AS_TEXT.lower(): f"CalculationResult_{job_instance_id}.txt",
+ Item1JobType.CALCULATE_AS_PARQUET.lower(): f"CalculationResult_{job_instance_id}.parquet"
+ }
+
+ # Get the filename based on job type or default to .txt
+ job_type_lower = job_type.lower() if isinstance(job_type, str) else job_type
+ filename = type_to_filename.get(job_type_lower, f"CalculationResult_{job_instance_id}.txt")
+
+ # Determine the file path based on storage location choice
+ if use_onelake:
+ # Use OneLake storage
+ return self.onelake_client_service.get_onelake_file_path(
+ self.workspace_object_id,
+ self.item_object_id,
+ filename
+ )
+ else:
+ if (self.metadata.lakehouse and
+ self.metadata.lakehouse.id and
+ self.metadata.lakehouse.workspace_id and
+ self.metadata.lakehouse.id != "00000000-0000-0000-0000-000000000000"):
+ # Use lakehouse path
+ return self.onelake_client_service.get_onelake_file_path(
+ self.metadata.lakehouse.workspace_id,
+ self.metadata.lakehouse.id,
+ filename
+ )
+
+ else:
+ error_msg = f"Cannot write to lakehouse or OneLake: missing lakehouse reference or useOneLake is false."
+ self.logger.error(error_msg)
+ raise ValueError(error_msg)
+
+ def _calculate_result(self, op1: int, op2: int, calculation_operator: Union[str, Item1Operator]) -> str:
+ """Calculate the result based on operands and operator."""
+ op_enum: Item1Operator
+ if isinstance(calculation_operator, str):
+ try:
+ op_enum = Item1Operator(calculation_operator) # Convert string to enum
+            except ValueError as ve:
+                raise ValueError(f"Unknown operator: {calculation_operator}") from ve
+ elif isinstance(calculation_operator, Item1Operator):
+ op_enum = calculation_operator
+ else:
+ raise ValueError(f"Unknown operator: {calculation_operator}")
+
+ if op_enum == Item1Operator.ADD:
+ return self._format_result(op1, op2, op_enum, op1 + op2)
+ elif op_enum == Item1Operator.SUBTRACT:
+ return self._format_result(op1, op2, op_enum, op1 - op2)
+ elif op_enum == Item1Operator.MULTIPLY:
+ return self._format_result(op1, op2, op_enum, op1 * op2)
+ elif op_enum == Item1Operator.DIVIDE:
+ if op2 != 0:
+ return self._format_result(op1, op2, op_enum, op1 // op2)
+ else:
+ raise ValueError("Cannot divide by zero.")
+ elif op_enum == Item1Operator.RANDOM:
+ if op1 > op2:
+ raise ValueError("For RANDOM operator, operand1 must not be greater than operand2.")
+ rand = random.randint(op1, op2)
+ return self._format_result(op1, op2, op_enum, rand)
+ elif op_enum == Item1Operator.UNDEFINED:
+ raise ValueError("Undefined operator.")
+ else:
+ raise ValueError(f"Unsupported operator: {calculation_operator}")
+
+ def _format_result(self, op1: int, op2: int, calculation_operator: Item1Operator, result: int) -> str:
+ """Format the calculation result."""
+ return f"op1 = {op1}, op2 = {op2}, operator = {calculation_operator.name.title()}, result = {result}"
+
+ def _validate_operands_before_double(self, operand1: int, operand2: int) -> None:
+ """Validate operands before doubling them."""
+ invalid_operands = []
+
+ if operand1 > 2**31 - 1 or operand1 < -2**31:
+ invalid_operands.append("Operand1")
+
+ if operand2 > 2**31 - 1 or operand2 < -2**31:
+ invalid_operands.append("Operand2")
+
+ if invalid_operands:
+ raise DoubledOperandsOverflowException(invalid_operands)
+
+ async def double(self) -> Tuple[int, int]:
+ """Double the operands produced by the item calculation."""
+
+ # Create metadata object from dict
+
+ operand1 = self.metadata.operand1
+ operand2 = self.metadata.operand2
+
+ self._validate_operands_before_double(operand1, operand2)
+ operand1 *= 2
+ operand2 *= 2
+
+ self.metadata.operand1 = operand1
+ self.metadata.operand2 = operand2
+
+ # Update the stored metadata
+ await self.save_changes()
+ return (operand1, operand2)
+
+ def set_definition(self, payload: Dict[str, Any]) -> None:
+ """Set the item definition from a creation payload."""
+ if not payload:
+ self.logger.info(f"No payload is provided for {self.item_type}, objectId={self.item_object_id}")
+ self._metadata = Item1Metadata()
+ return
+
+ item1_metadata_json = payload.get(Fields.PAYLOAD_METADATA)
+ if not item1_metadata_json:
+ raise ValueError(f"Invalid item payload for type {self.item_type}, item ID {self.item_object_id}")
+
+ lakehouse = item1_metadata_json.get(Fields.LAKEHOUSE_FIELD)
+ use_onelake = item1_metadata_json.get(Fields.USE_ONELAKE_FIELD, False)
+
+ if not lakehouse and not use_onelake:
+ self.logger.error("Missing Lakehouse reference and useOneLake is false")
+ raise ValueError(f"Missing Lakehouse reference for type {self.item_type}, item ID {self.item_object_id}")
+ self.logger.debug(f"Set definition payload: {payload}")
+ self._metadata = Item1Metadata.from_json_data(item1_metadata_json)
+ self.logger.debug(f"Set definition metadata object: {self._metadata}")
+
+
+ def update_definition(self, payload: Dict[str, Any]) -> None:
+ """Update the item definition from an update payload."""
+ self.logger.debug(f"Update payload: {payload}")
+ if not payload:
+ self.logger.info(f"No payload is provided for {self.item_type}, objectId={self.item_object_id}")
+ return
+ item1_metadata = payload.get(Fields.PAYLOAD_METADATA)
+ if not item1_metadata:
+ raise ValueError(f"Invalid item payload for type {self.item_type}, item ID {self.item_object_id}")
+
+ lakehouse = item1_metadata.get(Fields.LAKEHOUSE_FIELD)
+ use_onelake = item1_metadata.get(Fields.USE_ONELAKE_FIELD, False)
+
+ if not lakehouse and not use_onelake:
+ raise ValueError(f"Missing Lakehouse reference for type {self.item_type}, item ID {self.item_object_id}")
+
+        # TODO: The client payload should always carry the lakehouse workspace ID;
+        # remove this fallback lookup once that is guaranteed.
+        if lakehouse:
+            lakehouse_workspace_id = lakehouse.get(Fields.LAKEHOUSE_WORKSPACE_ID_FIELD) or lakehouse.get("workspaceId")
+            if not lakehouse_workspace_id:
+                self.logger.error(
+                    f"Missing workspace_id in lakehouse metadata (workspace_object_id: {self.workspace_object_id}); "
+                    "constructing the metadata object will probably fail."
+                )
+
+ last_calculation_result_location = ""
+ if self._metadata and self._metadata.last_calculation_result_location:
+ last_calculation_result_location = self._metadata.last_calculation_result_location
+
+ metadata = Item1Metadata.from_json_data(item1_metadata)
+ metadata.last_calculation_result_location = last_calculation_result_location
+
+ self.logger.debug(f"Update definition metadata OBJECT: {metadata}")
+ self.set_type_specific_metadata(metadata)
+
+
+ def set_type_specific_metadata(self, metadata: Item1Metadata) -> None:
+ """Set the type-specific metadata for this item."""
+ self._metadata = metadata.clone()
+
+
+ def get_type_specific_metadata(self) -> Item1Metadata:
+ """Get the type-specific metadata for this item."""
+ return self._metadata.clone()
+
+
+ async def get_last_result(self) -> str:
+ """Get the last calculation result."""
+ if not self.metadata.last_calculation_result_location or self.metadata.last_calculation_result_location.strip() == '':
+ return ""
+ try:
+ token = await self.authentication_service.get_access_token_on_behalf_of(
+ self.auth_context,
+ OneLakeConstants.ONELAKE_SCOPES
+ )
+
+ return await self.onelake_client_service.get_onelake_file(
+ token,
+ self._metadata.last_calculation_result_location
+ )
+ except AuthenticationUIRequiredException:
+ # Important: Re-raise AuthenticationUIRequiredException to ensure consent UI is triggered
+ self.logger.warning("User consent required for OneLake access")
+ raise
+ except FileNotFoundError as file_ex:
+ self.logger.error(f"File not found: {str(file_ex)}")
+ return ""
+ except Exception as e:
+ self.logger.error(f"Error getting last result: {str(e)}")
+ return ""
+
+
+ def _save_result_locally(self, job_instance_id: str, result: str) -> None:
+ """Save calculation result locally as a fallback when OneLake is unavailable."""
+ try:
+ import os
+
+ # Create results directory if it doesn't exist
+ results_dir = os.path.join(os.getcwd(), "results")
+ os.makedirs(results_dir, exist_ok=True)
+
+ # Create a filename based on job instance ID
+ filename = f"CalculationResult_{job_instance_id}.txt"
+ file_path = os.path.join(results_dir, filename)
+
+ # Write the result to a local file
+ with open(file_path, "w") as f:
+ f.write(result)
+
+ # Update metadata with local file path
+ self._metadata.last_calculation_result_location = file_path
+ self.logger.info(f"Saved result locally to {file_path}")
+ except Exception as e:
+ self.logger.error(f"Failed to save result locally: {str(e)}")
\ No newline at end of file
diff --git a/Backend/python/src/main.py b/Backend/python/src/main.py
new file mode 100644
index 0000000..864f14f
--- /dev/null
+++ b/Backend/python/src/main.py
@@ -0,0 +1,404 @@
+import asyncio
+import os
+import logging
+import logging.config
+import sys
+import time
+from typing import Optional, Set
+import uuid
+from contextlib import asynccontextmanager
+from fabric_api.impl.jobs_controller import cleanup_background_tasks
+from datetime import datetime, timezone
+from pathlib import Path
+
+import uvicorn
+from fastapi import FastAPI, Request, status
+from fastapi.middleware.cors import CORSMiddleware
+from fastapi.middleware.gzip import GZipMiddleware
+from fastapi.middleware.trustedhost import TrustedHostMiddleware
+from fastapi.responses import JSONResponse
+
+from services.configuration_service import get_configuration_service
+from core.service_initializer import get_service_initializer
+from core.service_registry import get_service_registry
+
+# Import controllers
+from fabric_api.apis.endpoint_resolution_api import router as EndpointResolutionApiRouter
+from fabric_api.apis.item_lifecycle_api import router as ItemLifecycleApiRouter
+from fabric_api.apis.jobs_api import router as JobsApiRouter
+from impl.fabric_extension_controller import router as fabric_extension_router
+from impl.onelake_controller import router as onelake_controller
+from impl.lakehouse_controller import router as lakehouse_controller
+
+from middleware.exception_handlers import register_exception_handlers
+
+def setup_logging(config_service=None) -> logging.Logger:
+ """Setup logging configuration based on settings."""
+ if config_service is None:
+ config_service = get_configuration_service()
+
+ # Map configuration log level to Python log level
+ log_level_mapping = {
+ "Trace": "DEBUG",
+ "Debug": "DEBUG",
+ "Information": "INFO",
+ "Warning": "WARNING",
+ "Error": "ERROR",
+ "Critical": "CRITICAL",
+ "None": "CRITICAL"
+ }
+
+ config_log_level = config_service.get_log_level()
+ log_level = log_level_mapping.get(config_log_level, "INFO")
+
+    # Resolve a per-user data directory: ~/.config/fabric_backend on POSIX, %APPDATA% (Roaming) on Windows
+ appdata = Path.home() / '.config' / 'fabric_backend'
+ if os.name == 'nt':
+ # On Windows, use APPDATA environment variable (Roaming)
+ appdata = os.environ.get('APPDATA')
+ if not appdata:
+ # Fallback if APPDATA is not set
+ appdata = os.path.expanduser('~\\AppData\\Roaming')
+
+ # Create logs directory
+ app_name = config_service.get_app_name().replace(" ", "_")
+ log_dir = Path(appdata) / app_name / 'logs'
+ log_dir.mkdir(parents=True, exist_ok=True)
+
+    # Date-stamped log file name; size-based rotation is configured in the handler below
+ log_filename = f'fabric_backend_{datetime.now().strftime("%Y%m%d")}.log'
+ log_file = log_dir / log_filename
+
+ # Logging configuration
+ logging_config = {
+ "version": 1,
+ "disable_existing_loggers": False,
+ "formatters": {
+ "default": {
+ "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+ "datefmt": "%Y-%m-%d %H:%M:%S"
+ },
+ "detailed": {
+ "format": "%(asctime)s - %(name)s - %(levelname)s - [%(filename)s:%(lineno)d] - %(funcName)s() - %(message)s",
+ "datefmt": "%Y-%m-%d %H:%M:%S"
+ }
+ },
+ "handlers": {
+ "console": {
+ "class": "logging.StreamHandler",
+ "formatter": "default",
+ "level": log_level,
+ "stream": "ext://sys.stdout"
+ },
+ "file": {
+ "class": "logging.handlers.RotatingFileHandler",
+ "formatter": "detailed",
+ "filename": str(log_file),
+ "maxBytes": 10485760, # 10MB
+ "backupCount": 5,
+ "level": "INFO",
+ "encoding": "utf-8"
+ }
+ },
+ "root": {
+ "level": log_level,
+ "handlers": ["console", "file"]
+ },
+ "loggers": {
+ "uvicorn": {
+ "handlers": ["console"],
+ "level": "INFO",
+ "propagate": False
+ },
+ "uvicorn.error": {
+ "handlers": ["console"],
+ "propagate": False,
+ "level": "INFO"
+ },
+ "uvicorn.access": {
+ "handlers": ["console"],
+ "level": "WARNING",
+ "propagate": False
+ },
+ "httpx": {
+ "level": "WARNING"
+ },
+ "httpcore": {
+ "level": "WARNING"
+ },
+ "asyncio": {
+ "level": "WARNING"
+ }
+ }
+ }
+
+ logging.config.dictConfig(logging_config)
+ logger = logging.getLogger(__name__)
+ logger.info(f"Logging initialized - Level: {log_level}, File: {log_file}")
+
+ return logger
+
+# Global state for shutdown handling
+class ApplicationState:
+ def __init__(self):
+ self.shutdown_event = asyncio.Event()
+ self.is_shutting_down = False
+ self.logger: Optional[logging.Logger] = None
+ self.active_requests: Set[str] = set()
+ self.request_lock = asyncio.Lock()
+
+app_state = ApplicationState()
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+ """Handle application lifecycle with proper startup and shutdown."""
+ # Startup
+ startup_start = time.time()
+
+ # Get configuration service (will create if not exists)
+ config_service = get_configuration_service()
+
+ # Setup logging with configuration
+ logger = setup_logging(config_service)
+ app_state.logger = logger
+
+ logger.info("=" * 60)
+ logger.info(f"Starting {config_service.get_app_name()}...")
+ logger.info(f"Environment: {config_service.get_environment()}")
+ logger.info(f"Python Version: {sys.version}")
+ logger.info(f"Platform: {sys.platform}")
+ logger.info(f"Process ID: {os.getpid()}")
+ logger.info("=" * 60)
+
+ logger.info("Configuration Summary:")
+ logger.info(f" - Host: {config_service.get_host()}")
+ logger.info(f" - Port: {config_service.get_port()}")
+ logger.info(f" - Debug: {config_service.is_debug()}")
+ logger.info(f" - Log Level: {config_service.get_log_level()}")
+ logger.info(f" - Shutdown Timeout: {config_service.get_shutdown_timeout()}s")
+
+ try:
+ # Initialize all services with parallel execution
+ initializer = get_service_initializer()
+ await initializer.initialize_all_services()
+
+ startup_time = time.time() - startup_start
+ logger.info(f"✓ Application started successfully in {startup_time:.2f}s")
+ logger.info(f"✓ Server: {config_service.get_http_endpoint()}")
+ logger.info(f"✓ Debug Mode: {config_service.is_debug()}")
+ logger.info("=" * 60)
+
+ except Exception as e:
+ logger.error(f"Failed to start application: {str(e)}", exc_info=True)
+ raise
+
+ yield
+
+ # Shutdown
+ shutdown_start_time = time.time()
+ logger.info("=" * 60)
+ logger.info("Application shutdown initiated...")
+ # Mark as shutting down
+ app_state.is_shutting_down = True
+ app_state.shutdown_event.set()
+
+    # Get the shutdown timeout and split it between cleanup phases
+    total_timeout = config_service.get_shutdown_timeout()
+    tasks_cleanup_timeout = total_timeout * 0.6    # 60% for background tasks
+    service_cleanup_timeout = total_timeout * 0.3  # 30% for services (remaining 10% is margin)
+
+ # 1. Clean up background tasks
+ try:
+ logger.info(f"Cleaning up background tasks (timeout: {tasks_cleanup_timeout:.1f}s)...")
+ await cleanup_background_tasks(timeout=tasks_cleanup_timeout)
+ logger.info("✓ Background tasks cleanup completed")
+ except Exception as e:
+ logger.error(f"Error during background tasks cleanup: {str(e)}", exc_info=True)
+
+ # 2. Clean up services
+ try:
+ registry = get_service_registry()
+ logger.info(f"Cleaning up services (timeout: {service_cleanup_timeout:.1f}s)...")
+ await asyncio.wait_for(registry.cleanup(), timeout=service_cleanup_timeout)
+ logger.info("✓ Service registry cleanup completed")
+ except asyncio.TimeoutError:
+ logger.warning("⚠ Service registry cleanup timed out")
+ except Exception as e:
+ logger.error(f"Error during service registry cleanup: {str(e)}", exc_info=True)
+
+ shutdown_duration = time.time() - shutdown_start_time
+ logger.info(f"✓ Application shutdown completed in {shutdown_duration:.2f}s")
+ logger.info("=" * 60)
+
+# Create FastAPI app
+def create_app() -> FastAPI:
+ """Create and configure the FastAPI application."""
+ config_service = get_configuration_service()
+
+ app = FastAPI(
+ title=config_service.get_app_name(),
+ description="Python implementation of Microsoft Fabric backend sample workload",
+ version="1.0.0",
+ root_path="/workload",
+ lifespan=lifespan,
+ docs_url="/api/docs" if config_service.is_debug() else None,
+ redoc_url="/api/redoc" if config_service.is_debug() else None,
+ openapi_url="/api/openapi.json" if config_service.is_debug() else None
+ )
+
+ # Configure middleware
+
+ # Security middleware (only in production)
+ if config_service.is_production():
+ app.add_middleware(
+ TrustedHostMiddleware,
+ allowed_hosts=config_service.get_allowed_hosts()
+ )
+
+ # Compression
+ app.add_middleware(GZipMiddleware, minimum_size=1000)
+
+ # CORS
+ app.add_middleware(
+ CORSMiddleware,
+ allow_origins=config_service.get_cors_origins(),
+ allow_credentials=True,
+ allow_methods=["*"],
+ allow_headers=["*"],
+ expose_headers=["X-Request-ID", "X-Process-Time"]
+ )
+
+ # Register exception handlers
+ register_exception_handlers(app)
+
+    # Include routers (each router defines its own prefix)
+ app.include_router(EndpointResolutionApiRouter)
+ app.include_router(ItemLifecycleApiRouter)
+ app.include_router(JobsApiRouter)
+ app.include_router(fabric_extension_router)
+ app.include_router(onelake_controller)
+ app.include_router(lakehouse_controller)
+
+ return app
+
+# Create app instance
+app = create_app()
+
+@app.get("/health", tags=["monitoring"])
+async def health_check():
+ """Health check endpoint for monitoring."""
+ return {
+ "status": "healthy",
+ "timestamp": datetime.now(timezone.utc).isoformat(),
+ "version": app.version,
+ "environment": os.environ.get('PYTHON_ENVIRONMENT', 'Development')
+ }
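+
+# Illustrative check, assuming the configured host/port:
+#   curl http://<host>:<port>/health
+#   -> {"status": "healthy", "timestamp": "...", "version": "1.0.0", "environment": "..."}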
+
+@app.get("/ready", tags=["monitoring"])
+async def readiness_check():
+ """Readiness check for Kubernetes and load balancers."""
+ try:
+ registry = get_service_registry()
+
+ if not registry.is_initialized:
+ return JSONResponse(
+ status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
+ content={
+ "status": "not ready",
+ "error": "Services not initialized",
+ "timestamp": datetime.now(timezone.utc).isoformat()
+ }
+ )
+
+ return {
+ "status": "ready",
+ "timestamp": datetime.now(timezone.utc).isoformat(),
+ "services": registry.get_all_services()
+ }
+ except Exception as e:
+ return JSONResponse(
+ status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
+ content={
+ "status": "not ready",
+ "error": str(e),
+ "timestamp": datetime.now(timezone.utc).isoformat()
+ }
+ )
+
+@app.middleware("http")
+async def add_process_time_header(request: Request, call_next):
+ """Add request processing time and request ID headers."""
+ # Check if shutting down
+ if app_state.is_shutting_down:
+ return JSONResponse(
+ status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
+ content={"message": "Server is shutting down"}
+ )
+
+ # Generate or get request ID
+ request_id = request.headers.get("X-Request-ID", str(uuid.uuid4()))
+ request.state.request_id = request_id
+
+ # Track active request
+ async with app_state.request_lock:
+ app_state.active_requests.add(request_id)
+
+ start_time = time.time()
+
+ try:
+ response = await call_next(request)
+ process_time = time.time() - start_time
+
+ # Add headers
+ response.headers["X-Process-Time"] = f"{process_time:.3f}"
+ response.headers["X-Request-ID"] = request_id
+
+ # Log request (skip health checks to reduce noise)
+ if request.url.path not in ["/health", "/ready"] and app_state.logger:
+ app_state.logger.info(
+ f"{request.method} {request.url.path} → {response.status_code} "
+ f"({process_time:.3f}s) [ID: {request_id[:8]}]"
+ )
+
+ return response
+
+ except Exception as e:
+ process_time = time.time() - start_time
+ if app_state.logger:
+ app_state.logger.error(
+ f"{request.method} {request.url.path} → ERROR "
+ f"({process_time:.3f}s) [ID: {request_id[:8]}]: {str(e)}",
+ exc_info=True
+ )
+ raise
+ finally:
+ # Remove from active requests
+ async with app_state.request_lock:
+ app_state.active_requests.discard(request_id)
+
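+# Illustrative headers added by the middleware above (values are examples):
+#   X-Process-Time: 0.042
+#   X-Request-ID: 3fa85f64-5717-4562-b3fc-2c963f66afa6
+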
+def main():
+ """Main entry point for the application."""
+ # Get configuration first
+ config_service = get_configuration_service()
+
+ uvicorn.run(
+ "main:app",
+ host=config_service.get_host(),
+ port=config_service.get_port(),
+ reload=False,
+ workers=config_service.get_workers(),
+ loop="asyncio",
+ log_config=None,
+ access_log=False,
+ limit_concurrency=1000,
+ limit_max_requests=10000 if config_service.is_production() else None,
+ timeout_keep_alive=5,
+ timeout_graceful_shutdown=max(config_service.get_shutdown_timeout() + 10, 30),
+ lifespan="on",
+ # SSL configuration
+ ssl_keyfile=os.environ.get("SSL_KEYFILE") if config_service.is_production() else None,
+ ssl_certfile=os.environ.get("SSL_CERTFILE") if config_service.is_production() else None,
+ )
+
+if __name__ == "__main__":
+ main()
\ No newline at end of file
diff --git a/Backend/python/src/middleware/exception_handlers.py b/Backend/python/src/middleware/exception_handlers.py
new file mode 100644
index 0000000..de22d49
--- /dev/null
+++ b/Backend/python/src/middleware/exception_handlers.py
@@ -0,0 +1,129 @@
+from uuid import UUID
+from fastapi import Request, FastAPI
+import logging
+from exceptions.base_exception import WorkloadExceptionBase
+from exceptions.exceptions import (
+ UnauthorizedException,
+ TooManyRequestsException,
+ InternalErrorException,
+ InvariantViolationException,
+ DoubledOperandsOverflowException,
+ ItemMetadataNotFoundException,
+ AuthenticationException,
+ AuthenticationUIRequiredException,
+ InvalidParameterException
+)
+
+logger = logging.getLogger(__name__)
+
+async def workload_exception_handler(request: Request, exc: WorkloadExceptionBase):
+ """
+ Handle all workload-specific exceptions
+ """
+ logger.error(f"Workload exception: {exc}\r\n{exc.to_telemetry_string()}")
+ return exc.to_response()
+
+async def unauthorized_exception_handler(request: Request, exc: UnauthorizedException):
+ """Handle unauthorized exceptions."""
+ logger.error(f"Unauthorized access: {str(exc)}")
+ return exc.to_response()
+
+async def too_many_requests_exception_handler(request: Request, exc: TooManyRequestsException):
+ """Handle rate limiting exceptions."""
+ logger.warning(f"Rate limiting: {str(exc)}")
+ return exc.to_response()
+
+async def internal_error_exception_handler(request: Request, exc: InternalErrorException):
+ """Handle internal server errors."""
+ logger.error(f"Internal error: {exc.to_telemetry_string()}")
+ return exc.to_response()
+
+async def doubled_operands_overflow_exception_handler(request: Request, exc: DoubledOperandsOverflowException):
+ """Handle doubled operands overflow errors."""
+ logger.warning(f"Doubled operands overflow: {str(exc)}")
+ return exc.to_response()
+
+async def item_metadata_not_found_exception_handler(request: Request, exc: ItemMetadataNotFoundException):
+ """Handle item metadata not found errors."""
+ logger.warning(f"Item metadata not found: {str(exc)}")
+ return exc.to_response()
+
+async def authentication_exception_handler(request: Request, exc: AuthenticationException):
+ """Handle authentication errors."""
+ logger.error(f"Authentication error: {str(exc)}")
+ return exc.to_response()
+
+async def authentication_ui_required_exception_handler(request: Request, exc: AuthenticationUIRequiredException):
+ logger.error("Failed to acquire a token, user interaction is required, returning '401 Unauthorized' with WWW-Authenticate header")
+ response = exc.to_response()
+ # Add WWW-Authenticate header
+ response.headers["WWW-Authenticate"] = exc.to_www_authenticate_header()
+ return response
+
+async def value_error_handler(request: Request, exc: ValueError):
+ """Handle ValueError exceptions by converting to InvalidParameterException."""
+ logger.error(f"ValueError caught: {str(exc)}")
+
+ error_message = str(exc)
+ parameter_name = "unknown"
+
+ # Try to extract parameter info from the request path
+ path_params = request.path_params
+
+ # Common ValueError patterns
+ if "badly formed hexadecimal UUID string" in error_message:
+ # Try to identify which UUID parameter failed
+ for param_name, param_value in path_params.items():
+ if "id" in param_name.lower():
+ try:
+ UUID(str(param_value))
+ except ValueError:
+ parameter_name = param_name
+ break
+ else:
+ parameter_name = "UUID"
+ elif "invalid literal for int()" in error_message:
+ parameter_name = "integer value"
+ elif "could not convert string to float" in error_message:
+ parameter_name = "numeric value"
+
+ # Create InvalidParameterException
+ invalid_param_exc = InvalidParameterException(
+ parameter_name=parameter_name,
+ message=error_message
+ )
+
+ # Return the formatted response
+ return invalid_param_exc.to_response()
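+
+# Illustrative (hypothetical route): GET /items/not-a-uuid makes UUID() raise
+# ValueError("badly formed hexadecimal UUID string"), which this handler converts
+# into an InvalidParameterException response naming the offending path parameter.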
+
+async def invariant_violation_exception_handler(request: Request, exc: InvariantViolationException):
+ """Handle invariant violation errors."""
+ logger.error(f"Invariant violation: {exc.to_telemetry_string()}")
+ return exc.to_response()
+
+async def global_exception_handler(request: Request, exc: Exception):
+    """Fallback handler for any otherwise-unhandled exception."""
+    logger.error(f"Unhandled exception: {exc}", exc_info=True)
+ # Return InternalErrorException response
+ internal_error = InternalErrorException("Unexpected error")
+ return internal_error.to_response()
+
+def register_exception_handlers(app: FastAPI):
+ """
+ Register all exception handlers with the FastAPI app.
+ """
+ # Register specific handlers for better logging control
+ app.add_exception_handler(AuthenticationUIRequiredException, authentication_ui_required_exception_handler)
+ app.add_exception_handler(AuthenticationException, authentication_exception_handler)
+ app.add_exception_handler(UnauthorizedException, unauthorized_exception_handler)
+ app.add_exception_handler(TooManyRequestsException, too_many_requests_exception_handler)
+ app.add_exception_handler(InvariantViolationException, invariant_violation_exception_handler)
+ app.add_exception_handler(InternalErrorException, internal_error_exception_handler)
+ app.add_exception_handler(DoubledOperandsOverflowException, doubled_operands_overflow_exception_handler)
+ app.add_exception_handler(ItemMetadataNotFoundException, item_metadata_not_found_exception_handler)
+ app.add_exception_handler(ValueError, value_error_handler)
+
+ # Register base handler as fallback for any WorkloadExceptionBase we didn't explicitly handle
+ app.add_exception_handler(WorkloadExceptionBase, workload_exception_handler)
+
+ # Register global exception handler for all other exceptions
+ app.add_exception_handler(Exception, global_exception_handler)
diff --git a/Backend/python/src/models/__init__.py b/Backend/python/src/models/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Backend/python/src/models/authentication_models.py b/Backend/python/src/models/authentication_models.py
new file mode 100644
index 0000000..967104b
--- /dev/null
+++ b/Backend/python/src/models/authentication_models.py
@@ -0,0 +1,75 @@
+from typing import Any, ClassVar, List, Optional
+import re
+from enum import IntEnum
+from exceptions.exceptions import AuthenticationException
+from pydantic import BaseModel, Field, ConfigDict
+
+class Claim(BaseModel):
+ """
+ Represents an identity claim.
+ """
+ type: str = Field(..., description="The claim type")
+ value: Any = Field(..., description="The claim value")
+
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
+class AuthorizationContext(BaseModel):
+ """Context containing information about an authenticated request."""
+ original_subject_token: Optional[str] = None
+ tenant_object_id: Optional[str] = None
+ claims: List[Claim] = Field(default_factory=list)
+
+ @property
+ def has_subject_context(self) -> bool:
+ """Gets a value indicating whether there is subject context."""
+ return self.original_subject_token is not None and len(self.original_subject_token) > 0
+
+ @property
+ def object_id(self) -> Optional[str]:
+ """Gets the object ID from the claims."""
+ for claim in self.claims:
+ if claim.type == "oid":
+ return claim.value
+ return None
+
+class TokenVersion(IntEnum):
+ """Token version enumeration"""
+ V1 = 1
+ V2 = 2
+
+class SubjectAndAppToken(BaseModel):
+ """Container for subject and app tokens."""
+ HEADER_PATTERN: ClassVar[str] = r'^SubjectAndAppToken1\.0 subjectToken="(eyJ[\w\-\._]+)", appToken="(eyJ[\w\-\._]+)"$'
+ HEADER_PATTERN_EMPTY_SUBJECT: ClassVar[str] = r'^SubjectAndAppToken1\.0 subjectToken="", appToken="(eyJ[\w\-\._]+)"$'
+ subject_token: Optional[str] = None
+ app_token: str
+
+ @classmethod
+ def parse(cls, auth_header_value: str) -> 'SubjectAndAppToken':
+ """Parse the SubjectAndAppToken from the authorization header."""
+ if not auth_header_value:
+ raise AuthenticationException("Invalid Authorization header")
+
+ # First, try matching the pattern with a non-empty subject token
+ match = re.fullmatch(cls.HEADER_PATTERN, auth_header_value)
+ if match:
+ subject_token = match.group(1)
+ app_token = match.group(2)
+ return cls(subject_token=subject_token, app_token=app_token)
+
+ # If no match, try matching the pattern with an empty subject token
+ match_empty_subject = re.fullmatch(cls.HEADER_PATTERN_EMPTY_SUBJECT, auth_header_value)
+ if match_empty_subject:
+ app_token = match_empty_subject.group(1)
+ return cls(subject_token=None, app_token=app_token)
+
+ # If no match, raise an exception
+ raise AuthenticationException("Invalid SubjectAndAppToken header format")
+
+ @staticmethod
+ def generate_authorization_header_value(subject_token: Optional[str], app_token: str) -> str:
+ """Generates the string value for the Authorization header with SubjectAndAppToken1.0 scheme."""
+ # Ensure subject_token is an empty string if None, to match C# behavior where subjectToken=""
+ effective_subject_token = subject_token if subject_token is not None else ""
+ return f'SubjectAndAppToken1.0 subjectToken="{effective_subject_token}", appToken="{app_token}"'
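+
+# Illustrative header value produced/parsed above (tokens truncated):
+#   SubjectAndAppToken1.0 subjectToken="eyJ...", appToken="eyJ..."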
diff --git a/Backend/python/src/models/common_item_metadata.py b/Backend/python/src/models/common_item_metadata.py
new file mode 100644
index 0000000..88fc6b5
--- /dev/null
+++ b/Backend/python/src/models/common_item_metadata.py
@@ -0,0 +1,25 @@
+from typing import Optional
+from datetime import datetime, timezone
+from uuid import UUID
+from pydantic import BaseModel, Field, ConfigDict
+
+
+class CommonItemMetadata(BaseModel):
+ """
+ Represents common metadata for Fabric items.
+ """
+ type: str = Field(..., description="The type of the item")
+ tenant_object_id: UUID = Field(..., description="The tenant object ID")
+ workspace_object_id: UUID = Field(..., description="The workspace object ID")
+ item_object_id: UUID = Field(..., description="The item object ID")
+ display_name: Optional[str] = Field(None, description="The display name of the item")
+ description: Optional[str] = Field(None, description="The description of the item")
+ last_updated_date_time_utc: datetime = Field(
+ default_factory=lambda: datetime.now(timezone.utc),
+ description="The UTC timestamp when the item was last updated"
+ )
+
+ model_config = ConfigDict(
+ from_attributes=True,
+ populate_by_name=True
+ )
\ No newline at end of file
diff --git a/Backend/python/src/models/fabric_item.py b/Backend/python/src/models/fabric_item.py
new file mode 100644
index 0000000..5720430
--- /dev/null
+++ b/Backend/python/src/models/fabric_item.py
@@ -0,0 +1,40 @@
+from typing import Optional
+from pydantic import BaseModel, Field
+from .item_reference import ItemReference
+
+class FabricItem(ItemReference):
+ """
+ Model representing a Microsoft Fabric item.
+ """
+ type: Optional[str] = Field(
+ None,
+ description="The type of the Fabric item"
+ )
+ display_name: Optional[str] = Field(
+ None,
+ description="The display name of the Fabric item",
+ alias="displayName"
+ )
+ description: Optional[str] = Field(
+ None,
+ description="The description of the Fabric item"
+ )
+ workspace_name: Optional[str] = Field(
+ None,
+ description="The name of the workspace containing this item",
+ alias="workspaceName"
+ )
+
+ model_config = {
+ "populate_by_name": True,
+ "json_schema_extra": {
+ "example": {
+ "id": "12345678-1234-5678-abcd-1234567890ab",
+ "workspaceId": "98765432-1234-5678-abcd-1234567890ab",
+ "type": "Lakehouse",
+ "displayName": "Sample Lakehouse",
+ "description": "A sample lakehouse for storing data",
+ "workspaceName": "My Workspace"
+ }
+ }
+ }
\ No newline at end of file
diff --git a/Backend/python/src/models/item1_metadata.py b/Backend/python/src/models/item1_metadata.py
new file mode 100644
index 0000000..efa29cb
--- /dev/null
+++ b/Backend/python/src/models/item1_metadata.py
@@ -0,0 +1,204 @@
+"""
+Python implementation of Item1Metadata model.
+"""
+from enum import IntEnum
+from typing import Any, Dict, Optional, TypeVar, Generic, ClassVar
+from pydantic import BaseModel, Field, ConfigDict, field_serializer
+
+from constants.item1_field_names import Item1FieldNames as Fields
+from .fabric_item import FabricItem
+from .item_reference import ItemReference
+
+
+class Item1Operator(IntEnum):
+ UNDEFINED = 0
+ ADD = 1
+ SUBTRACT = 2
+ MULTIPLY = 3
+ DIVIDE = 4
+ RANDOM = 5
+
+    @classmethod
+    def from_string(cls, value: str) -> 'Item1Operator':
+        """Convert a string operator name to the enum value (case-insensitive)."""
+        for member in cls:
+            if member.name.lower() == value.lower():
+                return member
+        raise ValueError(f"Unknown operator: {value}")
+
+    @classmethod
+    def _missing_(cls, value):
+        """Handle string values by converting them to enum members."""
+        if isinstance(value, str):
+            # Case-insensitive match against the member name ("add", "Add", "ADD")
+            for member in cls:
+                if member.name.lower() == value.lower():
+                    return member
+        # Valid int values never reach _missing_, so no int branch is needed.
+        return None  # Let the enum machinery raise ValueError if no match
+
+ def __str__(self) -> str:
+ """Return a user-friendly string representation of the operator."""
+ return self.name.capitalize()
+
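+# Illustrative coercions handled by _missing_ and __str__ above:
+#   Item1Operator("add") -> Item1Operator.ADD
+#   str(Item1Operator.ADD) -> "Add"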
+
+# Generic type variable for the lakehouse reference
+TLakehouse = TypeVar('TLakehouse')
+
+
+class Item1MetadataBase(BaseModel, Generic[TLakehouse]):
+ """
+ Base class for Item1 metadata containing common properties.
+ """
+ operand1: int = Field(
+ default=0,
+ description="The first operand for the calculation"
+ )
+ operand2: int = Field(
+ default=0,
+ description="The second operand for the calculation"
+ )
+ operator: Item1Operator = Field(
+ default=Item1Operator.UNDEFINED,
+ description="The operation to perform on the operands"
+ )
+ lakehouse: Optional[TLakehouse] = Field(
+ default=None,
+ description="Reference to the lakehouse used by this item"
+ )
+ use_onelake: bool = Field(
+ default=False,
+ description="Flag indicating whether to use OneLake",
+ alias="useOneLake"
+ )
+ last_calculation_result_location: Optional[str] = Field(
+ default=None,
+ description="The location of the last calculation result",
+ alias="lastCalculationResultLocation"
+ )
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ use_enum_values=False,
+ )
+
+ @field_serializer('operator')
+ def serialize_operator(self, value: Item1Operator) -> str:
+ """Serialize Item1Operator to string."""
+ return str(value)
+
+
+class Item1Metadata(Item1MetadataBase[ItemReference]):
+ """
+ Represents the core metadata for item1 stored within the system's storage.
+ """
+ DEFAULT: ClassVar[Optional['Item1Metadata']] = None
+
+ @classmethod
+ def from_json_data(cls, metadata_dict: Dict[str, Any]) -> 'Item1Metadata':
+ """
+ Creates an Item1Metadata instance from a dictionary.
+ Handles nested objects and type conversions.
+
+ Args:
+ metadata_dict: Dictionary containing metadata values
+
+ Returns:
+ An Item1Metadata instance populated with values from the dictionary
+ """
+ if not metadata_dict:
+ return cls(lakehouse=ItemReference(workspace_id="", id=""))
+
+ # Create lakehouse reference
+ lakehouse_ref = ItemReference(workspace_id="", id="")
+ if metadata_dict.get(Fields.LAKEHOUSE_FIELD):
+ lakehouse_dict = metadata_dict[Fields.LAKEHOUSE_FIELD]
+ workspace_id = lakehouse_dict.get(Fields.LAKEHOUSE_WORKSPACE_ID_FIELD)
+ lakehouse_id = lakehouse_dict.get(Fields.LAKEHOUSE_ID_FIELD) or lakehouse_dict.get("id", "")
+ lakehouse_ref = ItemReference(workspace_id=workspace_id, id=lakehouse_id)
+
+ operator_value = metadata_dict.get(Fields.OPERATOR_FIELD, Item1Operator.UNDEFINED)
+ try:
+ operator = Item1Operator(operator_value)
+ except ValueError:
+ operator = Item1Operator.UNDEFINED
+
+ return cls(
+ operand1=metadata_dict.get(Fields.OPERAND1_FIELD, 0),
+ operand2=metadata_dict.get(Fields.OPERAND2_FIELD, 0),
+ operator=operator,
+ lakehouse=lakehouse_ref,
+ use_onelake=metadata_dict.get(Fields.USE_ONELAKE_FIELD, False),
+ last_calculation_result_location=metadata_dict.get(
+ Fields.RESULT_LOCATION_FIELD, "")
+ )
+
+ def clone(self) -> 'Item1Metadata':
+ """
+ Creates a clone of this Item1Metadata object.
+ """
+        # TODO: consider deepcopy(self); this copy shares the lakehouse reference.
+ return Item1Metadata(
+ operand1=self.operand1,
+ operand2=self.operand2,
+ operator=self.operator,
+ lakehouse=self.lakehouse,
+ use_onelake=self.use_onelake,
+ last_calculation_result_location=self.last_calculation_result_location
+ )
+
+ def is_valid_lakehouse(self) -> bool:
+ """
+ Check if the item has a valid lakehouse reference that can be used.
+
+ Returns:
+ bool: True if the lakehouse reference is valid and can be used, False otherwise.
+ """
+ return bool(self.lakehouse and
+ self.lakehouse.id and
+ self.lakehouse.id != "00000000-0000-0000-0000-000000000000" and
+ self.lakehouse.workspace_id)
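+
+    # Illustrative: an empty or all-zeros lakehouse id is treated as "no lakehouse",
+    # e.g. Item1Metadata.DEFAULT.is_valid_lakehouse() -> False.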
+
+    def to_client_metadata(self, lakehouse: Optional[FabricItem]) -> 'Item1ClientMetadata':
+ """
+ Converts this Item1Metadata to an Item1ClientMetadata object.
+ Args:
+ lakehouse: The FabricItem representing the lakehouse
+
+ Returns:
+ An Item1ClientMetadata object with properties from this object
+ """
+ if lakehouse is None:
+ lakehouse_param = FabricItem(id="", workspace_id="", type="", display_name="")
+ else:
+ lakehouse_param = lakehouse
+
+ return Item1ClientMetadata(
+ operand1=self.operand1,
+ operand2=self.operand2,
+ operator=str(self.operator),
+ lakehouse=lakehouse_param,
+ use_onelake=self.use_onelake
+ )
+
+
+class Item1ClientMetadata(Item1MetadataBase[FabricItem]):
+ """
+ Represents extended metadata for item1, including additional information
+ about the associated lakehouse, tailored for client-side usage.
+ """
+ pass
+
+
+# Initialize the DEFAULT class variable
+Item1Metadata.DEFAULT = Item1Metadata(lakehouse=ItemReference(id="", workspace_id=""))
\ No newline at end of file
diff --git a/Backend/python/src/models/item_metadata.py b/Backend/python/src/models/item_metadata.py
new file mode 100644
index 0000000..34ae129
--- /dev/null
+++ b/Backend/python/src/models/item_metadata.py
@@ -0,0 +1,22 @@
+from typing import Generic, TypeVar
+from pydantic import BaseModel, ConfigDict
+
+from .common_item_metadata import CommonItemMetadata
+
+# TypeVar for the generic type parameter
+T = TypeVar('T')
+
+class ItemMetadata(BaseModel, Generic[T]):
+ """
+ Model representing metadata for an item in Fabric.
+ Attributes:
+ common_metadata: The common metadata shared by all items
+ type_specific_metadata: Type-specific metadata that varies by item type
+ """
+ common_metadata: CommonItemMetadata
+ type_specific_metadata: T
+
+ model_config = ConfigDict(
+ from_attributes=True,
+ populate_by_name=True
+ )
\ No newline at end of file
diff --git a/Backend/python/src/models/item_reference.py b/Backend/python/src/models/item_reference.py
new file mode 100644
index 0000000..4f5a697
--- /dev/null
+++ b/Backend/python/src/models/item_reference.py
@@ -0,0 +1,32 @@
+from typing import Union, Optional
+from uuid import UUID
+from pydantic import BaseModel, Field
+
+
+class ItemReference(BaseModel):
+ """
+ A reference to an item in a workspace.
+
+ Attributes:
+ workspace_id: The ID of the workspace containing the item
+ id: The ID of the item
+ """
+ workspace_id: Optional[Union[str, UUID]] = Field(
+ default="00000000-0000-0000-0000-000000000000",
+ description="The ID of the workspace containing the item",
+ alias="workspaceId"
+ )
+ id: Optional[Union[str, UUID]] = Field(
+ default="00000000-0000-0000-0000-000000000000",
+ description="The ID of the item"
+ )
+
+ model_config = {
+ "populate_by_name": True,
+ "json_schema_extra": {
+ "example": {
+ "workspaceId": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
+ "id": "6ba7b810-9dad-11d1-80b4-00c04fd430c8"
+ }
+ }
+ }
\ No newline at end of file
diff --git a/Backend/python/src/models/job_metadata.py b/Backend/python/src/models/job_metadata.py
new file mode 100644
index 0000000..29e9aa0
--- /dev/null
+++ b/Backend/python/src/models/job_metadata.py
@@ -0,0 +1,47 @@
+from typing import Any, Optional
+from datetime import datetime
+from uuid import UUID
+from pydantic import BaseModel
+
+
+class JobMetadata(BaseModel):
+ """
+ Represents metadata for a job instance.
+ """
+ job_type: str
+ job_instance_id: UUID
+ error_details: Optional[Any] = None
+ canceled_time: Optional[datetime] = None
+ use_onelake: bool = False
+
+ @property
+ def is_canceled(self) -> bool:
+ """Returns whether the job is canceled."""
+ return self.canceled_time is not None
+
+    def to_dict(self) -> dict:
+        """Convert the job metadata to a JSON-serializable dictionary.
+
+        Note: this deliberately does not override Pydantic's model_dump_json,
+        which returns a JSON string rather than a dict.
+        """
+        return {
+            "job_type": self.job_type,
+            "job_instance_id": str(self.job_instance_id),
+            "error_details": self.error_details,
+            "canceled_time": self.canceled_time.isoformat() if self.canceled_time else None,
+            "use_onelake": self.use_onelake
+        }
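+
+    # Illustrative round trip (the job type value is an assumption):
+    #   meta = JobMetadata(job_type="ScheduledJob", job_instance_id=uuid4())
+    #   restored = JobMetadata.from_dict(meta.to_dict())
+    #   restored.job_instance_id == meta.job_instance_id  # True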
+
+ @classmethod
+ def from_dict(cls, data: dict) -> 'JobMetadata':
+ """Create a JobMetadata instance from a dictionary."""
+ return cls(
+ job_type=data.get("job_type", ""),
+ job_instance_id=UUID(data.get("job_instance_id", "00000000-0000-0000-0000-000000000000")),
+ use_onelake=data.get("use_onelake", False),
+ error_details=data.get("error_details"),
+ canceled_time=datetime.fromisoformat(data["canceled_time"]) if data.get("canceled_time") else None
+ )
\ No newline at end of file
diff --git a/Backend/python/src/models/lakehouse_file.py b/Backend/python/src/models/lakehouse_file.py
new file mode 100644
index 0000000..62afc3f
--- /dev/null
+++ b/Backend/python/src/models/lakehouse_file.py
@@ -0,0 +1,29 @@
+from pydantic import BaseModel, Field
+
+
+class LakehouseFile(BaseModel):
+ """Model representing a file in a Lakehouse."""
+
+ name: str = Field(
+ ...,
+ description="The name of the file"
+ )
+ path: str = Field(
+ ...,
+ description="The relative path of the file within the Files directory"
+ )
+ is_directory: bool = Field(
+ ...,
+ description="Whether this path represents a directory"
+ )
+
+ model_config = {
+ "json_schema_extra": {
+ "example": {
+ "name": "data.csv",
+ "path": "subfolder/data.csv",
+ "is_directory": False
+ }
+ }
+ }
\ No newline at end of file
diff --git a/Backend/python/src/models/lakehouse_table.py b/Backend/python/src/models/lakehouse_table.py
new file mode 100644
index 0000000..f119727
--- /dev/null
+++ b/Backend/python/src/models/lakehouse_table.py
@@ -0,0 +1,31 @@
+from typing import Optional
+from pydantic import BaseModel, Field
+
+class LakehouseTable(BaseModel):
+ """
+ Model representing a table in a Lakehouse.
+ """
+ name: str = Field(
+ ...,
+ description="The name of the table"
+ )
+ path: str = Field(
+ ...,
+ description="The full path to the table in OneLake storage"
+ )
+ schema_name: Optional[str] = Field(
+ None,
+ description="The schema name of the table, if available",
+ alias="schema"
+ )
+
+ model_config = {
+ "populate_by_name": True,
+ "json_schema_extra": {
+ "example": {
+ "name": "customers",
+ "path": "12345678-1234-5678-abcd-1234567890ab/Tables/customers/",
+ "schema": "dbo"
+ }
+ }
+ }
\ No newline at end of file
diff --git a/Backend/python/src/models/onelake_folder.py b/Backend/python/src/models/onelake_folder.py
new file mode 100644
index 0000000..ae0a4c0
--- /dev/null
+++ b/Backend/python/src/models/onelake_folder.py
@@ -0,0 +1,89 @@
+from typing import List, Optional
+from pydantic import BaseModel, Field
+
+class OneLakePathData(BaseModel):
+ """
+ Model representing path data in OneLake storage.
+ """
+ name: str = Field(..., description="The name of the path")
+ is_shortcut: bool = Field(False, description="Whether this path is a shortcut to another location", alias="isShortcut")
+ account_type: Optional[str] = Field(None, description="The account type for the shortcut (e.g., 'ADLS')", alias="accountType")
+ is_directory: bool = Field(False, description="Whether this path represents a directory", alias="isDirectory")
+
+ model_config = {
+ "populate_by_name": True
+ }
+
+class OneLakePathContainer(BaseModel):
+ """
+ Container for OneLake paths.
+ """
+ paths: List[OneLakePathData] = Field(..., description="List of paths in the container")
+
+ model_config = {
+ "populate_by_name": True
+ }
+
+class OneLakeFolder(BaseModel):
+ """
+ Model representing a folder or file in OneLake storage.
+ """
+ name: str = Field(
+ ...,
+ description="The name of the folder or file"
+ )
+ is_directory: bool = Field(
+ ...,
+ description="Whether this path represents a directory",
+ alias="isDirectory"
+ )
+ is_shortcut: Optional[bool] = Field(
+ None,
+ description="Whether this path is a shortcut to another location",
+ alias="isShortcut"
+ )
+ account_type: Optional[str] = Field(
+ None,
+ description="The account type for the shortcut (e.g., 'ADLS')",
+ alias="accountType"
+ )
+
+ model_config = {
+ "populate_by_name": True,
+ "json_schema_extra": {
+ "example": {
+ "name": "data",
+ "isDirectory": True,
+ "isShortcut": False,
+ "accountType": None
+ }
+ }
+ }
+
+class GetFoldersResult(BaseModel):
+ """
+ Model representing the result of a folder listing operation.
+ """
+ paths: List[OneLakeFolder] = Field(
+ ...,
+ description="List of folders and files in the requested directory"
+ )
+
+ model_config = {
+ "json_schema_extra": {
+ "example": {
+ "paths": [
+ {
+ "name": "data",
+ "isDirectory": True,
+ "isShortcut": False
+ },
+ {
+ "name": "logs",
+ "isDirectory": True,
+ "isShortcut": False
+ }
+ ]
+ }
+ }
+ }
\ No newline at end of file
diff --git a/Backend/python/src/models/write_to_lakehouse_file_request.py b/Backend/python/src/models/write_to_lakehouse_file_request.py
new file mode 100644
index 0000000..44e5a06
--- /dev/null
+++ b/Backend/python/src/models/write_to_lakehouse_file_request.py
@@ -0,0 +1,53 @@
+import uuid
+
+from pydantic import BaseModel, Field, field_validator
+
+class WriteToLakehouseFileRequest(BaseModel):
+ """
+ Request model for writing content to a Lakehouse file.
+ """
+ workspace_id: str = Field(
+ ...,
+ description="The workspace ID containing the lakehouse",
+ examples=["12345678-1234-5678-abcd-1234567890ab"]
+ )
+ lakehouse_id: str = Field(
+ ...,
+ description="The lakehouse ID where the file will be stored",
+ examples=["12345678-1234-5678-abcd-1234567890ab"]
+ )
+ file_name: str = Field(
+ ...,
+ description="Name of the file to be written",
+ examples=["data.json"]
+ )
+ content: str = Field(
+ ...,
+ description="Content to write to the file"
+ )
+ overwrite_if_exists: bool = Field(
+ False,
+ description="Whether to overwrite the file if it already exists"
+ )
+
+ # V2-style validators to ensure workspace_id and lakehouse_id are valid UUIDs
+ @field_validator('workspace_id', 'lakehouse_id')
+ @classmethod # Field validators should be classmethods in V2
+ def validate_uuid(cls, v):
+ try:
+ uuid.UUID(v)
+ return v
+ except ValueError:
+ raise ValueError(f"Invalid UUID format: {v}")
+
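+    # Illustrative: constructing the model with workspace_id="not-a-uuid" fails
+    # validation with a pydantic ValidationError wrapping "Invalid UUID format: not-a-uuid".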
+ model_config = { # Use model_config instead of Config in V2
+ "json_schema_extra": {
+ "example": {
+ "workspace_id": "12345678-1234-5678-abcd-1234567890ab",
+ "lakehouse_id": "98765432-1234-5678-abcd-1234567890ab",
+ "file_name": "sample-data.json",
+ "content": "{ \"key\": \"value\" }",
+ "overwrite_if_exists": True
+ }
+ }
+ }
\ No newline at end of file
diff --git a/Backend/python/src/services/__init__.py b/Backend/python/src/services/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Backend/python/src/services/authentication.py b/Backend/python/src/services/authentication.py
new file mode 100644
index 0000000..e26c812
--- /dev/null
+++ b/Backend/python/src/services/authentication.py
@@ -0,0 +1,560 @@
+import logging
+
+from jose import jwt
+from jose.exceptions import JWTClaimsError, ExpiredSignatureError, JWTError
+from typing import Optional, List, Dict, Any
+import msal
+
+from msal.exceptions import MsalServiceError
+from constants.http_constants import AuthorizationSchemes
+from constants.environment_constants import EnvironmentConstants
+from services.configuration_service import get_configuration_service
+from constants.workload_scopes import WorkloadScopes
+from models.authentication_models import SubjectAndAppToken, TokenVersion, AuthorizationContext, Claim
+from exceptions.exceptions import AuthenticationException, AuthenticationUIRequiredException
+from services.open_id_connect_configuration import OpenIdConnectConfigurationManager
+from constants.api_constants import ApiConstants
+
+logger = logging.getLogger(__name__)
+
+class AuthenticationService:
+ def __init__(self, openid_manager: OpenIdConnectConfigurationManager):
+ self.logger = logging.getLogger(__name__)
+ self.openid_manager = openid_manager
+ config_service = get_configuration_service()
+ self.publisher_tenant_id = config_service.get_publisher_tenant_id()
+ self.audience = config_service.get_audience()
+ self.client_id = config_service.get_client_id()
+ self.client_secret = config_service.get_client_secret()
+ self._msal_apps = {}
+
+
+ # Default scopes for SubjectAndApp token authentication
+ self.subject_and_app_auth_allowed_scopes = [WorkloadScopes.FABRIC_WORKLOAD_CONTROL]
+
+ # Create MSAL confidential client application
+ self.authority_template = f"{EnvironmentConstants.AAD_INSTANCE_URL}/{{tenant_id}}"
+ default_authority = f"{EnvironmentConstants.AAD_INSTANCE_URL}/organizations"
+ self.app = None
+ if self.client_id and self.client_secret and self.publisher_tenant_id:
+ self._msal_apps[default_authority] = msal.ConfidentialClientApplication(
+ client_id=self.client_id,
+ client_credential=self.client_secret,
+ authority=default_authority
+ )
+ self.logger.info("MSAL Confidential Client Application initialized")
+ else:
+ self.logger.warning("Missing ClientId or ClientSecret in configuration. MSAL client not initialized.")
+
+ def _get_msal_app(self, tenant_id: str) -> msal.ConfidentialClientApplication:
+ """Gets or creates an MSAL app for the specified tenant."""
+ authority = f"{EnvironmentConstants.AAD_INSTANCE_URL}/{tenant_id}"
+
+ if authority not in self._msal_apps:
+ self._msal_apps[authority] = msal.ConfidentialClientApplication(
+ client_id=self.client_id,
+ authority=authority,
+ client_credential=self.client_secret
+ )
+
+ return self._msal_apps[authority]
+
+
+
+ async def authenticate_control_plane_call(
+ self,
+ auth_header: Optional[str],
+ tenant_id: Optional[str] = None,
+ require_subject_token: bool = True,
+ require_tenant_id_header: bool = True
+ ) -> AuthorizationContext:
+ """
+ Authenticate a control plane API call using the authorization header.
+
+ This is called during item create/update/delete/get/Jobs operations.
+ """
+ self.logger.info("Authenticating control plane call")
+
+ if not auth_header:
+ self.logger.error("Missing or invalid Authorization header")
+ raise AuthenticationException("Missing or invalid Authorization header")
+
+ if require_tenant_id_header and not tenant_id:
+ self.logger.error("tenant_id header is missing")
+ raise AuthenticationException("tenant_id header is missing")
+
+ # Parse the tokens
+ try:
+ subject_and_app_token = SubjectAndAppToken.parse(auth_header)
+ except AuthenticationException as e:
+ self.logger.error(f"Failed to parse SubjectAndAppToken: {str(e)}")
+ raise
+
+ # Create authorization context based on parsed tokens
+ auth_context = await self._authenticate(
+ tenant_id,
+ subject_and_app_token,
+ self.subject_and_app_auth_allowed_scopes,
+ require_subject_token,
+ require_tenant_id_header
+ )
+
+ return auth_context
+
+ async def authenticate_data_plane_call(
+ self,
+ auth_header: Optional[str],
+ allowed_scopes: List[str],
+ tenant_id: Optional[str] = None
+ ) -> AuthorizationContext:
+ """
+ Authenticate a data plane API call using the authorization header.
+
+ This is called for custom API operations like getting supported operators.
+
+ Args:
+ auth_header: The authorization header from the request
+ allowed_scopes: List of scopes required for the operation
+ tenant_id: Optional tenant ID from the request header
+
+ Returns:
+ AuthorizationContext with user and tenant information
+ """
+ self.logger.info(f"Authenticating data plane call with scopes: {allowed_scopes}")
+
+        if not auth_header or not auth_header.startswith(f"{AuthorizationSchemes.BEARER} "):
+            self.logger.error("Missing or invalid Authorization header")
+            raise AuthenticationException("Missing or invalid Authorization header")
+
+        token = auth_header[len(AuthorizationSchemes.BEARER):].strip()
+ auth_context = await self._authenticate_bearer(token, allowed_scopes)
+ return auth_context
+
+ async def get_access_token_on_behalf_of(
+ self,
+ auth_context: AuthorizationContext,
+ scopes: List[str]
+ ) -> str:
+ """Get an access token using OBO flow."""
+ self.logger.info(f"Getting access token for scopes: {', '.join(scopes)}")
+
+ if not auth_context.original_subject_token:
+ self.logger.error("No original_subject_token in AuthorizationContext for OBO flow.")
+ raise AuthenticationException("OBO flow requires an original subject token.")
+
+ if not self.client_id or not self.client_secret: # Check if base MSAL config is present
+ self.logger.error("MSAL client_id or client_secret not configured, cannot perform OBO flow.")
+ raise AuthenticationException("MSAL client not configured for OBO flow.")
+
+ if not auth_context.tenant_object_id:
+ self.logger.error("TenantObjectId missing in AuthorizationContext for OBO flow. Cannot determine authority.")
+ raise AuthenticationException("Cannot determine tenant authority for OBO flow.")
+
+ obo_app = self._get_msal_app(auth_context.tenant_object_id)
+        self.logger.debug(f"OBO MSAL app resolved for tenant: {auth_context.tenant_object_id}")
+
+ try:
+ result = obo_app.acquire_token_on_behalf_of(
+ user_assertion=auth_context.original_subject_token,
+ scopes=scopes
+ )
+
+ except Exception as e:
+ self.logger.error(f"MSAL OBO acquire_token_on_behalf_of call failed unexpectedly: {str(e)}", exc_info=True)
+ raise AuthenticationException(f"OBO token acquisition failed: {str(e)}")
+
+ if "error" in result:
+ error_code = result.get("error")
+ error_description = result.get("error_description", "")
+ self.logger.error(f"Error in OBO token acquisition: {error_code}: {error_description}")
+ # Handle consent required error
+            if error_code in ["interaction_required", "consent_required", "invalid_grant"] or result.get("suberror") == "conditional_access":
+                claims_challenge = result.get("claims")
+                py_ex = AuthenticationUIRequiredException(error_description)
+                if claims_challenge:
+                    py_ex.add_claims_for_conditional_access(claims_challenge)
+                needs_consent = error_code == "consent_required" or "consent_required" in error_description.lower()
+                if needs_consent:
+                    py_ex.add_scopes_to_consent(scopes)
+                self.logger.warning(
+                    f"OBO flow requires UI interaction: {error_code}. "
+                    f"Claims: {claims_challenge}, Scopes: {scopes if needs_consent else 'N/A'}"
+                )
+ raise py_ex
+ raise AuthenticationException(f"Error acquiring token: {error_code}")
+
+ if "access_token" not in result:
+ self.logger.error("Access token not found in OBO result")
+ raise AuthenticationException("Access token not found in OBO result")
+
+ self.logger.info(f"OBO flow successful for user {auth_context.object_id}.")
+ return result["access_token"]
+
+ async def build_composite_token(
+ self,
+ auth_context: AuthorizationContext,
+ scopes: List[str]
+ ) -> str:
+ """Build a composite token for making calls to Fabric APIs."""
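+        # Illustrative shape of the returned header value (the exact format is
+        # produced by SubjectAndAppToken.generate_authorization_header_value; the
+        # "1.0" suffix and parameter names below are assumptions):
+        #   SubjectAndAppToken1.0 subjectToken="<obo-token>", appToken="<s2s-token>"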
+ self.logger.info(f"Building composite token for scopes: {', '.join(scopes)}")
+
+ # Get OBO token for Fabric
+ token_obo = await self.get_access_token_on_behalf_of(auth_context, scopes)
+
+ # Get service-to-service token
+ service_principal_token = await self.get_fabric_s2s_token()
+
+ # Generate SubjectAndAppToken authorization header
+ return SubjectAndAppToken.generate_authorization_header_value(token_obo, service_principal_token)
+
+ async def get_fabric_s2s_token(self) -> str:
+ """Get a service-to-service token for Fabric."""
+ self.logger.info("Acquiring Fabric S2S token")
+ try:
+ # Request token with default scope
+ scopes = [f"{EnvironmentConstants.FABRIC_BACKEND_RESOURCE_ID}/.default"]
+
+ app = self._get_msal_app(self.publisher_tenant_id)
+ try:
+ result = app.acquire_token_for_client(scopes=scopes)
+ except MsalServiceError as e:
+ self.logger.error(f"MSAL exception: {str(e)}")
+ raise AuthenticationException(f"MSAL exception: {str(e)}")
+
+ if "error" in result:
+ error_code = result.get("error")
+ error_description = result.get("error_description", "")
+ self.logger.error(f"MSAL exception: {error_code}: {error_description}")
+ raise AuthenticationException(f"MSAL exception: {error_code}")
+
+ return result["access_token"]
+
+ except AuthenticationException:
+ raise
+ except Exception as e:
+            self.logger.error(f"Unexpected error acquiring Fabric S2S token: {str(e)}")
+            raise Exception(f"Unexpected error acquiring Fabric S2S token: {str(e)}")
+
+ async def _authenticate(
+ self,
+ tenant_id: Optional[str],
+ subject_and_app_token: SubjectAndAppToken,
+ allowed_scopes: List[str],
+ require_subject_token: bool = True,
+ require_tenant_id_header: bool = True
+ ) -> AuthorizationContext:
+ """Authenticate using SubjectAndAppToken."""
+ if require_tenant_id_header and not tenant_id:
+ self.logger.error("tenant_id header is missing")
+ raise AuthenticationException("tenant_id header is missing")
+
+ app_token_claims = await self._validate_app_token(subject_and_app_token.app_token)
+ app_token_version = self._get_token_version(app_token_claims)
+
+ # Check app ID claim based on token version
+ app_id_claim = "appid" if app_token_version == TokenVersion.V1 else "azp"
+ app_token_app_id = self._validate_claim_one_of_values(
+ app_token_claims,
+ app_id_claim,
+ [EnvironmentConstants.FABRIC_BACKEND_APP_ID,
+ EnvironmentConstants.FABRIC_CLIENT_FOR_WORKLOADS_APP_ID],
+ "app-only token must belong to Fabric BE or Fabric client for workloads"
+ )
+
+ # Validate app token belongs to publisher tenant
+ self._validate_claim_value(app_token_claims, "tid", self.publisher_tenant_id,
+ "app token must be in the publisher's tenant")
+
+ # Handle missing subject token
+ if not subject_and_app_token.subject_token:
+ if require_subject_token:
+ self.logger.error("subject token is missing")
+ raise AuthenticationException("SubjectAndAppToken is missing subject token")
+
+ # Create context without subject info
+ if require_tenant_id_header:
+ return AuthorizationContext(tenant_object_id=tenant_id)
+ else:
+ return AuthorizationContext()
+
+ # Validate subject token
+ subject_claims = await self._validate_subject_token(subject_and_app_token.subject_token, tenant_id)
+ subject_token_version = self._get_token_version(subject_claims)
+
+ # Validate app IDs match between tokens
+ subject_app_id_claim = "appid" if subject_token_version == TokenVersion.V1 else "azp"
+ self._validate_claim_value(subject_claims, subject_app_id_claim, app_token_app_id,
+ "subject and app tokens should belong to same application")
+
+ # Validate tenant ID
+ self._validate_claim_value(subject_claims, "tid", tenant_id, "subject tokens must belong to the subject's tenant")
+
+ # Validate scopes
+ self._validate_any_scope(subject_claims, allowed_scopes)
+
+ # Create context with subject info - properly set fields that exist in the model
+ auth_context = AuthorizationContext(
+ original_subject_token=subject_and_app_token.subject_token,
+ tenant_object_id=tenant_id, # Use tenant_object_id instead of tenant_id
+ claims=subject_claims
+ )
+
+ return auth_context
+
+ def _validate_claim_one_of_values(self, claims: List[Claim], claim_name: str,
+ expected_values: List[str], error_message: str) -> str:
+ """Validate a claim exists and matches one of the expected values."""
+ claim_value = self._validate_claim_exists(claims, claim_name,
+ f"Missing required claim: {claim_name}")
+
+ if claim_value not in expected_values:
+ self.logger.error(
+ f"{error_message}: claim '{claim_name}' has value '{claim_value}', "
+ f"expected one of: {expected_values}"
+ )
+ raise AuthenticationException(error_message)
+
+ return claim_value
+
+ async def _authenticate_bearer(self, token: str, allowed_scopes: List[str]) -> AuthorizationContext:
+ """Authenticate a bearer token"""
+ claims = await self._validate_aad_token_common(token, is_app_only=False, expected_tenant_id_for_issuer=None)
+
+ # Extract tenant ID
+ tenant_id = self._validate_claim_exists(claims, "tid", "access tokens should have this claim")
+
+ # Validate scopes
+ self._validate_any_scope(claims, allowed_scopes)
+
+ # Create context with subject info properly
+ auth_context = AuthorizationContext(
+ original_subject_token=token,
+ tenant_object_id=tenant_id,
+ claims=claims
+ )
+
+ return auth_context
+
+    def get_expected_issuer(self, oidc_config: OpenIdConnectConfigurationManager, token_version: TokenVersion, tenant_id: str) -> str:
+ """Get the expected issuer for the token version and tenant ID."""
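+        # Illustrative: for a v2.0 token and tenant <tid> the expected issuer is
+        # f"{EnvironmentConstants.AAD_INSTANCE_URL}/<tid>/v2.0"; v1.0 issuers come
+        # from the OpenID configuration's issuer template with <tid> substituted.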
+ expected_issuer = None
+ if token_version == TokenVersion.V1:
+ try:
+ expected_issuer = oidc_config.issuer_configuration.format(tenantid=tenant_id)
+ except KeyError:
+                self.logger.error(f"Issuer configuration {oidc_config.issuer_configuration} is missing the 'tenantid' placeholder")
+ raise AuthenticationException("Issuer configuration missing tenantid placeholder")
+ elif token_version == TokenVersion.V2:
+ expected_issuer = f"{EnvironmentConstants.AAD_INSTANCE_URL}/{tenant_id}/v2.0"
+ else:
+ self.logger.error(f"Unsupported token version: {token_version}")
+ raise AuthenticationException(f"Unsupported token version: {token_version}")
+ return expected_issuer
+
+    def _get_token_version(self, claims: List[Claim]) -> TokenVersion:
+ """Gets the token version from claims."""
+ version = self._validate_claim_exists(claims, "ver", "access tokens should have version claim")
+ if version == "1.0":
+ return TokenVersion.V1
+ elif version == "2.0":
+ return TokenVersion.V2
+ else:
+ raise AuthenticationException(f"Unsupported token version: {version}")
+
+    def _get_expected_audience(self, token_version: TokenVersion) -> str:
+ """Get the expected audience based on token version."""
+ return self.audience if token_version == TokenVersion.V1 else self.client_id
+
+    async def _validate_aad_token_common(self, token: str, is_app_only: bool, expected_tenant_id_for_issuer: Optional[str]) -> List[Claim]:
+        """
+        Validate common properties of an AAD token (signature, lifetime, audience, issuer).
+        The issuer tenant is derived from the token's own 'tid' claim.
+        Returns the validated claims as a list of Claim objects.
+        """
+ self.logger.debug(f"Validating AAD token. is_app_only: {is_app_only}, expected_tenant_id_for_issuer: {expected_tenant_id_for_issuer}")
+ try:
+ unverified_header = jwt.get_unverified_header(token)
+ unverified_claims_dict = jwt.get_unverified_claims(token)
+
+ unverified_claims_list = [Claim(type=k, value=v) for k, v in unverified_claims_dict.items()]
+ # Extract tenant ID from claims
+ tenant_id = self._validate_claim_exists(unverified_claims_list, "tid", "access tokens should have 'tid' claim")
+
+ if not tenant_id:
+ self.logger.error("Token is missing 'tid' claim.")
+ raise AuthenticationException("Token is missing 'tid' claim.")
+
+ # Get token version for issuer and audience validation
+ token_version = self._get_token_version(unverified_claims_list)
+ self.logger.debug(f"Token version: {token_version}")
+
+ # Get OpenID Connect configuration for signing keys
+ oidc_config = await self.openid_manager.get_configuration_async()
+
+ signing_key = None
+ for key in oidc_config.signing_keys:
+ if key.get("kid") == unverified_header.get("kid"):
+ signing_key = key
+ break
+
+ if not signing_key:
+                self.logger.error("Token signing key not found")
+ raise AuthenticationException("Token signing key not found")
+
+ expected_issuer = self.get_expected_issuer(oidc_config, token_version, tenant_id)
+ if not expected_issuer:
+ self.logger.error("Expected issuer not found")
+ raise AuthenticationException("Expected issuer not found")
+
+            expected_audience = self._get_expected_audience(token_version)
+ self.logger.debug(f"Expected audience: {expected_audience}")
+
+ # Validate token fully
+ decoded_payload = jwt.decode(
+ token,
+ key=signing_key,
+ algorithms=[unverified_header.get("alg", "RS256")],
+ audience=expected_audience,
+ issuer=expected_issuer,
+ options={
+ "verify_signature": True,
+ "verify_exp": True,
+ "verify_nbf": True,
+ "verify_iat": True,
+ "verify_aud": True,
+ "verify_iss": True,
+ "leeway": 60, # 1 minute leeway for time checks
+ }
+ )
+
+ claims = [Claim(type=k, value=v) for k, v in decoded_payload.items()]
+ self.logger.debug(f"Token validated successfully. Claims: {decoded_payload}")
+
+ app_id_claim = "appid" if token_version == TokenVersion.V1 else "azp"
+ self._validate_claim_exists(claims, app_id_claim, f"access tokens should have {app_id_claim} claim")
+
+ self._validate_app_only(claims, is_app_only)
+ self.logger.info("AAD token validation successful")
+ return claims
+
+ except ExpiredSignatureError:
+ self.logger.error("Token has expired")
+ raise AuthenticationException("Token has expired")
+        except JWTClaimsError as e:
+            if "Invalid audience" in str(e):
+                token_audience = unverified_claims_dict.get("aud") if 'unverified_claims_dict' in locals() else "N/A (unverified claims not available)"
+                expected_for_log = expected_audience if 'expected_audience' in locals() else "N/A (expected audience not available)"
+                self.logger.error(f"Audience mismatch. Expected: {expected_for_log}, Got: {token_audience}")
+            self.logger.error(f"Token has invalid claims: {str(e)}")
+            raise AuthenticationException(f"Invalid token claims: {str(e)}")
+ except JWTError as e:
+ self.logger.error(f"JWT validation failed: {str(e)}")
+ raise AuthenticationException(f"Token validation failed: {str(e)}")
+ except Exception as e:
+ self.logger.error(f"Token validation failed: {str(e)}")
+ raise AuthenticationException(f"Token validation failed: {str(e)}")
+
+    async def _validate_app_token(self, token: str) -> List[Claim]:
+ """
+ Validate an app token (app-only) with publisher tenant validation.
+ """
+ return await self._validate_aad_token_common(
+ token,
+ is_app_only=True,
+ expected_tenant_id_for_issuer=self.publisher_tenant_id
+ )
+
+    async def _validate_subject_token(self, token: str, tenant_id: str) -> List[Claim]:
+ """
+ Validate a subject token (delegated) with the user's tenant.
+ """
+ return await self._validate_aad_token_common(
+ token,
+ is_app_only=False,
+ expected_tenant_id_for_issuer=tenant_id
+ )
+
+    def _validate_claim_value(self, claims: List[Claim], claim_name: str, expected_value: Optional[str] = None,
+                              error_message: Optional[str] = None) -> str:
+ """Validate a claim exists and optionally matches expected value."""
+ claim_value = self._validate_claim_exists(claims, claim_name, f"Missing required claim: {claim_name}")
+
+ if expected_value is not None and str(claim_value) != expected_value:
+ error_msg = error_message or f"Claim {claim_name} has incorrect value"
+ self.logger.error(f"{error_msg}: expected '{expected_value}', got '{claim_value}'")
+ raise AuthenticationException(error_msg)
+
+ return claim_value
+
+ def _validate_claim_exists(self, claims: List[Claim], claim_name: str, error_message: str) -> str:
+ """Validate a claim exists and return its value."""
+ for claim in claims:
+ if claim.type == claim_name:
+ return claim.value
+
+ self.logger.error(f"Missing claim {claim_name}: {error_message}")
+ raise AuthenticationException(f"Missing claim {claim_name}: {error_message}")
+
+ def _validate_no_claim(self, claims: List[Claim], claim_name: str, error_message: str) -> None:
+ """Validate a claim does not exist."""
+ for claim in claims:
+ if claim.type == claim_name:
+ self.logger.error(f"Unexpected claim exists: claimType='{claim_name}', reason='{error_message}', actualValue={claim.value}")
+ raise AuthenticationException("Unexpected token format")
+
+ def _validate_app_only(self, claims: List[Claim], is_app_only: bool) -> None:
+ """Validate that the token is either app-only or delegated based on claims."""
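+        # Expected claim shapes, as enforced below: app-only tokens carry idtyp="app"
+        # and an oid claim but no scp; delegated tokens carry scp but no idtyp.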
+ if is_app_only:
+ self._validate_claim_value(claims, "idtyp", "app", "expecting an app-only token")
+ self._validate_claim_exists(claims, "oid", "app-only tokens should have oid claim in them")
+ self._validate_no_claim(claims, "scp", "app-only tokens should not have this claim")
+ else:
+ self._validate_no_claim(claims, "idtyp", "delegated tokens should not have this claim")
+ self._validate_claim_exists(claims, "scp", "delegated tokens should have this claim")
+
+ def _extract_scopes_from_claims(self, claims: List[Claim]) -> List[str]:
+ """Extract all scopes from both delegated (scp) and application (roles) claims."""
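+        # Illustrative: a delegated claim scp="Item1.Read.All Item1.ReadWrite.All" yields
+        # ["Item1.Read.All", "Item1.ReadWrite.All"]; an app token with roles=["App.Scope"]
+        # (a hypothetical role name) contributes "App.Scope".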
+ token_scopes = []
+
+ # Extract delegated permissions from scp claim
+ for claim in claims:
+ if claim.type == "scp":
+ scopes_str = claim.value if claim.value else ""
+ if isinstance(scopes_str, str):
+ token_scopes.extend([s.strip() for s in scopes_str.split()])
+
+ for claim in claims:
+ if claim.type == "roles":
+ roles = claim.value if claim.value else []
+ if isinstance(roles, list):
+ token_scopes.extend(roles)
+ elif isinstance(roles, str):
+ token_scopes.append(roles)
+
+ return token_scopes
+
+ def _validate_any_scope(self, claims: List[Claim], allowed_scopes: List[str]) -> None:
+ """Validate that the token has at least one of the allowed scopes."""
+ token_scopes = self._extract_scopes_from_claims(claims)
+
+ # Check if any allowed scope is present in token scopes
+ if not any(scope in token_scopes for scope in allowed_scopes):
+ allowed_scopes_str = ", ".join(allowed_scopes)
+ token_scopes_str = ", ".join(token_scopes) if token_scopes else "none"
+ error_message = "Workload's Entra ID application is missing required scopes"
+ self.logger.error(f"{error_message}. Required: [{allowed_scopes_str}], Found: [{token_scopes_str}]")
+ raise AuthenticationException(error_message)
+
+ self.logger.debug(f"Scope validation successful. Required: {allowed_scopes}, Found: {token_scopes}")
+
+
+def get_authentication_service() -> AuthenticationService:
+ """Get the singleton AuthenticationService instance."""
+ from core.service_registry import get_service_registry
+ service_registry = get_service_registry()
+ if not service_registry.has(AuthenticationService):
+ if not hasattr(get_authentication_service, "instance"):
+ raise RuntimeError(
+ "AuthenticationService not initialized. "
+ "Ensure the application startup has completed."
+ )
+ return get_authentication_service.instance
+ return service_registry.get(AuthenticationService)
\ No newline at end of file
diff --git a/Backend/python/src/services/authorization.py b/Backend/python/src/services/authorization.py
new file mode 100644
index 0000000..32a0a22
--- /dev/null
+++ b/Backend/python/src/services/authorization.py
@@ -0,0 +1,164 @@
+import logging
+from typing import List
+from uuid import UUID
+import httpx
+from pydantic import BaseModel
+
+from constants.environment_constants import EnvironmentConstants
+from models.authentication_models import AuthorizationContext
+from exceptions.exceptions import UnauthorizedException, TooManyRequestsException, InternalErrorException
+from constants.api_constants import ApiConstants
+from services.http_client import get_http_client_service
+
+logger = logging.getLogger(__name__)
+
+class ResolvePermissionsResponse(BaseModel):
+ """Response model for the resolve permissions API."""
+ permissions: List[str]
+
+class AuthorizationHandler:
+ def __init__(self):
+ self.logger = logging.getLogger(__name__)
+ self._auth_service = None
+ self.fabric_scopes = [f"{EnvironmentConstants.FABRIC_BACKEND_RESOURCE_ID}/.default"]
+
+ @property
+ def auth_service(self):
+ """Lazy load authentication service to avoid circular dependencies."""
+ if self._auth_service is None:
+ from services.authentication import get_authentication_service
+ self._auth_service = get_authentication_service()
+ return self._auth_service
+
+ async def dispose_async(self):
+ """Cleanup method for service registry."""
+ # No resources to cleanup, but method needed for consistency
+ self.logger.debug("AuthorizationHandler disposed")
+
+ async def validate_permissions(
+ self,
+ auth_context: AuthorizationContext,
+ workspace_object_id: UUID,
+ item_object_id: UUID,
+ required_permissions: List[str]
+ ) -> None:
+ """
+ Validate that the user has the required permissions for the item.
+
+ Args:
+ auth_context: The authorization context from authentication
+ workspace_object_id: The workspace ID
+ item_object_id: The item ID
+ required_permissions: List of permissions required (e.g., ["Read", "Write"])
+
+ Raises:
+ UnauthorizedException: If the user doesn't have the required permissions
+ TooManyRequestsException: If API throttling occurs
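+
+        Illustrative call (a sketch; the variable names are assumptions):
+            await get_authorization_service().validate_permissions(
+                auth_context, workspace_id, item_id, ["Read"])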
+ """
+ self.logger.debug(f"Validating permissions for item {item_object_id} in workspace {workspace_object_id}")
+
+ # Get a composite token for calling Fabric APIs
+ subject_and_app_token = await self.auth_service.build_composite_token(
+ auth_context,
+ self.fabric_scopes
+ )
+
+ # Resolve item permissions using the provided token
+ response = await self._resolve_item_permissions(
+ subject_and_app_token,
+ workspace_object_id,
+ item_object_id
+ )
+
+ if response is None or not response.permissions:
+ self.logger.error("Fabric response should contain permissions")
+ raise UnauthorizedException("Failed to resolve permissions")
+
+ # Check if any of the required permissions is missing (case-insensitive comparison)
+ missing_permissions = []
+ for required_perm in required_permissions:
+ if not any(perm.lower() == required_perm.lower() for perm in response.permissions):
+ missing_permissions.append(required_perm)
+
+ if missing_permissions:
+ self.logger.error(
+ f"Insufficient permissions: subjectTenantObjectId={auth_context.tenant_object_id}, "
+ f"subjectObjectId={auth_context.object_id}, "
+ f"workspaceObjectId={workspace_object_id}, "
+ f"itemObjectId={item_object_id}, "
+ f"requiredPermissions={required_permissions}, "
+ f"actualPermissions={response.permissions}"
+ )
+ raise UnauthorizedException("User does not have required permissions")
+
+
+ async def _resolve_item_permissions(
+ self,
+ token: str,
+ workspace_id: UUID,
+ item_id: UUID
+ ) -> ResolvePermissionsResponse:
+ """
+ Resolve item permissions by calling the Fabric workload-control API.
+
+ Args:
+ token: The authentication token
+ workspace_id: The workspace ID
+ item_id: The item ID
+
+ Returns:
+ ResolvePermissionsResponse: The response containing permissions
+
+ Raises:
+ TooManyRequestsException: If the API is throttling requests
+ UnauthorizedException: If there are permission issues
+ Exception: For other errors
+ """
+ url = f"{ApiConstants.WORKLOAD_CONTROL_API_BASE_URL}/workspaces/{workspace_id}/items/{item_id}/resolvepermissions"
+ self.logger.debug(f"Calling resolve permissions API: {url}")
+
+        try:
+            # The shared HTTP client builds the Authorization header itself (Bearer vs.
+            # SubjectAndAppToken) and raises httpx.HTTPStatusError for non-2xx responses,
+            # so error status codes are handled in the except clauses below.
+            http_client = get_http_client_service()
+            response = await http_client.get(url, token)
+            response_data = response.json()
+            return ResolvePermissionsResponse(**response_data)
+
+        except httpx.HTTPStatusError as e:
+            status_code = e.response.status_code
+            if status_code == 429:
+                self.logger.warning(f"Throttling from resolvepermissions API (429) for item {item_id}")
+                raise TooManyRequestsException("Blocked due to resolved-permissions API throttling.")
+            if status_code in (401, 403):
+                error_text = e.response.text
+                self.logger.error(f"Access denied by resolvepermissions API ({status_code}): {error_text}")
+                raise UnauthorizedException(f"Access denied by resolvepermissions API ({status_code}): {error_text}")
+            self.logger.error(f"Error resolving permissions: {str(e)}")
+            raise InternalErrorException(f"Error communicating with Fabric API: {str(e)}")
+        except Exception as e:
+            self.logger.error(f"Unexpected error in _resolve_item_permissions: {str(e)}", exc_info=True)
+            raise InternalErrorException(f"Unexpected error: {str(e)}")
+
+def get_authorization_service() -> AuthorizationHandler:
+ """Get the singleton AuthorizationHandler instance."""
+ from core.service_registry import get_service_registry
+ registry = get_service_registry()
+
+ if not registry.has(AuthorizationHandler):
+ # Fallback for backward compatibility
+ if not hasattr(get_authorization_service, "instance"):
+ get_authorization_service.instance = AuthorizationHandler()
+ return get_authorization_service.instance
+
+ return registry.get(AuthorizationHandler)
\ No newline at end of file
diff --git a/Backend/python/src/services/configuration_service.py b/Backend/python/src/services/configuration_service.py
new file mode 100644
index 0000000..2779f9a
--- /dev/null
+++ b/Backend/python/src/services/configuration_service.py
@@ -0,0 +1,449 @@
+import json
+import os
+import logging
+from pathlib import Path
+from typing import Dict, Any, Optional, List
+from dataclasses import dataclass, field
+from enum import Enum
+
+logger = logging.getLogger(__name__)
+
+class Environment(Enum):
+ """Application environments."""
+ DEVELOPMENT = "Development"
+ STAGING = "Staging"
+ PRODUCTION = "Production"
+
+@dataclass
+class ServerConfig:
+ """Server configuration."""
+ host: str = "0.0.0.0"
+ port: int = 5000
+ workers: int = 1
+ shutdown_timeout: int = 10
+ force_shutdown_timeout: int = 15
+
+@dataclass
+class SecurityConfig:
+ """Security configuration."""
+ allowed_hosts: List[str] = field(default_factory=lambda: ["*"])
+ cors_origins: List[str] = field(default_factory=lambda: ["*"])
+
+class ConfigurationService:
+ """
+ Service for accessing application configuration.
+ Provides a clean interface for configuration management across environments.
+ """
+
+ _instance: Optional['ConfigurationService'] = None
+ _initialized: bool = False
+
+ def __new__(cls, *args, **kwargs):
+ """Ensure singleton pattern."""
+ if cls._instance is None:
+ cls._instance = super().__new__(cls)
+ return cls._instance
+
+ def __init__(self, base_path: Optional[Path] = None, environment: Optional[str] = None):
+ """
+ Initialize the configuration service.
+
+ Args:
+ base_path: Base path for configuration files (defaults to src directory)
+ environment: Environment name (Development, Staging, Production)
+ """
+ # Prevent re-initialization
+ if ConfigurationService._initialized:
+ return
+
+ self.logger = logging.getLogger(__name__)
+
+ # Determine base path
+ if base_path is None:
+ base_path = Path(__file__).parent.parent # Go up to src directory
+ else:
+ base_path = Path(base_path)
+
+ self.base_path = base_path
+
+ self.environment = (
+ environment or
+ os.environ.get('PYTHON_ENVIRONMENT') or
+ os.environ.get('ASPNETCORE_ENVIRONMENT') or
+ Environment.DEVELOPMENT.value
+ )
+
+ # Initialize configuration storage
+ self.config: Dict[str, Any] = {}
+ self._server_config: Optional[ServerConfig] = None
+ self._security_config: Optional[SecurityConfig] = None
+
+ # Load all configurations
+ self._load_configurations()
+
+ ConfigurationService._initialized = True
+
+ def _load_configurations(self) -> None:
+ """Load all configuration files in order of precedence."""
+ try:
+ # 1. Load base appsettings.json
+ base_config_path = self.base_path / "appsettings.json"
+ self._load_config_file(base_config_path, required=True)
+
+ # 2. Load environment-specific settings
+ env_config_path = self.base_path / f"appsettings.{self.environment}.json"
+ self._load_config_file(env_config_path, required=False)
+
+ # 3. Override with environment variables
+ self._load_environment_variables()
+
+ # 4. Validate required settings
+ self._validate_configuration()
+
+ # 5. Parse structured configs
+ self._parse_structured_configs()
+
+ self.logger.info(f"Configuration loaded successfully for environment: {self.environment}")
+
+ except Exception as e:
+ self.logger.error(f"Failed to load configuration: {e}")
+ raise
+
+ def _load_config_file(self, config_path: Path, required: bool = True) -> None:
+ """Load a configuration file and merge with existing config."""
+ try:
+ with open(config_path, 'r', encoding='utf-8') as f:
+ content = f.read()
+
+ # Remove comments (// style) for JSON compatibility
+ lines = content.split('\n')
+ cleaned_lines = []
+ for line in lines:
+ comment_idx = line.find('//')
+ if comment_idx != -1 and not self._is_in_string(line, comment_idx):
+ line = line[:comment_idx]
+ cleaned_lines.append(line)
+ content = '\n'.join(cleaned_lines)
+
+ file_config = json.loads(content)
+ self._deep_merge(self.config, file_config)
+ self.logger.debug(f"Loaded configuration from {config_path}")
+
+ except FileNotFoundError:
+ if required:
+ self.logger.error(f"Required configuration file not found: {config_path}")
+ raise
+ else:
+ self.logger.debug(f"Optional configuration file not found: {config_path}")
+
+ except json.JSONDecodeError as e:
+ self.logger.error(f"Invalid JSON in configuration file {config_path}: {e}")
+ raise
+
+ def _is_in_string(self, line: str, position: int) -> bool:
+ """Check if a position in a line is within a string literal."""
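+        # Needed because values may legitimately contain "//" (e.g. "https://..."):
+        # only a "//" that falls outside string literals starts a comment.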
+ in_string = False
+ escape_next = False
+
+ for i, char in enumerate(line):
+ if i >= position:
+ break
+
+ if escape_next:
+ escape_next = False
+ continue
+
+ if char == '\\':
+ escape_next = True
+ elif char == '"':
+ in_string = not in_string
+
+ return in_string
+
+ def _load_environment_variables(self) -> None:
+ """Override configuration with environment variables."""
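+        # Illustrative mapping: the environment variable Storage__Metadata__JobsDirectory=jobs
+        # becomes the nested config key "Storage:Metadata:JobsDirectory" = "jobs".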
+ # Standard mappings
+ env_mappings = {
+ 'PUBLISHER_TENANT_ID': 'PublisherTenantId',
+ 'CLIENT_ID': 'ClientId',
+ 'CLIENT_SECRET': 'ClientSecret',
+ 'AUDIENCE': 'Audience',
+ }
+
+ # Support ASP.NET Core style environment variables (with __ as separator)
+ for env_key, env_value in os.environ.items():
+ if '__' in env_key:
+ # Convert __ to : for nested keys
+ config_key = env_key.replace('__', ':')
+ self._set_nested_value(config_key, env_value)
+ self.logger.debug(f"Set {config_key} from environment variable {env_key}")
+
+ # Apply standard mappings
+ for env_var, config_key in env_mappings.items():
+ if value := os.environ.get(env_var):
+ self._set_nested_value(config_key, value)
+ self.logger.debug(f"Overrode {config_key} from environment variable {env_var}")
+
+ def _set_nested_value(self, key_path: str, value: Any) -> None:
+ """Set a nested configuration value using : separator."""
+ keys = key_path.split(':')
+ current = self.config
+
+ # Navigate to the parent of the target key
+ for key in keys[:-1]:
+ if key not in current:
+ current[key] = {}
+ current = current[key]
+
+ # Convert string values to appropriate types
+ if isinstance(value, str):
+ # Try to parse as JSON first (for arrays/objects)
+ try:
+ value = json.loads(value)
+ except json.JSONDecodeError:
+ # Not JSON, try other conversions
+ if value.lower() in ('true', 'false'):
+ value = value.lower() == 'true'
+ elif value.isdigit():
+ value = int(value)
+ elif '.' in value and all(part.isdigit() for part in value.split('.', 1)):
+ value = float(value)
+
+ current[keys[-1]] = value
+
+ def _deep_merge(self, base: Dict[str, Any], update: Dict[str, Any]) -> None:
+ """Deep merge update dictionary into base dictionary."""
+ for key, value in update.items():
+ if key in base and isinstance(base[key], dict) and isinstance(value, dict):
+ self._deep_merge(base[key], value)
+ else:
+ base[key] = value
+
+ def _validate_configuration(self) -> None:
+ """Validate that required configuration values are present."""
+ if self.environment == Environment.PRODUCTION.value:
+ required_keys = [
+ 'PublisherTenantId',
+ 'ClientId',
+ 'ClientSecret',
+ 'Audience'
+ ]
+
+ missing_keys = []
+ for key in required_keys:
+ if not self.get_value(key):
+ missing_keys.append(key)
+
+ if missing_keys:
+ raise ValueError(f"Missing required configuration keys for production: {missing_keys}")
+
+ def _parse_structured_configs(self) -> None:
+ """Parse structured configuration sections into typed objects."""
+ # Parse server config
+ server_section = self.get_section("Server")
+ if server_section:
+ self._server_config = ServerConfig(
+ host=server_section.get("Host", "0.0.0.0"),
+ port=int(server_section.get("Port", 5000)),
+ workers=int(server_section.get("Workers", 1)),
+ shutdown_timeout=int(server_section.get("ShutdownTimeout", 10)),
+ force_shutdown_timeout=int(server_section.get("ForceShutdownTimeout", 15))
+ )
+ else:
+ self._server_config = ServerConfig()
+
+ # Parse security config
+ security_section = self.get_section("Security")
+ if security_section:
+ self._security_config = SecurityConfig(
+ allowed_hosts=security_section.get("AllowedHosts", ["*"]),
+ cors_origins=security_section.get("CorsOrigins", ["*"])
+ )
+ else:
+ self._security_config = SecurityConfig()
+
+ def get_value(self, key: str, default: Any = None) -> Any:
+ """
+ Get a configuration value by key.
+ Supports nested keys with : separator (e.g., "Logging:LogLevel:Default")
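+
+        Example (illustrative):
+            config.get_value("Storage:Metadata:JobsDirectory", "jobs")
+            config.get_value("Server:Port", 5000)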
+ """
+ keys = key.split(':')
+ value = self.config
+
+ for k in keys:
+ if isinstance(value, dict) and k in value:
+ value = value[k]
+ else:
+ return default
+
+ return value
+
+ def get_section(self, key: str) -> Dict[str, Any]:
+ """Get a configuration section as a dictionary."""
+ value = self.get_value(key, {})
+ return value if isinstance(value, dict) else {}
+
+ def get_connection_string(self, name: str) -> Optional[str]:
+ """Get a connection string by name (C# compatibility)."""
+ return self.get_value(f"ConnectionStrings:{name}")
+
+ def __getitem__(self, key: str) -> Any:
+ """Support dictionary-style access: config['key']."""
+ value = self.get_value(key)
+ if value is None:
+ raise KeyError(f"Configuration key '{key}' not found")
+ return value
+
+ def get(self, key: str, default: Any = None) -> Any:
+ """Support dict.get() style access."""
+ return self.get_value(key, default)
+
+ # Specific configuration accessors
+
+ def get_publisher_tenant_id(self) -> str:
+ """Get the PublisherTenantId configuration value."""
+ return self.get_value("PublisherTenantId", "")
+
+ def get_audience(self) -> str:
+ """Get the Audience configuration value."""
+ return self.get_value("Audience", "")
+
+ def get_client_id(self) -> str:
+ """Get the ClientId configuration value."""
+ return self.get_value("ClientId", "")
+
+ def get_client_secret(self) -> str:
+ """Get the ClientSecret configuration value."""
+ return self.get_value("ClientSecret", "")
+
+ # Storage configuration
+
+ def get_jobs_directory_name(self) -> str:
+ """Get the JobsDirectory configuration value."""
+ return self.get_value("Storage:Metadata:JobsDirectory", "jobs")
+
+ def get_common_metadata_file_name(self) -> str:
+ """Get the CommonMetadataFile configuration value."""
+ return self.get_value("Storage:Metadata:CommonMetadataFile", "common_metadata.json")
+
+ def get_type_specific_metadata_file_name(self) -> str:
+ """Get the TypeSpecificMetadataFile configuration value."""
+ return self.get_value("Storage:Metadata:TypeSpecificMetadataFile", "type_specific_metadata.json")
+
+ # Application configuration
+
+ def get_app_name(self) -> str:
+ """Get application name."""
+ return self.get_value("Application:Name", "Microsoft Fabric Python Backend")
+
+ def get_environment(self) -> str:
+ """Get current environment."""
+ return self.environment
+
+ def is_development(self) -> bool:
+ """Check if running in development environment."""
+ return self.environment == Environment.DEVELOPMENT.value
+
+ def is_production(self) -> bool:
+ """Check if running in production environment."""
+ return self.environment == Environment.PRODUCTION.value
+
+ def is_debug(self) -> bool:
+ """Check if debug mode is enabled."""
+ # In development, debug is true by default
+ if self.is_development():
+ return bool(self.get_value("Application:Debug", True))
+ # In production, debug is false by default
+ return bool(self.get_value("Application:Debug", False))
+
+ # Server configuration
+
+ def get_host(self) -> str:
+ """Get server host."""
+ if self._server_config:
+ return self._server_config.host
+ return "0.0.0.0"
+
+ def get_port(self) -> int:
+ """Get server port."""
+ if self._server_config:
+ return self._server_config.port
+ return 5000
+
+ def get_workers(self) -> int:
+ """Get number of workers."""
+ if self._server_config:
+ return self._server_config.workers
+ return 1
+
+ # Security configuration
+
+ def get_allowed_hosts(self) -> List[str]:
+ """Get allowed hosts."""
+ if self._security_config:
+ return self._security_config.allowed_hosts
+ return ["*"]
+
+ def get_cors_origins(self) -> List[str]:
+ """Get CORS origins."""
+ if self._security_config:
+ return self._security_config.cors_origins
+ return ["*"]
+
+ # Kestrel/Server configuration
+
+ def get_http_endpoint(self) -> str:
+ """Get HTTP endpoint URL."""
+ host = self.get_host()
+ port = self.get_port()
+ return f"http://{host}:{port}"
+
+ def get_https_endpoint(self) -> str:
+ """Get HTTPS endpoint URL."""
+ host = self.get_host()
+ port = self.get_port()
+ return f"https://{host}:{port + 1}"
+
+ # Logging configuration
+
+    def get_log_level(self) -> str:
+        """Get the default log level."""
+        return self.get_value("Logging:LogLevel:Default", "Information")
+
+ def get_shutdown_timeout(self) -> int:
+ """Get server shutdown timeout in seconds."""
+ if self._server_config:
+ return self._server_config.shutdown_timeout
+ return 10
+
+ def get_force_shutdown_timeout(self) -> int:
+ """Get force shutdown timeout in seconds."""
+ if self._server_config:
+ return self._server_config.force_shutdown_timeout
+ return 15
+
+ # ServiceRegistry integration
+
+ async def dispose_async(self) -> None:
+ """Dispose method for ServiceRegistry cleanup."""
+ self.logger.debug("ConfigurationService disposed")
+
+
+def get_configuration_service() -> ConfigurationService:
+ """
+ Get the ConfigurationService instance from ServiceRegistry.
+ This ensures proper lifecycle management and dependency injection.
+ """
+ from core.service_registry import get_service_registry
+
+ registry = get_service_registry()
+
+ # Check if already registered
+ if registry.has(ConfigurationService):
+ return registry.get(ConfigurationService)
+
+ # Create and register if not exists (bootstrap case)
+ config_service = ConfigurationService()
+ registry.register(ConfigurationService, config_service)
+
+ return config_service
\ No newline at end of file
diff --git a/Backend/python/src/services/http_client.py b/Backend/python/src/services/http_client.py
new file mode 100644
index 0000000..68894f5
--- /dev/null
+++ b/Backend/python/src/services/http_client.py
@@ -0,0 +1,204 @@
+import logging
+import httpx
+import asyncio
+from typing import Dict, Any, Optional
+
+class HttpClientService:
+ """
+ Singleton HTTP client service with connection pooling and retry logic.
+ Managed by ServiceRegistry for proper lifecycle management.
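+
+    Illustrative usage (a sketch; auth headers and transient-failure retries
+    are applied internally):
+        client = get_http_client_service()
+        response = await client.get(url, token)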
+ """
+ def __init__(self):
+ self.logger = logging.getLogger(__name__)
+ self._closed = False
+ self._client = httpx.AsyncClient(
+ timeout=httpx.Timeout(30.0, connect=10.0),
+ limits=httpx.Limits(
+ max_keepalive_connections=20,
+ max_connections=100,
+ keepalive_expiry=30.0
+ ),
+ follow_redirects=True,
+ event_hooks={
+ "request": [self._log_request],
+ "response": [self._log_response]
+ }
+ )
+
+ async def _log_request(self, request):
+ self.logger.debug(f"Request: {request.method} {request.url}")
+
+ async def _log_response(self, response):
+ request = response.request
+ try:
+ elapsed_time = 0
+ if hasattr(response, '_elapsed') and response._elapsed:
+ elapsed_time = response._elapsed.total_seconds()
+
+ self.logger.debug(
+ f"Response: {request.method} {request.url} - "
+ f"Status: {response.status_code} - Time: {elapsed_time:.2f}s"
+ )
+ except Exception:
+ # If we can't get timing, just log without it
+ self.logger.debug(
+ f"Response: {request.method} {request.url} - "
+ f"Status: {response.status_code}"
+ )
+
+ async def __aenter__(self):
+ return self
+
+ async def __aexit__(self, exc_type, exc_val, exc_tb):
+ await self.close()
+
+ async def close(self):
+ """Close the HTTP client."""
+ if not self._closed and hasattr(self, '_client'):
+ try:
+ # Check if we're in an async context
+ try:
+ asyncio.get_running_loop()
+ await self._client.aclose()
+ self._closed = True
+ self.logger.info("HTTP client closed successfully")
+ except RuntimeError:
+ # Not in async context, try sync close if available
+ if hasattr(self._client, 'close'):
+ self._client.close()
+ self._closed = True
+ self.logger.warning("HTTP client closed outside async context")
+ except Exception as e:
+ self.logger.error(f"Error closing HTTP client: {e}")
+ self._closed = True # Mark as closed anyway
+
+ async def dispose_async(self):
+ """Dispose method for ServiceRegistry cleanup."""
+ await self.close()
+
+ def _get_headers(self, token: str) -> Dict[str, str]:
+ """Create headers with proper authorization."""
+ headers = {}
+ if token.startswith("SubjectAndAppToken"):
+ headers["Authorization"] = token
+ else:
+ headers["Authorization"] = f"Bearer {token}"
+ headers["User-Agent"] = "Microsoft-Fabric-Workload/1.0"
+ return headers
+
+ async def _make_request(self, method: str, url: str, token: str, **kwargs) -> httpx.Response:
+ """Common request handling with retry logic."""
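+        # Makes up to 3 attempts for 5xx responses and network errors, waiting
+        # 2 ** attempt seconds between attempts (1s, then 2s).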
+ headers = self._get_headers(token)
+ headers.update(kwargs.pop('headers', {}))
+
+ max_retries = 3
+ for attempt in range(max_retries):
+ try:
+ response = await getattr(self._client, method)(
+ url, headers=headers, **kwargs
+ )
+ response.raise_for_status()
+ return response
+ except httpx.HTTPStatusError as e:
+ if e.response.status_code >= 500 and attempt < max_retries - 1:
+ wait_time = 2 ** attempt
+ self.logger.warning(
+ f"Request failed with {e.response.status_code}, "
+ f"retrying in {wait_time}s (attempt {attempt + 1}/{max_retries})"
+ )
+ await asyncio.sleep(wait_time)
+ continue
+ raise
+ except httpx.RequestError as e:
+ if attempt < max_retries - 1:
+ wait_time = 2 ** attempt
+ self.logger.warning(
+ f"Request error: {e}, retrying in {wait_time}s "
+ f"(attempt {attempt + 1}/{max_retries})"
+ )
+ await asyncio.sleep(wait_time)
+ continue
+ raise
+
+ async def get(self, url: str, token: str) -> httpx.Response:
+ """Performs a GET request to the specified URL."""
+ return await self._make_request('get', url, token)
+
+ async def put(self, url: str, content: Any, token: str) -> httpx.Response:
+ """Performs a PUT request to the specified URL."""
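+        # Content dispatch: str/bytes are sent as a raw body, "" sends an empty body,
+        # None sends no body, and any other object is serialized as JSON.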
+ kwargs = {}
+ if content == "":
+ kwargs['content'] = b""
+ elif content is None:
+ pass # No content
+ elif isinstance(content, (str, bytes)):
+ if isinstance(content, str):
+ content = content.encode("utf-8")
+ kwargs['content'] = content
+ else:
+ # JSON content for API calls
+ kwargs['json'] = content
+ kwargs['headers'] = {"Content-Type": "application/json"}
+
+ return await self._make_request('put', url, token, **kwargs)
+
+ async def post(self, url: str, content: Any, token: str) -> httpx.Response:
+ """Performs a POST request to the specified URL."""
+ kwargs = {}
+ if isinstance(content, (str, bytes)):
+ if isinstance(content, str):
+ content = content.encode('utf-8')
+ kwargs['content'] = content
+ else:
+ kwargs['json'] = content
+ kwargs['headers'] = {"Content-Type": "application/json"}
+
+ return await self._make_request('post', url, token, **kwargs)
+
+ async def patch(self, url: str, content: Optional[Any], token: str,
+ content_type: Optional[str] = None) -> httpx.Response:
+ """Performs a PATCH request to the specified URL."""
+ kwargs = {}
+ headers = {}
+
+ if content is None:
+ pass # No content
+ elif isinstance(content, bytes):
+ kwargs['content'] = content
+ if content_type:
+ headers["Content-Type"] = content_type
+ elif isinstance(content, str):
+ kwargs['content'] = content.encode('utf-8')
+ else:
+ kwargs['json'] = content
+ headers["Content-Type"] = "application/json"
+
+ if headers:
+ kwargs['headers'] = headers
+
+ return await self._make_request('patch', url, token, **kwargs)
+
+ async def delete(self, url: str, token: str) -> httpx.Response:
+ """Performs a DELETE request to the specified URL."""
+ return await self._make_request('delete', url, token)
+
+ async def head(self, url: str, token: str) -> httpx.Response:
+ """Performs a HEAD request to the specified URL."""
+ return await self._make_request('head', url, token)
+
+def get_http_client_service() -> HttpClientService:
+ """
+ Get the singleton HttpClientService instance from ServiceRegistry.
+ This ensures proper lifecycle management and dependency injection.
+ """
+ from core.service_registry import get_service_registry
+ registry = get_service_registry()
+ try:
+ return registry.get(HttpClientService)
+ except KeyError:
+ logger = logging.getLogger(__name__)
+ logger.error("HttpClientService not found in registry. Was the service initialized?")
+ raise RuntimeError(
+ "HttpClientService not initialized. Please ensure ServiceInitializer.initialize_all_services() "
+ "has been called during application startup."
+ )
\ No newline at end of file
diff --git a/Backend/python/src/services/item_factory.py b/Backend/python/src/services/item_factory.py
new file mode 100644
index 0000000..90d2a6a
--- /dev/null
+++ b/Backend/python/src/services/item_factory.py
@@ -0,0 +1,36 @@
+import logging
+from typing import Dict, Type
+from models.authentication_models import AuthorizationContext
+from items.base_item import ItemBase
+from items.item1 import Item1
+from constants.workload_constants import WorkloadConstants
+from exceptions.exceptions import UnexpectedItemTypeException
+
+logger = logging.getLogger(__name__)
+
+class ItemFactory:
+ def __init__(self):
+ self.logger = logging.getLogger(__name__)
+
+ def create_item(self, item_type: str, auth_context: AuthorizationContext) -> ItemBase:
+ """Create an instance of the specified item type."""
+ self.logger.info(f"Creating item of type {item_type}")
+ if item_type == WorkloadConstants.ItemTypes.ITEM1:
+ return Item1(auth_context)
+ else:
+ self.logger.error(f"Unexpected item type: {item_type}")
+ raise UnexpectedItemTypeException(f"Items of type {item_type} are not supported")
+
+
+def get_item_factory() -> ItemFactory:
+ # Use a singleton pattern for consistency
+ from core.service_registry import get_service_registry
+ registry = get_service_registry()
+
+ if not registry.has(ItemFactory):
+ if not hasattr(get_item_factory, "instance"):
+ get_item_factory.instance = ItemFactory()
+ return get_item_factory.instance
+
+ return registry.get(ItemFactory)
\ No newline at end of file
diff --git a/Backend/python/src/services/item_metadata_store.py b/Backend/python/src/services/item_metadata_store.py
new file mode 100644
index 0000000..001ad4e
--- /dev/null
+++ b/Backend/python/src/services/item_metadata_store.py
@@ -0,0 +1,257 @@
+import asyncio
+import logging
+import json
+import os
+import shutil
+from typing import Any, Optional, TypeVar, Type
+from pathlib import Path
+import aiofiles
+from models.job_metadata import JobMetadata
+from models.common_item_metadata import CommonItemMetadata
+from models.item_metadata import ItemMetadata
+from constants.workload_constants import WorkloadConstants
+from services.configuration_service import get_configuration_service
+
+T = TypeVar('T')
+
+
+class ItemMetadataStore:
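+    """File-based store for item and job metadata.
+
+    On-disk layout with the default configuration values (illustrative):
+        <data_dir>/<tenant_id>/<item_id>/common_metadata.json
+        <data_dir>/<tenant_id>/<item_id>/type_specific_metadata.json
+        <data_dir>/<tenant_id>/<item_id>/jobs/<job_id>.json
+    """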
+ def __init__(self):
+ self.logger = logging.getLogger(__name__)
+ self.config_service = get_configuration_service()
+        self.data_dir = self.get_base_directory_path(WorkloadConstants.WORKLOAD_NAME)
+        os.makedirs(self.data_dir, exist_ok=True)
+        self.logger.debug(f"Using data directory: {self.data_dir}")
+
+ async def _ensure_dir_exists(self, path: Path) -> None:
+ """Ensure a directory exists, in a non-blocking way."""
+ # Run directory creation in a thread to avoid blocking
+ await asyncio.to_thread(os.makedirs, path, exist_ok=True)
+
+ def get_base_directory_path(self, workload_name: str) -> Path:
+ """Get the application data directory for the workload."""
+ if os.name == 'nt':
+ # On Windows, use APPDATA environment variable (Roaming)
+ appdata = os.environ.get('APPDATA')
+ if not appdata:
+ # Fallback if APPDATA is not set
+ appdata = os.path.expanduser('~\\AppData\\Roaming')
+ base_path = Path(appdata)
+ else:
+ base_path = Path.home() / '.local' / 'share'
+ return base_path / workload_name
+
+
+    def _get_item_dir_path(self, tenant_id: str, item_id: str) -> Path:
+        """Get directory path for an item."""
+        tenant_id_str = str(tenant_id)
+        item_id_str = str(item_id)
+        tenant_dir = self.data_dir / tenant_id_str
+        return tenant_dir / item_id_str
+
+ def _get_common_metadata_path(self, tenant_id: str, item_id: str) -> Path:
+ """Get path for common metadata file."""
+ item_dir = self._get_item_dir_path(tenant_id, item_id)
+ return item_dir / self.config_service.get_common_metadata_file_name()
+
+ def _get_type_specific_metadata_path(self, tenant_id: str, item_id: str) -> Path:
+ """Get path for type-specific metadata file."""
+ item_dir = self._get_item_dir_path(tenant_id, item_id)
+ return item_dir / self.config_service.get_type_specific_metadata_file_name()
+
+ def _get_job_metadata_path(self, tenant_id: str, item_id: str, job_id: str) -> Path:
+ """Get path for job metadata file."""
+ item_dir = self._get_item_dir_path(tenant_id, item_id)
+ jobs_dir = item_dir / self.config_service.get_jobs_directory_name()
+ return jobs_dir / f"{job_id}.json"
+
+    # TODO: change the type of type_specific_metadata!
+ async def upsert(
+ self,
+ tenant_id: str,
+ item_id: str,
+ common_metadata: CommonItemMetadata,
+ type_specific_metadata: Any
+ ) -> None:
+ """Create or update an item's metadata.
+
+ Args:
+ tenant_id: The tenant ID
+ item_id: The item ID
+ common_metadata: The common metadata model
+ type_specific_metadata: The type-specific metadata model
+ """
+ self.logger.info(f"Upserting metadata for item {item_id} in tenant {tenant_id}")
+
+ # Ensure directories exist
+ item_dir = self._get_item_dir_path(tenant_id, item_id)
+ await self._ensure_dir_exists(item_dir)
+
+ # Save common metadata
+ common_path = self._get_common_metadata_path(tenant_id, item_id)
+ async with aiofiles.open(common_path, 'w') as f:
+ # Convert model to dictionary for JSON serialization
+ common_data = common_metadata.model_dump(mode='json')
+ await f.write(json.dumps(common_data, indent=2))
+
+ # Save type-specific metadata
+ specific_path = self._get_type_specific_metadata_path(tenant_id, item_id)
+ async with aiofiles.open(specific_path, 'w') as f:
+            # Handle different types of metadata objects
+            if hasattr(type_specific_metadata, 'model_dump'):
+                # If it's a Pydantic model, use model_dump()
+                data = type_specific_metadata.model_dump(mode='json', by_alias=True)
+            else:
+                # Otherwise, serialize the raw object directly
+                data = type_specific_metadata
+            await f.write(json.dumps(data, indent=2))
+
+    async def load(self, tenant_id: str, item_id: str, metadata_class: Optional[Type[T]] = None) -> ItemMetadata[T]:
+ """Load an item's metadata.
+
+ Args:
+ tenant_id: The tenant ID
+ item_id: The item ID
+ metadata_class: Optional type-specific metadata class to instantiate
+
+ Returns:
+ An ItemMetadata instance with both common and type-specific metadata
+
+ Raises:
+ FileNotFoundError: If the item metadata doesn't exist
+ """
+ self.logger.info(f"Loading metadata for item {item_id} in tenant {tenant_id}")
+
+ # Load common metadata
+ common_path = self._get_common_metadata_path(tenant_id, item_id)
+ type_specific_path = self._get_type_specific_metadata_path(tenant_id, item_id)
+
+ common_exists = await asyncio.to_thread(common_path.exists)
+ type_specific_exists = await asyncio.to_thread(type_specific_path.exists)
+
+ if not common_exists or not type_specific_exists:
+ self.logger.error(f"Metadata not found for item {item_id} in tenant {tenant_id}")
+ raise FileNotFoundError(f"Item metadata not found for {item_id}")
+
+ async with aiofiles.open(common_path, 'r') as f:
+ common_data = json.loads(await f.read())
+ common_metadata = CommonItemMetadata(**common_data)
+
+ async with aiofiles.open(type_specific_path, 'r') as f:
+ type_specific_data = json.loads(await f.read())
+
+ # If a specific metadata class was provided, instantiate it
+ if metadata_class:
+ type_specific_metadata = metadata_class(**type_specific_data)
+ else:
+ # Otherwise just use the raw data
+ type_specific_metadata = type_specific_data
+
+        self.logger.debug(f"Metadata loaded for item {item_id} in tenant {tenant_id}. "
+                          f"Common: {common_metadata}; type-specific: {type_specific_metadata}")
+
+ return ItemMetadata(
+ common_metadata=common_metadata,
+ type_specific_metadata=type_specific_metadata
+ )
+
+ async def exists(self, tenant_id: str, item_id: str) -> bool:
+ """Check if an item's metadata exists."""
+ common_path = self._get_common_metadata_path(tenant_id, item_id)
+ type_specific_path = self._get_type_specific_metadata_path(tenant_id, item_id)
+
+ common_exists = await asyncio.to_thread(common_path.exists)
+ type_specific_exists = await asyncio.to_thread(type_specific_path.exists)
+ return common_exists and type_specific_exists
+
+ async def delete(self, tenant_id: str, item_id: str) -> None:
+ """Delete an item's metadata."""
+ self.logger.info(f"Deleting metadata for item {item_id} in tenant {tenant_id}")
+ item_dir = self._get_item_dir_path(tenant_id, item_id)
+
+ dir_exists = await asyncio.to_thread(item_dir.exists)
+ if dir_exists:
+ await asyncio.to_thread(shutil.rmtree, item_dir)
+ else:
+ self.logger.warning(f"Item directory {item_dir} does not exist, nothing to delete.")
+ self.logger.info(f"Metadata for item {item_id} in tenant {tenant_id} deleted successfully.")
+
+ async def upsert_job(
+ self,
+ tenant_id: str,
+ item_id: str,
+ job_id: str,
+ job_metadata: JobMetadata
+ ) -> None:
+ """Create or update job metadata.
+
+ Args:
+ tenant_id: The tenant ID
+ item_id: The item ID
+ job_id: The job ID
+ job_metadata: The job metadata model
+ """
+ self.logger.info(f"Upserting job metadata for job {job_id} in item {item_id}")
+
+ jobs_dir = self._get_item_dir_path(tenant_id, item_id) / self.config_service.get_jobs_directory_name()
+ await self._ensure_dir_exists(jobs_dir)
+
+ job_path = self._get_job_metadata_path(tenant_id, item_id, job_id)
+ async with aiofiles.open(job_path, 'w') as f:
+ job_data = job_metadata.model_dump(mode='json')
+ await f.write(json.dumps(job_data, indent=2))
+
+ async def load_job(
+ self,
+ tenant_id: str,
+ item_id: str,
+ job_id: str
+ ) -> JobMetadata:
+ """Load job metadata.
+
+ Args:
+ tenant_id: The tenant ID
+ item_id: The item ID
+ job_id: The job ID
+
+ Returns:
+ JobMetadata: The job metadata model
+
+ Raises:
+ FileNotFoundError: If the job metadata doesn't exist
+ """
+ job_path = self._get_job_metadata_path(tenant_id, item_id, job_id)
+ job_exists = await asyncio.to_thread(job_path.exists)
+
+ if not job_exists:
+ self.logger.error(f"Metadata not found for job {job_id} in item {item_id}")
+ raise FileNotFoundError(f"Job metadata not found for job {job_id}")
+
+ async with aiofiles.open(job_path, 'r') as f:
+ job_data = json.loads(await f.read())
+ return JobMetadata(**job_data)
+
+ async def exists_job(self, tenant_id: str, item_id: str, job_id: str) -> bool:
+ """Check if job metadata exists."""
+ job_path = self._get_job_metadata_path(tenant_id, item_id, job_id)
+ return await asyncio.to_thread(job_path.exists)
+
+ async def delete_job(self, tenant_id: str, item_id: str, job_id: str) -> None:
+ """Delete job metadata."""
+ job_path = self._get_job_metadata_path(tenant_id, item_id, job_id)
+ job_exists = await asyncio.to_thread(job_path.exists)
+ if job_exists:
+ await asyncio.to_thread(os.remove, job_path)
+
+
+def get_item_metadata_store() -> ItemMetadataStore:
+ from core.service_registry import get_service_registry
+ registry = get_service_registry()
+
+ if not registry.has(ItemMetadataStore):
+ if not hasattr(get_item_metadata_store, "instance"):
+ get_item_metadata_store.instance = ItemMetadataStore()
+ return get_item_metadata_store.instance
+
+ return registry.get(ItemMetadataStore)
\ No newline at end of file
diff --git a/Backend/python/src/services/lakehouse_client_service.py b/Backend/python/src/services/lakehouse_client_service.py
new file mode 100644
index 0000000..bc294d8
--- /dev/null
+++ b/Backend/python/src/services/lakehouse_client_service.py
@@ -0,0 +1,219 @@
+import json
+import logging
+import urllib.parse
+from typing import List, Optional
+from uuid import UUID
+
+import httpx
+
+from constants.environment_constants import EnvironmentConstants
+from models.fabric_item import FabricItem
+from models.lakehouse_table import LakehouseTable
+from models.lakehouse_file import LakehouseFile
+from models.onelake_folder import OneLakePathContainer, OneLakePathData
+
+
+class LakehouseClientService:
+ """Service for interacting with Fabric Lakehouse and OneLake storage."""
+
+ def __init__(self):
+ self.logger = logging.getLogger(__name__)
+ self._http_client_service = None
+
+ @property
+ def http_client_service(self):
+ """Lazy load HTTP client service."""
+ if self._http_client_service is None:
+ from services.http_client import get_http_client_service
+ self._http_client_service = get_http_client_service()
+ return self._http_client_service
+
+ async def dispose_async(self):
+ """Cleanup method for service registry."""
+ self.logger.debug("LakehouseClientService disposed")
+
+ async def get_lakehouse_tables(self, token: str, workspace_id: UUID, lakehouse_id: UUID) -> List[LakehouseTable]:
+ """
+ Retrieves a list of tables available in the current lakehouse.
+
+ Args:
+ token: The access token required to authorize the API requests.
+ workspace_id: The id of the workspace that contains the selected lakehouse.
+ lakehouse_id: The id of the lakehouse from which we want to retrieve tables.
+
+ Returns:
+ A list of LakehouseTable objects.
+
+ Raises:
+ httpx.HTTPStatusError: If the HTTP request fails.
+ Exception: For other types of errors.
+ """
+ directory = f"{lakehouse_id}/Tables/"
+ onelake_container = await self._get_path_list(token, workspace_id, directory, recursive=True)
+ delta_log_directory = "/_delta_log"
+
+ # Filter and map paths to LakehouseTable objects
+ tables = []
+
+        # A OneLake table is a Delta table consisting of Parquet files and a _delta_log/ directory,
+        # or a shortcut to another OneLake table
+ filtered_paths = [
+ path for path in onelake_container.paths
+ if path.name.endswith(delta_log_directory) or
+            (path.is_shortcut and path.account_type == "ADLS")
+ ]
+
+ for path in filtered_paths:
+ path_name = path.name
+ parts = path_name.split('/')
+ schema_name = None
+
+ # Check if the path ends with '_delta_log' and remove it if needed
+ if path_name.endswith(delta_log_directory):
+ path_name = '/'.join(parts[:-1])
+ parts = path_name.split('/')
+
+            # Path structure without schema: <lakehouseId>/Tables/<tableName> (3 parts long)
+            # Path structure with schema: <lakehouseId>/Tables/<schemaName>/<tableName> (4 parts long)
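+            # Example: "<lakehouseId>/Tables/dbo/MyTable" splits into 4 parts,
+            # giving schema "dbo" and table "MyTable"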
+ table_name = parts[-1]
+ if len(parts) == 4:
+ schema_name = parts[2]
+
+ tables.append(LakehouseTable(
+ name=table_name,
+ path=path_name + '/',
+ schema=schema_name
+ ))
+
+ return tables
+
+ async def get_fabric_lakehouse(self, token: str, workspace_id: UUID, lakehouse_id: UUID) -> Optional[FabricItem]:
+ """
+ Get Lakehouse item from Fabric.
+
+ Args:
+ token: The bearer token for authentication.
+ workspace_id: The workspace id of the lakehouse.
+ lakehouse_id: The Lakehouse id.
+
+ Returns:
+ Lakehouse item metadata or None if retrieval fails.
+ """
+ url = f"{EnvironmentConstants.FABRIC_API_BASE_URL}/v1/workspaces/{workspace_id}/items/{lakehouse_id}"
+
+ try:
+ response = await self.http_client_service.get(url, token)
+ response.raise_for_status() # This will raise an exception for non-2xx status codes
+
+ lakehouse_data = response.json()
+ return FabricItem(**lakehouse_data)
+
+ except Exception as ex:
+ self.logger.error(
+ f"Failed to retrieve FabricLakehouse for lakehouse: {lakehouse_id} "
+ f"in workspace: {workspace_id}. Error: {str(ex)}"
+ )
+ return None
+
+ async def get_lakehouse_files(self, token: str, workspace_id: UUID, lakehouse_id: UUID) -> List[LakehouseFile]:
+ """
+ Retrieves a list of files available in the current lakehouse.
+
+ Args:
+ token: The access token required to authorize the API requests.
+ workspace_id: The id of the workspace that contains the selected lakehouse.
+ lakehouse_id: The id of the lakehouse from which we want to retrieve files.
+
+ Returns:
+ A list of LakehouseFile objects.
+
+ Raises:
+ httpx.HTTPStatusError: If the HTTP request fails.
+ Exception: For other types of errors.
+ """
+ directory = f"{lakehouse_id}/Files/"
+ onelake_container = await self._get_path_list(token, workspace_id, directory, recursive=True)
+
+ files = []
+ for path in onelake_container.paths:
+ path_name = path.name
+ parts = path_name.split('/')
+
+            # Path structure: <lakehouseId>/Files/....../<fileName>
+ file_name = parts[-1]
+
+ # Remove the prefix (lakehouseId/Files/) from the path
+ relative_path = path_name[len(directory):] if len(path_name) > len(directory) else ""
+
+ files.append(LakehouseFile(
+ name=file_name,
+ path=relative_path,
+ is_directory=path.is_directory
+ ))
+
+ return files
+
+ async def _get_path_list(
+ self,
+ token: str,
+ workspace_id: UUID,
+ directory: str,
+ recursive: bool = False
+ ) -> OneLakePathContainer:
+ """
+ Retrieves a list of paths available in the selected directory.
+
+ Args:
+ token: The access token required to authorize the API requests.
+ workspace_id: The id of the workspace that contains the directory.
+ directory: The directory containing the desired paths.
+ recursive: Whether to search the entire directory or only immediate descendants.
+
+ Returns:
+ OneLakePathContainer with the list of paths.
+
+ Raises:
+ httpx.HTTPStatusError: If the HTTP request fails.
+ Exception: For other types of errors.
+ """
+ # Create the URL using the provided source
+ encoded_directory = urllib.parse.quote(directory)
+ url = (
+ f"{EnvironmentConstants.ONELAKE_DFS_BASE_URL}/{workspace_id}/"
+ f"?recursive={str(recursive).lower()}&resource=filesystem"
+ f"&directory={encoded_directory}&getShortcutMetadata=true"
+ )
+
+ try:
+ # Set the Authorization header using the bearer token
+ response = await self.http_client_service.get(url, token)
+ response.raise_for_status() # This will raise httpx.HTTPStatusError for non-2xx status codes
+
+ # Parse the response content as JSON and create typed object
+ content = response.json()
+
+ # Convert the raw JSON to our Pydantic model
+ return OneLakePathContainer(**content)
+
+ except httpx.HTTPStatusError as ex:
+ # Handle HTTP request failure
+ self.logger.error(f"HTTP request failed: {str(ex)}")
+ raise
+
+ except Exception as ex:
+ # Handle other types of exceptions
+ self.logger.error(f"Error in _get_path_list: {str(ex)}")
+ raise
+
+
+def get_lakehouse_client_service() -> LakehouseClientService:
+ """Get the singleton LakehouseClientService instance."""
+ from core.service_registry import get_service_registry
+ registry = get_service_registry()
+
+ if not registry.has(LakehouseClientService):
+ if not hasattr(get_lakehouse_client_service, "instance"):
+ get_lakehouse_client_service.instance = LakehouseClientService()
+ return get_lakehouse_client_service.instance
+
+ return registry.get(LakehouseClientService)
\ No newline at end of file
diff --git a/Backend/python/src/services/onelake_client_service.py b/Backend/python/src/services/onelake_client_service.py
new file mode 100644
index 0000000..dd69cd2
--- /dev/null
+++ b/Backend/python/src/services/onelake_client_service.py
@@ -0,0 +1,210 @@
+import json
+import logging
+from typing import List, Optional
+from uuid import UUID
+
+from constants.environment_constants import EnvironmentConstants
+
+
+class OneLakeClientService:
+ def __init__(self):
+ self.logger = logging.getLogger(__name__)
+ self._http_client_service = None
+
+ @property
+ def http_client_service(self):
+ """Lazy load HTTP client service."""
+ if self._http_client_service is None:
+ from services.http_client import get_http_client_service
+ self._http_client_service = get_http_client_service()
+ return self._http_client_service
+
+ async def dispose_async(self):
+ """Cleanup method for service registry."""
+ # No cleanup needed as HTTP client is managed by registry
+ self.logger.debug("OneLakeClientService disposed")
+
+ async def check_if_file_exists(self, token: str, file_path: str) -> bool:
+ """
+ Checks if a file exists in OneLake storage.
+ """
+ url = f"{EnvironmentConstants.ONELAKE_DFS_BASE_URL}/{file_path}?resource=file"
+
+ try:
+ response = await self.http_client_service.head(url, token)
+
+ if response.status_code == 200:
+ return True
+ elif response.status_code == 404:
+ return False
+ else:
+ self.logger.warning(f"check_if_file_exists received unexpected status code: {response.status_code}")
+ return False
+ except Exception as ex:
+ self.logger.error(f"check_if_file_exists failed for filePath: {file_path}. Error: {str(ex)}")
+ return False
+
+ async def get_onelake_folder_names(self, token: str, workspace_id: UUID, item_id: UUID) -> Optional[List[str]]:
+ """
+        Returns the names of the folders under the item's root folder in OneLake, if it exists.
+ """
+ url = f"{EnvironmentConstants.ONELAKE_DFS_BASE_URL}/{workspace_id}"
+ append_query = self._build_get_onelake_folders_query_parameters(item_id)
+ append_url = f"{url}?{append_query}"
+
+ try:
+ response = await self.http_client_service.get(append_url, token)
+
+ if response.status_code == 200:
+ get_folders_result_str = response.text
+ get_folders_result_obj = json.loads(get_folders_result_str)
+ paths = get_folders_result_obj.get("paths", [])
+ return [f["name"] for f in paths if f.get("isDirectory", False)]
+ elif response.status_code == 404:
+ return None
+ else:
+ self.logger.warning(f"get_onelake_folder_names received unexpected status code: {response.status_code}")
+ return None
+ except Exception as ex:
+ self.logger.error(f"get_onelake_folder_names failed for workspaceId: {workspace_id}, itemId: {item_id}. Error: {str(ex)}")
+ return None
+
+ async def write_to_onelake_file(self, token: str, file_path: str, content: str):
+ """
+ Writes content to a OneLake file, overwriting any existing data.
+ """
+ url = f"{EnvironmentConstants.ONELAKE_DFS_BASE_URL}/{file_path}?resource=file"
+
+ try:
+ # Create a new file or overwrite existing
+ response = await self.http_client_service.put(url, "", token)
+ if response.status_code < 200 or response.status_code > 299:
+ self.logger.error(f"write_to_onelake_file Creating a new file failed for filePath: {file_path}. Status: {response.status_code}")
+ return
+
+ self.logger.info(f"write_to_onelake_file Creating a new file succeeded for filePath: {file_path}")
+ except Exception as ex:
+ self.logger.error(f"write_to_onelake_file Creating a new file failed for filePath: {file_path}. Error: {str(ex)}")
+ return
+
+ # Append content to the file
+ await self._append_to_onelake_file(token, file_path, content)
+
+ async def get_onelake_file(self, token: str, source: str) -> str:
+ """
+ Retrieves the content of a file from OneLake.
+ """
+ url = f"{EnvironmentConstants.ONELAKE_DFS_BASE_URL}/{source}"
+
+ try:
+ response = await self.http_client_service.get(url, token)
+ if response.status_code < 200 or response.status_code > 299:
+ self.logger.error(f"get_onelake_file failed for source: {source}. Status: {response.status_code}")
+ return ""
+
+ content = response.text
+ self.logger.info(f"get_onelake_file succeeded for source: {source}")
+ return content
+ except Exception as ex:
+ self.logger.error(f"get_onelake_file failed for source: {source}. Error: {str(ex)}")
+ return ""
+
+ async def delete_onelake_file(self, token: str, file_path: str):
+ """
+ Deletes a file from OneLake.
+ """
+ url = f"{EnvironmentConstants.ONELAKE_DFS_BASE_URL}/{file_path}?recursive=true"
+
+ try:
+ response = await self.http_client_service.delete(url, token)
+ if response.status_code < 200 or response.status_code > 299:
+ self.logger.error(f"delete_onelake_file failed for filePath: {file_path}. Status: {response.status_code}")
+ return
+
+ self.logger.info(f"delete_onelake_file succeeded for filePath: {file_path}")
+ except Exception as ex:
+ self.logger.error(f"delete_onelake_file failed for filePath: {file_path}. Error: {str(ex)}")
+
+ def get_onelake_file_path(self, workspace_id: str, item_id: str, filename: str) -> str:
+ """
+ Returns the path to a file in OneLake storage.
+ """
+ return f"{workspace_id}/{item_id}/Files/{filename}"
+
+ async def _append_to_onelake_file(self, token: str, file_path: str, content: str):
+ """
+        Appends content to a OneLake file and flushes the changes.
+ """
+ url = f"{EnvironmentConstants.ONELAKE_DFS_BASE_URL}/{file_path}"
+ append_query = self._build_append_query_parameters()
+ append_url = f"{url}?{append_query}"
+
+ try:
+ # Perform the append action
+ encoded_content = content.encode('utf-8')
+ response = await self.http_client_service.patch(append_url, encoded_content, token)
+ if response.status_code < 200 or response.status_code > 299:
+ self.logger.error(f"_append_to_onelake_file failed for filePath: {file_path}. Status: {response.status_code}")
+ return
+
+ # Calculate the length of the content that was appended
+ content_length = len(encoded_content)
+
+ # Update the flush URL with the correct position
+ flush_query = self._build_flush_query_parameters(content_length)
+ flush_url = f"{url}?{flush_query}"
+
+ # Perform a flush to finalize the changes
+ flush_response = await self.http_client_service.patch(flush_url, None, token)
+ if flush_response.status_code < 200 or flush_response.status_code > 299:
+ self.logger.error(f"_append_to_onelake_file flush failed for filePath: {file_path}. Status: {flush_response.status_code}")
+ return
+
+ self.logger.info(f"_append_to_onelake_file succeeded for filePath: {file_path}")
+ except Exception as ex:
+ self.logger.error(f"_append_to_onelake_file failed for filePath: {file_path}. Error: {str(ex)}")
+
+ def _build_append_query_parameters(self) -> str:
+ """
+ Builds query parameters for appending to a file.
+ """
+ query_parameters = [
+ "position=0",
+ "action=append"
+ ]
+ return "&".join(query_parameters)
+
+ def _build_flush_query_parameters(self, content_length: int) -> str:
+ """
+ Builds query parameters for flushing a file.
+ """
+ query_parameters = [
+ f"position={content_length}",
+ "action=flush"
+ ]
+ return "&".join(query_parameters)
+
+ def _build_get_onelake_folders_query_parameters(self, item_id: UUID) -> str:
+ """
+ Builds query parameters for getting OneLake folders.
+ """
+ query_parameters = [
+ f"directory={item_id}",
+ "resource=filesystem",
+ "recursive=false"
+ ]
+ return "&".join(query_parameters)
+
+def get_onelake_client_service() -> OneLakeClientService:
+ """Get the singleton OneLakeClientService instance."""
+ from core.service_registry import get_service_registry
+ registry = get_service_registry()
+
+ if not registry.has(OneLakeClientService):
+ if not hasattr(get_onelake_client_service, "instance"):
+ get_onelake_client_service.instance = OneLakeClientService()
+ return get_onelake_client_service.instance
+
+ return registry.get(OneLakeClientService)
\ No newline at end of file
diff --git a/Backend/python/src/services/open_id_connect_configuration.py b/Backend/python/src/services/open_id_connect_configuration.py
new file mode 100644
index 0000000..122c588
--- /dev/null
+++ b/Backend/python/src/services/open_id_connect_configuration.py
@@ -0,0 +1,97 @@
+import httpx
+import asyncio
+import time
+import logging
+from typing import Dict, Any, Optional, List
+from constants.api_constants import ApiConstants
+
+logger = logging.getLogger(__name__)
+
+class OpenIdConnectConfiguration:
+ """Configuration container for OpenID Connect metadata."""
+
+ def __init__(self, issuer: str, jwks_data: Dict[str, Any]):
+ self.issuer_configuration = issuer
+ self._signing_keys = jwks_data.get("keys", [])
+
+ @property
+ def signing_keys(self) -> List[Dict[str, Any]]:
+ """Gets the signing keys for JWT validation."""
+ return self._signing_keys
+
+class OpenIdConnectConfigurationManager:
+ """
+ Manager for fetching and caching OpenID Connect configuration.
+ """
+ _instance = None
+ _instance_lock = asyncio.Lock()
+
+ def __init__(self, metadata_endpoint: str, cache_duration_seconds: int = 3600):
+ self.metadata_endpoint = metadata_endpoint
+ self.cache_duration_seconds = cache_duration_seconds
+ self.configuration: Optional[OpenIdConnectConfiguration] = None
+ self.last_updated: float = 0
+ self._lock = asyncio.Lock() # For thread-safe updates
+
+ async def get_configuration_async(self, timeout_seconds: int = 5) -> OpenIdConnectConfiguration:
+ """
+ Gets or refreshes the OpenID Connect configuration.
+ """
+ current_time = time.time()
+
+ # Return cached configuration if still valid
+ if self.configuration and current_time - self.last_updated < self.cache_duration_seconds:
+ return self.configuration
+
+ # Use lock to prevent multiple concurrent refreshes
+ async with self._lock:
+            # Re-check with a fresh timestamp in case another request refreshed
+            # the configuration while we were waiting for the lock
+            current_time = time.time()
+            if self.configuration and current_time - self.last_updated < self.cache_duration_seconds:
+ return self.configuration
+
+ # Fetch new configuration with timeout
+ try:
+ async with httpx.AsyncClient() as client:
+ # Get OpenID configuration
+ response = await client.get(
+ self.metadata_endpoint,
+ timeout=timeout_seconds
+ )
+ response.raise_for_status()
+ config_data = response.json()
+
+ # Fetch signing keys (JWKS)
+ jwks_uri = config_data.get("jwks_uri")
+ if not jwks_uri:
+ raise ValueError("JWKS URI not found in OpenID configuration")
+
+ jwks_response = await client.get(jwks_uri, timeout=timeout_seconds)
+ jwks_response.raise_for_status()
+ jwks_data = jwks_response.json()
+
+ # Create and cache the configuration
+ self.configuration = OpenIdConnectConfiguration(
+ issuer=config_data.get("issuer"),
+ jwks_data=jwks_data
+ )
+
+ self.last_updated = current_time
+ logger.info(f"OpenID Connect configuration refreshed from {self.metadata_endpoint}")
+
+ return self.configuration
+
+ except Exception as e:
+ logger.error(f"Failed to fetch OpenID Connect configuration: {str(e)}")
+ if not self.configuration:
+ raise # Only raise if we don't have a cached configuration
+ logger.warning("Returning expired cached configuration")
+ return self.configuration
+
+async def get_openid_manager_service() -> OpenIdConnectConfigurationManager:
+    """Get the singleton OpenIdConnectConfigurationManager instance."""
+ async with OpenIdConnectConfigurationManager._instance_lock:
+ if OpenIdConnectConfigurationManager._instance is None:
+ metadata_endpoint = ApiConstants.DEFAULT_OPENID_CONFIG_ENDPOINT
+ OpenIdConnectConfigurationManager._instance = OpenIdConnectConfigurationManager(metadata_endpoint)
+ logger.info(f"Created OpenID Connect configuration manager with endpoint: {metadata_endpoint}")
+ return OpenIdConnectConfigurationManager._instance
\ No newline at end of file
diff --git a/Backend/python/tests/README.md b/Backend/python/tests/README.md
new file mode 100644
index 0000000..fb3e53c
--- /dev/null
+++ b/Backend/python/tests/README.md
@@ -0,0 +1,253 @@
+# Testing Guide for Python Backend
+
+This guide explains how to set up and run tests for the Microsoft Fabric Python Backend sample.
+
+## 📋 Prerequisites
+
+- **Python 3.8+** installed on your system
+- **pip** package manager
+- **Virtual environment** (strongly recommended)
+
+## 🚀 Quick Start
+
+### 1. Set up your environment
+
+First, navigate to the Python Backend directory:
+```bash
+cd Backend
+cd python
+```
+
+Create and activate a virtual environment:
+
+**Windows:**
+```bash
+python -m venv venv
+.\venv\Scripts\activate
+```
+
+**Linux/macOS:**
+```bash
+python3 -m venv venv
+source venv/bin/activate
+```
+
+### 2. Install dependencies
+
+Install both application and test dependencies:
+```bash
+pip install -r requirements.txt
+pip install -r tests/requirements-test.txt
+```
+
+### 3. Run tests
+
+The easiest way is to use our cross-platform test runner:
+```bash
+python run_tests.py
+```
+
+## 📊 Test Commands
+
+### Using the Test Runner Script
+
+| Command | Description |
+|---------|-------------|
+| `python run_tests.py` | Run all tests |
+| `python run_tests.py unit` | Run only unit tests |
+| `python run_tests.py integration` | Run only integration tests |
+| `python run_tests.py coverage` | Run tests with coverage report |
+| `python run_tests.py specific test_name` | Run tests matching a pattern |
+| `python run_tests.py parallel` | Run tests in parallel (faster) |
+| `python run_tests.py watch` | Auto-run tests on file changes |
+| `python run_tests.py debug` | Run with verbose debugging output |
+
+### Using pytest Directly
+
+For more control, you can use pytest commands:
+
+```bash
+# Run all tests with verbose output
+pytest tests/ -v
+
+# Run with coverage and generate HTML report
+pytest tests/ --cov=src --cov-report=html --cov-report=term
+
+# Run specific test file
+pytest tests/unit/controllers/test_item_lifecycle_controller.py -v
+
+# Run tests matching a keyword
+pytest tests/ -k "test_create_item" -v
+
+# Run tests by marker
+pytest tests/ -m unit # Unit tests only
+pytest tests/ -m integration # Integration tests only
+pytest tests/ -m api # API tests only
+```
+
+## 📁 Test Structure
+
+```
+python/
+├── src/                      # Application source code
+├── tests/
+│   ├── __init__.py           # Package initialization
+│   ├── conftest.py           # Pytest configuration and shared fixtures
+│   ├── test_fixtures.py      # Common test data (UUIDs, payloads, etc.)
+│   ├── test_helpers.py       # Helper utilities for tests
+│   ├── constants/            # Test constants and expected responses
+│   ├── requirements-test.txt # Test-specific dependencies
+│   ├── unit/                 # Unit tests
+│   │   ├── api/              # API endpoint tests
+│   │   └── controllers/      # Controller tests
+│   └── integration/          # Integration tests
+├── run_tests.py              # Cross-platform test runner
+└── pytest.ini                # Pytest configuration
+```
+
+## 📈 Coverage Reports
+
+After running tests with coverage (`python run_tests.py coverage`):
+
+1. **HTML Report**: Open `htmlcov/index.html` in your browser
+2. **Terminal Report**: Coverage summary is displayed in the terminal
+3. **XML Report**: `coverage.xml` is generated for CI/CD integration
+
+The project has a minimum coverage requirement of **80%**.
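+
+The threshold is enforced through pytest-cov's `--cov-fail-under` option. A minimal sketch of the relevant `pytest.ini` entry, assuming the coverage flags live in `addopts`:
+
+```ini
+[pytest]
+addopts = --cov=src --cov-fail-under=80
+```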
+
+## ✍️ Writing Tests
+
+### Test File Naming
+- Test files must start with `test_`
+- Example: `test_item_lifecycle_controller.py`
+
+### Test Structure
+```python
+import pytest
+from tests.test_fixtures import TestFixtures
+from tests.test_helpers import TestHelpers
+
+@pytest.mark.unit # Mark the test type
+@pytest.mark.controllers # Mark the component being tested
+class TestYourFeature:
+ """Test cases for YourFeature."""
+
+ @pytest.mark.asyncio # For async tests
+ async def test_something_works(self, client, mock_authentication_service):
+ """Test that something works correctly."""
+ # Arrange
+ headers = {"authorization": "Bearer token"}
+
+ # Act
+ response = client.get("/endpoint", headers=headers)
+
+ # Assert
+ assert response.status_code == 200
+```
+
+### Available Test Markers
+- `@pytest.mark.unit` - Fast, isolated unit tests
+- `@pytest.mark.integration` - Tests integrating multiple components
+- `@pytest.mark.api` - API endpoint tests
+- `@pytest.mark.controllers` - Controller layer tests
+- `@pytest.mark.services` - Service layer tests
+- `@pytest.mark.slow` - Long-running tests
+- `@pytest.mark.smoke` - Critical tests for CI/CD
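+
+Markers can also be combined using pytest `-m` expressions, for example:
+
+```bash
+# Run fast unit tests only, skipping anything marked slow
+pytest tests/ -m "unit and not slow" -v
+```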
+
+### Common Fixtures (from conftest.py)
+- `client` - FastAPI test client
+- `valid_headers` - Pre-configured valid request headers
+- `mock_authentication_service` - Mocked authentication service
+- `mock_item_factory` - Mocked item factory
+- `app` - FastAPI application instance
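+
+For example, a minimal test combining the `client` and `valid_headers` fixtures (the endpoint path here is illustrative):
+
+```python
+import pytest
+
+
+@pytest.mark.unit
+def test_request_with_valid_headers(client, valid_headers):
+    """Requests built from the shared fixtures should not be rejected as unauthenticated."""
+    response = client.get("/example-endpoint", headers=valid_headers)
+    assert response.status_code != 401
+```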
+
+## 🔧 Troubleshooting
+
+### Common Issues
+
+**1. Import Errors**
+```bash
+# Ensure you're in the python directory
+cd Backend
+cd python
+
+# Verify PYTHONPATH includes src
+echo $PYTHONPATH # Linux/macOS
+echo %PYTHONPATH% # Windows
+```
+
+**2. Missing Dependencies**
+```bash
+# Reinstall all dependencies
+pip install -r requirements.txt -r tests/requirements-test.txt
+```
+
+**3. Virtual Environment Not Active**
+- Look for `(venv)` prefix in your terminal
+- If missing, activate it again (see Quick Start)
+
+**4. Tests Failing Due to Async Issues**
+- Ensure you're using `@pytest.mark.asyncio` for async tests
+- Check that mocks are created with `AsyncMock` for async methods
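+
+For example, a minimal sketch (`fetch_token` is a hypothetical async method):
+
+```python
+from unittest.mock import AsyncMock
+
+service = AsyncMock()
+service.fetch_token.return_value = "token"
+# `await service.fetch_token()` now returns "token" rather than an un-awaited mock coroutine
+```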
+
+**5. Coverage Not Meeting Threshold**
+- Run `python run_tests.py coverage` to see uncovered lines
+- Focus on testing error cases and edge conditions
+
+## 🚀 CI/CD Integration
+
+### GitHub Actions Example
+```yaml
+name: Run Tests
+
+on: [push, pull_request]
+
+jobs:
+ test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Set up Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: '3.8'
+
+ - name: Install dependencies
+ run: |
+ cd Backend
+ cd python
+ pip install -r requirements.txt
+ pip install -r tests/requirements-test.txt
+
+ - name: Run tests with coverage
+ run: |
+ cd Backend
+ cd python
+ python run_tests.py coverage
+
+ - name: Upload coverage to Codecov
+ uses: codecov/codecov-action@v3
+ with:
+          file: ./Backend/python/coverage.xml
+ fail_ci_if_error: true
+```
+
+## 💡 Tips and Best Practices
+
+1. **Always use a virtual environment** to avoid dependency conflicts
+2. **Run tests before committing** code changes
+3. **Write tests for new features** as you develop them
+4. **Use descriptive test names** that explain what is being tested
+5. **Keep tests focused** - one test should verify one behavior
+6. **Use fixtures** to avoid code duplication
+7. **Mock external dependencies** to keep tests fast and isolated
+
+## 📞 Need Help?
+
+If you encounter issues:
+1. Check the troubleshooting section above
+2. Review the test output carefully - pytest provides detailed error messages
+3. Check existing tests for examples
+4. Ensure all dependencies are installed correctly
+
+Happy testing! 🎉
\ No newline at end of file
diff --git a/Backend/python/tests/__init__.py b/Backend/python/tests/__init__.py
new file mode 100644
index 0000000..de78aef
--- /dev/null
+++ b/Backend/python/tests/__init__.py
@@ -0,0 +1,8 @@
+"""Test package for Python Backend FastAPI application."""
+import sys
+from pathlib import Path
+
+# Add src directory to Python path
+src_path = Path(__file__).parent.parent / "src"
+if str(src_path) not in sys.path:
+ sys.path.insert(0, str(src_path))
\ No newline at end of file
diff --git a/Backend/python/tests/conftest.py b/Backend/python/tests/conftest.py
new file mode 100644
index 0000000..4fefcb8
--- /dev/null
+++ b/Backend/python/tests/conftest.py
@@ -0,0 +1,276 @@
+import pytest
+import asyncio
+from unittest.mock import Mock, AsyncMock, patch
+from fastapi import FastAPI
+from fastapi.testclient import TestClient
+from typing import Generator
+import sys
+from pathlib import Path
+
+# Add src to Python path for imports
+src_path = Path(__file__).parent.parent / "src"
+if str(src_path) not in sys.path:
+ sys.path.insert(0, str(src_path))
+
+from main import app as application
+from core.service_registry import ServiceRegistry
+from services.authentication import AuthenticationService
+from services.item_factory import ItemFactory
+from models.authentication_models import AuthorizationContext
+
+# Import the services that need to be mocked
+from services.configuration_service import ConfigurationService
+from services.open_id_connect_configuration import OpenIdConnectConfigurationManager, OpenIdConnectConfiguration
+from services.http_client import HttpClientService
+from services.item_metadata_store import ItemMetadataStore
+from services.lakehouse_client_service import LakehouseClientService
+from services.onelake_client_service import OneLakeClientService
+from services.authorization import AuthorizationHandler
+
+
+@pytest.fixture(scope="session")
+def event_loop_policy():
+ """Set the event loop policy for the test session."""
+ return asyncio.get_event_loop_policy()
+
+
+@pytest.fixture(scope="function")
+def event_loop(event_loop_policy):
+ """Create an instance of the default event loop for each test function."""
+ loop = event_loop_policy.new_event_loop()
+ yield loop
+ loop.close()
+
+
+@pytest.fixture
+def mock_service_registry():
+ """Create a mock service registry for testing."""
+ # Clear the singleton instance to ensure clean state
+ ServiceRegistry._instance = None
+ registry = ServiceRegistry()
+
+ # Mark as initialized to bypass initialization checks
+ registry._initialized = True
+
+ return registry
+
+
+@pytest.fixture
+def mock_auth_context():
+ """Create a mock authorization context for testing."""
+ context = AuthorizationContext(
+ original_subject_token="mock_subject_token",
+ tenant_object_id="44444444-4444-4444-4444-444444444444",
+ claims=[
+ {"type": "oid", "value": "test-user-id"},
+ {"type": "name", "value": "Test User"},
+ {"type": "tid", "value": "44444444-4444-4444-4444-444444444444"}
+ ]
+ )
+ return context
+
+
+@pytest.fixture
+def mock_authentication_service(mock_auth_context):
+ """Create a mock authentication service."""
+ mock_service = AsyncMock(spec=AuthenticationService)
+
+ # Configure the mock's authenticate_control_plane_call to return a mock auth context
+ mock_service.authenticate_control_plane_call.return_value = mock_auth_context
+ mock_service.authenticate_data_plane_call.return_value = mock_auth_context
+ mock_service.get_access_token_on_behalf_of.return_value = "mock_access_token"
+ mock_service.get_fabric_s2s_token.return_value = "mock_s2s_token"
+ mock_service.build_composite_token.return_value = "SubjectAndAppToken1.0 subjectToken=\"mock_subject\", appToken=\"mock_app\""
+
+ return mock_service
+
+
+@pytest.fixture
+def mock_item_factory():
+ """Create a mock item factory."""
+ mock_factory = Mock(spec=ItemFactory)
+
+ # Import test helpers here to avoid circular imports
+ from tests.test_helpers import TestHelpers
+
+ # Create a default mock item that will be returned
+ mock_item = TestHelpers.create_mock_item()
+ mock_factory.create_item.return_value = mock_item
+
+ return mock_factory
+
+
+@pytest.fixture
+def mock_configuration_service():
+ """Create a mock configuration service."""
+ mock_config = Mock(spec=ConfigurationService)
+ mock_config.get_environment.return_value = "test"
+ mock_config.get_publisher_tenant_id.return_value = "test-publisher-tenant"
+ mock_config.get_audience.return_value = "test-audience"
+ mock_config.get_client_id.return_value = "test-client-id"
+ mock_config.get_client_secret.return_value = "test-client-secret"
+ mock_config.get_common_metadata_file_name.return_value = "common_metadata.json"
+ mock_config.get_type_specific_metadata_file_name.return_value = "type_specific_metadata.json"
+ mock_config.get_jobs_directory_name.return_value = "jobs"
+ return mock_config
+
+
+@pytest.fixture
+def mock_all_services(mock_service_registry, mock_authentication_service, mock_item_factory, mock_configuration_service):
+ """Mock all services required for testing."""
+ # Register the already created mocks
+ mock_service_registry.register(AuthenticationService, mock_authentication_service)
+ mock_service_registry.register(ItemFactory, mock_item_factory)
+ mock_service_registry.register(ConfigurationService, mock_configuration_service)
+
+ # Mock remaining services
+ services_to_mock = [
+ (OpenIdConnectConfigurationManager, AsyncMock),
+ (HttpClientService, Mock),
+ (ItemMetadataStore, Mock),
+ (LakehouseClientService, Mock),
+ (OneLakeClientService, Mock),
+ (AuthorizationHandler, Mock)
+ ]
+
+ mocked_services = {
+ 'AuthenticationService': mock_authentication_service,
+ 'ItemFactory': mock_item_factory,
+ 'ConfigurationService': mock_configuration_service
+ }
+
+ for service_class, mock_type in services_to_mock:
+ mock_service = mock_type(spec=service_class)
+ mock_service_registry.register(service_class, mock_service)
+ mocked_services[service_class.__name__] = mock_service
+
+ return mocked_services
+
+
+@pytest.fixture
+def app(mock_all_services, mock_service_registry) -> Generator[FastAPI, None, None]:
+ """Create FastAPI app with mocked services."""
+ # Clear any existing dependency overrides
+ application.dependency_overrides = {}
+
+ # Create a context manager to patch all the service getter functions
+ patches = []
+
+ # Patch service registry getter
+ registry_patch = patch('core.service_registry.get_service_registry', return_value=mock_service_registry)
+ patches.append(registry_patch)
+
+ # Patch authentication service getter
+ auth_patch = patch('fabric_api.impl.item_lifecycle_controller.get_authentication_service',
+ return_value=mock_all_services['AuthenticationService'])
+ patches.append(auth_patch)
+
+ # Patch item factory getter
+ factory_patch = patch('fabric_api.impl.item_lifecycle_controller.get_item_factory',
+ return_value=mock_all_services['ItemFactory'])
+ patches.append(factory_patch)
+
+ # Patch configuration service getter
+ config_patch = patch('services.configuration_service.get_configuration_service',
+ return_value=mock_all_services['ConfigurationService'])
+ patches.append(config_patch)
+
+ # Add patches for jobs controller
+ jobs_auth_patch = patch('fabric_api.impl.jobs_controller.get_authentication_service',
+ return_value=mock_all_services['AuthenticationService'])
+ patches.append(jobs_auth_patch)
+
+ jobs_factory_patch = patch('fabric_api.impl.jobs_controller.get_item_factory',
+ return_value=mock_all_services['ItemFactory'])
+ patches.append(jobs_factory_patch)
+
+ # Apply all patches
+ for p in patches:
+ p.start()
+
+ try:
+ yield application
+ finally:
+ # Stop all patches
+ for p in patches:
+ p.stop()
+
+
+@pytest.fixture
+def client(app) -> TestClient:
+ """Create test client."""
+ return TestClient(app)
+
+
+@pytest.fixture
+def valid_headers():
+ """Provide valid headers for API requests."""
+ return {
+ "activity_id": "test-activity-123",
+ "request_id": "test-request-456",
+ "authorization": "SubjectAndAppToken1.0 subjectToken=\"mock_subject_token\", appToken=\"mock_app_token\"",
+ "x_ms_client_tenant_id": "44444444-4444-4444-4444-444444444444",
+ }
+
+
+@pytest.fixture
+def sample_create_request():
+ """Provide a sample create item request."""
+ return {
+ "display_name": "Test Item",
+ "description": "Test Description",
+ "creation_payload": {
+ "metadata": {
+ "operand1": 10,
+ "operand2": 20,
+ "operator": "Add"
+ }
+ }
+ }
+
+
+@pytest.fixture
+def sample_update_request():
+ """Provide a sample update item request."""
+ return {
+ "display_name": "Updated Test Item",
+ "description": "Updated Test Description",
+ "update_payload": {
+ "metadata": {
+ "operand1": 30,
+ "operand2": 40,
+ "operator": "Multiply"
+ }
+ }
+ }
+
+
+# Import AuthenticationTestFixtures from test_fixtures.py
+from tests.test_fixtures import AuthenticationTestFixtures
+
+
+@pytest.fixture
+def auth_fixtures():
+ """Provide AuthenticationTestFixtures as a pytest fixture."""
+ return AuthenticationTestFixtures
+
+
+@pytest.fixture
+def enhanced_mock_authentication_service():
+ """Create an enhanced mock authentication service with more comprehensive capabilities."""
+ mock_service = AsyncMock(spec=AuthenticationService)
+
+ # Use AuthenticationTestFixtures for enhanced mock context
+ auth_context = AuthenticationTestFixtures.create_auth_context()
+
+ # Configure the mock's methods
+ mock_service.authenticate_control_plane_call.return_value = auth_context
+ mock_service.authenticate_data_plane_call.return_value = auth_context
+ mock_service.get_access_token_on_behalf_of.return_value = "mock_access_token"
+ mock_service.get_fabric_s2s_token.return_value = "mock_s2s_token"
+ mock_service.build_composite_token.return_value = "SubjectAndAppToken1.0 subjectToken=\"mock_subject\", appToken=\"mock_app\""
+
+ return mock_service
\ No newline at end of file
diff --git a/Backend/python/tests/constants/__init__.py b/Backend/python/tests/constants/__init__.py
new file mode 100644
index 0000000..528353f
--- /dev/null
+++ b/Backend/python/tests/constants/__init__.py
@@ -0,0 +1,5 @@
+"""Test constants module."""
+
+from .expected_responses import ExpectedResponses
+
+__all__ = ["ExpectedResponses"]
\ No newline at end of file
diff --git a/Backend/python/tests/constants/expected_responses.py b/Backend/python/tests/constants/expected_responses.py
new file mode 100644
index 0000000..e8d003b
--- /dev/null
+++ b/Backend/python/tests/constants/expected_responses.py
@@ -0,0 +1,119 @@
+"""Constants for expected HTTP responses in tests."""
+
+from fastapi import status
+from constants.error_codes import ErrorCodes
+from fabric_api.models.error_source import ErrorSource
+
+
+class ExpectedResponses:
+ """Expected response codes and error codes for different scenarios."""
+
+ # Authentication errors
+ MISSING_AUTH_HEADER = {
+ "status_code": status.HTTP_401_UNAUTHORIZED,
+ "error_code": ErrorCodes.Authentication.AUTH_ERROR,
+ "source": ErrorSource.EXTERNAL
+ }
+
+ INVALID_AUTH_TOKEN = {
+ "status_code": status.HTTP_401_UNAUTHORIZED,
+ "error_code": ErrorCodes.Authentication.AUTH_ERROR,
+ "source": ErrorSource.EXTERNAL
+ }
+
+ MISSING_TENANT_ID = {
+ "status_code": status.HTTP_401_UNAUTHORIZED,
+ "error_code": ErrorCodes.Authentication.AUTH_ERROR,
+ "source": ErrorSource.EXTERNAL
+ }
+
+ AUTH_UI_REQUIRED = {
+ "status_code": status.HTTP_401_UNAUTHORIZED,
+ "error_code": ErrorCodes.Authentication.AUTH_UI_REQUIRED,
+ "source": ErrorSource.EXTERNAL
+ }
+
+ # Security errors
+ ACCESS_DENIED = {
+ "status_code": status.HTTP_403_FORBIDDEN,
+ "error_code": ErrorCodes.Security.ACCESS_DENIED,
+ "source": ErrorSource.USER
+ }
+
+ # Item errors
+ ITEM_NOT_FOUND = {
+ "status_code": status.HTTP_404_NOT_FOUND,
+ "error_code": ErrorCodes.Item.ITEM_METADATA_NOT_FOUND,
+ "source": ErrorSource.SYSTEM
+ }
+
+ DOUBLED_OPERANDS_OVERFLOW = {
+ "status_code": status.HTTP_400_BAD_REQUEST,
+ "error_code": ErrorCodes.Item.DOUBLED_OPERANDS_OVERFLOW,
+ "source": ErrorSource.USER
+ }
+
+ # Item payload errors
+ INVALID_ITEM_PAYLOAD = {
+ "status_code": status.HTTP_400_BAD_REQUEST,
+ "error_code": ErrorCodes.ItemPayload.INVALID_ITEM_PAYLOAD,
+ "source": ErrorSource.USER
+ }
+
+ MISSING_LAKEHOUSE_REFERENCE = {
+ "status_code": status.HTTP_400_BAD_REQUEST,
+ "error_code": ErrorCodes.ItemPayload.MISSING_LAKEHOUSE_REFERENCE,
+ "source": ErrorSource.USER
+ }
+
+ # Internal errors
+ INTERNAL_ERROR = {
+ "status_code": status.HTTP_500_INTERNAL_SERVER_ERROR,
+ "error_code": ErrorCodes.INTERNAL_ERROR,
+ "source": ErrorSource.SYSTEM
+ }
+
+ UNEXPECTED_ITEM_TYPE = {
+ "status_code": status.HTTP_500_INTERNAL_SERVER_ERROR,
+ "error_code": ErrorCodes.INTERNAL_ERROR,
+ "source": ErrorSource.SYSTEM
+ }
+
+ # Rate limiting
+ TOO_MANY_REQUESTS = {
+ "status_code": status.HTTP_429_TOO_MANY_REQUESTS,
+ "error_code": ErrorCodes.RateLimiting.TOO_MANY_REQUESTS,
+ "source": ErrorSource.SYSTEM
+ }
+
+ # Kusto errors
+ KUSTO_DATA_ERROR = {
+ "status_code": status.HTTP_400_BAD_REQUEST,
+ "error_code": ErrorCodes.Kusto.KUSTO_DATA_EXCEPTION,
+ "source": ErrorSource.SYSTEM
+ }
+
+ # Validation errors
+ INVALID_PARAMETER = {
+ "status_code": status.HTTP_400_BAD_REQUEST,
+ "error_code": "InvalidParameter",
+ "source": ErrorSource.USER
+ }
+
+ INVALID_UUID = {
+ "status_code": status.HTTP_400_BAD_REQUEST,
+ "error_code": "InvalidParameter",
+ "source": ErrorSource.USER
+ }
+
+ VALIDATION_ERROR = {
+ "status_code": status.HTTP_422_UNPROCESSABLE_ENTITY,
+ "error_code": "ValidationError",
+ "source": ErrorSource.USER
+ }
+
+ INVALID_REQUEST = {
+ "status_code": status.HTTP_400_BAD_REQUEST,
+ "error_code": ErrorCodes.INVALID_REQUEST,
+ "source": ErrorSource.USER
+ }
\ No newline at end of file
diff --git a/Backend/python/tests/integration/services/authentication/test_authentication_flows.py b/Backend/python/tests/integration/services/authentication/test_authentication_flows.py
new file mode 100644
index 0000000..5e13aae
--- /dev/null
+++ b/Backend/python/tests/integration/services/authentication/test_authentication_flows.py
@@ -0,0 +1,360 @@
+"""
+Integration tests for AuthenticationService flows.
+Integration-style tests that were moved out of the unit test suite.
+"""
+
+import pytest
+from unittest.mock import Mock, AsyncMock, patch
+
+from services.authentication import AuthenticationService
+from services.open_id_connect_configuration import OpenIdConnectConfiguration
+from models.authentication_models import Claim, AuthorizationContext, SubjectAndAppToken
+from exceptions.exceptions import AuthenticationException, AuthenticationUIRequiredException
+from constants.environment_constants import EnvironmentConstants
+
+
+@pytest.mark.integration
+@pytest.mark.services
+class TestAuthenticationServiceIntegration:
+ """Integration tests with service infrastructure."""
+
+ def test_service_registry_integration(self, auth_fixtures):
+ """Test AuthenticationService integration with ServiceRegistry."""
+ from core.service_registry import ServiceRegistry
+
+ registry = ServiceRegistry()
+ registry.clear()
+
+ service = auth_fixtures.get_authentication_service()
+ registry.register(AuthenticationService, service)
+
+ assert registry.has(AuthenticationService)
+ retrieved = registry.get(AuthenticationService)
+ assert retrieved is service
+
+ @pytest.mark.asyncio
+ async def test_end_to_end_control_plane_flow(self, auth_fixtures):
+ """Test complete end-to-end control plane authentication."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Create realistic tokens with matching app IDs
+ subject_token = auth_fixtures.create_mock_jwt_token(
+ scopes="FabricWorkloadControl",
+ tenant_id="user-tenant",
+ app_id=EnvironmentConstants.FABRIC_BACKEND_APP_ID
+ )
+ app_token = auth_fixtures.create_mock_jwt_token(
+ id_typ="app",
+ tenant_id="publisher-tenant-id",
+ app_id=EnvironmentConstants.FABRIC_BACKEND_APP_ID
+ )
+ auth_header = SubjectAndAppToken.generate_authorization_header_value(subject_token, app_token)
+
+ # Mock validation chain
+ subject_claims = auth_fixtures.create_subject_claims(tenant_id="user-tenant")
+ app_claims = auth_fixtures.create_app_claims()
+
+ with patch.object(service, '_validate_app_token', return_value=app_claims):
+ with patch.object(service, '_validate_subject_token', return_value=subject_claims):
+ result = await service.authenticate_control_plane_call(
+ auth_header=auth_header,
+ tenant_id="user-tenant"
+ )
+
+ assert isinstance(result, AuthorizationContext)
+ assert result.has_subject_context
+ assert result.tenant_object_id == "user-tenant"
+
+ @pytest.mark.asyncio
+ async def test_end_to_end_composite_token_flow(self, auth_fixtures):
+ """Test complete end-to-end composite token building flow."""
+ service = auth_fixtures.get_authentication_service()
+ auth_context = auth_fixtures.create_auth_context()
+
+ # Use realistic JWT-like tokens for OBO and S2S
+ obo_token = auth_fixtures.create_mock_jwt_token(
+ scopes="https://graph.microsoft.com/.default",
+ tenant_id="user-tenant"
+ )
+ s2s_token = auth_fixtures.create_mock_jwt_token(
+ id_typ="app",
+ tenant_id="publisher-tenant-id"
+ )
+
+ with patch.object(service, 'get_access_token_on_behalf_of', return_value=obo_token):
+ with patch.object(service, 'get_fabric_s2s_token', return_value=s2s_token):
+ result = await service.build_composite_token(
+ auth_context=auth_context,
+ scopes=["test-scope"]
+ )
+
+ # Verify result format - should be a proper SubjectAndAppToken header
+ assert result.startswith("SubjectAndAppToken1.0")
+ assert obo_token in result
+ assert s2s_token in result
+
+ # Verify it can be parsed
+ parsed = SubjectAndAppToken.parse(result)
+ assert parsed.subject_token == obo_token
+ assert parsed.app_token == s2s_token
+
+ @pytest.mark.asyncio
+ async def test_data_plane_to_control_plane_flow_integration(self, auth_fixtures):
+ """Test integration between data plane and control plane authentication."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Start with data plane authentication
+ bearer_token = auth_fixtures.create_mock_jwt_token(scopes="Item1.ReadWrite.All")
+ data_plane_header = f"Bearer {bearer_token}"
+
+ with patch.object(service, '_authenticate_bearer') as mock_auth_bearer:
+ data_plane_context = AuthorizationContext(
+ original_subject_token=bearer_token,
+ tenant_object_id="test-tenant-id"
+ )
+ mock_auth_bearer.return_value = data_plane_context
+
+ # Authenticate data plane call
+ data_result = await service.authenticate_data_plane_call(
+ auth_header=data_plane_header,
+ allowed_scopes=["Item1.ReadWrite.All"]
+ )
+
+ # Use result to build composite token (similar to control plane)
+ obo_token = auth_fixtures.create_mock_jwt_token(tenant_id="test-tenant-id")
+ s2s_token = auth_fixtures.create_mock_jwt_token(id_typ="app")
+
+ with patch.object(service, 'get_access_token_on_behalf_of', return_value=obo_token):
+ with patch.object(service, 'get_fabric_s2s_token', return_value=s2s_token):
+ composite_token = await service.build_composite_token(
+ auth_context=data_result,
+ scopes=["fabric-scope"]
+ )
+
+ assert composite_token.startswith("SubjectAndAppToken1.0")
+
+
+@pytest.mark.integration
+@pytest.mark.services
+class TestErrorRecoveryScenarios:
+ """Test error recovery and resilience scenarios."""
+
+ @pytest.mark.asyncio
+ async def test_openid_config_fetch_failure_recovery(self, auth_fixtures):
+ """Test recovery when OpenID configuration fetch fails."""
+ service = auth_fixtures.get_authentication_service()
+ token = auth_fixtures.create_mock_jwt_token()
+
+        # No configuration has been cached yet, so the fetch failure should propagate
+ service.openid_manager.get_configuration_async.side_effect = Exception("Network error")
+
+ with pytest.raises(Exception, match="Network error"):
+ await service._validate_aad_token_common(token, False, None)
+
+ @pytest.mark.asyncio
+ async def test_partial_service_degradation(self, auth_fixtures):
+ """Test service behavior under partial degradation."""
+ # Create service with minimal configuration
+ mock_openid_manager, _ = auth_fixtures.get_basic_mocks()
+ mock_config = auth_fixtures.get_config_service_mock(client_secret=None) # Missing secret
+
+ with patch("services.authentication.get_configuration_service", return_value=mock_config):
+ service = AuthenticationService(openid_manager=mock_openid_manager)
+
+ # Some operations should fail gracefully
+ auth_context = auth_fixtures.create_auth_context()
+
+ with pytest.raises(AuthenticationException, match="MSAL client not configured"):
+ await service.get_access_token_on_behalf_of(auth_context, ["test-scope"])
+
+ @pytest.mark.asyncio
+ async def test_network_timeout_resilience(self, auth_fixtures):
+ """Test resilience to network timeouts during token validation."""
+ service = auth_fixtures.get_authentication_service()
+ token = auth_fixtures.create_mock_jwt_token()
+
+ # Simulate network timeout during OpenID config fetch
+ import asyncio
+ service.openid_manager.get_configuration_async.side_effect = asyncio.TimeoutError("Network timeout")
+
+ # The service wraps the TimeoutError in AuthenticationException
+ with pytest.raises(AuthenticationException, match="Token validation failed: Network timeout"):
+ await service._validate_aad_token_common(token, False, None)
+
+ @pytest.mark.asyncio
+ async def test_jwt_validation_external_dependency_failures(self, auth_fixtures):
+ """Test handling of JWT validation library failures."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Test various JWT library failure scenarios
+ token = auth_fixtures.create_mock_jwt_token()
+
+ # Mock OpenID configuration
+ mock_config = Mock(spec=OpenIdConnectConfiguration)
+ mock_config.issuer_configuration = "https://login.microsoftonline.com/{tenantid}/v2.0"
+ mock_config.signing_keys = [{"kid": "test-key-id", "kty": "RSA"}]
+
+ payload = auth_fixtures.create_jwt_payload(tenant_id="test-tenant", token_version="2.0")
+
+ with patch.object(service.openid_manager, 'get_configuration_async', return_value=mock_config):
+ # Test JWT header extraction failure
+ with patch('services.authentication.jwt.get_unverified_header', side_effect=Exception("JWT library error")):
+ with pytest.raises(AuthenticationException, match="Token validation failed"):
+ await service._validate_aad_token_common(token, False, None)
+
+ # Test JWT claims extraction failure
+ with patch('services.authentication.jwt.get_unverified_header', return_value={"kid": "test-key-id"}):
+ with patch('services.authentication.jwt.get_unverified_claims', side_effect=Exception("Claims extraction failed")):
+ with pytest.raises(AuthenticationException, match="Token validation failed"):
+ await service._validate_aad_token_common(token, False, None)
+
+ @pytest.mark.asyncio
+ async def test_concurrent_token_validation(self, auth_fixtures):
+ """Test that concurrent token validation doesn't interfere."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Create tokens with proper payload structure
+ payload1 = auth_fixtures.create_jwt_payload(tenant_id="tenant1", token_version="2.0")
+ payload2 = auth_fixtures.create_jwt_payload(tenant_id="tenant2", token_version="2.0")
+ token1 = auth_fixtures.create_mock_jwt_token(payload=payload1)
+ token2 = auth_fixtures.create_mock_jwt_token(payload=payload2)
+
+ # Mock OpenID configuration with matching key
+ mock_config = Mock(spec=OpenIdConnectConfiguration)
+ mock_config.issuer_configuration = "https://login.microsoftonline.com/{tenantid}/v2.0"
+ mock_config.signing_keys = [{"kid": "test-key-id", "kty": "RSA"}]
+
+ with patch('services.authentication.jwt.get_unverified_header', return_value={"kid": "test-key-id"}):
+ with patch('services.authentication.jwt.get_unverified_claims', side_effect=[payload1, payload2]):
+ with patch('services.authentication.jwt.decode', side_effect=[payload1, payload2]):
+ with patch.object(service.openid_manager, 'get_configuration_async', return_value=mock_config):
+ # Both validations should succeed independently
+ result1 = await service._validate_aad_token_common(token1, False, None)
+ result2 = await service._validate_aad_token_common(token2, False, None)
+
+ assert len(result1) > 0
+ assert len(result2) > 0
+
+ @pytest.mark.asyncio
+ async def test_service_initialization_under_load(self, auth_fixtures):
+ """Test service initialization behavior under load conditions."""
+ import asyncio
+
+ async def create_service():
+ mock_openid_manager, mock_config_service = auth_fixtures.get_basic_mocks()
+
+ with patch("services.authentication.get_configuration_service", return_value=mock_config_service):
+ with patch("services.authentication.msal") as mock_msal:
+ mock_app = Mock()
+ mock_msal.ConfidentialClientApplication.return_value = mock_app
+
+ return AuthenticationService(openid_manager=mock_openid_manager)
+
+ # Create multiple services concurrently
+ tasks = [create_service() for _ in range(10)]
+ services = await asyncio.gather(*tasks)
+
+ # All services should be properly initialized
+ for service in services:
+ assert service.openid_manager is not None
+ assert service.publisher_tenant_id == "publisher-tenant-id"
+ assert service.client_id == "test-client-id"
+
+
+@pytest.mark.integration
+@pytest.mark.services
+class TestConcurrencyAndThreadSafety:
+ """Test concurrency and thread safety aspects with integration focus."""
+
+ def test_msal_app_cache_thread_safety(self, auth_fixtures):
+ """Test that MSAL app caching is thread-safe under load."""
+ service = auth_fixtures.get_authentication_service()
+ tenant_id = "test-tenant"
+
+ with patch("services.authentication.msal") as mock_msal:
+ mock_app = Mock()
+ mock_msal.ConfidentialClientApplication.return_value = mock_app
+
+ # Simulate concurrent access from multiple threads
+ import threading
+ import time
+
+ apps_retrieved = []
+ errors = []
+
+ def get_app():
+ try:
+ app = service._get_msal_app(tenant_id)
+ apps_retrieved.append(app)
+ time.sleep(0.01) # Small delay to increase contention
+ except Exception as e:
+ errors.append(e)
+
+ # Create multiple threads
+ threads = [threading.Thread(target=get_app) for _ in range(20)]
+
+ # Start all threads
+ for thread in threads:
+ thread.start()
+
+ # Wait for all threads to complete
+ for thread in threads:
+ thread.join()
+
+ # Verify no errors occurred
+ assert len(errors) == 0, f"Errors occurred: {errors}"
+
+ # All threads should have retrieved the same app instance
+ assert len(apps_retrieved) == 20
+ assert all(app is mock_app for app in apps_retrieved)
+
+ # Only one app should have been created despite concurrent access
+ assert mock_msal.ConfidentialClientApplication.call_count == 1
+
+ @pytest.mark.asyncio
+ async def test_concurrent_different_tenants(self, auth_fixtures):
+ """Test concurrent access with different tenants creates separate apps."""
+ service = auth_fixtures.get_authentication_service()
+
+ with patch("services.authentication.msal") as mock_msal:
+ # Create unique mock apps for each tenant
+ tenant_apps = {
+ "tenant1": Mock(),
+ "tenant2": Mock(),
+ "tenant3": Mock()
+ }
+ mock_msal.ConfidentialClientApplication.side_effect = tenant_apps.values()
+
+ import asyncio
+
+ async def get_app_for_tenant(tenant_id):
+ return service._get_msal_app(tenant_id)
+
+ # Create concurrent tasks for different tenants
+ tasks = []
+ for tenant in tenant_apps.keys():
+ for _ in range(5): # 5 concurrent requests per tenant
+ tasks.append(get_app_for_tenant(tenant))
+
+ results = await asyncio.gather(*tasks)
+
+ # Verify results - should have 15 total results (3 tenants × 5 requests each)
+ assert len(results) == 15
+
+ # Verify each tenant got its own app, but all requests for same tenant got same app
+ tenant1_results = results[0:5] # First 5 are tenant1
+ tenant2_results = results[5:10] # Next 5 are tenant2
+ tenant3_results = results[10:15] # Last 5 are tenant3
+
+ # All requests for same tenant should return same app instance
+ assert all(app is tenant1_results[0] for app in tenant1_results)
+ assert all(app is tenant2_results[0] for app in tenant2_results)
+ assert all(app is tenant3_results[0] for app in tenant3_results)
+
+ # Apps for different tenants should be different
+ assert tenant1_results[0] != tenant2_results[0]
+ assert tenant2_results[0] != tenant3_results[0]
+ assert tenant1_results[0] != tenant3_results[0]
+
+ # Should have created exactly 3 apps (one per tenant)
+ assert mock_msal.ConfidentialClientApplication.call_count == 3
\ No newline at end of file
diff --git a/Backend/python/tests/requirements-test.txt b/Backend/python/tests/requirements-test.txt
new file mode 100644
index 0000000..114e2ca
--- /dev/null
+++ b/Backend/python/tests/requirements-test.txt
@@ -0,0 +1,12 @@
+# Test dependencies
+pytest==7.4.3
+pytest-asyncio==0.21.1
+pytest-cov==4.1.0
+pytest-mock==3.12.0
+pytest-xdist==3.5.0 # For parallel test execution
+pytest-timeout==2.2.0 # For test timeouts
+pytest-watch==4.2.0 # For watch mode (optional)
+faker==20.1.0 # For generating test data
+
+# Platform-specific color support (optional but recommended)
+colorama==0.4.6 # For Windows color support
\ No newline at end of file
diff --git a/Backend/python/tests/test_fixtures.py b/Backend/python/tests/test_fixtures.py
new file mode 100644
index 0000000..fd2e931
--- /dev/null
+++ b/Backend/python/tests/test_fixtures.py
@@ -0,0 +1,355 @@
+"""Common test fixtures and data."""
+
+import json
+import base64
+import time
+from unittest.mock import Mock, AsyncMock, patch
+from uuid import UUID
+from typing import Dict, Any, List, Optional
+
+from constants.environment_constants import EnvironmentConstants
+from models.authentication_models import AuthorizationContext, Claim, SubjectAndAppToken
+from services.configuration_service import ConfigurationService
+from services.open_id_connect_configuration import OpenIdConnectConfigurationManager, OpenIdConnectConfiguration
+from services.authentication import AuthenticationService
+
+
+class TestFixtures:
+ """Common test data fixtures."""
+
+ # Test UUIDs
+ WORKSPACE_ID = UUID("11111111-1111-1111-1111-111111111111")
+ ITEM_ID = UUID("22222222-2222-2222-2222-222222222222")
+ JOB_INSTANCE_ID = UUID("33333333-3333-3333-3333-333333333333")
+ TENANT_ID = UUID("44444444-4444-4444-4444-444444444444")
+
+ # Authentication test data
+ AUTH_TENANT_ID = "test-tenant-id"
+ AUTH_OBJECT_ID = "test-object-id"
+ AUTH_PUBLISHER_TENANT_ID = "publisher-tenant-id"
+ AUTH_AUDIENCE = "test-audience"
+ AUTH_CLIENT_ID = "test-client-id"
+ AUTH_CLIENT_SECRET = "test-client-secret"
+ AUTH_ISSUER = "https://login.microsoftonline.com/test-tenant/v2.0"
+ AUTH_APP_ID = EnvironmentConstants.FABRIC_BACKEND_APP_ID
+ AUTH_SCOPES = "FabricWorkloadControl"
+
+ # JWT test data
+ JWT_HEADER = {
+ "typ": "JWT",
+ "alg": "RS256",
+ "kid": "test-key-id"
+ }
+
+ JWT_PAYLOAD_DELEGATED = {
+ "iss": "https://login.microsoftonline.com/test-tenant/v2.0",
+ "aud": "test-audience",
+ "sub": "test-subject",
+ "azp": EnvironmentConstants.FABRIC_BACKEND_APP_ID,
+ "tid": "test-tenant-id",
+ "oid": "test-object-id",
+ "ver": "2.0",
+ "scp": "FabricWorkloadControl"
+ }
+
+ JWT_PAYLOAD_APP_ONLY = {
+ "iss": "https://login.microsoftonline.com/publisher-tenant/v2.0",
+ "aud": "test-audience",
+ "sub": "service-principal-id",
+ "azp": EnvironmentConstants.FABRIC_BACKEND_APP_ID,
+ "tid": "publisher-tenant-id",
+ "oid": "service-principal-id",
+ "ver": "2.0",
+ "idtyp": "app"
+ }
+
+ # OpenID Connect configuration test data
+ OPENID_CONFIG = {
+ "issuer_configuration": "https://login.microsoftonline.com/{tenantid}/v2.0",
+ "signing_keys": [{"kid": "test-key-id", "kty": "RSA"}]
+ }
+
+ # Authentication headers
+ AUTH_HEADERS = {
+ "activity_id": "test-activity-123",
+ "request_id": "test-request-456",
+ "authorization": "SubjectAndAppToken1.0 subjectToken=\"mock_subject_token\", appToken=\"mock_app_token\"",
+ "x_ms_client_tenant_id": "44444444-4444-4444-4444-444444444444",
+ }
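+
+ # The authorization value follows the SubjectAndAppToken1.0 scheme Fabric uses
+ # when calling workloads: a delegated subject token plus the publisher's app token.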
+
+ # Claims test data
+ SUBJECT_CLAIMS = [
+ {"type": "tid", "value": "test-tenant-id"},
+ {"type": "oid", "value": "test-object-id"},
+ {"type": "scp", "value": "FabricWorkloadControl"},
+ {"type": "ver", "value": "2.0"},
+ {"type": "azp", "value": EnvironmentConstants.FABRIC_BACKEND_APP_ID}
+ ]
+
+ APP_CLAIMS = [
+ {"type": "tid", "value": "publisher-tenant-id"},
+ {"type": "oid", "value": "service-principal-id"},
+ {"type": "idtyp", "value": "app"},
+ {"type": "ver", "value": "2.0"},
+ {"type": "azp", "value": EnvironmentConstants.FABRIC_BACKEND_APP_ID}
+ ]
+
+ # Test item types
+ ITEM_TYPE = "Item1"
+ UNKNOWN_ITEM_TYPE = "UnknownItem"
+
+ # Test payloads
+ CREATE_PAYLOAD = {
+ "display_name": "Test Item",
+ "description": "Test Description",
+ "creation_payload": {
+ "metadata": {
+ "operand1": 10,
+ "operand2": 20,
+ "operator": "Add",
+ "lakehouse": {
+ "id": "44444444-4444-4444-4444-444444444444",
+ "workspace_id": "55555555-5555-5555-5555-555555555555"
+ }
+ }
+ }
+ }
+
+ UPDATE_PAYLOAD = {
+ "display_name": "Updated Test Item",
+ "description": "Updated Test Description",
+ "update_payload": {
+ "metadata": {
+ "operand1": 30,
+ "operand2": 40,
+ "operator": "Multiply"
+ }
+ }
+ }
+
+ INVALID_PAYLOAD = {
+ "display_name": "Invalid Item",
+ # Missing required fields
+ }
+
+ # Job test data
+ JOB_CREATE_PAYLOAD = {
+ "invoke_type": "Manual",
+ "creation_payload": {
+ "key": "value",
+ "nested": {
+ "data": "test"
+ }
+ }
+ }
+
+ SCHEDULED_JOB_PAYLOAD = {
+ "invoke_type": "Scheduled",
+ "creation_payload": {
+ "schedule": "0 * * * *",
+ "timezone": "UTC"
+ }
+ }
+
+ # Authentication error scenarios
+ EXPIRED_JWT_PAYLOAD = {
+ "iss": "https://login.microsoftonline.com/test-tenant/v2.0",
+ "aud": "test-audience",
+ "sub": "test-subject",
+ "azp": EnvironmentConstants.FABRIC_BACKEND_APP_ID,
+ "tid": "test-tenant-id",
+ "oid": "test-object-id",
+ "ver": "2.0",
+ "scp": "FabricWorkloadControl",
+ "exp": 1234567890 # Expired timestamp
+ }
+
+ INVALID_AUDIENCE_JWT_PAYLOAD = {
+ "iss": "https://login.microsoftonline.com/test-tenant/v2.0",
+ "aud": "wrong-audience",
+ "sub": "test-subject",
+ "azp": EnvironmentConstants.FABRIC_BACKEND_APP_ID,
+ "tid": "test-tenant-id",
+ "oid": "test-object-id",
+ "ver": "2.0",
+ "scp": "FabricWorkloadControl"
+ }
+
+ MISSING_CLAIMS_JWT_PAYLOAD = {
+ "iss": "https://login.microsoftonline.com/test-tenant/v2.0",
+ "aud": "test-audience",
+ "sub": "test-subject"
+ # Missing required claims like tid, oid, etc.
+ }
+
+
+# ===== AUTHENTICATION TEST FIXTURES =====
+class AuthenticationTestFixtures:
+ """Consolidated test fixtures and helpers for authentication testing."""
+
+ @staticmethod
+ def create_jwt_header(kid: str = "test-key-id", alg: str = "RS256") -> Dict[str, Any]:
+ """Create a JWT header for testing."""
+ return {"typ": "JWT", "alg": alg, "kid": kid}
+
+ @staticmethod
+ def create_jwt_payload(
+ iss: str = "https://login.microsoftonline.com/test-tenant/v2.0",
+ aud: str = "test-audience",
+ tenant_id: str = "test-tenant-id",
+ object_id: str = "test-object-id",
+ app_id: str = EnvironmentConstants.FABRIC_BACKEND_APP_ID,
+ scopes: Optional[str] = "FabricWorkloadControl", # Default scope for delegated tokens
+ id_typ: Optional[str] = None,
+ token_version: str = "2.0",
+ exp_offset_minutes: int = 60
+ ) -> Dict[str, Any]:
+ """Create a JWT payload for testing."""
+ now = int(time.time())
+ exp = now + (exp_offset_minutes * 60)
+
+ payload = {
+ "iss": iss,
+ "aud": aud,
+ "sub": "test-subject",
+ "azp": app_id if token_version == "2.0" else None,
+ "appid": app_id if token_version == "1.0" else None,
+ "tid": tenant_id,
+ "oid": object_id,
+ "ver": token_version,
+ "iat": now,
+ "nbf": now,
+ "exp": exp
+ }
+
+ # Remove None values
+ payload = {k: v for k, v in payload.items() if v is not None}
+
+ # Add scopes for delegated tokens (unless it's an app-only token)
+ if scopes and not id_typ:
+ payload["scp"] = scopes
+ if id_typ:
+ payload["idtyp"] = id_typ
+
+ return payload
+
+ @staticmethod
+ def encode_jwt_part(data: Dict[str, Any]) -> str:
+ """Encode a JWT part for testing."""
+ json_str = json.dumps(data, separators=(',', ':'))
+ encoded = base64.urlsafe_b64encode(json_str.encode()).decode()
+ return encoded.rstrip('=')
+
+ @staticmethod
+ def create_mock_jwt_token(
+ header: Optional[Dict[str, Any]] = None,
+ payload: Optional[Dict[str, Any]] = None,
+ signature: str = "mock-signature",
+ **payload_kwargs
+ ) -> str:
+ """Create a mock JWT token for testing."""
+ if header is None:
+ header = AuthenticationTestFixtures.create_jwt_header()
+ if payload is None:
+ payload = AuthenticationTestFixtures.create_jwt_payload(**payload_kwargs)
+
+ header_encoded = AuthenticationTestFixtures.encode_jwt_part(header)
+ payload_encoded = AuthenticationTestFixtures.encode_jwt_part(payload)
+
+ return f"{header_encoded}.{payload_encoded}.{signature}"
+
+ @staticmethod
+ def get_basic_mocks():
+ """Get basic mock objects for testing."""
+ # OpenID manager mock
+ mock_openid_manager = AsyncMock(spec=OpenIdConnectConfigurationManager)
+ mock_config = Mock(spec=OpenIdConnectConfiguration)
+ mock_config.issuer_configuration = "https://login.microsoftonline.com/{tenantid}/v2.0"
+ mock_config.signing_keys = [{"kid": "test-key-id", "kty": "RSA"}]
+ mock_openid_manager.get_configuration_async.return_value = mock_config
+
+ # Configuration service mock
+ mock_config_service = Mock(spec=ConfigurationService)
+ mock_config_service.get_publisher_tenant_id.return_value = "publisher-tenant-id"
+ mock_config_service.get_audience.return_value = "test-audience"
+ mock_config_service.get_client_id.return_value = "test-client-id"
+ mock_config_service.get_client_secret.return_value = "test-client-secret"
+
+ return mock_openid_manager, mock_config_service
+
+ @staticmethod
+ def get_config_service_mock(
+ publisher_tenant_id: str = "publisher-tenant-id",
+ audience: str = "test-audience",
+ client_id: str = "test-client-id",
+ client_secret: str = "test-client-secret"
+ ):
+ """Get a configuration service mock with specific values."""
+ mock_config = Mock(spec=ConfigurationService)
+ mock_config.get_publisher_tenant_id.return_value = publisher_tenant_id
+ mock_config.get_audience.return_value = audience
+ mock_config.get_client_id.return_value = client_id
+ mock_config.get_client_secret.return_value = client_secret
+ return mock_config
+
+ @staticmethod
+ def get_authentication_service():
+ """Get a configured AuthenticationService for testing."""
+ mock_openid_manager, mock_config_service = AuthenticationTestFixtures.get_basic_mocks()
+
+ with patch("services.authentication.get_configuration_service", return_value=mock_config_service):
+ with patch("services.authentication.msal"):
+ return AuthenticationService(openid_manager=mock_openid_manager)
+
+ @staticmethod
+ def create_auth_context(
+ tenant_id: str = "test-tenant-id",
+ has_subject_token: bool = True
+ ) -> AuthorizationContext:
+ """Create an AuthorizationContext for testing."""
+ return AuthorizationContext(
+ original_subject_token="mock-subject-token" if has_subject_token else None,
+ tenant_object_id=tenant_id,
+ claims=[
+ Claim(type="oid", value="test-object-id"),
+ Claim(type="tid", value=tenant_id),
+ Claim(type="scp", value="FabricWorkloadControl")
+ ]
+ )
+
+ @staticmethod
+ def create_subject_claims(
+ tenant_id: str = "test-tenant-id",
+ scopes: str = "FabricWorkloadControl",
+ app_id: str = EnvironmentConstants.FABRIC_BACKEND_APP_ID
+ ) -> List[Claim]:
+ """Create subject token claims."""
+ return [
+ Claim(type="tid", value=tenant_id),
+ Claim(type="oid", value="test-object-id"),
+ Claim(type="scp", value=scopes),
+ Claim(type="ver", value="2.0"),
+ Claim(type="azp", value=app_id)
+ ]
+
+ @staticmethod
+ def create_app_claims(
+ tenant_id: str = "publisher-tenant-id",
+ app_id: str = EnvironmentConstants.FABRIC_BACKEND_APP_ID
+ ) -> List[Claim]:
+ """Create app token claims."""
+ return [
+ Claim(type="tid", value=tenant_id),
+ Claim(type="oid", value="service-principal-id"),
+ Claim(type="idtyp", value="app"),
+ Claim(type="ver", value="2.0"),
+ Claim(type="azp", value=app_id)
+ ]
+
+ @staticmethod
+ def create_claims_from_payload(payload: Dict[str, Any]) -> List[Claim]:
+ """Convert JWT payload to claims list."""
+ claims = []
+ for key, value in payload.items():
+ claims.append(Claim(type=key, value=value))
+ return claims
\ No newline at end of file
diff --git a/Backend/python/tests/test_helpers.py b/Backend/python/tests/test_helpers.py
new file mode 100644
index 0000000..050d6be
--- /dev/null
+++ b/Backend/python/tests/test_helpers.py
@@ -0,0 +1,102 @@
+"""Helper utilities for testing."""
+
+from typing import Dict, Any, Optional, List
+from uuid import UUID
+from unittest.mock import Mock, AsyncMock
+
+from models.authentication_models import AuthorizationContext, Claim
+from items.base_item import ItemBase
+from tests.test_fixtures import TestFixtures
+
+
+class TestHelpers:
+ """Helper methods for creating test objects."""
+
+ @staticmethod
+ def create_mock_item(item_type: str = "Item1", item_id: Optional[UUID] = None) -> Mock:
+ """Create a mock item with all required attributes."""
+ mock_item = AsyncMock(spec=ItemBase)
+ mock_item.item_type = item_type
+ mock_item.item_object_id = item_id or TestFixtures.ITEM_ID # checked by JobsController after load
+
+ # Mock async methods
+ mock_item.create = AsyncMock()
+ mock_item.update = AsyncMock()
+ mock_item.delete = AsyncMock()
+ mock_item.load = AsyncMock()
+ mock_item.get_item_payload = AsyncMock(return_value={"test": "payload"})
+ mock_item.execute_job = AsyncMock()
+ mock_item.get_job_state = AsyncMock()
+ mock_item.cancel_job = AsyncMock()
+
+ return mock_item
+
+ @staticmethod
+ def create_auth_context(
+ tenant_id: str = "44444444-4444-4444-4444-444444444444",
+ user_id: str = "test-user-id",
+ user_name: str = "Test User",
+ has_subject_context: bool = True
+ ) -> AuthorizationContext:
+ """Create an authorization context for testing."""
+ # Control has_subject_context by setting original_subject_token
+ context = AuthorizationContext(
+ original_subject_token="mock_subject_token" if has_subject_context else None,
+ tenant_object_id=tenant_id,
+ claims=[
+ {"type": "oid", "value": user_id},
+ {"type": "name", "value": user_name},
+ {"type": "tid", "value": tenant_id}
+ ]
+ )
+ return context
+
+ @staticmethod
+ def create_headers(
+ activity_id: Optional[str] = None,
+ request_id: Optional[str] = None,
+ authorization: Optional[str] = None,
+ tenant_id: Optional[str] = None
+ ) -> Dict[str, str]:
+ """Create request headers for testing."""
+ headers = {}
+
+ if activity_id:
+ headers["activity_id"] = activity_id
+
+ if request_id:
+ headers["request_id"] = request_id
+
+ if authorization:
+ headers["authorization"] = authorization
+
+ if tenant_id:
+ headers["x_ms_client_tenant_id"] = tenant_id
+
+ return headers
+
+ @staticmethod
+ def assert_error_response(response, expected_status: int, expected_error_code: Optional[str] = None):
+ """Assert that a response is an error with expected properties."""
+ assert response.status_code == expected_status, \
+ f"Expected status code {expected_status}, got {response.status_code}. Response: {response.text}"
+
+ if expected_error_code:
+ error_data = response.json()
+ assert "errorCode" in error_data, f"Missing errorCode in response: {error_data}"
+ assert error_data["errorCode"] == expected_error_code, \
+ f"Expected error code '{expected_error_code}', got '{error_data.get('errorCode')}'"
+
+ @staticmethod
+ def create_error_response(error_code: str, message: str, status_code: int = 400) -> Dict[str, Any]:
+ """Create a standard error response."""
+ return {
+ "errorCode": error_code,
+ "message": message,
+ "details": {
+ "statusCode": status_code
+ }
+ }
\ No newline at end of file
diff --git a/Backend/python/tests/unit/__init__.py b/Backend/python/tests/unit/__init__.py
new file mode 100644
index 0000000..6908c58
--- /dev/null
+++ b/Backend/python/tests/unit/__init__.py
@@ -0,0 +1 @@
+"""Test package for Python Backend FastAPI application."""
\ No newline at end of file
diff --git a/Backend/python/tests/unit/api/test_endpoint_resolution_api.py b/Backend/python/tests/unit/api/test_endpoint_resolution_api.py
new file mode 100644
index 0000000..f2c25da
--- /dev/null
+++ b/Backend/python/tests/unit/api/test_endpoint_resolution_api.py
@@ -0,0 +1,35 @@
+# coding: utf-8
+
+from fastapi.testclient import TestClient
+
+
+from pydantic import Field, StrictStr # noqa: F401
+from typing_extensions import Annotated # noqa: F401
+from fabric_api.models.endpoint_resolution_request import EndpointResolutionRequest # noqa: F401
+from fabric_api.models.endpoint_resolution_response import EndpointResolutionResponse # noqa: F401
+from fabric_api.models.error_response import ErrorResponse # noqa: F401
+
+
+def test_endpoint_resolution_resolve(client: TestClient):
+ """Test case for endpoint_resolution_resolve
+
+ Resolve an endpoint for a given service called by Microsoft Fabric
+ """
+ body = {"context":[{"name":"EndpointName","value":"value"},{"name":"EndpointName","value":"value"}]}
+
+ headers = {
+ "activity_id": 'activity_id_example',
+ "request_id": 'request_id_example',
+ "authorization": 'authorization_example',
+ }
+ # uncomment below to make a request
+ #response = client.request(
+ # "POST",
+ # "/resolve-api-path-placeholder",
+ # headers=headers,
+ # json=body,
+ #)
+
+ # uncomment below to assert the status code of the HTTP response
+ #assert response.status_code == 200
+
diff --git a/Backend/python/tests/unit/api/test_item_lifecycle_api.py b/Backend/python/tests/unit/api/test_item_lifecycle_api.py
new file mode 100644
index 0000000..7de628b
--- /dev/null
+++ b/Backend/python/tests/unit/api/test_item_lifecycle_api.py
@@ -0,0 +1,152 @@
+# coding: utf-8
+
+"""
+Enhanced tests for item_lifecycle_api endpoints.
+These tests focus on the API layer validation and routing.
+"""
+
+from exceptions.exceptions import AuthenticationException
+import pytest
+from fastapi.testclient import TestClient
+from unittest.mock import ANY
+
+from tests.test_fixtures import TestFixtures
+from tests.test_helpers import TestHelpers
+from tests.constants import ExpectedResponses
+
+
+@pytest.mark.unit
+@pytest.mark.api
+class TestItemLifecycleAPI:
+ """Test cases for Item Lifecycle API endpoints."""
+
+ def test_create_item_valid_request(self, client: TestClient, valid_headers, mock_item_factory):
+ """Test creating an item with valid request."""
+ # Ensure mock returns async mock item
+ mock_item = TestHelpers.create_mock_item()
+ mock_item_factory.create_item.return_value = mock_item
+
+ response = client.post(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}",
+ headers=valid_headers,
+ json=TestFixtures.CREATE_PAYLOAD
+ )
+ assert response.status_code == 200
+ mock_item_factory.create_item.assert_called_once_with(
+ TestFixtures.ITEM_TYPE,
+ ANY # auth context
+ )
+ mock_item.create.assert_called_once()
+
+ # Assert - Call arguments
+ create_call_args = mock_item.create.call_args
+ assert create_call_args[0][0] == TestFixtures.WORKSPACE_ID
+ assert create_call_args[0][1] == TestFixtures.ITEM_ID
+ assert create_call_args[0][2] is not None
+
+ @pytest.mark.parametrize("invalid_uuid", [
+ "not-a-uuid",
+ "12345",
+ "123e4567-e89b-12d3-a456-426614174000-extra"
+ ])
+ def test_invalid_uuid_parameters(self, client, valid_headers, invalid_uuid):
+ """Test API with various invalid UUID formats."""
+ response = client.post(
+ f"/workspaces/{invalid_uuid}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}",
+ headers=valid_headers,
+ json=TestFixtures.CREATE_PAYLOAD
+ )
+
+ assert response.status_code == 400
+ error_data = response.json()
+ assert error_data["error_code"] == "InvalidParameter"
+ assert error_data["source"] == "User"
+ assert error_data["is_permanent"] == True
+ assert "Invalid parameter 'workspaceId'" in error_data["message"]
+ assert "badly formed hexadecimal UUID string" in error_data["message"]
+
+ def test_update_item_valid_request(self, client: TestClient, valid_headers, mock_item_factory):
+ """Test updating an item with valid request."""
+ # Ensure mock returns async mock item
+ mock_item = TestHelpers.create_mock_item()
+ mock_item_factory.create_item.return_value = mock_item
+
+ response = client.patch(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}",
+ headers=valid_headers,
+ json=TestFixtures.UPDATE_PAYLOAD
+ )
+
+ assert response.status_code == 200
+
+ def test_delete_item_valid_request(self, client: TestClient, valid_headers, mock_item_factory):
+ """Test deleting an item with valid request."""
+ # Ensure mock returns async mock item
+ mock_item = TestHelpers.create_mock_item()
+ mock_item_factory.create_item.return_value = mock_item
+
+ response = client.delete(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}",
+ headers=valid_headers
+ )
+
+ assert response.status_code == 200
+
+ def test_get_item_payload_valid_request(self, client: TestClient, valid_headers, mock_item_factory):
+ """Test getting item payload with valid request."""
+ # Ensure mock returns async mock item with payload
+ test_payload = {"test": "payload"}
+ mock_item = TestHelpers.create_mock_item()
+ mock_item.get_item_payload.return_value = test_payload
+ mock_item_factory.create_item.return_value = mock_item
+
+ response = client.get(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}/payload",
+ headers=valid_headers
+ )
+
+ assert response.status_code == 200
+ data = response.json()
+ assert "itemPayload" in data
+
+ def test_missing_required_headers(self, client: TestClient, mock_authentication_service):
+ """Test API call with missing required headers."""
+ # Configure the auth service mock to reject the call when headers are missing
+ mock_authentication_service.authenticate_control_plane_call.side_effect = AuthenticationException(
+ "Missing authorization header"
+ )
+
+ response = client.post(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}",
+ json=TestFixtures.CREATE_PAYLOAD
+ )
+
+ expected = ExpectedResponses.MISSING_AUTH_HEADER
+ assert response.status_code == expected["status_code"]
+
+ def test_invalid_json_payload(self, client: TestClient, valid_headers):
+ """Test API call with invalid JSON payload."""
+ # Merge headers and add content-type
+ headers = {**valid_headers, "content-type": "application/json"}
+
+ response = client.post(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}",
+ headers=headers,
+ content="invalid json"
+ )
+
+ expected = ExpectedResponses.VALIDATION_ERROR
+ assert response.status_code == expected["status_code"]
+
+ def test_empty_request_body(self, client: TestClient, valid_headers):
+ """Test create/update with empty request body."""
+ response = client.post(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}",
+ headers=valid_headers,
+ json={}
+ )
+
+ expected = ExpectedResponses.VALIDATION_ERROR
+ assert response.status_code == expected["status_code"]
\ No newline at end of file
diff --git a/Backend/python/tests/unit/api/test_jobs_api.py b/Backend/python/tests/unit/api/test_jobs_api.py
new file mode 100644
index 0000000..0d9591e
--- /dev/null
+++ b/Backend/python/tests/unit/api/test_jobs_api.py
@@ -0,0 +1,205 @@
+"""Tests for Jobs API endpoints."""
+
+import pytest
+from fastapi import status
+
+from tests.test_fixtures import TestFixtures
+from tests.test_helpers import TestHelpers
+from tests.constants import ExpectedResponses
+from fabric_api.models.job_instance_status import JobInstanceStatus
+from fabric_api.models.item_job_instance_state import ItemJobInstanceState
+
+
+@pytest.mark.unit
+@pytest.mark.api
+class TestJobsAPI:
+ """Test cases for Jobs API endpoints."""
+
+ def test_create_job_instance_endpoint_valid(self, client, valid_headers, mock_item_factory):
+ """Test valid job instance creation request."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item_factory.create_item.return_value = mock_item
+
+ # Act
+ response = client.post(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}/jobTypes/RunCalculation/instances/{TestFixtures.JOB_INSTANCE_ID}",
+ headers=valid_headers,
+ json=TestFixtures.JOB_CREATE_PAYLOAD
+ )
+
+ # Assert - Jobs API returns 202 Accepted with no content
+ assert response.status_code == status.HTTP_202_ACCEPTED
+
+ def test_create_job_instance_missing_headers(self, client, mock_authentication_service):
+ """Test job creation with missing authorization header."""
+ headers = {"x_ms_client_tenant_id": str(TestFixtures.TENANT_ID)}
+
+ # Configure mock to raise exception for missing auth
+ from exceptions.exceptions import AuthenticationException
+ mock_authentication_service.authenticate_control_plane_call.side_effect = AuthenticationException("Missing authorization header")
+
+ response = client.post(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}/jobTypes/RunCalculation/instances/{TestFixtures.JOB_INSTANCE_ID}",
+ headers=headers,
+ json=TestFixtures.JOB_CREATE_PAYLOAD
+ )
+
+ expected = ExpectedResponses.MISSING_AUTH_HEADER
+ assert response.status_code == expected["status_code"]
+
+ def test_create_job_instance_invalid_json(self, client, valid_headers):
+ """Test job creation with invalid JSON payload."""
+ response = client.post(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}/jobTypes/RunCalculation/instances/{TestFixtures.JOB_INSTANCE_ID}",
+ headers=valid_headers,
+ content="invalid json" # Not JSON
+ )
+
+ # FastAPI returns 422 for validation errors
+ assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
+
+ def test_create_job_instance_empty_body(self, client, valid_headers, mock_item_factory):
+ """Test job creation with empty request body."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item_factory.create_item.return_value = mock_item
+
+ response = client.post(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}/jobTypes/RunCalculation/instances/{TestFixtures.JOB_INSTANCE_ID}",
+ headers=valid_headers,
+ json=None
+ )
+
+ # Should still accept empty body for jobs
+ assert response.status_code == status.HTTP_202_ACCEPTED
+
+ def test_get_job_instance_state_valid(self, client, valid_headers, mock_item_factory):
+ """Test getting job instance state."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item.get_job_state.return_value = ItemJobInstanceState(
+ status=JobInstanceStatus.COMPLETED,
+ message="Job completed successfully"
+ )
+ mock_item_factory.create_item.return_value = mock_item
+
+ response = client.get(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}/jobTypes/RunCalculation/instances/{TestFixtures.JOB_INSTANCE_ID}",
+ headers=valid_headers
+ )
+
+ assert response.status_code == status.HTTP_200_OK
+ data = response.json()
+ assert data["status"] == "Completed"
+
+ def test_get_job_instance_state_missing_auth(self, client, mock_authentication_service):
+ """Test getting job state without authorization."""
+ headers = {"x_ms_client_tenant_id": str(TestFixtures.TENANT_ID)}
+
+ from exceptions.exceptions import AuthenticationException
+ mock_authentication_service.authenticate_control_plane_call.side_effect = AuthenticationException("Missing authorization")
+
+ response = client.get(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}/jobTypes/RunCalculation/instances/{TestFixtures.JOB_INSTANCE_ID}",
+ headers=headers
+ )
+
+ expected = ExpectedResponses.MISSING_AUTH_HEADER
+ assert response.status_code == expected["status_code"]
+
+ def test_cancel_job_instance_valid(self, client, valid_headers, mock_item_factory):
+ """Test cancelling a job instance."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item_factory.create_item.return_value = mock_item
+
+ response = client.post(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}/jobTypes/RunCalculation/instances/{TestFixtures.JOB_INSTANCE_ID}/cancel",
+ headers=valid_headers
+ )
+
+ assert response.status_code == status.HTTP_200_OK
+ data = response.json()
+ assert data["status"] == "Cancelled"
+
+ def test_cancel_job_instance_missing_tenant_id(self, client, mock_authentication_service):
+ """Test cancelling job without tenant ID."""
+ headers = {"authorization": "Bearer token"}
+
+ from exceptions.exceptions import AuthenticationException
+ mock_authentication_service.authenticate_control_plane_call.side_effect = AuthenticationException("tenant_id header is missing")
+
+ response = client.post(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}/jobTypes/RunCalculation/instances/{TestFixtures.JOB_INSTANCE_ID}/cancel",
+ headers=headers
+ )
+
+ expected = ExpectedResponses.MISSING_TENANT_ID
+ assert response.status_code == expected["status_code"]
+
+ @pytest.mark.parametrize("job_type", ["RunCalculation", "ScheduledJob", "CustomJob"])
+ def test_different_job_types(self, client, valid_headers, job_type, mock_item_factory):
+ """Test creating jobs with different job types."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item_factory.create_item.return_value = mock_item
+
+ response = client.post(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}/jobTypes/{job_type}/instances/{TestFixtures.JOB_INSTANCE_ID}",
+ headers=valid_headers,
+ json=TestFixtures.JOB_CREATE_PAYLOAD
+ )
+
+ assert response.status_code == status.HTTP_202_ACCEPTED
+
+ @pytest.mark.parametrize("invoke_type,payload", [
+ ("Manual", TestFixtures.JOB_CREATE_PAYLOAD),
+ ("Scheduled", TestFixtures.SCHEDULED_JOB_PAYLOAD)
+ ])
+ def test_different_invoke_types(self, client, valid_headers, invoke_type, payload, mock_item_factory):
+ """Test creating jobs with different invoke types."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item_factory.create_item.return_value = mock_item
+
+ response = client.post(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}/jobTypes/RunCalculation/instances/{TestFixtures.JOB_INSTANCE_ID}",
+ headers=valid_headers,
+ json=payload
+ )
+
+ assert response.status_code == status.HTTP_202_ACCEPTED
+
+ def test_invalid_uuid_format(self, client, valid_headers):
+ """Test API with invalid UUID format."""
+ response = client.get(
+ f"/workspaces/invalid-uuid/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}/jobTypes/RunCalculation/instances/{TestFixtures.JOB_INSTANCE_ID}",
+ headers=valid_headers
+ )
+
+ # Should return 400 for invalid UUID
+ assert response.status_code == status.HTTP_400_BAD_REQUEST
+
+ def test_special_characters_in_job_type(self, client, valid_headers, mock_item_factory):
+ """Test API with special characters in job type."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item.get_job_state.return_value = ItemJobInstanceState(
+ status=JobInstanceStatus.NOTSTARTED
+ )
+ mock_item_factory.create_item.return_value = mock_item
+
+ job_type_encoded = "Run%20Calculation%20With%20Spaces"
+
+ response = client.get(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}/jobTypes/{job_type_encoded}/instances/{TestFixtures.JOB_INSTANCE_ID}",
+ headers=valid_headers
+ )
+
+ # Should handle URL encoded job types
+ assert response.status_code == status.HTTP_200_OK
\ No newline at end of file
diff --git a/Backend/python/tests/unit/controllers/test_item_lifecycle_controller.py b/Backend/python/tests/unit/controllers/test_item_lifecycle_controller.py
new file mode 100644
index 0000000..7f6fcad
--- /dev/null
+++ b/Backend/python/tests/unit/controllers/test_item_lifecycle_controller.py
@@ -0,0 +1,375 @@
+"""Comprehensive tests for ItemLifecycleController."""
+
+import pytest
+
+from tests.test_helpers import TestHelpers
+from tests.test_fixtures import TestFixtures
+from exceptions.exceptions import (
+ AuthenticationException,
+ ItemMetadataNotFoundException,
+ UnauthorizedException,
+ UnexpectedItemTypeException
+)
+from tests.constants import ExpectedResponses
+
+
+@pytest.mark.unit
+@pytest.mark.controllers
+class TestItemLifecycleController:
+ """Test cases for ItemLifecycleController."""
+
+ @pytest.mark.asyncio
+ async def test_create_item_success(
+ self,
+ client,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test successful item creation."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item_factory.create_item.return_value = mock_item
+
+ # Act
+ response = client.post(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}",
+ headers=valid_headers,
+ json=TestFixtures.CREATE_PAYLOAD
+ )
+
+ # Assert
+ assert response.status_code == 200
+
+ # Verify service calls - note that the controller might pass tenant_id differently
+ mock_authentication_service.authenticate_control_plane_call.assert_called_once()
+
+ # Check the call arguments more flexibly
+ call_args = mock_authentication_service.authenticate_control_plane_call.call_args
+ assert call_args[0][0] == valid_headers["authorization"]
+
+ mock_item_factory.create_item.assert_called_once()
+ mock_item.create.assert_called_once()
+
+ @pytest.mark.asyncio
+ async def test_create_item_missing_auth_header(self, client, valid_headers, mock_authentication_service):
+ """Test item creation with missing authorization header."""
+ # Arrange
+ headers = valid_headers.copy()
+ del headers["authorization"]
+
+ mock_authentication_service.authenticate_control_plane_call.side_effect = AuthenticationException(
+ "Missing authorization header")
+
+ # Act
+ response = client.post(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}",
+ headers=headers,
+ json=TestFixtures.CREATE_PAYLOAD
+ )
+
+ expected = ExpectedResponses.MISSING_AUTH_HEADER
+ assert response.status_code == expected["status_code"]
+
+ @pytest.mark.asyncio
+ async def test_create_item_authentication_failure(
+ self,
+ client,
+ mock_authentication_service,
+ valid_headers
+ ):
+ """Test item creation with authentication failure."""
+ # Arrange
+ mock_authentication_service.authenticate_control_plane_call.side_effect = AuthenticationException(
+ "Invalid token"
+ )
+
+ # Act
+ response = client.post(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}",
+ headers=valid_headers,
+ json=TestFixtures.CREATE_PAYLOAD
+ )
+
+ # Assert
+ expected = ExpectedResponses.INVALID_AUTH_TOKEN
+ assert response.status_code == expected["status_code"]
+ response_data = response.json()
+ assert response_data.get("error_code") == expected["error_code"]
+ assert response_data.get("source") == expected["source"]
+ assert "Invalid token" in response_data.get("message", "")
+
+ @pytest.mark.asyncio
+ async def test_update_item_success(
+ self,
+ client,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test successful item update."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item_factory.create_item.return_value = mock_item
+
+ # Act
+ response = client.patch(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}",
+ headers=valid_headers,
+ json=TestFixtures.UPDATE_PAYLOAD
+ )
+
+ # Assert
+ assert response.status_code == 200
+
+ # Verify service calls - the item ID might be passed as string
+ mock_item.load.assert_called_once()
+ # Check if called with either UUID or string
+ load_call_args = mock_item.load.call_args[0][0]
+ assert str(load_call_args) == str(TestFixtures.ITEM_ID)
+
+ mock_item.update.assert_called_once()
+
+ @pytest.mark.asyncio
+ async def test_update_item_not_found(
+ self,
+ client,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test updating a non-existent item."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item.load.side_effect = ItemMetadataNotFoundException(TestFixtures.ITEM_ID)
+ mock_item_factory.create_item.return_value = mock_item
+
+ # Act
+ response = client.patch(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}",
+ headers=valid_headers,
+ json=TestFixtures.UPDATE_PAYLOAD
+ )
+
+ # Assert
+ expected = ExpectedResponses.ITEM_NOT_FOUND
+ assert response.status_code == expected["status_code"]
+ response_data = response.json()
+ assert response_data.get("error_code") == expected["error_code"]
+ assert response_data.get("source") == expected["source"]
+ assert "Item metadata file cannot be found" in response_data.get("message", "")
+
+ @pytest.mark.asyncio
+ async def test_delete_item_success(
+ self,
+ client,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test successful item deletion."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item_factory.create_item.return_value = mock_item
+
+ # Act
+ response = client.delete(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}",
+ headers=valid_headers
+ )
+
+ # Assert
+ assert response.status_code == 200
+
+ # Verify service calls
+ mock_authentication_service.authenticate_control_plane_call.assert_called_once()
+
+ # The load and delete should be called
+ mock_item.load.assert_called_once()
+ mock_item.delete.assert_called_once()
+
+ @pytest.mark.asyncio
+ async def test_delete_item_without_subject_token(
+ self,
+ client,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test item deletion without subject token (system deletion)."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item_factory.create_item.return_value = mock_item
+
+ # Create auth context without subject context
+ auth_context = TestHelpers.create_auth_context(has_subject_context=False)
+ mock_authentication_service.authenticate_control_plane_call.return_value = auth_context
+
+ # Act
+ response = client.delete(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}",
+ headers=valid_headers
+ )
+
+ # Assert
+ assert response.status_code == 200
+ mock_item.delete.assert_called_once()
+
+ @pytest.mark.asyncio
+ async def test_get_item_payload_success(
+ self,
+ client,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test successful retrieval of item payload."""
+ # Arrange
+ test_payload = {"test": "payload", "data": {"value": 123}}
+ mock_item = TestHelpers.create_mock_item()
+ mock_item.get_item_payload.return_value = test_payload
+ mock_item_factory.create_item.return_value = mock_item
+
+ # Act
+ response = client.get(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}/payload",
+ headers=valid_headers
+ )
+
+ # Assert
+ assert response.status_code == 200
+ response_data = response.json()
+ assert response_data["itemPayload"] == test_payload
+
+ # Verify service calls - be flexible about UUID vs string
+ mock_item.load.assert_called_once()
+ mock_item.get_item_payload.assert_called_once()
+
+ @pytest.mark.asyncio
+ async def test_get_item_payload_unauthorized(
+ self,
+ client,
+ mock_authentication_service,
+ valid_headers
+ ):
+ """Test getting item payload with unauthorized access."""
+ # Arrange
+ mock_authentication_service.authenticate_control_plane_call.side_effect = UnauthorizedException(
+ "Access denied"
+ )
+
+ # Act
+ response = client.get(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}/payload",
+ headers=valid_headers
+ )
+
+ # Assert
+ expected = ExpectedResponses.ACCESS_DENIED
+ assert response.status_code == expected["status_code"]
+ response_data = response.json()
+ assert response_data.get("error_code") == expected["error_code"]
+ assert response_data.get("source") == expected["source"]
+
+ @pytest.mark.asyncio
+ async def test_invalid_item_type(
+ self,
+ client,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test operations with invalid item type."""
+ # Arrange
+ exception = UnexpectedItemTypeException(f"Unknown item type: {TestFixtures.UNKNOWN_ITEM_TYPE}")
+ mock_item_factory.create_item.side_effect = exception
+
+ # Act
+ response = client.post(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.UNKNOWN_ITEM_TYPE}/{TestFixtures.ITEM_ID}",
+ headers=valid_headers,
+ json=TestFixtures.CREATE_PAYLOAD
+ )
+
+ # Assert
+ expected = ExpectedResponses.UNEXPECTED_ITEM_TYPE
+ assert response.status_code == expected["status_code"]
+ response_data = response.json()
+ assert response_data.get("error_code") == expected["error_code"]
+ assert response_data.get("source") == expected["source"]
+
+ @pytest.mark.asyncio
+ async def test_missing_tenant_id_header(self, client, valid_headers, mock_authentication_service):
+ """Test API call with missing tenant ID header."""
+ # Arrange
+ headers = valid_headers.copy()
+ del headers["x_ms_client_tenant_id"]
+
+ # Configure mock to raise exception
+ exception = AuthenticationException("tenant_id header is missing")
+ mock_authentication_service.authenticate_control_plane_call.side_effect = exception
+
+ # Act
+ response = client.post(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}",
+ headers=headers,
+ json=TestFixtures.CREATE_PAYLOAD
+ )
+
+ # Assert - Use the expected response constants
+ expected = ExpectedResponses.MISSING_TENANT_ID
+ assert response.status_code == expected["status_code"]
+ response_data = response.json()
+ assert response_data.get("error_code") == expected["error_code"]
+ assert "tenant_id header is missing" in response_data.get("message", "")
+
+ @pytest.mark.asyncio
+ async def test_malformed_request_body(self, client, valid_headers):
+ """Test API call with malformed request body."""
+ # Act
+ response = client.post(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}",
+ headers=valid_headers,
+ json="invalid json string"
+ )
+
+ # Assert
+ expected = ExpectedResponses.VALIDATION_ERROR
+ assert response.status_code == expected["status_code"]
+
+ @pytest.mark.asyncio
+ async def test_concurrent_operations(
+ self,
+ client,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test handling of concurrent operations on the same item."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item_factory.create_item.return_value = mock_item
+
+ # Act - Make 3 update requests (not truly concurrent in test client)
+ responses = []
+ for _ in range(3):
+ response = client.patch(
+ f"/workspaces/{TestFixtures.WORKSPACE_ID}/items/{TestFixtures.ITEM_TYPE}/{TestFixtures.ITEM_ID}",
+ headers=valid_headers,
+ json=TestFixtures.UPDATE_PAYLOAD
+ )
+ responses.append(response)
+
+ # Assert - All should succeed
+ for response in responses:
+ assert response.status_code == 200
+
+ # Verify the item was loaded and updated 3 times
+ assert mock_item.load.call_count == 3
+ assert mock_item.update.call_count == 3
\ No newline at end of file
diff --git a/Backend/python/tests/unit/controllers/test_jobs_controller.py b/Backend/python/tests/unit/controllers/test_jobs_controller.py
new file mode 100644
index 0000000..2868a84
--- /dev/null
+++ b/Backend/python/tests/unit/controllers/test_jobs_controller.py
@@ -0,0 +1,709 @@
+"""Comprehensive tests for JobsController."""
+
+import pytest
+from unittest.mock import Mock, AsyncMock, patch
+from uuid import UUID
+import asyncio
+
+from tests.test_helpers import TestHelpers
+from tests.test_fixtures import TestFixtures
+from exceptions.exceptions import (
+ AuthenticationException,
+ ItemMetadataNotFoundException,
+ UnauthorizedException,
+ AuthenticationUIRequiredException
+)
+from fabric_api.models.create_item_job_instance_request import CreateItemJobInstanceRequest
+from fabric_api.models.item_job_instance_state import ItemJobInstanceState
+from fabric_api.models.job_instance_status import JobInstanceStatus
+from fabric_api.models.job_invoke_type import JobInvokeType
+from fabric_api.models.error_details import ErrorDetails
+from fabric_api.models.error_source import ErrorSource
+from fabric_api.impl.jobs_controller import JobsController, _background_tasks
+
+
+@pytest.mark.unit
+@pytest.mark.controllers
+class TestJobsController:
+ """Test cases for JobsController."""
+
+ @pytest.fixture
+ def controller(self):
+ """Create a JobsController instance."""
+ return JobsController()
+
+ @pytest.mark.asyncio
+ async def test_create_job_instance_success(
+ self,
+ controller,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test successful job instance creation."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item_factory.create_item.return_value = mock_item
+
+ job_request = CreateItemJobInstanceRequest(
+ invoke_type=JobInvokeType.MANUAL,
+ creation_payload={"test": "data"}
+ )
+
+ # Patch the service getters
+ with patch('fabric_api.impl.jobs_controller.get_authentication_service', return_value=mock_authentication_service), \
+ patch('fabric_api.impl.jobs_controller.get_item_factory', return_value=mock_item_factory):
+
+ # Act
+ result = await controller.jobs_create_item_job_instance(
+ workspaceId=TestFixtures.WORKSPACE_ID,
+ itemType=TestFixtures.ITEM_TYPE,
+ itemId=TestFixtures.ITEM_ID,
+ jobType="RunCalculation",
+ jobInstanceId=TestFixtures.JOB_INSTANCE_ID,
+ authorization=valid_headers["authorization"],
+ x_ms_client_tenant_id=valid_headers["x_ms_client_tenant_id"],
+ create_item_job_instance_request=job_request
+ )
+
+ # Assert
+ assert result is None # 202 Accepted returns None
+
+ # Wait for background task to complete
+ await asyncio.sleep(0.1)
+
+ # Verify service calls
+ mock_authentication_service.authenticate_control_plane_call.assert_called_once()
+ mock_item.load.assert_called_once()
+ mock_item.execute_job.assert_called_once()
+
+ @pytest.mark.asyncio
+ async def test_create_job_instance_authentication_failure(
+ self,
+ controller,
+ mock_authentication_service,
+ valid_headers
+ ):
+ """Test job creation with authentication failure."""
+ # Arrange
+ mock_authentication_service.authenticate_control_plane_call.side_effect = AuthenticationException(
+ "Invalid token"
+ )
+
+ job_request = CreateItemJobInstanceRequest(
+ invoke_type=JobInvokeType.MANUAL,
+ creation_payload={}
+ )
+
+ # Patch the service getter
+ with patch('fabric_api.impl.jobs_controller.get_authentication_service', return_value=mock_authentication_service):
+
+ # Act & Assert
+ with pytest.raises(AuthenticationException) as exc_info:
+ await controller.jobs_create_item_job_instance(
+ workspaceId=TestFixtures.WORKSPACE_ID,
+ itemType=TestFixtures.ITEM_TYPE,
+ itemId=TestFixtures.ITEM_ID,
+ jobType="RunCalculation",
+ jobInstanceId=TestFixtures.JOB_INSTANCE_ID,
+ authorization=valid_headers["authorization"],
+ x_ms_client_tenant_id=valid_headers["x_ms_client_tenant_id"],
+ create_item_job_instance_request=job_request
+ )
+
+ assert "Invalid token" in str(exc_info.value)
+
+ @pytest.mark.asyncio
+ async def test_create_job_instance_item_not_found(
+ self,
+ controller,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test job creation when item doesn't exist."""
+ # Arrange
+ # Use a real Item1 instance rather than a mock so load() runs the actual
+ # metadata-existence check that raises ItemMetadataNotFoundException.
+ from items.item1 import Item1 # concrete item implementation under test
+ mock_item = Item1(mock_authentication_service.authenticate_control_plane_call.return_value)
+
+ # Mock the item_metadata_store to simulate item not found
+ mock_metadata_store = AsyncMock()
+ mock_metadata_store.exists.return_value = False # This will trigger ItemMetadataNotFoundException
+
+ # Inject the mock store into the item
+ mock_item.item_metadata_store = mock_metadata_store
+
+ mock_item_factory.create_item.return_value = mock_item
+
+ job_request = CreateItemJobInstanceRequest(
+ invoke_type=JobInvokeType.MANUAL,
+ creation_payload={}
+ )
+
+ # Patch the service getters
+ with patch('fabric_api.impl.jobs_controller.get_authentication_service', return_value=mock_authentication_service), \
+ patch('fabric_api.impl.jobs_controller.get_item_factory', return_value=mock_item_factory):
+
+ # Act & Assert - Should raise the exception immediately
+ with pytest.raises(ItemMetadataNotFoundException) as exc_info:
+ await controller.jobs_create_item_job_instance(
+ workspaceId=TestFixtures.WORKSPACE_ID,
+ itemType=TestFixtures.ITEM_TYPE,
+ itemId=TestFixtures.ITEM_ID,
+ jobType="RunCalculation",
+ jobInstanceId=TestFixtures.JOB_INSTANCE_ID,
+ authorization=valid_headers["authorization"],
+ x_ms_client_tenant_id=valid_headers["x_ms_client_tenant_id"],
+ create_item_job_instance_request=job_request
+ )
+
+ # Verify the exception message
+ assert str(TestFixtures.ITEM_ID) in str(exc_info.value)
+
+ # Verify that exists was called
+ mock_metadata_store.exists.assert_called_once_with(
+ mock_authentication_service.authenticate_control_plane_call.return_value.tenant_object_id,
+ str(TestFixtures.ITEM_ID)
+ )
+
+ @pytest.mark.asyncio
+ async def test_create_job_instance_without_request_body(
+ self,
+ controller,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test job creation without request body."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item_factory.create_item.return_value = mock_item
+
+ # Patch the service getters
+ with patch('fabric_api.impl.jobs_controller.get_authentication_service', return_value=mock_authentication_service), \
+ patch('fabric_api.impl.jobs_controller.get_item_factory', return_value=mock_item_factory):
+
+ # Act
+ result = await controller.jobs_create_item_job_instance(
+ workspaceId=TestFixtures.WORKSPACE_ID,
+ itemType=TestFixtures.ITEM_TYPE,
+ itemId=TestFixtures.ITEM_ID,
+ jobType="RunCalculation",
+ jobInstanceId=TestFixtures.JOB_INSTANCE_ID,
+ authorization=valid_headers["authorization"],
+ x_ms_client_tenant_id=valid_headers["x_ms_client_tenant_id"],
+ create_item_job_instance_request=None
+ )
+
+ # Assert
+ assert result is None
+
+ # Wait for background task
+ await asyncio.sleep(0.1)
+
+ # Verify execute_job was called with None invoke_type and empty payload
+ mock_item.execute_job.assert_called_once()
+ call_args = mock_item.execute_job.call_args[0]
+ assert call_args[2] is None # invoke_type
+ assert call_args[3] == {} # creation_payload
+
+ @pytest.mark.asyncio
+ async def test_get_job_instance_state_success(
+ self,
+ controller,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test successful retrieval of job instance state."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item.item_object_id = TestFixtures.ITEM_ID
+
+ expected_state = ItemJobInstanceState(
+ status=JobInstanceStatus.COMPLETED,
+ message="Job completed successfully"
+ )
+ mock_item.get_job_state.return_value = expected_state
+ mock_item_factory.create_item.return_value = mock_item
+
+ # Patch the service getters
+ with patch('fabric_api.impl.jobs_controller.get_authentication_service', return_value=mock_authentication_service), \
+ patch('fabric_api.impl.jobs_controller.get_item_factory', return_value=mock_item_factory):
+
+ # Act
+ result = await controller.jobs_get_item_job_instance_state(
+ workspaceId=TestFixtures.WORKSPACE_ID,
+ itemType=TestFixtures.ITEM_TYPE,
+ itemId=TestFixtures.ITEM_ID,
+ jobType="RunCalculation",
+ jobInstanceId=TestFixtures.JOB_INSTANCE_ID,
+ authorization=valid_headers["authorization"],
+ x_ms_client_tenant_id=valid_headers["x_ms_client_tenant_id"]
+ )
+
+ # Assert
+ assert result.status == JobInstanceStatus.COMPLETED
+
+ # Verify service calls
+ mock_item.load.assert_called_once_with(TestFixtures.ITEM_ID)
+ mock_item.get_job_state.assert_called_once_with("RunCalculation", TestFixtures.JOB_INSTANCE_ID)
+
+ @pytest.mark.asyncio
+ async def test_get_job_instance_state_item_not_found(
+ self,
+ controller,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test getting job state when item doesn't exist."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item.item_object_id = None # Item not found after load
+ mock_item_factory.create_item.return_value = mock_item
+
+ # Patch the service getters
+ with patch('fabric_api.impl.jobs_controller.get_authentication_service', return_value=mock_authentication_service), \
+ patch('fabric_api.impl.jobs_controller.get_item_factory', return_value=mock_item_factory):
+
+ # Act
+ result = await controller.jobs_get_item_job_instance_state(
+ workspaceId=TestFixtures.WORKSPACE_ID,
+ itemType=TestFixtures.ITEM_TYPE,
+ itemId=TestFixtures.ITEM_ID,
+ jobType="RunCalculation",
+ jobInstanceId=TestFixtures.JOB_INSTANCE_ID,
+ authorization=valid_headers["authorization"],
+ x_ms_client_tenant_id=valid_headers["x_ms_client_tenant_id"]
+ )
+
+ # Assert
+ assert result.status == JobInstanceStatus.FAILED
+ assert result.error_details.error_code == "ItemNotFound"
+ assert result.error_details.source == ErrorSource.SYSTEM
+
+ @pytest.mark.asyncio
+ async def test_get_job_state_various_statuses(
+ self,
+ controller,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test getting job state with various status values."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item.item_object_id = TestFixtures.ITEM_ID
+ mock_item_factory.create_item.return_value = mock_item
+
+ # Patch the service getters
+ with patch('fabric_api.impl.jobs_controller.get_authentication_service', return_value=mock_authentication_service), \
+ patch('fabric_api.impl.jobs_controller.get_item_factory', return_value=mock_item_factory):
+
+ statuses = [
+ (JobInstanceStatus.NOTSTARTED, None),
+ (JobInstanceStatus.INPROGRESS, "Processing..."),
+ (JobInstanceStatus.COMPLETED, "Success"),
+ (JobInstanceStatus.FAILED, ErrorDetails(
+ error_code="ProcessingError",
+ message="Job failed",
+ source=ErrorSource.SYSTEM
+ )),
+ (JobInstanceStatus.CANCELLED, None)
+ ]
+
+ for status, detail in statuses:
+ # Configure mock
+ if isinstance(detail, str):
+ state = ItemJobInstanceState(status=status, message=detail)
+ elif isinstance(detail, ErrorDetails):
+ state = ItemJobInstanceState(status=status, error_details=detail)
+ else:
+ state = ItemJobInstanceState(status=status)
+
+ mock_item.get_job_state.return_value = state
+
+ # Act
+ result = await controller.jobs_get_item_job_instance_state(
+ workspaceId=TestFixtures.WORKSPACE_ID,
+ itemType=TestFixtures.ITEM_TYPE,
+ itemId=TestFixtures.ITEM_ID,
+ jobType="RunCalculation",
+ jobInstanceId=TestFixtures.JOB_INSTANCE_ID,
+ authorization=valid_headers["authorization"],
+ x_ms_client_tenant_id=valid_headers["x_ms_client_tenant_id"]
+ )
+
+ # Assert
+ assert result.status == status
+
+ @pytest.mark.asyncio
+ async def test_cancel_job_instance_success(
+ self,
+ controller,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test successful job cancellation."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item.item_object_id = TestFixtures.ITEM_ID
+ mock_item_factory.create_item.return_value = mock_item
+
+ # Patch the service getters
+ with patch('fabric_api.impl.jobs_controller.get_authentication_service', return_value=mock_authentication_service), \
+ patch('fabric_api.impl.jobs_controller.get_item_factory', return_value=mock_item_factory):
+
+ # Act
+ result = await controller.jobs_cancel_item_job_instance(
+ workspaceId=TestFixtures.WORKSPACE_ID,
+ itemType=TestFixtures.ITEM_TYPE,
+ itemId=TestFixtures.ITEM_ID,
+ jobType="RunCalculation",
+ jobInstanceId=TestFixtures.JOB_INSTANCE_ID,
+ authorization=valid_headers["authorization"],
+ x_ms_client_tenant_id=valid_headers["x_ms_client_tenant_id"]
+ )
+
+ # Assert
+ assert result.status == JobInstanceStatus.CANCELLED
+
+ # Verify service calls
+ mock_item.cancel_job.assert_called_once_with("RunCalculation", TestFixtures.JOB_INSTANCE_ID)
+
+ @pytest.mark.asyncio
+ async def test_cancel_job_instance_unauthorized(
+ self,
+ controller,
+ mock_authentication_service,
+ valid_headers
+ ):
+ """Test job cancellation with unauthorized access."""
+ # Arrange
+ mock_authentication_service.authenticate_control_plane_call.side_effect = UnauthorizedException(
+ "Access denied"
+ )
+
+ # Patch the service getter
+ with patch('fabric_api.impl.jobs_controller.get_authentication_service', return_value=mock_authentication_service):
+
+ # Act & Assert
+ with pytest.raises(UnauthorizedException):
+ await controller.jobs_cancel_item_job_instance(
+ workspaceId=TestFixtures.WORKSPACE_ID,
+ itemType=TestFixtures.ITEM_TYPE,
+ itemId=TestFixtures.ITEM_ID,
+ jobType="RunCalculation",
+ jobInstanceId=TestFixtures.JOB_INSTANCE_ID,
+ authorization=valid_headers["authorization"],
+ x_ms_client_tenant_id=valid_headers["x_ms_client_tenant_id"]
+ )
+
+ @pytest.mark.asyncio
+ async def test_cancel_job_instance_item_not_found(
+ self,
+ controller,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test cancelling job when item doesn't exist."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item.item_object_id = None # Item not found
+ mock_item_factory.create_item.return_value = mock_item
+
+ # Patch the service getters
+ with patch('fabric_api.impl.jobs_controller.get_authentication_service', return_value=mock_authentication_service), \
+ patch('fabric_api.impl.jobs_controller.get_item_factory', return_value=mock_item_factory):
+
+ # Act
+ result = await controller.jobs_cancel_item_job_instance(
+ workspaceId=TestFixtures.WORKSPACE_ID,
+ itemType=TestFixtures.ITEM_TYPE,
+ itemId=TestFixtures.ITEM_ID,
+ jobType="RunCalculation",
+ jobInstanceId=TestFixtures.JOB_INSTANCE_ID,
+ authorization=valid_headers["authorization"],
+ x_ms_client_tenant_id=valid_headers["x_ms_client_tenant_id"]
+ )
+
+ # Assert
+ assert result.status == JobInstanceStatus.FAILED
+ assert result.error_details.error_code == "ItemNotFound"
+
+ @pytest.mark.asyncio
+ async def test_job_execution_error_handling(
+ self,
+ controller,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test error handling during job execution."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item.execute_job.side_effect = Exception("Job execution failed")
+ mock_item_factory.create_item.return_value = mock_item
+
+ job_request = CreateItemJobInstanceRequest(
+ invoke_type=JobInvokeType.MANUAL,
+ creation_payload={}
+ )
+
+ # Patch the service getters
+ with patch('fabric_api.impl.jobs_controller.get_authentication_service', return_value=mock_authentication_service), \
+ patch('fabric_api.impl.jobs_controller.get_item_factory', return_value=mock_item_factory):
+
+ # Act - Should not raise exception (background task handles it)
+ result = await controller.jobs_create_item_job_instance(
+ workspaceId=TestFixtures.WORKSPACE_ID,
+ itemType=TestFixtures.ITEM_TYPE,
+ itemId=TestFixtures.ITEM_ID,
+ jobType="RunCalculation",
+ jobInstanceId=TestFixtures.JOB_INSTANCE_ID,
+ authorization=valid_headers["authorization"],
+ x_ms_client_tenant_id=valid_headers["x_ms_client_tenant_id"],
+ create_item_job_instance_request=job_request
+ )
+
+ # Assert
+ assert result is None # 202 Accepted
+
+ # Wait for background task to process
+ await asyncio.sleep(0.1)
+
+ # Verify execute_job was called and failed
+ mock_item.execute_job.assert_called_once()
+
+ @pytest.mark.asyncio
+ async def test_concurrent_job_requests(
+ self,
+ controller,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test handling of concurrent job requests."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item_factory.create_item.return_value = mock_item
+
+ job_request = CreateItemJobInstanceRequest(
+ invoke_type=JobInvokeType.MANUAL,
+ creation_payload={}
+ )
+
+ # Patch the service getters
+ with patch('fabric_api.impl.jobs_controller.get_authentication_service', return_value=mock_authentication_service), \
+ patch('fabric_api.impl.jobs_controller.get_item_factory', return_value=mock_item_factory):
+
+ # Act - Create multiple jobs concurrently
+ job_ids = [
+ UUID("55555555-5555-5555-5555-555555555550"),
+ UUID("55555555-5555-5555-5555-555555555551"),
+ UUID("55555555-5555-5555-5555-555555555552")
+ ]
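+            # Distinct job instance IDs let each concurrent request be tracked
+            # independently by the controller.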
+
+ tasks = []
+ for job_id in job_ids:
+ task = controller.jobs_create_item_job_instance(
+ workspaceId=TestFixtures.WORKSPACE_ID,
+ itemType=TestFixtures.ITEM_TYPE,
+ itemId=TestFixtures.ITEM_ID,
+ jobType="RunCalculation",
+ jobInstanceId=job_id,
+ authorization=valid_headers["authorization"],
+ x_ms_client_tenant_id=valid_headers["x_ms_client_tenant_id"],
+ create_item_job_instance_request=job_request
+ )
+ tasks.append(task)
+
+ # Execute all tasks concurrently
+ results = await asyncio.gather(*tasks)
+
+ # Assert
+ for result in results:
+ assert result is None # All should return 202 Accepted
+
+ # Wait for all background tasks
+ await asyncio.sleep(0.2)
+
+ # Verify all jobs were executed
+ assert mock_item.execute_job.call_count == 3
+
+ @pytest.mark.asyncio
+ async def test_background_task_tracking(
+ self,
+ controller,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test that background tasks are properly tracked."""
+ # Clear any existing tasks
+ _background_tasks.clear()
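+        # _background_tasks is assumed to be the module-level set that
+        # fabric_api.impl.jobs_controller uses to track in-flight job tasks.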
+
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ # Make execute_job take some time
+ async def slow_execute(*args, **kwargs):
+ await asyncio.sleep(0.5)
+ mock_item.execute_job = slow_execute
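+        # A real coroutine (rather than an AsyncMock) keeps the background task
+        # pending long enough for the tracking assertions below.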
+ mock_item_factory.create_item.return_value = mock_item
+
+ job_request = CreateItemJobInstanceRequest(
+ invoke_type=JobInvokeType.MANUAL,
+ creation_payload={}
+ )
+
+ # Patch the service getters
+ with patch('fabric_api.impl.jobs_controller.get_authentication_service', return_value=mock_authentication_service), \
+ patch('fabric_api.impl.jobs_controller.get_item_factory', return_value=mock_item_factory):
+
+ # Act
+ await controller.jobs_create_item_job_instance(
+ workspaceId=TestFixtures.WORKSPACE_ID,
+ itemType=TestFixtures.ITEM_TYPE,
+ itemId=TestFixtures.ITEM_ID,
+ jobType="RunCalculation",
+ jobInstanceId=TestFixtures.JOB_INSTANCE_ID,
+ authorization=valid_headers["authorization"],
+ x_ms_client_tenant_id=valid_headers["x_ms_client_tenant_id"],
+ create_item_job_instance_request=job_request
+ )
+
+ # Assert - Task should be tracked
+ await asyncio.sleep(0.1) # Let task start
+ assert len(_background_tasks) == 1
+
+ # Wait for task to complete
+ await asyncio.sleep(0.6)
+ assert len(_background_tasks) == 0
+
+ @pytest.mark.asyncio
+ async def test_cleanup_background_tasks(self):
+ """Test cleanup of background tasks during shutdown."""
+ # Clear any existing tasks
+ _background_tasks.clear()
+
+ # Add some mock tasks
+ async def dummy_task():
+ await asyncio.sleep(1)
+
+ task1 = asyncio.create_task(dummy_task())
+ task2 = asyncio.create_task(dummy_task())
+ _background_tasks.add(task1)
+ _background_tasks.add(task2)
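+        # Both tasks are still pending; cleanup is expected to cancel them once
+        # the timeout elapses.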
+
+ # Act
+ from fabric_api.impl.jobs_controller import cleanup_background_tasks
+ await cleanup_background_tasks(timeout=0.1)
+
+ # Assert
+ assert len(_background_tasks) == 0
+ assert task1.cancelled()
+ assert task2.cancelled()
+
+ @pytest.mark.asyncio
+ async def test_cleanup_background_tasks_with_completed_tasks(self):
+ """Test cleanup handles already completed tasks."""
+ # Clear any existing tasks
+ _background_tasks.clear()
+
+ # Add a completed task
+ async def quick_task():
+ return "done"
+
+ task = asyncio.create_task(quick_task())
+ await task # Let it complete
+ _background_tasks.add(task)
+
+ # Act
+ from fabric_api.impl.jobs_controller import cleanup_background_tasks
+ await cleanup_background_tasks()
+
+ # Assert
+ assert len(_background_tasks) == 0
+
+ @pytest.mark.asyncio
+ async def test_auth_ui_required_exception(
+ self,
+ controller,
+ mock_authentication_service,
+ valid_headers
+ ):
+ """Test handling of authentication UI required exception."""
+ # Arrange
+ mock_authentication_service.authenticate_control_plane_call.side_effect = AuthenticationUIRequiredException(
+ "User interaction required"
+ )
+
+ # Patch the service getter
+ with patch('fabric_api.impl.jobs_controller.get_authentication_service', return_value=mock_authentication_service):
+
+ # Act & Assert
+ with pytest.raises(AuthenticationUIRequiredException):
+ await controller.jobs_get_item_job_instance_state(
+ workspaceId=TestFixtures.WORKSPACE_ID,
+ itemType=TestFixtures.ITEM_TYPE,
+ itemId=TestFixtures.ITEM_ID,
+ jobType="RunCalculation",
+ jobInstanceId=TestFixtures.JOB_INSTANCE_ID,
+ authorization=valid_headers["authorization"],
+ x_ms_client_tenant_id=valid_headers["x_ms_client_tenant_id"]
+ )
+
+ @pytest.mark.asyncio
+ async def test_create_job_with_all_invoke_types(
+ self,
+ controller,
+ mock_authentication_service,
+ mock_item_factory,
+ valid_headers
+ ):
+ """Test creating jobs with all invoke types."""
+ # Arrange
+ mock_item = TestHelpers.create_mock_item()
+ mock_item_factory.create_item.return_value = mock_item
+
+ # Patch the service getters
+ with patch('fabric_api.impl.jobs_controller.get_authentication_service', return_value=mock_authentication_service), \
+ patch('fabric_api.impl.jobs_controller.get_item_factory', return_value=mock_item_factory):
+
+ invoke_types = [
+ (JobInvokeType.MANUAL, {"manual": "data"}),
+                (JobInvokeType.SCHEDULED, {"schedule": "0 * * * *"})
+            ]
+
+ for i, (invoke_type, payload) in enumerate(invoke_types):
+ job_request = CreateItemJobInstanceRequest(
+ invoke_type=invoke_type,
+ creation_payload=payload
+ )
+
+ # Use unique job instance ID for each
+ job_id = UUID(f"66666666-6666-6666-6666-66666666666{i}")
+
+ # Act
+ result = await controller.jobs_create_item_job_instance(
+ workspaceId=TestFixtures.WORKSPACE_ID,
+ itemType=TestFixtures.ITEM_TYPE,
+ itemId=TestFixtures.ITEM_ID,
+ jobType="RunCalculation",
+ jobInstanceId=job_id,
+ authorization=valid_headers["authorization"],
+ x_ms_client_tenant_id=valid_headers["x_ms_client_tenant_id"],
+ create_item_job_instance_request=job_request
+ )
+
+ # Assert
+ assert result is None # 202 Accepted
+
+ # Wait for all background tasks
+ await asyncio.sleep(0.2)
+
+ # Verify all jobs were executed
+ assert mock_item.execute_job.call_count == 2
\ No newline at end of file
diff --git a/Backend/python/tests/unit/items/__init__.py b/Backend/python/tests/unit/items/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Backend/python/tests/unit/items/test_base_item.py b/Backend/python/tests/unit/items/test_base_item.py
new file mode 100644
index 0000000..e7e180e
--- /dev/null
+++ b/Backend/python/tests/unit/items/test_base_item.py
@@ -0,0 +1,1070 @@
+"""Unit tests for ItemBase abstract class."""
+
+import datetime
+import pytest
+from unittest.mock import AsyncMock, patch, MagicMock
+from uuid import UUID
+import logging
+
+from items.base_item import ItemBase
+from services.item_metadata_store import ItemMetadataStore
+from services.onelake_client_service import OneLakeClientService
+from services.authentication import AuthenticationService
+from exceptions.exceptions import ItemMetadataNotFoundException, InvariantViolationException, UnexpectedItemTypeException, InvalidItemPayloadException
+from tests.test_helpers import TestHelpers
+from tests.test_fixtures import TestFixtures
+
+
+# Create a concrete implementation of ItemBase for testing
+class ConcreteTestItem(ItemBase[dict, dict]):
+ """Concrete implementation of ItemBase for testing purposes."""
+
+ @property
+ def item_type(self) -> str:
+ return "TestableItem"
+
+ def get_metadata_class(self) -> type:
+ return dict
+
+ async def get_item_payload(self) -> dict:
+ return {"test": "payload"}
+
+ def set_definition(self, payload: dict) -> None:
+ self._test_metadata = payload
+
+ def update_definition(self, payload: dict) -> None:
+ self._test_metadata = payload
+
+ def get_type_specific_metadata(self) -> dict:
+ return getattr(self, '_test_metadata', {})
+
+ def set_type_specific_metadata(self, metadata: dict) -> None:
+ self._test_metadata = metadata
+
+ async def execute_job(self, job_type: str, job_instance_id: UUID, invoke_type, creation_payload: dict) -> None:
+ pass
+
+ async def get_job_state(self, job_type: str, job_instance_id: UUID):
+ pass
+
+
+@pytest.mark.unit
+@pytest.mark.models
+class TestItemBase:
+ """Test cases for ItemBase abstract class."""
+
+ @pytest.fixture
+ def auth_context(self):
+ """Create a test authorization context."""
+ return TestHelpers.create_auth_context()
+
+ @pytest.fixture
+ def mock_item_metadata_store(self):
+ """Create a mock item metadata store."""
+ mock_store = AsyncMock(spec=ItemMetadataStore)
+ mock_store.exists.return_value = True
+ mock_store.load.return_value = MagicMock()
+ mock_store.upsert.return_value = None
+ mock_store.delete.return_value = None
+ mock_store.exists_job.return_value = True
+ mock_store.load_job.return_value = MagicMock()
+ mock_store.upsert_job.return_value = None
+ return mock_store
+
+ @pytest.fixture
+ def mock_onelake_client_service(self):
+ """Create a mock OneLake client service."""
+ mock_service = AsyncMock(spec=OneLakeClientService)
+ mock_service.write_to_onelake_file.return_value = None
+ mock_service.get_onelake_file.return_value = "test content"
+ mock_service.check_if_file_exists.return_value = True
+ mock_service.get_onelake_file_path.return_value = "/path/to/file"
+ return mock_service
+
+ @pytest.fixture
+ def mock_authentication_service(self):
+ """Create a mock authentication service."""
+ mock_service = AsyncMock(spec=AuthenticationService)
+ mock_service.get_access_token_on_behalf_of.return_value = "mock_token"
+ mock_service.get_fabric_s2s_token.return_value = "mock_s2s_token"
+ return mock_service
+
+ def test_init_creates_required_services(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Test that ItemBase initialization creates and injects all required service dependencies."""
+
+ # Arrange - Mock the service getter functions
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ # Act - Create an instance of the testable item
+ item = ConcreteTestItem(auth_context)
+
+ # Assert - Verify all required services are properly injected
+
+ # 1. Verify logger is created with correct name pattern
+ assert item.logger is not None
+ assert isinstance(item.logger, logging.Logger)
+ # The logger name is created in ItemBase.__init__ using the module where ItemBase is defined
+ expected_logger_name = f"items.base_item.{ConcreteTestItem.__name__}"
+ assert item.logger.name == expected_logger_name
+
+ # 2. Verify auth_context is stored
+ assert item.auth_context is auth_context
+ assert item.auth_context.tenant_object_id == auth_context.tenant_object_id
+
+ # 3. Verify item_metadata_store service is injected
+ assert item.item_metadata_store is not None
+ assert item.item_metadata_store is mock_item_metadata_store
+
+ # 4. Verify authentication_service is injected
+ assert item.authentication_service is not None
+ assert item.authentication_service is mock_authentication_service
+
+ # 5. Verify onelake_client_service is injected
+ assert item.onelake_client_service is not None
+ assert item.onelake_client_service is mock_onelake_client_service
+
+ # 6. Verify initial state properties are None
+ assert item.tenant_object_id is None
+ assert item.workspace_object_id is None
+ assert item.item_object_id is None
+ assert item.display_name is None
+ assert item.description is None
+
+ def test_init_sets_auth_context(self, auth_context):
+ """Test that ItemBase initialization properly sets the authorization context."""
+
+ # Arrange - Mock all service dependencies
+ with patch('services.item_metadata_store.get_item_metadata_store'), \
+ patch('services.onelake_client_service.get_onelake_client_service'), \
+ patch('services.authentication.get_authentication_service'):
+
+ # Act
+ item = ConcreteTestItem(auth_context)
+
+ # Assert
+ assert item.auth_context is auth_context
+ assert item.auth_context.tenant_object_id == "44444444-4444-4444-4444-444444444444"
+ assert item.auth_context.original_subject_token == "mock_subject_token"
+ assert len(item.auth_context.claims) == 3
+
+ def test_properties_initialization(self, auth_context):
+ """Test that ItemBase properties are properly initialized to None."""
+
+ # Arrange - Mock all service dependencies
+ with patch('services.item_metadata_store.get_item_metadata_store'), \
+ patch('services.onelake_client_service.get_onelake_client_service'), \
+ patch('services.authentication.get_authentication_service'):
+
+ # Act
+ item = ConcreteTestItem(auth_context)
+
+ # Assert - All ID and metadata properties should be None initially
+ assert item.tenant_object_id is None
+ assert item.workspace_object_id is None
+ assert item.item_object_id is None
+ assert item.display_name is None
+ assert item.description is None
+
+ def test_logger_initialization(self, auth_context):
+ """Test that the logger is properly initialized with the correct naming pattern."""
+
+ # Arrange - Mock all service dependencies
+ with patch('services.item_metadata_store.get_item_metadata_store'), \
+ patch('services.onelake_client_service.get_onelake_client_service'), \
+ patch('services.authentication.get_authentication_service'):
+
+ # Act
+ item = ConcreteTestItem(auth_context)
+
+ # Assert
+ assert item.logger is not None
+ assert isinstance(item.logger, logging.Logger)
+ # Logger name should follow the pattern: module_name.class_name
+ # The logger is created in ItemBase.__init__ using the ItemBase module name
+ expected_name = f"items.base_item.{item.__class__.__name__}"
+ assert item.logger.name == expected_name
+
+ def test_service_injection_calls_getters(self, auth_context):
+ """Test that service injection calls the appropriate service getter functions."""
+
+ # Arrange - Create mock services and spy on getter functions
+ mock_item_store = AsyncMock(spec=ItemMetadataStore)
+ mock_onelake_service = AsyncMock(spec=OneLakeClientService)
+ mock_auth_service = AsyncMock(spec=AuthenticationService)
+
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_store) as mock_get_item_store, \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_service) as mock_get_onelake, \
+ patch('services.authentication.get_authentication_service', return_value=mock_auth_service) as mock_get_auth:
+
+ # Act
+ item = ConcreteTestItem(auth_context)
+
+ # Assert - Verify that each service getter was called exactly once
+ mock_get_item_store.assert_called_once()
+ mock_get_onelake.assert_called_once()
+ mock_get_auth.assert_called_once()
+
+ # Assert - Verify the returned services are properly assigned
+ assert item.item_metadata_store is mock_item_store
+ assert item.onelake_client_service is mock_onelake_service
+ assert item.authentication_service is mock_auth_service
+
+ # ============================================================================
+ # Load Operations Tests - Core CRUD Functionality
+ # ============================================================================
+
+ @pytest.mark.asyncio
+ async def test_load_existing_item_success(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Test successful loading of an existing item with proper metadata validation."""
+
+ # Arrange - Create test data
+ item_id = TestFixtures.ITEM_ID
+ workspace_id = TestFixtures.WORKSPACE_ID
+ tenant_id = TestFixtures.TENANT_ID
+
+ # Mock successful metadata loading
+ from models.common_item_metadata import CommonItemMetadata
+ from models.item1_metadata import Item1Metadata
+
+ # Create mock metadata structures
+ common_metadata = CommonItemMetadata(
+ type="TestableItem",
+ tenant_object_id=tenant_id,
+ workspace_object_id=workspace_id,
+ item_object_id=item_id,
+ display_name="Test Item",
+ description="Test Description"
+ )
+
+ type_specific_metadata = {"test_key": "test_value"}
+
+ # Create mock item metadata container
+ mock_item_metadata = MagicMock()
+ mock_item_metadata.common_metadata = common_metadata
+ mock_item_metadata.type_specific_metadata = type_specific_metadata
+
+ # Configure mock responses
+ mock_item_metadata_store.exists.return_value = True
+ mock_item_metadata_store.load.return_value = mock_item_metadata
+
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ # Act - Load the item
+ item = ConcreteTestItem(auth_context)
+ await item.load(item_id)
+
+ # Assert - Verify loading behavior
+
+ # 1. Verify existence check was called (tenant_id is converted to string by auth_context)
+ mock_item_metadata_store.exists.assert_called_once_with(str(tenant_id), str(item_id))
+
+ # 2. Verify metadata load was called with correct parameters
+ mock_item_metadata_store.load.assert_called_once_with(str(tenant_id), str(item_id), dict)
+
+ # 3. Verify all properties were set correctly from common metadata
+ assert item.tenant_object_id == str(tenant_id)
+ assert item.workspace_object_id == str(workspace_id)
+ assert item.item_object_id == str(item_id)
+ assert item.display_name == "Test Item"
+ assert item.description == "Test Description"
+
+            # 4. Verify type-specific metadata was set (the concrete test implementation stores it)
+ assert hasattr(item, '_test_metadata')
+ assert item._test_metadata == type_specific_metadata
+
+ @pytest.mark.asyncio
+ async def test_load_nonexistent_item_raises_exception(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Test that loading a non-existent item raises ItemMetadataNotFoundException."""
+
+ # Arrange - Mock item doesn't exist
+ item_id = TestFixtures.ITEM_ID
+ tenant_id = TestFixtures.TENANT_ID
+
+ mock_item_metadata_store.exists.return_value = False
+
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ # Act & Assert - Verify exception is raised
+ item = ConcreteTestItem(auth_context)
+
+ with pytest.raises(ItemMetadataNotFoundException) as exc_info:
+ await item.load(item_id)
+
+ # Verify exception details
+ assert str(item_id) in str(exc_info.value)
+
+            # Verify the existence check was called but load was not (IDs are passed as strings)
+ mock_item_metadata_store.exists.assert_called_once_with(str(tenant_id), str(item_id))
+ mock_item_metadata_store.load.assert_not_called()
+
+ @pytest.mark.asyncio
+ async def test_load_invalid_metadata_structure(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Test handling of corrupt/invalid metadata structure."""
+
+ # Arrange - Mock invalid metadata structure
+ item_id = TestFixtures.ITEM_ID
+ tenant_id = TestFixtures.TENANT_ID
+
+ # Test case 1: None metadata
+ mock_item_metadata_store.exists.return_value = True
+ mock_item_metadata_store.load.return_value = None
+
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ item = ConcreteTestItem(auth_context)
+
+ with pytest.raises(InvariantViolationException) as exc_info:
+ await item.load(item_id)
+
+ assert "Object reference must not be null: itemMetadata" in str(exc_info.value)
+
+ # Test case 2: Missing common_metadata
+ mock_item_metadata_2 = MagicMock()
+ mock_item_metadata_2.common_metadata = None
+ mock_item_metadata_2.type_specific_metadata = {"test": "data"}
+ mock_item_metadata_store.load.return_value = mock_item_metadata_2
+
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ item = ConcreteTestItem(auth_context)
+
+ with pytest.raises(InvariantViolationException) as exc_info:
+ await item.load(item_id)
+
+ assert "Object reference must not be null: itemMetadata.CommonMetadata" in str(exc_info.value)
+
+ @pytest.mark.asyncio
+ async def test_load_tenant_mismatch_raises_exception(self, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Test that tenant ID mismatch raises access denied error."""
+
+ # Arrange - Create auth context with different tenant
+ auth_context = TestHelpers.create_auth_context(tenant_id="66666666-6666-6666-6666-666666666666")
+ item_id = TestFixtures.ITEM_ID
+
+ # Mock metadata with different tenant ID
+ from models.common_item_metadata import CommonItemMetadata
+
+ common_metadata = CommonItemMetadata(
+ type="TestableItem",
+ tenant_object_id=UUID("55555555-5555-5555-5555-555555555555"), # Different from auth context
+ workspace_object_id=TestFixtures.WORKSPACE_ID,
+ item_object_id=item_id,
+ display_name="Test Item",
+ description="Test Description"
+ )
+
+ mock_item_metadata = MagicMock()
+ mock_item_metadata.common_metadata = common_metadata
+ mock_item_metadata.type_specific_metadata = {"test": "data"}
+
+ mock_item_metadata_store.exists.return_value = True
+ mock_item_metadata_store.load.return_value = mock_item_metadata
+
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+            # Act & Assert - Verify the tenant-mismatch invariant violation
+ item = ConcreteTestItem(auth_context)
+
+ with pytest.raises(InvariantViolationException) as exc_info:
+ await item.load(item_id)
+
+            # Verify the error message identifies the violated invariant
+ assert "Condition violation detected: TenantObjectId must match" in str(exc_info.value)
+
+ @pytest.mark.asyncio
+ async def test_load_wrong_item_type_raises_exception(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Test that item type mismatch raises TypeError."""
+
+ # Arrange - Mock metadata with wrong item type
+ item_id = TestFixtures.ITEM_ID
+ tenant_id = TestFixtures.TENANT_ID
+
+ from models.common_item_metadata import CommonItemMetadata
+
+ common_metadata = CommonItemMetadata(
+ type="WrongItemType", # Different from ConcreteTestItem.item_type
+ tenant_object_id=tenant_id,
+ workspace_object_id=TestFixtures.WORKSPACE_ID,
+ item_object_id=item_id,
+ display_name="Test Item",
+ description="Test Description"
+ )
+
+ mock_item_metadata = MagicMock()
+ mock_item_metadata.common_metadata = common_metadata
+ mock_item_metadata.type_specific_metadata = {"test": "data"}
+
+ mock_item_metadata_store.exists.return_value = True
+ mock_item_metadata_store.load.return_value = mock_item_metadata
+
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+            # Act & Assert - Verify the unexpected-item-type error
+ item = ConcreteTestItem(auth_context)
+
+ with pytest.raises(UnexpectedItemTypeException) as exc_info:
+ await item.load(item_id)
+
+ # Verify error message contains type information
+ assert "Unexpected item type" in str(exc_info.value)
+ assert "WrongItemType" in str(exc_info.value)
+ assert "TestableItem" in str(exc_info.value)
+
+ @pytest.mark.asyncio
+ async def test_load_sets_metadata_through_abstract_method(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Test that load calls set_type_specific_metadata with the loaded metadata."""
+
+ # Arrange - Setup successful load scenario
+ item_id = TestFixtures.ITEM_ID
+ tenant_id = TestFixtures.TENANT_ID
+ test_metadata = {"calculation": "result", "operand1": 42}
+
+ from models.common_item_metadata import CommonItemMetadata
+
+ common_metadata = CommonItemMetadata(
+ type="TestableItem",
+ tenant_object_id=tenant_id,
+ workspace_object_id=TestFixtures.WORKSPACE_ID,
+ item_object_id=item_id,
+ display_name="Test Item",
+ description="Test Description"
+ )
+
+ mock_item_metadata = MagicMock()
+ mock_item_metadata.common_metadata = common_metadata
+ mock_item_metadata.type_specific_metadata = test_metadata
+
+ mock_item_metadata_store.exists.return_value = True
+ mock_item_metadata_store.load.return_value = mock_item_metadata
+
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ # Act - Load the item
+ item = ConcreteTestItem(auth_context)
+
+ # Spy on the set_type_specific_metadata method
+ with patch.object(item, 'set_type_specific_metadata', wraps=item.set_type_specific_metadata) as spy_method:
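+                # wraps= delegates to the real method while recording the call,
+                # so _test_metadata is still populated by load().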
+ await item.load(item_id)
+
+ # Assert - Verify set_type_specific_metadata was called with correct data
+ spy_method.assert_called_once_with(test_metadata)
+
+ # Verify the metadata was actually set in our concrete implementation
+ assert item._test_metadata == test_metadata
+
+ # ============================================================================
+ # CRUD Operations Tests - Core Item Lifecycle
+ # ============================================================================
+
+ @pytest.mark.asyncio
+ async def test_create_item_success(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Test successful item creation with CreateItemRequest."""
+
+ # Arrange
+ workspace_id = TestFixtures.WORKSPACE_ID
+ item_id = TestFixtures.ITEM_ID
+
+ from fabric_api.models.create_item_request import CreateItemRequest
+ create_request = CreateItemRequest(
+ display_name="Test Item Creation",
+ description="Test item creation description",
+ creation_payload={"metadata": {"operand1": 100, "operand2": 200, "operator": "Add"}}
+ )
+
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ # Act
+ item = ConcreteTestItem(auth_context)
+ await item.create(workspace_id, item_id, create_request)
+
+ # Assert - Verify all properties were set correctly
+ assert item.tenant_object_id == str(auth_context.tenant_object_id)
+ assert item.workspace_object_id == str(workspace_id)
+ assert item.item_object_id == str(item_id)
+ assert item.display_name == "Test Item Creation"
+ assert item.description == "Test item creation description"
+
+            # Verify the definition was set (the concrete implementation stores it in _test_metadata)
+ assert hasattr(item, '_test_metadata')
+ assert item._test_metadata == {"metadata": {"operand1": 100, "operand2": 200, "operator": "Add"}}
+
+ # Verify save_changes was called (which calls store, allocate_and_free_resources, update_fabric)
+ mock_item_metadata_store.upsert.assert_called_once()
+
+ @pytest.mark.asyncio
+ async def test_create_sets_all_properties(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Verify all properties are set correctly during creation."""
+
+ # Arrange
+ workspace_id = TestFixtures.WORKSPACE_ID
+ item_id = TestFixtures.ITEM_ID
+
+ from fabric_api.models.create_item_request import CreateItemRequest
+ create_request = CreateItemRequest(
+ display_name="Property Test Item",
+ description="Testing property assignment",
+ creation_payload={"test_data": "test_value"}
+ )
+
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ # Act
+ item = ConcreteTestItem(auth_context)
+ await item.create(workspace_id, item_id, create_request)
+
+ # Assert - Verify ALL properties are correctly set
+ assert item.tenant_object_id == str(auth_context.tenant_object_id)
+ assert item.workspace_object_id == str(workspace_id)
+ assert item.item_object_id == str(item_id)
+ assert item.display_name == "Property Test Item"
+ assert item.description == "Testing property assignment"
+
+ # Verify auth context is preserved
+ assert item.auth_context is auth_context
+
+ # Verify services are still accessible
+ assert item.item_metadata_store is mock_item_metadata_store
+ assert item.authentication_service is mock_authentication_service
+ assert item.onelake_client_service is mock_onelake_client_service
+
+ @pytest.mark.asyncio
+ async def test_update_item_success(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Test successful item updates."""
+
+ # Arrange - Create and initialize an item first
+ workspace_id = TestFixtures.WORKSPACE_ID
+ item_id = TestFixtures.ITEM_ID
+
+ from fabric_api.models.update_item_request import UpdateItemRequest
+ update_request = UpdateItemRequest(
+ display_name="Updated Item Name",
+ description="Updated item description",
+ update_payload={"metadata": {"operand1": 300, "operand2": 400, "operator": "Multiply"}}
+ )
+
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ # Act
+ item = ConcreteTestItem(auth_context)
+
+ # Set some initial state (simulate a loaded item)
+ item.tenant_object_id = str(auth_context.tenant_object_id)
+ item.workspace_object_id = str(workspace_id)
+ item.item_object_id = str(item_id)
+ item.display_name = "Original Name"
+ item.description = "Original Description"
+
+ await item.update(update_request)
+
+ # Assert - Verify properties were updated
+ assert item.display_name == "Updated Item Name"
+ assert item.description == "Updated item description"
+
+            # Verify the definition was updated (the concrete implementation stores it in _test_metadata)
+ assert hasattr(item, '_test_metadata')
+ assert item._test_metadata == {"metadata": {"operand1": 300, "operand2": 400, "operator": "Multiply"}}
+
+ # Verify save_changes was called
+ mock_item_metadata_store.upsert.assert_called_once()
+
+ @pytest.mark.asyncio
+ async def test_update_invalid_request_raises_exception(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Test validation of update requests with invalid data."""
+
+ # Arrange
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ item = ConcreteTestItem(auth_context)
+
+ # Set item state
+ item.item_object_id = str(TestFixtures.ITEM_ID)
+
+ # Act & Assert - Test with None request
+ with pytest.raises(InvalidItemPayloadException) as exc_info:
+ await item.update(None)
+
+ # InvalidItemPayloadException uses the format: "{item_type} payload is invalid for id={item_id}. See MoreDetails for additional information."
+ assert "payload is invalid" in str(exc_info.value)
+ assert item.item_type in str(exc_info.value)
+ assert item.item_object_id in str(exc_info.value)
+
+ # Verify no save operation was attempted
+ mock_item_metadata_store.upsert.assert_not_called()
+
+ @pytest.mark.asyncio
+ async def test_delete_item_calls_metadata_store(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Verify deletion calls correct services."""
+
+ # Arrange
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ item = ConcreteTestItem(auth_context)
+
+ # Set required properties for deletion
+ item.tenant_object_id = str(TestFixtures.TENANT_ID)
+ item.item_object_id = str(TestFixtures.ITEM_ID)
+
+ # Act
+ await item.delete()
+
+ # Assert - Verify delete was called with correct parameters
+ mock_item_metadata_store.delete.assert_called_once_with(
+ str(TestFixtures.TENANT_ID),
+ str(TestFixtures.ITEM_ID)
+ )
+
+ @pytest.mark.asyncio
+ async def test_save_changes_calls_all_required_methods(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Test that save_changes calls all required methods in sequence."""
+
+ # Arrange
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ item = ConcreteTestItem(auth_context)
+
+ # Set up item state
+ item.tenant_object_id = str(TestFixtures.TENANT_ID)
+ item.workspace_object_id = str(TestFixtures.WORKSPACE_ID)
+ item.item_object_id = str(TestFixtures.ITEM_ID)
+ item.display_name = "Test Item"
+ item.description = "Test Description"
+
+ # Mock the individual methods to verify they're called
+ with patch.object(item, 'store') as mock_store, \
+ patch.object(item, 'allocate_and_free_resources') as mock_allocate, \
+ patch.object(item, 'update_fabric') as mock_update_fabric:
+
+ # Act
+ await item.save_changes()
+
+ # Assert - Verify all methods were called in the correct order
+ mock_store.assert_called_once()
+ mock_allocate.assert_called_once()
+ mock_update_fabric.assert_called_once()
+
+ @pytest.mark.asyncio
+ async def test_store_creates_correct_metadata_structure(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Test that store creates the correct metadata structure."""
+
+ # Arrange
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ item = ConcreteTestItem(auth_context)
+
+ # Set up item state
+ item.tenant_object_id = str(TestFixtures.TENANT_ID)
+ item.workspace_object_id = str(TestFixtures.WORKSPACE_ID)
+ item.item_object_id = str(TestFixtures.ITEM_ID)
+ item.display_name = "Test Storage Item"
+ item.description = "Test storage description"
+ item._test_metadata = {"test": "metadata"}
+
+ # Act
+ await item.store()
+
+ # Assert - Verify upsert was called with correct structure
+ mock_item_metadata_store.upsert.assert_called_once()
+
+ # Get the call arguments
+ call_args = mock_item_metadata_store.upsert.call_args
+ tenant_id, item_id, common_metadata, type_specific_metadata = call_args[0]
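+            # This destructuring assumes upsert receives its arguments positionally;
+            # use call_args.kwargs instead if the store is called with keywords.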
+
+ # Verify the arguments
+ assert tenant_id == str(TestFixtures.TENANT_ID)
+ assert item_id == str(TestFixtures.ITEM_ID)
+
+ # Verify common metadata structure
+ assert common_metadata.type == "TestableItem"
+ assert common_metadata.tenant_object_id == TestFixtures.TENANT_ID
+ assert common_metadata.workspace_object_id == TestFixtures.WORKSPACE_ID
+ assert common_metadata.item_object_id == TestFixtures.ITEM_ID
+ assert common_metadata.display_name == "Test Storage Item"
+ assert common_metadata.description == "Test storage description"
+
+ # Verify type-specific metadata
+ assert type_specific_metadata == {"test": "metadata"}
+
+ @pytest.mark.asyncio
+ async def test_create_with_empty_payload(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Test creation with empty payload is handled gracefully."""
+
+ # Arrange
+ workspace_id = TestFixtures.WORKSPACE_ID
+ item_id = TestFixtures.ITEM_ID
+
+ from fabric_api.models.create_item_request import CreateItemRequest
+ create_request = CreateItemRequest(
+ display_name="Empty Payload Item",
+ description="Item with empty payload",
+ creation_payload={}
+ )
+
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ # Act
+ item = ConcreteTestItem(auth_context)
+ await item.create(workspace_id, item_id, create_request)
+
+ # Assert - Item should be created successfully with empty payload
+ assert item.display_name == "Empty Payload Item"
+ assert item.description == "Item with empty payload"
+ assert item._test_metadata == {}
+
+ @pytest.mark.asyncio
+ async def test_update_preserves_existing_state(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Test that update only changes specified properties."""
+
+ # Arrange
+ from fabric_api.models.update_item_request import UpdateItemRequest
+ update_request = UpdateItemRequest(
+ display_name="New Display Name",
+ description="New Description",
+ update_payload={"new_data": "new_value"}
+ )
+
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ item = ConcreteTestItem(auth_context)
+
+ # Set initial state
+ original_tenant_id = str(TestFixtures.TENANT_ID)
+ original_workspace_id = str(TestFixtures.WORKSPACE_ID)
+ original_item_id = str(TestFixtures.ITEM_ID)
+
+ item.tenant_object_id = original_tenant_id
+ item.workspace_object_id = original_workspace_id
+ item.item_object_id = original_item_id
+ item.display_name = "Original Name"
+ item.description = "Original Description"
+
+ # Act
+ await item.update(update_request)
+
+ # Assert - Verify IDs are preserved and only name/description updated
+ assert item.tenant_object_id == original_tenant_id
+ assert item.workspace_object_id == original_workspace_id
+ assert item.item_object_id == original_item_id
+ assert item.display_name == "New Display Name"
+ assert item.description == "New Description"
+
+ # ============================================================================
+ # Job Management Tests - Cancel Job Operations
+ # ============================================================================
+
+ @pytest.mark.asyncio
+ async def test_cancel_job_missing_metadata_recreates(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Test job metadata recreation scenario when metadata is missing."""
+
+ # Arrange
+ job_type = "TestJob"
+ job_instance_id = TestFixtures.JOB_INSTANCE_ID
+
+ # Mock missing job metadata (exists_job returns False)
+ mock_item_metadata_store.exists_job.return_value = False
+ mock_item_metadata_store.upsert_job.return_value = None
+
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ item = ConcreteTestItem(auth_context)
+
+ # Set required properties
+ item.tenant_object_id = str(TestFixtures.TENANT_ID)
+ item.item_object_id = str(TestFixtures.ITEM_ID)
+
+ # Mock JobMetadata import and creation
+ with patch('models.job_metadata.JobMetadata') as mock_job_metadata_class:
+ mock_job_metadata = MagicMock()
+ mock_job_metadata.is_canceled = False
+ mock_job_metadata_class.return_value = mock_job_metadata
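+                # Patching the class in models.job_metadata assumes cancel_job
+                # resolves JobMetadata from that module at call time.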
+
+ # Mock datetime for consistent testing
+ with patch('datetime.datetime') as mock_datetime:
+ mock_utc_time = MagicMock()
+ mock_datetime.now.return_value = mock_utc_time
+ mock_datetime.timezone.utc = MagicMock()
+
+ # Act
+ await item.cancel_job(job_type, job_instance_id)
+
+ # Assert - Verify recreation workflow
+ mock_item_metadata_store.exists_job.assert_called_once_with(
+ str(TestFixtures.TENANT_ID),
+ str(TestFixtures.ITEM_ID),
+ str(job_instance_id)
+ )
+
+ # Verify new JobMetadata was created with correct parameters
+ mock_job_metadata_class.assert_called_once_with(
+ job_type=job_type,
+ job_instance_id=job_instance_id
+ )
+
+ # Verify canceled time was set
+ assert mock_job_metadata.canceled_time == mock_utc_time
+
+ # Verify upsert was called with recreated metadata
+ mock_item_metadata_store.upsert_job.assert_called_once_with(
+ str(TestFixtures.TENANT_ID),
+ str(TestFixtures.ITEM_ID),
+ str(job_instance_id),
+ mock_job_metadata
+ )
+
+ @pytest.mark.asyncio
+ async def test_cancel_job_already_canceled_noop(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Test idempotent cancellation - no operation when job is already canceled."""
+
+ # Arrange
+ job_type = "TestJob"
+ job_instance_id = TestFixtures.JOB_INSTANCE_ID
+
+ # Mock existing job metadata that is already canceled
+ mock_job_metadata = MagicMock()
+ mock_job_metadata.is_canceled = True # Already canceled
+
+ mock_item_metadata_store.exists_job.return_value = True
+ mock_item_metadata_store.load_job.return_value = mock_job_metadata
+
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ item = ConcreteTestItem(auth_context)
+
+ # Set required properties
+ item.tenant_object_id = str(TestFixtures.TENANT_ID)
+ item.item_object_id = str(TestFixtures.ITEM_ID)
+
+ # Act
+ await item.cancel_job(job_type, job_instance_id)
+
+ # Assert - Verify idempotent behavior
+ mock_item_metadata_store.exists_job.assert_called_once_with(
+ str(TestFixtures.TENANT_ID),
+ str(TestFixtures.ITEM_ID),
+ str(job_instance_id)
+ )
+
+ # Verify existing metadata was loaded
+ mock_item_metadata_store.load_job.assert_called_once_with(
+ str(TestFixtures.TENANT_ID),
+ str(TestFixtures.ITEM_ID),
+ str(job_instance_id)
+ )
+
+ # Verify no upsert was called since job is already canceled
+ mock_item_metadata_store.upsert_job.assert_not_called()
+
+            # Since the job is already canceled, canceled_time must not be modified;
+            # the key assertion is that upsert_job was never called.
+
+ @pytest.mark.asyncio
+ async def test_cancel_job_sets_canceled_time(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Verify cancellation timestamp is properly set."""
+
+ # Arrange
+ job_type = "TestJob"
+ job_instance_id = TestFixtures.JOB_INSTANCE_ID
+
+ # Mock existing job metadata that is NOT canceled
+ mock_job_metadata = MagicMock()
+ mock_job_metadata.is_canceled = False
+
+ mock_item_metadata_store.exists_job.return_value = True
+ mock_item_metadata_store.load_job.return_value = mock_job_metadata
+ mock_item_metadata_store.upsert_job.return_value = None
+
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ item = ConcreteTestItem(auth_context)
+
+ # Set required properties
+ item.tenant_object_id = str(TestFixtures.TENANT_ID)
+ item.item_object_id = str(TestFixtures.ITEM_ID)
+
+ # Mock datetime to control the timestamp
+ with patch('datetime.datetime') as mock_datetime:
+ mock_utc_time = datetime.datetime(2023, 7, 16, 12, 30, 45, tzinfo=datetime.timezone.utc)
+ mock_datetime.now.return_value = mock_utc_time
+ mock_datetime.timezone.utc = datetime.timezone.utc
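+                # Patching datetime.datetime replaces the class the production code
+                # resolves via `import datetime`, so now() returns the fixed time.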
+
+ # Act
+ await item.cancel_job(job_type, job_instance_id)
+
+ # Assert - Verify cancellation timestamp was set correctly
+ mock_datetime.now.assert_called_once_with(datetime.timezone.utc)
+ assert mock_job_metadata.canceled_time == mock_utc_time
+
+ # Verify upsert was called with updated metadata
+ mock_item_metadata_store.upsert_job.assert_called_once_with(
+ str(TestFixtures.TENANT_ID),
+ str(TestFixtures.ITEM_ID),
+ str(job_instance_id),
+ mock_job_metadata
+ )
+
+ @pytest.mark.asyncio
+ async def test_cancel_job_with_existing_metadata_workflow(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Test complete cancellation workflow with existing metadata."""
+
+ # Arrange
+ job_type = "CalculateAsText"
+ job_instance_id = TestFixtures.JOB_INSTANCE_ID
+
+ # Mock existing job metadata
+ mock_job_metadata = MagicMock()
+ mock_job_metadata.is_canceled = False
+ mock_job_metadata.job_type = job_type
+ mock_job_metadata.job_instance_id = job_instance_id
+
+ mock_item_metadata_store.exists_job.return_value = True
+ mock_item_metadata_store.load_job.return_value = mock_job_metadata
+ mock_item_metadata_store.upsert_job.return_value = None
+
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ item = ConcreteTestItem(auth_context)
+
+ # Set required properties
+ item.tenant_object_id = str(TestFixtures.TENANT_ID)
+ item.item_object_id = str(TestFixtures.ITEM_ID)
+
+ # Act
+ await item.cancel_job(job_type, job_instance_id)
+
+ # Assert - Verify complete workflow
+
+ # 1. Check if job exists
+ mock_item_metadata_store.exists_job.assert_called_once_with(
+ str(TestFixtures.TENANT_ID),
+ str(TestFixtures.ITEM_ID),
+ str(job_instance_id)
+ )
+
+ # 2. Load existing metadata
+ mock_item_metadata_store.load_job.assert_called_once_with(
+ str(TestFixtures.TENANT_ID),
+ str(TestFixtures.ITEM_ID),
+ str(job_instance_id)
+ )
+
+ # 3. Verify canceled_time was set (should be a datetime object)
+ assert hasattr(mock_job_metadata, 'canceled_time')
+ assert mock_job_metadata.canceled_time is not None
+
+ # 4. Update metadata
+ mock_item_metadata_store.upsert_job.assert_called_once_with(
+ str(TestFixtures.TENANT_ID),
+ str(TestFixtures.ITEM_ID),
+ str(job_instance_id),
+ mock_job_metadata
+ )
+
+ @pytest.mark.asyncio
+ async def test_cancel_job_logs_recreation_warning(self, auth_context, mock_item_metadata_store,
+ mock_onelake_client_service, mock_authentication_service):
+ """Test that proper warning is logged when recreating missing job metadata."""
+
+ # Arrange
+ job_type = "TestJob"
+ job_instance_id = TestFixtures.JOB_INSTANCE_ID
+
+ # Mock missing job metadata
+ mock_item_metadata_store.exists_job.return_value = False
+ mock_item_metadata_store.upsert_job.return_value = None
+
+ with patch('services.item_metadata_store.get_item_metadata_store', return_value=mock_item_metadata_store), \
+ patch('services.onelake_client_service.get_onelake_client_service', return_value=mock_onelake_client_service), \
+ patch('services.authentication.get_authentication_service', return_value=mock_authentication_service):
+
+ item = ConcreteTestItem(auth_context)
+
+ # Set required properties
+ item.tenant_object_id = str(TestFixtures.TENANT_ID)
+ item.item_object_id = str(TestFixtures.ITEM_ID)
+
+ # Mock JobMetadata and logger
+ with patch('models.job_metadata.JobMetadata') as mock_job_metadata_class, \
+ patch.object(item, 'logger') as mock_logger:
+
+ mock_job_metadata = MagicMock()
+ mock_job_metadata.is_canceled = False
+ mock_job_metadata_class.return_value = mock_job_metadata
+
+ # Act
+ await item.cancel_job(job_type, job_instance_id)
+
+ # Assert - Verify warning was logged
+ mock_logger.warning.assert_called_once()
+ warning_call = mock_logger.warning.call_args[0][0]
+ assert f"Recreating missing job {job_instance_id} metadata" in warning_call
+ assert f"tenant {TestFixtures.TENANT_ID}" in warning_call
+ assert f"item {TestFixtures.ITEM_ID}" in warning_call
+
+ # Verify success info was logged
+ mock_logger.info.assert_called_once()
+ info_call = mock_logger.info.call_args[0][0]
+ assert f"Canceled job {job_instance_id}" in info_call
+ assert f"item {TestFixtures.ITEM_ID}" in info_call
\ No newline at end of file
diff --git a/Backend/python/tests/unit/items/test_item1_async_operations.py b/Backend/python/tests/unit/items/test_item1_async_operations.py
new file mode 100644
index 0000000..7d8d17e
--- /dev/null
+++ b/Backend/python/tests/unit/items/test_item1_async_operations.py
@@ -0,0 +1,96 @@
+"""Item1 Async Operations Tests
+
+Tests for Item1 async operations including the double operation and boundary validation.
+"""
+
+import pytest
+from unittest.mock import AsyncMock
+
+from items.item1 import Item1
+from exceptions.exceptions import DoubledOperandsOverflowException
+from tests.test_fixtures import TestFixtures
+
+
+@pytest.mark.unit
+@pytest.mark.models
+class TestItem1AsyncOperations:
+ """Async operations tests - double operation and boundary validation."""
+
+ @pytest.mark.asyncio
+ async def test_double_operation_comprehensive(self, mock_auth_context, mock_all_services):
+ """Test double operation with comprehensive validation."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item.tenant_object_id = TestFixtures.TENANT_ID
+ item.workspace_object_id = TestFixtures.WORKSPACE_ID
+ item.item_object_id = TestFixtures.ITEM_ID
+ item._metadata.operand1 = 10
+ item._metadata.operand2 = 20
+
+ # Mock save_changes call
+ mock_store = mock_all_services['ItemMetadataStore']
+ mock_store.upsert = AsyncMock()
+
+ # Act
+ result = await item.double()
+
+ # Assert
+ assert result == (20, 40)
+ assert item._metadata.operand1 == 20
+ assert item._metadata.operand2 == 40
+ mock_store.upsert.assert_called_once()
+
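+    # The boundary cases below assume a signed 32-bit operand range: values above
+    # 2147483647 or below -2147483648 are rejected before the doubled result is stored.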
+ @pytest.mark.parametrize("operand1,operand2,should_raise,expected_operands", [
+ (2147483647, 2147483647, False, []), # Max safe values
+ (2147483648, 1, True, ["Operand1"]), # operand1 overflow
+ (1, 2147483648, True, ["Operand2"]), # operand2 overflow
+ (2147483648, 2147483648, True, ["Operand1"]), # Both overflow (first invalid operand reported)
+ (-2147483649, 1, True, ["Operand1"]), # operand1 underflow
+ (1, -2147483649, True, ["Operand2"]), # operand2 underflow
+ (0, 0, False, []), # Zero operands
+ (-1000000, -1000000, False, []), # Large negative safe
+ ])
+ @pytest.mark.asyncio
+ async def test_double_operation_boundary_validation(self, mock_auth_context, mock_all_services,
+ operand1, operand2, should_raise, expected_operands):
+ """Test double operation boundary validation.
+
+        Tests that DoubledOperandsOverflowException is raised, naming the offending
+        operand, when doubling the operands would overflow.
+ """
+ # Arrange
+ item = Item1(mock_auth_context)
+ item.tenant_object_id = TestFixtures.TENANT_ID
+ item.workspace_object_id = TestFixtures.WORKSPACE_ID
+ item.item_object_id = TestFixtures.ITEM_ID
+ item._metadata.operand1 = operand1
+ item._metadata.operand2 = operand2
+
+ mock_store = mock_all_services['ItemMetadataStore']
+ mock_store.upsert = AsyncMock()
+
+ if should_raise:
+ # Act & Assert
+ with pytest.raises(DoubledOperandsOverflowException) as exc_info:
+ await item.double()
+
+ # Verify that the exception message contains the expected operand name
+ exception_message = str(exc_info.value)
+ assert "may lead to overflow" in exception_message
+
+ # Verify that the specific invalid operand is mentioned in the message
+ if expected_operands:
+ expected_operand = expected_operands[0] # Take first expected operand
+ assert expected_operand in exception_message
+
+ mock_store.upsert.assert_not_called()
+ else:
+ # Act
+ result = await item.double()
+
+ # Assert
+ assert result == (operand1 * 2, operand2 * 2)
+ mock_store.upsert.assert_called_once()
\ No newline at end of file
diff --git a/Backend/python/tests/unit/items/test_item1_calculations.py b/Backend/python/tests/unit/items/test_item1_calculations.py
new file mode 100644
index 0000000..fee548c
--- /dev/null
+++ b/Backend/python/tests/unit/items/test_item1_calculations.py
@@ -0,0 +1,93 @@
+"""Item1 Mathematical Calculations Tests
+
+Tests for Item1 mathematical operations including all operators, edge cases, and error handling.
+"""
+
+import pytest
+
+from items.item1 import Item1
+from models.item1_metadata import Item1Operator
+
+
+@pytest.mark.unit
+@pytest.mark.models
+class TestItem1Calculations:
+ """Mathematical operations tests - comprehensive calculation coverage."""
+
+ @pytest.mark.parametrize("op1,op2,operator,expected_result", [
+ (10, 5, Item1Operator.ADD, "op1 = 10, op2 = 5, operator = Add, result = 15"),
+ (10, 5, Item1Operator.SUBTRACT, "op1 = 10, op2 = 5, operator = Subtract, result = 5"),
+ (10, 5, Item1Operator.MULTIPLY, "op1 = 10, op2 = 5, operator = Multiply, result = 50"),
+ (10, 5, Item1Operator.DIVIDE, "op1 = 10, op2 = 5, operator = Divide, result = 2"),
+ (100, 3, Item1Operator.DIVIDE, "op1 = 100, op2 = 3, operator = Divide, result = 33"),
+ (-10, 5, Item1Operator.ADD, "op1 = -10, op2 = 5, operator = Add, result = -5"),
+ (-10, -5, Item1Operator.MULTIPLY, "op1 = -10, op2 = -5, operator = Multiply, result = 50"),
+ ])
+ def test_calculate_result_operations(self, mock_auth_context, mock_all_services,
+ op1, op2, operator, expected_result):
+ """Test all arithmetic operations with comprehensive coverage."""
+ # Arrange
+ item = Item1(mock_auth_context)
+
+ # Act
+ result = item._calculate_result(op1, op2, operator)
+
+ # Assert
+ assert result == expected_result
+
+ @pytest.mark.parametrize("op1,op2", [
+ (5, 10), (1, 100), (0, 5), (-5, 5), (10, 10)
+ ])
+ def test_calculate_result_random_valid_ranges(self, mock_auth_context, mock_all_services, op1, op2):
+ """Test RANDOM operator with valid ranges."""
+ # Arrange
+ item = Item1(mock_auth_context)
+
+ # Act
+ result = item._calculate_result(op1, op2, Item1Operator.RANDOM)
+
+ # Assert
+ assert f"op1 = {op1}, op2 = {op2}, operator = Random" in result
+ result_value = int(result.split("result = ")[1])
+ assert op1 <= result_value <= op2
+
+ @pytest.mark.parametrize("op1,op2,operator,error_message", [
+ (10, 0, Item1Operator.DIVIDE, "Cannot divide by zero"),
+ (10, 5, Item1Operator.UNDEFINED, "Undefined operator"),
+ (15, 5, Item1Operator.RANDOM, "operand1 must not be greater than operand2"),
+ (10, 5, "InvalidOperator", "Unknown operator"),
+ (10, 5, None, "Unknown operator"),
+ ])
+ def test_calculate_result_error_scenarios(self, mock_auth_context, mock_all_services,
+ op1, op2, operator, error_message):
+ """Test calculation error handling scenarios."""
+ # Arrange
+ item = Item1(mock_auth_context)
+
+ # Act & Assert
+ with pytest.raises(ValueError, match=error_message):
+ item._calculate_result(op1, op2, operator)
+
+ @pytest.mark.parametrize("string_operator,expected_enum", [
+ ("Add", Item1Operator.ADD),
+ ("SUBTRACT", Item1Operator.SUBTRACT),
+ ("multiply", Item1Operator.MULTIPLY),
+ ("Divide", Item1Operator.DIVIDE),
+ ("Random", Item1Operator.RANDOM),
+ ])
+ def test_string_operator_conversion(self, mock_auth_context, mock_all_services,
+ string_operator, expected_enum):
+ """Test string to enum operator conversion."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ op1, op2 = 5, 10 # Valid for all operators including RANDOM
+
+ # Act
+ result = item._calculate_result(op1, op2, string_operator)
+
+ # Assert
+ assert f"operator = {expected_enum.name.title()}" in result
\ No newline at end of file
diff --git a/Backend/python/tests/unit/items/test_item1_core.py b/Backend/python/tests/unit/items/test_item1_core.py
new file mode 100644
index 0000000..a87789b
--- /dev/null
+++ b/Backend/python/tests/unit/items/test_item1_core.py
@@ -0,0 +1,91 @@
+"""Item1 Core Functionality Tests
+
+Tests for core Item1 functionality including initialization, properties, and basic operations.
+"""
+
+import pytest
+from unittest.mock import Mock, AsyncMock
+
+from items.item1 import Item1
+from models.authentication_models import AuthorizationContext
+from models.item1_metadata import Item1Metadata, Item1Operator
+from models.item_reference import ItemReference
+from constants.workload_constants import WorkloadConstants
+from constants.environment_constants import EnvironmentConstants
+from tests.test_helpers import TestHelpers
+from tests.test_fixtures import TestFixtures
+
+
+@pytest.mark.unit
+@pytest.mark.models
+class TestItem1Core:
+ """Core Item1 functionality tests - initialization, properties, and basic operations."""
+
+ def test_item1_initialization_success(self, mock_auth_context, mock_all_services):
+ """Test successful Item1 initialization with all required services."""
+ # Act
+ item = Item1(mock_auth_context)
+
+ # Assert - Verify core initialization
+ assert item.item_type == WorkloadConstants.ItemTypes.ITEM1
+ assert item.auth_context is mock_auth_context
+ assert isinstance(item._metadata, Item1Metadata)
+ assert item._lakehouse_client_service is not None
+
+ # Assert - Verify default metadata state
+ assert item._metadata.operand1 == 0
+ assert item._metadata.operand2 == 0
+ assert item._metadata.operator == Item1Operator.UNDEFINED
+ assert item._metadata.use_onelake is False
+
+ def test_static_class_variables_configuration(self, mock_auth_context, mock_all_services):
+ """Test that static class variables are properly configured."""
+ # Act
+ item = Item1(mock_auth_context)
+
+ # Assert - Test supported_operators
+ assert Item1Operator.UNDEFINED.value not in Item1.supported_operators
+ expected_operators = [op.value for op in Item1Operator if op != Item1Operator.UNDEFINED]
+ assert set(Item1.supported_operators) == set(expected_operators)
+
+ # Assert - Test fabric_scopes
+ assert len(Item1.fabric_scopes) == 1
+ assert "Lakehouse.Read.All" in Item1.fabric_scopes[0]
+ assert EnvironmentConstants.FABRIC_BACKEND_RESOURCE_ID in Item1.fabric_scopes[0]
+
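+    # A plausible sketch of how these class attributes are assumed to be
+    # declared on Item1 (hypothetical -- shown only to clarify the asserts above;
+    # the base class name is illustrative):
+    #
+    #     class Item1(ItemBase):
+    #         supported_operators = [op.value for op in Item1Operator
+    #                                if op != Item1Operator.UNDEFINED]
+    #         fabric_scopes = [f"{EnvironmentConstants.FABRIC_BACKEND_RESOURCE_ID}/Lakehouse.Read.All"]
+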
+ @pytest.mark.parametrize("property_name,metadata_attribute,test_value", [
+ ("operand1", "operand1", 42),
+ ("operand2", "operand2", 84),
+ ("operator", "operator", Item1Operator.MULTIPLY),
+ ("lakehouse", "lakehouse", ItemReference(workspace_id="test-ws", id="test-id")),
+ ])
+ def test_properties_delegate_to_metadata(self, mock_auth_context, mock_all_services,
+ property_name, metadata_attribute, test_value):
+ """Test that all properties correctly delegate to metadata."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ setattr(item._metadata, metadata_attribute, test_value)
+
+ # Act
+ property_value = getattr(item, property_name)
+
+ # Assert
+ assert property_value == test_value
+ assert property_value == getattr(item._metadata, metadata_attribute)
+
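+    # The scenarios below assume is_valid_lakehouse() roughly checks that both
+    # ids are present and that the lakehouse id is not the all-zeros GUID
+    # (hypothetical sketch, not the verbatim implementation):
+    #
+    #     def is_valid_lakehouse(self):
+    #         lakehouse = self._metadata.lakehouse
+    #         return bool(lakehouse and lakehouse.id and lakehouse.workspace_id
+    #                     and lakehouse.id != "00000000-0000-0000-0000-000000000000")
+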
+ @pytest.mark.parametrize("lakehouse_id,workspace_id,expected", [
+ (str(TestFixtures.ITEM_ID), str(TestFixtures.WORKSPACE_ID), True),
+ ("00000000-0000-0000-0000-000000000000", str(TestFixtures.WORKSPACE_ID), False),
+ ("", str(TestFixtures.WORKSPACE_ID), False),
+ (str(TestFixtures.ITEM_ID), "", False),
+ (None, str(TestFixtures.WORKSPACE_ID), False),
+ ])
+ def test_is_valid_lakehouse_scenarios(self, mock_auth_context, mock_all_services,
+ lakehouse_id, workspace_id, expected):
+ """Test is_valid_lakehouse validation scenarios."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item._metadata.lakehouse = ItemReference(id=lakehouse_id, workspace_id=workspace_id)
+
+ # Act & Assert
+ assert item.is_valid_lakehouse() == expected
\ No newline at end of file
diff --git a/Backend/python/tests/unit/items/test_item1_error_handling.py b/Backend/python/tests/unit/items/test_item1_error_handling.py
new file mode 100644
index 0000000..4a110ea
--- /dev/null
+++ b/Backend/python/tests/unit/items/test_item1_error_handling.py
@@ -0,0 +1,111 @@
+"""Item1 ErrorHandling Tests
+
+Tests for Item1 errorhandling functionality.
+"""
+
+import pytest
+import asyncio
+import os
+import random
+from unittest.mock import Mock, AsyncMock, patch, MagicMock, mock_open
+from uuid import UUID, uuid4
+from typing import Dict, Any
+
+from items.item1 import Item1
+from models.authentication_models import AuthorizationContext
+from models.item1_metadata import Item1Metadata, Item1Operator
+from models.item_reference import ItemReference
+from models.fabric_item import FabricItem
+from models.job_metadata import JobMetadata
+from fabric_api.models.job_invoke_type import JobInvokeType
+from fabric_api.models.item_job_instance_state import ItemJobInstanceState
+from fabric_api.models.job_instance_status import JobInstanceStatus
+from constants.workload_constants import WorkloadConstants
+from constants.environment_constants import EnvironmentConstants
+from constants.job_types import Item1JobType
+from constants.item1_field_names import Item1FieldNames as Fields
+from exceptions.exceptions import (
+ DoubledOperandsOverflowException,
+ AuthenticationUIRequiredException
+)
+from tests.test_helpers import TestHelpers
+from tests.test_fixtures import TestFixtures
+
+
+@pytest.mark.unit
+@pytest.mark.models
+class TestItem1ErrorHandling:
+ """Error handling and edge cases - comprehensive error coverage."""
+
+    def test_initialization_error_scenarios(self, mock_all_services):
+        """Test initialization with a None auth context."""
+        # Item1(None) may either raise or construct with a None context,
+        # depending on how the base class validates its arguments.
+        try:
+            item = Item1(None)
+            assert item.auth_context is None  # If it doesn't raise, verify state
+        except (TypeError, AttributeError, ValueError):
+            pass  # Raising on a None auth context is also acceptable
+
+ def test_metadata_access_errors(self, mock_auth_context, mock_all_services):
+ """Test metadata access error conditions."""
+ # Arrange
+ item = Item1(mock_auth_context)
+
+ # Test metadata property with None
+ item._metadata = None
+ with pytest.raises(ValueError, match="The item object must be initialized before use"):
+ _ = item.metadata
+
+ # Test metadata property with missing attribute
+ delattr(item, '_metadata')
+ with pytest.raises(AttributeError):
+ _ = item.metadata
+
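+    # The guard being exercised is assumed to look roughly like this
+    # (hypothetical sketch of the metadata property on the item base class):
+    #
+    #     @property
+    #     def metadata(self):
+    #         if self._metadata is None:
+    #             raise ValueError("The item object must be initialized before use")
+    #         return self._metadata
+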
+ @pytest.mark.parametrize("payload,expected_behavior", [
+ ({}, "creates_default_metadata"),
+ (None, "creates_default_metadata"),
+ ({"invalid": "data"}, "raises_error"),
+ ])
+ def test_payload_validation_edge_cases(self, mock_auth_context, mock_all_services, payload, expected_behavior):
+ """Test payload validation edge cases."""
+ # Arrange
+ item = Item1(mock_auth_context)
+
+ if expected_behavior == "creates_default_metadata":
+ # Act
+ item.set_definition(payload)
+
+ # Assert
+ assert isinstance(item._metadata, Item1Metadata)
+ assert item._metadata.operand1 == 0
+ assert item._metadata.operand2 == 0
+ assert item._metadata.operator == Item1Operator.UNDEFINED
+
+ elif expected_behavior == "raises_error":
+ # Act & Assert
+ with pytest.raises(ValueError, match="Invalid item payload"):
+ item.set_definition(payload)
+
+ @pytest.mark.parametrize("payload", [
+ (None),
+ ({})
+ ])
+ def test_update_definition_edge_cases(self, mock_auth_context, mock_all_services, payload):
+ """Test update_definition edge cases."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ original_metadata = Item1Metadata(operand1=10, operand2=20)
+ item._metadata = original_metadata
+
+        # Act - None and empty payloads should both return early
+        item.update_definition(payload)
+
+        # Assert - Metadata object unchanged
+        assert item._metadata is original_metadata
\ No newline at end of file
diff --git a/Backend/python/tests/unit/items/test_item1_file_path_resolution.py b/Backend/python/tests/unit/items/test_item1_file_path_resolution.py
new file mode 100644
index 0000000..c20d623
--- /dev/null
+++ b/Backend/python/tests/unit/items/test_item1_file_path_resolution.py
@@ -0,0 +1,136 @@
+"""Item1 FilePathResolution Tests
+
+Tests for Item1 filepathresolution functionality.
+"""
+
+import pytest
+import asyncio
+import os
+import random
+from unittest.mock import Mock, AsyncMock, patch, MagicMock, mock_open
+from uuid import UUID, uuid4
+from typing import Dict, Any
+
+from items.item1 import Item1
+from models.authentication_models import AuthorizationContext
+from models.item1_metadata import Item1Metadata, Item1Operator
+from models.item_reference import ItemReference
+from models.fabric_item import FabricItem
+from models.job_metadata import JobMetadata
+from fabric_api.models.job_invoke_type import JobInvokeType
+from fabric_api.models.item_job_instance_state import ItemJobInstanceState
+from fabric_api.models.job_instance_status import JobInstanceStatus
+from constants.workload_constants import WorkloadConstants
+from constants.environment_constants import EnvironmentConstants
+from constants.job_types import Item1JobType
+from constants.item1_field_names import Item1FieldNames as Fields
+from exceptions.exceptions import (
+ DoubledOperandsOverflowException,
+ AuthenticationUIRequiredException
+)
+from tests.test_helpers import TestHelpers
+from tests.test_fixtures import TestFixtures
+
+
+@pytest.mark.unit
+@pytest.mark.models
+class TestItem1FilePathResolution:
+ """File path resolution tests - comprehensive path generation coverage."""
+
+ @pytest.mark.parametrize("job_type,expected_extension", [
+ (Item1JobType.CALCULATE_AS_TEXT, ".txt"),
+ (Item1JobType.SCHEDULED_JOB, ".txt"),
+ (Item1JobType.LONG_RUNNING_CALCULATE_AS_TEXT, ".txt"),
+ (Item1JobType.CALCULATE_AS_PARQUET, ".parquet"),
+ ("UnknownJobType", ".txt") # Default fallback
+ ])
+ def test_get_calculation_result_file_path_job_types(self, mock_auth_context, mock_all_services, job_type, expected_extension):
+ """Test file path generation for different job types."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item.workspace_object_id = TestFixtures.WORKSPACE_ID
+ item.item_object_id = TestFixtures.ITEM_ID
+ item._metadata.use_onelake = True
+
+ job_instance_id = TestFixtures.JOB_INSTANCE_ID
+ job_metadata = JobMetadata(
+ job_type=job_type,
+ job_instance_id=job_instance_id,
+ use_onelake=True
+ )
+
+ # Mock OneLake service
+ mock_onelake = mock_all_services['OneLakeClientService']
+ expected_path = f"/workspace/{TestFixtures.WORKSPACE_ID}/item/{TestFixtures.ITEM_ID}/CalculationResult_{job_instance_id}{expected_extension}"
+ mock_onelake.get_onelake_file_path.return_value = expected_path
+
+ # Act
+ file_path = item._get_calculation_result_file_path(job_metadata)
+
+ # Assert
+ assert file_path == expected_path
+ mock_onelake.get_onelake_file_path.assert_called_once()
+ args = mock_onelake.get_onelake_file_path.call_args[0]
+ assert args[0] == TestFixtures.WORKSPACE_ID
+ assert args[1] == TestFixtures.ITEM_ID
+ assert expected_extension in args[2]
+
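+    # These cases assume the extension is chosen by job type with ".txt" as the
+    # default (hypothetical sketch of the lookup inside
+    # _get_calculation_result_file_path):
+    #
+    #     extension = {
+    #         Item1JobType.CALCULATE_AS_PARQUET: ".parquet",
+    #     }.get(job_metadata.job_type, ".txt")
+    #     file_name = f"CalculationResult_{job_metadata.job_instance_id}{extension}"
+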
+ def test_get_calculation_result_file_path_use_lakehouse(self, mock_auth_context, mock_all_services):
+ """Test file path generation using lakehouse storage."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item._metadata = Item1Metadata(
+ lakehouse=ItemReference(
+ workspace_id=str(TestFixtures.WORKSPACE_ID),
+ id=str(TestFixtures.ITEM_ID)
+ ),
+ use_onelake=False
+ )
+
+ job_instance_id = TestFixtures.JOB_INSTANCE_ID
+ job_metadata = JobMetadata(
+ job_type=Item1JobType.CALCULATE_AS_TEXT,
+ job_instance_id=job_instance_id,
+ use_onelake=False
+ )
+
+ # Mock OneLake service
+ mock_onelake = mock_all_services['OneLakeClientService']
+ expected_path = f"/lakehouse/{TestFixtures.WORKSPACE_ID}/{TestFixtures.ITEM_ID}/CalculationResult_{job_instance_id}.txt"
+ mock_onelake.get_onelake_file_path.return_value = expected_path
+
+ # Act
+ file_path = item._get_calculation_result_file_path(job_metadata)
+
+ # Assert
+ assert file_path == expected_path
+
+ def test_get_calculation_result_file_path_no_storage_error(self, mock_auth_context, mock_all_services):
+ """Test file path generation raises error when no valid storage."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item._metadata = Item1Metadata(use_onelake=False) # No lakehouse, no OneLake
+
+ job_instance_id = TestFixtures.JOB_INSTANCE_ID
+ job_metadata = JobMetadata(
+ job_type=Item1JobType.CALCULATE_AS_TEXT,
+ job_instance_id=job_instance_id,
+ use_onelake=False
+ )
+
+ # Act & Assert
+ with pytest.raises(ValueError, match="Cannot write to lakehouse or OneLake"):
+ item._get_calculation_result_file_path(job_metadata)
+
+ def test_get_calculation_result_file_path_missing_job_id_error(self, mock_auth_context, mock_all_services):
+ """Test file path generation raises error when job instance ID is missing."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item._metadata.use_onelake = True
+
+ # Job metadata without job_instance_id
+ job_metadata = {"job_type": Item1JobType.CALCULATE_AS_TEXT}
+
+ # Act & Assert
+ with pytest.raises(ValueError, match="job_instance_id is missing"):
+ item._get_calculation_result_file_path(job_metadata)
\ No newline at end of file
diff --git a/Backend/python/tests/unit/items/test_item1_job_execution.py b/Backend/python/tests/unit/items/test_item1_job_execution.py
new file mode 100644
index 0000000..c70765a
--- /dev/null
+++ b/Backend/python/tests/unit/items/test_item1_job_execution.py
@@ -0,0 +1,185 @@
+"""Item1 Job Execution Tests
+
+Tests for Item1 job execution pipeline including workflow coverage and edge cases.
+"""
+
+import pytest
+from unittest.mock import Mock, AsyncMock
+from datetime import datetime, timezone
+
+from items.item1 import Item1
+from models.authentication_models import AuthorizationContext
+from models.item1_metadata import Item1Metadata, Item1Operator
+from models.job_metadata import JobMetadata
+from fabric_api.models.job_invoke_type import JobInvokeType
+from constants.job_types import Item1JobType
+from tests.test_helpers import TestHelpers
+from tests.test_fixtures import TestFixtures
+
+
+@pytest.mark.unit
+@pytest.mark.models
+class TestItem1JobExecution:
+ """Job execution pipeline tests - comprehensive workflow coverage."""
+
+ @pytest.mark.asyncio
+ async def test_execute_job_instant_job_immediate_return(self, mock_auth_context, mock_all_services):
+ """Test that InstantJob returns immediately without processing."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item.tenant_object_id = TestFixtures.TENANT_ID
+ item.workspace_object_id = TestFixtures.WORKSPACE_ID
+ item.item_object_id = TestFixtures.ITEM_ID
+
+ job_instance_id = TestFixtures.JOB_INSTANCE_ID
+
+ # Act
+ await item.execute_job(
+ Item1JobType.INSTANT_JOB,
+ job_instance_id,
+ JobInvokeType.MANUAL,
+ {}
+ )
+
+ # Assert - No metadata store operations should happen for instant jobs
+ mock_store = mock_all_services['ItemMetadataStore']
+ mock_store.upsert_job.assert_not_called()
+
+ @pytest.mark.asyncio
+ async def test_execute_job_full_workflow_success(self, mock_auth_context, mock_all_services):
+ """Test complete job execution workflow for regular jobs."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item.tenant_object_id = TestFixtures.TENANT_ID
+ item.workspace_object_id = TestFixtures.WORKSPACE_ID
+ item.item_object_id = TestFixtures.ITEM_ID
+ item._metadata.operand1 = 10
+ item._metadata.operand2 = 5
+ item._metadata.operator = Item1Operator.ADD
+ item._metadata.use_onelake = True
+
+ job_instance_id = TestFixtures.JOB_INSTANCE_ID
+
+ # Mock services
+ mock_store = mock_all_services['ItemMetadataStore']
+ mock_onelake = mock_all_services['OneLakeClientService']
+ mock_auth = mock_all_services['AuthenticationService']
+
+ mock_auth.get_access_token_on_behalf_of.return_value = "mock_token"
+ mock_onelake.get_onelake_file_path.return_value = "/test/path/result.txt"
+ mock_store.load_job.return_value = JobMetadata(
+ job_type=Item1JobType.CALCULATE_AS_TEXT,
+ job_instance_id=job_instance_id,
+ use_onelake=True,
+ is_canceled=False
+ )
+
+ # Act
+ await item.execute_job(
+ Item1JobType.CALCULATE_AS_TEXT,
+ job_instance_id,
+ JobInvokeType.MANUAL,
+ {}
+ )
+
+ # Assert - Job metadata was created
+ mock_store.upsert_job.assert_called()
+ args = mock_store.upsert_job.call_args[0]
+ assert args[0] == TestFixtures.TENANT_ID
+ assert args[1] == TestFixtures.ITEM_ID
+ assert args[2] == str(job_instance_id)
+
+ # Assert - OneLake file was written
+ mock_onelake.write_to_onelake_file.assert_called_once()
+ write_args = mock_onelake.write_to_onelake_file.call_args[0]
+ assert write_args[0] == "mock_token"
+ assert write_args[1] == "/test/path/result.txt"
+ assert "op1 = 10, op2 = 5, operator = Add, result = 15" in write_args[2]
+
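+    # The workflow verified above is assumed to proceed roughly as follows
+    # (hypothetical sketch; the service attribute names are illustrative and
+    # the real pipeline lives in items/item1.py):
+    #
+    #     async def execute_job(self, job_type, job_instance_id, invoke_type, payload):
+    #         if job_type == Item1JobType.INSTANT_JOB:
+    #             return  # nothing to persist
+    #         await self._store.upsert_job(...)      # record the job
+    #         result = self._calculate_result(...)   # compute
+    #         job = await self._store.load_job(...)  # reload to check cancellation
+    #         if not job.is_canceled:
+    #             await self._onelake.write_to_onelake_file(token, path, result)
+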
+ @pytest.mark.asyncio
+ async def test_execute_job_cancellation_handling(self, mock_auth_context, mock_all_services):
+ """Test job execution with cancellation behavior.
+ """
+ # Arrange
+ item = Item1(mock_auth_context)
+ item.tenant_object_id = TestFixtures.TENANT_ID
+ item.workspace_object_id = TestFixtures.WORKSPACE_ID
+ item.item_object_id = TestFixtures.ITEM_ID
+ item._metadata.operand1 = 10
+ item._metadata.operand2 = 5
+ item._metadata.operator = Item1Operator.ADD
+ item._metadata.use_onelake = True
+
+ job_instance_id = TestFixtures.JOB_INSTANCE_ID
+
+ # Mock services
+ mock_store = mock_all_services['ItemMetadataStore']
+ mock_onelake = mock_all_services['OneLakeClientService']
+ mock_auth = mock_all_services['AuthenticationService']
+
+ mock_auth.get_access_token_on_behalf_of.return_value = "mock_token"
+ mock_onelake.get_onelake_file_path.return_value = "/test/path/result.txt"
+
+ # Set up the load_job mock to return canceled job on the reload
+ canceled_job = JobMetadata(
+ job_type=Item1JobType.CALCULATE_AS_TEXT,
+ job_instance_id=job_instance_id,
+ use_onelake=True,
+ canceled_time=datetime.now(timezone.utc)
+ )
+ mock_store.load_job.return_value = canceled_job
+
+ # Act
+ await item.execute_job(
+ Item1JobType.CALCULATE_AS_TEXT,
+ job_instance_id,
+ JobInvokeType.MANUAL,
+ {}
+ )
+
+ mock_store.upsert_job.assert_called() # Initial job metadata creation
+ mock_store.load_job.assert_called() # Job metadata reload
+ mock_onelake.write_to_onelake_file.assert_not_called()
+
+ @pytest.mark.asyncio
+ async def test_execute_job_missing_metadata_recreation(self, mock_auth_context, mock_all_services):
+ """Test job metadata recreation when missing."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item.tenant_object_id = TestFixtures.TENANT_ID
+ item.workspace_object_id = TestFixtures.WORKSPACE_ID
+ item.item_object_id = TestFixtures.ITEM_ID
+ item._metadata.operand1 = 10
+ item._metadata.operand2 = 5
+ item._metadata.operator = Item1Operator.ADD
+ item._metadata.use_onelake = True # Ensure valid storage
+
+ job_instance_id = TestFixtures.JOB_INSTANCE_ID
+
+ # Mock services
+ mock_store = mock_all_services['ItemMetadataStore']
+ mock_onelake = mock_all_services['OneLakeClientService']
+ mock_auth = mock_all_services['AuthenticationService']
+
+ mock_auth.get_access_token_on_behalf_of.return_value = "mock_token"
+ mock_onelake.get_onelake_file_path.return_value = "/test/path/result.txt"
+
+ # First load_job call raises FileNotFoundError, second succeeds
+ original_metadata = JobMetadata(
+ job_type=Item1JobType.CALCULATE_AS_TEXT,
+ job_instance_id=job_instance_id,
+ use_onelake=True, # Match item metadata
+ is_canceled=False
+ )
+ mock_store.load_job.side_effect = [FileNotFoundError(), original_metadata]
+
+ # Act
+ await item.execute_job(
+ Item1JobType.CALCULATE_AS_TEXT,
+ job_instance_id,
+ JobInvokeType.MANUAL,
+ {}
+ )
+
+ # Assert - upsert_job was called twice (initial creation + recreation)
+ assert mock_store.upsert_job.call_count == 2
\ No newline at end of file
diff --git a/Backend/python/tests/unit/items/test_item1_job_state_management.py b/Backend/python/tests/unit/items/test_item1_job_state_management.py
new file mode 100644
index 0000000..cc778ea
--- /dev/null
+++ b/Backend/python/tests/unit/items/test_item1_job_state_management.py
@@ -0,0 +1,185 @@
+"""Item1 JobStateManagement Tests
+
+Tests for Item1 jobstatemanagement functionality.
+"""
+
+import pytest
+import asyncio
+import os
+import random
+from unittest.mock import Mock, AsyncMock, patch, MagicMock, mock_open
+from uuid import UUID, uuid4
+from typing import Dict, Any
+from datetime import datetime, timezone
+
+from items.item1 import Item1
+from models.authentication_models import AuthorizationContext
+from models.item1_metadata import Item1Metadata, Item1Operator
+from models.item_reference import ItemReference
+from models.fabric_item import FabricItem
+from models.job_metadata import JobMetadata
+from fabric_api.models.job_invoke_type import JobInvokeType
+from fabric_api.models.item_job_instance_state import ItemJobInstanceState
+from fabric_api.models.job_instance_status import JobInstanceStatus
+from constants.workload_constants import WorkloadConstants
+from constants.environment_constants import EnvironmentConstants
+from constants.job_types import Item1JobType
+from constants.item1_field_names import Item1FieldNames as Fields
+from exceptions.exceptions import (
+ DoubledOperandsOverflowException,
+ AuthenticationUIRequiredException
+)
+from tests.test_helpers import TestHelpers
+from tests.test_fixtures import TestFixtures
+
+
+@pytest.mark.unit
+@pytest.mark.models
+class TestItem1JobStateManagement:
+ """Job state management tests - comprehensive state tracking coverage."""
+
+ @pytest.mark.asyncio
+ async def test_get_job_state_instant_job_completed(self, mock_auth_context, mock_all_services):
+ """Test that instant jobs immediately return COMPLETED status."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item.tenant_object_id = TestFixtures.TENANT_ID
+ item.workspace_object_id = TestFixtures.WORKSPACE_ID
+ item.item_object_id = TestFixtures.ITEM_ID
+
+ job_instance_id = TestFixtures.JOB_INSTANCE_ID
+
+ # Act
+ state = await item.get_job_state(Item1JobType.INSTANT_JOB, job_instance_id)
+
+ # Assert
+ assert state.status == JobInstanceStatus.COMPLETED
+
+ @pytest.mark.asyncio
+ async def test_get_job_state_missing_metadata_failed(self, mock_auth_context, mock_all_services):
+ """Test get_job_state returns FAILED when job metadata doesn't exist."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item.tenant_object_id = TestFixtures.TENANT_ID
+ item.workspace_object_id = TestFixtures.WORKSPACE_ID
+ item.item_object_id = TestFixtures.ITEM_ID
+
+ job_instance_id = TestFixtures.JOB_INSTANCE_ID
+
+ # Mock services
+ mock_store = mock_all_services['ItemMetadataStore']
+ mock_store.exists_job.return_value = False
+
+ # Act
+ state = await item.get_job_state(Item1JobType.CALCULATE_AS_TEXT, job_instance_id)
+
+ # Assert
+ assert state.status == JobInstanceStatus.FAILED
+
+ @pytest.mark.asyncio
+ async def test_get_job_state_canceled_job(self, mock_auth_context, mock_all_services):
+ """Test get_job_state behavior for canceled jobs.
+ """
+ # Arrange
+ item = Item1(mock_auth_context)
+ item.tenant_object_id = TestFixtures.TENANT_ID
+ item.workspace_object_id = TestFixtures.WORKSPACE_ID
+ item.item_object_id = TestFixtures.ITEM_ID
+
+ job_instance_id = TestFixtures.JOB_INSTANCE_ID
+
+ # Mock services
+ mock_store = mock_all_services['ItemMetadataStore']
+ mock_onelake = mock_all_services['OneLakeClientService']
+ mock_auth = mock_all_services['AuthenticationService']
+
+ mock_store.exists_job.return_value = True
+
+ # Create a canceled job metadata
+ canceled_job = JobMetadata(
+ job_type=Item1JobType.CALCULATE_AS_TEXT,
+ job_instance_id=job_instance_id,
+ use_onelake=True,
+ canceled_time=datetime.now(timezone.utc)
+ )
+ mock_store.load_job.return_value = canceled_job
+
+ # Mock the file existence check to return True (file exists)
+ mock_auth.get_access_token_on_behalf_of.return_value = "mock_token"
+ mock_onelake.get_onelake_file_path.return_value = "/test/path/result.txt"
+ mock_onelake.check_if_file_exists.return_value = True
+
+ # Act
+ state = await item.get_job_state(Item1JobType.CALCULATE_AS_TEXT, job_instance_id)
+
+ # Assert
+ assert state.status == JobInstanceStatus.CANCELLED
+
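+    # Taken together, the state tests in this class assume a decision ladder
+    # roughly like the following (hypothetical sketch of get_job_state; the
+    # service attribute names are illustrative):
+    #
+    #     async def get_job_state(self, job_type, job_instance_id):
+    #         if job_type == Item1JobType.INSTANT_JOB:
+    #             return ItemJobInstanceState(status=JobInstanceStatus.COMPLETED)
+    #         if not await self._store.exists_job(...):
+    #             return ItemJobInstanceState(status=JobInstanceStatus.FAILED)
+    #         job = await self._store.load_job(...)
+    #         if job.is_canceled:
+    #             return ItemJobInstanceState(status=JobInstanceStatus.CANCELLED)
+    #         file_exists = await self._onelake.check_if_file_exists(token, path)
+    #         status = JobInstanceStatus.COMPLETED if file_exists else JobInstanceStatus.INPROGRESS
+    #         return ItemJobInstanceState(status=status)
+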
+ @pytest.mark.asyncio
+ async def test_get_job_state_file_exists_completed(self, mock_auth_context, mock_all_services):
+ """Test get_job_state returns COMPLETED when result file exists."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item.tenant_object_id = TestFixtures.TENANT_ID
+ item.workspace_object_id = TestFixtures.WORKSPACE_ID
+ item.item_object_id = TestFixtures.ITEM_ID
+ item._metadata.use_onelake = True
+
+ job_instance_id = TestFixtures.JOB_INSTANCE_ID
+
+ # Mock services
+ mock_store = mock_all_services['ItemMetadataStore']
+ mock_onelake = mock_all_services['OneLakeClientService']
+ mock_auth = mock_all_services['AuthenticationService']
+
+ mock_store.exists_job.return_value = True
+ mock_store.load_job.return_value = JobMetadata(
+ job_type=Item1JobType.CALCULATE_AS_TEXT,
+ job_instance_id=job_instance_id,
+ use_onelake=True,
+ is_canceled=False
+ )
+ mock_auth.get_access_token_on_behalf_of.return_value = "mock_token"
+ mock_onelake.get_onelake_file_path.return_value = "/test/path/result.txt"
+ mock_onelake.check_if_file_exists.return_value = True # File exists
+
+ # Act
+ state = await item.get_job_state(Item1JobType.CALCULATE_AS_TEXT, job_instance_id)
+
+ # Assert
+ assert state.status == JobInstanceStatus.COMPLETED
+
+ @pytest.mark.asyncio
+ async def test_get_job_state_file_missing_in_progress(self, mock_auth_context, mock_all_services):
+ """Test get_job_state returns IN_PROGRESS when result file doesn't exist."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item.tenant_object_id = TestFixtures.TENANT_ID
+ item.workspace_object_id = TestFixtures.WORKSPACE_ID
+ item.item_object_id = TestFixtures.ITEM_ID
+ item._metadata.use_onelake = True
+
+ job_instance_id = TestFixtures.JOB_INSTANCE_ID
+
+ # Mock services
+ mock_store = mock_all_services['ItemMetadataStore']
+ mock_onelake = mock_all_services['OneLakeClientService']
+ mock_auth = mock_all_services['AuthenticationService']
+
+ mock_store.exists_job.return_value = True
+ mock_store.load_job.return_value = JobMetadata(
+ job_type=Item1JobType.CALCULATE_AS_TEXT,
+ job_instance_id=job_instance_id,
+ use_onelake=True,
+ is_canceled=False
+ )
+ mock_auth.get_access_token_on_behalf_of.return_value = "mock_token"
+ mock_onelake.get_onelake_file_path.return_value = "/test/path/result.txt"
+ mock_onelake.check_if_file_exists.return_value = False # File doesn't exist
+
+ # Act
+ state = await item.get_job_state(Item1JobType.CALCULATE_AS_TEXT, job_instance_id)
+
+ # Assert
+ assert state.status == JobInstanceStatus.INPROGRESS
\ No newline at end of file
diff --git a/Backend/python/tests/unit/items/test_item1_local_fallback.py b/Backend/python/tests/unit/items/test_item1_local_fallback.py
new file mode 100644
index 0000000..28c34a1
--- /dev/null
+++ b/Backend/python/tests/unit/items/test_item1_local_fallback.py
@@ -0,0 +1,81 @@
+"""Item1 LocalFallback Tests
+
+Tests for Item1 localfallback functionality.
+"""
+
+import pytest
+import asyncio
+import os
+import random
+from unittest.mock import Mock, AsyncMock, patch, MagicMock, mock_open
+from uuid import UUID, uuid4
+from typing import Dict, Any
+
+from items.item1 import Item1
+from models.authentication_models import AuthorizationContext
+from models.item1_metadata import Item1Metadata, Item1Operator
+from models.item_reference import ItemReference
+from models.fabric_item import FabricItem
+from models.job_metadata import JobMetadata
+from fabric_api.models.job_invoke_type import JobInvokeType
+from fabric_api.models.item_job_instance_state import ItemJobInstanceState
+from fabric_api.models.job_instance_status import JobInstanceStatus
+from constants.workload_constants import WorkloadConstants
+from constants.environment_constants import EnvironmentConstants
+from constants.job_types import Item1JobType
+from constants.item1_field_names import Item1FieldNames as Fields
+from exceptions.exceptions import (
+ DoubledOperandsOverflowException,
+ AuthenticationUIRequiredException
+)
+from tests.test_helpers import TestHelpers
+from tests.test_fixtures import TestFixtures
+
+
+@pytest.mark.unit
+@pytest.mark.models
+class TestItem1LocalFallback:
+ """Local fallback tests - comprehensive local storage coverage."""
+
+ def test_save_result_locally_successful(self, mock_auth_context, mock_all_services):
+ """Test successful local result saving."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ job_instance_id = str(TestFixtures.JOB_INSTANCE_ID)
+ result = "op1 = 10, op2 = 5, operator = Add, result = 15"
+
+ with patch('os.makedirs') as mock_makedirs, \
+ patch('builtins.open', mock_open()) as mock_file, \
+ patch('os.getcwd', return_value="/test/cwd"):
+
+ # Act
+ item._save_result_locally(job_instance_id, result)
+
+            # Assert - Directory created (os.path.join keeps the check
+            # platform-independent; os is already imported at module level)
+            expected_path = os.path.join("/test/cwd", "results")
+ mock_makedirs.assert_called_once_with(expected_path, exist_ok=True)
+
+ # Assert - File written
+ expected_file_path = os.path.join("/test/cwd", "results", f"CalculationResult_{job_instance_id}.txt")
+ mock_file.assert_called_once_with(expected_file_path, "w")
+ mock_file().write.assert_called_once_with(result)
+
+ # Assert - Metadata updated
+ assert item._metadata.last_calculation_result_location == expected_file_path
+
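+    # The fallback is assumed to behave roughly like this (hypothetical sketch;
+    # note the OSError swallow exercised by the next test):
+    #
+    #     def _save_result_locally(self, job_instance_id, result):
+    #         try:
+    #             directory = os.path.join(os.getcwd(), "results")
+    #             os.makedirs(directory, exist_ok=True)
+    #             file_path = os.path.join(directory, f"CalculationResult_{job_instance_id}.txt")
+    #             with open(file_path, "w") as f:
+    #                 f.write(result)
+    #             self._metadata.last_calculation_result_location = file_path
+    #         except OSError:
+    #             pass  # local persistence is best-effort
+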
+ def test_save_result_locally_directory_creation_failure(self, mock_auth_context, mock_all_services):
+ """Test local save handles directory creation failure gracefully."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ job_instance_id = str(TestFixtures.JOB_INSTANCE_ID)
+ result = "test result"
+
+ with patch('os.makedirs', side_effect=OSError("Permission denied")) as mock_makedirs, \
+ patch('os.getcwd', return_value="/test/cwd"):
+
+ # Act - Should not raise exception
+ item._save_result_locally(job_instance_id, result)
+
+ # Assert - Attempted directory creation
+ mock_makedirs.assert_called_once()
\ No newline at end of file
diff --git a/Backend/python/tests/unit/items/test_item1_metadata_operations.py b/Backend/python/tests/unit/items/test_item1_metadata_operations.py
new file mode 100644
index 0000000..befdb82
--- /dev/null
+++ b/Backend/python/tests/unit/items/test_item1_metadata_operations.py
@@ -0,0 +1,178 @@
+"""Item1 MetadataOperations Tests
+
+Tests for Item1 metadataoperations functionality.
+"""
+
+import pytest
+import asyncio
+import os
+import random
+from unittest.mock import Mock, AsyncMock, patch, MagicMock, mock_open
+from uuid import UUID, uuid4
+from typing import Dict, Any
+
+from items.item1 import Item1
+from models.authentication_models import AuthorizationContext
+from models.item1_metadata import Item1Metadata, Item1Operator
+from models.item_reference import ItemReference
+from models.fabric_item import FabricItem
+from models.job_metadata import JobMetadata
+from fabric_api.models.job_invoke_type import JobInvokeType
+from fabric_api.models.item_job_instance_state import ItemJobInstanceState
+from fabric_api.models.job_instance_status import JobInstanceStatus
+from constants.workload_constants import WorkloadConstants
+from constants.environment_constants import EnvironmentConstants
+from constants.job_types import Item1JobType
+from constants.item1_field_names import Item1FieldNames as Fields
+from exceptions.exceptions import (
+ DoubledOperandsOverflowException,
+ AuthenticationUIRequiredException
+)
+from tests.test_helpers import TestHelpers
+from tests.test_fixtures import TestFixtures
+
+
+@pytest.mark.unit
+@pytest.mark.models
+class TestItem1MetadataOperations:
+ """Metadata operations tests - comprehensive set/update/get operations."""
+
+ @pytest.mark.parametrize("test_case", [
+ {
+ "name": "valid_creation_payload",
+ "payload": {
+ Fields.PAYLOAD_METADATA: {
+ Fields.OPERAND1_FIELD: 15,
+ Fields.OPERAND2_FIELD: 25,
+ Fields.OPERATOR_FIELD: "Add",
+ Fields.LAKEHOUSE_FIELD: {
+ Fields.LAKEHOUSE_ID_FIELD: str(TestFixtures.ITEM_ID),
+ Fields.LAKEHOUSE_WORKSPACE_ID_FIELD: str(TestFixtures.WORKSPACE_ID)
+ },
+ Fields.USE_ONELAKE_FIELD: False
+ }
+ },
+ "expected_operand1": 15,
+ "expected_operand2": 25,
+ "expected_operator": Item1Operator.ADD,
+ "should_raise": False
+ },
+ {
+ "name": "onelake_only_payload",
+ "payload": {
+ Fields.PAYLOAD_METADATA: {
+ Fields.OPERAND1_FIELD: 50,
+ Fields.OPERAND2_FIELD: 60,
+ Fields.OPERATOR_FIELD: "Divide",
+ Fields.USE_ONELAKE_FIELD: True
+ }
+ },
+ "expected_operand1": 50,
+ "expected_operand2": 60,
+ "expected_operator": Item1Operator.DIVIDE,
+ "should_raise": False
+ },
+ {
+ "name": "missing_metadata_field",
+ "payload": {"some_other_field": "value"},
+ "should_raise": True,
+ "error_message": "Invalid item payload"
+ },
+ {
+ "name": "missing_lakehouse_and_onelake_false",
+ "payload": {
+ Fields.PAYLOAD_METADATA: {
+ Fields.OPERAND1_FIELD: 10,
+ Fields.OPERAND2_FIELD: 20,
+ Fields.OPERATOR_FIELD: "Add",
+ Fields.USE_ONELAKE_FIELD: False
+ }
+ },
+ "should_raise": True,
+ "error_message": "Missing Lakehouse reference"
+ }
+ ])
+ def test_set_definition_scenarios(self, mock_auth_context, mock_all_services, test_case):
+ """Test set_definition with various scenarios."""
+ # Arrange
+ item = Item1(mock_auth_context)
+
+ if test_case.get("should_raise", False):
+ # Act & Assert
+ with pytest.raises(ValueError, match=test_case["error_message"]):
+ item.set_definition(test_case["payload"])
+ else:
+ # Act
+ item.set_definition(test_case["payload"])
+
+ # Assert
+ assert item._metadata.operand1 == test_case["expected_operand1"]
+ assert item._metadata.operand2 == test_case["expected_operand2"]
+ assert item._metadata.operator == test_case["expected_operator"]
+
+ def test_update_definition_preserves_last_result(self, mock_auth_context, mock_all_services):
+ """Test that update_definition preserves last_calculation_result_location."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item._metadata = Item1Metadata(
+ operand1=10,
+ operand2=20,
+ operator=Item1Operator.ADD,
+ last_calculation_result_location="/previous/result.txt"
+ )
+
+ payload = {
+ Fields.PAYLOAD_METADATA: {
+ Fields.OPERAND1_FIELD: 30,
+ Fields.OPERAND2_FIELD: 40,
+ Fields.OPERATOR_FIELD: "Multiply",
+ Fields.USE_ONELAKE_FIELD: True
+ }
+ }
+
+ # Act
+ item.update_definition(payload)
+
+ # Assert - New values set
+ assert item._metadata.operand1 == 30
+ assert item._metadata.operand2 == 40
+ assert item._metadata.operator == Item1Operator.MULTIPLY
+
+ # Assert - Last result location preserved
+ assert item._metadata.last_calculation_result_location == "/previous/result.txt"
+
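+    # The clone semantics verified below assume get/set work on copies rather
+    # than shared references (hypothetical sketch, e.g. via pydantic's
+    # model_copy; the real mechanism may differ):
+    #
+    #     def get_type_specific_metadata(self):
+    #         return self._metadata.model_copy(deep=True)
+    #
+    #     def set_type_specific_metadata(self, metadata):
+    #         self._metadata = metadata.model_copy(deep=True)
+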
+ def test_metadata_cloning_operations(self, mock_auth_context, mock_all_services):
+ """Test metadata cloning get/set operations."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ original_metadata = Item1Metadata(
+ operand1=100,
+ operand2=200,
+ operator=Item1Operator.DIVIDE,
+ use_onelake=True,
+ last_calculation_result_location="/path/to/result"
+ )
+ item._metadata = original_metadata
+
+ # Act - Get cloned metadata
+ cloned_metadata = item.get_type_specific_metadata()
+
+ # Assert - Clone independence
+ assert cloned_metadata is not original_metadata
+ assert cloned_metadata.operand1 == 100
+ assert cloned_metadata.operand2 == 200
+
+ # Act - Modify clone
+ cloned_metadata.operand1 = 999
+
+ # Assert - Original unchanged
+ assert item._metadata.operand1 == 100
+
+ # Act - Set new metadata
+ new_metadata = Item1Metadata(operand1=777, operand2=888, operator=Item1Operator.SUBTRACT)
+ item.set_type_specific_metadata(new_metadata)
+
+ # Assert - Metadata was set as clone
+ assert item._metadata is not new_metadata
+ assert item._metadata.operand1 == 777
+ assert item._metadata.operand2 == 888
\ No newline at end of file
diff --git a/Backend/python/tests/unit/items/test_item1_payload_generation.py b/Backend/python/tests/unit/items/test_item1_payload_generation.py
new file mode 100644
index 0000000..222e207
--- /dev/null
+++ b/Backend/python/tests/unit/items/test_item1_payload_generation.py
@@ -0,0 +1,141 @@
+"""Item1 PayloadGeneration Tests
+
+Tests for Item1 payloadgeneration functionality.
+"""
+
+import pytest
+import asyncio
+import os
+import random
+from unittest.mock import Mock, AsyncMock, patch, MagicMock, mock_open
+from uuid import UUID, uuid4
+from typing import Dict, Any
+
+from items.item1 import Item1
+from models.authentication_models import AuthorizationContext
+from models.item1_metadata import Item1Metadata, Item1Operator
+from models.item_reference import ItemReference
+from models.fabric_item import FabricItem
+from models.job_metadata import JobMetadata
+from fabric_api.models.job_invoke_type import JobInvokeType
+from fabric_api.models.item_job_instance_state import ItemJobInstanceState
+from fabric_api.models.job_instance_status import JobInstanceStatus
+from constants.workload_constants import WorkloadConstants
+from constants.environment_constants import EnvironmentConstants
+from constants.job_types import Item1JobType
+from constants.item1_field_names import Item1FieldNames as Fields
+from exceptions.exceptions import (
+ DoubledOperandsOverflowException,
+ AuthenticationUIRequiredException
+)
+from tests.test_helpers import TestHelpers
+from tests.test_fixtures import TestFixtures
+
+
+@pytest.mark.unit
+@pytest.mark.models
+class TestItem1PayloadGeneration:
+ """Item payload generation tests - comprehensive payload handling coverage."""
+
+ @pytest.mark.asyncio
+ async def test_get_item_payload_with_valid_lakehouse(self, mock_auth_context, mock_all_services):
+ """Test get_item_payload with successful lakehouse retrieval."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item._metadata = Item1Metadata(
+ operand1=10,
+ operand2=20,
+ operator=Item1Operator.ADD,
+ lakehouse=ItemReference(
+ workspace_id=str(TestFixtures.WORKSPACE_ID),
+ id=str(TestFixtures.ITEM_ID)
+ ),
+ use_onelake=False
+ )
+
+ # Mock services
+ mock_auth = mock_all_services['AuthenticationService']
+ mock_lakehouse = mock_all_services['LakehouseClientService']
+
+ mock_auth.get_access_token_on_behalf_of.return_value = "mock_token"
+
+        # Create a proper FabricItem (imported at module level)
+ mock_lakehouse_item = FabricItem(
+ id=str(TestFixtures.ITEM_ID),
+ workspace_id=str(TestFixtures.WORKSPACE_ID),
+ type="Lakehouse",
+ display_name="Test Lakehouse"
+ )
+ mock_lakehouse.get_fabric_lakehouse.return_value = mock_lakehouse_item
+
+ # Act
+ payload = await item.get_item_payload()
+
+ # Assert
+ assert Fields.PAYLOAD_METADATA in payload
+ metadata = payload[Fields.PAYLOAD_METADATA]
+ assert metadata.operand1 == 10
+ assert metadata.operand2 == 20
+ assert str(metadata.operator) == "Add"
+
+ # Verify authentication and lakehouse calls
+ mock_auth.get_access_token_on_behalf_of.assert_called_once()
+ mock_lakehouse.get_fabric_lakehouse.assert_called_once()
+
+ @pytest.mark.asyncio
+ async def test_get_item_payload_without_lakehouse(self, mock_auth_context, mock_all_services):
+ """Test get_item_payload when no valid lakehouse reference."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item._metadata = Item1Metadata(
+ operand1=10,
+ operand2=20,
+ operator=Item1Operator.ADD,
+ use_onelake=True # No lakehouse needed
+ )
+
+ # Act
+ payload = await item.get_item_payload()
+
+ # Assert
+ assert Fields.PAYLOAD_METADATA in payload
+ metadata = payload[Fields.PAYLOAD_METADATA]
+ assert metadata.operand1 == 10
+ assert metadata.operand2 == 20
+
+ # Verify no lakehouse calls were made
+ mock_lakehouse = mock_all_services['LakehouseClientService']
+ mock_lakehouse.get_fabric_lakehouse.assert_not_called()
+
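+    # get_item_payload is assumed to enrich the metadata with lakehouse details
+    # only when a valid reference exists, roughly as below (hypothetical sketch;
+    # self._auth and _build_client_metadata are illustrative names):
+    #
+    #     async def get_item_payload(self):
+    #         lakehouse_item = None
+    #         if self.is_valid_lakehouse():
+    #             token = await self._auth.get_access_token_on_behalf_of(...)
+    #             try:
+    #                 lakehouse_item = await self._lakehouse_client_service.get_fabric_lakehouse(...)
+    #             except Exception:
+    #                 pass  # fall back to a payload without lakehouse details
+    #         return {Fields.PAYLOAD_METADATA: self._build_client_metadata(lakehouse_item)}
+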
+ @pytest.mark.asyncio
+ async def test_get_item_payload_lakehouse_access_failure(self, mock_auth_context, mock_all_services):
+ """Test get_item_payload when lakehouse access fails."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item._metadata = Item1Metadata(
+ operand1=10,
+ operand2=20,
+ operator=Item1Operator.ADD,
+ lakehouse=ItemReference(
+ workspace_id=str(TestFixtures.WORKSPACE_ID),
+ id=str(TestFixtures.ITEM_ID)
+ ),
+ use_onelake=False
+ )
+
+ # Mock services
+ mock_auth = mock_all_services['AuthenticationService']
+ mock_lakehouse = mock_all_services['LakehouseClientService']
+
+ mock_auth.get_access_token_on_behalf_of.return_value = "mock_token"
+ mock_lakehouse.get_fabric_lakehouse.side_effect = Exception("Lakehouse access failed")
+
+ # Act
+ payload = await item.get_item_payload()
+
+ # Assert - Should still return payload with None lakehouse
+ assert Fields.PAYLOAD_METADATA in payload
+ metadata = payload[Fields.PAYLOAD_METADATA]
+ assert metadata.operand1 == 10
+ assert metadata.operand2 == 20
\ No newline at end of file
diff --git a/Backend/python/tests/unit/items/test_item1_result_retrieval.py b/Backend/python/tests/unit/items/test_item1_result_retrieval.py
new file mode 100644
index 0000000..e46fe00
--- /dev/null
+++ b/Backend/python/tests/unit/items/test_item1_result_retrieval.py
@@ -0,0 +1,122 @@
+"""Item1 ResultRetrieval Tests
+
+Tests for Item1 resultretrieval functionality.
+"""
+
+import pytest
+import asyncio
+import os
+import random
+from unittest.mock import Mock, AsyncMock, patch, MagicMock, mock_open
+from uuid import UUID, uuid4
+from typing import Dict, Any
+
+from items.item1 import Item1
+from models.authentication_models import AuthorizationContext
+from models.item1_metadata import Item1Metadata, Item1Operator
+from models.item_reference import ItemReference
+from models.fabric_item import FabricItem
+from models.job_metadata import JobMetadata
+from fabric_api.models.job_invoke_type import JobInvokeType
+from fabric_api.models.item_job_instance_state import ItemJobInstanceState
+from fabric_api.models.job_instance_status import JobInstanceStatus
+from constants.workload_constants import WorkloadConstants
+from constants.environment_constants import EnvironmentConstants
+from constants.job_types import Item1JobType
+from constants.item1_field_names import Item1FieldNames as Fields
+from exceptions.exceptions import (
+ DoubledOperandsOverflowException,
+ AuthenticationUIRequiredException
+)
+from tests.test_helpers import TestHelpers
+from tests.test_fixtures import TestFixtures
+
+
+@pytest.mark.unit
+@pytest.mark.models
+class TestItem1ResultRetrieval:
+ """Result retrieval tests - comprehensive result access coverage."""
+
+ @pytest.mark.asyncio
+ async def test_get_last_result_successful_retrieval(self, mock_auth_context, mock_all_services):
+ """Test successful result retrieval from OneLake."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item._metadata.last_calculation_result_location = "/test/path/result.txt"
+
+ # Mock services
+ mock_auth = mock_all_services['AuthenticationService']
+ mock_onelake = mock_all_services['OneLakeClientService']
+
+ mock_auth.get_access_token_on_behalf_of.return_value = "mock_token"
+ mock_onelake.get_onelake_file.return_value = "op1 = 10, op2 = 5, operator = Add, result = 15"
+
+ # Act
+ result = await item.get_last_result()
+
+ # Assert
+ assert result == "op1 = 10, op2 = 5, operator = Add, result = 15"
+ mock_auth.get_access_token_on_behalf_of.assert_called_once()
+ mock_onelake.get_onelake_file.assert_called_once_with("mock_token", "/test/path/result.txt")
+
+ @pytest.mark.asyncio
+ async def test_get_last_result_authentication_ui_required(self, mock_auth_context, mock_all_services):
+ """Test get_last_result re-raises AuthenticationUIRequiredException."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item._metadata.last_calculation_result_location = "/test/path/result.txt"
+
+ # Mock services
+ mock_auth = mock_all_services['AuthenticationService']
+ mock_onelake = mock_all_services['OneLakeClientService']
+
+ mock_auth.get_access_token_on_behalf_of.return_value = "mock_token"
+ mock_onelake.get_onelake_file.side_effect = AuthenticationUIRequiredException("Consent required")
+
+ # Act & Assert
+ with pytest.raises(AuthenticationUIRequiredException):
+ await item.get_last_result()
+
+ @pytest.mark.asyncio
+ async def test_get_last_result_file_not_found(self, mock_auth_context, mock_all_services):
+ """Test get_last_result returns empty string when file not found."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item._metadata.last_calculation_result_location = "/test/path/nonexistent.txt"
+
+ # Mock services
+ mock_auth = mock_all_services['AuthenticationService']
+ mock_onelake = mock_all_services['OneLakeClientService']
+
+ mock_auth.get_access_token_on_behalf_of.return_value = "mock_token"
+ mock_onelake.get_onelake_file.side_effect = FileNotFoundError("File not found")
+
+ # Act
+ result = await item.get_last_result()
+
+ # Assert
+ assert result == ""
+
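+    # The retrieval path is assumed to guard as follows (hypothetical sketch;
+    # the service attribute names are illustrative):
+    #
+    #     async def get_last_result(self):
+    #         location = self._metadata.last_calculation_result_location
+    #         if not location or not location.strip():
+    #             return ""
+    #         token = await self._auth.get_access_token_on_behalf_of(...)
+    #         try:
+    #             return await self._onelake.get_onelake_file(token, location)
+    #         except AuthenticationUIRequiredException:
+    #             raise  # surfaced so the frontend can prompt for consent
+    #         except FileNotFoundError:
+    #             return ""
+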
+ @pytest.mark.parametrize("result_location", [
+ "",
+ None,
+ " "
+ ])
+ @pytest.mark.asyncio
+ async def test_get_last_result_empty_location(self, mock_auth_context, mock_all_services, result_location):
+ """Test get_last_result returns empty string for invalid locations."""
+ # Arrange
+ item = Item1(mock_auth_context)
+ item._metadata.last_calculation_result_location = result_location
+
+ # Act
+ result = await item.get_last_result()
+
+ # Assert
+ assert result == ""
+
+ # Verify no service calls were made
+ mock_auth = mock_all_services['AuthenticationService']
+ mock_onelake = mock_all_services['OneLakeClientService']
+ mock_auth.get_access_token_on_behalf_of.assert_not_called()
+ mock_onelake.get_onelake_file.assert_not_called()
diff --git a/Backend/python/tests/unit/services/authentication/__init__.py b/Backend/python/tests/unit/services/authentication/__init__.py
new file mode 100644
index 0000000..8f89297
--- /dev/null
+++ b/Backend/python/tests/unit/services/authentication/__init__.py
@@ -0,0 +1 @@
+# Authentication service tests package
\ No newline at end of file
diff --git a/Backend/python/tests/unit/services/authentication/test_authentication_core.py b/Backend/python/tests/unit/services/authentication/test_authentication_core.py
new file mode 100644
index 0000000..78eeeb7
--- /dev/null
+++ b/Backend/python/tests/unit/services/authentication/test_authentication_core.py
@@ -0,0 +1,381 @@
+"""
+Core unit tests for AuthenticationService - consolidated essential tests.
+"""
+
+import pytest
+from unittest.mock import Mock, AsyncMock, patch, MagicMock
+from jose import jwt, JWTError
+from jose.exceptions import JWTClaimsError, ExpiredSignatureError
+
+from services.authentication import AuthenticationService, get_authentication_service
+from services.open_id_connect_configuration import OpenIdConnectConfiguration
+from services.configuration_service import ConfigurationService
+from models.authentication_models import Claim, AuthorizationContext, SubjectAndAppToken, TokenVersion
+from exceptions.exceptions import AuthenticationException, AuthenticationUIRequiredException
+from constants.environment_constants import EnvironmentConstants
+from constants.workload_scopes import WorkloadScopes
+
+
+@pytest.mark.unit
+@pytest.mark.services
+class TestAuthenticationServiceInitialization:
+ """Test AuthenticationService initialization and configuration."""
+
+ def test_init_with_valid_configuration(self, auth_fixtures):
+ """Test successful initialization with valid configuration."""
+ mock_openid_manager, mock_config_service = auth_fixtures.get_basic_mocks()
+
+ with patch("services.authentication.get_configuration_service", return_value=mock_config_service):
+ with patch("services.authentication.msal") as mock_msal:
+ mock_app = Mock()
+ mock_msal.ConfidentialClientApplication.return_value = mock_app
+
+ service = AuthenticationService(openid_manager=mock_openid_manager)
+
+ # Verify initialization
+ assert service.openid_manager == mock_openid_manager
+ assert service.publisher_tenant_id == "publisher-tenant-id"
+ assert service.client_id == "test-client-id"
+
+ # Verify MSAL app creation
+ mock_msal.ConfidentialClientApplication.assert_called_once()
+
+
+@pytest.mark.unit
+@pytest.mark.services
+class TestControlPlaneAuthentication:
+ """Test control plane authentication flow."""
+
+ @pytest.mark.asyncio
+ async def test_authenticate_control_plane_success(self, auth_fixtures):
+ """Test successful control plane authentication."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Create valid tokens with matching app IDs
+ subject_token = auth_fixtures.create_mock_jwt_token(
+ scopes="FabricWorkloadControl",
+ app_id=EnvironmentConstants.FABRIC_BACKEND_APP_ID
+ )
+ app_token = auth_fixtures.create_mock_jwt_token(
+ id_typ="app",
+ app_id=EnvironmentConstants.FABRIC_BACKEND_APP_ID,
+ tenant_id="publisher-tenant-id"
+ )
+ auth_header = SubjectAndAppToken.generate_authorization_header_value(subject_token, app_token)
+
+ # Mock validation methods
+ subject_claims = auth_fixtures.create_subject_claims()
+ app_claims = auth_fixtures.create_app_claims()
+
+ with patch.object(service, '_validate_app_token', return_value=app_claims):
+ with patch.object(service, '_validate_subject_token', return_value=subject_claims):
+ result = await service.authenticate_control_plane_call(
+ auth_header=auth_header,
+ tenant_id="test-tenant-id"
+ )
+
+ assert isinstance(result, AuthorizationContext)
+ assert result.original_subject_token == subject_token
+ assert result.tenant_object_id == "test-tenant-id"
+
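+    # For context: SubjectAndAppToken.generate_authorization_header_value is
+    # assumed to emit the dual-token scheme used by Fabric workloads, roughly:
+    #
+    #     SubjectAndAppToken1.0 subjectToken="<subject JWT>", appToken="<app JWT>"
+    #
+    # (format shown for orientation only; the exact string lives in
+    # models/authentication_models.py)
+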
+ @pytest.mark.asyncio
+ async def test_authenticate_control_plane_missing_auth_header(self, auth_fixtures):
+ """Test control plane authentication with missing authorization header."""
+ service = auth_fixtures.get_authentication_service()
+
+ with pytest.raises(AuthenticationException, match="Missing or invalid Authorization header"):
+ await service.authenticate_control_plane_call(
+ auth_header=None,
+ tenant_id="test-tenant-id"
+ )
+
+ @pytest.mark.asyncio
+ async def test_authenticate_control_plane_app_only_mode(self, auth_fixtures):
+ """Test control plane authentication in app-only mode."""
+ service = auth_fixtures.get_authentication_service()
+ app_token = auth_fixtures.create_mock_jwt_token(
+ id_typ="app",
+ app_id=EnvironmentConstants.FABRIC_BACKEND_APP_ID,
+ tenant_id="publisher-tenant-id"
+ )
+ auth_header = SubjectAndAppToken.generate_authorization_header_value(None, app_token)
+
+ app_claims = auth_fixtures.create_app_claims()
+
+ with patch.object(service, '_validate_app_token', return_value=app_claims):
+ result = await service.authenticate_control_plane_call(
+ auth_header=auth_header,
+ tenant_id="test-tenant-id",
+ require_subject_token=False
+ )
+
+ assert result.original_subject_token is None
+ assert not result.has_subject_context
+
+
+@pytest.mark.unit
+@pytest.mark.services
+class TestDataPlaneAuthentication:
+ """Test data plane authentication flow."""
+
+ @pytest.mark.asyncio
+ async def test_authenticate_data_plane_success(self, auth_fixtures):
+ """Test successful data plane authentication with Bearer token."""
+ service = auth_fixtures.get_authentication_service()
+ bearer_token = auth_fixtures.create_mock_jwt_token(scopes="Item1.ReadWrite.All")
+ auth_header = f"Bearer {bearer_token}"
+
+ with patch.object(service, '_authenticate_bearer') as mock_auth_bearer:
+ mock_auth_bearer.return_value = AuthorizationContext(
+ original_subject_token=bearer_token,
+ tenant_object_id="test-tenant-id"
+ )
+
+ result = await service.authenticate_data_plane_call(
+ auth_header=auth_header,
+ allowed_scopes=["Item1.ReadWrite.All"]
+ )
+
+ assert isinstance(result, AuthorizationContext)
+ mock_auth_bearer.assert_called_once_with(bearer_token, ["Item1.ReadWrite.All"])
+
+ @pytest.mark.asyncio
+ async def test_authenticate_data_plane_invalid_bearer(self, auth_fixtures):
+ """Test data plane authentication with invalid Bearer token format."""
+ service = auth_fixtures.get_authentication_service()
+
+ with pytest.raises(AuthenticationException, match="Missing or invalid Authorization header"):
+ await service.authenticate_data_plane_call(
+ auth_header="Basic invalid-auth", # Not Bearer
+ allowed_scopes=["Item1.ReadWrite.All"]
+ )
+
+
+@pytest.mark.unit
+@pytest.mark.services
+class TestTokenValidation:
+ """Test core token validation methods."""
+
+ @pytest.mark.asyncio
+ async def test_validate_aad_token_common_success(self, auth_fixtures):
+ """Test successful AAD token validation."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Create a realistic payload with all required claims
+ payload = auth_fixtures.create_jwt_payload(
+ tenant_id="test-tenant",
+ token_version="2.0"
+ )
+ token = auth_fixtures.create_mock_jwt_token(payload=payload)
+
+ # Mock OpenID configuration with matching key
+ mock_config = Mock(spec=OpenIdConnectConfiguration)
+ mock_config.issuer_configuration = "https://login.microsoftonline.com/{tenantid}/v2.0"
+ mock_config.signing_keys = [{"kid": "test-key-id", "kty": "RSA"}]
+
+ # Mock JWT library calls with proper claims
+ with patch('services.authentication.jwt.get_unverified_header', return_value={"kid": "test-key-id"}):
+ with patch('services.authentication.jwt.get_unverified_claims', return_value=payload):
+ with patch('services.authentication.jwt.decode', return_value=payload):
+ with patch.object(service.openid_manager, 'get_configuration_async', return_value=mock_config):
+ result = await service._validate_aad_token_common(token, False, None)
+
+ assert isinstance(result, list)
+ assert all(isinstance(claim, Claim) for claim in result)
+ # Verify essential claims are present
+ claim_types = [claim.type for claim in result]
+ assert "tid" in claim_types
+ assert "ver" in claim_types
+
+ @pytest.mark.asyncio
+ async def test_validate_aad_token_expired(self, auth_fixtures):
+ """Test token validation with expired token."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Create expired token with proper claims structure
+ payload = auth_fixtures.create_jwt_payload(
+ tenant_id="test-tenant",
+ token_version="2.0",
+ exp_offset_minutes=-60 # Expired
+ )
+ expired_token = auth_fixtures.create_mock_jwt_token(payload=payload)
+
+ # Mock OpenID configuration with matching key
+ mock_config = Mock(spec=OpenIdConnectConfiguration)
+ mock_config.issuer_configuration = "https://login.microsoftonline.com/{tenantid}/v2.0"
+ mock_config.signing_keys = [{"kid": "test-key-id", "kty": "RSA"}]
+
+ with patch('services.authentication.jwt.get_unverified_header', return_value={"kid": "test-key-id"}):
+ with patch('services.authentication.jwt.get_unverified_claims', return_value=payload):
+ with patch('services.authentication.jwt.decode', side_effect=ExpiredSignatureError("Token expired")):
+ with patch.object(service.openid_manager, 'get_configuration_async', return_value=mock_config):
+ with pytest.raises(AuthenticationException, match="Token has expired"):
+ await service._validate_aad_token_common(expired_token, False, None)
+
+ def test_validate_claim_exists_success(self, auth_fixtures):
+ """Test successful claim existence validation."""
+ service = auth_fixtures.get_authentication_service()
+ claims = [Claim(type="tid", value="test-tenant-id")]
+
+ result = service._validate_claim_exists(claims, "tid", "Tenant ID required")
+ assert result == "test-tenant-id"
+
+ def test_validate_claim_exists_missing(self, auth_fixtures):
+ """Test claim validation with missing claim."""
+ service = auth_fixtures.get_authentication_service()
+ claims = []
+
+ with pytest.raises(AuthenticationException, match="Missing claim tid"):
+ service._validate_claim_exists(claims, "tid", "Tenant ID required")
+
+ def test_validate_any_scope_success(self, auth_fixtures):
+ """Test successful scope validation."""
+ service = auth_fixtures.get_authentication_service()
+ claims = [Claim(type="scp", value="FabricWorkloadControl other-scope")]
+
+ # Should not raise exception
+ service._validate_any_scope(claims, ["FabricWorkloadControl"])
+
+ def test_validate_any_scope_failure(self, auth_fixtures):
+ """Test scope validation failure."""
+ service = auth_fixtures.get_authentication_service()
+ claims = [Claim(type="scp", value="wrong-scope")]
+
+ with pytest.raises(AuthenticationException, match="missing required scopes"):
+ service._validate_any_scope(claims, ["FabricWorkloadControl"])
+
+
+@pytest.mark.unit
+@pytest.mark.services
+class TestOBOFlow:
+ """Test On-Behalf-Of flow."""
+
+ @pytest.mark.asyncio
+ async def test_get_access_token_on_behalf_of_success(self, auth_fixtures):
+ """Test successful OBO token acquisition."""
+ service = auth_fixtures.get_authentication_service()
+ auth_context = auth_fixtures.create_auth_context()
+
+ mock_app = Mock()
+ mock_app.acquire_token_on_behalf_of.return_value = {"access_token": "obo-token"}
+
+ with patch.object(service, '_get_msal_app', return_value=mock_app):
+ result = await service.get_access_token_on_behalf_of(
+ auth_context=auth_context,
+ scopes=["https://graph.microsoft.com/.default"]
+ )
+
+ assert result == "obo-token"
+
+ @pytest.mark.asyncio
+ async def test_obo_flow_missing_subject_token(self, auth_fixtures):
+ """Test OBO flow with missing original subject token."""
+ service = auth_fixtures.get_authentication_service()
+ auth_context = AuthorizationContext(original_subject_token=None)
+
+ with pytest.raises(AuthenticationException, match="OBO flow requires an original subject token"):
+ await service.get_access_token_on_behalf_of(
+ auth_context=auth_context,
+ scopes=["test-scope"]
+ )
+
+ @pytest.mark.asyncio
+ async def test_obo_flow_ui_required(self, auth_fixtures):
+ """Test OBO flow with UI required error."""
+ service = auth_fixtures.get_authentication_service()
+ auth_context = auth_fixtures.create_auth_context()
+
+ mock_app = Mock()
+ mock_app.acquire_token_on_behalf_of.return_value = {
+ "error": "interaction_required",
+ "error_description": "User interaction required",
+ "claims": '{"access_token":{"polids":{"essential":true}}}'
+ }
+
+ with patch.object(service, '_get_msal_app', return_value=mock_app):
+ with patch('services.authentication.AuthenticationUIRequiredException') as mock_ui_ex:
+ mock_exception = Mock(spec=AuthenticationUIRequiredException)
+ mock_ui_ex.return_value = mock_exception
+
+ with pytest.raises(Exception): # raising the Mock surfaces a TypeError, still an Exception
+ await service.get_access_token_on_behalf_of(
+ auth_context=auth_context,
+ scopes=["test-scope"]
+ )
+
+
+@pytest.mark.unit
+@pytest.mark.services
+class TestS2SFlow:
+ """Test Service-to-Service flow."""
+
+ @pytest.mark.asyncio
+ async def test_get_fabric_s2s_token_success(self, auth_fixtures):
+ """Test successful S2S token acquisition."""
+ service = auth_fixtures.get_authentication_service()
+
+ mock_app = Mock()
+ mock_app.acquire_token_for_client.return_value = {"access_token": "s2s-token"}
+
+ with patch.object(service, '_get_msal_app', return_value=mock_app):
+ result = await service.get_fabric_s2s_token()
+
+ assert result == "s2s-token"
+
+ # Verify correct scope was used
+ expected_scopes = [f"{EnvironmentConstants.FABRIC_BACKEND_RESOURCE_ID}/.default"]
+ mock_app.acquire_token_for_client.assert_called_once_with(scopes=expected_scopes)
+
+
+@pytest.mark.unit
+@pytest.mark.services
+class TestCompositeTokenFlow:
+ """Test composite token building."""
+
+ @pytest.mark.asyncio
+ async def test_build_composite_token_success(self, auth_fixtures):
+ """Test successful composite token building."""
+ service = auth_fixtures.get_authentication_service()
+ auth_context = auth_fixtures.create_auth_context()
+
+ # Use realistic JWT-like tokens for OBO and S2S
+ obo_token = auth_fixtures.create_mock_jwt_token(
+ scopes="https://graph.microsoft.com/.default",
+ tenant_id="user-tenant"
+ )
+ s2s_token = auth_fixtures.create_mock_jwt_token(
+ id_typ="app",
+ tenant_id="publisher-tenant-id"
+ )
+
+ with patch.object(service, 'get_access_token_on_behalf_of', return_value=obo_token):
+ with patch.object(service, 'get_fabric_s2s_token', return_value=s2s_token):
+ result = await service.build_composite_token(
+ auth_context=auth_context,
+ scopes=["test-scope"]
+ )
+
+ # Verify result format - should be a proper SubjectAndAppToken header
+ assert result.startswith("SubjectAndAppToken1.0")
+ assert obo_token in result
+ assert s2s_token in result
+
+ # Verify it can be parsed
+ parsed = SubjectAndAppToken.parse(result)
+ assert parsed.subject_token == obo_token
+ assert parsed.app_token == s2s_token
+
+ @pytest.mark.asyncio
+ async def test_build_composite_token_obo_failure(self, auth_fixtures):
+ """Test composite token building when OBO fails."""
+ service = auth_fixtures.get_authentication_service()
+ auth_context = auth_fixtures.create_auth_context()
+
+ with patch.object(service, 'get_access_token_on_behalf_of',
+ side_effect=AuthenticationException("OBO failed")):
+ with pytest.raises(AuthenticationException, match="OBO failed"):
+ await service.build_composite_token(
+ auth_context=auth_context,
+ scopes=["test-scope"]
+ )
diff --git a/Backend/python/tests/unit/services/authentication/test_authentication_edge_cases.py b/Backend/python/tests/unit/services/authentication/test_authentication_edge_cases.py
new file mode 100644
index 0000000..a47ac2a
--- /dev/null
+++ b/Backend/python/tests/unit/services/authentication/test_authentication_edge_cases.py
@@ -0,0 +1,69 @@
+"""
+Edge cases and error scenarios for AuthenticationService.
+"""
+
+import pytest
+from unittest.mock import Mock, AsyncMock, patch
+from jose.exceptions import JWTClaimsError, ExpiredSignatureError
+
+from services.authentication import AuthenticationService
+from services.open_id_connect_configuration import OpenIdConnectConfiguration
+from models.authentication_models import Claim, AuthorizationContext
+from exceptions.exceptions import AuthenticationException, AuthenticationUIRequiredException
+
+
+@pytest.mark.unit
+@pytest.mark.services
+class TestTokenValidationEdgeCases:
+ """Test token validation edge cases and error scenarios."""
+
+ @pytest.mark.asyncio
+ async def test_malformed_jwt_token(self, auth_fixtures):
+ """Test handling of malformed JWT tokens."""
+ service = auth_fixtures.get_authentication_service()
+ malformed_token = "invalid.jwt.format"
+
+ with patch('services.authentication.jwt.get_unverified_header', side_effect=Exception("Invalid token")):
+ with pytest.raises(AuthenticationException, match="Token validation failed"):
+ await service._validate_aad_token_common(malformed_token, False, None)
+
+ @pytest.mark.asyncio
+ async def test_token_missing_signing_key(self, auth_fixtures):
+ """Test token validation when signing key is not found."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Create token with proper payload structure
+ payload = auth_fixtures.create_jwt_payload(tenant_id="test-tenant", token_version="2.0")
+ token = auth_fixtures.create_mock_jwt_token(payload=payload)
+
+ # Mock OpenID config with different key ID
+ mock_config = Mock(spec=OpenIdConnectConfiguration)
+ mock_config.signing_keys = [{"kid": "different-key-id", "kty": "RSA"}]
+ mock_config.issuer_configuration = "https://login.microsoftonline.com/{tenantid}/v2.0"
+
+ with patch('services.authentication.jwt.get_unverified_header', return_value={"kid": "unknown-key"}):
+ with patch('services.authentication.jwt.get_unverified_claims', return_value=payload):
+ with patch.object(service.openid_manager, 'get_configuration_async', return_value=mock_config):
+ with pytest.raises(AuthenticationException, match="Token signing key not found"):
+ await service._validate_aad_token_common(token, False, None)
+
+ @pytest.mark.asyncio
+ async def test_token_invalid_audience(self, auth_fixtures):
+ """Test token validation with invalid audience."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Create token with proper payload structure
+ payload = auth_fixtures.create_jwt_payload(tenant_id="test-tenant", token_version="2.0")
+ token = auth_fixtures.create_mock_jwt_token(payload=payload)
+
+ # Mock OpenID configuration with matching key
+ mock_config = Mock(spec=OpenIdConnectConfiguration)
+ mock_config.issuer_configuration = "https://login.microsoftonline.com/{tenantid}/v2.0"
+ mock_config.signing_keys = [{"kid": "test-key-id", "kty": "RSA"}]
+
+ with patch('services.authentication.jwt.get_unverified_header', return_value={"kid": "test-key-id"}):
+ with patch('services.authentication.jwt.get_unverified_claims', return_value=payload):
+ with patch('services.authentication.jwt.decode', side_effect=JWTClaimsError("Invalid audience")):
+ with patch.object(service.openid_manager, 'get_configuration_async', return_value=mock_config):
+ with pytest.raises(AuthenticationException, match="Invalid token claims"):
+ await service._validate_aad_token_common(token, False, None)
\ No newline at end of file
diff --git a/Backend/python/tests/unit/services/authentication/test_authentication_msal.py b/Backend/python/tests/unit/services/authentication/test_authentication_msal.py
new file mode 100644
index 0000000..597e974
--- /dev/null
+++ b/Backend/python/tests/unit/services/authentication/test_authentication_msal.py
@@ -0,0 +1,338 @@
+"""
+Parameterized MSAL error handling tests for AuthenticationService.
+"""
+
+import pytest
+from unittest.mock import Mock, AsyncMock, patch
+
+from services.authentication import AuthenticationService
+from models.authentication_models import AuthorizationContext
+from exceptions.exceptions import AuthenticationException, AuthenticationUIRequiredException
+from constants.environment_constants import EnvironmentConstants
+
+
+@pytest.mark.unit
+@pytest.mark.services
+class TestMSALErrorHandlingParameterized:
+ """Parameterized MSAL error handling tests to eliminate duplication."""
+
+ @pytest.mark.asyncio
+ @pytest.mark.parametrize("error_scenario", [
+ {
+ "error_code": "invalid_client",
+ "error_description": "Client authentication failed",
+ "expected_message": "MSAL exception: invalid_client"
+ },
+ {
+ "error_code": "invalid_scope",
+ "error_description": "The provided scope is invalid",
+ "expected_message": "MSAL exception: invalid_scope"
+ },
+ {
+ "error_code": "unauthorized_client",
+ "error_description": "The client is not authorized",
+ "expected_message": "MSAL exception: unauthorized_client"
+ },
+ {
+ "error_code": "access_denied",
+ "error_description": "Access denied by authorization server",
+ "expected_message": "MSAL exception: access_denied"
+ },
+ {
+ "error_code": "server_error",
+ "error_description": "Internal server error",
+ "expected_message": "MSAL exception: server_error"
+ }
+ ])
+ async def test_s2s_msal_errors_comprehensive(self, auth_fixtures, error_scenario):
+ """Test S2S token acquisition with various MSAL error scenarios."""
+ service = auth_fixtures.get_authentication_service()
+
+ mock_app = Mock()
+ mock_app.acquire_token_for_client.return_value = {
+ "error": error_scenario["error_code"],
+ "error_description": error_scenario["error_description"]
+ }
+
+ with patch.object(service, '_get_msal_app', return_value=mock_app):
+ with pytest.raises(AuthenticationException, match=error_scenario["expected_message"]):
+ await service.get_fabric_s2s_token()
+
+ @pytest.mark.asyncio
+ @pytest.mark.parametrize("exception_scenario", [
+ {
+ "exception": Exception("Service unavailable"),
+ "expected_message": "OBO token acquisition failed: Service unavailable"
+ },
+ {
+ "exception": ConnectionError("Network timeout"),
+ "expected_message": "OBO token acquisition failed: Network timeout"
+ },
+ {
+ "exception": ValueError("Invalid parameter"),
+ "expected_message": "OBO token acquisition failed: Invalid parameter"
+ }
+ ])
+ async def test_obo_msal_exceptions_comprehensive(self, auth_fixtures, exception_scenario):
+ """Test OBO flow with various MSAL exception scenarios."""
+ service = auth_fixtures.get_authentication_service()
+ auth_context = auth_fixtures.create_auth_context()
+
+ mock_app = Mock()
+ mock_app.acquire_token_on_behalf_of.side_effect = exception_scenario["exception"]
+
+ with patch.object(service, '_get_msal_app', return_value=mock_app):
+ with pytest.raises(AuthenticationException, match=exception_scenario["expected_message"]):
+ await service.get_access_token_on_behalf_of(auth_context, ["test-scope"])
+
+ @pytest.mark.asyncio
+ @pytest.mark.parametrize("ui_required_scenario", [
+ {
+ "error_code": "interaction_required",
+ "error_description": "User interaction required",
+ "suberror": None,
+ "claims": '{"access_token":{"polids":{"essential":true}}}',
+ "test_description": "Basic interaction required"
+ },
+ {
+ "error_code": "consent_required",
+ "error_description": "Admin consent required for application",
+ "suberror": None,
+ "claims": None,
+ "test_description": "Admin consent required"
+ },
+ {
+ "error_code": "interaction_required",
+ "error_description": "Conditional access policy requires device compliance",
+ "suberror": "conditional_access",
+ "claims": '{"access_token":{"capolids":{"essential":true,"values":["device-compliance"]}}}',
+ "test_description": "Conditional access policy"
+ },
+ {
+ "error_code": "invalid_grant",
+ "error_description": "Token has been revoked",
+ "suberror": None,
+ "claims": None,
+ "test_description": "Token revoked scenario"
+ }
+ ])
+ async def test_obo_ui_required_scenarios(self, auth_fixtures, ui_required_scenario):
+ """Test OBO flow UI required scenarios with comprehensive error codes."""
+ service = auth_fixtures.get_authentication_service()
+ auth_context = auth_fixtures.create_auth_context()
+
+ mock_app = Mock()
+ msal_response = {
+ "error": ui_required_scenario["error_code"],
+ "error_description": ui_required_scenario["error_description"]
+ }
+
+ if ui_required_scenario["suberror"]:
+ msal_response["suberror"] = ui_required_scenario["suberror"]
+
+ if ui_required_scenario["claims"]:
+ msal_response["claims"] = ui_required_scenario["claims"]
+
+ mock_app.acquire_token_on_behalf_of.return_value = msal_response
+
+ with patch.object(service, '_get_msal_app', return_value=mock_app):
+ with patch('services.authentication.AuthenticationUIRequiredException') as mock_ui_ex:
+ mock_exception = Mock(spec=AuthenticationUIRequiredException)
+ mock_ui_ex.return_value = mock_exception
+
+ with pytest.raises(Exception): # raising the Mock surfaces a TypeError, still an Exception
+ await service.get_access_token_on_behalf_of(auth_context, ["test-scope"])
+
+ # Verify the exception was created
+ mock_ui_ex.assert_called_once_with(ui_required_scenario["error_description"])
+
+ # Verify claims were added if present
+ if ui_required_scenario["claims"]:
+ mock_exception.add_claims_for_conditional_access.assert_called_once_with(ui_required_scenario["claims"])
+
+ # Verify scopes were added for consent scenarios
+ if "consent_required" in ui_required_scenario["error_code"] or "consent_required" in ui_required_scenario["error_description"].lower():
+ mock_exception.add_scopes_to_consent.assert_called_once_with(["test-scope"])
+
+ @pytest.mark.asyncio
+ async def test_obo_missing_access_token_in_response(self, auth_fixtures):
+ """Test OBO flow when MSAL returns success but no access token."""
+ service = auth_fixtures.get_authentication_service()
+ auth_context = auth_fixtures.create_auth_context()
+
+ mock_app = Mock()
+ # Simulate successful response but missing access_token field
+ mock_app.acquire_token_on_behalf_of.return_value = {
+ "token_type": "Bearer",
+ "expires_in": 3600
+ # Missing "access_token" field
+ }
+
+ with patch.object(service, '_get_msal_app', return_value=mock_app):
+ with pytest.raises(AuthenticationException, match="Access token not found in OBO result"):
+ await service.get_access_token_on_behalf_of(auth_context, ["test-scope"])
+
+ @pytest.mark.asyncio
+ async def test_obo_missing_subject_token_scenarios(self, auth_fixtures):
+ """Test OBO flow error scenarios with missing subject tokens."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Test with None original_subject_token
+ auth_context_none = AuthorizationContext(original_subject_token=None)
+ with pytest.raises(AuthenticationException, match="OBO flow requires an original subject token"):
+ await service.get_access_token_on_behalf_of(auth_context_none, ["test-scope"])
+
+ # Test with empty string original_subject_token
+ auth_context_empty = AuthorizationContext(original_subject_token="")
+ with pytest.raises(AuthenticationException, match="OBO flow requires an original subject token"):
+ await service.get_access_token_on_behalf_of(auth_context_empty, ["test-scope"])
+
+ @pytest.mark.asyncio
+ async def test_obo_missing_tenant_context(self, auth_fixtures):
+ """Test OBO flow with missing tenant context."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Create auth context without tenant_object_id
+ auth_context = AuthorizationContext(
+ original_subject_token="valid-token",
+ tenant_object_id=None
+ )
+
+ with pytest.raises(AuthenticationException, match="Cannot determine tenant authority for OBO flow"):
+ await service.get_access_token_on_behalf_of(auth_context, ["test-scope"])
+
+ @pytest.mark.asyncio
+ async def test_msal_client_not_configured_scenarios(self, auth_fixtures):
+ """Test scenarios where MSAL client is not properly configured."""
+ mock_openid_manager, _ = auth_fixtures.get_basic_mocks()
+
+ # Test with missing client_id
+ mock_config_no_id = auth_fixtures.get_config_service_mock(
+ client_id=None,
+ client_secret="valid-secret"
+ )
+
+ with patch("services.authentication.get_configuration_service", return_value=mock_config_no_id):
+ service = AuthenticationService(openid_manager=mock_openid_manager)
+ auth_context = auth_fixtures.create_auth_context()
+
+ with pytest.raises(AuthenticationException, match="MSAL client not configured"):
+ await service.get_access_token_on_behalf_of(auth_context, ["test-scope"])
+
+ # Test with missing client_secret
+ mock_config_no_secret = auth_fixtures.get_config_service_mock(
+ client_id="valid-id",
+ client_secret=None
+ )
+
+ with patch("services.authentication.get_configuration_service", return_value=mock_config_no_secret):
+ service = AuthenticationService(openid_manager=mock_openid_manager)
+ auth_context = auth_fixtures.create_auth_context()
+
+ with pytest.raises(AuthenticationException, match="MSAL client not configured"):
+ await service.get_access_token_on_behalf_of(auth_context, ["test-scope"])
+
+
+@pytest.mark.unit
+@pytest.mark.services
+class TestMSALAppManagement:
+ """Test MSAL application management and caching."""
+
+ def test_msal_app_caching_comprehensive(self, auth_fixtures):
+ """Test MSAL app caching behavior comprehensively."""
+ service = auth_fixtures.get_authentication_service()
+
+ with patch("services.authentication.msal") as mock_msal:
+ mock_app1 = Mock()
+ mock_app2 = Mock()
+ mock_msal.ConfidentialClientApplication.side_effect = [mock_app1, mock_app2]
+
+ # Test same tenant returns cached app
+ tenant1 = "tenant-1"
+ result1a = service._get_msal_app(tenant1)
+ result1b = service._get_msal_app(tenant1)
+
+ assert result1a == mock_app1
+ assert result1b == mock_app1 # Same instance
+ assert result1a is result1b
+
+ # Test different tenant creates new app
+ tenant2 = "tenant-2"
+ result2 = service._get_msal_app(tenant2)
+
+ assert result2 == mock_app2
+ assert result2 != result1a # Different instances
+
+ # Verify MSAL was called correctly
+ assert mock_msal.ConfidentialClientApplication.call_count == 2
+
+ # Verify authorities were constructed correctly
+ calls = mock_msal.ConfidentialClientApplication.call_args_list
+ assert f"{EnvironmentConstants.AAD_INSTANCE_URL}/{tenant1}" in str(calls[0])
+ assert f"{EnvironmentConstants.AAD_INSTANCE_URL}/{tenant2}" in str(calls[1])
+
+ def test_msal_app_cache_isolation(self, auth_fixtures):
+ """Test that MSAL app cache properly isolates different tenants."""
+ # Create a fresh service instance to avoid cached apps from other tests
+ mock_openid_manager, mock_config_service = auth_fixtures.get_basic_mocks()
+
+ with patch("services.authentication.get_configuration_service", return_value=mock_config_service):
+ fresh_service = AuthenticationService(openid_manager=mock_openid_manager)
+
+ test_tenants = ["tenant-a", "tenant-b", "tenant-c", "tenant-d"]
+
+ with patch("services.authentication.msal") as mock_msal:
+ # Create unique mock app for each tenant
+ mock_apps = []
+ for i, tenant in enumerate(test_tenants):
+ mock_app = Mock()
+ mock_app.tenant_id = tenant # Add identifier for testing
+ mock_apps.append(mock_app)
+
+ mock_msal.ConfidentialClientApplication.side_effect = mock_apps
+
+ # Get app for each tenant
+ retrieved_apps = {}
+ for tenant in test_tenants:
+ retrieved_apps[tenant] = fresh_service._get_msal_app(tenant)
+
+ # Each tenant should get a unique app (not cached across tenants)
+ # Verify each tenant gets a different app
+ app_ids = set(id(app) for app in retrieved_apps.values())
+ assert len(app_ids) == len(test_tenants), f"Expected {len(test_tenants)} unique apps, got {len(app_ids)}"
+
+ # Test caching: same tenant should get the same app on subsequent calls
+ for tenant in test_tenants:
+ cached_app = fresh_service._get_msal_app(tenant)
+ assert cached_app is retrieved_apps[tenant], f"App for {tenant} should be cached"
+
+ # Verify MSAL was called exactly once per unique tenant
+ assert mock_msal.ConfidentialClientApplication.call_count == len(test_tenants), \
+ f"Expected {len(test_tenants)} MSAL app creations, got {mock_msal.ConfidentialClientApplication.call_count}"
+
+
+ @pytest.mark.asyncio
+ async def test_concurrent_msal_app_access(self, auth_fixtures):
+ """Test concurrent access to MSAL apps doesn't cause issues."""
+ service = auth_fixtures.get_authentication_service()
+ tenant_id = "concurrent-test-tenant"
+
+ with patch("services.authentication.msal") as mock_msal:
+ mock_app = Mock()
+ mock_msal.ConfidentialClientApplication.return_value = mock_app
+
+ # Simulate concurrent access
+ import asyncio
+
+ async def get_app():
+ return service._get_msal_app(tenant_id)
+
+ # Create multiple concurrent tasks
+ tasks = [get_app() for _ in range(10)]
+ results = await asyncio.gather(*tasks)
+
+ # All should return the same app instance
+ assert all(app is mock_app for app in results)
+
+ # Should only create one app despite concurrent access
+ assert mock_msal.ConfidentialClientApplication.call_count == 1
\ No newline at end of file
diff --git a/Backend/python/tests/unit/services/authentication/test_authentication_security.py b/Backend/python/tests/unit/services/authentication/test_authentication_security.py
new file mode 100644
index 0000000..92e44bf
--- /dev/null
+++ b/Backend/python/tests/unit/services/authentication/test_authentication_security.py
@@ -0,0 +1,267 @@
+"""
+Comprehensive security validation tests for AuthenticationService.
+"""
+
+import pytest
+from unittest.mock import Mock, AsyncMock, patch
+from jose.exceptions import JWTClaimsError, ExpiredSignatureError
+
+from services.authentication import AuthenticationService
+from services.open_id_connect_configuration import OpenIdConnectConfiguration
+from models.authentication_models import Claim, AuthorizationContext
+from exceptions.exceptions import AuthenticationException, AuthenticationUIRequiredException
+from constants.environment_constants import EnvironmentConstants
+
+
+@pytest.mark.unit
+@pytest.mark.services
+class TestSecurityValidationsComprehensive:
+ """Comprehensive security validation tests - consolidates duplicate tests."""
+
+ def test_app_id_security_comprehensive(self, auth_fixtures):
+ """Test app ID validation enforces security requirements - comprehensive scenarios."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Valid Fabric app ID should pass
+ valid_claims = [Claim(type="azp", value=EnvironmentConstants.FABRIC_BACKEND_APP_ID)]
+ result = service._validate_claim_one_of_values(
+ valid_claims, "azp",
+ [EnvironmentConstants.FABRIC_BACKEND_APP_ID, EnvironmentConstants.FABRIC_CLIENT_FOR_WORKLOADS_APP_ID],
+ "Valid Fabric app required"
+ )
+ assert result == EnvironmentConstants.FABRIC_BACKEND_APP_ID
+
+ # Invalid app ID should fail
+ invalid_claims = [Claim(type="azp", value="unauthorized-app-id")]
+ with pytest.raises(AuthenticationException, match="Valid Fabric app required"):
+ service._validate_claim_one_of_values(
+ invalid_claims, "azp",
+ [EnvironmentConstants.FABRIC_BACKEND_APP_ID, EnvironmentConstants.FABRIC_CLIENT_FOR_WORKLOADS_APP_ID],
+ "Valid Fabric app required"
+ )
+
+ # Test with empty app ID
+ empty_claims = [Claim(type="azp", value="")]
+ with pytest.raises(AuthenticationException, match="Valid Fabric app required"):
+ service._validate_claim_one_of_values(
+ empty_claims, "azp",
+ [EnvironmentConstants.FABRIC_BACKEND_APP_ID, EnvironmentConstants.FABRIC_CLIENT_FOR_WORKLOADS_APP_ID],
+ "Valid Fabric app required"
+ )
+
+ def test_tenant_isolation_comprehensive(self, auth_fixtures):
+ """Test tenant isolation prevents cross-tenant attacks - comprehensive scenarios."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Test direct validation failure
+ malicious_claims = [Claim(type="tid", value="malicious-tenant")]
+ with pytest.raises(AuthenticationException):
+ service._validate_claim_value(
+ malicious_claims, "tid", "legitimate-tenant", "Tenant isolation required"
+ )
+
+ # Test cross-tenant token reuse prevention
+ attacker_claims = [Claim(type="tid", value="attacker-tenant-id")]
+ with pytest.raises(AuthenticationException):
+ service._validate_claim_value(
+ attacker_claims, "tid", "legitimate-tenant-id",
+ "Cross-tenant attack prevented"
+ )
+
+ # Test valid tenant passes
+ valid_claims = [Claim(type="tid", value="legitimate-tenant")]
+ result = service._validate_claim_value(
+ valid_claims, "tid", "legitimate-tenant", "Should pass"
+ )
+ assert result == "legitimate-tenant"
+
+ def test_app_only_token_security_comprehensive(self, auth_fixtures):
+ """Test app-only token security requirements - comprehensive scenarios."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Valid app-only token should pass
+ valid_claims = [
+ Claim(type="idtyp", value="app"),
+ Claim(type="oid", value="service-principal-id")
+ ]
+
+ with patch.object(service, '_validate_claim_value'):
+ with patch.object(service, '_validate_claim_exists'):
+ with patch.object(service, '_validate_no_claim'):
+ # Should not raise exception
+ service._validate_app_only(valid_claims, is_app_only=True)
+
+ # Token confusion attack - app-only token with delegated scopes
+ malicious_claims = [
+ Claim(type="idtyp", value="app"),
+ Claim(type="scp", value="malicious-scope"), # Should not be present in app-only
+ Claim(type="oid", value="attacker-id")
+ ]
+
+ with patch.object(service, '_validate_claim_value'):
+ with patch.object(service, '_validate_claim_exists'):
+ with patch.object(service, '_validate_no_claim', side_effect=AuthenticationException("Token confusion detected")):
+ with pytest.raises(AuthenticationException, match="Token confusion detected"):
+ service._validate_app_only(malicious_claims, is_app_only=True)
+
+ # Test app-only token without required oid claim
+ incomplete_claims = [Claim(type="idtyp", value="app")]
+ with patch.object(service, '_validate_claim_value'):
+ with patch.object(service, '_validate_claim_exists', side_effect=AuthenticationException("Missing oid")):
+ with pytest.raises(AuthenticationException, match="Missing oid"):
+ service._validate_app_only(incomplete_claims, is_app_only=True)
+
+ def test_scope_privilege_escalation_prevention(self, auth_fixtures):
+ """Test prevention of scope privilege escalation attacks."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Token with limited scopes trying to access high-privilege operation
+ limited_claims = [Claim(type="scp", value="read-only-scope")]
+ with pytest.raises(AuthenticationException, match="missing required scopes"):
+ service._validate_any_scope(limited_claims, ["admin-scope", "write-scope"])
+
+ # Token with no scopes
+ no_scope_claims = []
+ with pytest.raises(AuthenticationException, match="missing required scopes"):
+ service._validate_any_scope(no_scope_claims, ["required-scope"])
+
+ # Token with empty scope string
+ empty_scope_claims = [Claim(type="scp", value="")]
+ with pytest.raises(AuthenticationException, match="missing required scopes"):
+ service._validate_any_scope(empty_scope_claims, ["required-scope"])
+
+ # Valid scope should pass
+ valid_scope_claims = [Claim(type="scp", value="admin-scope other-scope")]
+ # Should not raise exception
+ service._validate_any_scope(valid_scope_claims, ["admin-scope"])
+
+ def test_token_tampering_prevention(self, auth_fixtures):
+ """Test prevention of token tampering attacks."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Test claim value type coercion security
+ # Integer claim value should be compared as string for security
+ claims = [Claim(type="tid", value=123)]
+ result = service._validate_claim_value(claims, "tid", "123", "Should match")
+ assert result == 123
+
+ # Test that string comparison prevents bypass
+ claims = [Claim(type="tid", value=123)]
+ with pytest.raises(AuthenticationException):
+ service._validate_claim_value(claims, "tid", "456", "Should fail")
+
+ def test_injection_attack_prevention(self, auth_fixtures):
+ """Test prevention of injection attacks through claim values."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Test special characters in claim values are handled safely
+ special_chars_values = [
+ "normal-value",
+ "", # Empty
+ " whitespace ", # Whitespace
+ "special@chars#here!", # Special characters
+ "unicode-тест-值", # Unicode
+ "very-long-" + "x" * 1000, # Very long
+ "'; DROP TABLE users; --", # SQL injection attempt
+ "", # XSS attempt
+ "../../etc/passwd", # Path traversal attempt
+ ]
+
+ for test_value in special_chars_values:
+ claims = [Claim(type="test_claim", value=test_value)]
+ result = service._validate_claim_exists(claims, "test_claim", "Test")
+ assert result == test_value
+
+ def test_malformed_scope_security(self, auth_fixtures):
+ """Test security handling of malformed scope strings."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Extra whitespace in scopes should be handled securely
+ claims = [Claim(type="scp", value=" scope1 scope2 scope3 ")]
+ result = service._extract_scopes_from_claims(claims)
+
+ # Should properly split and trim
+ assert "scope1" in result
+ assert "scope2" in result
+ assert "scope3" in result
+ assert all(scope.strip() == scope for scope in result if scope)
+
+ # Test with malicious scope strings
+ malicious_scopes = [
+ "scope1\nscope2", # Newline injection
+ "scope1\x00scope2", # Null byte injection
+ "scope1;rm -rf /", # Command injection attempt
+ ]
+
+ for malicious_scope in malicious_scopes:
+ claims = [Claim(type="scp", value=malicious_scope)]
+ result = service._extract_scopes_from_claims(claims)
+ # Should handle safely without crashing
+ assert isinstance(result, list)
+
+
+@pytest.mark.unit
+@pytest.mark.services
+class TestTokenSecurityValidation:
+ """Test token-level security validations."""
+
+ @pytest.mark.asyncio
+ async def test_token_signature_validation_security(self, auth_fixtures):
+ """Test that token signature validation prevents tampering."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Test missing signing key security
+ payload = auth_fixtures.create_jwt_payload(tenant_id="test-tenant", token_version="2.0")
+ token = auth_fixtures.create_mock_jwt_token(payload=payload)
+
+ # Mock OpenID config with different key ID (simulates key not found)
+ mock_config = Mock(spec=OpenIdConnectConfiguration)
+ mock_config.signing_keys = [{"kid": "different-key-id", "kty": "RSA"}]
+ mock_config.issuer_configuration = "https://login.microsoftonline.com/{tenantid}/v2.0"
+
+ with patch('services.authentication.jwt.get_unverified_header', return_value={"kid": "unknown-key"}):
+ with patch('services.authentication.jwt.get_unverified_claims', return_value=payload):
+ with patch.object(service.openid_manager, 'get_configuration_async', return_value=mock_config):
+ with pytest.raises(AuthenticationException, match="Token signing key not found"):
+ await service._validate_aad_token_common(token, False, None)
+
+ @pytest.mark.asyncio
+ async def test_token_audience_validation_security(self, auth_fixtures):
+ """Test that audience validation prevents token misuse."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Create token with proper payload structure
+ payload = auth_fixtures.create_jwt_payload(tenant_id="test-tenant", token_version="2.0")
+ token = auth_fixtures.create_mock_jwt_token(payload=payload)
+
+ # Mock OpenID configuration with matching key
+ mock_config = Mock(spec=OpenIdConnectConfiguration)
+ mock_config.issuer_configuration = "https://login.microsoftonline.com/{tenantid}/v2.0"
+ mock_config.signing_keys = [{"kid": "test-key-id", "kty": "RSA"}]
+
+ with patch('services.authentication.jwt.get_unverified_header', return_value={"kid": "test-key-id"}):
+ with patch('services.authentication.jwt.get_unverified_claims', return_value=payload):
+ with patch('services.authentication.jwt.decode', side_effect=JWTClaimsError("Invalid audience")):
+ with patch.object(service.openid_manager, 'get_configuration_async', return_value=mock_config):
+ with pytest.raises(AuthenticationException, match="Invalid token claims"):
+ await service._validate_aad_token_common(token, False, None)
+
+ @pytest.mark.asyncio
+ async def test_malformed_token_security(self, auth_fixtures):
+ """Test security handling of malformed tokens."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Test various malformed token formats
+ malformed_tokens = [
+ "invalid.jwt.format",
+ "not-a-jwt-at-all",
+ "",
+ "header.payload", # Missing signature
+ "header.payload.signature.extra", # Too many parts
+ "헤더.페이로드.서명", # Non-ASCII characters
+ ]
+
+ for malformed_token in malformed_tokens:
+ with patch('services.authentication.jwt.get_unverified_header', side_effect=Exception("Invalid token")):
+ with pytest.raises(AuthenticationException, match="Token validation failed"):
+ await service._validate_aad_token_common(malformed_token, False, None)
\ No newline at end of file
diff --git a/Backend/python/tests/unit/services/authentication/test_authentication_validation.py b/Backend/python/tests/unit/services/authentication/test_authentication_validation.py
new file mode 100644
index 0000000..21895f8
--- /dev/null
+++ b/Backend/python/tests/unit/services/authentication/test_authentication_validation.py
@@ -0,0 +1,397 @@
+"""
+Core validation and token processing tests for AuthenticationService.
+"""
+
+import pytest
+from unittest.mock import Mock, AsyncMock, patch
+
+from services.authentication import AuthenticationService
+from services.open_id_connect_configuration import OpenIdConnectConfiguration
+from models.authentication_models import Claim, TokenVersion
+from exceptions.exceptions import AuthenticationException
+from constants.environment_constants import EnvironmentConstants
+
+
+@pytest.mark.unit
+@pytest.mark.services
+class TestTokenProcessing:
+ """Test core token processing methods that were missing coverage."""
+
+ def test_get_expected_issuer_v1_tokens(self, auth_fixtures):
+ """Test issuer URL construction for v1.0 tokens."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Mock OpenID configuration
+ mock_oidc_config = Mock()
+ mock_oidc_config.issuer_configuration = "https://login.microsoftonline.com/{tenantid}/v2.0"
+
+ tenant_id = "test-tenant-123"
+ result = service.get_expected_issuer(mock_oidc_config, TokenVersion.V1, tenant_id)
+
+ expected = "https://login.microsoftonline.com/test-tenant-123/v2.0"
+ assert result == expected
+
+ def test_get_expected_issuer_v2_tokens(self, auth_fixtures):
+ """Test issuer URL construction for v2.0 tokens."""
+ service = auth_fixtures.get_authentication_service()
+
+ # For v2.0 tokens, it should use AAD_INSTANCE_URL + tenant + v2.0
+ mock_oidc_config = Mock() # Not used for v2.0 tokens
+ tenant_id = "test-tenant-456"
+
+ result = service.get_expected_issuer(mock_oidc_config, TokenVersion.V2, tenant_id)
+
+ expected = f"{EnvironmentConstants.AAD_INSTANCE_URL}/{tenant_id}/v2.0"
+ assert result == expected
+
+ def test_get_expected_issuer_missing_placeholder(self, auth_fixtures):
+ """Test issuer URL construction with missing tenantid placeholder."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Mock OpenID configuration with invalid placeholder format that will cause KeyError
+ mock_oidc_config = Mock()
+ mock_oidc_config.issuer_configuration = "https://login.microsoftonline.com/{invalid}/v2.0" # Wrong placeholder name
+
+ tenant_id = "test-tenant-123"
+
+ # The service catches KeyError and converts to AuthenticationException
+ # when {tenantid} placeholder is missing but other placeholders exist
+ with pytest.raises(AuthenticationException, match="Issuer configuration missing tenantid placeholder"):
+ service.get_expected_issuer(mock_oidc_config, TokenVersion.V1, tenant_id)
+
+ def test_get_expected_issuer_unsupported_version(self, auth_fixtures):
+ """Test issuer URL construction with unsupported token version."""
+ service = auth_fixtures.get_authentication_service()
+
+ mock_oidc_config = Mock()
+ tenant_id = "test-tenant-123"
+
+ with pytest.raises(AuthenticationException, match="Unsupported token version"):
+ service.get_expected_issuer(mock_oidc_config, "v3.0", tenant_id)
+
+ def test_get_expected_audience_by_version(self, auth_fixtures):
+ """Test audience selection based on token version."""
+ service = auth_fixtures.get_authentication_service()
+
+ # For v1.0 tokens, should return service.audience
+ result_v1 = service._get_excpected_audience(TokenVersion.V1)
+ assert result_v1 == service.audience
+
+ # For v2.0 tokens, should return service.client_id
+ result_v2 = service._get_excpected_audience(TokenVersion.V2)
+ assert result_v2 == service.client_id
+
+ def test_get_token_version_comprehensive(self, auth_fixtures):
+ """Test token version extraction with comprehensive scenarios."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Test v1.0 token
+ claims_v1 = [Claim(type="ver", value="1.0")]
+ result_v1 = service._get_token_version(claims_v1)
+ assert result_v1 == TokenVersion.V1
+
+ # Test v2.0 token
+ claims_v2 = [Claim(type="ver", value="2.0")]
+ result_v2 = service._get_token_version(claims_v2)
+ assert result_v2 == TokenVersion.V2
+
+ # Test missing version claim
+ claims_no_version = []
+ with pytest.raises(AuthenticationException, match="Missing claim ver"):
+ service._get_token_version(claims_no_version)
+
+ # Test invalid version format
+ claims_invalid_version = [Claim(type="ver", value="invalid")]
+ with pytest.raises(AuthenticationException, match="Unsupported token version"):
+ service._get_token_version(claims_invalid_version)
+
+ # Test future version
+ claims_future_version = [Claim(type="ver", value="3.0")]
+ with pytest.raises(AuthenticationException, match="Unsupported token version"):
+ service._get_token_version(claims_future_version)
+
+ # Test non-string version value
+ claims_numeric_version = [Claim(type="ver", value=2.0)]
+ with pytest.raises(AuthenticationException, match="Unsupported token version"):
+ service._get_token_version(claims_numeric_version)
+
+
+@pytest.mark.unit
+@pytest.mark.services
+class TestScopeValidationComprehensive:
+ """Comprehensive scope validation tests covering edge cases."""
+
+ def test_extract_scopes_comprehensive(self, auth_fixtures):
+ """Test scope extraction with comprehensive scenarios."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Test normal scopes
+ claims = [Claim(type="scp", value="scope1 scope2 scope3")]
+ result = service._extract_scopes_from_claims(claims)
+ assert result == ["scope1", "scope2", "scope3"]
+
+ # Test empty string scopes
+ claims = [Claim(type="scp", value="")]
+ result = service._extract_scopes_from_claims(claims)
+ assert result == []
+
+ # Test None value scopes
+ claims = [Claim(type="scp", value=None)]
+ result = service._extract_scopes_from_claims(claims)
+ assert result == []
+
+ # Test whitespace handling
+ claims = [Claim(type="scp", value=" scope1 scope2 scope3 ")]
+ result = service._extract_scopes_from_claims(claims)
+ assert "scope1" in result
+ assert "scope2" in result
+ assert "scope3" in result
+ assert all(scope.strip() == scope for scope in result if scope)
+
+ # Test roles claim (list format)
+ claims = [Claim(type="roles", value=["role1", "role2"])]
+ result = service._extract_scopes_from_claims(claims)
+ assert "role1" in result
+ assert "role2" in result
+
+ # Test roles claim (string format)
+ claims = [Claim(type="roles", value="single-role")]
+ result = service._extract_scopes_from_claims(claims)
+ assert "single-role" in result
+
+ # Test combined scp and roles
+ claims = [
+ Claim(type="scp", value="delegated-scope"),
+ Claim(type="roles", value=["app-role1", "app-role2"])
+ ]
+ result = service._extract_scopes_from_claims(claims)
+ assert "delegated-scope" in result
+ assert "app-role1" in result
+ assert "app-role2" in result
+
+ def test_malformed_roles_claims(self, auth_fixtures):
+ """Test handling of malformed roles claims."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Test None roles value
+ claims = [Claim(type="roles", value=None)]
+ result = service._extract_scopes_from_claims(claims)
+ assert result == []
+
+ # Test empty list roles
+ claims = [Claim(type="roles", value=[])]
+ result = service._extract_scopes_from_claims(claims)
+ assert result == []
+
+ # Test numeric values in roles (should be handled gracefully)
+ claims = [Claim(type="roles", value=[123, "valid-role"])]
+ result = service._extract_scopes_from_claims(claims)
+ assert 123 in result # Should include numeric values as-is
+ assert "valid-role" in result
+
+ def test_validate_any_scope_edge_cases(self, auth_fixtures):
+ """Test scope validation with various edge cases."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Test case-sensitive scope matching
+ claims = [Claim(type="scp", value="FabricWorkloadControl")]
+ service._validate_any_scope(claims, ["FabricWorkloadControl"]) # Should pass
+
+ with pytest.raises(AuthenticationException, match="missing required scopes"):
+ service._validate_any_scope(claims, ["fabricworkloadcontrol"]) # Different case
+
+ # Test partial scope matching (should not match)
+ claims = [Claim(type="scp", value="FabricWorkload")]
+ with pytest.raises(AuthenticationException, match="missing required scopes"):
+ service._validate_any_scope(claims, ["FabricWorkloadControl"])
+
+ # Test multiple allowed scopes (any match should pass)
+ claims = [Claim(type="scp", value="scope2")]
+ service._validate_any_scope(claims, ["scope1", "scope2", "scope3"]) # Should pass
+
+ # Test empty allowed scopes list
+ claims = [Claim(type="scp", value="any-scope")]
+ with pytest.raises(AuthenticationException, match="missing required scopes"):
+ service._validate_any_scope(claims, [])
+
+ def test_special_characters_in_scopes(self, auth_fixtures):
+ """Test handling of special characters in scope names."""
+ service = auth_fixtures.get_authentication_service()
+
+ special_scope_names = [
+ "scope-with-dashes",
+ "scope.with.dots",
+ "scope_with_underscores",
+ "scope:with:colons",
+ "scope/with/slashes",
+ "scope@with@at",
+ "https://graph.microsoft.com/.default", # Real-world example
+ ]
+
+ for scope_name in special_scope_names:
+ claims = [Claim(type="scp", value=scope_name)]
+ result = service._extract_scopes_from_claims(claims)
+ assert scope_name in result
+
+ # Test validation passes for exact match
+ service._validate_any_scope(claims, [scope_name])
+
+
+@pytest.mark.unit
+@pytest.mark.services
+class TestClaimValidationComprehensive:
+ """Comprehensive claim validation tests."""
+
+ def test_validate_claim_exists_edge_cases(self, auth_fixtures):
+ """Test claim existence validation with edge cases."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Test with multiple claims of same type (should return first)
+ claims = [
+ Claim(type="tid", value="first-tenant"),
+ Claim(type="tid", value="second-tenant")
+ ]
+ result = service._validate_claim_exists(claims, "tid", "Tenant required")
+ assert result == "first-tenant"
+
+ # Test with empty claim value
+ claims = [Claim(type="tid", value="")]
+ result = service._validate_claim_exists(claims, "tid", "Tenant required")
+ assert result == ""
+
+ # Test with None claim value
+ claims = [Claim(type="tid", value=None)]
+ result = service._validate_claim_exists(claims, "tid", "Tenant required")
+ assert result is None
+
+ # Test case-sensitive claim name matching
+ claims = [Claim(type="TID", value="test-tenant")]
+ with pytest.raises(AuthenticationException, match="Missing claim tid"):
+ service._validate_claim_exists(claims, "tid", "Tenant required")
+
+ def test_validate_claim_value_comprehensive(self, auth_fixtures):
+ """Test claim value validation with comprehensive scenarios."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Test successful validation
+ claims = [Claim(type="tid", value="test-tenant")]
+ result = service._validate_claim_value(claims, "tid", "test-tenant", "Should match")
+ assert result == "test-tenant"
+
+ # Test type coercion (int to string comparison)
+ claims = [Claim(type="tid", value=123)]
+ result = service._validate_claim_value(claims, "tid", "123", "Should match")
+ assert result == 123
+
+ # Test without expected value (should just return claim value)
+ claims = [Claim(type="tid", value="any-value")]
+ result = service._validate_claim_value(claims, "tid", None, "No validation")
+ assert result == "any-value"
+
+ # Test value mismatch
+ claims = [Claim(type="tid", value="wrong-tenant")]
+ with pytest.raises(AuthenticationException, match="Should fail"):
+ service._validate_claim_value(claims, "tid", "correct-tenant", "Should fail")
+
+ def test_validate_no_claim_security(self, auth_fixtures):
+ """Test that _validate_no_claim prevents token confusion attacks."""
+ service = auth_fixtures.get_authentication_service()
+
+ # Test that method correctly identifies unexpected claims
+ claims = [Claim(type="scp", value="delegated-scope")]
+ with pytest.raises(AuthenticationException, match="Unexpected token format"):
+ service._validate_no_claim(claims, "scp", "App-only tokens should not have this")
+
+ # Test that method passes when claim is not present
+ claims = [Claim(type="other", value="other-value")]
+ # Should not raise exception
+ service._validate_no_claim(claims, "scp", "App-only tokens should not have this")
+
+ # Test with empty claims list
+ claims = []
+ # Should not raise exception
+ service._validate_no_claim(claims, "scp", "App-only tokens should not have this")
+
+
+@pytest.mark.unit
+@pytest.mark.services
+class TestConfigurationHandling:
+ """Comprehensive configuration handling tests - consolidates duplicate tests."""
+
+ def test_missing_configuration_scenarios(self, auth_fixtures):
+ """Test various missing configuration combinations."""
+ mock_openid_manager, _ = auth_fixtures.get_basic_mocks()
+
+ # Test all configuration missing
+ mock_config_all_none = auth_fixtures.get_config_service_mock(
+ publisher_tenant_id=None,
+ client_id=None,
+ client_secret=None,
+ audience=None
+ )
+
+ with patch("services.authentication.get_configuration_service", return_value=mock_config_all_none):
+ service = AuthenticationService(openid_manager=mock_openid_manager)
+
+ # Service should be created but with minimal functionality
+ assert service.client_id is None
+ assert service.client_secret is None
+ assert service.publisher_tenant_id is None
+ assert service.audience is None
+ assert len(service._msal_apps) == 0
+
+ # Test partial configuration missing (client_id and secret only)
+ mock_config_partial = auth_fixtures.get_config_service_mock(
+ client_id=None,
+ client_secret=None
+ )
+
+ with patch("services.authentication.get_configuration_service", return_value=mock_config_partial):
+ service = AuthenticationService(openid_manager=mock_openid_manager)
+ assert len(service._msal_apps) == 0
+ assert service.publisher_tenant_id == "publisher-tenant-id" # This should still be set
+
+ # Test empty string configuration
+ mock_config_empty = auth_fixtures.get_config_service_mock(
+ publisher_tenant_id="",
+ client_id="",
+ client_secret="",
+ audience=""
+ )
+
+ with patch("services.authentication.get_configuration_service", return_value=mock_config_empty):
+ service = AuthenticationService(openid_manager=mock_openid_manager)
+ # Empty strings are falsy, so MSAL app shouldn't be created
+ assert len(service._msal_apps) == 0
+
+ def test_msal_app_authority_construction(self, auth_fixtures):
+ """Test MSAL app authority URL construction for different tenants."""
+ service = auth_fixtures.get_authentication_service()
+
+ test_cases = [
+ "common",
+ "organizations",
+ "consumers",
+ "specific-tenant-id",
+ "12345678-1234-1234-1234-123456789012", # GUID format
+ ]
+
+ # Test MSAL app authority construction for different tenant types
+ with patch("services.authentication.msal") as mock_msal:
+ mock_app = Mock()
+ mock_msal.ConfidentialClientApplication.return_value = mock_app
+
+ # Test that we can create MSAL apps for different tenants
+ for tenant_id in test_cases:
+ result = service._get_msal_app(tenant_id)
+ # Just verify we get a valid app back
+ assert result is not None
+
+ # Due to caching, some tenants may reuse apps, so we check that
+ # MSAL was called at least once (but possibly less than len(test_cases) due to caching)
+ assert mock_msal.ConfidentialClientApplication.call_count > 0
+ assert mock_msal.ConfidentialClientApplication.call_count <= len(test_cases)
+
+ # Verify that at least one authority URL was constructed correctly
+ calls = mock_msal.ConfidentialClientApplication.call_args_list
+ assert len(calls) > 0, "At least one MSAL app should have been created"
\ No newline at end of file
diff --git a/Backend/python/tools/ManifestGeneratorTool.md b/Backend/python/tools/ManifestGeneratorTool.md
new file mode 100644
index 0000000..ee2932b
--- /dev/null
+++ b/Backend/python/tools/ManifestGeneratorTool.md
@@ -0,0 +1,287 @@
+# Manifest Package Generator for Python Backend
+
+This tool generates the **ManifestPackage.nupkg** file required for Microsoft Fabric workload registration. It creates the exact same package structure as the C# backend by using the same `.nuspec` template files.
+
+## 🚀 Quick Start
+
+```bash
+# Navigate to tools directory
+cd Backend/python/tools
+
+# Generate your manifest package (takes < 5 seconds)
+python manifest_package_generator.py --version 1.0.0
+```
+
+**That's it!** You now have:
+- ✅ `ManifestPackage.1.0.0.nupkg` in `python/bin/Debug/`
+- ✅ `ManifestPackage.1.0.0.buildinfo.json` with build metadata
+- ✅ Your package validated and ready to use
+
+## 📦 What Does This Tool Do?
+
+The manifest package tells Microsoft Fabric how to register and communicate with your Python backend. It contains:
+
+```
+ManifestPackage.1.0.0.nupkg
+├── BE/ # Backend configuration
+│ ├── WorkloadManifest.xml # Your workload settings
+│ └── Item1.xml # Item type definitions
+└── FE/ # Frontend assets
+ ├── Product.json # UI configuration
+ ├── Item1.json # Item-specific UI
+ └── assets/ # Images & translations
+```
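+
+Because a `.nupkg` is an ordinary zip archive, you can verify this layout yourself. A minimal sketch, assuming it runs from the `tools` directory against the default Debug output path:
+
+```python
+# List the contents of a generated package (a .nupkg is just a zip archive).
+import zipfile
+
+with zipfile.ZipFile("../bin/Debug/ManifestPackage.1.0.0.nupkg") as pkg:
+    for name in pkg.namelist():
+        print(name)  # expect BE/WorkloadManifest.xml, FE/Product.json, ...
+```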
+
+## 🎯 Common Scenarios
+
+### Development Environment
+
+**First time setup:**
+```bash
+# 1. Generate the manifest package
+cd Backend/python/tools
+python manifest_package_generator.py --version 1.0.0
+
+# 2. Create your own workload-dev-mode.json with the generated package path
+
+# 3. Start your FastAPI backend
+cd ../src
+python main.py
+
+# 4. Register with DevGateway
+```
+
+**Making changes to manifest files:**
+```bash
+# Edit your files in src/Packages/manifest/
+# Regenerate the package with a new version
+cd Backend/python/tools
+python manifest_package_generator.py --version 1.0.1
+```
+
+### Production Deployment
+
+**Generate a release package:**
+```bash
+# Uses the Release template (ManifestPackageRelease.nuspec)
+python manifest_package_generator.py --configuration Release --version 2.0.0
+```
+
+**CI/CD Pipeline:**
+```bash
+# In your deployment script
+cd Backend/python/tools
+python manifest_package_generator.py \
+ --configuration Release \
+ --version $BUILD_NUMBER \
+ --output-dir $ARTIFACT_PATH
+```
+
+## 🔧 Understanding Template Selection
+
+The `--configuration` parameter selects which `.nuspec` template file to use:
+
+| Parameter | Template File Used | Output Package | When to Use |
+|-----------|-------------------|----------------|-------------|
+| `Debug` (default) | `ManifestPackageDebug.nuspec` | `ManifestPackage.X.X.X.nupkg` | Development & testing |
+| `Release` | `ManifestPackageRelease.nuspec` | `ManifestPackageRelease.X.X.X.nupkg` | Production deployment |
+
+**Note**: Unlike the C# backend, the Python backend has no compile step, so the `--configuration` value only selects which `.nuspec` template supplies the package metadata.
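+
+The mapping in the table reduces to a single file-naming rule. A minimal sketch of that rule, using a hypothetical helper name `select_template` (the generator's actual internals may differ):
+
+```python
+from pathlib import Path
+
+def select_template(manifest_dir: Path, configuration: str = "Debug") -> Path:
+    """Pick the .nuspec template that matches the build configuration."""
+    template = manifest_dir / f"ManifestPackage{configuration}.nuspec"
+    if not template.is_file():
+        raise FileNotFoundError(f"Missing .nuspec template: {template}")
+    return template
+```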
+
+## 📋 Prerequisites
+
+Before running the generator, ensure these files exist:
+
+### Required Files
+```
+python/
+└── src/Packages/manifest/
+ ├── WorkloadManifest.xml # Your workload configuration
+ ├── ManifestPackageDebug.nuspec # Debug template
+ └── ManifestPackageRelease.nuspec # Release template
+```
+
+### Optional Files
+- `src/Packages/manifest/Item1.xml` - Item definitions (auto-generated if missing)
+- `Frontend/Package/` - Frontend assets (works without them)
+
+## 🛠️ Command Line Options
+
+```bash
+python manifest_package_generator.py [OPTIONS]
+
+Options:
+ --version VERSION Package version (default: 1.0.0)
+ --configuration CONFIG Debug or Release (default: Debug)
+ --output-dir DIR Output directory (default: ./bin/{configuration})
+ --project-root DIR Project root (default: auto-detected)
+```
+
+### Examples
+
+```bash
+# Generate development package with custom version
+python manifest_package_generator.py --version 1.0.1
+
+# Generate release package
+python manifest_package_generator.py --configuration Release --version 2.0.0
+
+# Custom output location
+python manifest_package_generator.py --version 1.0.1 --output-dir ./artifacts
+
+# Run from any directory
+cd /anywhere
+python /path/to/manifest_package_generator.py --project-root /path/to/PythonBackend --version 1.0.0
+```
+
+## 🐛 Troubleshooting
+
+### "Missing required files"
+```
+❌ Missing required files:
+ - src/Packages/manifest/WorkloadManifest.xml
+```
+**Solution:** Ensure all required files exist. The tool shows the exact paths it's looking for.
+
+### "WorkloadName must have 'Org.' prefix"
+```
+❌ WorkloadName 'MyWorkload' must have 'Org.' prefix
+```
+**Solution:** Edit `WorkloadManifest.xml` so the workload name carries the `Org.` prefix, for example `Org.CompanyName.WorkloadName`
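+
+To catch this before packaging, a quick pre-flight check (illustrative only, not the generator's actual validation code):
+
+```python
+import re
+
+def check_workload_name(name: str) -> None:
+    """Raise if the workload name lacks the required 'Org.' prefix."""
+    if not re.match(r"^Org\.", name):
+        raise ValueError(f"WorkloadName '{name}' must have 'Org.' prefix")
+```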
+
+### "Manifest directory not found"
+```
+❌ Error: Manifest directory not found at: ...
+Current directory: ...
+Project root: ...
+```
+**Solution:** Run from the tools directory or specify `--project-root` explicitly.
+
+### Package seems empty
+```bash
+# Inspect package contents
+unzip -l ManifestPackage.1.0.0.nupkg
+
+# On Windows
+7z l ManifestPackage.1.0.0.nupkg
+```
+
+## 🔄 Complete Development Workflow
+
+### Local Development Script
+```bash
+#!/bin/bash
+# dev-start.sh - Complete development setup
+
+echo "🚀 Starting Fabric Python Backend Development"
+
+# 1. Generate/update manifest package
+cd Backend/python/tools
+python manifest_package_generator.py --version 1.0.1
+
+# 2. Start backend
+cd ../src
+python main.py
+```
+
+### Windows Batch Script
+```batch
+@echo off
+REM dev-start.bat - Windows development setup
+
+echo Starting Fabric Python Backend Development...
+
+REM 1. Generate/update manifest package
+cd Backend\python\tools
+python manifest_package_generator.py --version 1.0.1
+
+REM 2. Start backend
+cd ..\src
+python main.py
+```
+
+### Docker Integration
+```dockerfile
+# In your Dockerfile
+WORKDIR /app/python
+
+# Copy source files
+COPY . .
+
+# Declare the build argument used below (override with --build-arg BUILD_VERSION=x.y.z)
+ARG BUILD_VERSION=1.0.0
+
+# Generate manifest package during build
+RUN cd tools && python manifest_package_generator.py --configuration Release --version ${BUILD_VERSION}
+
+# Start the backend
+WORKDIR /app/python/src
+CMD ["python", "main.py"]
+```
+
+### GitHub Actions
+```yaml
+name: Build and Test
+on: [push, pull_request]
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.11'
+
+      - name: Install dependencies
+        run: |
+          cd Backend/python
+          pip install -r requirements.txt
+
+      - name: Generate manifest packages
+        run: |
+          cd Backend/python/tools
+          # Development package
+          python manifest_package_generator.py --version 1.0.${{ github.run_number }}
+          # Release package
+          python manifest_package_generator.py --configuration Release --version 1.0.${{ github.run_number }}
+
+      - name: Run tests
+        run: |
+          cd Backend/python
+          pytest
+
+      - name: Upload artifacts
+        uses: actions/upload-artifact@v3
+        with:
+          name: manifest-packages
+          path: Backend/python/bin/**/*.nupkg
+```
+
+## 📝 Important Notes
+
+1. **Backend Start Command**: Always start your backend from the `src` directory using `python main.py`
+2. **Version Management**: Use semantic versioning (1.0.0, 1.0.1, etc.) for your packages
+3. **Build Info**: Each package generates a `.buildinfo.json` file for tracking (see the sketch after this list)
+4. **Auto-detection**: The tool automatically finds your Python Backend directory when run from tools/
+5. **Compatibility**: This tool produces the exact same output as the C# build process
+
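+The `.buildinfo.json` mentioned in note 3 can be read back like this (field names match the generator's `create_build_info`; the path is illustrative):
+
+```python
+import json
+from pathlib import Path
+
+info = json.loads(Path("bin/Debug/ManifestPackage.1.0.1.buildinfo.json").read_text())
+print(info["version"], info["configuration"], info["build_time"], info["workload_name"])
+```
+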
+## 🆘 Getting Help
+
+1. **Check error messages** - They show exact file paths and current directory
+2. **Use DEBUG mode** - Set `DEBUG=1` environment variable for detailed output
+3. **Inspect the package** - Use `unzip -l` to verify contents
+4. **Check the templates** - Review `.nuspec` files for package structure
+
+## 📚 Additional Resources
+
+- [Backend Setup Guide](../README.md) - Complete backend documentation
+- [Fabric Workload Documentation](https://docs.microsoft.com/fabric) - Official docs
+- [API Reference](../docs/api.md) - Your API endpoints
+
+---
+
+**Quick Reference:**
+- **Generate package:** `python manifest_package_generator.py --version 1.0.1`
+- **Start backend:** `cd Backend/python/src && python main.py`
+- **Check package:** `unzip -l ManifestPackage.1.0.1.nupkg`
+- **Debug mode:** `DEBUG=1 python manifest_package_generator.py --version 1.0.1`
\ No newline at end of file
diff --git a/Backend/python/tools/manifest_package_generator.py b/Backend/python/tools/manifest_package_generator.py
new file mode 100644
index 0000000..4d2bf70
--- /dev/null
+++ b/Backend/python/tools/manifest_package_generator.py
@@ -0,0 +1,531 @@
+#!/usr/bin/env python3
+"""
+Manifest Package Generator for Microsoft Fabric Workload
+
+Production-ready tool that generates ManifestPackage.nupkg files, mirroring the C# build process
+exactly using the existing .nuspec template files.
+
+Usage:
+    python manifest_package_generator.py [--version VERSION] [--configuration CONFIG] [--output-dir OUTPUT_DIR] [--project-root DIR]
+"""
+
+import argparse
+import json
+import os
+import shutil
+import sys
+import tempfile
+import zipfile
+import xml.etree.ElementTree as ET
+from datetime import datetime, timezone
+from pathlib import Path
+from typing import Optional, Dict, Any
+
+
+class ManifestPackageGenerator:
+ def __init__(self, project_root: str, version: str = "1.0.0", build_config: str = "Debug"):
+ """
+ Initialize the manifest package generator.
+
+ Args:
+ project_root: Root directory of the Python project
+ version: Version for the package (default: 1.0.0)
+ build_config: Build configuration - "Debug" or "Release"
+ """
+ self.project_root = Path(project_root).resolve()
+ self.version = version
+ self.build_config = build_config
+ self.manifest_dir = self.project_root / "src" / "Packages" / "manifest"
+ self.frontend_package_dir = self.project_root.parent.parent / "Frontend" / "Package"
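+        # Layout assumption: project_root is <repo>/Backend/python, so this resolves to <repo>/Frontend/Package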
+
+ # Validate project structure
+ if not self.manifest_dir.exists():
+ raise ValueError(
+ f"Manifest directory not found at: {self.manifest_dir}\n"
+ f"Current directory: {Path.cwd()}\n"
+ f"Project root: {self.project_root}\n"
+ f"Please ensure you're running from the correct location or specify --project-root"
+ )
+
+ # Select nuspec file based on build configuration
+ if build_config.lower() == "release":
+ self.nuspec_file = self.manifest_dir / "ManifestPackageRelease.nuspec"
+ self.package_id = "ManifestPackageRelease"
+ else:
+ self.nuspec_file = self.manifest_dir / "ManifestPackageDebug.nuspec"
+ self.package_id = "ManifestPackage"
+
+ def validate_source_files(self) -> bool:
+ """Validate that all required source files exist."""
+ required_files = [
+ self.manifest_dir / "WorkloadManifest.xml",
+ self.nuspec_file
+ ]
+
+ # Item1.xml is optional - will create template if missing
+ optional_files = [
+ self.manifest_dir / "Item1.xml"
+ ]
+
+ missing_files = []
+ for file_path in required_files:
+ if not file_path.exists():
+ missing_files.append(str(file_path))
+
+ if missing_files:
+ print(f"❌ Missing required files:")
+ for file_path in missing_files:
+ print(f" - {file_path}")
+ return False
+
+ # Check optional files
+ for file_path in optional_files:
+ if not file_path.exists():
+ print(f"⚠️ Optional file missing: {file_path} (will create template)")
+
+ print("✅ All required source files found")
+ return True
+
+ def run_validation_steps(self) -> bool:
+ """Run validation steps that mirror C# PreBuild process."""
+ print("🔍 Running validation steps...")
+
+ # Step 1: Validate XML files
+ if not self.validate_xml_files():
+ return False
+
+ # Step 2: Validate workload configuration
+ if not self.validate_workload_configuration():
+ return False
+
+ print("✅ All validation steps passed")
+ return True
+
+ def validate_xml_files(self) -> bool:
+ """Validate XML files for well-formedness and content."""
+ xml_files = ["WorkloadManifest.xml", "Item1.xml"]
+
+ for xml_file in xml_files:
+ xml_path = self.manifest_dir / xml_file
+
+ if not xml_path.exists():
+ if xml_file == "Item1.xml":
+ print(f"⚠️ {xml_file} not found, will create template")
+ continue
+ else:
+ print(f"❌ Required file not found: {xml_path}")
+ return False
+
+ # Validate XML structure
+ try:
+ tree = ET.parse(xml_path)
+ print(f"✅ {xml_file} is valid XML")
+
+ # Additional validation for WorkloadManifest
+ if xml_file == "WorkloadManifest.xml":
+ if not self.validate_workload_manifest_content(tree):
+ return False
+
+ except ET.ParseError as e:
+ print(f"❌ {xml_file} has XML errors: {e}")
+ return False
+
+ return True
+
+ def validate_workload_manifest_content(self, tree: ET.ElementTree) -> bool:
+ """Validate WorkloadManifest.xml content."""
+ root = tree.getroot()
+
+ # Find Workload element (handle namespaces)
+ workload_elem = None
+ for elem in root.iter():
+ if elem.tag.endswith('Workload'):
+ workload_elem = elem
+ break
+
+ if workload_elem is not None:
+ workload_name = workload_elem.get('WorkloadName')
+ if workload_name:
+ if not workload_name.startswith('Org.'):
+ print(f"❌ WorkloadName '{workload_name}' must have 'Org.' prefix")
+ return False
+ print(f"✅ WorkloadName format valid: {workload_name}")
+ else:
+ print("❌ WorkloadName attribute not found")
+ return False
+ else:
+ print("❌ Workload element not found in WorkloadManifest.xml")
+ return False
+
+ return True
+
+ def validate_workload_configuration(self) -> bool:
+ """Validate overall workload configuration."""
+ # Check for XSD files (optional but recommended)
+ xsd_files = ["WorkloadDefinition.xsd", "ItemDefinition.xsd"]
+ for xsd_file in xsd_files:
+ xsd_path = self.manifest_dir / xsd_file
+ if not xsd_path.exists():
+ print(f"⚠️ XSD schema not found: {xsd_path} (optional)")
+
+ return True
+
+ def get_workload_name(self) -> str:
+ """Extract WorkloadName from WorkloadManifest.xml."""
+ try:
+ manifest_path = self.manifest_dir / "WorkloadManifest.xml"
+ tree = ET.parse(manifest_path)
+ root = tree.getroot()
+
+ # Find Workload element (handle namespaces)
+ for elem in root.iter():
+ if elem.tag.endswith('Workload'):
+ workload_name = elem.get('WorkloadName')
+ if workload_name:
+ return workload_name
+ except Exception as e:
+ print(f"⚠️ Could not extract WorkloadName: {e}")
+
+ return 'Org.WorkloadSample'
+
+ def load_and_update_nuspec(self) -> str:
+ """Load the nuspec template and update version."""
+ if not self.nuspec_file.exists():
+ raise FileNotFoundError(f"Nuspec template not found: {self.nuspec_file}")
+
+ print(f"📄 Using nuspec template: {self.nuspec_file.name}")
+
+ # Read the template
+ nuspec_content = self.nuspec_file.read_text(encoding='utf-8')
+
+ # Update version in the template
+ try:
+            # Register the default namespace so ElementTree doesn't emit ns0: prefixes on serialization
+ ET.register_namespace('', 'http://schemas.microsoft.com/packaging/2010/07/nuspec.xsd')
+ root = ET.fromstring(nuspec_content)
+
+ # Find version element
+ for elem in root.iter():
+ if elem.tag.endswith('version'):
+ elem.text = self.version
+ print(f"✅ Updated package version to: {self.version}")
+ break
+
+ # Return updated XML
+ return ET.tostring(root, encoding='unicode', xml_declaration=True)
+
+        except ET.ParseError:
+            # Fallback: naive string replacement (swaps every '1.0.0' occurrence in the template)
+            print("⚠️ Using string replacement for version update")
+            return nuspec_content.replace('1.0.0', self.version)
+
+ def create_item1_template(self, output_path: Path) -> None:
+ """Create Item1.xml with correct workload name."""
+ workload_name = self.get_workload_name()
+ item_type = f"{workload_name}.SampleWorkloadItem"
+
+        # Minimal item manifest template; the element structure mirrors what the
+        # validation scripts expect (ItemManifestConfiguration > Item > Workload/JobScheduler).
+        # The SchemaVersion value is illustrative -- adjust it to your target schema.
+        template_content = f'''<?xml version="1.0" encoding="utf-8" ?>
+<ItemManifestConfiguration SchemaVersion="1.100.0" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <Item TypeName="{item_type}" Category="Data">
+    <Workload WorkloadName="{workload_name}" />
+    <JobScheduler>
+      <OnDemandJobDeduplicateOptions>PerItem</OnDemandJobDeduplicateOptions>
+      <ScheduledJobDeduplicateOptions>PerItem</ScheduledJobDeduplicateOptions>
+      <ItemJobTypes>
+        <ItemJobType Name="{item_type}.ScheduledJob" />
+      </ItemJobTypes>
+    </JobScheduler>
+  </Item>
+</ItemManifestConfiguration>
+'''
+
+ output_path.write_text(template_content, encoding='utf-8')
+ print(f"✅ Created Item1.xml template with WorkloadName: {workload_name}")
+
+ def process_frontend_pattern(self, zipf: zipfile.ZipFile, src_pattern: str, target: str) -> None:
+ """Process frontend file patterns from nuspec."""
+ print(f"📁 Processing frontend pattern: {src_pattern} -> {target}")
+
+ # Convert Windows-style paths to cross-platform
+ src_pattern = src_pattern.replace('\\', '/')
+
+ # Handle relative paths from nuspec
+ if src_pattern.startswith('../../../../Frontend/Package'):
+ # Remove the relative part and get actual path
+ pattern = src_pattern.replace('../../../../Frontend/Package/', '')
+ pattern = pattern.rstrip('/') # Remove trailing slash if any
+
+ if not self.frontend_package_dir.exists():
+ print(f"⚠️ Frontend package directory not found: {self.frontend_package_dir}")
+ return
+
+ if pattern == '*':
+ # Add all files in Package directory (non-recursive)
+ for file_path in self.frontend_package_dir.glob('*'):
+ if file_path.is_file():
+ zipf.write(file_path, f"{target}/{file_path.name}")
+ print(f" Added: {file_path.name}")
+
+ elif pattern.startswith('assets/'):
+ # Handle assets directory patterns
+ if pattern.endswith('**') or pattern.endswith('**/*'):
+ # Add all files in assets directory recursively
+ assets_dir = self.frontend_package_dir / "assets"
+ if assets_dir.exists():
+ for root, dirs, files in os.walk(assets_dir):
+ for file in files:
+ file_path = Path(root) / file
+ rel_path = file_path.relative_to(assets_dir)
+ zipf.write(file_path, f"{target}/{rel_path}")
+ print(f" Added: {rel_path}")
+ else:
+ # Specific assets file
+ asset_file = self.frontend_package_dir / pattern
+ if asset_file.exists():
+ zipf.write(asset_file, f"{target}/{pattern}")
+ print(f" Added: {pattern}")
+ else:
+ # Specific file
+ file_path = self.frontend_package_dir / pattern
+ if file_path.exists():
+ zipf.write(file_path, f"{target}/{pattern}")
+ print(f" Added: {pattern}")
+
+ def add_frontend_files_from_nuspec(self, zipf: zipfile.ZipFile, nuspec_content: str) -> None:
+ """Add frontend files based on nuspec file patterns."""
+ print("🎨 Adding frontend files based on nuspec patterns...")
+
+ try:
+ root = ET.fromstring(nuspec_content)
+
+ # Find all file elements
+ for elem in root.iter():
+ if elem.tag.endswith('file'):
+ src = elem.get('src')
+ target = elem.get('target')
+
+ if src and target and 'Frontend' in src:
+ self.process_frontend_pattern(zipf, src, target)
+
+ except ET.ParseError as e:
+ print(f"⚠️ Could not parse nuspec for frontend patterns: {e}")
+ # Fallback: add basic frontend files
+ self.add_basic_frontend_files(zipf)
+
+ def add_basic_frontend_files(self, zipf: zipfile.ZipFile) -> None:
+ """Fallback method to add basic frontend files."""
+ print("📄 Adding basic frontend files (fallback)...")
+
+ if not self.frontend_package_dir.exists():
+ print(f"⚠️ Frontend package directory not found: {self.frontend_package_dir}")
+ return
+
+ basic_files = ["Product.json", "Item1.json"]
+ for filename in basic_files:
+ file_path = self.frontend_package_dir / filename
+ if file_path.exists():
+ zipf.write(file_path, f"FE/{filename}")
+ print(f" Added: {filename}")
+
+ # Add assets directory if it exists
+ assets_dir = self.frontend_package_dir / "assets"
+ if assets_dir.exists():
+ for root, dirs, files in os.walk(assets_dir):
+ for file in files:
+ file_path = Path(root) / file
+ rel_path = file_path.relative_to(self.frontend_package_dir)
+ zipf.write(file_path, f"FE/{rel_path}")
+ print(f" Added: {rel_path}")
+
+    def create_nupkg_using_nuspec_template(self, output_dir: Optional[str] = None) -> str:
+ """Create .nupkg using the existing nuspec template."""
+ if output_dir is None:
+ output_dir = str(self.project_root / "bin" / self.build_config)
+
+ output_path = Path(output_dir)
+ output_path.mkdir(parents=True, exist_ok=True)
+
+ # Generate package filename
+ nupkg_filename = f"{self.package_id}.{self.version}.nupkg"
+ nupkg_path = output_path / nupkg_filename
+
+ print(f"🔧 Creating package: {nupkg_filename}")
+
+ with tempfile.TemporaryDirectory() as temp_dir:
+ temp_path = Path(temp_dir)
+
+ # Step 1: Copy manifest files to temp directory
+ print("📄 Copying backend manifest files...")
+ shutil.copy2(self.manifest_dir / "WorkloadManifest.xml", temp_path)
+
+ item1_path = self.manifest_dir / "Item1.xml"
+ if item1_path.exists():
+ shutil.copy2(item1_path, temp_path)
+ else:
+ self.create_item1_template(temp_path / "Item1.xml")
+
+ # Step 2: Load and process nuspec template
+ print("📋 Processing nuspec template...")
+ nuspec_content = self.load_and_update_nuspec()
+ nuspec_path = temp_path / self.nuspec_file.name
+ nuspec_path.write_text(nuspec_content, encoding='utf-8')
+
+ # Step 3: Create the .nupkg file
+ print("📦 Creating .nupkg file...")
+ with zipfile.ZipFile(nupkg_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
+ # Add nuspec to root of package
+ zipf.write(nuspec_path, self.nuspec_file.name)
+
+ # Add BE files
+ zipf.write(temp_path / "WorkloadManifest.xml", "BE/WorkloadManifest.xml")
+ if (temp_path / "Item1.xml").exists():
+ zipf.write(temp_path / "Item1.xml", "BE/Item1.xml")
+
+ # Add FE files
+ self.add_frontend_files_from_nuspec(zipf, nuspec_content)
+
+ return str(nupkg_path)
+
+ def create_build_info(self, output_dir: Path) -> None:
+ """Create build information file for tracking."""
+ build_info = {
+ "version": self.version,
+ "configuration": self.build_config,
+ "build_time": datetime.now(timezone.utc).isoformat(),
+ "package_id": self.package_id,
+ "python_version": f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}",
+ "manifest_dir": str(self.manifest_dir),
+ "frontend_dir": str(self.frontend_package_dir),
+ "workload_name": self.get_workload_name()
+ }
+
+ build_info_file = output_dir / f"{self.package_id}.{self.version}.buildinfo.json"
+ build_info_file.write_text(json.dumps(build_info, indent=2), encoding='utf-8')
+ print(f"📄 Created build info: {build_info_file.name}")
+
+ def validate_version_format(self) -> bool:
+ """Validate semantic versioning format."""
+ import re
+ pattern = r'^\d+\.\d+\.\d+(-[a-zA-Z0-9.-]+)?(\+[a-zA-Z0-9.-]+)?$'
+ if not re.match(pattern, self.version):
+ print(f"⚠️ Version '{self.version}' doesn't follow semantic versioning (e.g., 1.0.0, 1.0.0-beta.1)")
+ return False
+ return True
+
+    def generate(self, output_dir: Optional[str] = None) -> str:
+ """
+ Generate the manifest package using nuspec templates.
+
+ Args:
+ output_dir: Output directory for the .nupkg file
+
+ Returns:
+ Path to the generated .nupkg file
+ """
+ print(f"🔧 Generating Manifest Package v{self.version} ({self.build_config} configuration)")
+ print(f"📁 Project root: {self.project_root}")
+ print(f"📄 Manifest directory: {self.manifest_dir}")
+ print(f"📋 Using nuspec: {self.nuspec_file.name}")
+ print(f"🎨 Frontend package directory: {self.frontend_package_dir}")
+ print()
+
+ # Validate version format
+ if not self.validate_version_format():
+ print("⚠️ Consider using semantic versioning for production")
+
+ # Step 1: Validate source files
+ if not self.validate_source_files():
+ raise FileNotFoundError("Required source files are missing")
+
+ # Step 2: Run validation steps
+ if not self.run_validation_steps():
+ raise ValueError("Validation failed")
+
+ # Step 3: Create the package
+ nupkg_path = self.create_nupkg_using_nuspec_template(output_dir)
+
+ # Step 4: Create build info
+ output_path = Path(nupkg_path).parent
+ self.create_build_info(output_path)
+
+ print()
+ print(f"✅ Manifest package created successfully!")
+ print(f"📦 Package location: {nupkg_path}")
+ print(f"📏 Package size: {Path(nupkg_path).stat().st_size:,} bytes")
+ print(f"🏷️ Package ID: {self.package_id}")
+ print(f"🔢 Version: {self.version}")
+ print(f"⚙️ Configuration: {self.build_config}")
+
+ return nupkg_path
+
+
+def find_python_backend_root() -> str:
+ """Find the Python Backend root directory."""
+ current = Path(__file__).resolve().parent
+
+ # If we're in the tools directory, parent should be Python Backend
+ if current.name == 'tools' and (current.parent / 'src' / 'Packages' / 'manifest').exists():
+ return str(current.parent)
+
+ # Search up the directory tree
+ search_path = current
+ while search_path != search_path.parent:
+ if (search_path / 'src' / 'Packages' / 'manifest').exists():
+ return str(search_path)
+ search_path = search_path.parent
+
+ # Default to current directory
+ return "."
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description="Generate ManifestPackage.nupkg for Python Fabric Workload",
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ epilog="""
+Examples:
+ %(prog)s --version 1.0.1
+ %(prog)s --configuration Release --version 2.0.0
+ %(prog)s --output-dir ./artifacts --version 1.2.3
+ %(prog)s --project-root /path/to/PythonBackend --version 1.0.0
+ """
+ )
+
+ parser.add_argument("--version", default="1.0.0", help="Package version (default: 1.0.0)")
+ parser.add_argument("--configuration", choices=["Debug", "Release"], default="Debug",
+ help="Build configuration (default: Debug)")
+ parser.add_argument("--output-dir", help="Output directory (default: {project_root}/bin/{configuration})")
+ parser.add_argument("--project-root", default=find_python_backend_root(),
+ help="Project root directory (default: auto-detected)")
+
+ args = parser.parse_args()
+
+ try:
+ generator = ManifestPackageGenerator(
+ args.project_root,
+ args.version,
+ args.configuration
+ )
+
+ nupkg_path = generator.generate(args.output_dir)
+
+ print("\n" + "="*70)
+ print("🎉 SUCCESS: Manifest package generated!")
+ print(f"📦 Location: {nupkg_path}")
+ print("="*70)
+
+ except Exception as e:
+ print(f"❌ Error: {e}")
+ if os.environ.get('DEBUG'):
+ import traceback
+ traceback.print_exc()
+ return 1
+
+ return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())
\ No newline at end of file
diff --git a/Backend/src/Fabric_Extension_BE_Boilerplate.csproj b/Backend/src/Fabric_Extension_BE_Boilerplate.csproj
deleted file mode 100644
index e480318..0000000
--- a/Backend/src/Fabric_Extension_BE_Boilerplate.csproj
+++ /dev/null
@@ -1,53 +0,0 @@
-
-
-
- net8.0
- true
- PreBuild
- true
- true
-
-
-
- Packages\manifest\ManifestPackageRelease.nuspec
-
-
-
- Packages\manifest\ManifestPackageDebug.nuspec
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- powershell.exe
- pwsh
- $(ProjectDir)ValidationScripts
- $(ProjectDir)Packages\manifest
-
-
-
-
-
-
-
-
-
-
-
diff --git a/Backend/src/Packages/manifest/FrontendManifest.json b/Backend/src/Packages/manifest/FrontendManifest.json
deleted file mode 100644
index ff33797..0000000
--- a/Backend/src/Packages/manifest/FrontendManifest.json
+++ /dev/null
@@ -1,142 +0,0 @@
-{
- "artifacts": [],
- "extension": {"url":"appUrl"},
- "product": {
- "name": "idan",
- "displayName": "Idans data Analysis",
- "favicon": "Trident_API_Playground_Favicon.ico",
- "icon": {
- "sprite": "product-color-icons",
- "name": "analyze"
- },
- "homePage": {
- "learningMaterials": [
- {
- "title": "LearningMaterial_example1",
- "description": "LearningMaterial_example1_description",
- "onClick": {
- "extensionName": "react-example",
- "action": "open.learningMaterial.dialog1"
- },
- "visibilityChecker": "TestArtifact",
- "image": {
- "extensionName": "react-example",
- "path": "/assets/example-learning-material.jpeg"
- }
- },
- {
- "title": "LearningMaterial_example2",
- "description": "LearningMaterial_example2_description",
- "image": "https://th.bing.com/th/id/OIP.PBCsrpFNi1lnTiEvTbrmggHaEj?pid=ImgDet&rs=1",
- "onClick": {
- "extensionName": "react-example",
- "action": "open.learningMaterial.dialog2"
- },
- "visibilityChecker": {
- "action": "check.learningMaterial.dialog2.visibility",
- "extensionName": "react-example"
- }
- },
- {
- "title": "LearningMaterial_example3",
- "description": "LearningMaterial_example3_description",
- "link": "https://powerbi.visualstudio.com/Trident/_wiki/wikis/Trident.wiki/16197/Onboarding-to-Workload-Homepage-Template",
- "image": {
- "extensionName": "react-example",
- "path": "/assets/example-learning-material.jpeg"
- }
- }
- ],
- "recommendedArtifactTypes": [
- "HomeOne"
- ]
- },
- "createExperience": {
- "description": "CreateHub_Product_Description",
- "cards": [
- {
- "title": "Sentiment Analysis",
- "description": "Enrich your data by adding sentiment Analysis",
- "icon": {
- "name": "pipeline_24_regular"
- },
- "icon_small": {
- "name": "pipeline_20_regular"
- },
- "onClick": {
- "extensionName": "my-extension",
- "action": "open.createSentimentAnalysis"
- },
- "availableIn": [
- "home",
- "create-hub",
- "workspace-plus-new"
- ],
- "visibilityChecker": "HomeOne"
- },
- {
- "title": "Translator",
- "description": "localize your data",
- "icon": {
- "sprite": "artifact-icons",
- "name": "lakehouse_24"
- },
- "icon_small": {
- "sprite": "artifact-icons",
- "name": "lakehouse_20"
- },
- "onClick": {
- "extensionName": "my-extension",
- "action": "open.createTranslater"
- },
- "availableIn": [
- "home",
- "create-hub",
- "workspace-plus-new"
- ],
- "visibilityChecker": "HomeOne"
- }
- ]
- },
- "workspaceSettings": {
- "getWorkspaceSettings": {
- "extensionName": "react-example",
- "action": "getWorkspaceSettings"
- }
- },
- "helpPanel": {
- "productCards": [
- {
- "title": "Extension API playground get started",
- "description": "Extension API playground documentation provides expert information and answers to get you started.",
- "link": "https://powerbi.visualstudio.com/Trident/_wiki/wikis/Trident.wiki/16197/Onboarding-to-Workload-Homepage-Template",
- "defaultCollapsed": false
- }
- ],
- "productPageFeatureCards": [
- {
- "pageName": "/home",
- "cards": [
- {
- "title": "Page Card",
- "description": "This is a Product Page Card of /home.",
- "link": "https://powerbi.visualstudio.com/Trident/_wiki/wikis/Trident.wiki/16197/Onboarding-to-Workload-Homepage-Template",
- "defaultCollapsed": false
- }
- ]
- },
- {
- "pageName": "/datahub",
- "cards": [
- {
- "title": "Page Card",
- "description": "This is a Product Page Card of /datahub.",
- "link": "https://powerbi.visualstudio.com/Trident/_wiki/wikis/Trident.wiki/16197/Onboarding-to-Workload-Homepage-Template",
- "defaultCollapsed": false
- }
- ]
- }
- ]
- }
- }
-}
\ No newline at end of file
diff --git a/Backend/src/ValidationScripts/ItemManifestValidator.ps1 b/Backend/src/ValidationScripts/ItemManifestValidator.ps1
deleted file mode 100644
index f151243..0000000
--- a/Backend/src/ValidationScripts/ItemManifestValidator.ps1
+++ /dev/null
@@ -1,54 +0,0 @@
-param (
- [string]$inputDirectory,
- [string]$inputXsd,
- [string]$outputDirectory
-)
-try
-{
- if (-not($inputDirectory -and $inputXsd -and $outputDirectory))
- {
- throw "Invalid input"
- }
- $workloadManifest = "WorkloadManifest.xml"
- $workloadXmlPath = Join-Path $inputDirectory $workloadManifest
- $workloadXml = [xml](Get-Content -Path $workloadXmlPath)
- $workloadName = $workloadXml.WorkloadManifestConfiguration.Workload.WorkloadName
- $itemXmls = Get-ChildItem -Path $inputDirectory -Filter "*.xml"
- foreach ($itemXml in $itemXmls)
- {
- if ($itemXml.Name -ne $workloadManifest)
- {
- $manifestValidatorPath = Join-Path $PSScriptRoot "ManifestValidator.ps1"
- & $manifestValidatorPath -inputDirectory $inputDirectory -inputXml $itemXml.Name -inputXsd $inputXsd -outputDirectory $outputDirectory
- # Naming Validations
- $itemXmlPath = $itemXml.FullName
- $xdoc = [xml](Get-Content -Path $itemXmlPath)
- $itemWorkloadName = $xdoc.ItemManifestConfiguration.Item.Workload.WorkloadName
- if ($itemWorkloadName -ne $workloadName)
- {
- $scriptPath = Join-Path $PSScriptRoot "WriteErrorsToFile.ps1"
- & $scriptPath -errors "Non matching WorkloadName between WorkloadManifest.xml and $($itemXml.Name)" -outputDirectory $outputDirectory
- }
- $itemName = $xdoc.ItemManifestConfiguration.Item.TypeName
- if (-not ($itemName -clike "$($itemWorkloadName).*"))
- {
- $scriptPath = Join-Path $PSScriptRoot "WriteErrorsToFile.ps1"
- & $scriptPath -errors "Item name's prefix should be WorkloadName for item $($itemName)" -outputDirectory $outputDirectory
- }
- $jobNames = $xdoc.SelectNodes("//ItemJobType")
- foreach ($jobName in $jobNames)
- {
- if (-not ($jobName.Name -clike "$($itemName).*"))
- {
- $scriptPath = Join-Path $PSScriptRoot "WriteErrorsToFile.ps1"
- & $scriptPath -errors "Job type name's prefix should be ItemName for jobType $($jobName.Name)" -outputDirectory $outputDirectory
- }
- }
- }
- }
-}
-catch
-{
- Write-Host "An error occurred:"
- Write-Host $_
-}
\ No newline at end of file
diff --git a/Backend/src/ValidationScripts/ManifestValidator.ps1 b/Backend/src/ValidationScripts/ManifestValidator.ps1
deleted file mode 100644
index c721ed4..0000000
--- a/Backend/src/ValidationScripts/ManifestValidator.ps1
+++ /dev/null
@@ -1,48 +0,0 @@
-param (
- [string]$inputDirectory,
- [string]$inputXml,
- [string]$inputXsd,
- [string]$outputDirectory
-)
-try
-{
- if (-not($inputDirectory -and $inputXml -and $inputXsd -and $outputDirectory))
- {
- throw "Invalid input"
- }
- $schemaSet = [System.Xml.Schema.XmlSchemaSet]::new()
- $schema = [System.Xml.Schema.XmlSchema]::Read([System.IO.StreamReader](Join-Path $inputDirectory $inputXsd), $null)
- $schemaCommon = [System.Xml.Schema.XmlSchema]::Read([System.IO.StreamReader](Join-Path $inputDirectory "CommonTypesDefinitions.xsd"), $null)
- $schemaSet.Add($schema)
- $schemaSet.Add($schemaCommon)
- $settings = [System.Xml.XmlReaderSettings]::new()
- $settings.ValidationType = [System.Xml.ValidationType]::Schema
- $settings.ValidationFlags = [System.Xml.Schema.XmlSchemaValidationFlags]::ReportValidationWarnings
- $settings.DtdProcessing = [System.Xml.DtdProcessing]::Prohibit
- $settings.Schemas.Add($schemaSet)
- $handler = [System.Xml.Schema.ValidationEventHandler] {
- $args = $_ # entering new block so copy $_
- if ($args.Severity -eq [System.Xml.Schema.XmlSeverityType]::Warning -or $args.Severity -eq [System.Xml.Schema.XmlSeverityType]::Error)
- {
- $scriptPath = Join-Path $PSScriptRoot "WriteErrorsToFile.ps1"
- & $scriptPath -errors "$($args.Message)`r`n" -outputDirectory $outputDirectory
- }
- }
- $settings.add_ValidationEventHandler($handler)
- $reader = [System.Xml.XmlReader]::Create([string](Join-Path $inputDirectory $inputXml), [System.Xml.XmlReaderSettings]$settings);
- while ($reader.Read()) { }
-
- $workloadXml = [xml](Get-Content -Path (Join-Path $inputDirectory $inputXml))
- $workloadName = $workloadXml.WorkloadManifestConfiguration.Workload.WorkloadName
- $aadApp = $workloadXml.SelectSingleNode("//AADApp")
- if ($aadApp -and (-not ($aadApp.ResourceId -clike "*$($workloadName)")) -and (-not ($aadApp.ResourceId -clike "*$($workloadName)/*")))
- {
- $scriptPath = Join-Path $PSScriptRoot "WriteErrorsToFile.ps1"
- & $scriptPath -errors "AADApp.resourceId: $($aadApp.ResourceId), should contain the exact WorkloadName: $($workloadName)" -outputDirectory $outputDirectory
- }
-}
-catch
-{
- Write-Host "An error occurred:"
- Write-Host $_
-}
\ No newline at end of file
diff --git a/Backend/src/ValidationScripts/ValidateNoDefaults.ps1 b/Backend/src/ValidationScripts/ValidateNoDefaults.ps1
deleted file mode 100644
index 00a9921..0000000
--- a/Backend/src/ValidationScripts/ValidateNoDefaults.ps1
+++ /dev/null
@@ -1,25 +0,0 @@
-param (
- [string]$outputDirectory
-)
-try
-{
- if (-not($outputDirectory))
- {
- throw "Invalid input"
- }
- $appSettingsPath = "appsettings.json"
- $appSettingsContent = (Get-Content $appSettingsPath) -replace '// .*', '' -join [Environment]::NewLine | ConvertFrom-Json
- $workloadXmlPath = Join-Path -Path $PSScriptRoot -ChildPath "..\Packages\manifest\WorkloadManifest.xml"
- $workloadXml = [xml](Get-Content -Path $workloadXmlPath)
- $aadApp = $workloadXml.SelectSingleNode("//AADApp")
- if (($appSettingsContent.Audience -ne $aadApp.ResourceId) -or ($appSettingsContent.ClientId -ne $aadApp.AppId))
- {
- $scriptPath = Join-Path -Path $PSScriptRoot -ChildPath "WriteErrorsToFile.ps1"
- & $scriptPath -errors "Non matching default values in WorkloadManifest.xml file" -outputDirectory $outputDirectory
- }
-}
-catch
-{
- Write-Host "An error occurred:"
- Write-Host $_
-}
\ No newline at end of file
diff --git a/Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Properties/launchSettings.json b/Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Properties/launchSettings.json
deleted file mode 100644
index 8c7b7a6..0000000
--- a/Backend/test/Fabric_Extension_BE_Boilerplate_UnitTests/Properties/launchSettings.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
- "profiles": {
- "Fabric_Extension_BE_Boilerplate_UnitTests": {
- "commandName": "Project",
- "launchBrowser": true,
- "environmentVariables": {
- "ASPNETCORE_ENVIRONMENT": "Development"
- },
- "applicationUrl": "https://localhost:54848;http://localhost:54849"
- }
- }
-}
\ No newline at end of file
diff --git a/Frontend/Package/Item1.json b/Frontend/Package/Item1.json
index bd7f3f1..db3c79f 100644
--- a/Frontend/Package/Item1.json
+++ b/Frontend/Package/Item1.json
@@ -2,6 +2,7 @@
"name": "SampleWorkloadItem",
"displayName": "Item_1_DisplayName",
"displayNamePlural": "Item_1_DisplayName_Plural",
+ "version": "1.100",
"editor": {
"path": "/sample-workload-editor"
},
@@ -79,5 +80,12 @@
"canDestroy": "sample.tab.canDestroy",
"onDestroy": "sample.tab.onDestroy",
"onDelete": "sample.tab.onDelete"
- }
+ },
+ "createItemDialogConfig": {
+ "onCreationFailure": { "action": "item.onCreationFailure" },
+ "onCreationSuccess": { "action": "item.onCreationSuccess" }
+ },
+ "oneLakeCatalogCategory": [
+ "Process"
+ ]
}
diff --git a/Frontend/Package/Product.json b/Frontend/Package/Product.json
index 7395cc3..2554a11 100644
--- a/Frontend/Package/Product.json
+++ b/Frontend/Package/Product.json
@@ -1,5 +1,6 @@
{
"name": "Product",
+ "version": "1.100",
"displayName": "Workload_Display_Name",
"fullDisplayName": "Workload_Full_Display_Name",
"description": "Workload_Description",
@@ -80,6 +81,27 @@
"workspace-plus-new-teams"
],
"itemType": "SampleWorkloadItem"
+ },
+ {
+ "title": "CreateHub_Card_2_Title",
+ "description": "CreateHub_Card_2_Description",
+ "icon": {
+ "name": "assets/images/dial.png"
+ },
+ "icon_small": {
+ "name": "assets/images/dial.png"
+ },
+ "availableIn": [
+ "home",
+ "create-hub",
+ "workspace-plus-new",
+ "workspace-plus-new-teams"
+ ],
+ "itemType": "SampleWorkloadItem",
+ "createItemDialogConfig": {
+ "onCreationFailure": { "action": "item.onCreationFailure" },
+ "onCreationSuccess": { "action": "item.onCreationSuccess" }
+ }
}
]
},
diff --git a/Frontend/Package/assets/locales/en-US/translations.json b/Frontend/Package/assets/locales/en-US/translations.json
index fb663ac..f90cca5 100644
--- a/Frontend/Package/assets/locales/en-US/translations.json
+++ b/Frontend/Package/assets/locales/en-US/translations.json
@@ -18,6 +18,8 @@
"CreateHub_Workload_Description":"CreateHub Product Description",
"CreateHub_Card_1_Title": "Sample Item",
"CreateHub_Card_1_Description": "Create a Sample Workload Item for your Dev Experience",
+ "CreateHub_Card_2_Title": "Sample Item (Standard)",
+ "CreateHub_Card_2_Description": "Create a Sample Workload Item for your Dev Experience (Standard)",
"Workload_Hub_Workload_Slogan": "The quickest way from a billion points of data to a point of view.",
"Workload_Hub_Workload_Description": "Contoso workload enables organizations to conduct real-time customer interactions and automate operational business decisions at scale. It supports a wide variety of customer-facing activities such as personalized marketing and next best action, and customer-impacting decisions including credit services and fraud prevention.",
"Item_1_DisplayName": "Sample Workload Item",
diff --git a/Frontend/Package/assets/locales/es/translations.json b/Frontend/Package/assets/locales/es/translations.json
index 14b7744..b01eacb 100644
--- a/Frontend/Package/assets/locales/es/translations.json
+++ b/Frontend/Package/assets/locales/es/translations.json
@@ -18,6 +18,8 @@
"CreateHub_Workload_Description": "Descripción del producto CreateHub",
"CreateHub_Card_1_Title": "Elemento de muestra",
"CreateHub_Card_1_Description": "Cree un elemento de carga de trabajo de muestra para su experiencia de desarrollo",
+ "CreateHub_Card_2_Title": "Elemento de muestra (Estándar)",
+ "CreateHub_Card_2_Description": "Cree un elemento de carga de trabajo de muestra para su experiencia de desarrollo (Estándar)",
"Workload_Hub_Workload_Slogan": "La forma más rápida de pasar de mil millones de puntos de datos a un punto de vista.",
"Workload_Hub_Workload_Description": "La carga de trabajo de Contoso permite a las organizaciones interactuar con los clientes en tiempo real y automatizar las decisiones comerciales operativas a gran escala. Admite una amplia variedad de actividades orientadas al cliente, como marketing personalizado y la siguiente mejor acción, y decisiones que afectan al cliente, incluidos los servicios de crédito y la prevención del fraude.",
"Item_1_DisplayName": "Elemento de carga de trabajo de muestra",
diff --git a/Frontend/package.json b/Frontend/package.json
index aadb0db..bf4e217 100644
--- a/Frontend/package.json
+++ b/Frontend/package.json
@@ -5,7 +5,7 @@
"main": "index.js",
"scripts": {
"start": "node validation/generate-nuspec.js && node validation/build-nuget.js && env-cmd -f .env.dev webpack serve --open --config ./tools/webpack.config.js",
- "build:test": "node validation/generate-nuspec.js && node validation/build-nuget.js && env-cmd -f .env.test webpack --config ./tools/webpack.config.js",
+ "build:test": "env-cmd -f .env.test webpack --config ./tools/webpack.config.js",
"build:prod": "env-cmd -f .env.prod webpack --config ./tools/webpack.config.js"
},
"author": "",
@@ -14,7 +14,7 @@
"@fluentui/react": "^8.110.7",
"@fluentui/react-components": "^9.7.2",
"@fluentui/react-icons": "2.0.226",
- "@ms-fabric/workload-client": ">2.0.0 <3.0.0",
+ "@ms-fabric/workload-client": "^2.1.0",
"@reduxjs/toolkit": "^2.6.0",
"history": "^4.9.0",
"i18next": "^25.4.1",
diff --git a/Frontend/src/components/ClientSDKPlayground/ActionDialog/MessageBoxExample.tsx b/Frontend/src/components/ClientSDKPlayground/ActionDialog/MessageBoxExample.tsx
index 5757dd6..537bca9 100644
--- a/Frontend/src/components/ClientSDKPlayground/ActionDialog/MessageBoxExample.tsx
+++ b/Frontend/src/components/ClientSDKPlayground/ActionDialog/MessageBoxExample.tsx
@@ -73,7 +73,7 @@ export function MessageBoxExample(props: TabContentProps) {
/>
;
}
+
+export interface ItemCreationFailureData {
+ errorCode?: string;
+ resultCode?: string;
+}
+
+export interface ItemCreationSuccessData {
+ item: ItemLikeV2;
+}
diff --git a/Frontend/src/utils.ts b/Frontend/src/utils.ts
index 139d1ae..42c958a 100644
--- a/Frontend/src/utils.ts
+++ b/Frontend/src/utils.ts
@@ -7,7 +7,7 @@ export function convertGetItemResultToWorkloadItem(item: GetItemResult): Work
if (item.workloadPayload) {
try {
payload = JSON.parse(item.workloadPayload);
- console.log(`Parsed payload of item ${item.objectId} is ${payload}`);
+ console.log(`Parsed payload of item ${item.objectId} is`, payload);
} catch (payloadParseError) {
console.error(`Failed parsing payload for item ${item.objectId}, payloadString: ${item.workloadPayload}`, payloadParseError);
}
diff --git a/SUPPORT.md b/SUPPORT.md
index 05efacd..61f0efb 100644
--- a/SUPPORT.md
+++ b/SUPPORT.md
@@ -11,4 +11,4 @@
- Check existing issues (open and closed) to avoid duplicates.
- Review our documentation and FAQs.
-👉 To open a new support request, go to: [New Issue](../../issues/new/choose)
\ No newline at end of file
+👉 To open a new support request, go to: [New Issue](../../issues/new/choose)
diff --git a/tools/DevGatewayContainer/Dockerfile b/tools/DevGatewayContainer/Dockerfile
index 6b1fc28..46c5f33 100644
--- a/tools/DevGatewayContainer/Dockerfile
+++ b/tools/DevGatewayContainer/Dockerfile
@@ -1,4 +1,4 @@
-# Use the official .NET 6.0 runtime as the base image
+# Use the official .NET 8.0 runtime as the base image
FROM mcr.microsoft.com/dotnet/aspnet:8.0 AS base
WORKDIR /app
diff --git a/tools/validation/Download-XSDComponents.ps1 b/tools/validation/Download-XSDComponents.ps1
new file mode 100644
index 0000000..e71381b
--- /dev/null
+++ b/tools/validation/Download-XSDComponents.ps1
@@ -0,0 +1,262 @@
+[CmdletBinding()]
+param(
+ [Parameter(Mandatory=$true)]
+ [hashtable]$ComponentVersions,
+
+ [string]$SchemaBaseUrl = "https://schemas.microsoft.com/fabric/extensibility/xsd",
+
+ [Parameter(Mandatory=$true)]
+ [string]$OutputDirectory,
+
+ [switch]$Force
+)
+
+<#
+.SYNOPSIS
+ Downloads individual XSD schema files from Microsoft's schema repository based on component-specific versions.
+
+.DESCRIPTION
+ This script downloads XSD files for each component (WorkloadDefinition, ItemDefinition,
+ CommonTypesDefinitions) from Microsoft's official schema repository. Each component can
+ have a different version, and CommonTypesDefinitions is version-agnostic.
+
+.PARAMETER ComponentVersions
+ Hashtable containing the schema version for each component:
+ @{
+ WorkloadDefinition = "1.101.0"
+ ItemDefinition = "1.102.0"
+ CommonTypesDefinitions = "common"
+ }
+
+.PARAMETER SchemaBaseUrl
+ The base URL of Microsoft's schema repository.
+ Defaults to "https://schemas.microsoft.com/fabric/extensibility/xsd"
+
+.PARAMETER OutputDirectory
+ The local directory where XSD files will be saved.
+
+.PARAMETER Force
+ Force download even if files already exist in the output directory.
+
+.EXAMPLE
+ $versions = @{
+ WorkloadDefinition = "1.101.0"
+ ItemDefinition = "1.102.0"
+ CommonTypesDefinitions = "common"
+ }
+ .\Download-XSDComponents.ps1 -ComponentVersions $versions -OutputDirectory "C:\temp\xsd-cache"
+
+.OUTPUTS
+ Returns hashtable with download results for each component.
+#>
+
+# Honor -Verbose (made available by [CmdletBinding()]); a bare $Verbose variable is never defined automatically
+if ($PSBoundParameters['Verbose']) {
+    $VerbosePreference = "Continue"
+}
+
+# Component to XSD file mapping
+$ComponentXsdMapping = @{
+ "WorkloadDefinition" = "WorkloadDefinition.xsd"
+ "ItemDefinition" = "ItemDefinition.xsd"
+ "CommonTypesDefinitions" = "CommonTypesDefinitions.xsd"
+}
+
+try {
+ Write-Verbose "Starting component-based XSD download..."
+ Write-Verbose "Microsoft Schema Repository: $SchemaBaseUrl"
+ Write-Verbose "Output Directory: $OutputDirectory"
+
+ # Validate input parameters
+ if (-not $ComponentVersions -or $ComponentVersions.Count -eq 0) {
+ throw "ComponentVersions parameter is required and cannot be empty"
+ }
+
+ # Validate provided components (flexible - doesn't require all components)
+ $validComponents = @("WorkloadDefinition", "ItemDefinition", "CommonTypesDefinitions")
+ $componentsToProcess = @()
+
+ foreach ($component in $ComponentVersions.Keys) {
+ if ($component -notin $validComponents) {
+ throw "Invalid component: $component. Valid components are: $($validComponents -join ', ')"
+ }
+
+ $version = $ComponentVersions[$component]
+
+ # Validate version format for each component
+ if ($component -eq "CommonTypesDefinitions") {
+ if ($version -ne "common") {
+ throw "CommonTypesDefinitions must use 'common' as version, got: $version"
+ }
+ } else {
+ if ($version -notmatch '^\d+\.\d+\.\d+$') {
+ throw "Invalid version format for $component`: $version (expected format: x.y.z)"
+ }
+ }
+
+ $componentsToProcess += $component
+ }
+
+ if ($componentsToProcess.Count -eq 0) {
+ throw "No valid components provided for download"
+ }
+
+ # Display component versions
+ Write-Verbose "Component versions to download:"
+ foreach ($component in $componentsToProcess) {
+ Write-Verbose " $component = $($ComponentVersions[$component])"
+ }
+
+ # Create output directory if it doesn't exist
+ if (-not (Test-Path -Path $OutputDirectory)) {
+ Write-Verbose "Creating output directory: $OutputDirectory"
+ New-Item -Path $OutputDirectory -ItemType Directory -Force | Out-Null
+ }
+
+ # Track download results
+ $downloadResults = @{}
+ $downloadedFiles = @()
+ $failedFiles = @()
+ $skippedFiles = @()
+
+ # Download each component's XSD file
+ foreach ($component in $componentsToProcess) {
+ $version = $ComponentVersions[$component]
+ $xsdFileName = $ComponentXsdMapping[$component]
+ $filePath = Join-Path $OutputDirectory $xsdFileName
+
+ Write-Verbose "Processing $component (v$version) -> $xsdFileName"
+
+ # Check if file already exists and not forcing
+ if ((Test-Path -Path $filePath) -and (-not $Force)) {
+ Write-Verbose "Skipping existing file: $xsdFileName"
+ $skippedFiles += $xsdFileName
+ $downloadResults[$component] = @{
+ Status = "Skipped"
+ Version = $version
+ FilePath = $filePath
+ Message = "File already exists"
+ }
+ continue
+ }
+
+ try {
+ # Construct download URL based on component type
+ if ($component -eq "CommonTypesDefinitions") {
+ # CommonTypesDefinitions is version-agnostic
+ $url = "$($SchemaBaseUrl.TrimEnd('/'))/$xsdFileName"
+ } else {
+ # Versioned components
+ $url = "$($SchemaBaseUrl.TrimEnd('/'))/$version/$xsdFileName"
+ }
+
+ Write-Verbose "Downloading $url to $filePath"
+
+ # Download with retry logic
+ $maxRetries = 3
+ $retryCount = 0
+ $downloadSuccess = $false
+ $lastError = $null
+
+ while (-not $downloadSuccess -and $retryCount -lt $maxRetries) {
+ try {
+ Invoke-WebRequest -Uri $url -OutFile $filePath -ErrorAction Stop
+ $downloadSuccess = $true
+ }
+ catch {
+ $lastError = $_
+ $retryCount++
+ if ($retryCount -lt $maxRetries) {
+ Write-Warning "Download failed for $xsdFileName (attempt $retryCount/$maxRetries): $($_.Exception.Message). Retrying..."
+ Start-Sleep -Seconds 2
+ }
+ }
+ }
+
+ if (-not $downloadSuccess) {
+ throw $lastError
+ }
+
+ # Verify the downloaded file is valid XML
+ Write-Verbose "Validating downloaded XML file: $xsdFileName"
+ $testXml = [xml](Get-Content -Path $filePath -ErrorAction Stop)
+
+ # Verify it's actually an XSD schema
+ if ($testXml.DocumentElement.LocalName -ne "schema" -or
+ $testXml.DocumentElement.NamespaceURI -ne "http://www.w3.org/2001/XMLSchema") {
+ throw "Downloaded file is not a valid XSD schema"
+ }
+
+ Write-Verbose "Successfully downloaded and verified: $xsdFileName (v$version)"
+ $downloadedFiles += $xsdFileName
+
+ $downloadResults[$component] = @{
+ Status = "Downloaded"
+ Version = $version
+ FilePath = $filePath
+ Url = $url
+ Message = "Successfully downloaded and verified"
+ }
+
+ }
+ catch {
+ $errorMessage = "Failed to download $xsdFileName (v$version) from $url`: $($_.Exception.Message)"
+ Write-Error $errorMessage
+ $failedFiles += $xsdFileName
+
+ $downloadResults[$component] = @{
+ Status = "Failed"
+ Version = $version
+ FilePath = $filePath
+ Url = $url
+ Message = $_.Exception.Message
+ }
+ }
+ }
+
+ # Summary
+ Write-Verbose "Download completed:"
+ Write-Verbose " Downloaded: $($downloadedFiles.Count) files"
+ Write-Verbose " Skipped: $($skippedFiles.Count) files"
+ Write-Verbose " Failed: $($failedFiles.Count) files"
+
+ if ($downloadedFiles.Count -gt 0) {
+ Write-Verbose "Downloaded files: $($downloadedFiles -join ', ')"
+ }
+
+ if ($skippedFiles.Count -gt 0) {
+ Write-Verbose "Skipped files: $($skippedFiles -join ', ')"
+ }
+
+ if ($failedFiles.Count -gt 0) {
+ throw "Failed to download the following XSD files: $($failedFiles -join ', ')"
+ }
+
+ # Create a summary file in the output directory
+ $summaryPath = Join-Path $OutputDirectory "download-summary.json"
+ $summary = @{
+        DownloadDate = (Get-Date).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ")
+ SchemaRepository = $SchemaBaseUrl
+ ComponentVersions = $ComponentVersions
+ DownloadResults = $downloadResults
+ Statistics = @{
+ Downloaded = $downloadedFiles.Count
+ Skipped = $skippedFiles.Count
+ Failed = $failedFiles.Count
+ }
+ }
+
+ $summary | ConvertTo-Json -Depth 4 | Set-Content -Path $summaryPath -Force
+ Write-Verbose "Download summary saved to: $summaryPath"
+
+ $successCount = $downloadedFiles.Count + $skippedFiles.Count
+ Write-Output "Successfully processed $successCount/$($componentsToProcess.Count) XSD components"
+
+ # Return the download results for use by calling scripts
+ Write-Output $downloadResults
+ exit 0
+
+}
+catch {
+ Write-Error "XSD component download failed: $_"
+ exit 1
+}
\ No newline at end of file
diff --git a/tools/validation/Get-AllSchemaVersions.ps1 b/tools/validation/Get-AllSchemaVersions.ps1
new file mode 100644
index 0000000..a3b74a5
--- /dev/null
+++ b/tools/validation/Get-AllSchemaVersions.ps1
@@ -0,0 +1,189 @@
+[CmdletBinding()]
+param(
+ [Parameter(Mandatory=$true)]
+ [string]$PackageDirectory
+)
+
+<#
+.SYNOPSIS
+ Detects schema versions for all manifest components in a package directory.
+
+.DESCRIPTION
+ This script analyzes all manifest files in a package directory and extracts the schema
+ versions for each component type (WorkloadManifest and ItemManifests).
+
+.PARAMETER PackageDirectory
+ Path to the package directory containing manifest files.
+
+.EXAMPLE
+ .\Get-AllSchemaVersions.ps1 -PackageDirectory "..\..\Backend\src\Packages\manifest"
+ Returns a hashtable with component versions.
+
+.OUTPUTS
+ Hashtable containing schema versions for each component:
+ @{
+ WorkloadDefinition = "1.101.0"
+ Item1 = "1.102.0"
+ Item2 = "1.103.0"
+ CommonTypesDefinitions = "common"
+ }
+
+ Note: Individual item manifests are returned with their file names as keys.
+#>
+
+# Honor -Verbose (made available by [CmdletBinding()]); a bare $Verbose variable is never defined automatically
+if ($PSBoundParameters['Verbose']) {
+    $VerbosePreference = "Continue"
+}
+
+try {
+ Write-Verbose "Analyzing schema versions in package directory: $PackageDirectory"
+
+ # Validate package directory exists
+ if (-not (Test-Path -Path $PackageDirectory -PathType Container)) {
+ throw "Package directory not found: $PackageDirectory"
+ }
+
+ $versions = @{}
+
+ # 1. Detect WorkloadManifest schema version
+ Write-Verbose "Detecting WorkloadManifest schema version..."
+ $workloadManifestPath = Join-Path $PackageDirectory "WorkloadManifest.xml"
+
+ if (-not (Test-Path -Path $workloadManifestPath -PathType Leaf)) {
+ throw "WorkloadManifest.xml not found in package directory"
+ }
+
+ $workloadXml = [xml](Get-Content -Path $workloadManifestPath -ErrorAction Stop)
+ $workloadVersion = $workloadXml.WorkloadManifestConfiguration.SchemaVersion
+
+ if (-not $workloadVersion) {
+ throw "SchemaVersion attribute not found in WorkloadManifestConfiguration"
+ }
+
+ if ($workloadVersion -notmatch '^\d+\.\d+\.\d+$') {
+ Write-Warning "WorkloadManifest schema version format may be invalid: $workloadVersion"
+ }
+
+ $versions["WorkloadDefinition"] = $workloadVersion
+ Write-Verbose "WorkloadDefinition schema version: $workloadVersion"
+
+ # 2. Detect ItemManifest schema versions for each individual item
+ Write-Verbose "Detecting ItemManifest schema versions..."
+ $allXmls = Get-ChildItem -Path $PackageDirectory -Filter "*.xml"
+
+ # Filter to only include files with ItemManifestConfiguration root element
+ $itemXmls = @()
+ foreach ($xmlFile in $allXmls) {
+ Write-Verbose "Checking XML file: $($xmlFile.Name)"
+
+ try {
+ $xmlContent = [xml](Get-Content -Path $xmlFile.FullName -ErrorAction Stop)
+ if ($xmlContent.DocumentElement.LocalName -eq "ItemManifestConfiguration") {
+ $itemXmls += $xmlFile
+ Write-Verbose "Detected item manifest: $($xmlFile.Name)"
+ }
+ else {
+ Write-Verbose "Skipping non-item manifest: $($xmlFile.Name) (root element: $($xmlContent.DocumentElement.LocalName))"
+ }
+ }
+ catch {
+ Write-Warning "Failed to parse XML file $($xmlFile.Name): $($_.Exception.Message)"
+ }
+ }
+
+ if ($itemXmls.Count -eq 0) {
+ Write-Warning "No item manifests with ItemManifestConfiguration found in package directory"
+ # Still add ItemDefinition with WorkloadDefinition version as fallback
+ $versions["ItemDefinition"] = $workloadVersion
+ Write-Verbose "Using WorkloadDefinition version as ItemDefinition fallback: $workloadVersion"
+ }
+ else {
+ foreach ($itemXml in $itemXmls) {
+ Write-Verbose "Processing item manifest: $($itemXml.Name)"
+
+ try {
+ $itemXmlContent = [xml](Get-Content -Path $itemXml.FullName -ErrorAction Stop)
+ $itemVersion = $itemXmlContent.ItemManifestConfiguration.SchemaVersion
+
+ if ($itemVersion) {
+ # Validate version format
+ if ($itemVersion -notmatch '^\d+\.\d+\.\d+$') {
+ Write-Warning "Item manifest $($itemXml.Name) schema version format may be invalid: $itemVersion"
+ # Try to fix simple cases like "1" -> "1.0.0"
+ if ($itemVersion -match '^\d+$') {
+ $itemVersion = "$itemVersion.0.0"
+ Write-Verbose "Auto-corrected version to: $itemVersion"
+ }
+ }
+
+ # Use the item file name (without .xml) as the component key
+ $itemName = $itemXml.BaseName
+ $versions[$itemName] = $itemVersion
+ Write-Verbose "Item manifest $($itemXml.Name) schema version: $itemVersion"
+ }
+ else {
+ Write-Warning "SchemaVersion attribute not found in $($itemXml.Name)"
+ # Use WorkloadDefinition version as fallback for this item
+ $itemName = $itemXml.BaseName
+ $versions[$itemName] = $workloadVersion
+ Write-Verbose "Using WorkloadDefinition version as fallback for $($itemXml.Name): $workloadVersion"
+ }
+ }
+ catch {
+ Write-Warning "Failed to parse item manifest $($itemXml.Name): $($_.Exception.Message)"
+ # Use WorkloadDefinition version as fallback for this item
+ $itemName = $itemXml.BaseName
+ $versions[$itemName] = $workloadVersion
+ Write-Verbose "Using WorkloadDefinition version as fallback for $($itemXml.Name) due to parse error: $workloadVersion"
+ }
+ }
+ }
+
+ # 3. Add CommonTypesDefinitions as version-agnostic
+ $versions["CommonTypesDefinitions"] = "common"
+ Write-Verbose "CommonTypesDefinitions schema version: common (version-agnostic)"
+
+ # 4. Validate all versions are reasonable
+ Write-Verbose "Validating detected schema versions..."
+ foreach ($component in $versions.Keys) {
+ $version = $versions[$component]
+
+ # Skip validation for CommonTypesDefinitions (always "common")
+ if ($component -eq "CommonTypesDefinitions") {
+ if ($version -ne "common") {
+ Write-Warning "CommonTypesDefinitions should be 'common', got: $version"
+ }
+ continue
+ }
+
+ # Validate semantic versioning for other components
+ try {
+ $versionObj = [Version]$version
+ if ($versionObj.Major -lt 1) {
+ Write-Warning "$component version seems too low: $version"
+ }
+ }
+ catch {
+ Write-Warning "$component version format invalid: $version"
+ }
+ }
+
+ # Output summary
+ Write-Verbose "Schema version detection completed successfully"
+ Write-Verbose "Detected versions:"
+ foreach ($component in $versions.Keys | Sort-Object) {
+ Write-Verbose " $component = $($versions[$component])"
+ }
+
+ # Return the versions hashtable
+ Write-Output $versions
+
+}
+catch {
+ Write-Error "Failed to detect schema versions: $_"
+ exit 1
+}
\ No newline at end of file
diff --git a/tools/validation/Invoke-ManifestValidation.ps1 b/tools/validation/Invoke-ManifestValidation.ps1
new file mode 100644
index 0000000..d26e576
--- /dev/null
+++ b/tools/validation/Invoke-ManifestValidation.ps1
@@ -0,0 +1,398 @@
+[CmdletBinding()]
+param(
+ [Parameter(Mandatory=$true)]
+ [string]$PackageDirectory,
+
+ [Parameter(Mandatory=$true)]
+ [string]$AppSettingsPath,
+
+ [string]$SchemaBaseUrl = "https://schemas.microsoft.com/fabric/extensibility/xsd",
+ [string]$CacheDirectory = "$env:TEMP\FabricXSDCache",
+ [switch]$Force,
+ [switch]$SkipCache,
+ [switch]$CleanCache
+)
+
+<#
+.SYNOPSIS
+ Main validation script that downloads XSD schemas from Microsoft's official repository and validates manifest files.
+
+.DESCRIPTION
+ This script orchestrates the entire validation process:
+ 1. Detects schema versions from manifest files (component-specific)
+ 2. Downloads or retrieves cached XSD files from Microsoft's schema repository
+ 3. Runs validation scripts against the manifest files separately for each component
+ 4. Reports validation results
+
+.PARAMETER PackageDirectory
+ Path to the package directory containing manifest files to validate.
+ Must contain WorkloadManifest.xml and any item manifest XML files.
+
+.PARAMETER AppSettingsPath
+ Path to the appsettings.json file for validation configuration.
+ Example: "..\..\Backend\src\appsettings.json"
+
+.PARAMETER SchemaBaseUrl
+ Base URL of Microsoft's schema repository.
+ Defaults to "https://schemas.microsoft.com/fabric/extensibility/xsd"
+
+.PARAMETER CacheDirectory
+ Root directory for XSD file cache. Defaults to %TEMP%\FabricXSDCache
+
+.PARAMETER Force
+ Force re-download of XSD files even if they exist in cache.
+
+.PARAMETER SkipCache
+ Skip cache entirely and always download fresh XSD files to a temporary location.
+
+.PARAMETER CleanCache
+ Clean old cache entries before validation (removes entries older than 30 days).
+
+.EXAMPLE
+ .\Invoke-ManifestValidation.ps1 -PackageDirectory "..\..\Backend\src\Packages\manifest" -AppSettingsPath "..\..\Backend\src\appsettings.json"
+ Basic validation using default Microsoft schema repository.
+
+.EXAMPLE
+ .\Invoke-ManifestValidation.ps1 -PackageDirectory "..\..\Backend\src\Packages\manifest" -AppSettingsPath "..\..\Backend\src\appsettings.json" -Force -Verbose
+ Force re-download XSDs and show verbose output.
+
+.EXAMPLE
+ .\Invoke-ManifestValidation.ps1 -PackageDirectory "..\..\Backend\src\Packages\manifest" -AppSettingsPath "..\..\Backend\src\appsettings.json" -SchemaBaseUrl "https://custom.schema.com/fabric/xsd"
+ Use a custom schema repository URL.
+
+.OUTPUTS
+ Exit code 0 on success, non-zero on failure.
+ Validation errors are written to ValidationErrors.txt in the script directory.
+#>
+
+# Resolve -Verbose once ([CmdletBinding()] makes it a real common parameter; a bare $Verbose is otherwise undefined)
+$Verbose = [bool]$PSBoundParameters['Verbose']
+if ($Verbose) {
+    $VerbosePreference = "Continue"
+}
+
+# Initialize validation context
+$script:ValidationStartTime = Get-Date
+$script:ErrorCount = 0
+$script:WarningCount = 0
+
+function Write-ValidationLog {
+ param(
+ [string]$Message,
+ [ValidateSet("Info", "Warning", "Error", "Success")]
+ [string]$Level = "Info"
+ )
+
+ $timestamp = Get-Date -Format "yyyy-MM-dd HH:mm:ss"
+ $prefix = switch ($Level) {
+ "Info" { "[INFO]" }
+ "Warning" { "[WARN]" }
+ "Error" { "[ERROR]" }
+ "Success" { "[SUCCESS]" }
+ }
+
+ $logMessage = "$timestamp $prefix $Message"
+
+ switch ($Level) {
+ "Info" { Write-Host $logMessage }
+ "Warning" { Write-Warning $Message; $script:WarningCount++ }
+ "Error" { Write-Error $Message; $script:ErrorCount++ }
+ "Success" { Write-Host $logMessage -ForegroundColor Green }
+ }
+
+ Write-Verbose $logMessage
+}
+
+function Test-Prerequisites {
+ Write-ValidationLog "Checking prerequisites..."
+
+ # Check if PackageDirectory exists
+ if (-not (Test-Path -Path $PackageDirectory -PathType Container)) {
+ Write-ValidationLog "Package directory not found: $PackageDirectory" -Level Error
+ return $false
+ }
+
+ # Check if AppSettingsPath exists
+ if (-not (Test-Path -Path $AppSettingsPath -PathType Leaf)) {
+ Write-ValidationLog "AppSettings file not found: $AppSettingsPath" -Level Error
+ return $false
+ }
+
+ # Check if WorkloadManifest.xml exists
+ $workloadManifestPath = Join-Path $PackageDirectory "WorkloadManifest.xml"
+ if (-not (Test-Path -Path $workloadManifestPath -PathType Leaf)) {
+ Write-ValidationLog "WorkloadManifest.xml not found in package directory: $PackageDirectory" -Level Error
+ return $false
+ }
+
+ # Check internet connectivity (basic test)
+ try {
+ $testConnection = Test-NetConnection -ComputerName "schemas.microsoft.com" -Port 443 -InformationLevel Quiet -WarningAction SilentlyContinue
+ if (-not $testConnection) {
+ Write-ValidationLog "Cannot connect to Microsoft schema repository. Internet connection may be required for XSD download." -Level Warning
+ }
+ }
+ catch {
+ Write-ValidationLog "Could not test internet connectivity: $($_.Exception.Message)" -Level Warning
+ }
+
+ Write-ValidationLog "Prerequisites check completed" -Level Success
+ return $true
+}
+
+function Get-ValidationScriptPath {
+ param([string]$ScriptName)
+
+ $scriptPath = Join-Path $PSScriptRoot $ScriptName
+ if (-not (Test-Path -Path $scriptPath)) {
+ throw "Required validation script not found: $scriptPath"
+ }
+ return $scriptPath
+}
+
+try {
+ Write-ValidationLog "Starting manifest validation process..."
+
+ # Resolve PackageDirectory to absolute path to avoid path resolution issues
+ $PackageDirectory = Resolve-Path $PackageDirectory -ErrorAction Stop
+
+ Write-ValidationLog "Package Directory: $PackageDirectory"
+ Write-ValidationLog "Microsoft Schema Repository: $SchemaBaseUrl"
+ Write-ValidationLog "Cache Directory: $CacheDirectory"
+
+ # Check prerequisites
+ if (-not (Test-Prerequisites)) {
+ throw "Prerequisites check failed"
+ }
+
+ # Clean cache if requested
+ if ($CleanCache) {
+ Write-ValidationLog "Cleaning old cache entries..."
+ $cacheManagerPath = Get-ValidationScriptPath "Manage-ComponentXSDCache.ps1"
+ $cleanedCount = & $cacheManagerPath -ComponentVersions @{WorkloadDefinition="1.0.0";ItemDefinition="1.0.0";CommonTypesDefinitions="1.0.0"} -CacheRootDirectory $CacheDirectory -Action "Clean" -Verbose:$Verbose
+ Write-ValidationLog "Cleaned $cleanedCount old cache entries" -Level Success
+ }
+
+ # Step 1: Detect component schema versions
+ Write-ValidationLog "Detecting component schema versions..."
+ $versionDetectorPath = Get-ValidationScriptPath "Get-AllSchemaVersions.ps1"
+
+ $componentVersions = & $versionDetectorPath -PackageDirectory $PackageDirectory -Verbose:$Verbose
+ if (-not $componentVersions -or $componentVersions.Count -eq 0) {
+ throw "Failed to detect component schema versions from manifest files"
+ }
+
+ # Update CommonTypesDefinitions to use "common" for the new architecture
+ $componentVersions["CommonTypesDefinitions"] = "common"
+
+ Write-ValidationLog "Detected component schema versions:" -Level Success
+ foreach ($component in $componentVersions.Keys | Sort-Object) {
+ Write-ValidationLog " $component = $($componentVersions[$component])" -Level Info
+ }
+
+ # Step 2: Manage component XSD cache
+ $componentCachePaths = @{}
+
+ if ($SkipCache) {
+ Write-ValidationLog "Skipping cache, using temporary directory..."
+ $tempDirectory = Join-Path $env:TEMP "FabricXSD_$(Get-Date -Format 'yyyyMMdd_HHmmss')"
+ New-Item -Path $tempDirectory -ItemType Directory -Force | Out-Null
+
+ # Use same temp directory for all components
+ foreach ($component in $componentVersions.Keys) {
+ $componentCachePaths[$component] = $tempDirectory
+ }
+ }
+ else {
+ Write-ValidationLog "Checking component XSD cache..."
+ $cacheManagerPath = Get-ValidationScriptPath "Manage-ComponentXSDCache.ps1"
+
+ # Check if component versions are already cached (unless Force is specified)
+ if (-not $Force) {
+ $cachedPaths = & $cacheManagerPath -ComponentVersions $componentVersions -CacheRootDirectory $CacheDirectory -Action "Check" -Verbose:$Verbose
+ if ($cachedPaths -and $cachedPaths.Count -gt 0) {
+ Write-ValidationLog "Using cached XSD files" -Level Success
+ $componentCachePaths = $cachedPaths
+ }
+ }
+
+ # Create cache directories if not found or forcing
+ if ($componentCachePaths.Count -eq 0) {
+ Write-ValidationLog "Creating cache directories for component versions..."
+ $componentCachePaths = & $cacheManagerPath -ComponentVersions $componentVersions -CacheRootDirectory $CacheDirectory -Action "Create" -Verbose:$Verbose
+ }
+ }
+
+ # Step 3: Download component XSD files if needed
+ $componentXsdMapping = @{
+ "WorkloadDefinition" = "WorkloadDefinition.xsd"
+ "ItemDefinition" = "ItemDefinition.xsd"
+ "CommonTypesDefinitions" = "CommonTypesDefinitions.xsd"
+ }
+
+ $needsDownload = $Force -or $SkipCache
+ $missingComponents = @()
+
+ if (-not $needsDownload) {
+ # Check if all required XSD files exist in their respective cache directories
+ foreach ($component in $componentVersions.Keys) {
+ $cacheDir = $componentCachePaths[$component]
+
+ # Map component to appropriate XSD file
+ if ($component -eq "WorkloadDefinition") {
+ $xsdFile = "WorkloadDefinition.xsd"
+ }
+ elseif ($component -eq "CommonTypesDefinitions") {
+ $xsdFile = "CommonTypesDefinitions.xsd"
+ }
+ else {
+ # All item manifests use ItemDefinition.xsd
+ $xsdFile = "ItemDefinition.xsd"
+ }
+
+ $xsdPath = Join-Path $cacheDir $xsdFile
+
+ if (-not (Test-Path -Path $xsdPath)) {
+ $needsDownload = $true
+ $missingComponents += $component
+ }
+ }
+ }
+
+ if ($needsDownload) {
+ Write-ValidationLog "Downloading component XSD files from Microsoft schema repository..."
+ $downloaderPath = Get-ValidationScriptPath "Download-XSDComponents.ps1"
+
+ # Map component versions to standard XSD component names
+ $standardComponentVersions = @{}
+
+ foreach ($component in $componentVersions.Keys) {
+ if ($component -eq "WorkloadDefinition") {
+ $standardComponentVersions["WorkloadDefinition"] = $componentVersions[$component]
+ }
+ elseif ($component -eq "CommonTypesDefinitions") {
+ $standardComponentVersions["CommonTypesDefinitions"] = $componentVersions[$component]
+ }
+ else {
+ # All item manifests use ItemDefinition XSD
+ if (-not $standardComponentVersions.ContainsKey("ItemDefinition")) {
+ $standardComponentVersions["ItemDefinition"] = $componentVersions[$component]
+ }
+ }
+ }
+
+ # Download to each component's specific cache directory
+ foreach ($component in $standardComponentVersions.Keys) {
+ # Find the cache directory for this standard component
+ if ($component -eq "WorkloadDefinition" -or $component -eq "CommonTypesDefinitions") {
+ $cacheDir = $componentCachePaths[$component]
+ } else {
+ # For ItemDefinition, find any item component's cache directory
+ $itemComponentName = $componentVersions.Keys | Where-Object { $_ -ne "WorkloadDefinition" -and $_ -ne "CommonTypesDefinitions" } | Select-Object -First 1
+ $cacheDir = $componentCachePaths[$itemComponentName]
+ }
+
+ $version = $standardComponentVersions[$component]
+ $singleComponentVersions = @{ $component = $version }
+
+ Write-ValidationLog "Downloading $component v$version to $cacheDir"
+ $downloadResult = & $downloaderPath -ComponentVersions $singleComponentVersions -SchemaBaseUrl $SchemaBaseUrl -OutputDirectory $cacheDir -Force:$Force -Verbose:$Verbose
+
+ if ($LASTEXITCODE -ne 0) {
+ throw "Failed to download XSD files for $component from Microsoft schema repository"
+ }
+ }
+
+ Write-ValidationLog "XSD files downloaded successfully" -Level Success
+ }
+ else {
+ Write-ValidationLog "All required XSD files found in cache" -Level Success
+ }
+
+ # Verify all required XSD files are present
+ Write-ValidationLog "Verifying XSD files..."
+ foreach ($component in $componentVersions.Keys) {
+ $cacheDir = $componentCachePaths[$component]
+ $xsdFile = $componentXsdMapping[$component]
+ $xsdPath = Join-Path $cacheDir $xsdFile
+
+ if (-not (Test-Path -Path $xsdPath)) {
+ throw "Required XSD file not found: $xsdPath"
+ }
+ }
+ Write-ValidationLog "All XSD files verified" -Level Success
+
+ # Step 4: Run validation scripts
+ Write-ValidationLog "Starting manifest validation..."
+
+ # Clean up any existing error files
+ $removeErrorPath = Get-ValidationScriptPath "RemoveErrorFile.ps1"
+ & $removeErrorPath -outputDirectory $PSScriptRoot -Verbose:$Verbose
+
+ # Validate WorkloadManifest.xml
+ Write-ValidationLog "Validating WorkloadManifest.xml..."
+ $manifestValidatorPath = Get-ValidationScriptPath "ManifestValidator.ps1"
+ $workloadXsdDir = $componentCachePaths["WorkloadDefinition"]
+ & $manifestValidatorPath -inputDirectory $PackageDirectory -inputXml "WorkloadManifest.xml" -inputXsd "WorkloadDefinition.xsd" -outputDirectory $PSScriptRoot -xsdDirectory $workloadXsdDir -Verbose:$Verbose
+
+ # Validate Item manifests
+ Write-ValidationLog "Validating item manifests..."
+ $itemValidatorPath = Get-ValidationScriptPath "ItemManifestValidator.ps1"
+
+ # Find any item cache path (exclude WorkloadDefinition and CommonTypesDefinitions)
+ $itemCacheKey = $componentCachePaths.Keys | Where-Object { $_ -ne "WorkloadDefinition" -and $_ -ne "CommonTypesDefinitions" } | Select-Object -First 1
+
+ if (-not $itemCacheKey) {
+ throw "No item cache paths found in componentCachePaths. Available paths: $($componentCachePaths.Keys -join ', ')"
+ }
+
+ $itemXsdDir = $componentCachePaths[$itemCacheKey]
+ Write-ValidationLog "Using item XSD directory from $itemCacheKey`: $itemXsdDir"
+
+    & $itemValidatorPath -inputDirectory $PackageDirectory -inputXsd "ItemDefinition.xsd" -outputDirectory $PSScriptRoot -xsdDirectory $itemXsdDir -Verbose:$Verbose
+
+ # Run additional validations
+ Write-ValidationLog "Running additional validations..."
+ $noDefaultsValidatorPath = Get-ValidationScriptPath "ValidateNoDefaults.ps1"
+ & $noDefaultsValidatorPath -outputDirectory $PSScriptRoot -appsettingsLocation $AppSettingsPath -packageDirectory $PackageDirectory
+
+ # Step 5: Check for validation errors
+ $errorFilePath = Join-Path $PSScriptRoot "ValidationErrors.txt"
+ if (Test-Path -Path $errorFilePath) {
+ $errorContent = Get-Content -Path $errorFilePath -Raw
+        if ($errorContent -and $errorContent.Trim()) {
+ Write-ValidationLog "Validation errors found:" -Level Error
+ Write-Host $errorContent -ForegroundColor Red
+ throw "Validation failed with errors"
+ }
+ }
+
+ # Success!
+ $duration = (Get-Date) - $script:ValidationStartTime
+ Write-ValidationLog "Validation completed successfully in $($duration.TotalSeconds.ToString('F2')) seconds" -Level Success
+ Write-ValidationLog "Component Schema Versions:" -Level Success
+ foreach ($component in $componentVersions.Keys | Sort-Object) {
+ Write-ValidationLog " $component = $($componentVersions[$component])" -Level Success
+ }
+ Write-ValidationLog "XSD Source: $SchemaBaseUrl (Microsoft official repository)" -Level Success
+
+ if ($script:WarningCount -gt 0) {
+ Write-ValidationLog "Validation completed with $($script:WarningCount) warning(s)" -Level Warning
+ }
+
+ exit 0
+}
+catch {
+ $duration = (Get-Date) - $script:ValidationStartTime
+ Write-ValidationLog "Validation failed after $($duration.TotalSeconds.ToString('F2')) seconds: $($_.Exception.Message)" -Level Error
+
+ exit 1
+}
+finally {
+ # Cleanup temporary directory if used (guaranteed to run regardless of success/failure)
+    if ($SkipCache -and $componentCachePaths -and $componentCachePaths.Count -gt 0) {
+ Write-ValidationLog "Cleaning up temporary XSD directory..."
+ $tempDirectory = $componentCachePaths.Values | Select-Object -First 1
+ if ($tempDirectory -and (Test-Path -Path $tempDirectory)) {
+ Remove-Item -Path $tempDirectory -Recurse -Force -ErrorAction SilentlyContinue
+ }
+ }
+}
\ No newline at end of file
diff --git a/tools/validation/ItemManifestValidator.ps1 b/tools/validation/ItemManifestValidator.ps1
new file mode 100644
index 0000000..77dcd66
--- /dev/null
+++ b/tools/validation/ItemManifestValidator.ps1
@@ -0,0 +1,120 @@
+[CmdletBinding()] # advanced script so the orchestrator's -Verbose:$Verbose pass-through binds
+param (
+    [string]$inputDirectory,
+    [string]$inputXsd,
+    [string]$outputDirectory,
+    [string]$xsdDirectory = $null # Optional separate XSD directory
+)
+
+<#
+.SYNOPSIS
+ Validates item manifest XML files against XSD schemas.
+
+.DESCRIPTION
+ This script validates all item manifest XML files in a directory, excluding the
+ WorkloadManifest.xml file. It also performs naming convention validations.
+
+.PARAMETER inputDirectory
+ Directory containing the XML files to validate.
+
+.PARAMETER inputXsd
+ Name of the item XSD schema file.
+
+.PARAMETER outputDirectory
+ Directory where validation error files will be written.
+
+.PARAMETER xsdDirectory
+ Optional directory containing XSD schema files. If not provided, will look in inputDirectory.
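+
+.EXAMPLE
+    .\ItemManifestValidator.ps1 -inputDirectory ".\Packages\manifest" -inputXsd "ItemDefinition.xsd" -outputDirectory "." -xsdDirectory "$env:TEMP\FabricXSDCache\v1.102.0"
+    Validates every item manifest in the package directory against a cached XSD (paths are illustrative).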
+#>
+
+try
+{
+ if (-not($inputDirectory -and $inputXsd -and $outputDirectory))
+ {
+ throw "Invalid input parameters"
+ }
+
+ Write-Verbose "Starting item manifest validation..."
+ Write-Verbose "Input Directory: $inputDirectory"
+ Write-Verbose "XSD Directory: $(if ($xsdDirectory) { $xsdDirectory } else { $inputDirectory })"
+ Write-Verbose "Input XSD: $inputXsd"
+
+ $workloadManifest = "WorkloadManifest.xml"
+ $workloadXmlPath = Join-Path $inputDirectory $workloadManifest
+
+ if (-not (Test-Path -Path $workloadXmlPath)) {
+ throw "WorkloadManifest.xml not found in input directory: $inputDirectory"
+ }
+
+ $workloadXml = [xml](Get-Content -Path $workloadXmlPath)
+ $workloadName = $workloadXml.WorkloadManifestConfiguration.Workload.WorkloadName
+ Write-Verbose "Workload Name: $workloadName"
+
+ $allXmls = Get-ChildItem -Path $inputDirectory -Filter "*.xml"
+ Write-Verbose "Found $($allXmls.Count) XML files to examine"
+
+ # Filter to only include files with ItemManifestConfiguration root element
+ $itemXmls = @()
+ foreach ($xmlFile in $allXmls) {
+ Write-Verbose "Checking XML file: $($xmlFile.Name)"
+
+ try {
+ $xmlContent = [xml](Get-Content -Path $xmlFile.FullName -ErrorAction Stop)
+ if ($xmlContent.DocumentElement.LocalName -eq "ItemManifestConfiguration") {
+ $itemXmls += $xmlFile
+ Write-Verbose "Detected item manifest: $($xmlFile.Name)"
+ }
+ else {
+ Write-Verbose "Skipping non-item manifest: $($xmlFile.Name) (root element: $($xmlContent.DocumentElement.LocalName))"
+ }
+ }
+ catch {
+ Write-Warning "Failed to parse XML file $($xmlFile.Name): $($_.Exception.Message)"
+ }
+ }
+
+ Write-Verbose "Found $($itemXmls.Count) item manifest files to validate"
+
+ foreach ($itemXml in $itemXmls)
+ {
+ Write-Verbose "Validating item manifest: $($itemXml.Name)"
+ $manifestValidatorPath = Join-Path $PSScriptRoot "ManifestValidator.ps1"
+
+ # Pass xsdDirectory parameter if provided
+ if ($xsdDirectory) {
+ & $manifestValidatorPath -inputDirectory $inputDirectory -inputXml $itemXml.Name -inputXsd $inputXsd -outputDirectory $outputDirectory -xsdDirectory $xsdDirectory
+ }
+ else {
+ & $manifestValidatorPath -inputDirectory $inputDirectory -inputXml $itemXml.Name -inputXsd $inputXsd -outputDirectory $outputDirectory
+ }
+
+ # Naming Validations
+ $itemXmlPath = $itemXml.FullName
+ $xdoc = [xml](Get-Content -Path $itemXmlPath)
+ $itemWorkloadName = $xdoc.ItemManifestConfiguration.Item.Workload.WorkloadName
+ if ($itemWorkloadName -ne $workloadName)
+ {
+ $scriptPath = Join-Path $PSScriptRoot "WriteErrorsToFile.ps1"
+ & $scriptPath -errors "Non matching WorkloadName between WorkloadManifest.xml and $($itemXml.Name)" -outputDirectory $outputDirectory
+ }
+ $itemName = $xdoc.ItemManifestConfiguration.Item.TypeName
+ if (-not ($itemName -clike "$($itemWorkloadName).*"))
+ {
+ $scriptPath = Join-Path $PSScriptRoot "WriteErrorsToFile.ps1"
+ & $scriptPath -errors "Item name's prefix should be WorkloadName for item $($itemName)" -outputDirectory $outputDirectory
+ }
+ $jobNames = $xdoc.SelectNodes("//ItemJobType")
+ foreach ($jobName in $jobNames)
+ {
+ if (-not ($jobName.Name -clike "$($itemName).*"))
+ {
+ $scriptPath = Join-Path $PSScriptRoot "WriteErrorsToFile.ps1"
+ & $scriptPath -errors "Job type name's prefix should be ItemName for jobType $($jobName.Name)" -outputDirectory $outputDirectory
+ }
+ }
+ }
+}
+catch
+{
+ Write-Host "An error occurred:"
+ Write-Host $_
+}
\ No newline at end of file
diff --git a/tools/validation/Manage-ComponentXSDCache.ps1 b/tools/validation/Manage-ComponentXSDCache.ps1
new file mode 100644
index 0000000..0b8f030
--- /dev/null
+++ b/tools/validation/Manage-ComponentXSDCache.ps1
@@ -0,0 +1,399 @@
+param(
+ [Parameter(Mandatory=$false)]
+ [hashtable]$ComponentVersions,
+
+ [Parameter(Mandatory=$false)]
+ [string]$Version,
+
+ [string]$CacheRootDirectory = "$env:TEMP\FabricXSDCache",
+
+ [ValidateSet("Check", "Create", "Clean", "List")]
+ [string]$Action = "Check",
+
+ [int]$MaxCacheAgeDays = 30
+)
+
+<#
+.SYNOPSIS
+ Manages local XSD cache with simplified version-based structure.
+
+.DESCRIPTION
+ This script provides cache management functionality for XSD files with simplified
+ version-based storage. Each version gets its own directory, and CommonTypesDefinitions
+ is stored in the root cache directory for XSD reference compatibility.
+
+.PARAMETER ComponentVersions
+ Hashtable containing the schema version for each component:
+ @{
+ WorkloadDefinition = "1.101.0"
+ ItemDefinition = "1.102.0"
+ CommonTypesDefinitions = "common"
+ }
+
+.PARAMETER Version
+ Single version to operate on (alternative to ComponentVersions for single-version operations)
+
+.PARAMETER CacheRootDirectory
+ The root directory for the XSD cache. Defaults to %TEMP%\FabricXSDCache
+
+.PARAMETER Action
+ The action to perform:
+ - Check: Check if the versions are cached and return cache paths
+ - Create: Create cache directories for the versions
+ - Clean: Remove cache entries older than MaxCacheAgeDays
+ - List: List all cached versions
+
+.PARAMETER MaxCacheAgeDays
+ Maximum age in days for cache entries (used with Clean action). Defaults to 30 days.
+
+.EXAMPLE
+ $versions = @{
+ WorkloadDefinition = "1.101.0"
+ ItemDefinition = "1.102.0"
+ CommonTypesDefinitions = "common"
+ }
+ .\Manage-ComponentXSDCache.ps1 -ComponentVersions $versions -Action Check
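+
+.EXAMPLE
+    .\Manage-ComponentXSDCache.ps1 -Action "List"
+    Lists every cached version under the default cache root directory.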
+
+.OUTPUTS
+ For Check action: Returns hashtable of cache directory paths by component.
+ For Create action: Returns hashtable of created cache directory paths.
+ For Clean action: Returns count of cleaned entries.
+ For List action: Returns array of cached version information.
+#>
+
+# Set verbose preference. $Verbose is not an automatic variable; the bound
+# common parameter is visible through $PSBoundParameters instead.
+if ($PSBoundParameters['Verbose']) {
+    $VerbosePreference = "Continue"
+}
+
+# Component to XSD file mapping
+$ComponentXsdMapping = @{
+ "WorkloadDefinition" = "WorkloadDefinition.xsd"
+ "ItemDefinition" = "ItemDefinition.xsd"
+ "CommonTypesDefinitions" = "CommonTypesDefinitions.xsd"
+}
+
+function Get-VersionCacheDirectory {
+ param(
+ [string]$Version,
+ [string]$CacheRoot
+ )
+
+ if ($Version -eq "common") {
+ # CommonTypesDefinitions goes in root cache directory for XSD reference compatibility
+ return $CacheRoot
+ } else {
+ return Join-Path $CacheRoot "v$Version"
+ }
+}
+
+function Test-CacheIntegrity {
+ param(
+ [string]$CachePath,
+ [string]$Component
+ )
+
+ Write-Verbose "Checking cache integrity for $Component at: $CachePath"
+
+ if (-not (Test-Path -Path $CachePath -PathType Container)) {
+ Write-Verbose "Cache directory does not exist: $CachePath"
+ return $false
+ }
+
+ $xsdFile = $ComponentXsdMapping[$Component]
+ $filePath = Join-Path $CachePath $xsdFile
+
+ if (-not (Test-Path -Path $filePath -PathType Leaf)) {
+ Write-Verbose "Missing XSD file: $filePath"
+ return $false
+ }
+
+ try {
+ # Verify it's valid XML
+ $xml = [xml](Get-Content -Path $filePath -ErrorAction Stop)
+
+ # Verify it's an XSD schema
+ if ($xml.DocumentElement.LocalName -ne "schema" -or
+ $xml.DocumentElement.NamespaceURI -ne "http://www.w3.org/2001/XMLSchema") {
+ Write-Verbose "Invalid XSD schema file: $filePath"
+ return $false
+ }
+ }
+ catch {
+ Write-Verbose "Failed to parse XSD file $filePath`: $_"
+ return $false
+ }
+
+ Write-Verbose "Cache integrity check passed for $Component at: $CachePath"
+ return $true
+}
+
+function Get-CacheMetadata {
+ param([string]$CachePath)
+
+ $metadataFile = Join-Path $CachePath ".metadata"
+
+ if (Test-Path -Path $metadataFile) {
+ try {
+ $metadata = Get-Content -Path $metadataFile -Raw | ConvertFrom-Json
+ return $metadata
+ }
+ catch {
+ Write-Verbose "Failed to read cache metadata: $_"
+ }
+ }
+
+ # Return default metadata if file doesn't exist or is invalid
+ return @{
+ CreatedDate = (Get-Date).ToString("yyyy-MM-ddTHH:mm:ssZ")
+ LastAccessDate = (Get-Date).ToString("yyyy-MM-ddTHH:mm:ssZ")
+ CacheFormat = "VersionBased"
+ }
+}
+
+function Set-CacheMetadata {
+ param(
+ [string]$CachePath,
+ $Metadata # Accept any type (hashtable or PSCustomObject)
+ )
+
+ $metadataFile = Join-Path $CachePath ".metadata"
+
+ try {
+ # Convert PSCustomObject to hashtable if needed
+ if ($Metadata -is [PSCustomObject]) {
+ $hashMetadata = @{}
+ $Metadata.PSObject.Properties | ForEach-Object {
+ $hashMetadata[$_.Name] = $_.Value
+ }
+ $hashMetadata | ConvertTo-Json -Depth 3 | Set-Content -Path $metadataFile -Force
+ } else {
+ $Metadata | ConvertTo-Json -Depth 3 | Set-Content -Path $metadataFile -Force
+ }
+ Write-Verbose "Updated cache metadata: $metadataFile"
+ }
+ catch {
+ Write-Warning "Failed to update cache metadata: $_"
+ }
+}
+
+try {
+ Write-Verbose "Cache management action: $Action"
+ Write-Verbose "Cache root directory: $CacheRootDirectory"
+
+ # Determine which versions to work with
+ $versionsToProcess = @{}
+
+ if ($ComponentVersions) {
+ $versionsToProcess = $ComponentVersions.Clone()
+ Write-Verbose "Component versions: $(($ComponentVersions.GetEnumerator() | ForEach-Object { "$($_.Key)=$($_.Value)" }) -join ', ')"
+ } elseif ($Version) {
+ # Single version operation - assume all components use this version except CommonTypes
+ $versionsToProcess = @{
+ WorkloadDefinition = $Version
+ ItemDefinition = $Version
+ CommonTypesDefinitions = "common"
+ }
+ Write-Verbose "Single version operation: $Version"
+ }
+
+ switch ($Action) {
+ "Check" {
+ Write-Verbose "Checking cache for component versions..."
+
+ $cachePaths = @{}
+ $allCached = $true
+
+ foreach ($component in $versionsToProcess.Keys) {
+ $version = $versionsToProcess[$component]
+ $cacheDir = Get-VersionCacheDirectory -Version $version -CacheRoot $CacheRootDirectory
+
+ if (Test-CacheIntegrity -CachePath $cacheDir -Component $component) {
+ # Update last access time
+ $metadata = Get-CacheMetadata -CachePath $cacheDir
+ $metadata.LastAccessDate = (Get-Date).ToString("yyyy-MM-ddTHH:mm:ssZ")
+ Set-CacheMetadata -CachePath $cacheDir -Metadata $metadata
+
+ $cachePaths[$component] = $cacheDir
+ Write-Verbose "Cache found for $component v$version at: $cacheDir"
+ } else {
+ $allCached = $false
+ Write-Verbose "Cache not found or invalid for $component v$version"
+ }
+ }
+
+ if ($allCached) {
+ Write-Output $cachePaths
+ } else {
+ Write-Output @{}
+ }
+ }
+
+ "Create" {
+ Write-Verbose "Creating cache directories for component versions..."
+
+ $createdPaths = @{}
+
+ foreach ($component in $versionsToProcess.Keys) {
+ $version = $versionsToProcess[$component]
+ $cacheDir = Get-VersionCacheDirectory -Version $version -CacheRoot $CacheRootDirectory
+
+ if (-not (Test-Path -Path $cacheDir)) {
+ New-Item -Path $cacheDir -ItemType Directory -Force | Out-Null
+ Write-Verbose "Created cache directory for $component v$version`: $cacheDir"
+ } else {
+ Write-Verbose "Cache directory already exists for $component v$version`: $cacheDir"
+ }
+
+ # Create/update metadata
+ $metadata = @{
+ Version = $version
+ Component = $component
+ CreatedDate = (Get-Date).ToString("yyyy-MM-ddTHH:mm:ssZ")
+ LastAccessDate = (Get-Date).ToString("yyyy-MM-ddTHH:mm:ssZ")
+ CacheFormat = "VersionBased"
+ }
+ Set-CacheMetadata -CachePath $cacheDir -Metadata $metadata
+
+ $createdPaths[$component] = $cacheDir
+ }
+
+ Write-Output $createdPaths
+ }
+
+ "Clean" {
+ Write-Verbose "Cleaning cache entries older than $MaxCacheAgeDays days..."
+
+ $cutoffDate = (Get-Date).AddDays(-$MaxCacheAgeDays)
+ $cleanedCount = 0
+
+ if (Test-Path -Path $CacheRootDirectory) {
+ # Check CommonTypesDefinitions.xsd in root directory
+ $commonTypesFile = Join-Path $CacheRootDirectory "CommonTypesDefinitions.xsd"
+ if (Test-Path -Path $commonTypesFile) {
+ $metadata = Get-CacheMetadata -CachePath $CacheRootDirectory
+
+ try {
+ $lastAccessDate = [DateTime]::Parse($metadata.LastAccessDate)
+
+ if ($lastAccessDate -lt $cutoffDate) {
+ Write-Verbose "Removing old CommonTypesDefinitions cache (last accessed: $lastAccessDate)"
+ Remove-Item -Path $commonTypesFile -Force
+ # Also remove metadata and download summary if they exist
+ $metadataFile = Join-Path $CacheRootDirectory ".metadata"
+ $summaryFile = Join-Path $CacheRootDirectory "download-summary.json"
+ if (Test-Path -Path $metadataFile) { Remove-Item -Path $metadataFile -Force }
+ if (Test-Path -Path $summaryFile) { Remove-Item -Path $summaryFile -Force }
+ $cleanedCount++
+ }
+ }
+ catch {
+ Write-Warning "Failed to parse last access date for CommonTypesDefinitions, skipping cleanup"
+ }
+ }
+
+ # Clean versioned directories
+ $cacheDirectories = Get-ChildItem -Path $CacheRootDirectory -Directory
+
+ foreach ($cacheDir in $cacheDirectories) {
+ $fullCachePath = $cacheDir.FullName
+ $metadata = Get-CacheMetadata -CachePath $fullCachePath
+
+ try {
+ $lastAccessDate = [DateTime]::Parse($metadata.LastAccessDate)
+
+ if ($lastAccessDate -lt $cutoffDate) {
+ Write-Verbose "Removing old cache entry: $($cacheDir.Name) (last accessed: $lastAccessDate)"
+ Remove-Item -Path $fullCachePath -Recurse -Force
+ $cleanedCount++
+ }
+ }
+ catch {
+ Write-Warning "Failed to parse last access date for $($cacheDir.Name), skipping cleanup"
+ }
+ }
+ }
+
+ Write-Verbose "Cleaned $cleanedCount cache entries"
+ Write-Output $cleanedCount
+ }
+
+ "List" {
+ Write-Verbose "Listing cached versions..."
+
+ $cachedVersions = @()
+
+ if (Test-Path -Path $CacheRootDirectory) {
+ # First, check for CommonTypesDefinitions.xsd in the root cache directory
+ $commonTypesFile = Join-Path $CacheRootDirectory "CommonTypesDefinitions.xsd"
+ if (Test-Path -Path $commonTypesFile) {
+ $metadata = Get-CacheMetadata -CachePath $CacheRootDirectory
+
+ $versionInfo = [PSCustomObject]@{
+ CacheDirectory = $CacheRootDirectory
+ DirectoryName = "(root)"
+ Version = "common"
+ AvailableComponents = @("CommonTypesDefinitions")
+ CreatedDate = $metadata.CreatedDate
+ LastAccessDate = $metadata.LastAccessDate
+ CacheFormat = $metadata.CacheFormat
+ }
+
+ $cachedVersions += $versionInfo
+ Write-Verbose "Found CommonTypesDefinitions in root cache directory"
+ }
+
+ # Then check versioned directories
+ $cacheDirectories = Get-ChildItem -Path $CacheRootDirectory -Directory
+
+ foreach ($cacheDir in $cacheDirectories) {
+ $fullCachePath = $cacheDir.FullName
+ $metadata = Get-CacheMetadata -CachePath $fullCachePath
+
+ # Determine version from directory name
+ $dirName = $cacheDir.Name
+ if ($dirName -match "^v(.+)$") {
+ $version = $matches[1]
+ } else {
+ $version = $dirName
+ }
+
+ # Check what components are available in this cache
+ $availableComponents = @()
+ foreach ($component in $ComponentXsdMapping.Keys) {
+ # Skip CommonTypesDefinitions for versioned directories since it's in root
+ if ($component -eq "CommonTypesDefinitions") {
+ continue
+ }
+
+ $xsdFile = $ComponentXsdMapping[$component]
+ $filePath = Join-Path $fullCachePath $xsdFile
+ if (Test-Path -Path $filePath) {
+ $availableComponents += $component
+ }
+ }
+
+ $versionInfo = [PSCustomObject]@{
+ CacheDirectory = $fullCachePath
+ DirectoryName = $dirName
+ Version = $version
+ AvailableComponents = $availableComponents
+ CreatedDate = $metadata.CreatedDate
+ LastAccessDate = $metadata.LastAccessDate
+ CacheFormat = $metadata.CacheFormat
+ }
+
+ $cachedVersions += $versionInfo
+ Write-Verbose "Found cached version: $version with components: $($availableComponents -join ', ')"
+ }
+ }
+
+ Write-Verbose "Found $($cachedVersions.Count) cached entries total"
+ Write-Output $cachedVersions
+ }
+ }
+}
+catch {
+ Write-Error "Component cache management failed: $_"
+ exit 1
+}
\ No newline at end of file
diff --git a/tools/validation/ManifestValidator.ps1 b/tools/validation/ManifestValidator.ps1
new file mode 100644
index 0000000..8b1a630
--- /dev/null
+++ b/tools/validation/ManifestValidator.ps1
@@ -0,0 +1,155 @@
+[CmdletBinding()] # advanced script so the orchestrator's -Verbose:$Verbose pass-through binds
+param (
+    [string]$inputDirectory,
+    [string]$inputXml,
+    [string]$inputXsd,
+    [string]$outputDirectory,
+    [string]$xsdDirectory = $null # Optional separate XSD directory
+)
+
+<#
+.SYNOPSIS
+ Validates XML manifest files against XSD schemas.
+
+.DESCRIPTION
+ This script validates XML manifest files using XSD schemas. It supports both legacy mode
+ (XSD files in same directory as XML) and new mode (XSD files in separate directory).
+
+.PARAMETER inputDirectory
+ Directory containing the XML files to validate.
+
+.PARAMETER inputXml
+ Name of the XML file to validate.
+
+.PARAMETER inputXsd
+ Name of the primary XSD schema file.
+
+.PARAMETER outputDirectory
+ Directory where validation error files will be written.
+
+.PARAMETER xsdDirectory
+ Optional directory containing XSD schema files. If not provided, will look in inputDirectory.
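+
+.EXAMPLE
+    .\ManifestValidator.ps1 -inputDirectory ".\Packages\manifest" -inputXml "WorkloadManifest.xml" -inputXsd "WorkloadDefinition.xsd" -outputDirectory "." -xsdDirectory "$env:TEMP\FabricXSDCache\v1.101.0"
+    Validates WorkloadManifest.xml against a cached XSD (paths are illustrative).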
+#>
+
+try
+{
+ if (-not($inputDirectory -and $inputXml -and $inputXsd -and $outputDirectory))
+ {
+ throw "Invalid input parameters"
+ }
+
+ # Determine XSD directory - use provided xsdDirectory or fall back to inputDirectory
+ $actualXsdDirectory = if ($xsdDirectory) { $xsdDirectory } else { $inputDirectory }
+
+ Write-Verbose "Input Directory: $inputDirectory"
+ Write-Verbose "XSD Directory: $actualXsdDirectory"
+ Write-Verbose "Input XML: $inputXml"
+ Write-Verbose "Input XSD: $inputXsd"
+
+ # Verify XSD files exist
+ $primaryXsdPath = Join-Path $actualXsdDirectory $inputXsd
+ # CommonTypesDefinitions.xsd is now in the root cache directory (parent of versioned directories)
+ $commonXsdPath = Join-Path (Split-Path $actualXsdDirectory -Parent) "CommonTypesDefinitions.xsd"
+
+ if (-not (Test-Path -Path $primaryXsdPath)) {
+ throw "Primary XSD file not found: $primaryXsdPath"
+ }
+
+ if (-not (Test-Path -Path $commonXsdPath)) {
+ throw "Common types XSD file not found: $commonXsdPath"
+ }
+
+ Write-Verbose "Loading XSD schemas..."
+ Write-Verbose "Primary XSD: $primaryXsdPath"
+ Write-Verbose "Common XSD: $commonXsdPath"
+
+ $schemaSet = [System.Xml.Schema.XmlSchemaSet]::new()
+
+ # Load primary schema with proper disposal
+ $primaryReader = [System.IO.StreamReader]::new($primaryXsdPath)
+ try {
+ $schema = [System.Xml.Schema.XmlSchema]::Read($primaryReader, $null)
+ Write-Verbose "Loaded primary schema with target namespace: $($schema.TargetNamespace)"
+ }
+ finally {
+ $primaryReader.Close()
+ $primaryReader.Dispose()
+ }
+
+ # Load common schema with proper disposal
+ $commonReader = [System.IO.StreamReader]::new($commonXsdPath)
+ try {
+ $schemaCommon = [System.Xml.Schema.XmlSchema]::Read($commonReader, $null)
+ Write-Verbose "Loaded common schema with target namespace: $($schemaCommon.TargetNamespace)"
+ }
+ finally {
+ $commonReader.Close()
+ $commonReader.Dispose()
+ }
+
+ $schemaSet.Add($schema)
+ $schemaSet.Add($schemaCommon)
+ $schemaSet.Compile()
+ Write-Verbose "Schema set compiled successfully"
+ $settings = [System.Xml.XmlReaderSettings]::new()
+ $settings.ValidationType = [System.Xml.ValidationType]::Schema
+ $settings.ValidationFlags = [System.Xml.Schema.XmlSchemaValidationFlags]::ReportValidationWarnings
+ $settings.DtdProcessing = [System.Xml.DtdProcessing]::Prohibit
+ $settings.Schemas.Add($schemaSet)
+
+ # Enhanced validation event handler with debugging
+ $handler = [System.Xml.Schema.ValidationEventHandler] {
+        $eventArgs = $_ # copy $_ before entering nested blocks; avoid shadowing the automatic $args variable
+        Write-Verbose "Validation event: Severity=$($eventArgs.Severity), Message=$($eventArgs.Message)"
+
+        if ($eventArgs.Severity -eq [System.Xml.Schema.XmlSeverityType]::Warning -or $eventArgs.Severity -eq [System.Xml.Schema.XmlSeverityType]::Error)
+        {
+            Write-Host "VALIDATION ERROR DETECTED: $($eventArgs.Message)" -ForegroundColor Red
+            $scriptPath = Join-Path $PSScriptRoot "WriteErrorsToFile.ps1"
+            & $scriptPath -errors "$($eventArgs.Message)`r`n" -outputDirectory $outputDirectory
+ Write-Verbose "Error written to file via WriteErrorsToFile.ps1"
+ }
+ }
+ $settings.add_ValidationEventHandler($handler)
+
+ Write-Verbose "Starting XML validation for: $(Join-Path $inputDirectory $inputXml)"
+ $xmlPath = [string](Join-Path $inputDirectory $inputXml)
+ Write-Verbose "Full XML path: $xmlPath"
+
+ $reader = [System.Xml.XmlReader]::Create($xmlPath, [System.Xml.XmlReaderSettings]$settings)
+ try {
+ $nodeCount = 0
+ while ($reader.Read()) {
+ $nodeCount++
+ }
+ Write-Verbose "XML validation completed. Processed $nodeCount nodes."
+ }
+ finally {
+ $reader.Close()
+ $reader.Dispose()
+ }
+
+ # Additional validation logic (only for WorkloadManifest.xml)
+ if ($inputXml -eq "WorkloadManifest.xml") {
+ $workloadXml = [xml](Get-Content -Path (Join-Path $inputDirectory $inputXml))
+ $workloadName = $workloadXml.WorkloadManifestConfiguration.Workload.WorkloadName
+ $aadApp = $workloadXml.SelectSingleNode("//AADApp")
+ if ($aadApp -and (-not ($aadApp.ResourceId -clike "*$($workloadName)")) -and (-not ($aadApp.ResourceId -clike "*$($workloadName)/*")))
+ {
+ $scriptPath = Join-Path $PSScriptRoot "WriteErrorsToFile.ps1"
+ & $scriptPath -errors "AADApp.resourceId: $($aadApp.ResourceId), should contain the exact WorkloadName: $($workloadName)" -outputDirectory $outputDirectory
+ }
+ }
+}
+catch
+{
+ Write-Host "An error occurred:"
+ Write-Host $_
+}
\ No newline at end of file
diff --git a/tools/validation/README.md b/tools/validation/README.md
new file mode 100644
index 0000000..ccb1e4b
--- /dev/null
+++ b/tools/validation/README.md
@@ -0,0 +1,316 @@
+# Manual Manifest Validation System
+
+This documentation describes the manual validation system, which downloads XSD schemas from Microsoft's official schema repository based on the component-specific schema versions found in your manifest files.
+
+## Overview
+
+The validation system has been updated to work independently of the build process. Instead of using local XSD files, it automatically downloads the correct XSD schemas from Microsoft's official schema repository based on the schema versions specified in your manifest files. Each component (WorkloadDefinition, ItemDefinition) can have different schema versions, while CommonTypesDefinitions uses a single version-agnostic file.
+
+## Quick Start
+
+Basic validation of a package directory:
+
+```powershell
+.\Invoke-ManifestValidation.ps1 -PackageDirectory "Backend\src\Packages\manifest" -AppSettingsPath "Backend\src\appsettings.json"
+```
+
+## Components
+
+### Main Script
+- **`Invoke-ManifestValidation.ps1`** - Main orchestration script that coordinates the entire validation process
+
+### Core Components
+- **`Get-AllSchemaVersions.ps1`** - Detects schema versions for all components from manifest files
+- **`Download-XSDComponents.ps1`** - Downloads XSD files based on component-specific versions
+- **`Manage-ComponentXSDCache.ps1`** - Manages local cache for component-specific versions
+
+### Updated Validation Scripts
+- **`ManifestValidator.ps1`** - Core XSD validation (updated to support external XSD directory)
+- **`ItemManifestValidator.ps1`** - Item manifest validation (updated to support external XSD directory)
+- **`ValidateNoDefaults.ps1`** - Default value validation (unchanged)
+- **`WriteErrorsToFile.ps1`** - Error reporting (unchanged)
+- **`RemoveErrorFile.ps1`** - Error file cleanup (unchanged)
+
+## Usage Examples
+
+### Basic Validation
+```powershell
+# Validate package using default Microsoft repository
+.\Invoke-ManifestValidation.ps1 -PackageDirectory "Backend\src\Packages\manifest" -AppSettingsPath "Backend\src\appsettings.json"
+```
+
+### Force Re-download
+```powershell
+# Force re-download of XSD files even if cached
+.\Invoke-ManifestValidation.ps1 -PackageDirectory "Backend\src\Packages\manifest" -AppSettingsPath "Backend\src\appsettings.json" -Force
+```
+
+### Verbose Output
+```powershell
+# Show detailed execution information
+.\Invoke-ManifestValidation.ps1 -PackageDirectory "Backend\src\Packages\manifest" -AppSettingsPath "Backend\src\appsettings.json" -Verbose
+```
+
+### Custom Schema Repository
+```powershell
+# Use a different schema repository for XSD files
+.\Invoke-ManifestValidation.ps1 -PackageDirectory "Backend\src\Packages\manifest" -AppSettingsPath "Backend\src\appsettings.json" -SchemaBaseUrl "https://custom.schema.com/fabric/xsd"
+```
+
+### Skip Cache
+```powershell
+# Always download fresh XSD files to temporary location
+.\Invoke-ManifestValidation.ps1 -PackageDirectory "Backend\src\Packages\manifest" -AppSettingsPath "Backend\src\appsettings.json" -SkipCache
+```
+
+### Clean Cache
+```powershell
+# Clean old cache entries before validation
+.\Invoke-ManifestValidation.ps1 -PackageDirectory "Backend\src\Packages\manifest" -AppSettingsPath "Backend\src\appsettings.json" -CleanCache
+```
+
+## Parameters
+
+### Invoke-ManifestValidation.ps1
+
+| Parameter | Required | Default | Description |
+|-----------|----------|---------|-------------|
+| `PackageDirectory` | Yes | - | Path to package directory containing manifest files |
+| `AppSettingsPath` | Yes | - | Path to appsettings.json file for validation configuration |
+| `SchemaBaseUrl` | No | `https://schemas.microsoft.com/fabric/extensibility/xsd` | Microsoft schema repository base URL |
+| `CacheDirectory` | No | `%TEMP%\FabricXSDCache` | Root directory for XSD cache |
+| `Force` | No | `false` | Force re-download even if files are cached |
+| `SkipCache` | No | `false` | Skip cache entirely, use temporary directory |
+| `Verbose` | No | `false` | Enable verbose logging |
+| `CleanCache` | No | `false` | Clean old cache entries before validation |
+
+## Microsoft Schema Repository Structure
+
+The validation system downloads XSD files from Microsoft's official schema repository:
+
+```
+https://schemas.microsoft.com/fabric/extensibility/xsd/
+├── 1.100.0/
+│ ├── WorkloadDefinition.xsd
+│ └── ItemDefinition.xsd
+├── 1.101.0/
+│ ├── WorkloadDefinition.xsd
+│ └── ItemDefinition.xsd
+├── 1.102.0/
+│ ├── WorkloadDefinition.xsd
+│ └── ItemDefinition.xsd
+└── CommonTypesDefinitions.xsd (version-agnostic)
+```
+
+### XSD File Organization
+- **Versioned XSDs**: Each version folder contains component-specific XSD files
+ - `WorkloadDefinition.xsd` - Schema for WorkloadManifest.xml
+ - `ItemDefinition.xsd` - Schema for item manifest XML files
+- **Common Types**: Single version-agnostic file at repository root
+ - `CommonTypesDefinitions.xsd` - Common type definitions referenced by all schemas
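+
+Assuming the layout above, the downloader resolves URLs such as:
+
+```
+https://schemas.microsoft.com/fabric/extensibility/xsd/1.101.0/WorkloadDefinition.xsd
+https://schemas.microsoft.com/fabric/extensibility/xsd/1.102.0/ItemDefinition.xsd
+https://schemas.microsoft.com/fabric/extensibility/xsd/CommonTypesDefinitions.xsd
+```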
+
+## Schema Version Detection
+
+The system automatically detects schema versions from multiple sources:
+
+### Component Schema Versions
+Each component can have different schema versions:
+
+**WorkloadManifest.xml:**
+```xml
+<!-- Illustrative structure (the original example was lost in formatting);
+     element names follow the validation scripts, attribute placement may differ -->
+<WorkloadManifestConfiguration SchemaVersion="1.101.0">
+  <Workload WorkloadName="Org.MyWorkload">
+    ...
+  </Workload>
+</WorkloadManifestConfiguration>
+```
+
+**Item1.xml (Item Manifest):**
+```xml
+<!-- Illustrative structure (the original example was lost in formatting) -->
+<ItemManifestConfiguration SchemaVersion="1.102.0">
+  <Item TypeName="Org.MyWorkload.Item1">
+    <Workload WorkloadName="Org.MyWorkload" />
+  </Item>
+</ItemManifestConfiguration>
+```
+
+### Version Resolution Strategy
+- **WorkloadDefinition.xsd**: Uses version from WorkloadManifest.xml
+- **ItemDefinition.xsd**: Uses highest version from all item manifest files
+- **CommonTypesDefinitions.xsd**: Always uses "common" (version-agnostic)
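+
+As a sketch of the "highest version" rule (assumed helper logic, not the actual Get-AllSchemaVersions.ps1 implementation):
+
+```powershell
+# Pick the highest semantic version among all item manifests
+$itemVersions = @("1.100.0", "1.102.0", "1.101.0")
+$resolved = $itemVersions | Sort-Object { [version]$_ } | Select-Object -Last 1
+$resolved   # 1.102.0
+```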
+
+### Example Output
+```
+Component Schema Versions:
+ WorkloadDefinition = 1.101.0
+ ItemDefinition = 1.102.0
+ CommonTypesDefinitions = common
+```
+
+## Caching System
+
+### Cache Location
+By default, XSD files are cached in `%TEMP%\FabricXSDCache\`
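+
+To keep the cache somewhere else (for example on a build agent with a small temp drive), point `-CacheDirectory` at any writable path; the path below is only an example:
+
+```powershell
+.\Invoke-ManifestValidation.ps1 -PackageDirectory "Backend\src\Packages\manifest" -AppSettingsPath "Backend\src\appsettings.json" -CacheDirectory "D:\Build\FabricXSDCache"
+```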
+
+### Cache Structure
+The cache uses simplified version-based storage:
+
+```
+%TEMP%\FabricXSDCache\
+├── CommonTypesDefinitions.xsd (in root for XSD reference compatibility)
+├── .metadata
+├── download-summary.json
+├── v1.100.0\
+│ ├── WorkloadDefinition.xsd
+│ ├── ItemDefinition.xsd
+│ ├── .metadata
+│ └── download-summary.json
+├── v1.101.0\
+│ ├── WorkloadDefinition.xsd
+│ ├── ItemDefinition.xsd
+│ ├── .metadata
+│ └── download-summary.json
+└── v1.102.0\
+ ├── WorkloadDefinition.xsd
+ ├── ItemDefinition.xsd
+ ├── .metadata
+ └── download-summary.json
+```
+
+**Cache Directory Naming:**
+- **Versioned Components**: `v{version}` (e.g., `v1.101.0`, `v1.102.0`)
+- **Common Types**: Root cache directory, so versioned schemas resolve it one level up (e.g., `..\CommonTypesDefinitions.xsd`)
+- Each version gets its own cache directory for better organization
+
+### Cache Management
+- Cache entries are automatically validated for integrity
+- Old cache entries (>30 days) can be cleaned with `-CleanCache`
+- Cache can be bypassed with `-SkipCache`
+- Cache can be forced to refresh with `-Force`
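+
+For a shorter retention window than the 30-day default, the cache script can be invoked directly:
+
+```powershell
+.\Manage-ComponentXSDCache.ps1 -Action "Clean" -MaxCacheAgeDays 7
+```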
+
+## Validation Process Flow
+
+1. **Prerequisites Check**: Verify package directory and internet connectivity to Microsoft schema repository
+2. **Component Schema Version Detection**: Extract versions from WorkloadManifest.xml and all item manifest files
+3. **Version-Based Cache Management**: Check for cached XSD files in version-specific directories
+4. **Microsoft Schema Download**: Download missing XSD files from official Microsoft repository by version
+5. **File Verification**: Ensure all required XSD files are present and valid in their respective cache directories
+6. **Separate Component Validation**: Run validation scripts for each component using its specific XSD version
+7. **Error Reporting**: Check for validation errors and report results with component version details
+
+## Error Handling
+
+### Common Errors and Solutions
+
+**Error: Package directory not found**
+- Ensure the package directory path is correct and exists
+- Use absolute paths if relative paths don't work
+
+**Error: WorkloadManifest.xml not found**
+- Verify that WorkloadManifest.xml exists in the package directory
+- Check file name spelling and case sensitivity
+
+**Error: Failed to detect schema version**
+- Ensure WorkloadManifest.xml has a valid SchemaVersion attribute
+- Verify XML is well-formed and not corrupted
+
+**Error: Failed to download XSD files**
+- Check internet connectivity to schemas.microsoft.com
+- Verify Microsoft schema repository URL is correct and accessible
+- Check if specific version exists in the Microsoft schema repository
+
+**Error: Invalid XSD schema file**
+- Verify downloaded files are valid XSD schemas
+- Check Microsoft schema repository for file integrity
+
+### Exit Codes
+- `0` - Validation successful
+- `1` - Validation failed or error occurred
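+
+For scripted callers, a minimal wrapper (illustrative) can gate on the exit code:
+
+```powershell
+.\Invoke-ManifestValidation.ps1 -PackageDirectory "Backend\src\Packages\manifest" -AppSettingsPath "Backend\src\appsettings.json"
+if ($LASTEXITCODE -ne 0) {
+    throw "Manifest validation failed; see ValidationErrors.txt"
+}
+```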
+
+### Error Files
+Validation errors are written to `ValidationErrors.txt` in the script directory.
+
+## Migration from Build-Time Validation
+
+### Before (Build-Time)
+```xml
+<!-- Illustrative MSBuild hook (the original snippet was lost in formatting);
+     a target like this invoked the validation scripts on every build -->
+<Target Name="ValidateManifests" BeforeTargets="Build">
+  <Exec Command="powershell -File ValidationScripts\ManifestValidator.ps1 ..." />
+</Target>
+```
+
+### After (Manual)
+```powershell
+# Run manually before/after development
+.\Invoke-ManifestValidation.ps1 -PackageDirectory "Backend\src\Packages\manifest" -AppSettingsPath "Backend\src\appsettings.json"
+```
+
+### CI/CD Integration
+For continuous integration, you can still call the validation script:
+
+```yaml
+# Azure DevOps Pipeline example
+- task: PowerShell@2
+ displayName: 'Validate Manifests'
+ inputs:
+ filePath: 'tools/validation/Invoke-ManifestValidation.ps1'
+ arguments: '-PackageDirectory "Backend/src/Packages/manifest" -AppSettingsPath "Backend/src/appsettings.json" -Verbose'
+ pwsh: true
+```
+
+## Troubleshooting
+
+### Enable Verbose Logging
+Add `-Verbose` to see detailed execution information:
+
+```powershell
+.\Invoke-ManifestValidation.ps1 -PackageDirectory "Backend\src\Packages\manifest" -AppSettingsPath "Backend\src\appsettings.json" -Verbose
+```
+
+### Clear Cache
+If you encounter cache-related issues:
+
+```powershell
+# Clean old entries
+.\Invoke-ManifestValidation.ps1 -PackageDirectory "Backend\src\Packages\manifest" -AppSettingsPath "Backend\src\appsettings.json" -CleanCache
+
+# Or skip cache entirely
+.\Invoke-ManifestValidation.ps1 -PackageDirectory "Backend\src\Packages\manifest" -AppSettingsPath "Backend\src\appsettings.json" -SkipCache
+```
+
+### Force Fresh Download
+If XSD files seem outdated or corrupted:
+
+```powershell
+.\Invoke-ManifestValidation.ps1 -PackageDirectory "Backend\src\Packages\manifest" -AppSettingsPath "Backend\src\appsettings.json" -Force
+```
+
+### Manual Cache Management
+You can also manage cache manually using the cache management script:
+
+```powershell
+# List cached versions
+.\Manage-ComponentXSDCache.ps1 -Action "List"
+
+# Check a specific version
+.\Manage-ComponentXSDCache.ps1 -Version "1.101.0" -Action "Check"
+
+# Clean entries older than the default 30 days
+.\Manage-ComponentXSDCache.ps1 -Action "Clean"
+```
+
+## Best Practices
+
+1. **Run validation regularly** during development to catch issues early
+2. **Use caching** for better performance in repeated validations
+3. **Check error files** thoroughly when validation fails
+4. **Stay updated** with new schema versions from Microsoft's repository
+5. **Use verbose mode** when troubleshooting issues
+6. **Clean cache periodically** to avoid disk space issues
+
+## Support
+
+For issues or questions:
+1. Check this documentation first
+2. Enable verbose logging to get detailed error information
+3. Verify Microsoft schema repository accessibility
+4. Check internet connectivity to schemas.microsoft.com for XSD downloads
\ No newline at end of file
diff --git a/Backend/src/ValidationScripts/RemoveErrorFile.ps1 b/tools/validation/RemoveErrorFile.ps1
similarity index 100%
rename from Backend/src/ValidationScripts/RemoveErrorFile.ps1
rename to tools/validation/RemoveErrorFile.ps1
diff --git a/tools/validation/Test-ValidationSystem.ps1 b/tools/validation/Test-ValidationSystem.ps1
new file mode 100644
index 0000000..9420fce
--- /dev/null
+++ b/tools/validation/Test-ValidationSystem.ps1
@@ -0,0 +1,305 @@
+[CmdletBinding()] # advanced script so -Verbose is accepted as a common parameter
+param(
+    [string]$PackageDirectory = "..\..\Backend\src\Packages\manifest"
+)
+
+<#
+.SYNOPSIS
+ Test script to verify the manual validation system is working correctly.
+
+.DESCRIPTION
+ This script performs basic tests on the validation system components to ensure
+ they are functioning properly before running actual validation.
+
+.PARAMETER PackageDirectory
+ Path to the package directory to test with. Defaults to "..\..\Backend\src\Packages\manifest"
+
+.EXAMPLE
+ .\Test-ValidationSystem.ps1
+ Run basic tests with default package directory.
+
+.EXAMPLE
+ .\Test-ValidationSystem.ps1 -PackageDirectory "path\to\package" -Verbose
+ Run tests with custom package directory and verbose output.
+#>
+
+# Set verbose preference ($Verbose is not auto-defined; read the bound common parameter)
+if ($PSBoundParameters['Verbose']) {
+    $VerbosePreference = "Continue"
+}
+
+$script:TestResults = @()
+$script:TestCount = 0
+$script:PassCount = 0
+$script:FailCount = 0
+
+function Test-Component {
+ param(
+ [string]$TestName,
+ [scriptblock]$TestScript
+ )
+
+ $script:TestCount++
+ Write-Host "[$script:TestCount] Testing: $TestName" -ForegroundColor Cyan
+
+ try {
+ $result = & $TestScript
+ if ($result) {
+ Write-Host " ✓ PASS" -ForegroundColor Green
+ $script:PassCount++
+ $script:TestResults += [PSCustomObject]@{
+ Test = $TestName
+ Result = "PASS"
+ Message = ""
+ }
+ }
+ else {
+ Write-Host " ✗ FAIL" -ForegroundColor Red
+ $script:FailCount++
+ $script:TestResults += [PSCustomObject]@{
+ Test = $TestName
+ Result = "FAIL"
+ Message = "Test returned false"
+ }
+ }
+ }
+ catch {
+ Write-Host " ✗ FAIL: $($_.Exception.Message)" -ForegroundColor Red
+ $script:FailCount++
+ $script:TestResults += [PSCustomObject]@{
+ Test = $TestName
+ Result = "FAIL"
+ Message = $_.Exception.Message
+ }
+ }
+
+ Write-Host ""
+}
+
+function Test-FileExists {
+ param([string]$FilePath, [string]$Description)
+
+ if (Test-Path -Path $FilePath) {
+ Write-Verbose "✓ Found: $Description at $FilePath"
+ return $true
+ }
+ else {
+ Write-Verbose "✗ Missing: $Description at $FilePath"
+ return $false
+ }
+}
+
+Write-Host "Manual Validation System - Component Tests" -ForegroundColor Yellow
+Write-Host "=========================================" -ForegroundColor Yellow
+Write-Host ""
+
+# Test 1: Check if all required scripts exist
+Test-Component "Required Scripts Exist" {
+ $requiredScripts = @(
+ "Get-AllSchemaVersions.ps1",
+ "Download-XSDComponents.ps1",
+ "Manage-ComponentXSDCache.ps1",
+ "Invoke-ManifestValidation.ps1",
+ "ManifestValidator.ps1",
+ "ItemManifestValidator.ps1",
+ "ValidateNoDefaults.ps1",
+ "WriteErrorsToFile.ps1",
+ "RemoveErrorFile.ps1"
+ )
+
+ $allExist = $true
+ foreach ($script in $requiredScripts) {
+ $scriptPath = Join-Path $PSScriptRoot $script
+ if (-not (Test-FileExists $scriptPath "Script: $script")) {
+ $allExist = $false
+ }
+ }
+
+ return $allExist
+}
+
+# Test 2: Check if package directory exists
+Test-Component "Package Directory Exists" {
+ $fullPath = Resolve-Path $PackageDirectory -ErrorAction SilentlyContinue
+ if ($fullPath) {
+ Write-Verbose "✓ Package directory found: $fullPath"
+ return $true
+ }
+ else {
+ Write-Verbose "✗ Package directory not found: $PackageDirectory"
+ return $false
+ }
+}
+
+# Test 3: Check if WorkloadManifest.xml exists
+Test-Component "WorkloadManifest.xml Exists" {
+ $manifestPath = Join-Path $PackageDirectory "WorkloadManifest.xml"
+ return Test-FileExists $manifestPath "WorkloadManifest.xml"
+}
+
+# Test 4: Test component schema version detection
+Test-Component "Component Schema Version Detection" {
+ $versionScript = Join-Path $PSScriptRoot "Get-AllSchemaVersions.ps1"
+ $componentVersions = & $versionScript -PackageDirectory $PackageDirectory
+
+ if (-not $componentVersions -or $componentVersions.Count -eq 0) {
+ throw "Failed to detect component schema versions"
+ }
+
+ # Check for required components
+ if (-not $componentVersions.ContainsKey("WorkloadDefinition")) {
+ throw "Missing component version: WorkloadDefinition"
+ }
+
+ if (-not $componentVersions.ContainsKey("CommonTypesDefinitions")) {
+ throw "Missing component version: CommonTypesDefinitions"
+ }
+
+ # Validate each component version
+ foreach ($component in $componentVersions.Keys) {
+ $version = $componentVersions[$component]
+
+ # CommonTypesDefinitions uses "common", others use x.y.z format
+ if ($component -eq "CommonTypesDefinitions") {
+ if ($version -ne "common") {
+ throw "Invalid version format for $component`: $version (expected: common)"
+ }
+ } else {
+ # All other components (WorkloadDefinition and individual items) use semantic versioning
+ if ($version -notmatch '^\d+\.\d+\.\d+$') {
+ throw "Invalid version format for $component`: $version (expected format: x.y.z)"
+ }
+ }
+ }
+
+ # Check that we have at least one item component (anything that's not WorkloadDefinition or CommonTypesDefinitions)
+ $itemComponents = $componentVersions.Keys | Where-Object { $_ -ne "WorkloadDefinition" -and $_ -ne "CommonTypesDefinitions" }
+ if ($itemComponents.Count -eq 0) {
+ throw "No item manifest components found"
+ }
+
+ Write-Verbose "✓ Detected component versions:"
+ foreach ($component in $componentVersions.Keys | Sort-Object) {
+ Write-Verbose " $component = $($componentVersions[$component])"
+ }
+ return $true
+}
+
+# Test 5: Test component cache management (basic functionality)
+Test-Component "Component Cache Management" {
+ $cacheScript = Join-Path $PSScriptRoot "Manage-ComponentXSDCache.ps1"
+ $testCacheDir = Join-Path $env:TEMP "FabricXSDTest_$(Get-Date -Format 'yyyyMMdd_HHmmss')"
+
+ $testComponentVersions = @{
+ WorkloadDefinition = "1.0.0"
+ ItemDefinition = "1.0.0"
+ CommonTypesDefinitions = "1.0.0"
+ }
+
+ try {
+        # Test cache directory creation (Create returns a hashtable of component -> path)
+        $createdDirs = & $cacheScript -ComponentVersions $testComponentVersions -CacheRootDirectory $testCacheDir -Action "Create"
+
+        foreach ($dir in $createdDirs.Values) {
+            if (-not (Test-Path $dir)) {
+                throw "Failed to create cache directory: $dir"
+            }
+        }
+
+        # Test cache check
+        $checkResult = & $cacheScript -ComponentVersions $testComponentVersions -CacheRootDirectory $testCacheDir -Action "Check"
+
+        # Should return an empty hashtable since no XSD files are present yet
+        if ($checkResult -and $checkResult.Count -gt 0) {
+            throw "Cache check should return no paths for a directory without XSD files"
+        }
+
+ Write-Verbose "✓ Component cache management basic functionality working"
+ return $true
+ }
+ finally {
+ # Cleanup test cache directory
+ if (Test-Path $testCacheDir) {
+ Remove-Item $testCacheDir -Recurse -Force -ErrorAction SilentlyContinue
+ }
+ }
+}
+
+# Test 6: Test internet connectivity (optional)
+Test-Component "Internet Connectivity" {
+ try {
+ $testConnection = Test-NetConnection -ComputerName "github.com" -Port 443 -InformationLevel Quiet -WarningAction SilentlyContinue
+ if ($testConnection) {
+ Write-Verbose "✓ Can connect to GitHub"
+ return $true
+ }
+ else {
+ Write-Verbose "✗ Cannot connect to GitHub (this may be expected in some environments)"
+ return $false
+ }
+ }
+ catch {
+ Write-Verbose "✗ Internet connectivity test failed: $($_.Exception.Message)"
+ return $false
+ }
+}
+
+# Test 7: Test main script parameter validation
+Test-Component "Main Script Parameter Validation" {
+ $mainScript = Join-Path $PSScriptRoot "Invoke-ManifestValidation.ps1"
+
+    # Test with invalid package directory (should fail gracefully). A dummy
+    # AppSettingsPath is passed so a mandatory-parameter prompt cannot block the test.
+    try {
+        $result = & $mainScript -PackageDirectory "NonExistentDirectory" -AppSettingsPath "NonExistentAppSettings.json" -ErrorAction SilentlyContinue
+ # Script should exit with error code 1
+ if ($LASTEXITCODE -eq 1) {
+ Write-Verbose "✓ Main script correctly validates parameters"
+ return $true
+ }
+ else {
+ throw "Main script should have failed with invalid directory"
+ }
+ }
+ catch {
+ # Expected behavior - script should throw error for invalid directory
+ Write-Verbose "✓ Main script correctly validates parameters (threw exception as expected)"
+ return $true
+ }
+}
+
+# Summary
+Write-Host "Test Summary" -ForegroundColor Yellow
+Write-Host "============" -ForegroundColor Yellow
+Write-Host "Total Tests: $script:TestCount" -ForegroundColor White
+Write-Host "Passed: $script:PassCount" -ForegroundColor Green
+Write-Host "Failed: $script:FailCount" -ForegroundColor $(if ($script:FailCount -gt 0) { "Red" } else { "Green" })
+Write-Host ""
+
+# Show detailed results if there were failures
+if ($script:FailCount -gt 0) {
+ Write-Host "Failed Tests:" -ForegroundColor Red
+ $script:TestResults | Where-Object { $_.Result -eq "FAIL" } | ForEach-Object {
+ Write-Host " - $($_.Test): $($_.Message)" -ForegroundColor Red
+ }
+ Write-Host ""
+}
+
+# Recommendations
+Write-Host "Recommendations:" -ForegroundColor Yellow
+if ($script:FailCount -eq 0) {
+ Write-Host "✓ All tests passed! The validation system appears to be ready for use." -ForegroundColor Green
+ Write-Host " You can now run: .\Invoke-ManifestValidation.ps1 -PackageDirectory '$PackageDirectory'" -ForegroundColor Green
+}
+else {
+ Write-Host "✗ Some tests failed. Please address the issues before using the validation system." -ForegroundColor Red
+ Write-Host " Check the error messages above and ensure all required files are present." -ForegroundColor Red
+}
+
+if (($script:TestResults | Where-Object { $_.Test -eq "Internet Connectivity" -and $_.Result -eq "FAIL" })) {
+ Write-Host "⚠ Internet connectivity test failed. The validation system will still work if:" -ForegroundColor Yellow
+ Write-Host " - You use -SkipCache to download XSDs to a temporary location" -ForegroundColor Yellow
+ Write-Host " - Or if the required XSD files are already cached" -ForegroundColor Yellow
+}
+
+Write-Host ""
+Write-Host "For more information, see the README.md file in the ValidationScripts directory." -ForegroundColor Cyan
+
+# Exit with appropriate code
+exit $(if ($script:FailCount -gt 0) { 1 } else { 0 })
\ No newline at end of file
diff --git a/tools/validation/ValidateNoDefaults.ps1 b/tools/validation/ValidateNoDefaults.ps1
new file mode 100644
index 0000000..8859502
--- /dev/null
+++ b/tools/validation/ValidateNoDefaults.ps1
@@ -0,0 +1,82 @@
+param (
+ [string]$outputDirectory,
+ [string]$appsettingsLocation = "appsettings.json",
+ [string]$packageDirectory = ""
+)
+
+<#
+.SYNOPSIS
+ Validates that WorkloadManifest.xml values match appsettings.json values.
+
+.DESCRIPTION
+ This script checks that the AADApp configuration in WorkloadManifest.xml
+ matches the corresponding values in appsettings.json file.
+
+.PARAMETER outputDirectory
+ Directory where validation errors will be written.
+
+.PARAMETER appsettingsLocation
+ Path to the appsettings.json file. Can be relative or absolute.
+
+.PARAMETER packageDirectory
+ Directory containing the WorkloadManifest.xml file. If not provided,
+ will attempt to find it relative to the script location.
+
+.EXAMPLE
+ .\ValidateNoDefaults.ps1 -outputDirectory "C:\temp" -appsettingsLocation "..\..\Backend\src\appsettings.json"
+#>
+
+try
+{
+ if (-not($outputDirectory))
+ {
+ throw "Invalid input: outputDirectory parameter is required"
+ }
+
+ # Resolve appsettings path
+ $appSettingsPath = $appsettingsLocation
+ if (-not (Test-Path $appSettingsPath)) {
+ throw "AppSettings file not found at: $appSettingsPath"
+ }
+
+    # Load appsettings.json, stripping "// " line comments that ConvertFrom-Json cannot parse on Windows PowerShell
+ $appSettingsContent = (Get-Content $appSettingsPath) -replace '// .*', '' -join [Environment]::NewLine | ConvertFrom-Json
+
+ # Determine WorkloadManifest.xml path
+ if ($packageDirectory -and (Test-Path $packageDirectory)) {
+ $workloadXmlPath = Join-Path -Path $packageDirectory -ChildPath "WorkloadManifest.xml"
+ } else {
+ # Fallback to relative path for backward compatibility
+ $workloadXmlPath = Join-Path -Path $PSScriptRoot -ChildPath "..\..\Backend\src\Packages\manifest\WorkloadManifest.xml"
+ }
+
+ if (-not (Test-Path $workloadXmlPath)) {
+ throw "WorkloadManifest.xml not found at: $workloadXmlPath"
+ }
+
+ # Load and validate WorkloadManifest.xml
+ $workloadXml = [xml](Get-Content -Path $workloadXmlPath)
+ $aadApp = $workloadXml.SelectSingleNode("//AADApp")
+
+ if (-not $aadApp) {
+ throw "AADApp configuration not found in WorkloadManifest.xml"
+ }
+
+ # Check if values match
+ if (($appSettingsContent.Audience -ne $aadApp.ResourceId) -or ($appSettingsContent.ClientId -ne $aadApp.AppId))
+ {
+ $scriptPath = Join-Path -Path $PSScriptRoot -ChildPath "WriteErrorsToFile.ps1"
+ & $scriptPath -errors "Non matching default values in WorkloadManifest.xml file" -outputDirectory $outputDirectory
+ }
+}
+catch
+{
+ Write-Host "An error occurred in ValidateNoDefaults.ps1:"
+ Write-Host $_.Exception.Message
+
+ # Write error to file for consistent error reporting
+ $scriptPath = Join-Path -Path $PSScriptRoot -ChildPath "WriteErrorsToFile.ps1"
+ if (Test-Path $scriptPath) {
+ & $scriptPath -errors "ValidateNoDefaults.ps1 error: $($_.Exception.Message)" -outputDirectory $outputDirectory
+ }
+}
\ No newline at end of file
diff --git a/Backend/src/ValidationScripts/WorkloadManifestValidator.ps1 b/tools/validation/WorkloadManifestValidator.ps1
similarity index 100%
rename from Backend/src/ValidationScripts/WorkloadManifestValidator.ps1
rename to tools/validation/WorkloadManifestValidator.ps1
diff --git a/Backend/src/ValidationScripts/WriteErrorsToFile.ps1 b/tools/validation/WriteErrorsToFile.ps1
similarity index 100%
rename from Backend/src/ValidationScripts/WriteErrorsToFile.ps1
rename to tools/validation/WriteErrorsToFile.ps1