Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -18,16 +18,11 @@ import type { RequestHandler } from 'express';
import type { RouterOptions } from '../models/RouterOptions';
import {
authorize,
ClusterProjectResult,
filterAuthorizedClusterIds,
filterAuthorizedClusterProjectIds,
filterAuthorizedClustersAndProjects,
} from '../util/checkPermissions';
import { rosPluginPermissions } from '@red-hat-developer-hub/plugin-redhat-resource-optimization-common/permissions';
import { getTokenFromApi } from '../util/tokenUtil';
import { AuthorizeResult } from '@backstage/plugin-permission-common';
import { deepMapKeys } from '@red-hat-developer-hub/plugin-redhat-resource-optimization-common/json-utils';
import camelCase from 'lodash/camelCase';
import { RecommendationList } from '@red-hat-developer-hub/plugin-redhat-resource-optimization-common';

export const getAccess: (options: RouterOptions) => RequestHandler =
options => async (_, response) => {
Expand Down Expand Up @@ -74,39 +69,51 @@ export const getAccess: (options: RouterOptions) => RequestHandler =
clusterDataMap = clusterMapDataFromCache;
allProjects = projectDataFromCache;
} else {
// token
const token = await getTokenFromApi(options);

// hit /recommendation API endpoint
const optimizationResponse = await optimizationApi.getRecommendationList(
{
query: {
limit: -1,
orderHow: 'desc',
orderBy: 'last_reported',
},
},
{ token },
);

if (optimizationResponse.ok) {
const data = await optimizationResponse.json();
const camelCaseTransformedResponse = deepMapKeys(
data,
camelCase as (value: string | number) => string,
) as RecommendationList;
try {
// token
const token = await getTokenFromApi(options);

// hit /recommendation API endpoint
const optimizationResponse =
await optimizationApi.getRecommendationList(
{
query: {
limit: -1,
orderHow: 'desc',
orderBy: 'last_reported',
},
},
{ token },
);

// OptimizationsClient already transforms to camelCase when token is provided
const recommendationList = await optimizationResponse.json();

// Check if response contains errors
if ((recommendationList as any).errors) {
logger.error(
'API returned errors:',
(recommendationList as any).errors,
);
return response.status(500).json({
decision: AuthorizeResult.DENY,
error: 'API returned errors',
authorizeClusterIds: [],
authorizeProjects: [],
});
}

// retrieve cluster data from the API result
if (camelCaseTransformedResponse.data) {
camelCaseTransformedResponse.data.map(recommendation => {
if (recommendationList.data) {
recommendationList.data.map(recommendation => {
if (recommendation.clusterAlias && recommendation.clusterUuid)
clusterDataMap[recommendation.clusterAlias] =
recommendation.clusterUuid;
});

allProjects = [
...new Set(
camelCaseTransformedResponse.data.map(
recommendationList.data.map(
recommendation => recommendation.project,
),
),
Expand All @@ -120,47 +127,68 @@ export const getAccess: (options: RouterOptions) => RequestHandler =
ttl: 15 * 60 * 1000,
});
}
} else {
throw new Error(optimizationResponse.statusText);
} catch (error) {
logger.error('Error fetching recommendations', error);

// Return unauthorized response on any error
return response.status(500).json({
decision: AuthorizeResult.DENY,
error: 'Failed to fetch cluster data',
authorizeClusterIds: [],
authorizeProjects: [],
});
}
}

let authorizeClusterIds: string[] = await filterAuthorizedClusterIds(
_,
permissions,
httpAuth,
clusterDataMap,
// RBAC Filtering: Single batch call for both cluster and cluster-project permissions
logger.info(
`Checking permissions for ${
Object.keys(clusterDataMap).length
} clusters and ${allProjects.length} projects`,
);
logger.info(`Cluster names: ${Object.keys(clusterDataMap).join(', ')}`);
logger.info(`Projects: ${allProjects.join(', ')}`);

const authorizeClustersProjects: ClusterProjectResult[] =
await filterAuthorizedClusterProjectIds(
const { authorizedClusterIds, authorizedClusterProjects } =
await filterAuthorizedClustersAndProjects(
_,
permissions,
httpAuth,
clusterDataMap,
allProjects,
);

authorizeClusterIds = [
logger.info(
`Authorization results: ${authorizedClusterIds.length} cluster IDs, ${authorizedClusterProjects.length} cluster-project combinations`,
);
logger.info(`Authorized cluster IDs: ${authorizedClusterIds.join(', ')}`);
logger.info(
`Authorized cluster-projects: ${authorizedClusterProjects
.map(cp => `${cp.cluster}.${cp.project}`)
.join(', ')}`,
);

// Combine cluster IDs from both cluster-level and project-level permissions
const finalAuthorizedClusterIds = [
...new Set([
...authorizeClusterIds,
...authorizeClustersProjects.map(result => result.cluster),
...authorizedClusterIds,
...authorizedClusterProjects.map(result => result.cluster),
]),
];

const authorizeProjects = authorizeClustersProjects.map(
const authorizeProjects = authorizedClusterProjects.map(
result => result.project,
);

if (authorizeClusterIds.length > 0) {
if (finalAuthorizedClusterIds.length > 0) {
finalDecision = AuthorizeResult.ALLOW;
} else {
finalDecision = AuthorizeResult.DENY;
}

const body = {
decision: finalDecision,
authorizeClusterIds,
authorizeClusterIds: finalAuthorizedClusterIds,
authorizeProjects,
};

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,15 @@ import type { RequestHandler } from 'express';
import type { RouterOptions } from '../models/RouterOptions';
import {
authorize,
filterAuthorizedClusterIds,
filterAuthorizedClustersAndProjects,
} from '../util/checkPermissions';
import { costPluginPermissions } from '@red-hat-developer-hub/plugin-redhat-resource-optimization-common/permissions';
import { AuthorizeResult } from '@backstage/plugin-permission-common';
import { getTokenFromApi } from '../util/tokenUtil';

// Cache keys for cost management clusters
// Cache keys for cost management clusters and projects
const COST_CLUSTERS_CACHE_KEY = 'cost_clusters';
const COST_PROJECTS_CACHE_KEY = 'cost_projects';
const CACHE_TTL = 15 * 60 * 1000; // 15 minutes

export const getCostManagementAccess: (
Expand Down Expand Up @@ -56,68 +57,117 @@ export const getCostManagementAccess: (
return response.json(body);
}

// RBAC Filtering logic for Cluster using cost.{clusterName} permissions
// RBAC Filtering logic for Cluster & Project using cost.{clusterName} and cost.{clusterName}.{projectName} permissions
let clusterDataMap: Record<string, string> = {};
let allProjects: string[] = [];

// Check the cluster & project data in the cache first
const clustersFromCache = (await cache.get(COST_CLUSTERS_CACHE_KEY)) as
| Record<string, string>
| undefined;
const projectsFromCache = (await cache.get(COST_PROJECTS_CACHE_KEY)) as
| string[]
| undefined;

if (clustersFromCache) {
if (clustersFromCache && projectsFromCache) {
clusterDataMap = clustersFromCache;
logger.info(`Using cached data: ${clusterDataMap.length} clusters`);
allProjects = projectsFromCache;
logger.info(
`Using cached data: ${Object.keys(clusterDataMap).length} clusters, ${
allProjects.length
} projects`,
);
} else {
// Fetch clusters from Cost Management API
// Fetch clusters and projects from Cost Management API
try {
const token = await getTokenFromApi(options);

const clustersResponse = await costManagementApi.searchOpenShiftClusters(
'',
{ token },
);
// Fetch clusters and projects in parallel for better performance
const [clustersResponse, projectsResponse] = await Promise.all([
costManagementApi.searchOpenShiftClusters('', { token, limit: 1000 }),
costManagementApi.searchOpenShiftProjects('', { token, limit: 1000 }),
]);

const clustersData = await clustersResponse.json();
const projectsData = await projectsResponse.json();

// Extract cluster names from response
clustersData.data?.map(
clustersData.data?.forEach(
(cluster: { value: string; cluster_alias: string }) => {
if (cluster.cluster_alias && cluster.value)
logger.info(
`Cluster: ${cluster.cluster_alias} -> ${cluster.value}`,
);
clusterDataMap[cluster.cluster_alias] = cluster.value;
if (cluster.cluster_alias && cluster.value) {
clusterDataMap[cluster.cluster_alias] = cluster.value;
}
},
);

// Extract unique project names
allProjects = [
...new Set(
projectsData.data?.map((project: { value: string }) => project.value),
),
].filter(project => project !== undefined) as string[];

logger.info(
`Fetched ${Object.keys(clusterDataMap).length} clusters and ${
allProjects.length
} projects from Cost Management API`,
);

// Store in cache
await cache.set(COST_CLUSTERS_CACHE_KEY, clusterDataMap, {
ttl: CACHE_TTL,
});
await Promise.all([
cache.set(COST_CLUSTERS_CACHE_KEY, clusterDataMap, {
ttl: CACHE_TTL,
}),
cache.set(COST_PROJECTS_CACHE_KEY, allProjects, {
ttl: CACHE_TTL,
}),
]);
} catch (error) {
logger.error(`Failed to fetch clusters from Cost Management API`, error);
throw error;
logger.error('Error fetching cost management data', error);

// Return unauthorized response on any error
return response.status(500).json({
decision: AuthorizeResult.DENY,
error: 'Failed to fetch cluster data',
authorizedClusterNames: [],
authorizeProjects: [],
});
}
}

// Filter clusters based on cost.{clusterName} permissions
const authorizedClusterNames: string[] = await filterAuthorizedClusterIds(
_,
permissions,
httpAuth,
clusterDataMap,
'cost',
// RBAC Filtering: Single batch call for both cluster and cluster-project permissions

const { authorizedClusterIds, authorizedClusterProjects } =
await filterAuthorizedClustersAndProjects(
_,
permissions,
httpAuth,
clusterDataMap,
allProjects,
'cost',
);

// Combine cluster names from both cluster-level and project-level permissions
const finalAuthorizedClusterNames = [
...new Set([
...authorizedClusterIds,
...authorizedClusterProjects.map(result => result.cluster),
]),
];

const authorizeProjects = authorizedClusterProjects.map(
result => result.project,
);

// If user has access to at least one cluster, allow access
if (authorizedClusterNames.length > 0) {
if (finalAuthorizedClusterNames.length > 0) {
finalDecision = AuthorizeResult.ALLOW;
}

const body = {
decision: finalDecision,
authorizedClusterNames,
authorizeProjects: [],
authorizedClusterNames: finalAuthorizedClusterNames,
authorizeProjects,
};

return response.json(body);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ import {
} from '@backstage/backend-plugin-api';
import type { OptimizationsApi } from '@red-hat-developer-hub/plugin-redhat-resource-optimization-common/clients';
import { OptimizationsClient } from '@red-hat-developer-hub/plugin-redhat-resource-optimization-common/clients';
import { DEFAULT_API_BASE_URL } from '../util/constant';
import { DEFAULT_COST_MANAGEMENT_PROXY_BASE_URL } from '../util/constant';

export const optimizationServiceRef = createServiceRef<OptimizationsApi>({
id: 'optimization-client',
Expand All @@ -32,9 +32,10 @@ export const optimizationServiceRef = createServiceRef<OptimizationsApi>({
configApi: coreServices.rootConfig,
},
async factory({ configApi }): Promise<OptimizationsApi> {
// Base URL without /cost-management/v1 since OptimizationsClient appends it
const baseUrl =
configApi.getOptionalString('optimizationsBaseUrl') ??
DEFAULT_API_BASE_URL;
DEFAULT_COST_MANAGEMENT_PROXY_BASE_URL;

return new OptimizationsClient({
discoveryApi: {
Expand Down
Loading