Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions changelog/7868-fix-property-dag-filter.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
type: Fixed
description: Fixed property-based DAG filtering for SaaS connector datasets.
pr: 7868
4 changes: 4 additions & 0 deletions changelog/7882-fix-login-error-message.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
type: Fixed
description: Fixed login page showing SSO configuration error instead of generic login failure message when entering incorrect credentials
pr: 7882
labels: []
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ export const useIntegrationFilters = () => {
entraMonitor,
newIntegrationManagement,
webMonitor,
alphaJiraIntegration,
jiraIntegration,
},
} = useFlags();

Expand Down Expand Up @@ -63,7 +63,7 @@ export const useIntegrationFilters = () => {
);
}

if (!alphaJiraIntegration) {
if (!jiraIntegration) {
staticIntegrations = staticIntegrations.filter(
(integration) =>
integration.placeholder.connection_type !==
Expand Down Expand Up @@ -99,7 +99,7 @@ export const useIntegrationFilters = () => {
connectionTypes,
awsMonitor,
entraMonitor,
alphaJiraIntegration,
jiraIntegration,
newIntegrationManagement,
]);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ type RequestDetailsProps = {
const RequestDetails = ({ subjectRequest }: RequestDetailsProps) => {
const { plus: hasPlus } = useFeatures();
const {
flags: { alphaJiraIntegration },
flags: { jiraIntegration },
} = useFlags();
const {
id,
Expand Down Expand Up @@ -111,7 +111,7 @@ const RequestDetails = ({ subjectRequest }: RequestDetailsProps) => {
</Form.Item>
</Form>
<RequestAttachments subjectRequest={subjectRequest} />
{hasPlus && alphaJiraIntegration && (
{hasPlus && jiraIntegration && (
<RequestJiraTickets subjectRequest={subjectRequest} />
)}
</div>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,7 @@ export const PrivacyRequestsDashboard = () => {
</div>
) : (
<Flex vertical gap="medium">
<Spin spinning={isFetching}>
<Spin spinning={isFetching} className="w-full">
<List<PrivacyRequestResponse>
dataSource={requests}
rowSelection={{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,8 +43,7 @@ const usePrivacyRequestsFilters = ({
const { flags } = useFlags();
const allowedStatusFilterOptions = [...SubjectRequestStatusMap.keys()].filter(
(status) =>
status !== PrivacyRequestStatus.PENDING_EXTERNAL ||
flags.alphaJiraIntegration,
status !== PrivacyRequestStatus.PENDING_EXTERNAL || flags.jiraIntegration,
);

const [filters, setFilters] = useQueryStates(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ export const Header = ({ privacyRequest, primaryIdentity }: HeaderProps) => {
</Flex>
)}
{/* Only the first ticket is shown — at most one Jira ticket per request is supported today */}
{flags.alphaJiraIntegration && privacyRequest.jira_tickets?.[0] && (
{flags.jiraIntegration && privacyRequest.jira_tickets?.[0] && (
<Flex gap={4} align="center">
<Typography.Link
href={privacyRequest.jira_tickets[0].ticket_url}
Expand Down
6 changes: 3 additions & 3 deletions clients/admin-ui/src/flags.json
Original file line number Diff line number Diff line change
Expand Up @@ -98,11 +98,11 @@
"test": false,
"production": false
},
"alphaJiraIntegration": {
"label": "Alpha Jira integration",
"jiraIntegration": {
"label": "Jira integration",
"description": "Enable Jira DSR integration for creating and managing Jira issues from privacy requests",
"development": true,
"test": false,
"test": true,
"production": false
},
"alphaPurposeBasedAccessControl": {
Expand Down
4 changes: 2 additions & 2 deletions clients/admin-ui/src/pages/integrations/[id].tsx
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ const IntegrationDetailView: NextPage = () => {
const oauthHandled = useRef(false);

const {
flags: { alphaJiraIntegration },
flags: { jiraIntegration },
} = useFlags();

const {
Expand Down Expand Up @@ -124,7 +124,7 @@ const IntegrationDetailView: NextPage = () => {
if (
!!connection &&
connection.connection_type === ConnectionType.JIRA_TICKET &&
!alphaJiraIntegration
!jiraIntegration
) {
router.push(INTEGRATION_MANAGEMENT_ROUTE);
}
Expand Down
6 changes: 3 additions & 3 deletions clients/admin-ui/src/pages/integrations/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ const IntegrationListView: NextPage = () => {
const oauthHandled = useRef(false);

const {
flags: { newIntegrationManagement, alphaJiraIntegration },
flags: { newIntegrationManagement, jiraIntegration },
} = useFlags();

useEffect(() => {
Expand Down Expand Up @@ -138,12 +138,12 @@ const IntegrationListView: NextPage = () => {
if (!newIntegrationManagement) {
types = types.filter((type) => type !== ConnectionType.SAAS);
}
if (!alphaJiraIntegration) {
if (!jiraIntegration) {
types = types.filter((type) => type !== ConnectionType.JIRA_TICKET);
}

return types;
}, [newIntegrationManagement, alphaJiraIntegration]);
}, [newIntegrationManagement, jiraIntegration]);

const { data, isLoading, error } = useGetAllDatastoreConnectionsQuery({
connection_type: connectionTypesToQuery,
Expand Down
27 changes: 15 additions & 12 deletions clients/admin-ui/src/pages/login.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -168,22 +168,25 @@
dispatch(login(user));
} catch (error) {
setShowAnimation(false);
// eslint-disable-next-line no-console
console.log(error);
let defaultErrorMsg: string;
console.error(error);

Check warning on line 171 in clients/admin-ui/src/pages/login.tsx

View workflow job for this annotation

GitHub Actions / Clients-Unit

Unexpected console statement

Check warning on line 171 in clients/admin-ui/src/pages/login.tsx

View workflow job for this annotation

GitHub Actions / Clients-Unit

Unexpected console statement
let errorMsg: string;
if (isFromInvite) {
defaultErrorMsg = "Setup failed. Please try the invite link again.";
// Invite and reset-password flows may surface backend error detail
// (e.g. expired/invalid token) since it is actionable to the user.
errorMsg = getErrorMessage(
error as RTKErrorResult["error"],
"Setup failed. Please try the invite link again.",
);
} else if (isResetPassword) {
defaultErrorMsg =
"Password reset failed. The link may have expired. Please request a new one.";
errorMsg = getErrorMessage(
error as RTKErrorResult["error"],
"Password reset failed. The link may have expired. Please request a new one.",
);
} else {
defaultErrorMsg =
"Login failed. Please check your credentials and try again.";
// Always show a generic message for standard login failures to avoid
// leaking backend details (SSO config, authorization state, etc.)
errorMsg = "Login failed. Please check your credentials and try again.";
}
const errorMsg = getErrorMessage(
error as RTKErrorResult["error"],
defaultErrorMsg,
);
message.error(errorMsg);
} finally {
setIsSubmitting(false);
Expand Down
1 change: 1 addition & 0 deletions src/fides/api/util/saas_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -413,6 +413,7 @@ def merge_datasets(dataset: GraphDataset, config_dataset: GraphDataset) -> Graph
name=dataset.name,
collections=collections,
connection_key=dataset.connection_key,
property_ids=dataset.property_ids,
)


Expand Down
114 changes: 114 additions & 0 deletions tests/ops/util/test_saas_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -322,6 +322,120 @@ def test_merge_into_dataset_with_data_categories(self):
merged_dataset = merge_datasets(saas_dataset, saas_config)
assert merged_dataset.collections[0].data_categories == {"user"}

def test_merge_preserves_property_ids(self):
"""Verify that property_ids from the customer dataset survive the merge."""
saas_dataset = GraphDataset(
name="saas_dataset",
collections=[
Collection(
name="member",
fields=[
ScalarField(name="list_id"),
],
)
],
connection_key="connection_key",
property_ids=["FDS-12345", "FDS-67890"],
)

saas_config = GraphDataset(
name="saas_config",
collections=[
Collection(
name="member",
fields=[
ScalarField(name="query", identity="email"),
],
)
],
connection_key="connection_key",
)

merged_dataset = merge_datasets(saas_dataset, saas_config)
assert merged_dataset.property_ids == ["FDS-12345", "FDS-67890"]

def test_merge_preserves_empty_property_ids(self):
"""Universal datasets (empty property_ids) stay universal after merge."""
saas_dataset = GraphDataset(
name="saas_dataset",
collections=[
Collection(
name="member",
fields=[ScalarField(name="list_id")],
)
],
connection_key="connection_key",
property_ids=[],
)

saas_config = GraphDataset(
name="saas_config",
collections=[
Collection(
name="member",
fields=[ScalarField(name="query", identity="email")],
)
],
connection_key="connection_key",
)

merged_dataset = merge_datasets(saas_dataset, saas_config)
assert merged_dataset.property_ids == []

def test_merge_uses_dataset_property_ids_not_config(self):
"""property_ids come from the dataset only, not the config_dataset."""
saas_dataset = GraphDataset(
name="saas_dataset",
collections=[
Collection(
name="member",
fields=[ScalarField(name="list_id")],
)
],
connection_key="connection_key",
property_ids=["FDS-12345"],
)

saas_config = GraphDataset(
name="saas_config",
collections=[
Collection(
name="member",
fields=[ScalarField(name="query", identity="email")],
)
],
connection_key="connection_key",
property_ids=["FDS-ZZZZZ"],
)

merged_dataset = merge_datasets(saas_dataset, saas_config)
assert merged_dataset.property_ids == ["FDS-12345"]


@pytest.mark.integration_saas
class TestMergeDatasetsIntegration:
    """Integration tests: property_ids must survive the full
    DatasetConfig.get_graph() -> merge_datasets chain with a real SaaS
    connector configuration."""

    def test_get_graph_preserves_property_ids(self, db, saas_example_dataset_config):
        """property_ids stored on the DatasetConfig appear unchanged on the
        GraphDataset produced by get_graph(), which internally calls
        merge_datasets for SaaS connectors."""
        expected = ["FDS-12345", "FDS-67890"]
        saas_example_dataset_config.property_ids = expected
        saas_example_dataset_config.save(db=db)

        graph = saas_example_dataset_config.get_graph()
        assert graph.property_ids == expected

    def test_get_graph_empty_property_ids_stays_universal(
        self, db, saas_example_dataset_config
    ):
        """A SaaS dataset with no property_ids remains universal after merge."""
        # Fixture is expected to start with no property restrictions.
        assert saas_example_dataset_config.property_ids == []

        graph = saas_example_dataset_config.get_graph()
        assert graph.property_ids == []


@pytest.mark.unit_saas
class TestAssignPlaceholders:
Expand Down
Loading