230 changes: 28 additions & 202 deletions packages/backend-data/src/types.ts
@@ -1,120 +1,7 @@
import {
DerivedCombinedSchema,
DerivedModelSchema,
} from '@aws-amplify/data-schema-types';
import {
AmplifyFunction,
ConstructFactory,
LogLevel,
LogRetention,
} from '@aws-amplify/plugin-types';
// packages/backend-data/src/types.ts
// (Modified to support ImportedTableConfig and adoption option)

/**
* Authorization modes used by client-side Amplify, represented in camelCase.
*/
export type DefaultAuthorizationMode =
| 'iam'
| 'identityPool'
| 'userPool'
| 'oidc'
| 'apiKey'
| 'lambda';

/**
* Props for Api Keys on the Graphql Api.
*/
export type ApiKeyAuthorizationModeProps = {
/**
* Optional description for the Api Key to attach to the Api.
*/
description?: string;

/**
* A duration representing the time from Cloudformation deploy until expiry.
* @default 7
*/
expiresInDays?: number;
};

/**
* Props for Custom Lambda authorization on the Graphql Api.
*/
export type LambdaAuthorizationModeProps = {
/**
* The authorization lambda function. The specific input type of the function is subject to change or removal.
*/
function: ConstructFactory<AmplifyFunction>;

/**
* How long the results are cached.
* @default 60
*/
timeToLiveInSeconds?: number;
};

/**
* Props for OpenId Connect Authorization on the Graphql Api.
*/
export type OIDCAuthorizationModeProps = {
/**
* The issuer for the OIDC configuration.
*/
oidcProviderName: string;

/**
* Url for the OIDC token issuer.
*/
oidcIssuerUrl: string;

/**
* The client identifier of the Relying party at the OpenID identity provider.
* A regular expression can be specified so AppSync can validate against multiple client identifiers at a time. Example
*/
clientId?: string;

/**
* The duration an OIDC token is valid after being authenticated by OIDC provider in seconds.
* auth_time claim in OIDC token is required for this validation to work.
*/
tokenExpiryFromAuthInSeconds: number;
/**
* The duration an OIDC token is valid after being issued to a user in seconds.
* This validation uses iat claim of OIDC token.
*/
tokenExpireFromIssueInSeconds: number;
};

/**
* AppSync Authorization config for the generated API.
*/
export type AuthorizationModes = {
/**
* Default auth mode to use in the API, only required if more than one auth mode is specified.
*/
defaultAuthorizationMode?: DefaultAuthorizationMode;
/**
* Override API Key config if apiKey auth provider is specified in api definition.
*/
apiKeyAuthorizationMode?: ApiKeyAuthorizationModeProps;

/**
* Lambda authorization config if function provider is specified in the api definition.
*/
lambdaAuthorizationMode?: LambdaAuthorizationModeProps;

/**
* OIDC authorization config if oidc provider is specified in the api definition.
*/
oidcAuthorizationMode?: OIDCAuthorizationModeProps;
};
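
For orientation, a minimal sketch of how the AuthorizationModes shape above (removed by this change) is typically populated; the key description, provider name, URL, and durations are placeholder values, not taken from this PR:

const authorizationModes: AuthorizationModes = {
  defaultAuthorizationMode: 'apiKey',            // required here because more than one mode is configured
  apiKeyAuthorizationMode: {
    description: 'Public read access key',       // placeholder description
    expiresInDays: 30,                           // overrides the 7-day default
  },
  oidcAuthorizationMode: {
    oidcProviderName: 'ExampleOidcProvider',     // placeholder provider name
    oidcIssuerUrl: 'https://issuer.example.com', // placeholder issuer URL
    tokenExpiryFromAuthInSeconds: 3600,
    tokenExpireFromIssueInSeconds: 7200,
  },
};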

/**
* Schema type definition; can be a raw GraphQL string, a typed model schema, or a collection of combined schemas.
*/
export type DataSchemaInput =
| string
| DerivedModelSchema
| DerivedCombinedSchema;
import { LogLevel } from '@aws-amplify/plugin-types';

Check notice

Code scanning / CodeQL

Unused variable, import, function or class (Note)

Unused import LogLevel.

Copilot Autofix (AI, 24 days ago)

To resolve this issue, we should remove the unused import for LogLevel on line 4 in the file packages/backend-data/src/types.ts. This involves deleting the line import { LogLevel } from '@aws-amplify/plugin-types'; while leaving other imports (if any) untouched. This change will reduce clutter and prevent potential confusion, and will not affect any visible functionality since the symbol was not used in this file.


Suggested changeset 1: packages/backend-data/src/types.ts

Autofix patch
Run the following command in your local git repository to apply this patch
cat << 'EOF' | git apply
diff --git a/packages/backend-data/src/types.ts b/packages/backend-data/src/types.ts
--- a/packages/backend-data/src/types.ts
+++ b/packages/backend-data/src/types.ts
@@ -1,7 +1,6 @@
 // packages/backend-data/src/types.ts
 // (Modified to support ImportedTableConfig and adoption option)
 
-import { LogLevel } from '@aws-amplify/plugin-types';
 
 /**
  * Schema type definition, can be either a raw Graphql string, or a typed model schema.
EOF

/**
* Schema type definition, can be either a raw Graphql string, or a typed model schema.
@@ -157,100 +44,39 @@
migratedAmplifyGen1DynamoDbTableMappings?: AmplifyGen1DynamoDbTableMapping[];
};

export type AmplifyDataError =
| 'DefineDataConfigurationError'
| 'InvalidPathError'
| 'InvalidSchemaAuthError'
| 'InvalidSchemaError'
| 'MultipleSingletonResourcesError'
| 'UnresolvedEntryPathError';

/**
* The logging configuration when writing GraphQL operations and tracing to Amazon CloudWatch for an AWS AppSync GraphQL API.
* Values can be `true` or a `DataLogConfig` object.
*
* ### Defaults
* Default settings will be applied when logging is set to `true` or an empty object, or for unspecified fields:
* - `excludeVerboseContent`: `true`
* - `fieldLogLevel`: `none`
* - `retention`: `1 week`
*
* **WARNING**: Verbose logging will log the full incoming query including user parameters.
* Sensitive information may be exposed in CloudWatch logs. Ensure that your IAM policies only grant access to authorized users.
*
* For information on AppSync's LogConfig, refer to https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-appsync-graphqlapi-logconfig.html.
*/
export type DataLoggingOptions = true | DataLogConfig;
/* Rest of file content (types referenced above) */

/**
* Customizable logging configuration when writing GraphQL operations and tracing to Amazon CloudWatch for an AWS AppSync GraphQL API.
*
* **WARNING**: Verbose logging will log the full incoming query including user parameters.
* Sensitive information may be exposed in CloudWatch logs. Ensure that your IAM policies only grant access to authorized users.
*
* For information on AppSync's LogConfig, refer to https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-appsync-graphqlapi-logconfig.html.
* For information on RetentionDays, refer to https://docs.aws.amazon.com/cdk/api/v2/docs/aws-cdk-lib.aws_logs.RetentionDays.html.
* @default excludeVerboseContent: true, fieldLogLevel: 'none', retention: '1 week'
* Allow mapping entries to either be the existing plain table name string (current behavior)
* or an object describing adoption preferences.
*/
export type DataLogConfig = {
/**
* The number of days log events are kept in CloudWatch Logs.
* @default RetentionDays.ONE_WEEK
* @see https://docs.aws.amazon.com/cdk/api/v2/docs/aws-cdk-lib.aws_logs.RetentionDays.html
*/
retention?: LogRetention;

/**
* When set to `true`, excludes verbose information from the logs, such as:
* - GraphQL Query
* - Request Headers
* - Response Headers
* - Context
* - Evaluated Mapping Templates
*
* This setting applies regardless of the specified logging level.
*
* **WARNING**: Verbose logging will log the full incoming query including user parameters.
* Sensitive information may be exposed in CloudWatch logs. Ensure that your IAM policies only grant access to authorized users.
* @default true
*/
excludeVerboseContent?: boolean;

/**
* The field logging level. Values can be `'none'`, `'error'`, `'info'`, `'debug'`, or `'all'`.
*
* - **'none'**: No field-level logs are captured.
* - **'error'**: Logs the following information only for the fields that are in the error category:
* - The error section in the server response.
* - Field-level errors.
* - The generated request/response functions that got resolved for error fields.
* - **'info'**: Logs the following information only for the fields that are in the info and error categories:
* - Info-level messages.
* - The user messages sent through `$util.log.info` and `console.log`.
* - Field-level tracing and mapping logs are not shown.
* - **'debug'**: Logs the following information only for the fields that are in the debug, info, and error categories:
* - Debug-level messages.
* - The user messages sent through `$util.log.info`, `$util.log.debug`, `console.log`, and `console.debug`.
* - Field-level tracing and mapping logs are not shown.
* - **'all'**: The following information is logged for all fields in the query:
* - Field-level tracing information.
* - The generated request/response functions that were resolved for each field.
* @default 'none'
*/
fieldLogLevel?: DataLogLevel;
};

export type DataLogLevel = Extract<
LogLevel,
'none' | 'all' | 'info' | 'debug' | 'error'
>;
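
For reference, a sketch of a logging configuration matching the DataLogConfig shape above (also removed by this change); '1 month' is assumed to be an accepted LogRetention literal, and all values are illustrative:

const logging: DataLoggingOptions = {
  retention: '1 month',          // assumed LogRetention literal; default is '1 week'
  excludeVerboseContent: true,   // matches the default: keeps queries, headers, and templates out of CloudWatch
  fieldLogLevel: 'error',        // capture field-level errors only; default is 'none'
};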
export type ImportedTableConfig =
| string
| {
tableName: string;
/**
* If true, produce a managed CFN resource in the synthesized template with the
* specified TableName so the resource can be adopted into CloudFormation via
* a CloudFormation import operation. If false/omitted, the table will be treated
* as imported (reference-only) as today.
*/
adopt?: boolean;
/**
* If true, the synthesized stack will set the table's deletion behavior to RETAIN.
* This flag is advisory; final deletion/retention policy is applied by the construct if supported.
*/
retainOnDelete?: boolean;
};
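
A sketch of the two accepted forms of the new ImportedTableConfig; the table name is a placeholder:

const byReference: ImportedTableConfig = 'Todo-abcde12345-dev'; // plain string: reference-only import, current behavior

const byAdoption: ImportedTableConfig = {
  tableName: 'Todo-abcde12345-dev',
  adopt: true,           // synthesize a managed CFN resource so the table can be brought in via a CloudFormation import
  retainOnDelete: true,  // advisory: request RETAIN deletion behavior where the construct supports it
};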

/**
* Mapping of model name to existing DynamoDB table that should be used as the data source.
* The mapping will only apply to the branch specified.
* If the mapping is undefined or empty, no tables will be imported for that branch.
*
* modelNameToTableNameMapping values may be either a plain table name string (existing behavior)
* or an object with { tableName, adopt?, retainOnDelete? } to request adoption.
*/
export type AmplifyGen1DynamoDbTableMapping = {
branchName: string;
modelNameToTableNameMapping?: Record<string, string>;
};
modelNameToTableNameMapping?: Record<string, ImportedTableConfig>;
};
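
Putting the pieces together, a hypothetical per-branch mapping that mixes both forms; model and table names are invented for illustration. An array like this would be supplied as migratedAmplifyGen1DynamoDbTableMappings (the property shown earlier in this diff):

const gen1TableMappings: AmplifyGen1DynamoDbTableMapping[] = [
  {
    branchName: 'main',
    modelNameToTableNameMapping: {
      Todo: 'Todo-abcde12345-main',                               // string form: reference-only
      Order: { tableName: 'Order-abcde12345-main', adopt: true }, // object form: request adoption into CloudFormation
    },
  },
];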