From 1345cee3fb99fe4d2b2486f3de086eb1a9274f15 Mon Sep 17 00:00:00 2001 From: carlpartridge Date: Tue, 21 Oct 2025 13:42:40 -0400 Subject: [PATCH 01/16] BCDA-9484: Stubbing out aws-sdk v2 migration --- bcda/aws/parameters.go | 34 +++----- bcda/aws/session.go | 80 ++++++++++++------ bcda/database/config.go | 82 +++++++++--------- bcda/lambda/admin_aco_deny/main.go | 22 ++--- bcda/lambda/admin_create_aco/main.go | 22 ++--- bcda/lambda/admin_create_aco_creds/aws.go | 96 ++++++++++++++++------ bcda/lambda/admin_create_aco_creds/main.go | 16 ++-- bcda/lambda/admin_create_group/main.go | 76 +++++++++++------ bcda/lambda/cclf/main.go | 17 ++-- bcda/lambda/optout/main.go | 17 ++-- bcdaworker/queueing/river.go | 15 ++-- 11 files changed, 293 insertions(+), 184 deletions(-) diff --git a/bcda/aws/parameters.go b/bcda/aws/parameters.go index 7cd7bbcb3..ece3bb4c0 100644 --- a/bcda/aws/parameters.go +++ b/bcda/aws/parameters.go @@ -1,32 +1,24 @@ package bcdaaws import ( + "context" "fmt" - "github.com/aws/aws-sdk-go/aws/session" - "github.com/aws/aws-sdk-go/service/ssm" + "github.com/aws/aws-sdk-go-v2/service/ssm" ) -// Makes this easier to mock and unit test -var ssmNew = ssm.New -var ssmsvcGetParameter = (*ssm.SSM).GetParameter -var ssmsvcGetParameters = (*ssm.SSM).GetParameters - -func GetParameter(s *session.Session, keyname string) (string, error) { - ssmsvc := ssmNew(s) - +// Returns the value of a single parameter from the SSM Parameter Store +func GetParameter(ctx context.Context, client *ssm.Client, keyname string) (string, error) { withDecryption := true - result, err := ssmsvcGetParameter(ssmsvc, &ssm.GetParameterInput{ + result, err := client.GetParameter(ctx, &ssm.GetParameterInput{ Name: &keyname, WithDecryption: &withDecryption, }) - if err != nil { return "", fmt.Errorf("error retrieving parameter %s from parameter store: %w", keyname, err) } val := *result.Parameter.Value - if val == "" { return "", fmt.Errorf("no parameter store value found for %s", keyname) } 
@@ -35,12 +27,9 @@ func GetParameter(s *session.Session, keyname string) (string, error) { } // Returns a list of parameters from the SSM Parameter Store -func GetParameters(s *session.Session, keynames []*string) (map[string]string, error) { - // Create an SSM client and pull down keys from the param store - ssmsvc := ssmNew(s) - +func GetParameters(ctx context.Context, client *ssm.Client, keynames []string) (map[string]string, error) { withDecryption := true - params, err := ssmsvcGetParameters(ssmsvc, &ssm.GetParametersInput{ + output, err := client.GetParameters(ctx, &ssm.GetParametersInput{ Names: keynames, WithDecryption: &withDecryption, }) @@ -49,10 +38,10 @@ func GetParameters(s *session.Session, keynames []*string) (map[string]string, e } // Unknown keys will come back as invalid, make sure we error on them - if len(params.InvalidParameters) > 0 { + if len(output.InvalidParameters) > 0 { invalidParamsStr := "" - for i := 0; i < len(params.InvalidParameters); i++ { - invalidParamsStr += fmt.Sprintf("%s,\n", *params.InvalidParameters[i]) + for i := 0; i < len(output.InvalidParameters); i++ { + invalidParamsStr += fmt.Sprintf("%s,\n", *output.InvalidParameters[i]) } return nil, fmt.Errorf("invalid parameters error: %s", invalidParamsStr) } @@ -60,8 +49,9 @@ func GetParameters(s *session.Session, keynames []*string) (map[string]string, e // Build the parameter map that we're going to return paramMap := make(map[string]string) - for _, item := range params.Parameters { + for _, item := range output.Parameters { paramMap[*item.Name] = *item.Value } + return paramMap, nil } diff --git a/bcda/aws/session.go b/bcda/aws/session.go index 2c9c763f7..f07d9d759 100644 --- a/bcda/aws/session.go +++ b/bcda/aws/session.go @@ -1,43 +1,73 @@ package bcdaaws import ( - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/aws/credentials/stscreds" - "github.com/aws/aws-sdk-go/aws/session" + "context" + + "github.com/aws/aws-sdk-go-v2/config" ) var s3Region = 
"us-east-1" +var DefaultRegion = "us-east-1" // Makes these easily mockable for testing -var newSession = session.NewSession +// var newSession = session.NewSession // NewSession // Returns a new AWS session using the given roleArn -func NewSession(roleArn, endpoint string) (*session.Session, error) { - sess := session.Must(session.NewSession()) - var err error +// func NewSession(roleArn, endpoint string) (*session.Session, error) { +// sess := session.Must(session.NewSession()) +// var err error + +// config := aws.Config{ +// Region: aws.String(s3Region), +// } + +// if endpoint != "" { +// config.S3ForcePathStyle = aws.Bool(true) +// config.Endpoint = &endpoint +// } + +// if roleArn != "" { +// config.Credentials = stscreds.NewCredentials( +// sess, +// roleArn, +// ) +// } + +// sess, err = newSession(&config) + +// if err != nil { +// return nil, err +// } + +// return sess, nil +// } + +func NewAWSConfig(ctx context.Context, roleArn, endpoint string) (config.Config, error) { + return config.LoadDefaultConfig(ctx, config.WithRegion(DefaultRegion)) + // var cfg config.Config - config := aws.Config{ - Region: aws.String(s3Region), - } + // // used to override for localstack + // if endpoint != "" { + // cfg = config.LoadDefaultConfig( + // ctx, + // config.WithRegion(DefaultRegion)), - if endpoint != "" { - config.S3ForcePathStyle = aws.Bool(true) - config.Endpoint = &endpoint - } + // // cfg.S3ForcePathStyle = true + // // cfg.Endpoint = &endpoint + // } else if roleArn != "" { - if roleArn != "" { - config.Credentials = stscreds.NewCredentials( - sess, - roleArn, - ) - } + // client := stscreds.NewFromConfig(cfg) + // appCreds := stscreds.NewAssumeRoleProvider(client, roleArn) + // creds, err := appCreds.Retrieve(ctx) + // if err != nil { + // return config.Config{}, err + // } - sess, err = newSession(&config) + // cfg.Credentials = creds + // } else { - if err != nil { - return nil, err - } + // } - return sess, nil + // return cfg, nil } diff --git 
a/bcda/database/config.go b/bcda/database/config.go index e73659888..5feb3dc36 100644 --- a/bcda/database/config.go +++ b/bcda/database/config.go @@ -2,10 +2,7 @@ package database import ( "errors" - "fmt" - "os" - bcdaaws "github.com/CMSgov/bcda-app/bcda/aws" "github.com/CMSgov/bcda-app/conf" "github.com/CMSgov/bcda-app/log" ) @@ -28,20 +25,20 @@ func LoadConfig() (cfg *Config, err error) { return nil, err } - if cfg.DatabaseURL == "" { - // Attempt to load database config from parameter store if ENV var is set. - // This generally indicates that we are running within our lambda environment. - env := os.Getenv("ENV") - - if env != "" { - cfg, err = LoadConfigFromParameterStore( - fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env)) - - if err != nil { - return nil, err - } - } - } + // if cfg.DatabaseURL == "" { + // // Attempt to load database config from parameter store if ENV var is set. + // // This generally indicates that we are running within our lambda environment. + // env := os.Getenv("ENV") + + // if env != "" { + // cfg, err = LoadConfigFromParameterStore( + // fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env)) + + // if err != nil { + // return nil, err + // } + // } + // } if cfg.DatabaseURL == "" { return nil, errors.New("invalid config, DatabaseURL must be set") @@ -52,23 +49,34 @@ func LoadConfig() (cfg *Config, err error) { } // Loads database URL from parameter store instead of from environment variables. 
-func LoadConfigFromParameterStore(dbUrlKey string) (cfg *Config, err error) { - cfg = &Config{} - if err := conf.Checkout(cfg); err != nil { - return nil, err - } - - bcdaSession, err := bcdaaws.NewSession("", os.Getenv("LOCAL_STACK_ENDPOINT")) - if err != nil { - return nil, err - } - - params, err := bcdaaws.GetParameters(bcdaSession, []*string{&dbUrlKey}) - if err != nil { - return nil, err - } - - cfg.DatabaseURL = params[dbUrlKey] - - return cfg, nil -} +// func LoadConfigFromParameterStore(dbUrlKey string) (cfg *Config, err error) { +// cfg = &Config{} +// if err := conf.Checkout(cfg); err != nil { +// return nil, err +// } + +// // bcdaSession, err := bcdaaws.NewSession("", os.Getenv("LOCAL_STACK_ENDPOINT")) +// // if err != nil { +// // return nil, err +// // } + +// cfg, err := config.LoadDefaultConfig(ctx) +// if err != nil { +// return awsParams{}, err +// } +// ssmClient := ssm.NewFromConfig(cfg) + +// params, err := bcdaaws.GetParameters(ctx, ssmClient, paramNames) +// if err != nil { +// return awsParams{}, err +// } + +// params, err := bcdaaws.GetParameters(bcdaSession, []*string{&dbUrlKey}) +// if err != nil { +// return nil, err +// } + +// cfg.DatabaseURL = params[dbUrlKey] + +// return cfg, nil +// } diff --git a/bcda/lambda/admin_aco_deny/main.go b/bcda/lambda/admin_aco_deny/main.go index 683568379..ef75deafa 100644 --- a/bcda/lambda/admin_aco_deny/main.go +++ b/bcda/lambda/admin_aco_deny/main.go @@ -16,6 +16,9 @@ import ( msgr "github.com/CMSgov/bcda-app/bcda/slackmessenger" log "github.com/sirupsen/logrus" + + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/service/ssm" ) type payload struct { @@ -45,7 +48,7 @@ func handler(ctx context.Context, event json.RawMessage) error { return err } - params, err := getAWSParams() + params, err := getAWSParams(ctx) if err != nil { log.Errorf("Unable to extract DB URL from parameter store: %+v", err) return err @@ -87,27 +90,26 @@ func handleACODenies(ctx context.Context, conn 
PgxConnection, data payload) erro return nil } -func getAWSParams() (awsParams, error) { +func getAWSParams(ctx context.Context) (awsParams, error) { env := conf.GetEnv("ENV") if env == "local" { return awsParams{conf.GetEnv("DATABASE_URL"), ""}, nil } - bcdaSession, err := bcdaaws.NewSession("", os.Getenv("LOCAL_STACK_ENDPOINT")) - if err != nil { - return awsParams{}, err - } - - dbURL, err := bcdaaws.GetParameter(bcdaSession, fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env)) + cfg, err := config.LoadDefaultConfig(ctx) if err != nil { return awsParams{}, err } + ssmClient := ssm.NewFromConfig(cfg) - slackToken, err := bcdaaws.GetParameter(bcdaSession, "/slack/token/workflow-alerts") + dbURLName := fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env) + slackTokenName := "/slack/token/workflow-alerts" + paramNames := []string{slackTokenName, dbURLName} + params, err := bcdaaws.GetParameters(ctx, ssmClient, paramNames) if err != nil { return awsParams{}, err } - return awsParams{dbURL, slackToken}, nil + return awsParams{params[dbURLName], params[slackTokenName]}, nil } diff --git a/bcda/lambda/admin_create_aco/main.go b/bcda/lambda/admin_create_aco/main.go index 28b5b65b1..832ff9692 100644 --- a/bcda/lambda/admin_create_aco/main.go +++ b/bcda/lambda/admin_create_aco/main.go @@ -20,6 +20,9 @@ import ( msgr "github.com/CMSgov/bcda-app/bcda/slackmessenger" log "github.com/sirupsen/logrus" + + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/service/ssm" ) type payload struct { @@ -51,7 +54,7 @@ func handler(ctx context.Context, event json.RawMessage) error { return err } - params, err := getAWSParams() + params, err := getAWSParams(ctx) if err != nil { log.Errorf("Unable to extract DB URL from parameter store: %+v", err) return err @@ -133,27 +136,26 @@ func handleCreateACO(ctx context.Context, conn PgxConnection, data payload, id u return nil } -func getAWSParams() (awsParams, error) { +func getAWSParams(ctx context.Context) (awsParams, error) { env := 
conf.GetEnv("ENV") if env == "local" { return awsParams{conf.GetEnv("DATABASE_URL"), ""}, nil } - bcdaSession, err := bcdaaws.NewSession("", os.Getenv("LOCAL_STACK_ENDPOINT")) - if err != nil { - return awsParams{}, err - } - - dbURL, err := bcdaaws.GetParameter(bcdaSession, fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env)) + cfg, err := config.LoadDefaultConfig(ctx) if err != nil { return awsParams{}, err } + ssmClient := ssm.NewFromConfig(cfg) - slackToken, err := bcdaaws.GetParameter(bcdaSession, "/slack/token/workflow-alerts") + dbURLName := fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env) + slackTokenName := "/slack/token/workflow-alerts" + paramNames := []string{slackTokenName, dbURLName} + params, err := bcdaaws.GetParameters(ctx, ssmClient, paramNames) if err != nil { return awsParams{}, err } - return awsParams{dbURL, slackToken}, nil + return awsParams{params[dbURLName], params[slackTokenName]}, nil } diff --git a/bcda/lambda/admin_create_aco_creds/aws.go b/bcda/lambda/admin_create_aco_creds/aws.go index c392e01c8..c753a3c15 100644 --- a/bcda/lambda/admin_create_aco_creds/aws.go +++ b/bcda/lambda/admin_create_aco_creds/aws.go @@ -1,61 +1,111 @@ package main import ( + "context" "fmt" "os" "strings" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/awserr" - "github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/s3" "github.com/aws/aws-sdk-go/service/s3/s3iface" log "github.com/sirupsen/logrus" bcdaaws "github.com/CMSgov/bcda-app/bcda/aws" "github.com/CMSgov/bcda-app/conf" + + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/service/ssm" ) var pemFilePath = "/tmp/BCDA_CA_FILE.pem" -func getAWSParams(session *session.Session) (awsParams, error) { +func getAWSParams(ctx context.Context) (awsParams, error) { env := adjustedEnv() - if env == "local" { return awsParams{}, nil } - slackToken, err := bcdaaws.GetParameter(session, "/slack/token/workflow-alerts") - if err != nil { - return awsParams{}, err - } + 
slackTokenName := "/slack/token/workflow-alerts" + ssasURLName := fmt.Sprintf("/bcda/%s/api/SSAS_URL", env) + clientIDName := fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_CLIENT_ID", env) + clientSecretName := fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_SECRET", env) + ssasPEMName := fmt.Sprintf("/bcda/%s/api/BCDA_CA_FILE.pem", env) + credsBucketName := fmt.Sprintf("/bcda/%s/aco_creds_bucket", env) - ssasURL, err := bcdaaws.GetParameter(session, fmt.Sprintf("/bcda/%s/api/SSAS_URL", env)) - if err != nil { - return awsParams{}, err - } - - clientID, err := bcdaaws.GetParameter(session, fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_CLIENT_ID", env)) - if err != nil { - return awsParams{}, err - } - - clientSecret, err := bcdaaws.GetParameter(session, fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_SECRET", env)) - if err != nil { - return awsParams{}, err + paramNames := []string{ + slackTokenName, + ssasURLName, + clientIDName, + clientSecretName, + ssasPEMName, + credsBucketName, } - ssasPEM, err := bcdaaws.GetParameter(session, fmt.Sprintf("/bcda/%s/api/BCDA_CA_FILE.pem", env)) + cfg, err := config.LoadDefaultConfig(ctx) if err != nil { return awsParams{}, err } + ssmClient := ssm.NewFromConfig(cfg) - credsBucket, err := bcdaaws.GetParameter(session, fmt.Sprintf("/bcda/%s/aco_creds_bucket", env)) + params, err := bcdaaws.GetParameters(ctx, ssmClient, paramNames) if err != nil { return awsParams{}, err } - return awsParams{slackToken, ssasURL, clientID, clientSecret, ssasPEM, credsBucket}, nil + return awsParams{ + params[slackTokenName], + params[ssasURLName], + params[clientIDName], + params[clientSecretName], + params[ssasPEMName], + params[credsBucketName], + }, nil + // output, err := client.GetParameters(ctx, input) + // if err != nil { + // return awsParams{}, err + // } + // slackToken := getParamFromOutput(output, "/slack/token/workflow-alerts") + // ssasURL := getParamFromOutput(output, "/bcda/%s/api/SSAS_URL") + // clientID := getParamFromOutput(output, "/bcda/%s/api/BCDA_SSAS_CLIENT_ID") + 
// clientSecret := getParamFromOutput(output, "/bcda/%s/api/BCDA_SSAS_SECRET") + // ssasPEM := getParamFromOutput(output, "/bcda/%s/api/BCDA_CA_FILE.pem") + // credsBucket := getParamFromOutput(output, "/bcda/%s/aco_creds_bucket") + + // return awsParams{slackToken, ssasURL, clientID, clientSecret, ssasPEM, credsBucket}, nil + + // slackToken, err := bcdaaws.GetParameter(session, "/slack/token/workflow-alerts") + // if err != nil { + // return awsParams{}, err + // } + + // ssasURL, err := bcdaaws.GetParameter(session, fmt.Sprintf("/bcda/%s/api/SSAS_URL", env)) + // if err != nil { + // return awsParams{}, err + // } + + // clientID, err := bcdaaws.GetParameter(session, fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_CLIENT_ID", env)) + // if err != nil { + // return awsParams{}, err + // } + + // clientSecret, err := bcdaaws.GetParameter(session, fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_SECRET", env)) + // if err != nil { + // return awsParams{}, err + // } + + // ssasPEM, err := bcdaaws.GetParameter(session, fmt.Sprintf("/bcda/%s/api/BCDA_CA_FILE.pem", env)) + // if err != nil { + // return awsParams{}, err + // } + + // credsBucket, err := bcdaaws.GetParameter(session, fmt.Sprintf("/bcda/%s/aco_creds_bucket", env)) + // if err != nil { + // return awsParams{}, err + // } + + // return awsParams{slackToken, ssasURL, clientID, clientSecret, ssasPEM, credsBucket}, nil } func setupEnvironment(params awsParams) error { diff --git a/bcda/lambda/admin_create_aco_creds/main.go b/bcda/lambda/admin_create_aco_creds/main.go index 35d9e0fa1..211f3f15e 100644 --- a/bcda/lambda/admin_create_aco_creds/main.go +++ b/bcda/lambda/admin_create_aco_creds/main.go @@ -17,7 +17,7 @@ import ( log "github.com/sirupsen/logrus" - "github.com/aws/aws-sdk-go/service/s3" + "github.com/aws/aws-sdk-go-v2/service/s3" "github.com/aws/aws-sdk-go/service/s3/s3iface" ) @@ -53,25 +53,27 @@ func handler(ctx context.Context, event json.RawMessage) (string, error) { return "", err } - session, err := 
bcdaaws.NewSession("", os.Getenv("LOCAL_STACK_ENDPOINT")) + params, err := getAWSParams(ctx) if err != nil { + log.Errorf("Unable to extract slack token from parameter store: %+v", err) return "", err } - params, err := getAWSParams(session) + err = setupEnvironment(params) if err != nil { - log.Errorf("Unable to extract slack token from parameter store: %+v", err) + log.Errorf("Unable to setupEnvironment properly: %+v", err) return "", err } - err = setupEnvironment(params) + provider := auth.NewProvider(database.Connect()) + + cfg, err := bcdaaws.NewAWSConfig(ctx, "", os.Getenv("LOCAL_STACK_ENDPOINT")) if err != nil { log.Errorf("Unable to setupEnvironment properly: %+v", err) return "", err } - provider := auth.NewProvider(database.Connect()) - s3Service := s3.New(session) + s3Service := s3.NewFromConfig(cfg) slackClient := slack.New(params.slackToken) s3Path, err := handleCreateACOCreds(ctx, data, provider, s3Service, params.credsBucket) diff --git a/bcda/lambda/admin_create_group/main.go b/bcda/lambda/admin_create_group/main.go index b413bc704..77922906e 100644 --- a/bcda/lambda/admin_create_group/main.go +++ b/bcda/lambda/admin_create_group/main.go @@ -23,6 +23,9 @@ import ( "github.com/pborman/uuid" log "github.com/sirupsen/logrus" + + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/service/ssm" ) type payload struct { @@ -49,7 +52,7 @@ func handler(ctx context.Context, event json.RawMessage) error { return err } - slackToken, err := setupEnv() + slackToken, err := setupEnv(ctx) if err != nil { log.Errorf("Failed to retrieve parameter: %+v", err) return err @@ -130,32 +133,62 @@ func handleCreateGroup(c client.SSASHTTPClient, r *postgres.Repository, data pay return nil } -func setupEnv() (string, error) { +func setupEnv(ctx context.Context) (string, error) { env := conf.GetEnv("ENV") - bcdaSession, err := bcdaaws.NewSession("", os.Getenv("LOCAL_STACK_ENDPOINT")) + err := os.Setenv("SSAS_USE_TLS", "true") if err != nil { + 
log.Errorf("Error setting SSAS_USE_TLS env var: %+v", err) return "", err } - err = os.Setenv("SSAS_USE_TLS", "true") + cfg, err := config.LoadDefaultConfig(ctx) + if err != nil { + return "", err + } + ssmClient := ssm.NewFromConfig(cfg) + + slackTokenName := "/slack/token/workflow-alerts" + ssasURLName := fmt.Sprintf("/bcda/%s/api/SSAS_URL", env) + ssasClientName := fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_CLIENT_ID", env) + ssasSecretName := fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_SECRET", env) + caFileName := fmt.Sprintf("/bcda/%s/api/BCDA_CA_FILE.pem", env) + paramNames := []string{ + slackTokenName, + ssasURLName, + ssasClientName, + ssasSecretName, + caFileName, + } + params, err := bcdaaws.GetParameters(ctx, ssmClient, paramNames) if err != nil { - log.Errorf("Error setting SSAS_USE_TLS env var: %+v", err) return "", err } - envVars := []string{"SSAS_URL", "BCDA_SSAS_CLIENT_ID", "BCDA_SSAS_SECRET", "BCDA_CA_FILE.pem"} - for _, v := range envVars { - envVar, err := bcdaaws.GetParameter(bcdaSession, fmt.Sprintf("/bcda/%s/api/%s", env, v)) - if err != nil { - return "", err - } - err = os.Setenv(v, envVar) - if err != nil { - log.Errorf("Error setting %s env var: %+v", envVar, err) - return "", err - } - + err = os.Setenv(slackTokenName, params[slackTokenName]) + if err != nil { + log.Errorf("Error setting slackTokenName env var: %+v", err) + return "", err + } + err = os.Setenv("SSAS_URL", params[ssasURLName]) + if err != nil { + log.Errorf("Error setting SSAS_URL env var: %+v", err) + return "", err + } + err = os.Setenv("BCDA_SSAS_CLIENT_ID", params[ssasClientName]) + if err != nil { + log.Errorf("Error setting BCDA_SSAS_CLIENT_ID env var: %+v", err) + return "", err + } + err = os.Setenv("BCDA_SSAS_SECRET", params[ssasSecretName]) + if err != nil { + log.Errorf("Error setting BCDA_SSAS_SECRET env var: %+v", err) + return "", err + } + err = os.Setenv("BCDA_CA_FILE.pem", params[caFileName]) + if err != nil { + log.Errorf("Error setting BCDA_CA_FILE.pem env var: %+v", err) + return "", err + } 
err = os.Setenv("BCDA_CA_FILE", "/tmp/BCDA_CA_FILE.pem") @@ -164,7 +197,7 @@ func setupEnv() (string, error) { return "", err } - // parameter store returns the value of the paremeter and SSAS expects a file, so we need to create it + // parameter store returns the value of the parameter and SSAS expects a file, so we need to create it // nosec in use because lambda creates a tmp dir already f, err := os.Create("/tmp/BCDA_CA_FILE.pem") // #nosec if err != nil { @@ -176,10 +209,5 @@ func setupEnv() (string, error) { return "", err } - slackToken, err := bcdaaws.GetParameter(bcdaSession, "/slack/token/workflow-alerts") - if err != nil { - return "", err - } - - return slackToken, nil + return params[slackTokenName], nil } diff --git a/bcda/lambda/cclf/main.go b/bcda/lambda/cclf/main.go index beffc0aa4..941b28cab 100644 --- a/bcda/lambda/cclf/main.go +++ b/bcda/lambda/cclf/main.go @@ -19,6 +19,9 @@ import ( "github.com/CMSgov/bcda-app/optout" "github.com/CMSgov/bcda-app/conf" + + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/service/ssm" ) func main() { @@ -54,7 +57,7 @@ func attributionImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (st for _, e := range s3Event.Records { if strings.Contains(e.EventName, "ObjectCreated") { - s3AssumeRoleArn, err := loadBfdS3Params() + s3AssumeRoleArn, err := loadBfdS3Params(ctx) if err != nil { return "", err } @@ -111,20 +114,16 @@ func handleCSVImport(db *sql.DB, s3AssumeRoleArn, s3ImportPath string) (string, return result, nil } -func loadBfdS3Params() (string, error) { +func loadBfdS3Params(ctx context.Context) (string, error) { env := conf.GetEnv("ENV") - bcdaSession, err := bcdaaws.NewSession("", os.Getenv("LOCAL_STACK_ENDPOINT")) - if err != nil { - return "", err - } - - param, err := bcdaaws.GetParameter(bcdaSession, fmt.Sprintf("/cclf-import/bcda/%s/bfd-bucket-role-arn", env)) + cfg, err := config.LoadDefaultConfig(ctx) if err != nil { return "", err } + ssmClient := 
ssm.NewFromConfig(cfg) - return param, nil + return bcdaaws.GetParameter(ctx, ssmClient, fmt.Sprintf("/cclf-import/bcda/%s/bfd-bucket-role-arn", env)) } func loadBCDAParams() error { diff --git a/bcda/lambda/optout/main.go b/bcda/lambda/optout/main.go index 473b91782..50c46609e 100644 --- a/bcda/lambda/optout/main.go +++ b/bcda/lambda/optout/main.go @@ -20,6 +20,9 @@ import ( "github.com/CMSgov/bcda-app/optout" "github.com/CMSgov/bcda-app/conf" + + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/service/ssm" ) func main() { @@ -55,7 +58,7 @@ func optOutImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (string, for _, e := range s3Event.Records { if strings.Contains(e.EventName, "ObjectCreated") { - s3AssumeRoleArn, err := loadBfdS3Params() + s3AssumeRoleArn, err := loadBfdS3Params(ctx) if err != nil { return "", err } @@ -70,20 +73,16 @@ func optOutImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (string, return "", nil } -func loadBfdS3Params() (string, error) { +func loadBfdS3Params(ctx context.Context) (string, error) { env := conf.GetEnv("ENV") - bcdaSession, err := bcdaaws.NewSession("", os.Getenv("LOCAL_STACK_ENDPOINT")) - if err != nil { - return "", err - } - - param, err := bcdaaws.GetParameter(bcdaSession, fmt.Sprintf("/opt-out-import/bcda/%s/bfd-bucket-role-arn", env)) + cfg, err := config.LoadDefaultConfig(ctx) if err != nil { return "", err } + ssmClient := ssm.NewFromConfig(cfg) - return param, nil + return bcdaaws.GetParameter(ctx, ssmClient, fmt.Sprintf("/opt-out-import/bcda/%s/bfd-bucket-role-arn", env)) } func handleOptOutImport(db *sql.DB, s3AssumeRoleArn, s3ImportPath string) (string, error) { diff --git a/bcdaworker/queueing/river.go b/bcdaworker/queueing/river.go index a87a92665..d1931c68b 100644 --- a/bcdaworker/queueing/river.go +++ b/bcdaworker/queueing/river.go @@ -39,6 +39,9 @@ import ( sloglogrus "github.com/samber/slog-logrus" "github.com/sirupsen/logrus" "github.com/slack-go/slack" + + 
"github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/service/ssm" ) type Notifier interface { @@ -179,22 +182,18 @@ func getCutOffTime() time.Time { return cutoff } -func getAWSParams() (string, error) { +func getAWSParams(ctx context.Context) (string, error) { env := conf.GetEnv("ENV") if env == "local" { return conf.GetEnv("workflow-alerts"), nil } - bcdaSession, err := bcdaaws.NewSession("", os.Getenv("LOCAL_STACK_ENDPOINT")) + cfg, err := config.LoadDefaultConfig(ctx) if err != nil { return "", err } + ssmClient := ssm.NewFromConfig(cfg) - slackToken, err := bcdaaws.GetParameter(bcdaSession, "/slack/token/workflow-alerts") - if err != nil { - return slackToken, err - } - - return slackToken, nil + return bcdaaws.GetParameter(ctx, ssmClient, "/slack/token/workflow-alerts") } From 83a1229cb2b8f71f76ebeee79571342f195a29c8 Mon Sep 17 00:00:00 2001 From: carlpartridge Date: Wed, 22 Oct 2025 10:21:16 -0400 Subject: [PATCH 02/16] Mid work checkin --- bcda/aws/parameters.go | 2 +- bcda/lambda/admin_create_aco_creds/aws.go | 32 ++++++--------- bcda/lambda/admin_create_aco_creds/main.go | 10 ++--- go.mod | 19 +++++++++ go.sum | 45 ++++++++++++++++++++++ optout/s3_file_handler.go | 31 +++++++++------ 6 files changed, 99 insertions(+), 40 deletions(-) diff --git a/bcda/aws/parameters.go b/bcda/aws/parameters.go index ece3bb4c0..9af2d4fc8 100644 --- a/bcda/aws/parameters.go +++ b/bcda/aws/parameters.go @@ -41,7 +41,7 @@ func GetParameters(ctx context.Context, client *ssm.Client, keynames []string) ( if len(output.InvalidParameters) > 0 { invalidParamsStr := "" for i := 0; i < len(output.InvalidParameters); i++ { - invalidParamsStr += fmt.Sprintf("%s,\n", *output.InvalidParameters[i]) + invalidParamsStr += fmt.Sprintf("%s,\n", output.InvalidParameters[i]) } return nil, fmt.Errorf("invalid parameters error: %s", invalidParamsStr) } diff --git a/bcda/lambda/admin_create_aco_creds/aws.go b/bcda/lambda/admin_create_aco_creds/aws.go index c753a3c15..8df1e2bfd 
100644 --- a/bcda/lambda/admin_create_aco_creds/aws.go +++ b/bcda/lambda/admin_create_aco_creds/aws.go @@ -6,17 +6,15 @@ import ( "os" "strings" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/aws/awserr" - "github.com/aws/aws-sdk-go/service/s3" - "github.com/aws/aws-sdk-go/service/s3/s3iface" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/service/s3" + "github.com/aws/aws-sdk-go-v2/service/ssm" + log "github.com/sirupsen/logrus" bcdaaws "github.com/CMSgov/bcda-app/bcda/aws" "github.com/CMSgov/bcda-app/conf" - - "github.com/aws/aws-sdk-go-v2/config" - "github.com/aws/aws-sdk-go-v2/service/ssm" ) var pemFilePath = "/tmp/BCDA_CA_FILE.pem" @@ -151,28 +149,20 @@ func setupEnvironment(params awsParams) error { return nil } -func putObject(service s3iface.S3API, acoID string, creds string, credsBucket string) (string, error) { +func putObject(ctx context.Context, client *s3.Client, acoID, creds, credsBucket string) (string, error) { s3Input := &s3.PutObjectInput{ - Body: aws.ReadSeekCloser(strings.NewReader(creds)), + Body: strings.NewReader(creds), Bucket: aws.String(credsBucket), Key: aws.String(fmt.Sprintf("%s-creds", acoID)), } - result, err := service.PutObject(s3Input) + result, err := client.PutObject(ctx, s3Input) if err != nil { - if aerr, ok := err.(awserr.Error); ok { - switch aerr.Code() { - default: - log.Error(aerr.Error()) - } - } else { - // Print the error, cast err to awserr.Error to get the Code and Message from an error. 
- log.Error(err.Error()) - } return "", err } - - return result.String(), nil + // TODO + fmt.Printf("\n--- result metadata %+v", result.ResultMetadata) + return "", nil } func adjustedEnv() string { diff --git a/bcda/lambda/admin_create_aco_creds/main.go b/bcda/lambda/admin_create_aco_creds/main.go index 211f3f15e..012eb850d 100644 --- a/bcda/lambda/admin_create_aco_creds/main.go +++ b/bcda/lambda/admin_create_aco_creds/main.go @@ -11,14 +11,13 @@ import ( "github.com/slack-go/slack" "github.com/CMSgov/bcda-app/bcda/auth" - bcdaaws "github.com/CMSgov/bcda-app/bcda/aws" "github.com/CMSgov/bcda-app/bcda/database" msgr "github.com/CMSgov/bcda-app/bcda/slackmessenger" log "github.com/sirupsen/logrus" + "github.com/aws/aws-sdk-go-v2/config" "github.com/aws/aws-sdk-go-v2/service/s3" - "github.com/aws/aws-sdk-go/service/s3/s3iface" ) type payload struct { @@ -67,9 +66,8 @@ func handler(ctx context.Context, event json.RawMessage) (string, error) { provider := auth.NewProvider(database.Connect()) - cfg, err := bcdaaws.NewAWSConfig(ctx, "", os.Getenv("LOCAL_STACK_ENDPOINT")) + cfg, err := config.LoadDefaultConfig(ctx) if err != nil { - log.Errorf("Unable to setupEnvironment properly: %+v", err) return "", err } @@ -94,7 +92,7 @@ func handleCreateACOCreds( ctx context.Context, data payload, provider auth.Provider, - s3Service s3iface.S3API, + s3Service *s3.Client, credsBucket string, ) (string, error) { @@ -105,7 +103,7 @@ func handleCreateACOCreds( return "", err } - s3Path, err := putObject(s3Service, data.ACOID, creds, credsBucket) + s3Path, err := putObject(ctx, s3Service, data.ACOID, creds, credsBucket) if err != nil { log.Errorf("Error putting object: %+v", err) diff --git a/go.mod b/go.mod index d76ab2efe..61037b113 100644 --- a/go.mod +++ b/go.mod @@ -41,6 +41,9 @@ require ( require ( github.com/CMSgov/bcda-app/optout v0.0.0-20231214212912-50fa0a1c3c8a + github.com/aws/aws-sdk-go-v2/config v1.31.13 + github.com/aws/aws-sdk-go-v2/service/s3 v1.27.11 + 
github.com/aws/aws-sdk-go-v2/service/ssm v1.66.0 github.com/ccoveille/go-safecast v1.6.1 github.com/pashagolub/pgxmock/v4 v4.5.0 github.com/riverqueue/river v0.20.2 @@ -54,6 +57,22 @@ require ( dario.cat/mergo v1.0.1 // indirect github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect github.com/Microsoft/go-winio v0.6.2 // indirect + github.com/aws/aws-sdk-go-v2 v1.39.3 // indirect + github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.8 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.18.17 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.10 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.10 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.10 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect + github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.14 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.2 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.18 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.10 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.17 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.29.7 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.2 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.38.7 // indirect + github.com/aws/smithy-go v1.23.1 // indirect github.com/containerd/errdefs v1.0.0 // indirect github.com/containerd/errdefs/pkg v0.3.0 // indirect github.com/containerd/log v0.1.0 // indirect diff --git a/go.sum b/go.sum index 76612c05e..01b2004db 100644 --- a/go.sum +++ b/go.sum @@ -118,6 +118,50 @@ github.com/aws/aws-lambda-go v1.49.0/go.mod h1:dpMpZgvWx5vuQJfBt0zqBha60q7Dd7Rfg github.com/aws/aws-sdk-go v1.28.8/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go v1.55.8 h1:JRmEUbU52aJQZ2AjX4q4Wu7t4uZjOu71uyNmaWlUkJQ= github.com/aws/aws-sdk-go v1.55.8/go.mod 
h1:ZkViS9AqA6otK+JBBNH2++sx1sgxrPKcSzPPvQkUtXk= +github.com/aws/aws-sdk-go-v2 v1.16.16/go.mod h1:SwiyXi/1zTUZ6KIAmLK5V5ll8SiURNUYOqTerZPaF9k= +github.com/aws/aws-sdk-go-v2 v1.39.3 h1:h7xSsanJ4EQJXG5iuW4UqgP7qBopLpj84mpkNx3wPjM= +github.com/aws/aws-sdk-go-v2 v1.39.3/go.mod h1:yWSxrnioGUZ4WVv9TgMrNUeLV3PFESn/v+6T/Su8gnM= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.8 h1:tcFliCWne+zOuUfKNRn8JdFBuWPDuISDH08wD2ULkhk= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.8/go.mod h1:JTnlBSot91steJeti4ryyu/tLd4Sk84O5W22L7O2EQU= +github.com/aws/aws-sdk-go-v2/config v1.31.13 h1:wcqQB3B0PgRPUF5ZE/QL1JVOyB0mbPevHFoAMpemR9k= +github.com/aws/aws-sdk-go-v2/config v1.31.13/go.mod h1:ySB5D5ybwqGbT6c3GszZ+u+3KvrlYCUQNo62+hkKOFk= +github.com/aws/aws-sdk-go-v2/credentials v1.18.17 h1:skpEwzN/+H8cdrrtT8y+rvWJGiWWv0DeNAe+4VTf+Vs= +github.com/aws/aws-sdk-go-v2/credentials v1.18.17/go.mod h1:Ed+nXsaYa5uBINovJhcAWkALvXw2ZLk36opcuiSZfJM= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.10 h1:UuGVOX48oP4vgQ36oiKmW9RuSeT8jlgQgBFQD+HUiHY= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.10/go.mod h1:vM/Ini41PzvudT4YkQyE/+WiQJiQ6jzeDyU8pQKwCac= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.23/go.mod h1:2DFxAQ9pfIRy0imBCJv+vZ2X6RKxves6fbnEuSry6b4= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.10 h1:mj/bdWleWEh81DtpdHKkw41IrS+r3uw1J/VQtbwYYp8= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.10/go.mod h1:7+oEMxAZWP8gZCyjcm9VicI0M61Sx4DJtcGfKYv2yKQ= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.17/go.mod h1:pRwaTYCJemADaqCbUAxltMoHKata7hmB5PjEXeu0kfg= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.10 h1:wh+/mn57yhUrFtLIxyFPh2RgxgQz/u+Yrf7hiHGHqKY= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.10/go.mod h1:7zirD+ryp5gitJJ2m1BBux56ai8RIRDykXZrJSp540w= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 h1:WKuaxf++XKWlHWu9ECbMlha8WOEGm0OUEZqm4K/Gcfk= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4/go.mod 
h1:ZWy7j6v1vWGmPReu0iSGvRiise4YI5SkR3OHKTZ6Wuc= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.14 h1:ZSIPAkAsCCjYrhqfw2+lNzWDzxzHXEckFkTePL5RSWQ= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.14/go.mod h1:AyGgqiKv9ECM6IZeNQtdT8NnMvUb3/2wokeq2Fgryto= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.9/go.mod h1:a9j48l6yL5XINLHLcOKInjdvknN+vWqPBxqeIDw7ktw= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.2 h1:xtuxji5CS0JknaXoACOunXOYOQzgfTvGAc9s2QdCJA4= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.2/go.mod h1:zxwi0DIR0rcRcgdbl7E2MSOvxDyyXGBlScvBkARFaLQ= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.18 h1:BBYoNQt2kUZUUK4bIPsKrCcjVPUMNsgQpNAwhznK/zo= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.18/go.mod h1:NS55eQ4YixUJPTC+INxi2/jCqe1y2Uw3rnh9wEOVJxY= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.17/go.mod h1:4nYOrY41Lrbk2170/BGkcJKBhws9Pfn8MG3aGqjjeFI= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.10 h1:DRND0dkCKtJzCj4Xl4OpVbXZgfttY5q712H9Zj7qc/0= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.10/go.mod h1:tGGNmJKOTernmR2+VJ0fCzQRurcPZj9ut60Zu5Fi6us= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.17 h1:HfVVR1vItaG6le+Bpw6P4midjBDMKnjMyZnw9MXYUcE= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.17/go.mod h1:YqMdV+gEKCQ59NrB7rzrJdALeBIsYiVi8Inj3+KcqHI= +github.com/aws/aws-sdk-go-v2/service/s3 v1.27.11 h1:3/gm/JTX9bX8CpzTgIlrtYpB3EVBDxyg/GY/QdcIEZw= +github.com/aws/aws-sdk-go-v2/service/s3 v1.27.11/go.mod h1:fmgDANqTUCxciViKl9hb/zD5LFbvPINFRgWhDbR+vZo= +github.com/aws/aws-sdk-go-v2/service/ssm v1.66.0 h1:45VTQmiADmmooUvYSCiMvoDCln0FBxAEfmj7HDFTa3w= +github.com/aws/aws-sdk-go-v2/service/ssm v1.66.0/go.mod h1:L5XWT5tckol5yKkYc8O2+jZBZgF/tFzVQ5QE00PJUjU= +github.com/aws/aws-sdk-go-v2/service/sso v1.29.7 h1:fspVFg6qMx0svs40YgRmE7LZXh9VRZvTT35PfdQR6FM= +github.com/aws/aws-sdk-go-v2/service/sso 
v1.29.7/go.mod h1:BQTKL3uMECaLaUV3Zc2L4Qybv8C6BIXjuu1dOPyxTQs= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.2 h1:scVnW+NLXasGOhy7HhkdT9AGb6kjgW7fJ5xYkUaqHs0= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.2/go.mod h1:FRNCY3zTEWZXBKm2h5UBUPvCVDOecTad9KhynDyGBc0= +github.com/aws/aws-sdk-go-v2/service/sts v1.38.7 h1:VEO5dqFkMsl8QZ2yHsFDJAIZLAkEbaYDB+xdKi0Feic= +github.com/aws/aws-sdk-go-v2/service/sts v1.38.7/go.mod h1:L1xxV3zAdB+qVrVW/pBIrIAnHFWHo6FBbFe4xOGsG/o= +github.com/aws/smithy-go v1.13.3/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA= +github.com/aws/smithy-go v1.23.1 h1:sLvcH6dfAFwGkHLZ7dGiYF7aK6mg4CgKA/iDKjLDt9M= +github.com/aws/smithy-go v1.23.1/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0= github.com/bazelbuild/rules_go v0.24.5 h1:8S5qilf+Il5/TPMZQIOfzQDAZtkhB4jALiAnwRuisDM= github.com/bazelbuild/rules_go v0.24.5/go.mod h1:MC23Dc/wkXEyk3Wpq6lCqz0ZAYOZDw2DR5y3N1q2i7M= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= @@ -384,6 +428,7 @@ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= +github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/go-github/v27 v27.0.4/go.mod h1:/0Gr8pJ55COkmv+S/yPKCczSkUPIM/LnFyubufRNIS0= diff --git a/optout/s3_file_handler.go b/optout/s3_file_handler.go index a4d13109a..985b3480c 100644 --- a/optout/s3_file_handler.go +++ b/optout/s3_file_handler.go @@ -3,13 +3,16 @@ package optout import ( "bufio" "bytes" + "context" "fmt" "os" - 
"github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/service/s3" + s3types "github.com/aws/aws-sdk-go-v2/service/s3/types" "github.com/aws/aws-sdk-go/aws/credentials/stscreds" "github.com/aws/aws-sdk-go/aws/session" - "github.com/aws/aws-sdk-go/service/s3" "github.com/aws/aws-sdk-go/service/s3/s3manager" "github.com/sirupsen/logrus" ) @@ -28,7 +31,6 @@ type S3FileHandler struct { // Define logger functions to ensure that logs get sent to: // 1. Splunk (Logger.*) // 2. stdout (Jenkins) - func (handler *S3FileHandler) Infof(format string, rest ...interface{}) { handler.Logger.Infof(format, rest...) } @@ -45,7 +47,7 @@ func (handler *S3FileHandler) LoadOptOutFiles(path string) (suppressList *[]*Opt var result []*OptOutFilenameMetadata bucket, prefix := ParseS3Uri(path) - s3Objects, err := handler.ListFiles(bucket, prefix) + s3Objects, err := handler.ListFiles(context.Background(), bucket, prefix) if err != nil { return &result, skipped, err @@ -69,18 +71,17 @@ func (handler *S3FileHandler) LoadOptOutFiles(path string) (suppressList *[]*Opt return &result, skipped, err } -func (handler *S3FileHandler) ListFiles(bucket, prefix string) (objects []*s3.Object, err error) { - sess, err := handler.createSession() +func (handler *S3FileHandler) ListFiles(ctx context.Context, bucket, prefix string) (objects []s3types.Object, err error) { + cfg, err := config.LoadDefaultConfig(ctx) if err != nil { - handler.Errorf("Failed to create S3 session: %s\n", err) return nil, err } - svc := s3.New(sess) + client := s3.NewFromConfig(cfg) handler.Infof("Listing objects in bucket %s, prefix %s\n", bucket, prefix) - resp, err := svc.ListObjects(&s3.ListObjectsInput{ + resp, err := client.ListObjects(ctx, &s3.ListObjectsInput{ Bucket: aws.String(bucket), Prefix: aws.String(prefix), }) @@ -105,14 +106,20 @@ func (handler *S3FileHandler) OpenFile(metadata *OptOutFilenameMetadata) (*bufio return sc, func() 
{}, err } -func getHeadObject(bucket string, key string, sess *session.Session) (*s3.HeadObjectOutput, error) { - svc := s3.New(sess) +func getHeadObject(ctx context.Context, bucket string, key string, sess *session.Session) (*s3.HeadObjectOutput, error) { + cfg, err := config.LoadDefaultConfig(ctx) + if err != nil { + return nil, err + } + + client := s3.NewFromConfig(cfg) + input := &s3.HeadObjectInput{ Bucket: aws.String(bucket), Key: aws.String(key), } - output, err := svc.HeadObject(input) + output, err := client.HeadObject(ctx, input) if err != nil { return nil, err From fef5cf4cf26ba2f10e59cd7aabd84e2971868aa9 Mon Sep 17 00:00:00 2001 From: carlpartridge Date: Wed, 22 Oct 2025 11:29:41 -0400 Subject: [PATCH 03/16] Mid work checkin --- bcda/lambda/cclf/main.go | 32 +- bcda/lambda/optout/main.go | 16 +- bcdaworker/queueing/worker_cleanup.go | 2 +- go.mod | 4 +- go.sum | 11 +- go.work | 1 - optout/.tool-versions | 1 - optout/go.mod | 37 -- optout/go.sum | 705 -------------------------- optout/s3_file_handler.go | 129 ++--- 10 files changed, 84 insertions(+), 854 deletions(-) delete mode 100644 optout/.tool-versions delete mode 100644 optout/go.mod delete mode 100644 optout/go.sum diff --git a/bcda/lambda/cclf/main.go b/bcda/lambda/cclf/main.go index 941b28cab..765c4db5b 100644 --- a/bcda/lambda/cclf/main.go +++ b/bcda/lambda/cclf/main.go @@ -21,6 +21,7 @@ import ( "github.com/CMSgov/bcda-app/conf" "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/service/s3" "github.com/aws/aws-sdk-go-v2/service/ssm" ) @@ -28,7 +29,7 @@ func main() { // Localstack is a local-development server that mimics AWS. The endpoint variable // should only be set in local development to avoid making external calls to a real AWS account. 
if os.Getenv("LOCAL_STACK_ENDPOINT") != "" { - res, err := handleCclfImport(database.Connect(), os.Getenv("BFD_BUCKET_ROLE_ARN"), os.Getenv("BFD_S3_IMPORT_PATH")) + res, err := handleCclfImport(context.TODO(), database.Connect(), os.Getenv("BFD_BUCKET_ROLE_ARN"), os.Getenv("BFD_S3_IMPORT_PATH")) if err != nil { fmt.Printf("Failed to run opt out import: %s\n", err.Error()) } else { @@ -71,9 +72,9 @@ func attributionImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (st filepath := fmt.Sprintf("%s/%s", e.S3.Bucket.Name, e.S3.Object.Key) logger.Infof("Reading %s event for file %s", e.EventName, filepath) if cclf.CheckIfAttributionCSVFile(e.S3.Object.Key) { - return handleCSVImport(db, s3AssumeRoleArn, filepath) + return handleCSVImport(ctx, db, s3AssumeRoleArn, filepath) } else { - return handleCclfImport(db, s3AssumeRoleArn, filepath) + return handleCclfImport(ctx, db, s3AssumeRoleArn, filepath) } } } @@ -82,7 +83,7 @@ func attributionImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (st return "", nil } -func handleCSVImport(db *sql.DB, s3AssumeRoleArn, s3ImportPath string) (string, error) { +func handleCSVImport(ctx context.Context, db *sql.DB, s3AssumeRoleArn, s3ImportPath string) (string, error) { env := conf.GetEnv("ENV") appName := conf.GetEnv("APP_NAME") logger := configureLogger(env, appName) @@ -91,19 +92,28 @@ func handleCSVImport(db *sql.DB, s3AssumeRoleArn, s3ImportPath string) (string, pool := database.ConnectPool() defer pool.Close() + cfg, err := config.LoadDefaultConfig(ctx) + if err != nil { + logger.Error("error loading default config: ", err) + return "", err + } + client := s3.NewFromConfig(cfg) + importer := cclf.CSVImporter{ Logger: logger, PgxPool: pool, FileProcessor: &cclf.S3FileProcessor{ Handler: optout.S3FileHandler{ + Ctx: ctx, + Client: client, Logger: logger, Endpoint: os.Getenv("LOCAL_STACK_ENDPOINT"), AssumeRoleArn: s3AssumeRoleArn, }, }, } - err := importer.ImportCSV(s3ImportPath) + err = 
importer.ImportCSV(s3ImportPath) if err != nil { logger.Error("error returned from ImportCSV: ", err) return "", err @@ -111,6 +121,7 @@ func handleCSVImport(db *sql.DB, s3AssumeRoleArn, s3ImportPath string) (string, result := fmt.Sprintf("Completed CSV import. Successfully imported %v. See logs for more details.", s3ImportPath) logger.Info(result) + return result, nil } @@ -132,14 +143,23 @@ func loadBCDAParams() error { return nil } -func handleCclfImport(db *sql.DB, s3AssumeRoleArn, s3ImportPath string) (string, error) { +func handleCclfImport(ctx context.Context, db *sql.DB, s3AssumeRoleArn, s3ImportPath string) (string, error) { env := conf.GetEnv("ENV") appName := conf.GetEnv("APP_NAME") logger := configureLogger(env, appName) logger = logger.WithFields(logrus.Fields{"import_filename": s3ImportPath}) + cfg, err := config.LoadDefaultConfig(ctx) + if err != nil { + logger.Error("error loading default config: ", err) + return "", err + } + client := s3.NewFromConfig(cfg) + fileProcessor := cclf.S3FileProcessor{ Handler: optout.S3FileHandler{ + Ctx: ctx, + Client: client, Logger: logger, Endpoint: os.Getenv("LOCAL_STACK_ENDPOINT"), AssumeRoleArn: s3AssumeRoleArn, diff --git a/bcda/lambda/optout/main.go b/bcda/lambda/optout/main.go index 50c46609e..ab69787c9 100644 --- a/bcda/lambda/optout/main.go +++ b/bcda/lambda/optout/main.go @@ -22,6 +22,7 @@ import ( "github.com/CMSgov/bcda-app/conf" "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/service/s3" "github.com/aws/aws-sdk-go-v2/service/ssm" ) @@ -29,7 +30,7 @@ func main() { // Localstack is a local-development server that mimics AWS. The endpoint variable // should only be set in local development to avoid making external calls to a real AWS account. 
if os.Getenv("LOCAL_STACK_ENDPOINT") != "" { - res, err := handleOptOutImport(database.Connect(), os.Getenv("BFD_BUCKET_ROLE_ARN"), os.Getenv("BFD_S3_IMPORT_PATH")) + res, err := handleOptOutImport(context.Background(), database.Connect(), os.Getenv("BFD_BUCKET_ROLE_ARN"), os.Getenv("BFD_S3_IMPORT_PATH")) if err != nil { fmt.Printf("Failed to run opt out import: %s\n", err.Error()) } else { @@ -65,7 +66,7 @@ func optOutImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (string, dir := bcdaaws.ParseS3Directory(e.S3.Bucket.Name, e.S3.Object.Key) logger.Infof("Reading %s event for directory %s", e.EventName, dir) - return handleOptOutImport(db, s3AssumeRoleArn, dir) + return handleOptOutImport(ctx, db, s3AssumeRoleArn, dir) } } @@ -85,14 +86,23 @@ func loadBfdS3Params(ctx context.Context) (string, error) { return bcdaaws.GetParameter(ctx, ssmClient, fmt.Sprintf("/cclf-import/bcda/%s/bfd-bucket-role-arn", env)) } -func handleOptOutImport(db *sql.DB, s3AssumeRoleArn, s3ImportPath string) (string, error) { +func handleOptOutImport(ctx context.Context, db *sql.DB, s3AssumeRoleArn, s3ImportPath string) (string, error) { env := conf.GetEnv("ENV") appName := conf.GetEnv("APP_NAME") logger := configureLogger(env, appName) repo := postgres.NewRepository(db) + cfg, err := config.LoadDefaultConfig(ctx) + if err != nil { + logger.Error("error loading default config: ", err) + return "", err + } + client := s3.NewFromConfig(cfg) + importer := suppression.OptOutImporter{ FileHandler: &optout.S3FileHandler{ + Ctx: ctx, + Client: client, Logger: logger, Endpoint: os.Getenv("LOCAL_STACK_ENDPOINT"), AssumeRoleArn: s3AssumeRoleArn, diff --git a/bcdaworker/queueing/worker_cleanup.go b/bcdaworker/queueing/worker_cleanup.go index a2bd8fe42..dee60e2a7 100644 --- a/bcdaworker/queueing/worker_cleanup.go +++ b/bcdaworker/queueing/worker_cleanup.go @@ -49,7 +49,7 @@ func (w *CleanupJobWorker) Work(ctx context.Context, rjob *river.Job[worker_type payloadDir := 
conf.GetEnv("FHIR_PAYLOAD_DIR") environment := conf.GetEnv("DEPLOYMENT_TARGET") - params, err := getAWSParams() + params, err := getAWSParams(ctx) if err != nil { logger.Error("Unable to extract Slack Token from parameter store: %+v", err) return err diff --git a/go.mod b/go.mod index 61037b113..1229d159a 100644 --- a/go.mod +++ b/go.mod @@ -40,8 +40,9 @@ require ( ) require ( - github.com/CMSgov/bcda-app/optout v0.0.0-20231214212912-50fa0a1c3c8a + github.com/aws/aws-sdk-go-v2 v1.39.3 github.com/aws/aws-sdk-go-v2/config v1.31.13 + github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.33 github.com/aws/aws-sdk-go-v2/service/s3 v1.27.11 github.com/aws/aws-sdk-go-v2/service/ssm v1.66.0 github.com/ccoveille/go-safecast v1.6.1 @@ -57,7 +58,6 @@ require ( dario.cat/mergo v1.0.1 // indirect github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect github.com/Microsoft/go-winio v0.6.2 // indirect - github.com/aws/aws-sdk-go-v2 v1.39.3 // indirect github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.8 // indirect github.com/aws/aws-sdk-go-v2/credentials v1.18.17 // indirect github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.10 // indirect diff --git a/go.sum b/go.sum index 01b2004db..6b9a4ff7e 100644 --- a/go.sum +++ b/go.sum @@ -74,8 +74,6 @@ github.com/BurntSushi/toml v0.4.1 h1:GaI7EiDXDRfa8VshkTj7Fym7ha+y8/XxIgD2okUIjLw github.com/BurntSushi/toml v0.4.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/BurntSushi/xgbutil v0.0.0-20160919175755-f7c97cef3b4e/go.mod h1:uw9h2sd4WWHOPdJ13MQpwK5qYWKYDumDqxWWIknEQ+k= -github.com/CMSgov/bcda-app/optout v0.0.0-20231214212912-50fa0a1c3c8a h1:mGoaslgaLwvwj5ThlT33e1pGlEaHkps8wJj0FRVdT2c= -github.com/CMSgov/bcda-app/optout v0.0.0-20231214212912-50fa0a1c3c8a/go.mod h1:XvnzFRWW2zQAirVykIhafoPcORJdz6/zNzN7NEyMmH4= github.com/DATA-DOG/go-sqlmock v1.5.0 
h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60= github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM= github.com/DataDog/datadog-go v2.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= @@ -123,18 +121,24 @@ github.com/aws/aws-sdk-go-v2 v1.39.3 h1:h7xSsanJ4EQJXG5iuW4UqgP7qBopLpj84mpkNx3w github.com/aws/aws-sdk-go-v2 v1.39.3/go.mod h1:yWSxrnioGUZ4WVv9TgMrNUeLV3PFESn/v+6T/Su8gnM= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.8 h1:tcFliCWne+zOuUfKNRn8JdFBuWPDuISDH08wD2ULkhk= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.8/go.mod h1:JTnlBSot91steJeti4ryyu/tLd4Sk84O5W22L7O2EQU= +github.com/aws/aws-sdk-go-v2/config v1.17.7/go.mod h1:dN2gja/QXxFF15hQreyrqYhLBaQo1d9ZKe/v/uplQoI= github.com/aws/aws-sdk-go-v2/config v1.31.13 h1:wcqQB3B0PgRPUF5ZE/QL1JVOyB0mbPevHFoAMpemR9k= github.com/aws/aws-sdk-go-v2/config v1.31.13/go.mod h1:ySB5D5ybwqGbT6c3GszZ+u+3KvrlYCUQNo62+hkKOFk= +github.com/aws/aws-sdk-go-v2/credentials v1.12.20/go.mod h1:UKY5HyIux08bbNA7Blv4PcXQ8cTkGh7ghHMFklaviR4= github.com/aws/aws-sdk-go-v2/credentials v1.18.17 h1:skpEwzN/+H8cdrrtT8y+rvWJGiWWv0DeNAe+4VTf+Vs= github.com/aws/aws-sdk-go-v2/credentials v1.18.17/go.mod h1:Ed+nXsaYa5uBINovJhcAWkALvXw2ZLk36opcuiSZfJM= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.12.17/go.mod h1:yIkQcCDYNsZfXpd5UX2Cy+sWA1jPgIhGTw9cOBzfVnQ= github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.10 h1:UuGVOX48oP4vgQ36oiKmW9RuSeT8jlgQgBFQD+HUiHY= github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.10/go.mod h1:vM/Ini41PzvudT4YkQyE/+WiQJiQ6jzeDyU8pQKwCac= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.33 h1:fAoVmNGhir6BR+RU0/EI+6+D7abM+MCwWf8v4ip5jNI= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.33/go.mod h1:84XgODVR8uRhmOnUkKGUZKqIMxmjmLOR8Uyp7G/TPwc= github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.23/go.mod h1:2DFxAQ9pfIRy0imBCJv+vZ2X6RKxves6fbnEuSry6b4= github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.10 
h1:mj/bdWleWEh81DtpdHKkw41IrS+r3uw1J/VQtbwYYp8= github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.10/go.mod h1:7+oEMxAZWP8gZCyjcm9VicI0M61Sx4DJtcGfKYv2yKQ= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.17/go.mod h1:pRwaTYCJemADaqCbUAxltMoHKata7hmB5PjEXeu0kfg= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.10 h1:wh+/mn57yhUrFtLIxyFPh2RgxgQz/u+Yrf7hiHGHqKY= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.10/go.mod h1:7zirD+ryp5gitJJ2m1BBux56ai8RIRDykXZrJSp540w= +github.com/aws/aws-sdk-go-v2/internal/ini v1.3.24/go.mod h1:jULHjqqjDlbyTa7pfM7WICATnOv+iOhjletM3N0Xbu8= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 h1:WKuaxf++XKWlHWu9ECbMlha8WOEGm0OUEZqm4K/Gcfk= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4/go.mod h1:ZWy7j6v1vWGmPReu0iSGvRiise4YI5SkR3OHKTZ6Wuc= github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.14 h1:ZSIPAkAsCCjYrhqfw2+lNzWDzxzHXEckFkTePL5RSWQ= @@ -153,10 +157,13 @@ github.com/aws/aws-sdk-go-v2/service/s3 v1.27.11 h1:3/gm/JTX9bX8CpzTgIlrtYpB3EVB github.com/aws/aws-sdk-go-v2/service/s3 v1.27.11/go.mod h1:fmgDANqTUCxciViKl9hb/zD5LFbvPINFRgWhDbR+vZo= github.com/aws/aws-sdk-go-v2/service/ssm v1.66.0 h1:45VTQmiADmmooUvYSCiMvoDCln0FBxAEfmj7HDFTa3w= github.com/aws/aws-sdk-go-v2/service/ssm v1.66.0/go.mod h1:L5XWT5tckol5yKkYc8O2+jZBZgF/tFzVQ5QE00PJUjU= +github.com/aws/aws-sdk-go-v2/service/sso v1.11.23/go.mod h1:/w0eg9IhFGjGyyncHIQrXtU8wvNsTJOP0R6PPj0wf80= github.com/aws/aws-sdk-go-v2/service/sso v1.29.7 h1:fspVFg6qMx0svs40YgRmE7LZXh9VRZvTT35PfdQR6FM= github.com/aws/aws-sdk-go-v2/service/sso v1.29.7/go.mod h1:BQTKL3uMECaLaUV3Zc2L4Qybv8C6BIXjuu1dOPyxTQs= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.13.5/go.mod h1:csZuQY65DAdFBt1oIjO5hhBR49kQqop4+lcuCjf2arA= github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.2 h1:scVnW+NLXasGOhy7HhkdT9AGb6kjgW7fJ5xYkUaqHs0= github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.2/go.mod h1:FRNCY3zTEWZXBKm2h5UBUPvCVDOecTad9KhynDyGBc0= +github.com/aws/aws-sdk-go-v2/service/sts v1.16.19/go.mod 
h1:h4J3oPZQbxLhzGnk+j9dfYHi5qIOVJ5kczZd658/ydM= github.com/aws/aws-sdk-go-v2/service/sts v1.38.7 h1:VEO5dqFkMsl8QZ2yHsFDJAIZLAkEbaYDB+xdKi0Feic= github.com/aws/aws-sdk-go-v2/service/sts v1.38.7/go.mod h1:L1xxV3zAdB+qVrVW/pBIrIAnHFWHo6FBbFe4xOGsG/o= github.com/aws/smithy-go v1.13.3/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA= diff --git a/go.work b/go.work index 520cf979e..6d37db907 100644 --- a/go.work +++ b/go.work @@ -2,5 +2,4 @@ go 1.25.1 use ( . - ./optout ) diff --git a/optout/.tool-versions b/optout/.tool-versions deleted file mode 100644 index 4a2b099ad..000000000 --- a/optout/.tool-versions +++ /dev/null @@ -1 +0,0 @@ -golang 1.25.1 diff --git a/optout/go.mod b/optout/go.mod deleted file mode 100644 index 39c077b53..000000000 --- a/optout/go.mod +++ /dev/null @@ -1,37 +0,0 @@ -module github.com/CMSgov/bcda-app/optout - -go 1.25.1 - -require ( - github.com/CMSgov/bcda-app v0.0.0-20250626141638-2296e4a365d9 - github.com/aws/aws-sdk-go v1.55.8 - github.com/ccoveille/go-safecast v1.6.1 - github.com/pkg/errors v0.9.1 - github.com/sirupsen/logrus v1.9.3 - github.com/stretchr/testify v1.10.0 -) - -require ( - github.com/davecgh/go-spew v1.1.1 // indirect - github.com/fsnotify/fsnotify v1.5.4 // indirect - github.com/google/go-cmp v0.6.0 // indirect - github.com/hashicorp/hcl v1.0.0 // indirect - github.com/jmespath/go-jmespath v0.4.0 // indirect - github.com/magiconair/properties v1.8.6 // indirect - github.com/mitchellh/mapstructure v1.5.0 // indirect - github.com/pelletier/go-toml v1.9.5 // indirect - github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/rogpeppe/go-internal v1.12.0 // indirect - github.com/spf13/afero v1.8.2 // indirect - github.com/spf13/cast v1.5.0 // indirect - github.com/spf13/jwalterweatherman v1.1.0 // indirect - github.com/spf13/pflag v1.0.5 // indirect - github.com/spf13/viper v1.9.0 // indirect - github.com/subosito/gotenv v1.3.0 // indirect - golang.org/x/sys v0.35.0 // indirect - golang.org/x/text v0.28.0 // 
indirect - gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect - gopkg.in/ini.v1 v1.66.6 // indirect - gopkg.in/yaml.v2 v2.4.0 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect -) diff --git a/optout/go.sum b/optout/go.sum deleted file mode 100644 index 42b15dc5c..000000000 --- a/optout/go.sum +++ /dev/null @@ -1,705 +0,0 @@ -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= -cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= -cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= -cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= -cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= -cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= -cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= -cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= -cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= -cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= -cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= -cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= -cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= -cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= -cloud.google.com/go v0.78.0/go.mod 
h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= -cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= -cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= -cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= -cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= -cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= -cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= -cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= -cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= -cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= -cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= -cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= -cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= -cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= -cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= -cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/firestore v1.6.0/go.mod h1:afJwI0vaXwAG54kI7A//lP/lSPDkQORQuMkv56TxEPU= -cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= -cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= -cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= -cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= -cloud.google.com/go/storage v1.5.0/go.mod 
h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= -cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= -cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= -cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= -dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/CMSgov/bcda-app v0.0.0-20250626141638-2296e4a365d9 h1:g49+NtrDXiVlT1KUFN/BS3Nyu8kyy3crnO92UVQwl8c= -github.com/CMSgov/bcda-app v0.0.0-20250626141638-2296e4a365d9/go.mod h1:eh1ayimrLIjllJrcbgOF8PEYAE1JR6o02oz+6X/wK8c= -github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= -github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= -github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= -github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= -github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= -github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= -github.com/aws/aws-sdk-go v1.55.8 h1:JRmEUbU52aJQZ2AjX4q4Wu7t4uZjOu71uyNmaWlUkJQ= -github.com/aws/aws-sdk-go v1.55.8/go.mod h1:ZkViS9AqA6otK+JBBNH2++sx1sgxrPKcSzPPvQkUtXk= -github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= -github.com/ccoveille/go-safecast v1.6.1 h1:Nb9WMDR8PqhnKCVs2sCB+OqhohwO5qaXtCviZkIff5Q= -github.com/ccoveille/go-safecast v1.6.1/go.mod 
h1:QqwNjxQ7DAqY0C721OIO9InMk9zCwcsO7tnRuHytad8= -github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= -github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= -github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/go-control-plane v0.9.7/go.mod 
h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= -github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= -github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= -github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= -github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps= -github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= -github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI= -github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU= -github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= -github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod 
h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= -github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= -github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= -github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= -github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= -github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= -github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= -github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod 
h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= -github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= -github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= -github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.6/go.mod 
h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= -github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= -github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= -github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof 
v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= -github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= -github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= -github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/hashicorp/consul/api v1.10.1/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M= -github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= -github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= -github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= -github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= -github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= -github.com/hashicorp/go-multierror v1.1.0/go.mod 
h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= -github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= -github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= -github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= -github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= -github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= -github.com/hashicorp/mdns v1.0.1/go.mod h1:4gW7WsVCke5TE7EPeYliwHlRUyBtfCwuFwuMg2DmyNY= -github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= -github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk= -github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= -github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= -github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= -github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= -github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= 
-github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= -github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= -github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= -github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= -github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo= -github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= -github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= -github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty 
v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= -github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= -github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= -github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= -github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= -github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.4.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= -github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= -github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= -github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= -github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= -github.com/pkg/errors v0.8.1/go.mod 
h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= -github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= -github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= -github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= -github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= -github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= -github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/sagikazarmark/crypt v0.1.0/go.mod h1:B/mN0msZuINBtQ1zZLEQcegFJJf9vnYIR88KRMEuODE= -github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= -github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= -github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= -github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= -github.com/spf13/afero v1.8.2 
h1:xehSyVa0YnHWsJ49JFljMpg1HX19V6NDZ1fkm1Xznbo= -github.com/spf13/afero v1.8.2/go.mod h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfAqwo= -github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w= -github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU= -github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= -github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= -github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.9.0 h1:yR6EXjTp0y0cLN8OZg1CRZmOBdI88UcGkhgyJhu6nZk= -github.com/spf13/viper v1.9.0/go.mod h1:+i6ajR7OX2XaiBkrcZJFK21htRk7eDeLg7+O6bhUPP4= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= -github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= -github.com/subosito/gotenv v1.3.0 h1:mjC+YW8QpAdXibNi+vNWgzmgBH4+5l5dCXv8cNysBLI= -github.com/subosito/gotenv v1.3.0/go.mod h1:YzJjq/33h7nrwdY+iHMhEOEEbW0ovIz0tB6t6PwAXzs= 
-github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= -go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= -go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= -go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= -go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= -go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= -go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= -go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= -go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= -go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= -golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto 
v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= -golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= -golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= -golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/image 
v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= -golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= -golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= -golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= -golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= -golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= -golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod 
h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net 
v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod 
h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= -golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod 
h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod 
h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
-golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys 
v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= -golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= -golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= -golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools 
v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools 
v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools 
v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= -golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= -golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= 
-google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= -google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= -google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= -google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= -google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= -google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= -google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= -google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= -google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= -google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= -google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= -google.golang.org/api 
v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= -google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= -google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto 
v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= -google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= 
-google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= -google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod 
h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= -google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= -google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= -google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= -google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.1/go.mod 
h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= -google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= -google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= -google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= -google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= -google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= -google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= -google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= -google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= -google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= 
-google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= -google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/ini.v1 v1.63.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/ini.v1 v1.66.6 h1:LATuAqN/shcYAOkv3wl2L4rkaKqkcgTBQjOyYDvcPKI= -gopkg.in/ini.v1 v1.66.6/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= 
-gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= -honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= -rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/optout/s3_file_handler.go b/optout/s3_file_handler.go index 985b3480c..7b11a9f61 100644 --- a/optout/s3_file_handler.go +++ b/optout/s3_file_handler.go @@ -5,20 +5,21 @@ import ( "bytes" "context" "fmt" - "os" + "time" "github.com/aws/aws-sdk-go-v2/aws" "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/feature/s3/manager" "github.com/aws/aws-sdk-go-v2/service/s3" s3types 
"github.com/aws/aws-sdk-go-v2/service/s3/types" - "github.com/aws/aws-sdk-go/aws/credentials/stscreds" "github.com/aws/aws-sdk-go/aws/session" - "github.com/aws/aws-sdk-go/service/s3/s3manager" "github.com/sirupsen/logrus" ) // S3FileHandler manages files located on AWS S3. type S3FileHandler struct { + Ctx context.Context + Client *s3.Client Logger logrus.FieldLogger // Optional S3 endpoint to use for connection. Endpoint string @@ -47,7 +48,7 @@ func (handler *S3FileHandler) LoadOptOutFiles(path string) (suppressList *[]*Opt var result []*OptOutFilenameMetadata bucket, prefix := ParseS3Uri(path) - s3Objects, err := handler.ListFiles(context.Background(), bucket, prefix) + s3Objects, err := handler.ListFiles(bucket, prefix) if err != nil { return &result, skipped, err @@ -71,8 +72,8 @@ func (handler *S3FileHandler) LoadOptOutFiles(path string) (suppressList *[]*Opt return &result, skipped, err } -func (handler *S3FileHandler) ListFiles(ctx context.Context, bucket, prefix string) (objects []s3types.Object, err error) { - cfg, err := config.LoadDefaultConfig(ctx) +func (handler *S3FileHandler) ListFiles(bucket, prefix string) (objects []s3types.Object, err error) { + cfg, err := config.LoadDefaultConfig(handler.Ctx) if err != nil { return nil, err } @@ -81,7 +82,7 @@ func (handler *S3FileHandler) ListFiles(ctx context.Context, bucket, prefix stri handler.Infof("Listing objects in bucket %s, prefix %s\n", bucket, prefix) - resp, err := client.ListObjects(ctx, &s3.ListObjectsInput{ + resp, err := client.ListObjects(handler.Ctx, &s3.ListObjectsInput{ Bucket: aws.String(bucket), Prefix: aws.String(prefix), }) @@ -106,48 +107,24 @@ func (handler *S3FileHandler) OpenFile(metadata *OptOutFilenameMetadata) (*bufio return sc, func() {}, err } -func getHeadObject(ctx context.Context, bucket string, key string, sess *session.Session) (*s3.HeadObjectOutput, error) { - cfg, err := config.LoadDefaultConfig(ctx) - if err != nil { - return nil, err - } - - client := 
s3.NewFromConfig(cfg) - - input := &s3.HeadObjectInput{ - Bucket: aws.String(bucket), - Key: aws.String(key), - } - - output, err := client.HeadObject(ctx, input) - - if err != nil { - return nil, err - } - - return output, nil -} - func (handler *S3FileHandler) OpenFileBytes(filePath string) ([]byte, error) { handler.Infof("Opening file %s\n", filePath) bucket, file := ParseS3Uri(filePath) - sess, err := handler.createSession() - if err != nil { - return nil, err + input := &s3.HeadObjectInput{ + Bucket: aws.String(bucket), + Key: aws.String(filePath), } - - downloader := s3manager.NewDownloader(sess) - - headObj, err := getHeadObject(bucket, file, sess) + output, err := handler.Client.HeadObject(handler.Ctx, input) if err != nil { return nil, err } - buf := make([]byte, int(*headObj.ContentLength)) - buff := aws.NewWriteAtBuffer(buf) + buff := make([]byte, int(output.ContentLength)) + w := manager.NewWriteAtBuffer(buff) - numBytes, err := downloader.Download(buff, &s3.GetObjectInput{ + downloader := manager.NewDownloader(handler.Client) + numBytes, err := downloader.Download(handler.Ctx, w, &s3.GetObjectInput{ Bucket: aws.String(bucket), Key: aws.String(file), }) @@ -157,8 +134,8 @@ func (handler *S3FileHandler) OpenFileBytes(filePath string) ([]byte, error) { } handler.Logger.WithField("file_size_bytes", numBytes).Infof("file downloaded: size=%d\n", numBytes) - byte_arr := buff.Bytes() - return byte_arr, err + + return buff, err } func (handler *S3FileHandler) CleanupOptOutFiles(suppresslist []*OptOutFilenameMetadata) error { @@ -189,69 +166,29 @@ func (handler *S3FileHandler) CleanupOptOutFiles(suppresslist []*OptOutFilenameM return nil } -// Creates a new AWS S3 session. If the handler is given a custom S3 endpoint -// and/or IAM role ARN to assume, the new session connects using those parameters. 
-func (handler *S3FileHandler) createSession() (*session.Session, error) { - if handler.Session != nil { - return handler.Session, nil - } - - sess := session.Must(session.NewSession()) - - config := aws.Config{ - Region: aws.String("us-east-1"), - } - - if handler.Endpoint != "" { - config.S3ForcePathStyle = aws.Bool(true) - config.Endpoint = &handler.Endpoint - } - - if handler.AssumeRoleArn != "" && os.Getenv("LOCAL_STACK_ENDPOINT") == "" { - config.Credentials = stscreds.NewCredentials( - sess, - handler.AssumeRoleArn, - ) - } - - sess, err := session.NewSessionWithOptions(session.Options{ - Config: config, - }) - - if err == nil { - handler.Session = sess - } - - return sess, err -} - func (handler *S3FileHandler) Delete(filePath string) error { - sess, err := handler.createSession() - - if err != nil { - handler.Errorf("File %s failed to clean up properly, error occurred while creating S3 session: %v\n", filePath, err) - return err - } - bucket, path := ParseS3Uri(filePath) - svc := s3.New(sess) - _, err = svc.DeleteObject(&s3.DeleteObjectInput{Bucket: aws.String(bucket), Key: aws.String(path)}) - - if err != nil { - handler.Errorf("File %s failed to clean up properly, error occurred while deleting object: %v\n", filePath, err) - return err - } - - err = svc.WaitUntilObjectNotExists(&s3.HeadObjectInput{ + _, err := handler.Client.DeleteObject(handler.Ctx, &s3.DeleteObjectInput{ Bucket: aws.String(bucket), Key: aws.String(path), }) - if err != nil { - handler.Errorf("File %s failed to clean up properly, error occurred while waiting for object to be deleted: %v\n", filePath, err) + handler.Errorf("file %s failed to clean up properly, error occurred while deleting object: %v\n", filePath, err) return err + } else { + err = s3.NewObjectNotExistsWaiter(handler.Client).Wait( + handler.Ctx, + &s3.HeadObjectInput{ + Bucket: aws.String(bucket), + Key: aws.String(path), + }, + time.Minute, + ) + if err != nil { + handler.Errorf("File %s failed to clean up properly, 
error occurred while waiting for object to be deleted: %v\n", filePath, err) + } } - return nil + return err } From 0253fe35b1858b01fce68a24074b21efccdf9090 Mon Sep 17 00:00:00 2001 From: carlpartridge Date: Wed, 22 Oct 2025 17:03:16 -0400 Subject: [PATCH 04/16] Params tests working --- .vscode/settings.json | 1 + bcda/aws/parameters_test.go | 168 +++++++++++++++--------------------- bcda/aws/session.go | 54 ++++++------ bcda/aws/session_test.go | 66 +++++++------- docker-compose.test.yml | 8 +- 5 files changed, 134 insertions(+), 163 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index ee08e2e98..920ee7cbf 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,6 +1,7 @@ { "go.testEnvVars": { "ENV": "local", + "AWS_ENDPOINT_URL": "http://localhost:4566", "LOCAL_STACK_ENDPOINT": "http://localhost:4566", "DB": "postgresql://postgres:toor@localhost:15432", "DB_HOST_URL": "postgresql://postgres:toor@localhost:15432?sslmode=disable", diff --git a/bcda/aws/parameters_test.go b/bcda/aws/parameters_test.go index 34a64c6a6..09a6315ea 100644 --- a/bcda/aws/parameters_test.go +++ b/bcda/aws/parameters_test.go @@ -1,80 +1,63 @@ package bcdaaws import ( + "context" "errors" - "fmt" "testing" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/aws/client" - "github.com/aws/aws-sdk-go/service/ssm" + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/service/ssm" "github.com/stretchr/testify/assert" ) func TestGetParameter(t *testing.T) { key1 := "key1" - parm1 := "parm1" + val1 := "val1" + cfg, err := config.LoadDefaultConfig(context.TODO(), + config.WithRegion("us-east-1"), + ) + assert.Nil(t, err) + client := ssm.NewFromConfig(cfg) + + overwrite := true + paramInput := ssm.PutParameterInput{ + Name: &key1, + Value: &val1, + Overwrite: &overwrite, + Type: "String", + } + + _, err = client.PutParameter(t.Context(), ¶mInput) + assert.Nil(t, err) tests := []struct { - keyname string - expectedValue string - 
expectedErr error - ssmNew func(p client.ConfigProvider, cfgs ...*aws.Config) *ssm.SSM - ssmsvcGetParameter func(c *ssm.SSM, input *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) + desc string + keyname string + expectedValue string + expectedErr error }{ { - // Happy path + desc: "Happy path", keyname: key1, - expectedValue: parm1, + expectedValue: val1, expectedErr: nil, - ssmNew: func(p client.ConfigProvider, cfgs ...*aws.Config) *ssm.SSM { return nil }, - ssmsvcGetParameter: func(c *ssm.SSM, input *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) { - parm := ssm.Parameter{ - Name: &key1, Value: &parm1, - } - getParametersOutput := ssm.GetParameterOutput{Parameter: &parm} - return &getParametersOutput, nil - }, }, { - // GetParameter fails - keyname: key1, + desc: "Missing parameter", + keyname: "asdf", expectedValue: "", - expectedErr: errors.New("error retrieving parameter key1 from parameter store: error"), - ssmNew: func(p client.ConfigProvider, cfgs ...*aws.Config) *ssm.SSM { return nil }, - ssmsvcGetParameter: func(c *ssm.SSM, input *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) { - return nil, errors.New("error") - }, - }, - { - // Empty parameter - keyname: key1, - expectedValue: "", - expectedErr: fmt.Errorf("no parameter store value found for %s", key1), - ssmNew: func(p client.ConfigProvider, cfgs ...*aws.Config) *ssm.SSM { return nil }, - ssmsvcGetParameter: func(c *ssm.SSM, input *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) { - val := "" - parm := ssm.Parameter{ - Name: &key1, Value: &val, - } - - getParameterOutput := ssm.GetParameterOutput{Parameter: &parm} - return &getParameterOutput, nil - }, + expectedErr: errors.New("error retrieving parameter asdf from parameter store"), }, } for _, test := range tests { - ssmNew = test.ssmNew - ssmsvcGetParameter = test.ssmsvcGetParameter - - value, err := GetParameter(nil, test.keyname) + value, err := GetParameter(t.Context(), client, test.keyname) assert.Equal(t, 
test.expectedValue, value) if test.expectedErr == nil { assert.Nil(t, err) } else { - assert.Equal(t, test.expectedErr.Error(), err.Error()) + assert.Contains(t, err.Error(), test.expectedErr.Error()) } } } @@ -82,66 +65,57 @@ func TestGetParameter(t *testing.T) { func TestGetParameters(t *testing.T) { key1 := "key1" key2 := "key2" - parm1 := "parm1" - parm2 := "parm2" + val1 := "val1" + val2 := "val2" + cfg, err := config.LoadDefaultConfig(context.TODO(), + config.WithRegion("us-east-1"), + ) + assert.Nil(t, err) + client := ssm.NewFromConfig(cfg) + + overwrite := true + paramInput1 := ssm.PutParameterInput{ + Name: &key1, + Value: &val1, + Overwrite: &overwrite, + Type: "String", + } + _, err = client.PutParameter(t.Context(), ¶mInput1) + assert.Nil(t, err) + + paramInput2 := ssm.PutParameterInput{ + Name: &key2, + Value: &val2, + Overwrite: &overwrite, + Type: "String", + } + _, err = client.PutParameter(t.Context(), ¶mInput2) + assert.Nil(t, err) tests := []struct { - keys []*string - parms map[string]string - err error - ssmNew func(p client.ConfigProvider, cfgs ...*aws.Config) *ssm.SSM - ssmsvcGetParameters func(c *ssm.SSM, input *ssm.GetParametersInput) (*ssm.GetParametersOutput, error) + desc string + keys []string + vals map[string]string + err error }{ { - // Happy path - keys: []*string{&key1, &key2}, - parms: map[string]string{key1: parm1, key2: parm2}, - err: nil, - ssmNew: func(p client.ConfigProvider, cfgs ...*aws.Config) *ssm.SSM { return nil }, - ssmsvcGetParameters: func(c *ssm.SSM, input *ssm.GetParametersInput) (*ssm.GetParametersOutput, error) { - parms := []*ssm.Parameter{ - {Name: &key1, Value: &parm1}, - {Name: &key2, Value: &parm2}, - } - getParametersOutput := ssm.GetParametersOutput{Parameters: parms} - return &getParametersOutput, nil - }, + desc: "Happy path", + keys: []string{key1, key2}, + vals: map[string]string{key1: val1, key2: val2}, + err: nil, }, { - // GetParameters fails - keys: []*string{&key1, &key2}, - parms: nil, - err: 
errors.New("error connecting to parameter store: error"), - ssmNew: func(p client.ConfigProvider, cfgs ...*aws.Config) *ssm.SSM { return nil }, - ssmsvcGetParameters: func(c *ssm.SSM, input *ssm.GetParametersInput) (*ssm.GetParametersOutput, error) { - return nil, errors.New("error") - }, - }, - { - // Invalid parameter - keys: []*string{&key1, &key2}, - parms: nil, - err: fmt.Errorf("invalid parameters error: %s,\n", key2), - ssmNew: func(p client.ConfigProvider, cfgs ...*aws.Config) *ssm.SSM { return nil }, - ssmsvcGetParameters: func(c *ssm.SSM, input *ssm.GetParametersInput) (*ssm.GetParametersOutput, error) { - parms := []*ssm.Parameter{ - {Name: &key1, Value: &parm1}, - } - invalidParms := []*string{&key2} - - getParametersOutput := ssm.GetParametersOutput{Parameters: parms, InvalidParameters: invalidParms} - return &getParametersOutput, nil - }, + desc: "Invalid parameter", + keys: []string{"invalid", key2}, + vals: nil, + err: errors.New("invalid parameters error: invalid,\n"), }, } for _, test := range tests { - ssmNew = test.ssmNew - ssmsvcGetParameters = test.ssmsvcGetParameters - - parms, err := GetParameters(nil, test.keys) + vals, err := GetParameters(t.Context(), client, test.keys) - assert.Equal(t, test.parms, parms) + assert.Equal(t, test.vals, vals) assert.Equal(t, test.err, err) } } diff --git a/bcda/aws/session.go b/bcda/aws/session.go index f07d9d759..f7b17c0d7 100644 --- a/bcda/aws/session.go +++ b/bcda/aws/session.go @@ -1,13 +1,7 @@ package bcdaaws -import ( - "context" - - "github.com/aws/aws-sdk-go-v2/config" -) - -var s3Region = "us-east-1" -var DefaultRegion = "us-east-1" +// var s3Region = "us-east-1" +// var DefaultRegion = "us-east-1" // Makes these easily mockable for testing // var newSession = session.NewSession @@ -43,31 +37,31 @@ var DefaultRegion = "us-east-1" // return sess, nil // } -func NewAWSConfig(ctx context.Context, roleArn, endpoint string) (config.Config, error) { - return config.LoadDefaultConfig(ctx, 
config.WithRegion(DefaultRegion)) - // var cfg config.Config +// func NewAWSConfig(ctx context.Context, roleArn, endpoint string) (config.Config, error) { +// return config.LoadDefaultConfig(ctx, config.WithRegion(DefaultRegion)) +// // var cfg config.Config - // // used to override for localstack - // if endpoint != "" { - // cfg = config.LoadDefaultConfig( - // ctx, - // config.WithRegion(DefaultRegion)), +// // // used to override for localstack +// // if endpoint != "" { +// // cfg = config.LoadDefaultConfig( +// // ctx, +// // config.WithRegion(DefaultRegion)), - // // cfg.S3ForcePathStyle = true - // // cfg.Endpoint = &endpoint - // } else if roleArn != "" { +// // // cfg.S3ForcePathStyle = true +// // // cfg.Endpoint = &endpoint +// // } else if roleArn != "" { - // client := stscreds.NewFromConfig(cfg) - // appCreds := stscreds.NewAssumeRoleProvider(client, roleArn) - // creds, err := appCreds.Retrieve(ctx) - // if err != nil { - // return config.Config{}, err - // } +// // client := stscreds.NewFromConfig(cfg) +// // appCreds := stscreds.NewAssumeRoleProvider(client, roleArn) +// // creds, err := appCreds.Retrieve(ctx) +// // if err != nil { +// // return config.Config{}, err +// // } - // cfg.Credentials = creds - // } else { +// // cfg.Credentials = creds +// // } else { - // } +// // } - // return cfg, nil -} +// // return cfg, nil +// } diff --git a/bcda/aws/session_test.go b/bcda/aws/session_test.go index 43c945b1a..9b7abbb1b 100644 --- a/bcda/aws/session_test.go +++ b/bcda/aws/session_test.go @@ -1,40 +1,40 @@ package bcdaaws -import ( - "errors" - "testing" +// import ( +// "errors" +// "testing" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/aws/session" - "github.com/stretchr/testify/assert" -) +// "github.com/aws/aws-sdk-go/aws" +// "github.com/aws/aws-sdk-go/aws/session" +// "github.com/stretchr/testify/assert" +// ) -func TestNewSession(t *testing.T) { - tests := []struct { - expect *session.Session - err error - newSession 
func(cfgs ...*aws.Config) (*session.Session, error) - }{ - { - // Happy path - expect: nil, - err: nil, - newSession: func(cfgs ...*aws.Config) (*session.Session, error) { return nil, nil }, - }, - { - // Error returned from NewSession - expect: nil, - err: errors.New("error"), - newSession: func(cfgs ...*aws.Config) (*session.Session, error) { return nil, errors.New("error") }, - }, - } +// func TestNewSession(t *testing.T) { +// tests := []struct { +// expect *session.Session +// err error +// newSession func(cfgs ...*aws.Config) (*session.Session, error) +// }{ +// { +// // Happy path +// expect: nil, +// err: nil, +// newSession: func(cfgs ...*aws.Config) (*session.Session, error) { return nil, nil }, +// }, +// { +// // Error returned from NewSession +// expect: nil, +// err: errors.New("error"), +// newSession: func(cfgs ...*aws.Config) (*session.Session, error) { return nil, errors.New("error") }, +// }, +// } - for _, test := range tests { - newSession = test.newSession +// for _, test := range tests { +// newSession = test.newSession - s, err := NewSession("fake_arn", "fake_endpoint") +// s, err := NewSession("fake_arn", "fake_endpoint") - assert.Equal(t, test.expect, s) - assert.Equal(t, test.err, err) - } -} +// assert.Equal(t, test.expect, s) +// assert.Equal(t, test.err, err) +// } +// } diff --git a/docker-compose.test.yml b/docker-compose.test.yml index 2ad551aae..5be5ce41b 100644 --- a/docker-compose.test.yml +++ b/docker-compose.test.yml @@ -13,14 +13,13 @@ services: # Set default values for Localstack to work - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-foobar} - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-foobar} + - AWS_ENDPOINT_URL=http://localstack:4566 - LOCAL_STACK_ENDPOINT=${LOCAL_STACK_ENDPOINT:-http://localstack:4566} - BFD_S3_ENDPOINT=${BFD_S3_ENDPOINT:-http://localstack:4566} - ENV=local - DATABASE_URL=postgresql://postgres:toor@db-unit-test:5432/bcda_test?sslmode=disable - GOLANGCI_LINT_CACHE=/root/.cache/go-build - - 
TESTCONTAINERS_HOST_OVERRIDE=host.docker.internal - # Remaining environment variables defined in ./shared_files/decrypted/local.env volumes: - ./test_results:/go/src/github.com/CMSgov/bcda-app/test_results @@ -43,11 +42,14 @@ services: image: localstack/localstack:latest environment: - AWS_DEFAULT_REGION=us-east-1 + - AWS_ENDPOINT_URL=http://localstack:4566 - GATEWAY_LISTEN=0.0.0.0:4566 - SERVICES=s3,ssm,sts,iam - DEBUG=1 ports: - - "4566-4583:4566-4583" + # - "4566-4583:4566-4583" + - "127.0.0.1:4566:4566" # LocalStack Gateway + - "127.0.0.1:4510-4559:4510-4559" volumes: - "./.localstack_volume:/var/lib/localstack" - "/var/run/docker.sock:/var/run/docker.sock" From 15a4fed6edc4fe8b5a3bead2e5acd5b076d15ff2 Mon Sep 17 00:00:00 2001 From: carlpartridge Date: Thu, 23 Oct 2025 16:59:22 -0400 Subject: [PATCH 05/16] Up aws versions, aco creds tests passing --- bcda/aws/parameters_test.go | 9 ++- bcda/lambda/admin_create_aco_creds/aws.go | 7 +-- .../lambda/admin_create_aco_creds/aws_test.go | 63 +++++++++++-------- .../admin_create_aco_creds/main_test.go | 21 ++++++- docker-compose.test.yml | 13 ++-- go.mod | 20 +++--- go.sum | 54 ++++++---------- go.work.sum | 40 ++++++------ optout/s3_file_handler.go | 2 +- 9 files changed, 119 insertions(+), 110 deletions(-) diff --git a/bcda/aws/parameters_test.go b/bcda/aws/parameters_test.go index 09a6315ea..205082da4 100644 --- a/bcda/aws/parameters_test.go +++ b/bcda/aws/parameters_test.go @@ -5,6 +5,7 @@ import ( "errors" "testing" + "github.com/aws/aws-sdk-go-v2/aws" "github.com/aws/aws-sdk-go-v2/config" "github.com/aws/aws-sdk-go-v2/service/ssm" "github.com/stretchr/testify/assert" @@ -19,11 +20,10 @@ func TestGetParameter(t *testing.T) { assert.Nil(t, err) client := ssm.NewFromConfig(cfg) - overwrite := true paramInput := ssm.PutParameterInput{ Name: &key1, Value: &val1, - Overwrite: &overwrite, + Overwrite: aws.Bool(true), Type: "String", } @@ -73,11 +73,10 @@ func TestGetParameters(t *testing.T) { assert.Nil(t, err) client 
:= ssm.NewFromConfig(cfg) - overwrite := true paramInput1 := ssm.PutParameterInput{ Name: &key1, Value: &val1, - Overwrite: &overwrite, + Overwrite: aws.Bool(true), Type: "String", } _, err = client.PutParameter(t.Context(), ¶mInput1) @@ -86,7 +85,7 @@ func TestGetParameters(t *testing.T) { paramInput2 := ssm.PutParameterInput{ Name: &key2, Value: &val2, - Overwrite: &overwrite, + Overwrite: aws.Bool(true), Type: "String", } _, err = client.PutParameter(t.Context(), ¶mInput2) diff --git a/bcda/lambda/admin_create_aco_creds/aws.go b/bcda/lambda/admin_create_aco_creds/aws.go index 8df1e2bfd..75f47700d 100644 --- a/bcda/lambda/admin_create_aco_creds/aws.go +++ b/bcda/lambda/admin_create_aco_creds/aws.go @@ -156,13 +156,12 @@ func putObject(ctx context.Context, client *s3.Client, acoID, creds, credsBucket Key: aws.String(fmt.Sprintf("%s-creds", acoID)), } - result, err := client.PutObject(ctx, s3Input) + _, err := client.PutObject(ctx, s3Input) if err != nil { return "", err } - // TODO - fmt.Printf("\n--- result metadata %+v", result.ResultMetadata) - return "", nil + + return (credsBucket + "/" + acoID + "-creds"), nil } func adjustedEnv() string { diff --git a/bcda/lambda/admin_create_aco_creds/aws_test.go b/bcda/lambda/admin_create_aco_creds/aws_test.go index 560a3cc6f..cdbfbb0d7 100644 --- a/bcda/lambda/admin_create_aco_creds/aws_test.go +++ b/bcda/lambda/admin_create_aco_creds/aws_test.go @@ -1,33 +1,42 @@ package main import ( + "context" "os" "testing" "github.com/CMSgov/bcda-app/conf" - "github.com/aws/aws-sdk-go/service/s3" - "github.com/aws/aws-sdk-go/service/s3/s3iface" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/service/s3" "github.com/stretchr/testify/assert" ) -type mockS3 struct { - s3iface.S3API -} - -func (m *mockS3) PutObject(*s3.PutObjectInput) (*s3.PutObjectOutput, error) { - return &s3.PutObjectOutput{}, nil -} - func TestPutObject(t *testing.T) { - mock := &mockS3{} + cfg, err := 
config.LoadDefaultConfig(context.TODO(), + config.WithRegion("us-east-1"), + ) + assert.Nil(t, err) + client := s3.NewFromConfig(cfg, func(o *s3.Options) { + o.UsePathStyle = true + }) + + bucketInput := &s3.CreateBucketInput{ + Bucket: aws.String("test-bucket"), + } + _, err = client.CreateBucket(t.Context(), bucketInput) + assert.Nil(t, err) - result, err := putObject(mock, "test-filename", "test-creds", "test-bucket") + result, err := putObject(t.Context(), client, "test-filename", "test-creds", "test-bucket") assert.Nil(t, err) - assert.Equal(t, result, "{\n\n}") + assert.Equal(t, result, "test-bucket/test-filename-creds") } func TestAdjustedEnv(t *testing.T) { origEnv := conf.GetEnv("ENV") + t.Cleanup(func() { + conf.SetEnv(t, "ENV", origEnv) + }) conf.SetEnv(t, "ENV", "dev") resultEnv := adjustedEnv() @@ -44,8 +53,6 @@ func TestAdjustedEnv(t *testing.T) { conf.SetEnv(t, "ENV", "prod") resultEnv = adjustedEnv() assert.Equal(t, resultEnv, "prod") - - conf.SetEnv(t, "ENV", origEnv) } func TestSetupEnvironment(t *testing.T) { @@ -56,6 +63,20 @@ func TestSetupEnvironment(t *testing.T) { origSSASUseTLS := os.Getenv("SSAS_USE_TLS") origBCDACAFile := os.Getenv("BCDA_CA_FILE") + t.Cleanup(func() { + // restore original env vars + err := os.Setenv("SSAS_URL", origSSASURL) + assert.Nil(t, err) + err = os.Setenv("BCDA_SSAS_CLIENT_ID", origBCDASSASClientID) + assert.Nil(t, err) + err = os.Setenv("BCDA_SSAS_SECRET", origBCDASSASSecret) + assert.Nil(t, err) + err = os.Setenv("SSAS_USE_TLS", origSSASUseTLS) + assert.Nil(t, err) + err = os.Setenv("BCDA_CA_FILE", origBCDACAFile) + assert.Nil(t, err) + }) + err := setupEnvironment(awsParams{ ssasURL: "test-SSAS_URL", clientID: "test-BCDA_SSAS_CLIENT_ID", @@ -70,16 +91,4 @@ func TestSetupEnvironment(t *testing.T) { assert.Equal(t, pemFilePath, os.Getenv("BCDA_CA_FILE")) assert.FileExists(t, pemFilePath) - - // restore original env vars - err = os.Setenv("SSAS_URL", origSSASURL) - assert.Nil(t, err) - err = 
os.Setenv("BCDA_SSAS_CLIENT_ID", origBCDASSASClientID) - assert.Nil(t, err) - err = os.Setenv("BCDA_SSAS_SECRET", origBCDASSASSecret) - assert.Nil(t, err) - err = os.Setenv("SSAS_USE_TLS", origSSASUseTLS) - assert.Nil(t, err) - err = os.Setenv("BCDA_CA_FILE", origBCDACAFile) - assert.Nil(t, err) } diff --git a/bcda/lambda/admin_create_aco_creds/main_test.go b/bcda/lambda/admin_create_aco_creds/main_test.go index d3b2bdc12..a30b9f30c 100644 --- a/bcda/lambda/admin_create_aco_creds/main_test.go +++ b/bcda/lambda/admin_create_aco_creds/main_test.go @@ -5,6 +5,9 @@ import ( "testing" "github.com/CMSgov/bcda-app/bcda/auth" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/service/s3" "github.com/stretchr/testify/assert" ) @@ -16,7 +19,21 @@ func TestHandleCreateACOCreds(t *testing.T) { mockProvider := &auth.MockProvider{} mockProvider.On("FindAndCreateACOCredentials", data.ACOID, data.IPs).Return("creds\nstring", nil) - s3Path, err := handleCreateACOCreds(ctx, data, mockProvider, &mockS3{}, "test-bucket") + cfg, err := config.LoadDefaultConfig(context.TODO(), + config.WithRegion("us-east-1"), + ) assert.Nil(t, err) - assert.Equal(t, s3Path, "{\n\n}") + client := s3.NewFromConfig(cfg, func(o *s3.Options) { + o.UsePathStyle = true + }) + + bucketInput := &s3.CreateBucketInput{ + Bucket: aws.String("test-bucket"), + } + _, err = client.CreateBucket(t.Context(), bucketInput) + assert.Nil(t, err) + + s3Path, err := handleCreateACOCreds(ctx, data, mockProvider, client, "test-bucket") + assert.Nil(t, err) + assert.Equal(t, s3Path, "test-bucket/TEST1234-creds") } diff --git a/docker-compose.test.yml b/docker-compose.test.yml index 5be5ce41b..996abb5fc 100644 --- a/docker-compose.test.yml +++ b/docker-compose.test.yml @@ -13,8 +13,9 @@ services: # Set default values for Localstack to work - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-foobar} - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-foobar} + - 
AWS_DEFAULT_REGION=us-east-1 - AWS_ENDPOINT_URL=http://localstack:4566 - - LOCAL_STACK_ENDPOINT=${LOCAL_STACK_ENDPOINT:-http://localstack:4566} + # - LOCAL_STACK_ENDPOINT=${LOCAL_STACK_ENDPOINT:-http://localstack:4566} - BFD_S3_ENDPOINT=${BFD_S3_ENDPOINT:-http://localstack:4566} - ENV=local - DATABASE_URL=postgresql://postgres:toor@db-unit-test:5432/bcda_test?sslmode=disable @@ -41,15 +42,13 @@ services: localstack: image: localstack/localstack:latest environment: - - AWS_DEFAULT_REGION=us-east-1 - - AWS_ENDPOINT_URL=http://localstack:4566 - - GATEWAY_LISTEN=0.0.0.0:4566 + # - AWS_ENDPOINT_URL=http://localstack:4566 + # - GATEWAY_LISTEN=0.0.0.0:4566 - SERVICES=s3,ssm,sts,iam - DEBUG=1 ports: - # - "4566-4583:4566-4583" - - "127.0.0.1:4566:4566" # LocalStack Gateway - - "127.0.0.1:4510-4559:4510-4559" + - "127.0.0.1:4566:4566" # LocalStack Gateway + - "127.0.0.1:4510-4559:4510-4559" # external services port range volumes: - "./.localstack_volume:/var/lib/localstack" - "/var/run/docker.sock:/var/run/docker.sock" diff --git a/go.mod b/go.mod index 1229d159a..3ece4c539 100644 --- a/go.mod +++ b/go.mod @@ -41,10 +41,10 @@ require ( require ( github.com/aws/aws-sdk-go-v2 v1.39.3 - github.com/aws/aws-sdk-go-v2/config v1.31.13 - github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.33 - github.com/aws/aws-sdk-go-v2/service/s3 v1.27.11 - github.com/aws/aws-sdk-go-v2/service/ssm v1.66.0 + github.com/aws/aws-sdk-go-v2/config v1.31.14 + github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.19.13 + github.com/aws/aws-sdk-go-v2/service/s3 v1.88.6 + github.com/aws/aws-sdk-go-v2/service/ssm v1.66.1 github.com/ccoveille/go-safecast v1.6.1 github.com/pashagolub/pgxmock/v4 v4.5.0 github.com/riverqueue/river v0.20.2 @@ -58,20 +58,20 @@ require ( dario.cat/mergo v1.0.1 // indirect github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect github.com/Microsoft/go-winio v0.6.2 // indirect - github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.8 // indirect - 
github.com/aws/aws-sdk-go-v2/credentials v1.18.17 // indirect + github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.2 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.18.18 // indirect github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.10 // indirect github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.10 // indirect github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.10 // indirect github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect - github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.14 // indirect + github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.10 // indirect github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.2 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.18 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.1 // indirect github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.10 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.17 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.10 // indirect github.com/aws/aws-sdk-go-v2/service/sso v1.29.7 // indirect github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.2 // indirect - github.com/aws/aws-sdk-go-v2/service/sts v1.38.7 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.38.8 // indirect github.com/aws/smithy-go v1.23.1 // indirect github.com/containerd/errdefs v1.0.0 // indirect github.com/containerd/errdefs/pkg v0.3.0 // indirect diff --git a/go.sum b/go.sum index 6b9a4ff7e..6ccdb8af5 100644 --- a/go.sum +++ b/go.sum @@ -116,57 +116,44 @@ github.com/aws/aws-lambda-go v1.49.0/go.mod h1:dpMpZgvWx5vuQJfBt0zqBha60q7Dd7Rfg github.com/aws/aws-sdk-go v1.28.8/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go v1.55.8 h1:JRmEUbU52aJQZ2AjX4q4Wu7t4uZjOu71uyNmaWlUkJQ= github.com/aws/aws-sdk-go v1.55.8/go.mod h1:ZkViS9AqA6otK+JBBNH2++sx1sgxrPKcSzPPvQkUtXk= -github.com/aws/aws-sdk-go-v2 v1.16.16/go.mod 
h1:SwiyXi/1zTUZ6KIAmLK5V5ll8SiURNUYOqTerZPaF9k= github.com/aws/aws-sdk-go-v2 v1.39.3 h1:h7xSsanJ4EQJXG5iuW4UqgP7qBopLpj84mpkNx3wPjM= github.com/aws/aws-sdk-go-v2 v1.39.3/go.mod h1:yWSxrnioGUZ4WVv9TgMrNUeLV3PFESn/v+6T/Su8gnM= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.8 h1:tcFliCWne+zOuUfKNRn8JdFBuWPDuISDH08wD2ULkhk= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.8/go.mod h1:JTnlBSot91steJeti4ryyu/tLd4Sk84O5W22L7O2EQU= -github.com/aws/aws-sdk-go-v2/config v1.17.7/go.mod h1:dN2gja/QXxFF15hQreyrqYhLBaQo1d9ZKe/v/uplQoI= -github.com/aws/aws-sdk-go-v2/config v1.31.13 h1:wcqQB3B0PgRPUF5ZE/QL1JVOyB0mbPevHFoAMpemR9k= -github.com/aws/aws-sdk-go-v2/config v1.31.13/go.mod h1:ySB5D5ybwqGbT6c3GszZ+u+3KvrlYCUQNo62+hkKOFk= -github.com/aws/aws-sdk-go-v2/credentials v1.12.20/go.mod h1:UKY5HyIux08bbNA7Blv4PcXQ8cTkGh7ghHMFklaviR4= -github.com/aws/aws-sdk-go-v2/credentials v1.18.17 h1:skpEwzN/+H8cdrrtT8y+rvWJGiWWv0DeNAe+4VTf+Vs= -github.com/aws/aws-sdk-go-v2/credentials v1.18.17/go.mod h1:Ed+nXsaYa5uBINovJhcAWkALvXw2ZLk36opcuiSZfJM= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.12.17/go.mod h1:yIkQcCDYNsZfXpd5UX2Cy+sWA1jPgIhGTw9cOBzfVnQ= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.2 h1:t9yYsydLYNBk9cJ73rgPhPWqOh/52fcWDQB5b1JsKSY= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.2/go.mod h1:IusfVNTmiSN3t4rhxWFaBAqn+mcNdwKtPcV16eYdgko= +github.com/aws/aws-sdk-go-v2/config v1.31.14 h1:kj/KpDqvt0UqcEL3WOvCykE9QUpBb6b23hQdnXe+elo= +github.com/aws/aws-sdk-go-v2/config v1.31.14/go.mod h1:X5PaY6QCzViihn/ru7VxnIamcJQrG9NSeTxuSKm2YtU= +github.com/aws/aws-sdk-go-v2/credentials v1.18.18 h1:5AfxTvDN0AJoA7rg/yEc0sHhl6/B9fZ+NtiQuOjWGQM= +github.com/aws/aws-sdk-go-v2/credentials v1.18.18/go.mod h1:m9mE1mJ1s7zI6rrt7V3RQU2SCgUbNaphlfqEksLp+Fs= github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.10 h1:UuGVOX48oP4vgQ36oiKmW9RuSeT8jlgQgBFQD+HUiHY= github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.10/go.mod 
h1:vM/Ini41PzvudT4YkQyE/+WiQJiQ6jzeDyU8pQKwCac= -github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.33 h1:fAoVmNGhir6BR+RU0/EI+6+D7abM+MCwWf8v4ip5jNI= -github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.33/go.mod h1:84XgODVR8uRhmOnUkKGUZKqIMxmjmLOR8Uyp7G/TPwc= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.23/go.mod h1:2DFxAQ9pfIRy0imBCJv+vZ2X6RKxves6fbnEuSry6b4= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.19.13 h1:9XV2TkOvCs6Fis10b4scQbv/eDPhklhU/65GikPxXAA= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.19.13/go.mod h1:X5gq64GsjuOIJRIUzR3x3Du96zUF+U1if3Qw/qNx1k8= github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.10 h1:mj/bdWleWEh81DtpdHKkw41IrS+r3uw1J/VQtbwYYp8= github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.10/go.mod h1:7+oEMxAZWP8gZCyjcm9VicI0M61Sx4DJtcGfKYv2yKQ= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.17/go.mod h1:pRwaTYCJemADaqCbUAxltMoHKata7hmB5PjEXeu0kfg= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.10 h1:wh+/mn57yhUrFtLIxyFPh2RgxgQz/u+Yrf7hiHGHqKY= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.10/go.mod h1:7zirD+ryp5gitJJ2m1BBux56ai8RIRDykXZrJSp540w= -github.com/aws/aws-sdk-go-v2/internal/ini v1.3.24/go.mod h1:jULHjqqjDlbyTa7pfM7WICATnOv+iOhjletM3N0Xbu8= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 h1:WKuaxf++XKWlHWu9ECbMlha8WOEGm0OUEZqm4K/Gcfk= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4/go.mod h1:ZWy7j6v1vWGmPReu0iSGvRiise4YI5SkR3OHKTZ6Wuc= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.14 h1:ZSIPAkAsCCjYrhqfw2+lNzWDzxzHXEckFkTePL5RSWQ= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.14/go.mod h1:AyGgqiKv9ECM6IZeNQtdT8NnMvUb3/2wokeq2Fgryto= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.9/go.mod h1:a9j48l6yL5XINLHLcOKInjdvknN+vWqPBxqeIDw7ktw= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.10 h1:FHw90xCTsofzk6vjU808TSuDtDfOOKPNdz5Weyc3tUI= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.10/go.mod 
h1:n8jdIE/8F3UYkg8O4IGkQpn2qUmapg/1K1yl29/uf/c= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.2 h1:xtuxji5CS0JknaXoACOunXOYOQzgfTvGAc9s2QdCJA4= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.2/go.mod h1:zxwi0DIR0rcRcgdbl7E2MSOvxDyyXGBlScvBkARFaLQ= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.18 h1:BBYoNQt2kUZUUK4bIPsKrCcjVPUMNsgQpNAwhznK/zo= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.18/go.mod h1:NS55eQ4YixUJPTC+INxi2/jCqe1y2Uw3rnh9wEOVJxY= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.17/go.mod h1:4nYOrY41Lrbk2170/BGkcJKBhws9Pfn8MG3aGqjjeFI= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.1 h1:ne+eepnDB2Wh5lHKzELgEncIqeVlQ1rSF9fEa4r5I+A= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.1/go.mod h1:u0Jkg0L+dcG1ozUq21uFElmpbmjBnhHR5DELHIme4wg= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.10 h1:DRND0dkCKtJzCj4Xl4OpVbXZgfttY5q712H9Zj7qc/0= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.10/go.mod h1:tGGNmJKOTernmR2+VJ0fCzQRurcPZj9ut60Zu5Fi6us= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.17 h1:HfVVR1vItaG6le+Bpw6P4midjBDMKnjMyZnw9MXYUcE= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.17/go.mod h1:YqMdV+gEKCQ59NrB7rzrJdALeBIsYiVi8Inj3+KcqHI= -github.com/aws/aws-sdk-go-v2/service/s3 v1.27.11 h1:3/gm/JTX9bX8CpzTgIlrtYpB3EVBDxyg/GY/QdcIEZw= -github.com/aws/aws-sdk-go-v2/service/s3 v1.27.11/go.mod h1:fmgDANqTUCxciViKl9hb/zD5LFbvPINFRgWhDbR+vZo= -github.com/aws/aws-sdk-go-v2/service/ssm v1.66.0 h1:45VTQmiADmmooUvYSCiMvoDCln0FBxAEfmj7HDFTa3w= -github.com/aws/aws-sdk-go-v2/service/ssm v1.66.0/go.mod h1:L5XWT5tckol5yKkYc8O2+jZBZgF/tFzVQ5QE00PJUjU= -github.com/aws/aws-sdk-go-v2/service/sso v1.11.23/go.mod h1:/w0eg9IhFGjGyyncHIQrXtU8wvNsTJOP0R6PPj0wf80= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.10 h1:DA+Hl5adieRyFvE7pCvBWm3VOZTRexGVkXw33SUqNoY= 
+github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.10/go.mod h1:L+A89dH3/gr8L4ecrdzuXUYd1znoko6myzndVGZx/DA= +github.com/aws/aws-sdk-go-v2/service/s3 v1.88.6 h1:Hcb4yllr4GTOHC/BKjEklxWhciWMHIqzeCI9oYf1OIk= +github.com/aws/aws-sdk-go-v2/service/s3 v1.88.6/go.mod h1:N/iojY+8bW3MYol9NUMuKimpSbPEur75cuI1SmtonFM= +github.com/aws/aws-sdk-go-v2/service/ssm v1.66.1 h1:snE061FIWFZv4v8c9iJZ3Cvyu21wYDWy9oNmNHCd+Fc= +github.com/aws/aws-sdk-go-v2/service/ssm v1.66.1/go.mod h1:L5XWT5tckol5yKkYc8O2+jZBZgF/tFzVQ5QE00PJUjU= github.com/aws/aws-sdk-go-v2/service/sso v1.29.7 h1:fspVFg6qMx0svs40YgRmE7LZXh9VRZvTT35PfdQR6FM= github.com/aws/aws-sdk-go-v2/service/sso v1.29.7/go.mod h1:BQTKL3uMECaLaUV3Zc2L4Qybv8C6BIXjuu1dOPyxTQs= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.13.5/go.mod h1:csZuQY65DAdFBt1oIjO5hhBR49kQqop4+lcuCjf2arA= github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.2 h1:scVnW+NLXasGOhy7HhkdT9AGb6kjgW7fJ5xYkUaqHs0= github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.2/go.mod h1:FRNCY3zTEWZXBKm2h5UBUPvCVDOecTad9KhynDyGBc0= -github.com/aws/aws-sdk-go-v2/service/sts v1.16.19/go.mod h1:h4J3oPZQbxLhzGnk+j9dfYHi5qIOVJ5kczZd658/ydM= -github.com/aws/aws-sdk-go-v2/service/sts v1.38.7 h1:VEO5dqFkMsl8QZ2yHsFDJAIZLAkEbaYDB+xdKi0Feic= -github.com/aws/aws-sdk-go-v2/service/sts v1.38.7/go.mod h1:L1xxV3zAdB+qVrVW/pBIrIAnHFWHo6FBbFe4xOGsG/o= -github.com/aws/smithy-go v1.13.3/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA= +github.com/aws/aws-sdk-go-v2/service/sts v1.38.8 h1:xSL4IV19pKDASL2fjWXRfTGmZddPiPPZNPpbv6uZQZY= +github.com/aws/aws-sdk-go-v2/service/sts v1.38.8/go.mod h1:L1xxV3zAdB+qVrVW/pBIrIAnHFWHo6FBbFe4xOGsG/o= github.com/aws/smithy-go v1.23.1 h1:sLvcH6dfAFwGkHLZ7dGiYF7aK6mg4CgKA/iDKjLDt9M= github.com/aws/smithy-go v1.23.1/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0= github.com/bazelbuild/rules_go v0.24.5 h1:8S5qilf+Il5/TPMZQIOfzQDAZtkhB4jALiAnwRuisDM= @@ -435,7 +422,6 @@ github.com/google/go-cmp v0.5.4/go.mod 
h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= -github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/go-github/v27 v27.0.4/go.mod h1:/0Gr8pJ55COkmv+S/yPKCczSkUPIM/LnFyubufRNIS0= diff --git a/go.work.sum b/go.work.sum index 457e2fc05..f44f4f68e 100644 --- a/go.work.sum +++ b/go.work.sum @@ -473,31 +473,32 @@ github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a h1:idn718Q4B6AGu/h5Sxe66HYVdqdGu2l9Iebqhi/AEoA= github.com/aws/aws-sdk-go-v2 v1.16.16 h1:M1fj4FE2lB4NzRb9Y0xdWsn2P0+2UHVxwKyOa4YJNjk= -github.com/aws/aws-sdk-go-v2 v1.16.16/go.mod h1:SwiyXi/1zTUZ6KIAmLK5V5ll8SiURNUYOqTerZPaF9k= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.8 h1:tcFliCWne+zOuUfKNRn8JdFBuWPDuISDH08wD2ULkhk= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.8/go.mod h1:JTnlBSot91steJeti4ryyu/tLd4Sk84O5W22L7O2EQU= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.2 h1:t9yYsydLYNBk9cJ73rgPhPWqOh/52fcWDQB5b1JsKSY= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.2/go.mod h1:IusfVNTmiSN3t4rhxWFaBAqn+mcNdwKtPcV16eYdgko= +github.com/aws/aws-sdk-go-v2/config v1.31.14 h1:kj/KpDqvt0UqcEL3WOvCykE9QUpBb6b23hQdnXe+elo= +github.com/aws/aws-sdk-go-v2/config v1.31.14/go.mod h1:X5PaY6QCzViihn/ru7VxnIamcJQrG9NSeTxuSKm2YtU= github.com/aws/aws-sdk-go-v2/credentials v1.12.20 
h1:9+ZhlDY7N9dPnUmf7CDfW9In4sW5Ff3bh7oy4DzS1IE= -github.com/aws/aws-sdk-go-v2/credentials v1.12.20/go.mod h1:UKY5HyIux08bbNA7Blv4PcXQ8cTkGh7ghHMFklaviR4= -github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.33 h1:fAoVmNGhir6BR+RU0/EI+6+D7abM+MCwWf8v4ip5jNI= -github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.33/go.mod h1:84XgODVR8uRhmOnUkKGUZKqIMxmjmLOR8Uyp7G/TPwc= +github.com/aws/aws-sdk-go-v2/credentials v1.18.18 h1:5AfxTvDN0AJoA7rg/yEc0sHhl6/B9fZ+NtiQuOjWGQM= +github.com/aws/aws-sdk-go-v2/credentials v1.18.18/go.mod h1:m9mE1mJ1s7zI6rrt7V3RQU2SCgUbNaphlfqEksLp+Fs= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.19.13 h1:9XV2TkOvCs6Fis10b4scQbv/eDPhklhU/65GikPxXAA= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.19.13/go.mod h1:X5gq64GsjuOIJRIUzR3x3Du96zUF+U1if3Qw/qNx1k8= github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.23 h1:s4g/wnzMf+qepSNgTvaQQHNxyMLKSawNhKCPNy++2xY= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.23/go.mod h1:2DFxAQ9pfIRy0imBCJv+vZ2X6RKxves6fbnEuSry6b4= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.17 h1:/K482T5A3623WJgWT8w1yRAFK4RzGzEl7y39yhtn9eA= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.17/go.mod h1:pRwaTYCJemADaqCbUAxltMoHKata7hmB5PjEXeu0kfg= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.14 h1:ZSIPAkAsCCjYrhqfw2+lNzWDzxzHXEckFkTePL5RSWQ= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.14/go.mod h1:AyGgqiKv9ECM6IZeNQtdT8NnMvUb3/2wokeq2Fgryto= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.10 h1:FHw90xCTsofzk6vjU808TSuDtDfOOKPNdz5Weyc3tUI= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.10/go.mod h1:n8jdIE/8F3UYkg8O4IGkQpn2qUmapg/1K1yl29/uf/c= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.9 h1:Lh1AShsuIJTwMkoxVCAYPJgNG5H+eN6SmoUn8nOZ5wE= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.9/go.mod h1:a9j48l6yL5XINLHLcOKInjdvknN+vWqPBxqeIDw7ktw= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.18 
h1:BBYoNQt2kUZUUK4bIPsKrCcjVPUMNsgQpNAwhznK/zo= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.18/go.mod h1:NS55eQ4YixUJPTC+INxi2/jCqe1y2Uw3rnh9wEOVJxY= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.1 h1:ne+eepnDB2Wh5lHKzELgEncIqeVlQ1rSF9fEa4r5I+A= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.1/go.mod h1:u0Jkg0L+dcG1ozUq21uFElmpbmjBnhHR5DELHIme4wg= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.17 h1:Jrd/oMh0PKQc6+BowB+pLEwLIgaQF29eYbe7E1Av9Ug= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.17/go.mod h1:4nYOrY41Lrbk2170/BGkcJKBhws9Pfn8MG3aGqjjeFI= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.17 h1:HfVVR1vItaG6le+Bpw6P4midjBDMKnjMyZnw9MXYUcE= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.17/go.mod h1:YqMdV+gEKCQ59NrB7rzrJdALeBIsYiVi8Inj3+KcqHI= -github.com/aws/aws-sdk-go-v2/service/s3 v1.27.11 h1:3/gm/JTX9bX8CpzTgIlrtYpB3EVBDxyg/GY/QdcIEZw= -github.com/aws/aws-sdk-go-v2/service/s3 v1.27.11/go.mod h1:fmgDANqTUCxciViKl9hb/zD5LFbvPINFRgWhDbR+vZo= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.10 h1:DA+Hl5adieRyFvE7pCvBWm3VOZTRexGVkXw33SUqNoY= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.10/go.mod h1:L+A89dH3/gr8L4ecrdzuXUYd1znoko6myzndVGZx/DA= +github.com/aws/aws-sdk-go-v2/service/s3 v1.88.6 h1:Hcb4yllr4GTOHC/BKjEklxWhciWMHIqzeCI9oYf1OIk= +github.com/aws/aws-sdk-go-v2/service/s3 v1.88.6/go.mod h1:N/iojY+8bW3MYol9NUMuKimpSbPEur75cuI1SmtonFM= +github.com/aws/aws-sdk-go-v2/service/ssm v1.66.1 h1:snE061FIWFZv4v8c9iJZ3Cvyu21wYDWy9oNmNHCd+Fc= +github.com/aws/aws-sdk-go-v2/service/ssm v1.66.1/go.mod h1:L5XWT5tckol5yKkYc8O2+jZBZgF/tFzVQ5QE00PJUjU= +github.com/aws/aws-sdk-go-v2/service/sts v1.38.8 h1:xSL4IV19pKDASL2fjWXRfTGmZddPiPPZNPpbv6uZQZY= +github.com/aws/aws-sdk-go-v2/service/sts v1.38.8/go.mod h1:L1xxV3zAdB+qVrVW/pBIrIAnHFWHo6FBbFe4xOGsG/o= github.com/aws/smithy-go v1.13.3 h1:l7LYxGuzK6/K+NzJ2mC+VvLUbae0sL3bXU//04MkmnA= 
-github.com/aws/smithy-go v1.13.3/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/bgentry/que-go v1.0.1 h1:M/cEPOU66X/YewE1rD1IdHjfM79jClXl0BHNWiF+l44= github.com/bgentry/que-go v1.0.1/go.mod h1:brRADvWrR9WUT5E5NxTHwLhPmuhKHWbrRudSun7H6ZU= @@ -689,7 +690,6 @@ github.com/google/btree v1.0.0 h1:0udJVsspx3VBr5FwtLhQQtuAsVc79tTq0ocGIPAU6qo= github.com/google/flatbuffers v1.11.0 h1:O7CEyB8Cb3/DmtxODGtLHcEvpr81Jm5qLg/hsHnxA2A= github.com/google/flatbuffers v2.0.8+incompatible h1:ivUb1cGomAB101ZM1T0nOiWz9pSrTMoa9+EiY7igmkM= github.com/google/flatbuffers v2.0.8+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= -github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-github v17.0.0+incompatible h1:N0LgJ1j65A7kfXrZnUDaYCs/Sf4rEjNlfyDHW9dolSY= github.com/google/go-github/v27 v27.0.4 h1:N/EEqsvJLgqTbepTiMBz+12KhwLovv6YvwpRezd+4Fg= diff --git a/optout/s3_file_handler.go b/optout/s3_file_handler.go index 7b11a9f61..eb33573aa 100644 --- a/optout/s3_file_handler.go +++ b/optout/s3_file_handler.go @@ -120,7 +120,7 @@ func (handler *S3FileHandler) OpenFileBytes(filePath string) ([]byte, error) { return nil, err } - buff := make([]byte, int(output.ContentLength)) + buff := make([]byte, int(*output.ContentLength)) w := manager.NewWriteAtBuffer(buff) downloader := manager.NewDownloader(handler.Client) From c60c784e38288547092f515c2084d61125195751 Mon Sep 17 00:00:00 2001 From: carlpartridge Date: Tue, 28 Oct 2025 12:32:34 -0400 Subject: [PATCH 06/16] Tests passing --- bcda/aws/metrics.go | 49 +++ bcda/aws/metrics_test.go | 21 ++ bcda/aws/parameters_test.go | 50 +-- bcda/aws/session.go | 67 ---- bcda/aws/session_test.go | 40 --- bcda/bcdacli/cli.go | 284 ++++++++-------- bcda/bcdacli/cli_test.go | 304 +++++++++--------- bcda/cclf/cclf.go 
| 11 +- bcda/cclf/cclf_test.go | 3 +- bcda/cclf/csv.go | 10 +- bcda/cclf/csv_test.go | 3 +- bcda/cclf/local_fileprocessor.go | 8 +- bcda/cclf/local_fileprocessor_test.go | 6 +- bcda/cclf/s3_fileprocessor.go | 20 +- bcda/cclf/s3_fileprocessor_test.go | 45 +-- bcda/constants/constants.go | 2 + bcda/database/config.go | 48 --- bcda/database/config_test.go | 76 +++-- bcda/insights/insights.go | 68 ---- bcda/insights/insights_test.go | 83 ----- bcda/lambda/admin_aco_deny/db_test.go | 2 +- bcda/lambda/admin_create_aco/db_test.go | 2 +- bcda/lambda/admin_create_aco/main_test.go | 2 +- bcda/lambda/admin_create_aco_creds/aws.go | 44 --- .../lambda/admin_create_aco_creds/aws_test.go | 13 +- .../admin_create_aco_creds/main_test.go | 15 +- bcda/lambda/cclf/main.go | 104 +++--- bcda/lambda/cclf/main_test.go | 27 +- bcda/lambda/optout/main.go | 53 +-- bcda/lambda/optout/main_test.go | 55 ++-- bcda/metrics/metrics.go | 55 ---- bcda/suppression/suppression.go | 23 +- bcda/suppression/suppression_s3_test.go | 48 +-- bcda/suppression/suppression_test.go | 50 +-- bcda/testUtils/utils.go | 282 ++++++---------- bcdaworker/queueing/manager.go | 34 ++ bcdaworker/queueing/river.go | 32 -- bcdaworker/queueing/river_test.go | 2 +- bcdaworker/queueing/worker_process_job.go | 2 +- conf/config.go | 1 - docker-compose.test.yml | 2 +- go.mod | 7 +- go.sum | 14 +- ops/services/root.tofu.tf | 2 +- optout/file_handler.go | 7 +- optout/local_file_handler.go | 14 +- optout/s3_file_handler.go | 47 +-- 47 files changed, 850 insertions(+), 1287 deletions(-) create mode 100644 bcda/aws/metrics.go create mode 100644 bcda/aws/metrics_test.go delete mode 100644 bcda/aws/session.go delete mode 100644 bcda/aws/session_test.go delete mode 100644 bcda/insights/insights.go delete mode 100644 bcda/insights/insights_test.go delete mode 100644 bcda/metrics/metrics.go diff --git a/bcda/aws/metrics.go b/bcda/aws/metrics.go new file mode 100644 index 000000000..2f85b3b09 --- /dev/null +++ b/bcda/aws/metrics.go @@ -0,0 
+1,49 @@ +package bcdaaws + +import ( + "context" + + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/service/cloudwatch" + "github.com/aws/aws-sdk-go-v2/service/cloudwatch/types" +) + +type Sampler struct { + Ctx context.Context + Namespace string + Unit string + Service *cloudwatch.Client +} + +func PutMetricSample( + ctx context.Context, + namespace string, + name string, + unit types.StandardUnit, + value float64, + dimensions []types.Dimension, +) error { + data := types.MetricDatum{ + Dimensions: dimensions, + MetricName: &name, + Unit: unit, + Value: &value, + } + + input := &cloudwatch.PutMetricDataInput{ + MetricData: []types.MetricDatum{data}, + Namespace: aws.String(namespace), + } + + cfg, err := config.LoadDefaultConfig(ctx) + if err != nil { + return err + } + + client := cloudwatch.NewFromConfig(cfg) + + _, err = client.PutMetricData(ctx, input) + + return err +} diff --git a/bcda/aws/metrics_test.go b/bcda/aws/metrics_test.go new file mode 100644 index 000000000..8b0ea3708 --- /dev/null +++ b/bcda/aws/metrics_test.go @@ -0,0 +1,21 @@ +package bcdaaws + +import ( + "testing" + + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/cloudwatch/types" + "github.com/stretchr/testify/assert" +) + +func TestPutMetricSample(t *testing.T) { + err := PutMetricSample( + t.Context(), + "Namespace", + "Name", + "Count", + float64(32), + []types.Dimension{{Name: aws.String("name"), Value: aws.String("value")}}, + ) + assert.Nil(t, err) +} diff --git a/bcda/aws/parameters_test.go b/bcda/aws/parameters_test.go index 205082da4..c6b05531d 100644 --- a/bcda/aws/parameters_test.go +++ b/bcda/aws/parameters_test.go @@ -1,34 +1,18 @@ package bcdaaws import ( - "context" "errors" "testing" - "github.com/aws/aws-sdk-go-v2/aws" - "github.com/aws/aws-sdk-go-v2/config" - "github.com/aws/aws-sdk-go-v2/service/ssm" + "github.com/CMSgov/bcda-app/bcda/testUtils" "github.com/stretchr/testify/assert" ) 
func TestGetParameter(t *testing.T) { key1 := "key1" val1 := "val1" - cfg, err := config.LoadDefaultConfig(context.TODO(), - config.WithRegion("us-east-1"), - ) - assert.Nil(t, err) - client := ssm.NewFromConfig(cfg) - - paramInput := ssm.PutParameterInput{ - Name: &key1, - Value: &val1, - Overwrite: aws.Bool(true), - Type: "String", - } - - _, err = client.PutParameter(t.Context(), ¶mInput) - assert.Nil(t, err) + cleanupParam1 := testUtils.SetParameter(t, key1, val1) + defer cleanupParam1() tests := []struct { desc string @@ -50,6 +34,7 @@ func TestGetParameter(t *testing.T) { }, } + client := testUtils.TestSSMClient(t, testUtils.TestAWSConfig(t)) for _, test := range tests { value, err := GetParameter(t.Context(), client, test.keyname) assert.Equal(t, test.expectedValue, value) @@ -67,29 +52,11 @@ func TestGetParameters(t *testing.T) { key2 := "key2" val1 := "val1" val2 := "val2" - cfg, err := config.LoadDefaultConfig(context.TODO(), - config.WithRegion("us-east-1"), - ) - assert.Nil(t, err) - client := ssm.NewFromConfig(cfg) - paramInput1 := ssm.PutParameterInput{ - Name: &key1, - Value: &val1, - Overwrite: aws.Bool(true), - Type: "String", - } - _, err = client.PutParameter(t.Context(), ¶mInput1) - assert.Nil(t, err) - - paramInput2 := ssm.PutParameterInput{ - Name: &key2, - Value: &val2, - Overwrite: aws.Bool(true), - Type: "String", - } - _, err = client.PutParameter(t.Context(), ¶mInput2) - assert.Nil(t, err) + cleanupParam1 := testUtils.SetParameter(t, key1, val1) + cleanupParam2 := testUtils.SetParameter(t, key2, val2) + defer cleanupParam1() + defer cleanupParam2() tests := []struct { desc string @@ -111,6 +78,7 @@ func TestGetParameters(t *testing.T) { }, } + client := testUtils.TestSSMClient(t, testUtils.TestAWSConfig(t)) for _, test := range tests { vals, err := GetParameters(t.Context(), client, test.keys) diff --git a/bcda/aws/session.go b/bcda/aws/session.go deleted file mode 100644 index f7b17c0d7..000000000 --- a/bcda/aws/session.go +++ /dev/null 
@@ -1,67 +0,0 @@ -package bcdaaws - -// var s3Region = "us-east-1" -// var DefaultRegion = "us-east-1" - -// Makes these easily mockable for testing -// var newSession = session.NewSession - -// NewSession -// Returns a new AWS session using the given roleArn -// func NewSession(roleArn, endpoint string) (*session.Session, error) { -// sess := session.Must(session.NewSession()) -// var err error - -// config := aws.Config{ -// Region: aws.String(s3Region), -// } - -// if endpoint != "" { -// config.S3ForcePathStyle = aws.Bool(true) -// config.Endpoint = &endpoint -// } - -// if roleArn != "" { -// config.Credentials = stscreds.NewCredentials( -// sess, -// roleArn, -// ) -// } - -// sess, err = newSession(&config) - -// if err != nil { -// return nil, err -// } - -// return sess, nil -// } - -// func NewAWSConfig(ctx context.Context, roleArn, endpoint string) (config.Config, error) { -// return config.LoadDefaultConfig(ctx, config.WithRegion(DefaultRegion)) -// // var cfg config.Config - -// // // used to override for localstack -// // if endpoint != "" { -// // cfg = config.LoadDefaultConfig( -// // ctx, -// // config.WithRegion(DefaultRegion)), - -// // // cfg.S3ForcePathStyle = true -// // // cfg.Endpoint = &endpoint -// // } else if roleArn != "" { - -// // client := stscreds.NewFromConfig(cfg) -// // appCreds := stscreds.NewAssumeRoleProvider(client, roleArn) -// // creds, err := appCreds.Retrieve(ctx) -// // if err != nil { -// // return config.Config{}, err -// // } - -// // cfg.Credentials = creds -// // } else { - -// // } - -// // return cfg, nil -// } diff --git a/bcda/aws/session_test.go b/bcda/aws/session_test.go deleted file mode 100644 index 9b7abbb1b..000000000 --- a/bcda/aws/session_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package bcdaaws - -// import ( -// "errors" -// "testing" - -// "github.com/aws/aws-sdk-go/aws" -// "github.com/aws/aws-sdk-go/aws/session" -// "github.com/stretchr/testify/assert" -// ) - -// func TestNewSession(t *testing.T) { 
-// tests := []struct { -// expect *session.Session -// err error -// newSession func(cfgs ...*aws.Config) (*session.Session, error) -// }{ -// { -// // Happy path -// expect: nil, -// err: nil, -// newSession: func(cfgs ...*aws.Config) (*session.Session, error) { return nil, nil }, -// }, -// { -// // Error returned from NewSession -// expect: nil, -// err: errors.New("error"), -// newSession: func(cfgs ...*aws.Config) (*session.Session, error) { return nil, errors.New("error") }, -// }, -// } - -// for _, test := range tests { -// newSession = test.newSession - -// s, err := NewSession("fake_arn", "fake_endpoint") - -// assert.Equal(t, test.expect, s) -// assert.Equal(t, test.err, err) -// } -// } diff --git a/bcda/bcdacli/cli.go b/bcda/bcdacli/cli.go index 5db181853..a311483bd 100644 --- a/bcda/bcdacli/cli.go +++ b/bcda/bcdacli/cli.go @@ -17,12 +17,9 @@ import ( "syscall" "time" - "github.com/ccoveille/go-safecast" - "github.com/CMSgov/bcda-app/bcda/auth" authclient "github.com/CMSgov/bcda-app/bcda/auth/client" - "github.com/CMSgov/bcda-app/bcda/cclf" cclfUtils "github.com/CMSgov/bcda-app/bcda/cclf/utils" "github.com/CMSgov/bcda-app/bcda/constants" "github.com/CMSgov/bcda-app/bcda/database" @@ -30,12 +27,9 @@ import ( "github.com/CMSgov/bcda-app/bcda/models/postgres" "github.com/CMSgov/bcda-app/bcda/service" "github.com/CMSgov/bcda-app/bcda/servicemux" - "github.com/CMSgov/bcda-app/bcda/suppression" "github.com/CMSgov/bcda-app/bcda/utils" "github.com/CMSgov/bcda-app/bcda/web" - "github.com/CMSgov/bcda-app/conf" "github.com/CMSgov/bcda-app/log" - "github.com/CMSgov/bcda-app/optout" pgxv5Pool "github.com/jackc/pgx/v5/pgxpool" "github.com/pborman/uuid" @@ -72,11 +66,12 @@ func setUpApp() *cli.App { log.API.Info(fmt.Sprintf(`Auth is made possible by %T`, provider)) return nil } - var hours, err = safecast.ToUint(utils.GetEnvInt("FILE_ARCHIVE_THRESHOLD_HR", 72)) - if err != nil { - fmt.Println("Error converting FILE_ARCHIVE_THRESHOLD_HR to uint", err) - } - var 
acoName, acoCMSID, acoID, accessToken, acoSize, filePath, fileSource, s3Endpoint, assumeRoleArn, environment, groupID, groupName, ips, fileType string + // var hours, err = safecast.ToUint(utils.GetEnvInt("FILE_ARCHIVE_THRESHOLD_HR", 72)) + // if err != nil { + // fmt.Println("Error converting FILE_ARCHIVE_THRESHOLD_HR to uint", err) + // } + // var acoName, acoCMSID, acoID, accessToken, acoSize, filePath, fileSource, s3Endpoint, assumeRoleArn, environment, groupID, groupName, ips, fileType string + var acoName, acoCMSID, acoID, accessToken, acoSize, filePath, environment, groupID, groupName, ips, fileType string var httpPort, httpsPort int app.Commands = []cli.Command{ { @@ -281,72 +276,80 @@ func setUpApp() *cli.App { return nil }, }, - { - Name: "import-cclf-directory", - Category: constants.CliDataImpCategory, - Usage: "Import all CCLF files from the specified directory", - Flags: []cli.Flag{ - cli.StringFlag{ - Name: "directory", - Usage: "Directory where CCLF files are located", - Destination: &filePath, - }, - cli.StringFlag{ - Name: "filesource", - Usage: "Source of files. Must be one of 'local', 's3'. 
Defaults to 'local'", - Destination: &fileSource, - }, - cli.StringFlag{ - Name: "s3endpoint", - Usage: "Custom S3 endpoint", - Destination: &s3Endpoint, - }, - cli.StringFlag{ - Name: "assume-role-arn", - Usage: "Optional IAM role ARN to assume for S3", - Destination: &assumeRoleArn, - }, - }, - Action: func(c *cli.Context) error { - ignoreSignals() - var file_processor cclf.CclfFileProcessor - - if fileSource == "s3" { - file_processor = &cclf.S3FileProcessor{ - Handler: optout.S3FileHandler{ - Logger: log.API, - Endpoint: s3Endpoint, - AssumeRoleArn: assumeRoleArn, - }, - } - } else { - file_processor = &cclf.LocalFileProcessor{ - Handler: optout.LocalFileHandler{ - Logger: log.API, - PendingDeletionDir: conf.GetEnv("PENDING_DELETION_DIR"), - FileArchiveThresholdHr: hours, - }, - } - } - - importer := cclf.NewCclfImporter(log.API, file_processor, pool) - - success, failure, skipped, err := importer.ImportCCLFDirectory(filePath) - if err != nil { - log.API.Error("error returned from ImportCCLFDirectory: ", err) - return err - - } - if failure > 0 || skipped > 0 { - log.API.Errorf("Successfully imported %v files. Failed to import %v files. Skipped %v files. See logs for more details.", success, failure, skipped, err) - err = errors.New("files skipped or failed import. See logs for more details") - return err - } - log.API.Infof("Completed CCLF import. Successfully imported %v files. Failed to import %v files. Skipped %v files. See logs for more details.", success, failure, skipped) - fmt.Fprintf(app.Writer, "Completed CCLF import. Successfully imported %v files. Failed to import %v files. Skipped %v files. See logs for more details.", success, failure, skipped) - return err - }, - }, + // I dont believe we import-cclf-directory anymore. We now use the cclf lambda. 
+ // { + // Name: "import-cclf-directory", + // Category: constants.CliDataImpCategory, + // Usage: "Import all CCLF files from the specified directory", + // Flags: []cli.Flag{ + // cli.StringFlag{ + // Name: "directory", + // Usage: "Directory where CCLF files are located", + // Destination: &filePath, + // }, + // cli.StringFlag{ + // Name: "filesource", + // Usage: "Source of files. Must be one of 'local', 's3'. Defaults to 'local'", + // Destination: &fileSource, + // }, + // cli.StringFlag{ + // Name: "s3endpoint", + // Usage: "Custom S3 endpoint", + // Destination: &s3Endpoint, + // }, + // cli.StringFlag{ + // Name: "assume-role-arn", + // Usage: "Optional IAM role ARN to assume for S3", + // Destination: &assumeRoleArn, + // }, + // }, + // Action: func(c *cli.Context) error { + // ignoreSignals() + // var file_processor cclf.CclfFileProcessor + + // cfg, err := config.LoadDefaultConfig(context.Background()) + // if err != nil { + // log.API.Error("error loading default config: ", err) + // return err + // } + // s3Client := s3.NewFromConfig(cfg) + + // if fileSource == "s3" { + // file_processor = &cclf.S3FileProcessor{ + // Handler: optout.S3FileHandler{ + // Client: s3Client, + // Logger: log.API, + // Endpoint: s3Endpoint, + // AssumeRoleArn: assumeRoleArn, + // }, + // } + // } else { + // file_processor = &cclf.LocalFileProcessor{ + // Handler: optout.LocalFileHandler{ + // Logger: log.API, + // PendingDeletionDir: conf.GetEnv("PENDING_DELETION_DIR"), + // FileArchiveThresholdHr: hours, + // }, + // } + // } + + // importer := cclf.NewCclfImporter(log.API, file_processor, pool) + + // success, failure, skipped, err := importer.ImportCCLFDirectory(filePath) + // if err != nil { + // log.API.Error("error returned from ImportCCLFDirectory: ", err) + // return err + // } + // if failure > 0 || skipped > 0 { + // log.API.Errorf("Successfully imported %v files. Failed to import %v files. Skipped %v files. 
See logs for more details.", success, failure, skipped, err) + // err = errors.New("files skipped or failed import. See logs for more details") + // return err + // } + // log.API.Infof("Completed CCLF import. Successfully imported %v files. Failed to import %v files. Skipped %v files. See logs for more details.", success, failure, skipped) + // fmt.Fprintf(app.Writer, "Completed CCLF import. Successfully imported %v files. Failed to import %v files. Skipped %v files. See logs for more details.", success, failure, skipped) + // return err + // }, + // }, { Name: "generate-cclf-runout-files", Category: constants.CliDataImpCategory, @@ -368,65 +371,74 @@ func setUpApp() *cli.App { return nil }, }, - { - Name: "import-suppression-directory", - Category: constants.CliDataImpCategory, - Usage: "Import all 1-800-MEDICARE suppression data files from the specified directory", - Flags: []cli.Flag{ - cli.StringFlag{ - Name: "directory", - Usage: "Directory where suppression files are located", - Destination: &filePath, - }, - cli.StringFlag{ - Name: "filesource", - Usage: "Source of files. Must be one of 'local', 's3'. 
Defaults to 'local'", - Destination: &fileSource, - }, - cli.StringFlag{ - Name: "s3endpoint", - Usage: "Custom S3 endpoint", - Destination: &s3Endpoint, - }, - cli.StringFlag{ - Name: "assume-role-arn", - Usage: "Optional IAM role ARN to assume for S3", - Destination: &assumeRoleArn, - }, - }, - Action: func(c *cli.Context) error { - ignoreSignals() - r := postgres.NewRepository(db) - - var file_handler optout.OptOutFileHandler - - if fileSource == "s3" { - file_handler = &optout.S3FileHandler{ - Logger: log.API, - Endpoint: s3Endpoint, - AssumeRoleArn: assumeRoleArn, - } - } else { - file_handler = &optout.LocalFileHandler{ - Logger: log.API, - PendingDeletionDir: conf.GetEnv("PENDING_DELETION_DIR"), - FileArchiveThresholdHr: hours, - } - } - - importer := suppression.OptOutImporter{ - FileHandler: file_handler, - Saver: &suppression.BCDASaver{ - Repo: r, - }, - Logger: log.API, - ImportStatusInterval: utils.GetEnvInt("SUPPRESS_IMPORT_STATUS_RECORDS_INTERVAL", 1000), - } - s, f, sk, err := importer.ImportSuppressionDirectory(filePath) - fmt.Fprintf(app.Writer, "Completed 1-800-MEDICARE suppression data import.\nFiles imported: %v\nFiles failed: %v\nFiles skipped: %v\n", s, f, sk) - return err - }, - }, + // I dont believe we import-suppression-directory anymore. We now use the cclf lambda. + // { + // Name: "import-suppression-directory", + // Category: constants.CliDataImpCategory, + // Usage: "Import all 1-800-MEDICARE suppression data files from the specified directory", + // Flags: []cli.Flag{ + // cli.StringFlag{ + // Name: "directory", + // Usage: "Directory where suppression files are located", + // Destination: &filePath, + // }, + // cli.StringFlag{ + // Name: "filesource", + // Usage: "Source of files. Must be one of 'local', 's3'. 
Defaults to 'local'", + // Destination: &fileSource, + // }, + // cli.StringFlag{ + // Name: "s3endpoint", + // Usage: "Custom S3 endpoint", + // Destination: &s3Endpoint, + // }, + // cli.StringFlag{ + // Name: "assume-role-arn", + // Usage: "Optional IAM role ARN to assume for S3", + // Destination: &assumeRoleArn, + // }, + // }, + // Action: func(c *cli.Context) error { + // ignoreSignals() + // r := postgres.NewRepository(db) + + // var file_handler optout.OptOutFileHandler + // cfg, err := config.LoadDefaultConfig(context.Background()) + // if err != nil { + // log.API.Error("error loading default config: ", err) + // return err + // } + // s3Client := s3.NewFromConfig(cfg) + + // if fileSource == "s3" { + // file_handler = &optout.S3FileHandler{ + // Client: s3Client, + // Logger: log.API, + // Endpoint: s3Endpoint, + // AssumeRoleArn: assumeRoleArn, + // } + // } else { + // file_handler = &optout.LocalFileHandler{ + // Logger: log.API, + // PendingDeletionDir: conf.GetEnv("PENDING_DELETION_DIR"), + // FileArchiveThresholdHr: hours, + // } + // } + + // importer := suppression.OptOutImporter{ + // FileHandler: file_handler, + // Saver: &suppression.BCDASaver{ + // Repo: r, + // }, + // Logger: log.API, + // ImportStatusInterval: utils.GetEnvInt("SUPPRESS_IMPORT_STATUS_RECORDS_INTERVAL", 1000), + // } + // ctx := context.Background() + // s, f, sk, err := importer.ImportSuppressionDirectory(ctx, filePath) + // fmt.Fprintf(app.Writer, "Completed 1-800-MEDICARE suppression data import.\nFiles imported: %v\nFiles failed: %v\nFiles skipped: %v\n", s, f, sk) + // return err + // }, + // }, { Name: "import-synthetic-cclf-package", Category: constants.CliDataImpCategory, diff --git a/bcda/bcdacli/cli_test.go b/bcda/bcdacli/cli_test.go index 22c35cb61..1c49202d0 100644 --- a/bcda/bcdacli/cli_test.go +++ b/bcda/bcdacli/cli_test.go @@ -27,8 +27,6 @@ import ( "github.com/CMSgov/bcda-app/bcda/testUtils" "github.com/CMSgov/bcda-app/bcda/utils" 
"github.com/CMSgov/bcda-app/conf" - logger "github.com/CMSgov/bcda-app/log" - "github.com/pkg/errors" "github.com/go-chi/chi/v5" "github.com/pborman/uuid" @@ -381,157 +379,157 @@ func (s *CLITestSuite) TestCreateACO() { buf.Reset() } -func (s *CLITestSuite) TestImportCCLFDirectory() { - oldVal := conf.GetEnv("LOG_TO_STD_OUT") - conf.SetEnv(s.T(), "LOG_TO_STD_OUT", "false") - s.T().Cleanup(func() { conf.SetEnv(s.T(), "LOG_TO_STD_OUT", oldVal) }) - - targetACO := "A0002" - assert := assert.New(s.T()) - - type test struct { - path string - err error - expectedLogs []string - } - - tests := []test{ - { - path: "../../shared_files/cclf/archives/valid/", - err: errors.New("files skipped or failed import. See logs for more details"), - expectedLogs: []string{"Successfully imported 6 files.", "Failed to import 0 files.", "Skipped 0 files."}, - }, - { - path: "../../shared_files/cclf/archives/invalid_bcd/", - err: errors.New("failed to import 1 files"), - expectedLogs: []string{"missing CCLF0 or CCLF8 file in zip"}, - }, - { - path: "../../shared_files/cclf/archives/skip/", - err: errors.New("files failed to import or no files were imported. 
See logs for more details."), - expectedLogs: []string{"Successfully imported 0 files.", "Failed to import 0 files.", "Skipped 0 files."}, - }, - } - - for _, tc := range tests { - postgrestest.DeleteCCLFFilesByCMSID(s.T(), s.db, targetACO) - defer postgrestest.DeleteCCLFFilesByCMSID(s.T(), s.db, targetACO) - path, cleanup := testUtils.CopyToTemporaryDirectory(s.T(), tc.path) - defer cleanup() - - // reset global logs to clean up any previous log entries - logger.SetupLoggers() - - args := []string{"bcda", "import-cclf-directory", constants.DirectoryArg, path} - err := s.testApp.Run(args) - if tc.err == nil { - assert.Nil(err) - } - - var failed bool - content, err := os.ReadFile(os.Getenv("BCDA_ERROR_LOG")) - assert.Nil(err) - - // go through each expected log and make sure it exists in all logs - for _, expectedLog := range tc.expectedLogs { - if !strings.Contains(string(content), expectedLog) { - failed = true - } - } - - assert.False(failed) - } -} - -func (s *CLITestSuite) TestImportSuppressionDirectoryFromLocal() { - assert := assert.New(s.T()) - - buf := new(bytes.Buffer) - s.testApp.Writer = buf - - path, cleanup := testUtils.CopyToTemporaryDirectory(s.T(), "../../shared_files/synthetic1800MedicareFiles/test2/") - defer cleanup() - - args := []string{"bcda", constants.ImportSupDir, constants.DirectoryArg, path} - err := s.testApp.Run(args) - assert.Nil(err) - assert.Contains(buf.String(), constants.CompleteMedSupDataImp) - assert.Contains(buf.String(), "Files imported: 2") - assert.Contains(buf.String(), "Files failed: 0") - assert.Contains(buf.String(), "Files skipped: 0") - - fs := postgrestest.GetSuppressionFileByName(s.T(), s.db, - "T#EFT.ON.ACO.NGD1800.DPRF.D181120.T1000010", - "T#EFT.ON.ACO.NGD1800.DPRF.D190816.T0241391") - - assert.Len(fs, 2) - for _, f := range fs { - postgrestest.DeleteSuppressionFileByID(s.T(), s.db, f.ID) - } -} - -func (s *CLITestSuite) TestImportSuppressionDirectoryFromS3() { - assert := assert.New(s.T()) - - buf := 
new(bytes.Buffer) - s.testApp.Writer = buf - - path, cleanup := testUtils.CopyToS3(s.T(), "../../shared_files/synthetic1800MedicareFiles/test2/") - defer cleanup() - - args := []string{"bcda", constants.ImportSupDir, constants.DirectoryArg, path, constants.FileSourceArg, "s3", constants.S3EndpointArg, conf.GetEnv("BFD_S3_ENDPOINT")} - err := s.testApp.Run(args) - assert.Nil(err) - assert.Contains(buf.String(), constants.CompleteMedSupDataImp) - assert.Contains(buf.String(), "Files imported: 2") - assert.Contains(buf.String(), "Files failed: 0") - assert.Contains(buf.String(), "Files skipped: 0") - - fs := postgrestest.GetSuppressionFileByName(s.T(), s.db, - "T#EFT.ON.ACO.NGD1800.DPRF.D181120.T1000010", - "T#EFT.ON.ACO.NGD1800.DPRF.D190816.T0241391") - - assert.Len(fs, 2) - for _, f := range fs { - postgrestest.DeleteSuppressionFileByID(s.T(), s.db, f.ID) - } -} - -func (s *CLITestSuite) TestImportSuppressionDirectory_Skipped() { - assert := assert.New(s.T()) - - buf := new(bytes.Buffer) - s.testApp.Writer = buf - - path, cleanup := testUtils.CopyToTemporaryDirectory(s.T(), "../../shared_files/suppressionfile_BadFileNames/") - defer cleanup() - - args := []string{"bcda", constants.ImportSupDir, constants.DirectoryArg, path} - err := s.testApp.Run(args) - assert.Nil(err) - assert.Contains(buf.String(), constants.CompleteMedSupDataImp) - assert.Contains(buf.String(), "Files imported: 0") - assert.Contains(buf.String(), "Files failed: 0") - assert.Contains(buf.String(), "Files skipped: 2") -} - -func (s *CLITestSuite) TestImportSuppressionDirectory_Failed() { - assert := assert.New(s.T()) - - buf := new(bytes.Buffer) - s.testApp.Writer = buf - - path, cleanup := testUtils.CopyToTemporaryDirectory(s.T(), "../../shared_files/suppressionfile_BadHeader/") - defer cleanup() - - args := []string{"bcda", constants.ImportSupDir, constants.DirectoryArg, path} - err := s.testApp.Run(args) - assert.EqualError(err, "one or more suppression files failed to import correctly") - 
assert.Contains(buf.String(), constants.CompleteMedSupDataImp) - assert.Contains(buf.String(), "Files imported: 0") - assert.Contains(buf.String(), "Files failed: 1") - assert.Contains(buf.String(), "Files skipped: 0") -} +// func (s *CLITestSuite) TestImportCCLFDirectory() { +// oldVal := conf.GetEnv("LOG_TO_STD_OUT") +// conf.SetEnv(s.T(), "LOG_TO_STD_OUT", "false") +// s.T().Cleanup(func() { conf.SetEnv(s.T(), "LOG_TO_STD_OUT", oldVal) }) + +// targetACO := "A0002" +// assert := assert.New(s.T()) + +// type test struct { +// path string +// err error +// expectedLogs []string +// } + +// tests := []test{ +// { +// path: "../../shared_files/cclf/archives/valid/", +// err: errors.New("files skipped or failed import. See logs for more details"), +// expectedLogs: []string{"Successfully imported 6 files.", "Failed to import 0 files.", "Skipped 0 files."}, +// }, +// { +// path: "../../shared_files/cclf/archives/invalid_bcd/", +// err: errors.New("failed to import 1 files"), +// expectedLogs: []string{"missing CCLF0 or CCLF8 file in zip"}, +// }, +// { +// path: "../../shared_files/cclf/archives/skip/", +// err: errors.New("files failed to import or no files were imported. 
See logs for more details."), +// expectedLogs: []string{"Successfully imported 0 files.", "Failed to import 0 files.", "Skipped 0 files."}, +// }, +// } + +// for _, tc := range tests { +// postgrestest.DeleteCCLFFilesByCMSID(s.T(), s.db, targetACO) +// defer postgrestest.DeleteCCLFFilesByCMSID(s.T(), s.db, targetACO) +// path, cleanup := testUtils.CopyToTemporaryDirectory(s.T(), tc.path) +// defer cleanup() + +// // reset global logs to clean up any previous log entries +// logger.SetupLoggers() + +// args := []string{"bcda", "import-cclf-directory", constants.DirectoryArg, path} +// err := s.testApp.Run(args) +// if tc.err == nil { +// assert.Nil(err) +// } + +// var failed bool +// content, err := os.ReadFile(os.Getenv("BCDA_ERROR_LOG")) +// assert.Nil(err) + +// // go through each expected log and make sure it exists in all logs +// for _, expectedLog := range tc.expectedLogs { +// if !strings.Contains(string(content), expectedLog) { +// failed = true +// } +// } + +// assert.False(failed) +// } +// } + +// func (s *CLITestSuite) TestImportSuppressionDirectoryFromLocal() { +// assert := assert.New(s.T()) + +// buf := new(bytes.Buffer) +// s.testApp.Writer = buf + +// path, cleanup := testUtils.CopyToTemporaryDirectory(s.T(), "../../shared_files/synthetic1800MedicareFiles/test2/") +// defer cleanup() + +// args := []string{"bcda", constants.ImportSupDir, constants.DirectoryArg, path} +// err := s.testApp.Run(args) +// assert.Nil(err) +// assert.Contains(buf.String(), constants.CompleteMedSupDataImp) +// assert.Contains(buf.String(), "Files imported: 2") +// assert.Contains(buf.String(), "Files failed: 0") +// assert.Contains(buf.String(), "Files skipped: 0") + +// fs := postgrestest.GetSuppressionFileByName(s.T(), s.db, +// "T#EFT.ON.ACO.NGD1800.DPRF.D181120.T1000010", +// "T#EFT.ON.ACO.NGD1800.DPRF.D190816.T0241391") + +// assert.Len(fs, 2) +// for _, f := range fs { +// postgrestest.DeleteSuppressionFileByID(s.T(), s.db, f.ID) +// } +// } + +// func (s 
*CLITestSuite) TestImportSuppressionDirectoryFromS3() { +// assert := assert.New(s.T()) + +// buf := new(bytes.Buffer) +// s.testApp.Writer = buf + +// path, cleanup := testUtils.CopyToS3(s.T(), "../../shared_files/synthetic1800MedicareFiles/test2/") +// defer cleanup() + +// args := []string{"bcda", constants.ImportSupDir, constants.DirectoryArg, path, constants.FileSourceArg, "s3", constants.S3EndpointArg, conf.GetEnv("BFD_S3_ENDPOINT")} +// err := s.testApp.Run(args) +// assert.Nil(err) +// assert.Contains(buf.String(), constants.CompleteMedSupDataImp) +// assert.Contains(buf.String(), "Files imported: 2") +// assert.Contains(buf.String(), "Files failed: 0") +// assert.Contains(buf.String(), "Files skipped: 0") + +// fs := postgrestest.GetSuppressionFileByName(s.T(), s.db, +// "T#EFT.ON.ACO.NGD1800.DPRF.D181120.T1000010", +// "T#EFT.ON.ACO.NGD1800.DPRF.D190816.T0241391") + +// assert.Len(fs, 2) +// for _, f := range fs { +// postgrestest.DeleteSuppressionFileByID(s.T(), s.db, f.ID) +// } +// } + +// func (s *CLITestSuite) TestImportSuppressionDirectory_Skipped() { +// assert := assert.New(s.T()) + +// buf := new(bytes.Buffer) +// s.testApp.Writer = buf + +// path, cleanup := testUtils.CopyToTemporaryDirectory(s.T(), "../../shared_files/suppressionfile_BadFileNames/") +// defer cleanup() + +// args := []string{"bcda", constants.ImportSupDir, constants.DirectoryArg, path} +// err := s.testApp.Run(args) +// assert.Nil(err) +// assert.Contains(buf.String(), constants.CompleteMedSupDataImp) +// assert.Contains(buf.String(), "Files imported: 0") +// assert.Contains(buf.String(), "Files failed: 0") +// assert.Contains(buf.String(), "Files skipped: 2") +// } + +// func (s *CLITestSuite) TestImportSuppressionDirectory_Failed() { +// assert := assert.New(s.T()) + +// buf := new(bytes.Buffer) +// s.testApp.Writer = buf + +// path, cleanup := testUtils.CopyToTemporaryDirectory(s.T(), "../../shared_files/suppressionfile_BadHeader/") +// defer cleanup() + +// args := 
[]string{"bcda", constants.ImportSupDir, constants.DirectoryArg, path} +// err := s.testApp.Run(args) +// assert.EqualError(err, "one or more suppression files failed to import correctly") +// assert.Contains(buf.String(), constants.CompleteMedSupDataImp) +// assert.Contains(buf.String(), "Files imported: 0") +// assert.Contains(buf.String(), "Files failed: 1") +// assert.Contains(buf.String(), "Files skipped: 0") +// } func (s *CLITestSuite) TestDenylistACO() { denylistedCMSID := testUtils.RandomHexID()[0:4] diff --git a/bcda/cclf/cclf.go b/bcda/cclf/cclf.go index bb52dfe52..12431524f 100644 --- a/bcda/cclf/cclf.go +++ b/bcda/cclf/cclf.go @@ -52,11 +52,11 @@ type cclfFileValidator struct { // Manages the interaction of CCLF files from a given source type CclfFileProcessor interface { // Load a list of valid CCLF files to be imported - LoadCclfFiles(path string) (cclfList map[string][]*cclfZipMetadata, skipped int, failed int, err error) + LoadCclfFiles(ctx context.Context, path string) (cclfList map[string][]*cclfZipMetadata, skipped int, failed int, err error) // Clean up CCLF files after failed or successful import runs CleanUpCCLF(ctx context.Context, cclfMap map[string][]*cclfZipMetadata) (deletedCount int, err error) // Open a zip archive - OpenZipArchive(name string) (*zip.Reader, func(), error) + OpenZipArchive(ctx context.Context, name string) (*zip.Reader, func(), error) } // Manages the import process for CCLF files from a given source @@ -242,6 +242,7 @@ func (importer CclfImporter) importCCLF8(ctx context.Context, zipMetadata *cclfZ } func (importer CclfImporter) ImportCCLFDirectory(filePath string) (success, failure, skipped int, err error) { + success, failure, skipped = 0, 0, 0 t := metrics.GetTimer() defer t.Close() ctx := metrics.NewContext(context.Background(), t) @@ -249,16 +250,16 @@ func (importer CclfImporter) ImportCCLFDirectory(filePath string) (success, fail // We are not going to create any children from this parent so we can // safely 
ignored the returned context. _, c := metrics.NewParent(ctx, "ImportCCLFDirectory#sortCCLFArchives") - cclfMap, skipped, failure, err := importer.fileProcessor.LoadCclfFiles(filePath) + cclfMap, skipped, failure, err := importer.fileProcessor.LoadCclfFiles(ctx, filePath) c() if err != nil { - return 0, 0, 0, err + return success, failure, skipped, err } if len(cclfMap) == 0 { importer.logger.Info("Did not find any CCLF files in directory -- returning safely.") - return 0, failure, skipped, err + return success, failure, skipped, err } for acoID := range cclfMap { diff --git a/bcda/cclf/cclf_test.go b/bcda/cclf/cclf_test.go index 18dc6f2da..742f0891e 100644 --- a/bcda/cclf/cclf_test.go +++ b/bcda/cclf/cclf_test.go @@ -376,7 +376,8 @@ func createTemporaryCCLF8ZipFile(t *testing.T, data string) (fileName, cclfName } func buildZipMetadata(t *testing.T, processor CclfFileProcessor, cmsID, zipName, cclf0Name, cclf8Name string, fileType models.CCLFFileType) (*cclfZipMetadata, func()) { - zipReader, zipCloser, err := processor.OpenZipArchive(zipName) + ctx := context.Background() + zipReader, zipCloser, err := processor.OpenZipArchive(ctx, zipName) assert.Nil(t, err) metadata := cclfZipMetadata{ diff --git a/bcda/cclf/csv.go b/bcda/cclf/csv.go index aa8b7305e..7c4b3ff6d 100644 --- a/bcda/cclf/csv.go +++ b/bcda/cclf/csv.go @@ -25,9 +25,9 @@ import ( // This interface has two implementations; one for ingesting and testing locally, and one for ingesting in s3. type CSVFileProcessor interface { // Fetch the csv attribution file to be imported. - LoadCSV(path string) (*bytes.Reader, func(), error) + LoadCSV(ctx context.Context, path string) (*bytes.Reader, func(), error) // Remove csv attribution file that was successfully imported. 
- CleanUpCSV(file csvFile) (err error) + CleanUpCSV(ctx context.Context, file csvFile) (err error) } type csvFile struct { @@ -55,7 +55,7 @@ type CSVImporter struct { PgxPool *pgxv5Pool.Pool } -func (importer CSVImporter) ImportCSV(filepath string) error { +func (importer CSVImporter) ImportCSV(ctx context.Context, filepath string) error { file := csvFile{filepath: filepath} @@ -72,7 +72,7 @@ func (importer CSVImporter) ImportCSV(filepath string) error { } file.metadata = metadata - data, _, err := importer.FileProcessor.LoadCSV(filepath) + data, _, err := importer.FileProcessor.LoadCSV(ctx, filepath) if err != nil { if errors.Is(err, &ers.AttributionFileMismatchedEnv{}) { importer.Logger.WithFields(logrus.Fields{"file": filepath}).Info(err) @@ -89,7 +89,7 @@ func (importer CSVImporter) ImportCSV(filepath string) error { return err } - err = importer.FileProcessor.CleanUpCSV(file) + err = importer.FileProcessor.CleanUpCSV(ctx, file) if err != nil { return err } diff --git a/bcda/cclf/csv_test.go b/bcda/cclf/csv_test.go index d9e0f8275..e72603e48 100644 --- a/bcda/cclf/csv_test.go +++ b/bcda/cclf/csv_test.go @@ -103,6 +103,7 @@ func TestCSVTestSuite(t *testing.T) { } func (s *CSVTestSuite) TestImportCSV_Integration() { + ctx := context.Background() conf.SetEnv(s.T(), "CCLF_REF_DATE", "181201") tests := []struct { name string @@ -120,7 +121,7 @@ func (s *CSVTestSuite) TestImportCSV_Integration() { for _, test := range tests { s.T().Run(test.name, func(tt *testing.T) { filename := filepath.Clean(test.filepath) - err := s.importer.ImportCSV(test.filepath) + err := s.importer.ImportCSV(ctx, test.filepath) if test.err == nil { assert.Nil(s.T(), err) } else { diff --git a/bcda/cclf/local_fileprocessor.go b/bcda/cclf/local_fileprocessor.go index ae7f269de..02f77fe65 100644 --- a/bcda/cclf/local_fileprocessor.go +++ b/bcda/cclf/local_fileprocessor.go @@ -25,7 +25,7 @@ type LocalFileProcessor struct { Handler optout.LocalFileHandler } -func (processor *LocalFileProcessor) 
LoadCclfFiles(path string) (cclfList map[string][]*cclfZipMetadata, skipped int, failed int, err error) { +func (processor *LocalFileProcessor) LoadCclfFiles(ctx context.Context, path string) (cclfList map[string][]*cclfZipMetadata, skipped int, failed int, err error) { return processCCLFArchives(path) } @@ -269,7 +269,7 @@ func (processor *LocalFileProcessor) CleanUpCCLF(ctx context.Context, cclfMap ma return deletedCount, nil } -func (processor *LocalFileProcessor) OpenZipArchive(filePath string) (*zip.Reader, func(), error) { +func (processor *LocalFileProcessor) OpenZipArchive(ctx context.Context, filePath string) (*zip.Reader, func(), error) { reader, err := zip.OpenReader(filePath) if err != nil { return nil, nil, err @@ -283,7 +283,7 @@ func (processor *LocalFileProcessor) OpenZipArchive(filePath string) (*zip.Reade }, err } -func (processor *LocalFileProcessor) CleanUpCSV(file csvFile) error { +func (processor *LocalFileProcessor) CleanUpCSV(ctx context.Context, file csvFile) error { var err error func() { @@ -325,7 +325,7 @@ func (processor *LocalFileProcessor) CleanUpCSV(file csvFile) error { return err } -func (processor *LocalFileProcessor) LoadCSV(filepath string) (*bytes.Reader, func(), error) { +func (processor *LocalFileProcessor) LoadCSV(ctx context.Context, filepath string) (*bytes.Reader, func(), error) { c := fp.Clean(filepath) if !strings.HasPrefix(filepath, "/tmp") { return nil, nil, fmt.Errorf("invalid path, %s", filepath) diff --git a/bcda/cclf/local_fileprocessor_test.go b/bcda/cclf/local_fileprocessor_test.go index 5014bb9f6..cd76905e0 100644 --- a/bcda/cclf/local_fileprocessor_test.go +++ b/bcda/cclf/local_fileprocessor_test.go @@ -374,6 +374,7 @@ func (s *LocalFileProcessorTestSuite) TestCleanupCCLF() { } func (s *LocalFileProcessorTestSuite) TestLoadCSV() { + ctx := context.Background() tests := []struct { name string file string @@ -386,7 +387,7 @@ func (s *LocalFileProcessorTestSuite) TestLoadCSV() { for _, test := range tests { 
s.T().Run(test.name, func(tt *testing.T) { file := filepath.Join(s.basePath, test.file) - e, _, err := s.csvProcessor.LoadCSV(file) + e, _, err := s.csvProcessor.LoadCSV(ctx, file) if test.err != nil { assert.Nil(s.T(), e) assert.NotNil(s.T(), err) @@ -400,6 +401,7 @@ func (s *LocalFileProcessorTestSuite) TestLoadCSV() { } func (s *LocalFileProcessorTestSuite) TestCleanUpCSV() { + ctx := context.Background() expiredTime, _ := time.Parse(time.RFC3339, constants.TestFileTime) file := csvFile{ metadata: csvFileMetadata{ @@ -441,7 +443,7 @@ func (s *LocalFileProcessorTestSuite) TestCleanUpCSV() { file.metadata.deliveryDate = test.deliverytime file.imported = test.imported file.filepath = filepath.Join(s.basePath, test.filename) - err := s.csvProcessor.CleanUpCSV(file) + err := s.csvProcessor.CleanUpCSV(ctx, file) assert.Nil(s.T(), err) delDir, err := os.ReadDir(conf.GetEnv("PENDING_DELETION_DIR")) if err != nil { diff --git a/bcda/cclf/s3_fileprocessor.go b/bcda/cclf/s3_fileprocessor.go index 867985dae..c6d57bedb 100644 --- a/bcda/cclf/s3_fileprocessor.go +++ b/bcda/cclf/s3_fileprocessor.go @@ -17,10 +17,10 @@ type S3FileProcessor struct { Handler optout.S3FileHandler } -func (processor *S3FileProcessor) LoadCclfFiles(path string) (cclfMap map[string][]*cclfZipMetadata, skipped int, failed int, err error) { +func (processor *S3FileProcessor) LoadCclfFiles(ctx context.Context, path string) (cclfMap map[string][]*cclfZipMetadata, skipped int, failed int, err error) { cclfMap = make(map[string][]*cclfZipMetadata) bucket, prefix := optout.ParseS3Uri(path) - s3Objects, err := processor.Handler.ListFiles(bucket, prefix) + s3Objects, err := processor.Handler.ListFiles(ctx, bucket, prefix) if err != nil { return cclfMap, skipped, failed, err @@ -53,7 +53,7 @@ func (processor *S3FileProcessor) LoadCclfFiles(path string) (cclfMap map[string continue } - zipReader, zipCloser, err := processor.OpenZipArchive(filepath.Join(bucket, *obj.Key)) + zipReader, zipCloser, err := 
processor.OpenZipArchive(ctx, filepath.Join(bucket, *obj.Key)) if err != nil { failed++ @@ -138,7 +138,7 @@ func (processor *S3FileProcessor) CleanUpCCLF(ctx context.Context, cclfMap map[s } processor.Handler.Infof("Cleaning up file %s\n", cclfZipMetadata.filePath) - err := processor.Handler.Delete(cclfZipMetadata.filePath) + err := processor.Handler.Delete(ctx, cclfZipMetadata.filePath) if err != nil { errCount++ @@ -157,8 +157,8 @@ func (processor *S3FileProcessor) CleanUpCCLF(ctx context.Context, cclfMap map[s return deletedCount, nil } -func (processor *S3FileProcessor) OpenZipArchive(filePath string) (*zip.Reader, func(), error) { - byte_arr, err := processor.Handler.OpenFileBytes(filePath) +func (processor *S3FileProcessor) OpenZipArchive(ctx context.Context, filePath string) (*zip.Reader, func(), error) { + byte_arr, err := processor.Handler.OpenFileBytes(ctx, filePath) if err != nil { processor.Handler.Errorf("Failed to download %s\n", filePath) @@ -169,7 +169,7 @@ func (processor *S3FileProcessor) OpenZipArchive(filePath string) (*zip.Reader, return reader, func() {}, err } -func (processor *S3FileProcessor) CleanUpCSV(file csvFile) error { +func (processor *S3FileProcessor) CleanUpCSV(ctx context.Context, file csvFile) error { close := metrics.NewChild(context.Background(), "cleanUpCCLFZip") defer close() @@ -182,7 +182,7 @@ func (processor *S3FileProcessor) CleanUpCSV(file csvFile) error { } processor.Handler.Infof("Cleaning up file %s\n", file.filepath) - err := processor.Handler.Delete(file.filepath) + err := processor.Handler.Delete(ctx, file.filepath) if err != nil { processor.Handler.Logger.Error("Failed to clean up file %s\n", file.filepath) @@ -193,12 +193,12 @@ func (processor *S3FileProcessor) CleanUpCSV(file csvFile) error { return nil } -func (processor *S3FileProcessor) LoadCSV(filepath string) (*bytes.Reader, func(), error) { +func (processor *S3FileProcessor) LoadCSV(ctx context.Context, filepath string) (*bytes.Reader, func(), error) { if 
!optout.IsForCurrentEnv(filepath) { processor.Handler.Infof("Skipping file for different environment: %s", filepath) return nil, nil, &ers.AttributionFileMismatchedEnv{} } - byte_arr, err := processor.Handler.OpenFileBytes(filepath) + byte_arr, err := processor.Handler.OpenFileBytes(ctx, filepath) if err != nil { processor.Handler.Errorf("Failed to download %s\n", filepath) return nil, nil, err diff --git a/bcda/cclf/s3_fileprocessor_test.go b/bcda/cclf/s3_fileprocessor_test.go index 47b0331eb..07252dc3a 100644 --- a/bcda/cclf/s3_fileprocessor_test.go +++ b/bcda/cclf/s3_fileprocessor_test.go @@ -30,16 +30,19 @@ type S3ProcessorTestSuite struct { func (s *S3ProcessorTestSuite) SetupSuite() { s.cclfRefDate = conf.GetEnv("CCLF_REF_DATE") conf.SetEnv(s.T(), "CCLF_REF_DATE", "181201") // Needed to allow our static CCLF files to continue to be processed + client := testUtils.TestS3Client(s.T(), testUtils.TestAWSConfig(s.T())) s.basePath = "../../shared_files" s.cclfProcessor = &S3FileProcessor{ Handler: optout.S3FileHandler{ + Client: client, Logger: logrus.StandardLogger(), Endpoint: conf.GetEnv("BFD_S3_ENDPOINT"), }, } s.csvProcessor = &S3FileProcessor{ Handler: optout.S3FileHandler{ + Client: client, Logger: logrus.StandardLogger(), Endpoint: conf.GetEnv("BFD_S3_ENDPOINT"), }, @@ -51,6 +54,7 @@ func (s *S3ProcessorTestSuite) TearDownSuite() { } func (s *S3ProcessorTestSuite) TestLoadCclfFiles() { + ctx := context.Background() cmsID := "A0001" tests := []struct { path string @@ -71,7 +75,7 @@ func (s *S3ProcessorTestSuite) TestLoadCclfFiles() { bucketName, cleanup := testUtils.CopyToS3(s.T(), filepath.Join(s.basePath, tt.path)) defer cleanup() - cclfMap, skipped, failure, err := s.cclfProcessor.LoadCclfFiles(filepath.Join(bucketName, tt.path)) + cclfMap, skipped, failure, err := s.cclfProcessor.LoadCclfFiles(ctx, filepath.Join(bucketName, tt.path)) cclfZipFiles := cclfMap[cmsID] assert.NoError(t, err) assert.Equal(t, tt.skipped, skipped) @@ -88,13 +92,13 @@ func (s 
*S3ProcessorTestSuite) TestLoadCclfFiles() { } func (s *S3ProcessorTestSuite) TestLoadCclfFiles_SkipOtherEnvs() { + ctx := context.Background() cleanupEnvVars := testUtils.SetEnvVars(s.T(), []testUtils.EnvVar{{Name: "ENV", Value: "dev"}}) - defer cleanupEnvVars() - + s.T().Cleanup(func() { cleanupEnvVars() }) s3Bucket, cleanupS3 := testUtils.CreateZipsInS3(s.T(), testUtils.ZipInput{ZipName: "blah/not-dev/T.BCD.A0001.ZCY18.D181120.T1000000", CclfNames: []string{"T.BCD.A0001.ZC0Y18.D181120.T1000000", "T.BCD.A0001.ZC8Y18.D181120.T1000000"}}) - defer cleanupS3() + s.T().Cleanup(func() { cleanupS3() }) - cclfMap, skipped, failure, err := s.cclfProcessor.LoadCclfFiles(s3Bucket) + cclfMap, skipped, failure, err := s.cclfProcessor.LoadCclfFiles(ctx, s3Bucket) assert.Nil(s.T(), err) assert.Equal(s.T(), 0, skipped) assert.Equal(s.T(), 0, failure) @@ -102,6 +106,7 @@ func (s *S3ProcessorTestSuite) TestLoadCclfFiles_SkipOtherEnvs() { } func (s *S3ProcessorTestSuite) TestLoadCclfFiles_DuplicateCCLFs() { + ctx := context.Background() bucketName, cleanupS3 := testUtils.CreateZipsInS3(s.T(), // Multiple CCLF0s testUtils.ZipInput{ @@ -116,7 +121,7 @@ func (s *S3ProcessorTestSuite) TestLoadCclfFiles_DuplicateCCLFs() { ) defer cleanupS3() - cclfMap, skipped, failure, err := s.cclfProcessor.LoadCclfFiles(bucketName) + cclfMap, skipped, failure, err := s.cclfProcessor.LoadCclfFiles(ctx, bucketName) assert.Nil(s.T(), err) assert.Equal(s.T(), 0, skipped) assert.Equal(s.T(), 2, failure) @@ -124,6 +129,7 @@ func (s *S3ProcessorTestSuite) TestLoadCclfFiles_DuplicateCCLFs() { } func (s *S3ProcessorTestSuite) TestLoadCclfFiles_SingleFile() { + ctx := context.Background() cmsID := "A0001" tests := []struct { path string @@ -140,7 +146,7 @@ func (s *S3ProcessorTestSuite) TestLoadCclfFiles_SingleFile() { bucketName, cleanup := testUtils.CopyToS3(s.T(), filepath.Join(s.basePath, tt.path)) defer cleanup() - cclfMap, skipped, failure, err := s.cclfProcessor.LoadCclfFiles(filepath.Join(bucketName, 
tt.path, tt.filename)) + cclfMap, skipped, failure, err := s.cclfProcessor.LoadCclfFiles(ctx, filepath.Join(bucketName, tt.path, tt.filename)) cclfZipFiles := cclfMap[cmsID] assert.NoError(t, err) assert.Equal(t, tt.skipped, skipped) @@ -158,8 +164,9 @@ func (s *S3ProcessorTestSuite) TestLoadCclfFiles_SingleFile() { } func (s *S3ProcessorTestSuite) TestLoadCclfFiles_InvalidPath() { - cclfMap, skipped, failure, err := s.cclfProcessor.LoadCclfFiles("foo") - assert.ErrorContains(s.T(), err, "NoSuchBucket: The specified bucket does not exist") + ctx := context.Background() + cclfMap, skipped, failure, err := s.cclfProcessor.LoadCclfFiles(ctx, "foo") + assert.ErrorContains(s.T(), err, "NoSuchBucket") assert.Equal(s.T(), 0, skipped) assert.Equal(s.T(), 0, failure) assert.Empty(s.T(), cclfMap) @@ -170,10 +177,11 @@ func TestS3ProcessorTestSuite(t *testing.T) { } func (s *S3ProcessorTestSuite) TestMultipleFileTypes() { + ctx := context.Background() // Hard code the reference date to ensure we do not reject any CCLF files because they are too old. 
cclfRefDate := conf.GetEnv("CCLF_REF_DATE") conf.SetEnv(s.T(), "CCLF_REF_DATE", "201201") - defer conf.SetEnv(s.T(), "CCLF_REF_DATE", cclfRefDate) + s.T().Cleanup(func() { conf.SetEnv(s.T(), "CCLF_REF_DATE", cclfRefDate) }) // Create various CCLF files that have unique perfYear:fileType bucketName, cleanup := testUtils.CreateZipsInS3(s.T(), @@ -197,10 +205,9 @@ func (s *S3ProcessorTestSuite) TestMultipleFileTypes() { CclfNames: []string{"T.BCD.A9990.ZC0R19.D201113.T0000010", "T.BCD.A9990.ZC8R19.D201113.T0000010"}, }, ) + s.T().Cleanup(func() { cleanup() }) - defer cleanup() - - m, skipped, f, err := s.cclfProcessor.LoadCclfFiles(bucketName) + m, skipped, f, err := s.cclfProcessor.LoadCclfFiles(ctx, bucketName) assert.NoError(s.T(), err) assert.Equal(s.T(), 0, skipped) assert.Equal(s.T(), 0, f) @@ -266,6 +273,7 @@ func (s *S3ProcessorTestSuite) TestCleanupCCLF() { func (s *S3ProcessorTestSuite) TestCleanupCSV() { assert := assert.New(s.T()) + ctx := context.Background() path := "cclf/archives/csv/P.PCPB.M2411.D181120.T1000000" bucketName, cleanup := testUtils.CopyToS3(s.T(), filepath.Join(s.basePath, path)) @@ -287,7 +295,7 @@ func (s *S3ProcessorTestSuite) TestCleanupCSV() { imported: test.imported, filepath: test.filepath, } - err := s.csvProcessor.CleanUpCSV(csv) + err := s.csvProcessor.CleanUpCSV(ctx, csv) assert.Nil(err) }) @@ -297,9 +305,11 @@ func (s *S3ProcessorTestSuite) TestCleanupCSV() { func (s *S3ProcessorTestSuite) TestLoadCSV() { assert := assert.New(s.T()) + ctx := context.Background() path := "cclf/archives/csv/P.PCPB.M2411.D181120.T1000000" bucketName, cleanup := testUtils.CopyToS3(s.T(), filepath.Join(s.basePath, path)) + defer cleanup() tests := []struct { name string @@ -313,7 +323,7 @@ func (s *S3ProcessorTestSuite) TestLoadCSV() { for _, test := range tests { s.T().Run(test.name, func(tt *testing.T) { defer cleanup() - r, _, err := s.csvProcessor.LoadCSV(test.filepath) + r, _, err := s.csvProcessor.LoadCSV(ctx, test.filepath) if test.err == 
nil { assert.Nil(err) assert.NotNil(r) @@ -322,7 +332,6 @@ func (s *S3ProcessorTestSuite) TestLoadCSV() { assert.NotNil(err) assert.Nil(r) } - }) } } @@ -331,7 +340,7 @@ func (s *S3ProcessorTestSuite) TestLoadCSV_InvalidPath() { } func (s *S3ProcessorTestSuite) TestLoadCSV_SkipOtherEnvs() { - + ctx := context.Background() cleanupEnvVars := testUtils.SetEnvVars(s.T(), []testUtils.EnvVar{{Name: "ENV", Value: "dev"}}) defer cleanupEnvVars() @@ -339,7 +348,7 @@ func (s *S3ProcessorTestSuite) TestLoadCSV_SkipOtherEnvs() { bucketName, cleanup := testUtils.CopyToS3(s.T(), filepath.Join(s.basePath, path)) defer cleanup() - _, _, err := s.csvProcessor.LoadCSV(filepath.Join(bucketName, path)) + _, _, err := s.csvProcessor.LoadCSV(ctx, filepath.Join(bucketName, path)) assert.NotNil(s.T(), err) assert.Contains(s.T(), err.Error(), "Skipping import") diff --git a/bcda/constants/constants.go b/bcda/constants/constants.go index eeefc3712..69acc6091 100644 --- a/bcda/constants/constants.go +++ b/bcda/constants/constants.go @@ -54,3 +54,5 @@ const V3Version = "v3" const GetExistingBenes = "GetExistingBenes" const GetNewAndExistingBenes = "GetNewAndExistingBenes" + +const DefaultRegion = "us-east-1" diff --git a/bcda/database/config.go b/bcda/database/config.go index 5feb3dc36..1374eda24 100644 --- a/bcda/database/config.go +++ b/bcda/database/config.go @@ -25,21 +25,6 @@ func LoadConfig() (cfg *Config, err error) { return nil, err } - // if cfg.DatabaseURL == "" { - // // Attempt to load database config from parameter store if ENV var is set. - // // This generally indicates that we are running within our lambda environment. 
- // env := os.Getenv("ENV") - - // if env != "" { - // cfg, err = LoadConfigFromParameterStore( - // fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env)) - - // if err != nil { - // return nil, err - // } - // } - // } - if cfg.DatabaseURL == "" { return nil, errors.New("invalid config, DatabaseURL must be set") } @@ -47,36 +32,3 @@ func LoadConfig() (cfg *Config, err error) { log.API.Info("Successfully loaded configuration for Database.") return cfg, nil } - -// Loads database URL from parameter store instead of from environment variables. -// func LoadConfigFromParameterStore(dbUrlKey string) (cfg *Config, err error) { -// cfg = &Config{} -// if err := conf.Checkout(cfg); err != nil { -// return nil, err -// } - -// // bcdaSession, err := bcdaaws.NewSession("", os.Getenv("LOCAL_STACK_ENDPOINT")) -// // if err != nil { -// // return nil, err -// // } - -// cfg, err := config.LoadDefaultConfig(ctx) -// if err != nil { -// return awsParams{}, err -// } -// ssmClient := ssm.NewFromConfig(cfg) - -// params, err := bcdaaws.GetParameters(ctx, ssmClient, paramNames) -// if err != nil { -// return awsParams{}, err -// } - -// params, err := bcdaaws.GetParameters(bcdaSession, []*string{&dbUrlKey}) -// if err != nil { -// return nil, err -// } - -// cfg.DatabaseURL = params[dbUrlKey] - -// return cfg, nil -// } diff --git a/bcda/database/config_test.go b/bcda/database/config_test.go index 6dff91a8b..0f77540ef 100644 --- a/bcda/database/config_test.go +++ b/bcda/database/config_test.go @@ -1,11 +1,9 @@ package database import ( - "fmt" "testing" "github.com/CMSgov/bcda-app/bcda/testUtils" - "github.com/pborman/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" ) @@ -46,40 +44,40 @@ func (s *DatabaseConfigSuite) TestLoadConfigMissingDatabaseUrl() { assert.Contains(err.Error(), "invalid config, DatabaseURL must be set") } -func (s *DatabaseConfigSuite) TestLoadConfigFromParameterStoreSuccess() { - assert := assert.New(s.T()) - - env := uuid.NewUUID() - 
cleanupEnv := testUtils.SetEnvVars(s.T(), []testUtils.EnvVar{ - {Name: "ENV", Value: env.String()}, - {Name: "DATABASE_URL", Value: ""}, - }) - defer cleanupEnv() - - cleanupParams := testUtils.SetParameters(s.T(), []testUtils.AwsParameter{ - {Name: fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env), Value: "my-super-secure-database-url", Type: "SecureString"}, - }) - defer cleanupParams() - - cfg, err := LoadConfig() - assert.Nil(err) - assert.Equal("my-super-secure-database-url", cfg.DatabaseURL) -} - -func (s *DatabaseConfigSuite) TestLoadConfigFromParameterStoreMissingDatabaseUrl() { - assert := assert.New(s.T()) - - env := uuid.NewUUID() - cleanupEnv := testUtils.SetEnvVars(s.T(), []testUtils.EnvVar{ - {Name: "ENV", Value: env.String()}, - {Name: "DATABASE_URL", Value: ""}, - }) - defer cleanupEnv() - - cleanupParams := testUtils.SetParameters(s.T(), []testUtils.AwsParameter{}) - defer cleanupParams() - - cfg, err := LoadConfig() - assert.Nil(cfg) - assert.Contains(err.Error(), fmt.Sprintf("invalid parameters error: /bcda/%s/api/DATABASE_URL", env)) -} +// func (s *DatabaseConfigSuite) TestLoadConfigFromParameterStoreSuccess() { +// assert := assert.New(s.T()) + +// env := uuid.NewUUID() +// cleanupEnv := testUtils.SetEnvVars(s.T(), []testUtils.EnvVar{ +// {Name: "ENV", Value: env.String()}, +// {Name: "DATABASE_URL", Value: ""}, +// }) +// defer cleanupEnv() + +// cleanupParams := testUtils.SetParameter(s.T(), fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env), "my-super-secure-database-url") +// // {Name: , Value: "my-super-secure-database-url", Type: "SecureString"}, +// // }) +// defer cleanupParams() + +// cfg, err := LoadConfig() +// assert.Nil(err) +// assert.Equal("my-super-secure-database-url", cfg.DatabaseURL) +// } + +// func (s *DatabaseConfigSuite) TestLoadConfigFromParameterStoreMissingDatabaseUrl() { +// assert := assert.New(s.T()) + +// env := uuid.NewUUID() +// cleanupEnv := testUtils.SetEnvVars(s.T(), []testUtils.EnvVar{ +// {Name: "ENV", Value: 
env.String()}, +// {Name: "DATABASE_URL", Value: ""}, +// }) +// defer cleanupEnv() + +// cleanupParams := testUtils.SetParameter(s.T(), "", "") +// defer cleanupParams() + +// cfg, err := LoadConfig() +// assert.Nil(cfg) +// assert.Contains(err.Error(), fmt.Sprintf("invalid parameters error: /bcda/%s/api/DATABASE_URL", env)) +// } diff --git a/bcda/insights/insights.go b/bcda/insights/insights.go deleted file mode 100644 index 78d634927..000000000 --- a/bcda/insights/insights.go +++ /dev/null @@ -1,68 +0,0 @@ -package insights - -import ( - "encoding/json" - "sync" - "time" - - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/aws/session" - "github.com/aws/aws-sdk-go/service/firehose" - "github.com/aws/aws-sdk-go/service/firehose/firehoseiface" - - "github.com/CMSgov/bcda-app/bcda/utils" - "github.com/CMSgov/bcda-app/conf" - "github.com/CMSgov/bcda-app/log" -) - -var instance *firehose.Firehose -var once sync.Once - -func GetFirehose() *firehose.Firehose { - once.Do(func() { - sess := session.Must(session.NewSession()) - instance = firehose.New(sess, aws.NewConfig().WithRegion("us-east-1")) - }) - return instance -} - -type Event struct { - Name string `json:"name"` - Timestamp int64 `json:"timestamp"` - Result string `json:"json_result"` -} - -func PutEvent(svc firehoseiface.FirehoseAPI, name string, event string) { - - if utils.GetEnvBool("BCDA_ENABLE_INSIGHTS_EVENTS", true) { - - targetEnv := conf.GetEnv("DEPLOYMENT_TARGET") - streamName := "bfd-insights-bcda-" + targetEnv + "-event_processor" - - recordInput := &firehose.PutRecordInput{} - recordInput = recordInput.SetDeliveryStreamName(streamName) - - data := Event{ - Name: name, - Timestamp: time.Now().UnixNano() / 1e6, - Result: "{\"event\":\"" + event + "\"}", - } - - b, err := json.Marshal(data) - - if err != nil { - log.API.Error(err) - } - - record := &firehose.Record{Data: b} - recordInput = recordInput.SetRecord(record) - - _, err = svc.PutRecord(recordInput) - - if err != nil { - 
log.API.Error(err) - } - } else { - log.API.Info("Insights is not enabled for the application. No data was sent to BFD.") - } -} diff --git a/bcda/insights/insights_test.go b/bcda/insights/insights_test.go deleted file mode 100644 index 3e45340dc..000000000 --- a/bcda/insights/insights_test.go +++ /dev/null @@ -1,83 +0,0 @@ -package insights - -import ( - "bytes" - "testing" - - "github.com/CMSgov/bcda-app/conf" - "github.com/CMSgov/bcda-app/log" - - "github.com/aws/aws-sdk-go/service/firehose" - "github.com/aws/aws-sdk-go/service/firehose/firehoseiface" - "github.com/sirupsen/logrus" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/suite" -) - -type mockFirehoseClient struct { - firehoseiface.FirehoseAPI -} - -type InsightsTestSuite struct { - suite.Suite - mockSvc *mockFirehoseClient -} - -func (s *InsightsTestSuite) SetupTest() { - s.mockSvc = &mockFirehoseClient{} -} - -func TestInsightsTestSuite(t *testing.T) { - suite.Run(t, new(InsightsTestSuite)) -} - -func (m *mockFirehoseClient) PutRecord(input *firehose.PutRecordInput) (*firehose.PutRecordOutput, error) { - log.API.Infof("Mock called with DeliveryStreamName %s and PutRecordInput: %s", *input.DeliveryStreamName, input.Record.Data) - return nil, nil -} - -func (s *InsightsTestSuite) TestInsightsDisabled() { - origSetting := conf.GetEnv("BCDA_ENABLE_INSIGHTS_EVENTS") - conf.SetEnv(s.T(), "BCDA_ENABLE_INSIGHTS_EVENTS", "false") - originalLog := log.API - - s.T().Cleanup(func() { - conf.SetEnv(s.T(), "BCDA_ENABLE_INSIGHTS_EVENTS", origSetting) - log.API = originalLog - }) - - // Override log.API so we can verify the output - buf := new(bytes.Buffer) - newLog := logrus.New() - newLog.SetOutput(buf) - log.API = newLog - - PutEvent(s.mockSvc, "TestInsightsDisabledName", "TestInsightsDisabledEvent") - assert.Contains(s.T(), buf.String(), "Insights is not enabled for the application. 
No data was sent to BFD.") -} - -func (s *InsightsTestSuite) TestInsightsEnabled() { - - origSetting := conf.GetEnv("BCDA_ENABLE_INSIGHTS_EVENTS") - origEnv := conf.GetEnv("DEPLOYMENT_TARGET") - conf.SetEnv(s.T(), "BCDA_ENABLE_INSIGHTS_EVENTS", "true") - conf.SetEnv(s.T(), "DEPLOYMENT_TARGET", "unit-test") - originalLog := log.API - - s.T().Cleanup(func() { - conf.SetEnv(s.T(), "BCDA_ENABLE_INSIGHTS_EVENTS", origSetting) - conf.SetEnv(s.T(), "DEPLOYMENT_TARGET", origEnv) - log.API = originalLog - }) - - // Override log.API so we can verify the output - buf := new(bytes.Buffer) - newLog := logrus.New() - newLog.SetOutput(buf) - log.API = newLog - - PutEvent(s.mockSvc, "TestInsightsEnabledName", "TestInsightsEnabledEvent") - assert.Contains(s.T(), buf.String(), "TestInsightsEnabledName") - assert.Contains(s.T(), buf.String(), "TestInsightsEnabledEvent") - assert.Contains(s.T(), buf.String(), "bfd-insights-bcda-unit-test-event_processor") -} diff --git a/bcda/lambda/admin_aco_deny/db_test.go b/bcda/lambda/admin_aco_deny/db_test.go index 4cf6a8f67..b64dfbec1 100644 --- a/bcda/lambda/admin_aco_deny/db_test.go +++ b/bcda/lambda/admin_aco_deny/db_test.go @@ -54,7 +54,7 @@ func TestDenyACOsQueryFailure(t *testing.T) { func TestDenyACOs_Integration(t *testing.T) { ctx := context.Background() - params, err := getAWSParams() + params, err := getAWSParams(ctx) assert.Nil(t, err) conn, err := pgx.Connect(ctx, params.DBURL) diff --git a/bcda/lambda/admin_create_aco/db_test.go b/bcda/lambda/admin_create_aco/db_test.go index da7ae8a88..091fd6e50 100644 --- a/bcda/lambda/admin_create_aco/db_test.go +++ b/bcda/lambda/admin_create_aco/db_test.go @@ -25,7 +25,7 @@ type CreateACOTestSuite struct { func (c *CreateACOTestSuite) SetupTest() { c.ctx = context.Background() - params, err := getAWSParams() + params, err := getAWSParams(c.ctx) if err != nil { assert.FailNow(c.T(), "Failed to get AWS Params") } diff --git a/bcda/lambda/admin_create_aco/main_test.go 
b/bcda/lambda/admin_create_aco/main_test.go index d80bd5c4f..f884d15e4 100644 --- a/bcda/lambda/admin_create_aco/main_test.go +++ b/bcda/lambda/admin_create_aco/main_test.go @@ -22,7 +22,7 @@ type HandleCreateACOTestSuite struct { func (c *HandleCreateACOTestSuite) SetupTest() { c.ctx = context.Background() - params, err := getAWSParams() + params, err := getAWSParams(c.ctx) if err != nil { assert.FailNow(c.T(), "Failed to get AWS Params") } diff --git a/bcda/lambda/admin_create_aco_creds/aws.go b/bcda/lambda/admin_create_aco_creds/aws.go index 75f47700d..eba214c14 100644 --- a/bcda/lambda/admin_create_aco_creds/aws.go +++ b/bcda/lambda/admin_create_aco_creds/aws.go @@ -60,50 +60,6 @@ func getAWSParams(ctx context.Context) (awsParams, error) { params[ssasPEMName], params[credsBucketName], }, nil - // output, err := client.GetParameters(ctx, input) - // if err != nil { - // return awsParams{}, err - // } - // slackToken := getParamFromOutput(output, "/slack/token/workflow-alerts") - // ssasURL := getParamFromOutput(output, "/bcda/%s/api/SSAS_URL") - // clientID := getParamFromOutput(output, "/bcda/%s/api/BCDA_SSAS_CLIENT_ID") - // clientSecret := getParamFromOutput(output, "/bcda/%s/api/BCDA_SSAS_SECRET") - // ssasPEM := getParamFromOutput(output, "/bcda/%s/api/BCDA_CA_FILE.pem") - // credsBucket := getParamFromOutput(output, "/bcda/%s/aco_creds_bucket") - - // return awsParams{slackToken, ssasURL, clientID, clientSecret, ssasPEM, credsBucket}, nil - - // slackToken, err := bcdaaws.GetParameter(session, "/slack/token/workflow-alerts") - // if err != nil { - // return awsParams{}, err - // } - - // ssasURL, err := bcdaaws.GetParameter(session, fmt.Sprintf("/bcda/%s/api/SSAS_URL", env)) - // if err != nil { - // return awsParams{}, err - // } - - // clientID, err := bcdaaws.GetParameter(session, fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_CLIENT_ID", env)) - // if err != nil { - // return awsParams{}, err - // } - - // clientSecret, err := bcdaaws.GetParameter(session, 
fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_SECRET", env)) - // if err != nil { - // return awsParams{}, err - // } - - // ssasPEM, err := bcdaaws.GetParameter(session, fmt.Sprintf("/bcda/%s/api/BCDA_CA_FILE.pem", env)) - // if err != nil { - // return awsParams{}, err - // } - - // credsBucket, err := bcdaaws.GetParameter(session, fmt.Sprintf("/bcda/%s/aco_creds_bucket", env)) - // if err != nil { - // return awsParams{}, err - // } - - // return awsParams{slackToken, ssasURL, clientID, clientSecret, ssasPEM, credsBucket}, nil } func setupEnvironment(params awsParams) error { diff --git a/bcda/lambda/admin_create_aco_creds/aws_test.go b/bcda/lambda/admin_create_aco_creds/aws_test.go index cdbfbb0d7..d2be10d35 100644 --- a/bcda/lambda/admin_create_aco_creds/aws_test.go +++ b/bcda/lambda/admin_create_aco_creds/aws_test.go @@ -1,30 +1,23 @@ package main import ( - "context" "os" "testing" + "github.com/CMSgov/bcda-app/bcda/testUtils" "github.com/CMSgov/bcda-app/conf" "github.com/aws/aws-sdk-go-v2/aws" - "github.com/aws/aws-sdk-go-v2/config" "github.com/aws/aws-sdk-go-v2/service/s3" "github.com/stretchr/testify/assert" ) func TestPutObject(t *testing.T) { - cfg, err := config.LoadDefaultConfig(context.TODO(), - config.WithRegion("us-east-1"), - ) - assert.Nil(t, err) - client := s3.NewFromConfig(cfg, func(o *s3.Options) { - o.UsePathStyle = true - }) + client := testUtils.TestS3Client(t, testUtils.TestAWSConfig(t)) bucketInput := &s3.CreateBucketInput{ Bucket: aws.String("test-bucket"), } - _, err = client.CreateBucket(t.Context(), bucketInput) + _, err := client.CreateBucket(t.Context(), bucketInput) assert.Nil(t, err) result, err := putObject(t.Context(), client, "test-filename", "test-creds", "test-bucket") diff --git a/bcda/lambda/admin_create_aco_creds/main_test.go b/bcda/lambda/admin_create_aco_creds/main_test.go index a30b9f30c..e0f21f18d 100644 --- a/bcda/lambda/admin_create_aco_creds/main_test.go +++ b/bcda/lambda/admin_create_aco_creds/main_test.go @@ -5,8 +5,8 @@ 
import ( "testing" "github.com/CMSgov/bcda-app/bcda/auth" + "github.com/CMSgov/bcda-app/bcda/testUtils" "github.com/aws/aws-sdk-go-v2/aws" - "github.com/aws/aws-sdk-go-v2/config" "github.com/aws/aws-sdk-go-v2/service/s3" "github.com/stretchr/testify/assert" ) @@ -19,18 +19,11 @@ func TestHandleCreateACOCreds(t *testing.T) { mockProvider := &auth.MockProvider{} mockProvider.On("FindAndCreateACOCredentials", data.ACOID, data.IPs).Return("creds\nstring", nil) - cfg, err := config.LoadDefaultConfig(context.TODO(), - config.WithRegion("us-east-1"), - ) - assert.Nil(t, err) - client := s3.NewFromConfig(cfg, func(o *s3.Options) { - o.UsePathStyle = true - }) + client := testUtils.TestS3Client(t, testUtils.TestAWSConfig(t)) - bucketInput := &s3.CreateBucketInput{ + _, err := client.CreateBucket(t.Context(), &s3.CreateBucketInput{ Bucket: aws.String("test-bucket"), - } - _, err = client.CreateBucket(t.Context(), bucketInput) + }) assert.Nil(t, err) s3Path, err := handleCreateACOCreds(ctx, data, mockProvider, client, "test-bucket") diff --git a/bcda/lambda/cclf/main.go b/bcda/lambda/cclf/main.go index 765c4db5b..1ea9c3872 100644 --- a/bcda/lambda/cclf/main.go +++ b/bcda/lambda/cclf/main.go @@ -2,7 +2,6 @@ package main import ( "context" - "database/sql" "errors" "fmt" "os" @@ -11,6 +10,7 @@ import ( "github.com/aws/aws-lambda-go/events" "github.com/aws/aws-lambda-go/lambda" + "github.com/jackc/pgx/v5/pgxpool" "github.com/sirupsen/logrus" bcdaaws "github.com/CMSgov/bcda-app/bcda/aws" @@ -26,25 +26,13 @@ import ( ) func main() { - // Localstack is a local-development server that mimics AWS. The endpoint variable - // should only be set in local development to avoid making external calls to a real AWS account. 
- if os.Getenv("LOCAL_STACK_ENDPOINT") != "" { - res, err := handleCclfImport(context.TODO(), database.Connect(), os.Getenv("BFD_BUCKET_ROLE_ARN"), os.Getenv("BFD_S3_IMPORT_PATH")) - if err != nil { - fmt.Printf("Failed to run opt out import: %s\n", err.Error()) - } else { - fmt.Println(res) - } - } else { - lambda.Start(attributionImportHandler) - } + lambda.Start(attributionImportHandler) } func attributionImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (string, error) { env := conf.GetEnv("ENV") appName := conf.GetEnv("APP_NAME") logger := configureLogger(env, appName) - db := database.Connect() s3Event, err := bcdaaws.ParseSQSEvent(sqsEvent) @@ -56,25 +44,28 @@ func attributionImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (st return "", nil } + cfg, err := config.LoadDefaultConfig(ctx) + if err != nil { + logger.Error("Failed to load Default Config") + return "", err + } + ssmClient := ssm.NewFromConfig(cfg) + s3Client := s3.NewFromConfig(cfg) + + // Create pgx pool for bulk operations + pool := database.ConnectPool() + defer pool.Close() + for _, e := range s3Event.Records { if strings.Contains(e.EventName, "ObjectCreated") { - s3AssumeRoleArn, err := loadBfdS3Params(ctx) - if err != nil { - return "", err - } - err = loadBCDAParams() - if err != nil { - return "", err - } - // Send the entire filepath into the CCLF Importer so we are only // importing the one file that was sent in the trigger. 
filepath := fmt.Sprintf("%s/%s", e.S3.Bucket.Name, e.S3.Object.Key) logger.Infof("Reading %s event for file %s", e.EventName, filepath) if cclf.CheckIfAttributionCSVFile(e.S3.Object.Key) { - return handleCSVImport(ctx, db, s3AssumeRoleArn, filepath) + return handleCSVImport(ctx, pool, s3Client, ssmClient, filepath) } else { - return handleCclfImport(ctx, db, s3AssumeRoleArn, filepath) + return handleCclfImport(ctx, pool, s3Client, ssmClient, filepath) } } } @@ -83,29 +74,29 @@ func attributionImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (st return "", nil } -func handleCSVImport(ctx context.Context, db *sql.DB, s3AssumeRoleArn, s3ImportPath string) (string, error) { +func handleCSVImport(ctx context.Context, pool *pgxpool.Pool, s3Client *s3.Client, ssmClient *ssm.Client, s3ImportPath string) (string, error) { env := conf.GetEnv("ENV") appName := conf.GetEnv("APP_NAME") logger := configureLogger(env, appName) logger = logger.WithFields(logrus.Fields{"import_filename": s3ImportPath}) - pool := database.ConnectPool() - defer pool.Close() + err := loadBCDAParams() + if err != nil { + return "", err + } - cfg, err := config.LoadDefaultConfig(ctx) + s3AssumeRoleArn, err := bcdaaws.GetParameter(ctx, ssmClient, fmt.Sprintf("/cclf-import/bcda/%s/bfd-bucket-role-arn", env)) if err != nil { - logger.Error("error loading default config: ", err) + logger.Errorf("error getting param: %+v", err) return "", err } - client := s3.NewFromConfig(cfg) importer := cclf.CSVImporter{ Logger: logger, PgxPool: pool, FileProcessor: &cclf.S3FileProcessor{ Handler: optout.S3FileHandler{ - Ctx: ctx, - Client: client, + Client: s3Client, Logger: logger, Endpoint: os.Getenv("LOCAL_STACK_ENDPOINT"), AssumeRoleArn: s3AssumeRoleArn, @@ -113,7 +104,7 @@ func handleCSVImport(ctx context.Context, db *sql.DB, s3AssumeRoleArn, s3ImportP }, } - err = importer.ImportCSV(s3ImportPath) + err = importer.ImportCSV(ctx, s3ImportPath) if err != nil { logger.Error("error returned from ImportCSV: 
", err) return "", err @@ -125,55 +116,34 @@ func handleCSVImport(ctx context.Context, db *sql.DB, s3AssumeRoleArn, s3ImportP return result, nil } -func loadBfdS3Params(ctx context.Context) (string, error) { +func handleCclfImport(ctx context.Context, pool *pgxpool.Pool, s3Client *s3.Client, ssmClient *ssm.Client, s3ImportPath string) (string, error) { env := conf.GetEnv("ENV") + appName := conf.GetEnv("APP_NAME") + logger := configureLogger(env, appName) + logger = logger.WithFields(logrus.Fields{"import_filename": s3ImportPath}) - cfg, err := config.LoadDefaultConfig(ctx) + err := loadBCDAParams() if err != nil { return "", err } - ssmClient := ssm.NewFromConfig(cfg) - - return bcdaaws.GetParameter(ctx, ssmClient, fmt.Sprintf("/cclf-import/bcda/%s/bfd-bucket-role-arn", env)) -} - -func loadBCDAParams() error { - env := conf.GetEnv("ENV") - conf.LoadLambdaEnvVars(env) - return nil -} -func handleCclfImport(ctx context.Context, db *sql.DB, s3AssumeRoleArn, s3ImportPath string) (string, error) { - env := conf.GetEnv("ENV") - appName := conf.GetEnv("APP_NAME") - logger := configureLogger(env, appName) - logger = logger.WithFields(logrus.Fields{"import_filename": s3ImportPath}) - - cfg, err := config.LoadDefaultConfig(ctx) + s3AssumeRoleArn, err := bcdaaws.GetParameter(ctx, ssmClient, fmt.Sprintf("/cclf-import/bcda/%s/bfd-bucket-role-arn", env)) if err != nil { - logger.Error("error loading default config: ", err) + logger.Errorf("error getting param: %+v", err) return "", err } - client := s3.NewFromConfig(cfg) fileProcessor := cclf.S3FileProcessor{ Handler: optout.S3FileHandler{ - Ctx: ctx, - Client: client, + Client: s3Client, Logger: logger, Endpoint: os.Getenv("LOCAL_STACK_ENDPOINT"), AssumeRoleArn: s3AssumeRoleArn, }, } - // Create pgx pool for bulk operations - pool := database.ConnectPool() - defer pool.Close() - importer := cclf.NewCclfImporter(logger, &fileProcessor, pool) - success, failure, skipped, err := importer.ImportCCLFDirectory(s3ImportPath) - if 
err != nil { logger.Error("error returned from ImportCCLFDirectory: ", err) return "", err @@ -185,14 +155,20 @@ func handleCclfImport(ctx context.Context, db *sql.DB, s3AssumeRoleArn, s3Import err = errors.New("files skipped or failed import. See logs for more details") return result, err - } result := fmt.Sprintf("Completed CCLF import. Successfully imported %v files. Failed to import %v files. Skipped %v files. See logs for more details.", success, failure, skipped) logger.Info(result) + return result, nil } +func loadBCDAParams() error { + env := conf.GetEnv("ENV") + conf.LoadLambdaEnvVars(env) + return nil +} + func configureLogger(env, appName string) *logrus.Entry { log := logrus.New() log.SetFormatter(&logrus.JSONFormatter{ diff --git a/bcda/lambda/cclf/main_test.go b/bcda/lambda/cclf/main_test.go index c40f5ec5c..da15a32ad 100644 --- a/bcda/lambda/cclf/main_test.go +++ b/bcda/lambda/cclf/main_test.go @@ -37,6 +37,10 @@ func TestAttributionImportMainSuite(t *testing.T) { func (s *AttributionImportMainSuite) TestImportCCLFDirectory() { targetACO := "A0001" assert := assert.New(s.T()) + cfg := testUtils.TestAWSConfig(s.T()) + s3Client := testUtils.TestS3Client(s.T(), cfg) + ssmClient := testUtils.TestSSMClient(s.T(), cfg) + pool := database.ConnectPool() env := uuid.NewUUID() cleanupEnv := testUtils.SetEnvVars(s.T(), []testUtils.EnvVar{ @@ -45,11 +49,10 @@ func (s *AttributionImportMainSuite) TestImportCCLFDirectory() { }) defer cleanupEnv() - cleanupParams := testUtils.SetParameters(s.T(), []testUtils.AwsParameter{ - {Name: fmt.Sprintf("/cclf-import/bcda/%s/bfd-bucket-role-arn", env), Value: "arn:aws:iam::000000000000:user/fake-arn", Type: "String"}, - {Name: fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env), Value: "postgresql://postgres:toor@db-unit-test:5432/bcda_test?sslmode=disable", Type: "SecureString"}, - }) - defer cleanupParams() + cleanupParam1 := testUtils.SetParameter(s.T(), fmt.Sprintf("/cclf-import/bcda/%s/bfd-bucket-role-arn", env), 
"arn:aws:iam::000000000000:user/fake-arn") + cleanupParam2 := testUtils.SetParameter(s.T(), fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env), "postgresql://postgres:toor@db-unit-test:5432/bcda_test?sslmode=disable") + defer cleanupParam1() + defer cleanupParam2() type test struct { path string @@ -59,19 +62,20 @@ func (s *AttributionImportMainSuite) TestImportCCLFDirectory() { } tests := []test{ - {path: "../../../shared_files/cclf/archives/valid/", filename: "cclf/archives/valid/T.BCD.A0001.ZCY18.D181120.T1000000", expectedLogs: []string{"Successfully imported 2 files.", "Failed to import 0 files.", "Skipped 0 files."}}, + {path: "../../../shared_files/cclf/archives/valid/", filename: "cclf/archives/valid/T.BCD.A0001.ZCY18.D181120.T1000000", expectedLogs: []string{"Successfully imported", "Failed to import 0 files.", "Skipped 0 files."}}, {path: "../../../shared_files/cclf/archives/invalid_bcd/", filename: "cclf/archives/invalid_bcd/P.BCD.A0009.ZCY18.D181120.T0001000", err: errors.New("files skipped or failed import. 
See logs for more details"), expectedLogs: []string{}}, {path: "../../../shared_files/cclf/archives/skip/", filename: "cclf/archives/skip/T.BCD.ACOB.ZC0Y18.D181120.T0001000", expectedLogs: []string{"Successfully imported 0 files.", "Failed to import 0 files.", "Skipped 0 files."}}, } for _, tc := range tests { + fmt.Printf("\n----- path: %s\n", tc.path) postgrestest.DeleteCCLFFilesByCMSID(s.T(), s.db, targetACO) defer postgrestest.DeleteCCLFFilesByCMSID(s.T(), s.db, targetACO) path, cleanup := testUtils.CopyToS3(s.T(), tc.path) defer cleanup() - res, err := attributionImportHandler(context.Background(), testUtils.GetSQSEvent(s.T(), path, tc.filename)) + res, err := handleCclfImport(context.Background(), pool, s3Client, ssmClient, path) if tc.err == nil { assert.Nil(err) @@ -88,6 +92,11 @@ func (s *AttributionImportMainSuite) TestImportCCLFDirectory() { func (s *AttributionImportMainSuite) TestHandlerMissingS3AssumeRoleArn() { assert := assert.New(s.T()) - _, err := attributionImportHandler(context.Background(), testUtils.GetSQSEvent(s.T(), "doesn't-matter", "fake_filename")) - assert.Contains(err.Error(), "error retrieving parameter /cclf-import/bcda/local/bfd-bucket-role-arn from parameter store: ParameterNotFound: Parameter /cclf-import/bcda/local/bfd-bucket-role-arn not found.") + cfg := testUtils.TestAWSConfig(s.T()) + s3Client := testUtils.TestS3Client(s.T(), cfg) + ssmClient := testUtils.TestSSMClient(s.T(), cfg) + pool := database.ConnectPool() + + _, err := handleCclfImport(context.Background(), pool, s3Client, ssmClient, "asdf") + assert.Contains(err.Error(), "error retrieving parameter /cclf-import/bcda/local/bfd-bucket-role-arn from parameter store") } diff --git a/bcda/lambda/optout/main.go b/bcda/lambda/optout/main.go index ab69787c9..6d1dd4dcb 100644 --- a/bcda/lambda/optout/main.go +++ b/bcda/lambda/optout/main.go @@ -27,18 +27,7 @@ import ( ) func main() { - // Localstack is a local-development server that mimics AWS. 
The endpoint variable - // should only be set in local development to avoid making external calls to a real AWS account. - if os.Getenv("LOCAL_STACK_ENDPOINT") != "" { - res, err := handleOptOutImport(context.Background(), database.Connect(), os.Getenv("BFD_BUCKET_ROLE_ARN"), os.Getenv("BFD_S3_IMPORT_PATH")) - if err != nil { - fmt.Printf("Failed to run opt out import: %s\n", err.Error()) - } else { - fmt.Println(res) - } - } else { - lambda.Start(optOutImportHandler) - } + lambda.Start(optOutImportHandler) } func optOutImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (string, error) { @@ -48,7 +37,6 @@ func optOutImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (string, db := database.Connect() s3Event, err := bcdaaws.ParseSQSEvent(sqsEvent) - if err != nil { logger.Errorf("Failed to parse S3 event: %v", err) return "", err @@ -57,16 +45,19 @@ func optOutImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (string, return "", nil } + cfg, err := config.LoadDefaultConfig(ctx) + if err != nil { + logger.Error("Failed to load Default Config") + return "", err + } + ssmClient := ssm.NewFromConfig(cfg) + s3Client := s3.NewFromConfig(cfg) + for _, e := range s3Event.Records { if strings.Contains(e.EventName, "ObjectCreated") { - s3AssumeRoleArn, err := loadBfdS3Params(ctx) - if err != nil { - return "", err - } - dir := bcdaaws.ParseS3Directory(e.S3.Bucket.Name, e.S3.Object.Key) logger.Infof("Reading %s event for directory %s", e.EventName, dir) - return handleOptOutImport(ctx, db, s3AssumeRoleArn, dir) + return handleOptOutImport(ctx, db, s3Client, ssmClient, dir) } } @@ -74,35 +65,21 @@ func optOutImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (string, return "", nil } -func loadBfdS3Params(ctx context.Context) (string, error) { - env := conf.GetEnv("ENV") - - cfg, err := config.LoadDefaultConfig(ctx) - if err != nil { - return "", err - } - ssmClient := ssm.NewFromConfig(cfg) - - return bcdaaws.GetParameter(ctx, 
ssmClient, fmt.Sprintf("/cclf-import/bcda/%s/bfd-bucket-role-arn", env)) -} - -func handleOptOutImport(ctx context.Context, db *sql.DB, s3AssumeRoleArn, s3ImportPath string) (string, error) { +func handleOptOutImport(ctx context.Context, db *sql.DB, s3Client *s3.Client, ssmClient *ssm.Client, s3ImportPath string) (string, error) { env := conf.GetEnv("ENV") appName := conf.GetEnv("APP_NAME") logger := configureLogger(env, appName) repo := postgres.NewRepository(db) - cfg, err := config.LoadDefaultConfig(ctx) + s3AssumeRoleArn, err := bcdaaws.GetParameter(ctx, ssmClient, fmt.Sprintf("/opt-out-import/bcda/%s/bfd-bucket-role-arn", env)) if err != nil { - logger.Error("error loading default config: ", err) + logger.Errorf("error getting param: %+v", err) return "", err } - client := s3.NewFromConfig(cfg) importer := suppression.OptOutImporter{ FileHandler: &optout.S3FileHandler{ - Ctx: ctx, - Client: client, + Client: s3Client, Logger: logger, Endpoint: os.Getenv("LOCAL_STACK_ENDPOINT"), AssumeRoleArn: s3AssumeRoleArn, @@ -114,7 +91,7 @@ func handleOptOutImport(ctx context.Context, db *sql.DB, s3AssumeRoleArn, s3Impo ImportStatusInterval: utils.GetEnvInt("SUPPRESS_IMPORT_STATUS_RECORDS_INTERVAL", 1000), } - s, f, sk, err := importer.ImportSuppressionDirectory(s3ImportPath) + s, f, sk, err := importer.ImportSuppressionDirectory(ctx, s3ImportPath) result := fmt.Sprintf("Completed 1-800-MEDICARE suppression data import.\nFiles imported: %v\nFiles failed: %v\nFiles skipped: %v\n", s, f, sk) logger.Info(result) return result, err diff --git a/bcda/lambda/optout/main_test.go b/bcda/lambda/optout/main_test.go index 698bab7f4..1f2849894 100644 --- a/bcda/lambda/optout/main_test.go +++ b/bcda/lambda/optout/main_test.go @@ -28,8 +28,12 @@ func TestOptOutImportMainSuite(t *testing.T) { suite.Run(t, new(OptOutImportMainSuite)) } -func (s *OptOutImportMainSuite) TestOptOutImportHandlerSuccess() { +func (s *OptOutImportMainSuite) TestHandleOptOutImportSuccess() { assert := 
assert.New(s.T()) + cfg := testUtils.TestAWSConfig(s.T()) + s3Client := testUtils.TestS3Client(s.T(), cfg) + ssmClient := testUtils.TestSSMClient(s.T(), cfg) + path, cleanup := testUtils.CopyToS3(s.T(), "../../../shared_files/synthetic1800MedicareFiles/test2/") defer cleanup() @@ -37,13 +41,12 @@ func (s *OptOutImportMainSuite) TestOptOutImportHandlerSuccess() { cleanupEnv := testUtils.SetEnvVars(s.T(), []testUtils.EnvVar{{Name: "ENV", Value: env.String()}}) defer cleanupEnv() - cleanupParams := testUtils.SetParameters(s.T(), []testUtils.AwsParameter{ - {Name: fmt.Sprintf("/opt-out-import/bcda/%s/bfd-bucket-role-arn", env), Value: "arn:aws:iam::000000000000:user/fake-arn", Type: "String"}, - {Name: fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env), Value: "postgresql://postgres:toor@db-unit-test:5432/bcda_test?sslmode=disable", Type: "SecureString"}, - }) - defer cleanupParams() + cleanupParam1 := testUtils.SetParameter(s.T(), fmt.Sprintf("/opt-out-import/bcda/%s/bfd-bucket-role-arn", env), "arn:aws:iam::000000000000:user/fake-arn") + cleanupParam2 := testUtils.SetParameter(s.T(), fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env), "postgresql://postgres:toor@db-unit-test:5432/bcda_test?sslmode=disable") + defer cleanupParam1() + defer cleanupParam2() - res, err := optOutImportHandler(context.Background(), testUtils.GetSQSEvent(s.T(), path, "fake_filename")) + res, err := handleOptOutImport(context.Background(), s.db, s3Client, ssmClient, path) assert.Nil(err) assert.Contains(res, constants.CompleteMedSupDataImp) assert.Contains(res, "Files imported: 2") @@ -63,6 +66,10 @@ func (s *OptOutImportMainSuite) TestOptOutImportHandlerSuccess() { func (s *OptOutImportMainSuite) TestImportSuppressionDirectory_Skipped() { assert := assert.New(s.T()) + cfg := testUtils.TestAWSConfig(s.T()) + s3Client := testUtils.TestS3Client(s.T(), cfg) + ssmClient := testUtils.TestSSMClient(s.T(), cfg) + path, cleanup := testUtils.CopyToS3(s.T(), 
"../../../shared_files/suppressionfile_BadFileNames/") defer cleanup() @@ -70,13 +77,12 @@ func (s *OptOutImportMainSuite) TestImportSuppressionDirectory_Skipped() { cleanupEnv := testUtils.SetEnvVars(s.T(), []testUtils.EnvVar{{Name: "ENV", Value: env.String()}}) defer cleanupEnv() - cleanupParams := testUtils.SetParameters(s.T(), []testUtils.AwsParameter{ - {Name: fmt.Sprintf("/opt-out-import/bcda/%s/bfd-bucket-role-arn", env), Value: "arn:aws:iam::000000000000:user/fake-arn", Type: "String"}, - {Name: fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env), Value: "postgresql://postgres:toor@db-unit-test:5432/bcda_test?sslmode=disable", Type: "SecureString"}, - }) - defer cleanupParams() + cleanupParam1 := testUtils.SetParameter(s.T(), fmt.Sprintf("/opt-out-import/bcda/%s/bfd-bucket-role-arn", env), "arn:aws:iam::000000000000:user/fake-arn") + cleanupParam2 := testUtils.SetParameter(s.T(), fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env), "postgresql://postgres:toor@db-unit-test:5432/bcda_test?sslmode=disable") + defer cleanupParam1() + defer cleanupParam2() - res, err := optOutImportHandler(context.Background(), testUtils.GetSQSEvent(s.T(), path, "fake_filename")) + res, err := handleOptOutImport(context.Background(), s.db, s3Client, ssmClient, path) assert.Nil(err) assert.Contains(res, constants.CompleteMedSupDataImp) assert.Contains(res, "Files imported: 0") @@ -86,6 +92,10 @@ func (s *OptOutImportMainSuite) TestImportSuppressionDirectory_Skipped() { func (s *OptOutImportMainSuite) TestImportSuppressionDirectory_Failed() { assert := assert.New(s.T()) + cfg := testUtils.TestAWSConfig(s.T()) + s3Client := testUtils.TestS3Client(s.T(), cfg) + ssmClient := testUtils.TestSSMClient(s.T(), cfg) + path, cleanup := testUtils.CopyToS3(s.T(), "../../../shared_files/suppressionfile_BadHeader/") defer cleanup() @@ -93,13 +103,12 @@ func (s *OptOutImportMainSuite) TestImportSuppressionDirectory_Failed() { cleanupEnv := testUtils.SetEnvVars(s.T(), []testUtils.EnvVar{{Name: "ENV", Value: 
env.String()}}) defer cleanupEnv() - cleanupParams := testUtils.SetParameters(s.T(), []testUtils.AwsParameter{ - {Name: fmt.Sprintf("/opt-out-import/bcda/%s/bfd-bucket-role-arn", env), Value: "arn:aws:iam::000000000000:user/fake-arn", Type: "String"}, - {Name: fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env), Value: "postgresql://postgres:toor@db-unit-test:5432/bcda_test?sslmode=disable", Type: "SecureString"}, - }) - defer cleanupParams() + cleanupParam1 := testUtils.SetParameter(s.T(), fmt.Sprintf("/opt-out-import/bcda/%s/bfd-bucket-role-arn", env), "arn:aws:iam::000000000000:user/fake-arn") + cleanupParam2 := testUtils.SetParameter(s.T(), fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env), "postgresql://postgres:toor@db-unit-test:5432/bcda_test?sslmode=disable") + defer cleanupParam1() + defer cleanupParam2() - res, err := optOutImportHandler(context.Background(), testUtils.GetSQSEvent(s.T(), path, "fake_filename")) + res, err := handleOptOutImport(context.Background(), s.db, s3Client, ssmClient, path) assert.EqualError(err, "one or more suppression files failed to import correctly") assert.Contains(res, constants.CompleteMedSupDataImp) assert.Contains(res, "Files imported: 0") @@ -109,6 +118,10 @@ func (s *OptOutImportMainSuite) TestImportSuppressionDirectory_Failed() { func (s *OptOutImportMainSuite) TestHandlerMissingS3AssumeRoleArn() { assert := assert.New(s.T()) - _, err := optOutImportHandler(context.Background(), testUtils.GetSQSEvent(s.T(), "doesn't-matter", "fake_filename")) - assert.Contains(err.Error(), "error retrieving parameter /opt-out-import/bcda/local/bfd-bucket-role-arn from parameter store: ParameterNotFound: Parameter /opt-out-import/bcda/local/bfd-bucket-role-arn not found.") + cfg := testUtils.TestAWSConfig(s.T()) + s3Client := testUtils.TestS3Client(s.T(), cfg) + ssmClient := testUtils.TestSSMClient(s.T(), cfg) + + _, err := handleOptOutImport(context.Background(), s.db, s3Client, ssmClient, "asdf") + assert.Contains(err.Error(), "error retrieving 
parameter /opt-out-import/bcda/local/bfd-bucket-role-arn from parameter store") } diff --git a/bcda/metrics/metrics.go b/bcda/metrics/metrics.go deleted file mode 100644 index 05920b69c..000000000 --- a/bcda/metrics/metrics.go +++ /dev/null @@ -1,55 +0,0 @@ -package metrics - -import ( - "fmt" - - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/aws/session" - "github.com/aws/aws-sdk-go/service/cloudwatch" -) - -type Dimension struct { - Name string - Value string -} - -type Sampler struct { - Namespace string - Unit string - Service *cloudwatch.CloudWatch -} - -func (s *Sampler) PutSample(name string, value float64, dimensions []Dimension) error { - var d []*cloudwatch.Dimension - - for _, v := range dimensions { - def := &cloudwatch.Dimension{ - Name: &v.Name, - Value: &v.Value, - } - d = append(d, def) - } - - data := &cloudwatch.MetricDatum{ - Dimensions: d, - MetricName: &name, - Unit: &s.Unit, - Value: &value, - } - - input := &cloudwatch.PutMetricDataInput{ - MetricData: []*cloudwatch.MetricDatum{data}, - Namespace: &s.Namespace, - } - fmt.Println(input) - _, err := s.Service.PutMetricData(input) - return err -} - -func NewSampler(ns, unit string) (*Sampler, error) { - s := session.Must(session.NewSession(&aws.Config{ - Region: aws.String("us-east-1"), - })) - svc := cloudwatch.New(s) - return &Sampler{ns, unit, svc}, nil -} diff --git a/bcda/suppression/suppression.go b/bcda/suppression/suppression.go index 341b217d0..a31111a89 100644 --- a/bcda/suppression/suppression.go +++ b/bcda/suppression/suppression.go @@ -2,6 +2,7 @@ package suppression import ( "bytes" + "context" "fmt" "strconv" @@ -28,8 +29,8 @@ type OptOutImporter struct { ImportStatusInterval int } -func (importer OptOutImporter) ImportSuppressionDirectory(path string) (success, failure, skipped int, err error) { - suppresslist, skipped, err := importer.FileHandler.LoadOptOutFiles(path) +func (importer OptOutImporter) ImportSuppressionDirectory(ctx context.Context, path string) 
(success, failure, skipped int, err error) { + suppresslist, skipped, err := importer.FileHandler.LoadOptOutFiles(ctx, path) if err != nil { return 0, 0, 0, err } @@ -40,12 +41,12 @@ func (importer OptOutImporter) ImportSuppressionDirectory(path string) (success, } for _, metadata := range *suppresslist { - err = importer.validate(metadata) + err = importer.validate(ctx, metadata) if err != nil { importer.Logger.Errorf("Failed to validate suppression file: %s", metadata) failure++ } else { - if err = importer.ImportSuppressionData(metadata); err != nil { + if err = importer.ImportSuppressionData(ctx, metadata); err != nil { importer.Logger.Errorf("Failed to import suppression file: %s ", metadata) failure++ } else { @@ -54,7 +55,7 @@ func (importer OptOutImporter) ImportSuppressionDirectory(path string) (success, } } } - err = importer.FileHandler.CleanupOptOutFiles(*suppresslist) + err = importer.FileHandler.CleanupOptOutFiles(ctx, *suppresslist) if err != nil { importer.Logger.Error(err) } @@ -68,11 +69,11 @@ func (importer OptOutImporter) ImportSuppressionDirectory(path string) (success, return success, failure, skipped, err } -func (importer OptOutImporter) validate(metadata *optout.OptOutFilenameMetadata) error { +func (importer OptOutImporter) validate(ctx context.Context, metadata *optout.OptOutFilenameMetadata) error { importer.Logger.Infof("Validating suppression file %s...", metadata) count := 0 - sc, close, err := importer.FileHandler.OpenFile(metadata) + sc, close, err := importer.FileHandler.OpenFile(ctx, metadata) if err != nil { err = errors.Wrapf(err, "could not read file %s", metadata) importer.Logger.Error(err) @@ -119,10 +120,10 @@ func (importer OptOutImporter) validate(metadata *optout.OptOutFilenameMetadata) return nil } -func (importer OptOutImporter) ImportSuppressionData(metadata *optout.OptOutFilenameMetadata) error { +func (importer OptOutImporter) ImportSuppressionData(ctx context.Context, metadata *optout.OptOutFilenameMetadata) error { 
optOutCount := 0 optInCount := 0 - err := importer.importSuppressionMetadata(metadata, func(fileID uint, b []byte) error { + err := importer.importSuppressionMetadata(ctx, metadata, func(fileID uint, b []byte) error { suppression, err := optout.ParseRecord(metadata, b) if err != nil { @@ -153,7 +154,7 @@ func (importer OptOutImporter) ImportSuppressionData(metadata *optout.OptOutFile return nil } -func (importer OptOutImporter) importSuppressionMetadata(metadata *optout.OptOutFilenameMetadata, importFunc func(uint, []byte) error) error { +func (importer OptOutImporter) importSuppressionMetadata(ctx context.Context, metadata *optout.OptOutFilenameMetadata, importFunc func(uint, []byte) error) error { importer.Logger.Infof("Importing suppression file %s...", metadata) var ( @@ -176,7 +177,7 @@ func (importer OptOutImporter) importSuppressionMetadata(metadata *optout.OptOut importedCount := 0 - sc, close, err := importer.FileHandler.OpenFile(metadata) + sc, close, err := importer.FileHandler.OpenFile(ctx, metadata) if err != nil { err = errors.Wrapf(err, "could not read file %s", metadata) importer.Logger.Error(err) diff --git a/bcda/suppression/suppression_s3_test.go b/bcda/suppression/suppression_s3_test.go index 06cbd41e3..f1488f562 100644 --- a/bcda/suppression/suppression_s3_test.go +++ b/bcda/suppression/suppression_s3_test.go @@ -1,6 +1,7 @@ package suppression import ( + "context" "fmt" "path/filepath" "strings" @@ -8,6 +9,8 @@ import ( "time" "github.com/DATA-DOG/go-sqlmock" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/s3" log "github.com/sirupsen/logrus" "github.com/CMSgov/bcda-app/bcda/constants" @@ -25,12 +28,17 @@ import ( type SuppressionS3TestSuite struct { suite.Suite + ctx context.Context } func (s *SuppressionS3TestSuite) createImporter() (OptOutImporter, *optout.FakeSaver) { saver := optout.FakeSaver{} + s.ctx = context.Background() + client := testUtils.TestS3Client(s.T(), testUtils.TestAWSConfig(s.T())) + return 
OptOutImporter{ FileHandler: &optout.S3FileHandler{ + Client: client, Logger: log.StandardLogger(), Endpoint: conf.GetEnv("BFD_S3_ENDPOINT"), }, @@ -47,7 +55,7 @@ func TestSuppressionS3TestSuite(t *testing.T) { func (s *SuppressionS3TestSuite) TestImportSuppression() { assert := assert.New(s.T()) bucketName, cleanup := testUtils.CopyToS3(s.T(), "../../shared_files/synthetic1800MedicareFiles/test/T#EFT.ON.ACO.NGD1800.DPRF.D181120.T1000009") - defer cleanup() + s.T().Cleanup(func() { cleanup() }) // 181120 file fileTime, _ := time.Parse(time.RFC3339, "2018-11-20T10:00:00Z") @@ -59,7 +67,7 @@ func (s *SuppressionS3TestSuite) TestImportSuppression() { } importer, saver := s.createImporter() - err := importer.ImportSuppressionData(metadata) + err := importer.ImportSuppressionData(s.ctx, metadata) assert.Nil(err) assert.Len(saver.Files, 1) @@ -81,7 +89,7 @@ func (s *SuppressionS3TestSuite) TestImportSuppression() { // 190816 file T#EFT.ON.ACO.NGD1800.DPRF.D190816.T0241390 bucketName, cleanup = testUtils.CopyToS3(s.T(), "../../shared_files/synthetic1800MedicareFiles/test/T#EFT.ON.ACO.NGD1800.DPRF.D190816.T0241390") - defer cleanup() + s.T().Cleanup(func() { cleanup() }) fileTime, _ = time.Parse(time.RFC3339, "2019-08-16T02:41:39Z") metadata = &optout.OptOutFilenameMetadata{ @@ -92,7 +100,7 @@ func (s *SuppressionS3TestSuite) TestImportSuppression() { } importer, saver = s.createImporter() - err = importer.ImportSuppressionData(metadata) + err = importer.ImportSuppressionData(s.ctx, metadata) assert.Nil(err) assert.Len(saver.Files, 1) @@ -120,7 +128,7 @@ func (s *SuppressionS3TestSuite) TestImportSuppression_MissingData() { // Verify empty file is rejected metadata := &optout.OptOutFilenameMetadata{} importer, _ := s.createImporter() - err := importer.ImportSuppressionData(metadata) + err := importer.ImportSuppressionData(s.ctx, metadata) assert.NotNil(err) assert.Contains(err.Error(), "could not read file") @@ -160,7 +168,7 @@ func (s *SuppressionS3TestSuite) 
TestImportSuppression_MissingData() { } } - err = importer.ImportSuppressionData(metadata) + err = importer.ImportSuppressionData(s.ctx, metadata) assert.NotNil(err) assert.Contains(err.Error(), fmt.Sprintf("%s: %s", tt.expErr, fp)) @@ -182,12 +190,12 @@ func (s *SuppressionS3TestSuite) TestValidate() { suppressionfilePath := filepath.Join(bucketName, "synthetic1800MedicareFiles/test/T#EFT.ON.ACO.NGD1800.DPRF.D181120.T1000009") metadata := &optout.OptOutFilenameMetadata{Timestamp: time.Now(), FilePath: suppressionfilePath} - err := importer.validate(metadata) + err := importer.validate(s.ctx, metadata) assert.Nil(err) // bad file path metadata.FilePath = metadata.FilePath + "/blah/" - err = importer.validate(metadata) + err = importer.validate(s.ctx, metadata) assert.NotNil(err) assert.Contains(err.Error(), "could not read file "+metadata.FilePath) @@ -196,7 +204,7 @@ func (s *SuppressionS3TestSuite) TestValidate() { defer cleanup() metadata.FilePath = filepath.Join(bucketName, "suppressionfile_BadHeader/T#EFT.ON.ACO.NGD1800.DPRF.D181120.T1000009") - err = importer.validate(metadata) + err = importer.validate(s.ctx, metadata) assert.EqualError(err, "invalid file header for file: "+metadata.FilePath) // missing record count @@ -204,7 +212,7 @@ func (s *SuppressionS3TestSuite) TestValidate() { defer cleanup() metadata.FilePath = filepath.Join(bucketName, "suppressionfile_MissingData/T#EFT.ON.ACO.NGD1800.DPRF.D181120.T1000009") - err = importer.validate(metadata) + err = importer.validate(s.ctx, metadata) assert.EqualError(err, "failed to parse record count from file: "+metadata.FilePath) // incorrect record count @@ -212,7 +220,7 @@ func (s *SuppressionS3TestSuite) TestValidate() { defer cleanup() metadata.FilePath = filepath.Join(bucketName, "suppressionfile_MissingData/T#EFT.ON.ACO.NGD1800.DPRF.D181120.T1000010") - err = importer.validate(metadata) + err = importer.validate(s.ctx, metadata) assert.EqualError(err, "incorrect number of records found from file: 
'"+metadata.FilePath+"'. Expected record count: 5, Actual record count: 4") } @@ -224,7 +232,7 @@ func (s *SuppressionS3TestSuite) TestLoadOptOutFiles() { defer cleanup() filePath := filepath.Join(bucketName, constants.TestSynthMedFilesPath) - suppresslist, skipped, err := importer.FileHandler.LoadOptOutFiles(filePath) + suppresslist, skipped, err := importer.FileHandler.LoadOptOutFiles(s.ctx, filePath) assert.Nil(err) assert.Equal(2, len(*suppresslist)) assert.Equal(0, skipped) @@ -233,7 +241,7 @@ func (s *SuppressionS3TestSuite) TestLoadOptOutFiles() { defer cleanup() filePath = filepath.Join(bucketName, "suppressionfile_BadFileNames/") - suppresslist, skipped, err = importer.FileHandler.LoadOptOutFiles(filePath) + suppresslist, skipped, err = importer.FileHandler.LoadOptOutFiles(s.ctx, filePath) assert.Nil(err) assert.Equal(0, len(*suppresslist)) assert.Equal(2, skipped) @@ -282,11 +290,15 @@ func (s *SuppressionS3TestSuite) TestCleanupSuppression() { } suppresslist = []*optout.OptOutFilenameMetadata{metadata, metadata2, metadata3} - err := importer.FileHandler.CleanupOptOutFiles(suppresslist) + err := importer.FileHandler.CleanupOptOutFiles(s.ctx, suppresslist) assert.Nil(err) - objects := testUtils.ListS3Objects(s.T(), bucketName, "") - assert.True(len(objects) == 0) + client := testUtils.TestS3Client(s.T(), testUtils.TestAWSConfig(s.T())) + output, err := client.ListObjectsV2(s.T().Context(), &s3.ListObjectsV2Input{ + Bucket: aws.String(bucketName), + }) + assert.Nil(s.T(), err) + assert.True(len(output.Contents) == 0) } func (s *SuppressionS3TestSuite) TestImportSuppressionDirectoryTable() { @@ -312,7 +324,7 @@ func (s *SuppressionS3TestSuite) TestImportSuppressionDirectoryTable() { {name: "Valid test", directory: "../../shared_files/synthetic1800MedicareFiles/test2/", success: 2, failure: 0, skipped: 0, errorExpected: false, errMessage: "", deleteFiles: true}, {name: "Import failure", directory: "../../shared_files/suppressionfile_BadHeader/", success: 0, 
failure: 1, skipped: 0, errorExpected: true, errMessage: "one or more suppression files failed to import correctly", deleteFiles: false}, {name: "Skipped import", directory: "../../shared_files/suppressionfile_BadFileNames/", success: 0, failure: 0, skipped: 2, errorExpected: false, errMessage: "", deleteFiles: false}, - {name: "Carriage char in path", directory: "../../shared_files/suppressionfile_BadFileNames/", success: 0, failure: 0, skipped: 0, errorExpected: true, errMessage: "The specified bucket does not exist", deleteFiles: false, insertCarriage: true}, + {name: "Carriage char in path", directory: "../../shared_files/suppressionfile_BadFileNames/", success: 0, failure: 0, skipped: 0, errorExpected: true, errMessage: "Failed to parse uri", deleteFiles: false, insertCarriage: true}, } for _, tt := range tests { @@ -324,7 +336,7 @@ func (s *SuppressionS3TestSuite) TestImportSuppressionDirectoryTable() { bucketName += "\n" } - success, failure, skipped, err := importer.ImportSuppressionDirectory(bucketName) + success, failure, skipped, err := importer.ImportSuppressionDirectory(s.ctx, bucketName) if tt.errorExpected { assert.Equal(true, strings.Contains(err.Error(), tt.errMessage)) } else { diff --git a/bcda/suppression/suppression_test.go b/bcda/suppression/suppression_test.go index faa7b9195..488ad1051 100644 --- a/bcda/suppression/suppression_test.go +++ b/bcda/suppression/suppression_test.go @@ -1,6 +1,7 @@ package suppression import ( + "context" "fmt" "os" "path/filepath" @@ -74,6 +75,7 @@ func TestSuppressionTestSuite(t *testing.T) { func (s *SuppressionTestSuite) TestImportSuppression() { assert := assert.New(s.T()) + ctx := context.Background() hook := test.NewLocal(log.StandardLogger()) // 181120 file @@ -86,7 +88,7 @@ func (s *SuppressionTestSuite) TestImportSuppression() { } importer, saver := s.createImporter() - err := importer.ImportSuppressionData(metadata) + err := importer.ImportSuppressionData(ctx, metadata) assert.Nil(err) 
assert.Len(saver.Files, 1) @@ -116,7 +118,7 @@ func (s *SuppressionTestSuite) TestImportSuppression() { } importer, saver = s.createImporter() - err = importer.ImportSuppressionData(metadata) + err = importer.ImportSuppressionData(ctx, metadata) assert.Nil(err) assert.Len(saver.Files, 1) @@ -155,11 +157,12 @@ func (s *SuppressionTestSuite) TestImportSuppression() { func (s *SuppressionTestSuite) TestImportSuppression_MissingData() { assert := assert.New(s.T()) + ctx := context.Background() // Verify empty file is rejected metadata := &optout.OptOutFilenameMetadata{} importer, _ := s.createImporter() - err := importer.ImportSuppressionData(metadata) + err := importer.ImportSuppressionData(ctx, metadata) assert.NotNil(err) assert.Contains(err.Error(), "could not read file") @@ -196,7 +199,7 @@ func (s *SuppressionTestSuite) TestImportSuppression_MissingData() { } } - err = importer.ImportSuppressionData(metadata) + err = importer.ImportSuppressionData(ctx, metadata) assert.NotNil(err) assert.Contains(err.Error(), fmt.Sprintf("%s: %s", tt.expErr, fp)) @@ -211,53 +214,55 @@ func (s *SuppressionTestSuite) TestImportSuppression_MissingData() { func (s *SuppressionTestSuite) TestValidate() { assert := assert.New(s.T()) importer, _ := s.createImporter() + ctx := context.Background() // positive suppressionfilePath := filepath.Join(s.basePath, "synthetic1800MedicareFiles/test/T#EFT.ON.ACO.NGD1800.DPRF.D181120.T1000009") metadata := &optout.OptOutFilenameMetadata{Timestamp: time.Now(), FilePath: suppressionfilePath} - err := importer.validate(metadata) + err := importer.validate(ctx, metadata) assert.Nil(err) // bad file path metadata.FilePath = metadata.FilePath + "/blah/" - err = importer.validate(metadata) + err = importer.validate(ctx, metadata) assert.NotNil(err) assert.Contains(err.Error(), "could not read file "+metadata.FilePath) // invalid file header metadata.FilePath = filepath.Join(s.basePath, 
"suppressionfile_BadHeader/T#EFT.ON.ACO.NGD1800.DPRF.D181120.T1000009") - err = importer.validate(metadata) + err = importer.validate(ctx, metadata) assert.EqualError(err, "invalid file header for file: "+metadata.FilePath) // missing record count metadata.FilePath = filepath.Join(s.basePath, "suppressionfile_MissingData/T#EFT.ON.ACO.NGD1800.DPRF.D181120.T1000009") - err = importer.validate(metadata) + err = importer.validate(ctx, metadata) assert.EqualError(err, "failed to parse record count from file: "+metadata.FilePath) // incorrect record count metadata.FilePath = filepath.Join(s.basePath, "suppressionfile_MissingData/T#EFT.ON.ACO.NGD1800.DPRF.D181120.T1000010") - err = importer.validate(metadata) + err = importer.validate(ctx, metadata) assert.EqualError(err, "incorrect number of records found from file: '"+metadata.FilePath+"'. Expected record count: 5, Actual record count: 4") } func (s *SuppressionTestSuite) TestLoadOptOutFiles() { assert := assert.New(s.T()) importer, _ := s.createImporter() + ctx := context.Background() filePath := filepath.Join(s.basePath, constants.TestSynthMedFilesPath) - suppresslist, skipped, err := importer.FileHandler.LoadOptOutFiles(filePath) + suppresslist, skipped, err := importer.FileHandler.LoadOptOutFiles(ctx, filePath) assert.Nil(err) assert.Equal(2, len(*suppresslist)) assert.Equal(0, skipped) filePath = filepath.Join(s.basePath, "suppressionfile_BadFileNames/") - suppresslist, skipped, err = importer.FileHandler.LoadOptOutFiles(filePath) + suppresslist, skipped, err = importer.FileHandler.LoadOptOutFiles(ctx, filePath) assert.Nil(err) assert.Equal(0, len(*suppresslist)) assert.Equal(2, skipped) filePath = filepath.Join(s.basePath, constants.TestSynthMedFilesPath) - suppresslist, _, err = importer.FileHandler.LoadOptOutFiles(filePath) + suppresslist, _, err = importer.FileHandler.LoadOptOutFiles(ctx, filePath) assert.Nil(err) modtimeAfter := time.Now().Truncate(time.Second) // check current value and change mod time @@ 
-272,7 +277,7 @@ func (s *SuppressionTestSuite) TestLoadOptOutFiles() { } filePath = filepath.Join(s.basePath, constants.TestSynthMedFilesPath) - suppresslist, _, err = importer.FileHandler.LoadOptOutFiles(filePath) + suppresslist, _, err = importer.FileHandler.LoadOptOutFiles(ctx, filePath) assert.Nil(err) for _, f := range *suppresslist { assert.Equal(modtimeAfter.Format("010203040506"), f.DeliveryDate.Format("010203040506")) @@ -281,6 +286,7 @@ func (s *SuppressionTestSuite) TestLoadOptOutFiles() { func (s *SuppressionTestSuite) TestLoadOptOutFiles_TimeChange() { assert := assert.New(s.T()) + ctx := context.Background() importer, _ := s.createImporter() importer.Saver = &BCDASaver{ Repo: postgres.NewRepository(database.Connect()), @@ -295,7 +301,7 @@ func (s *SuppressionTestSuite) TestLoadOptOutFiles_TimeChange() { s.FailNow(constants.TestChangeTimeErr, err) } - suppresslist, skipped, err := importer.FileHandler.LoadOptOutFiles(folderPath) + suppresslist, skipped, err := importer.FileHandler.LoadOptOutFiles(ctx, folderPath) assert.Nil(err) assert.Equal(0, len(*suppresslist)) assert.Equal(2, skipped) @@ -311,7 +317,7 @@ func (s *SuppressionTestSuite) TestLoadOptOutFiles_TimeChange() { s.FailNow(constants.TestChangeTimeErr, err) } - suppresslist, skipped, err = importer.FileHandler.LoadOptOutFiles(folderPath) + suppresslist, skipped, err = importer.FileHandler.LoadOptOutFiles(ctx, folderPath) assert.Nil(err) assert.Equal(0, len(*suppresslist)) assert.Equal(2, skipped) @@ -333,12 +339,13 @@ func (s *SuppressionTestSuite) TestLoadOptOutFiles_TimeChange() { } importer.FileHandler.(*optout.LocalFileHandler).PendingDeletionDir = "\n" - _, _, err = importer.FileHandler.LoadOptOutFiles(folderPath) + _, _, err = importer.FileHandler.LoadOptOutFiles(ctx, folderPath) assert.Equal(true, strings.Contains(err.Error(), "error moving unknown file")) } func (s *SuppressionTestSuite) TestCleanupSuppression() { assert := assert.New(s.T()) + ctx := context.Background() importer, _ 
:= s.createImporter() var suppresslist []*optout.OptOutFilenameMetadata @@ -373,7 +380,7 @@ func (s *SuppressionTestSuite) TestCleanupSuppression() { } suppresslist = []*optout.OptOutFilenameMetadata{metadata, metadata2, metadata3} - err := importer.FileHandler.CleanupOptOutFiles(suppresslist) + err := importer.FileHandler.CleanupOptOutFiles(ctx, suppresslist) assert.Nil(err) files, err := os.ReadDir(conf.GetEnv("PENDING_DELETION_DIR")) @@ -393,6 +400,7 @@ func (s *SuppressionTestSuite) TestCleanupSuppression() { func (s *SuppressionTestSuite) TestCleanupSuppression_Bad() { assert := assert.New(s.T()) + ctx := context.Background() importer, _ := s.createImporter() importer.FileHandler.(*optout.LocalFileHandler).PendingDeletionDir = "\n" @@ -418,12 +426,13 @@ func (s *SuppressionTestSuite) TestCleanupSuppression_Bad() { } suppresslist = []*optout.OptOutFilenameMetadata{metadata1, metadata2} - err := importer.FileHandler.CleanupOptOutFiles(suppresslist) + err := importer.FileHandler.CleanupOptOutFiles(ctx, suppresslist) assert.EqualError(err, "2 files could not be cleaned up") } func (s *SuppressionTestSuite) TestCleanupSuppression_RenameFileError() { assert := assert.New(s.T()) + ctx := context.Background() importer, _ := s.createImporter() importer.FileHandler.(*optout.LocalFileHandler).PendingDeletionDir = "\n" @@ -440,7 +449,7 @@ func (s *SuppressionTestSuite) TestCleanupSuppression_RenameFileError() { } suppresslist = []*optout.OptOutFilenameMetadata{metadata1} - err := importer.FileHandler.CleanupOptOutFiles(suppresslist) + err := importer.FileHandler.CleanupOptOutFiles(ctx, suppresslist) assert.EqualError(err, "1 files could not be cleaned up") } @@ -448,6 +457,7 @@ func (s *SuppressionTestSuite) TestImportSuppressionDirectoryTable() { assert := assert.New(s.T()) importer, _ := s.createImporter() db := database.Connect() + ctx := context.Background() importer.Saver = &BCDASaver{ Repo: postgres.NewRepository(db), @@ -479,7 +489,7 @@ func (s 
*SuppressionTestSuite) TestImportSuppressionDirectoryTable() { path += "\n" } - success, failure, skipped, err := importer.ImportSuppressionDirectory(path) + success, failure, skipped, err := importer.ImportSuppressionDirectory(ctx, path) if tt.errorExpected { assert.Equal(true, strings.Contains(err.Error(), tt.errMessage)) } else { diff --git a/bcda/testUtils/utils.go b/bcda/testUtils/utils.go index 84d7ff06d..5641021d0 100644 --- a/bcda/testUtils/utils.go +++ b/bcda/testUtils/utils.go @@ -26,11 +26,12 @@ import ( "github.com/CMSgov/bcda-app/conf" "github.com/CMSgov/bcda-app/middleware" "github.com/aws/aws-lambda-go/events" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/aws/session" - "github.com/aws/aws-sdk-go/service/s3" - "github.com/aws/aws-sdk-go/service/s3/s3manager" - "github.com/aws/aws-sdk-go/service/ssm" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/feature/s3/manager" + "github.com/aws/aws-sdk-go-v2/service/s3" + "github.com/aws/aws-sdk-go-v2/service/s3/types" + "github.com/aws/aws-sdk-go-v2/service/ssm" "github.com/go-chi/chi/v5" "github.com/pborman/uuid" "github.com/sirupsen/logrus" @@ -147,38 +148,46 @@ func CopyToTemporaryDirectory(t *testing.T, src string) (string, func()) { return newPath, cleanup } -// CopyToS3 copies all of the content found at src into a temporary S3 folder within localstack. -// The path to the temporary S3 directory is returned along with a function that can be called to clean up the data. 
-func CopyToS3(t *testing.T, src string) (string, func()) { - tempBucket := uuid.NewUUID() +func TestAWSConfig(t *testing.T) aws.Config { + ctx := context.Background() - endpoint := conf.GetEnv("BFD_S3_ENDPOINT") + cfg, err := config.LoadDefaultConfig(ctx, + config.WithRegion(constants.DefaultRegion), + ) + assert.Nil(t, err) - config := aws.Config{ - Region: aws.String("us-east-1"), - S3ForcePathStyle: aws.Bool(true), - Endpoint: &endpoint, - } + return cfg +} - sess, err := session.NewSessionWithOptions(session.Options{ - Config: config, +func TestS3Client(t *testing.T, cfg aws.Config) *s3.Client { + return s3.NewFromConfig(cfg, func(o *s3.Options) { + o.UsePathStyle = true // required for localstack buckets }) +} - if err != nil { - t.Fatalf("Failed to create new session for S3: %s", err.Error()) - } +func TestSSMClient(t *testing.T, cfg aws.Config) *ssm.Client { + return ssm.NewFromConfig(cfg) +} + +// CopyToS3 copies all of the content found at src into a temporary S3 folder within localstack. +// The path to the temporary S3 directory is returned along with a function that can be called to clean up the data. 
+func CopyToS3(t *testing.T, src string) (string, func()) { + ctx := context.Background() + tempBucket := uuid.NewUUID().String() - svc := s3.New(sess) + client := TestS3Client(t, TestAWSConfig(t)) - _, err = svc.CreateBucket(&s3.CreateBucketInput{ - Bucket: aws.String(tempBucket.String()), - }) + bucketInput := &s3.CreateBucketInput{ + Bucket: aws.String(tempBucket), + } + _, err := client.CreateBucket(ctx, bucketInput) + assert.Nil(t, err) if err != nil { - t.Fatalf("Failed to create bucket %s: %s", tempBucket.String(), err.Error()) + t.Fatalf("Failed to create bucket %s: %s", tempBucket, err.Error()) } - uploader := s3manager.NewUploader(sess) + uploader := manager.NewUploader(client) err = filepath.Walk(src, func(path string, info os.FileInfo, err error) error { if err != nil { @@ -200,8 +209,8 @@ func CopyToS3(t *testing.T, src string) (string, func()) { key = parts[1] } - _, err = uploader.Upload(&s3manager.UploadInput{ - Bucket: aws.String(tempBucket.String()), + _, err = uploader.Upload(ctx, &s3.PutObjectInput{ + Bucket: aws.String(tempBucket), Key: aws.String(key), Body: f, }) @@ -210,7 +219,7 @@ func CopyToS3(t *testing.T, src string) (string, func()) { return err } - fmt.Printf("Uploaded file in bucket %s, key %s\n", tempBucket.String(), key) + fmt.Printf("Uploaded file in bucket %s, key %s\n", tempBucket, key) return nil }) @@ -219,18 +228,26 @@ func CopyToS3(t *testing.T, src string) (string, func()) { } cleanup := func() { - svc := s3.New(sess) - iter := s3manager.NewDeleteListIterator(svc, &s3.ListObjectsInput{ - Bucket: aws.String(tempBucket.String()), + output, err := client.ListObjectsV2(ctx, &s3.ListObjectsV2Input{ + Bucket: aws.String(tempBucket), }) + assert.Nil(t, err) - // Traverse iterator deleting each object - if err := s3manager.NewBatchDeleteWithClient(svc).Delete(aws.BackgroundContext(), iter); err != nil { - log.Printf("Unable to delete objects from bucket %s, %s\n", tempBucket, err) + var objIds []types.ObjectIdentifier + for _, obj 
:= range output.Contents { + objIds = append(objIds, types.ObjectIdentifier{Key: obj.Key}) + } + input := s3.DeleteObjectsInput{ + Bucket: aws.String(tempBucket), + Delete: &types.Delete{ + Objects: objIds, + Quiet: aws.Bool(true), + }, } + client.DeleteObjects(ctx, &input) //nolint:errcheck } - return tempBucket.String(), cleanup + return tempBucket, cleanup } type ZipInput struct { @@ -239,32 +256,16 @@ type ZipInput struct { } func CreateZipsInS3(t *testing.T, zipInputs ...ZipInput) (string, func()) { - tempBucket := uuid.NewUUID() - endpoint := conf.GetEnv("BFD_S3_ENDPOINT") - - config := aws.Config{ - Region: aws.String("us-east-1"), - S3ForcePathStyle: aws.Bool(true), - Endpoint: &endpoint, - } - - sess, err := session.NewSessionWithOptions(session.Options{ - Config: config, - }) - - if err != nil { - t.Fatalf("Failed to create new session for S3: %s", err.Error()) - } - - svc := s3.New(sess) + ctx := context.Background() + tempBucket := uuid.NewUUID().String() - _, err = svc.CreateBucket(&s3.CreateBucketInput{ - Bucket: aws.String(tempBucket.String()), - }) + client := TestS3Client(t, TestAWSConfig(t)) - if err != nil { - t.Fatalf("Failed to create bucket %s: %s", tempBucket.String(), err.Error()) + bucketInput := &s3.CreateBucketInput{ + Bucket: aws.String(tempBucket), } + _, err := client.CreateBucket(ctx, bucketInput) + assert.Nil(t, err) for _, input := range zipInputs { var b bytes.Buffer @@ -279,10 +280,10 @@ func CreateZipsInS3(t *testing.T, zipInputs ...ZipInput) (string, func()) { assert.NoError(t, w.Close()) assert.NoError(t, f.Flush()) - uploader := s3manager.NewUploader(sess) + uploader := manager.NewUploader(client) - _, s3Err := s3manager.Uploader.Upload(*uploader, &s3manager.UploadInput{ - Bucket: aws.String(tempBucket.String()), + _, s3Err := uploader.Upload(ctx, &s3.PutObjectInput{ + Bucket: aws.String(tempBucket), Key: aws.String(input.ZipName), Body: bytes.NewReader(b.Bytes()), }) @@ -291,145 +292,72 @@ func CreateZipsInS3(t *testing.T, 
zipInputs ...ZipInput) (string, func()) { } cleanup := func() { - svc := s3.New(sess) - iter := s3manager.NewDeleteListIterator(svc, &s3.ListObjectsInput{ - Bucket: aws.String(tempBucket.String()), + output, err := client.ListObjectsV2(ctx, &s3.ListObjectsV2Input{ + Bucket: aws.String(tempBucket), }) + assert.Nil(t, err) - // Traverse iterator deleting each object - if err := s3manager.NewBatchDeleteWithClient(svc).Delete(aws.BackgroundContext(), iter); err != nil { - logrus.Printf("Unable to delete objects from bucket %s, %s\n", tempBucket, err) + var objIds []types.ObjectIdentifier + for _, obj := range output.Contents { + objIds = append(objIds, types.ObjectIdentifier{Key: obj.Key}) } + input := s3.DeleteObjectsInput{ + Bucket: aws.String(tempBucket), + Delete: &types.Delete{ + Objects: objIds, + Quiet: aws.Bool(true), + }, + } + client.DeleteObjects(ctx, &input) //nolint:errcheck } - return tempBucket.String(), cleanup -} - -func ListS3Objects(t *testing.T, bucket string, prefix string) []*s3.Object { - endpoint := conf.GetEnv("BFD_S3_ENDPOINT") - - config := aws.Config{ - Region: aws.String("us-east-1"), - S3ForcePathStyle: aws.Bool(true), - Endpoint: &endpoint, - } - - sess, err := session.NewSessionWithOptions(session.Options{ - Config: config, - }) - - if err != nil { - t.Fatalf("Failed to create new session for S3: %s", err.Error()) - } - - svc := s3.New(sess) - - fmt.Printf("Listing objects in bucket %s, prefix %s", bucket, prefix) - - resp, err := svc.ListObjects(&s3.ListObjectsInput{ - Bucket: aws.String(bucket), - Prefix: aws.String(prefix), - }) - - if err != nil { - t.Fatalf("Failed to list objects in S3 bucket %s, prefix %s: %s", bucket, prefix, err) - } - - return resp.Contents + return tempBucket, cleanup } // Inserts the provided parameter into localstack. 
-func PutParameter(t *testing.T, input *ssm.PutParameterInput) error { - endpoint := conf.GetEnv("LOCAL_STACK_ENDPOINT") - - config := aws.Config{ - Region: aws.String("us-east-1"), - S3ForcePathStyle: aws.Bool(true), - Endpoint: &endpoint, - } - - sess, err := session.NewSessionWithOptions(session.Options{ - Config: config, - }) - - if err != nil { - t.Fatalf("Failed to create new session for SSM: %s", err.Error()) - } - - fmt.Printf("Inserting parameter %s with value %s\n", *input.Name, *input.Value) +func putParameter(t *testing.T, input ssm.PutParameterInput) error { + ctx := context.Background() - svc := ssm.New(sess) - _, err = svc.PutParameter(input) + cfg, err := config.LoadDefaultConfig(ctx, + config.WithRegion(constants.DefaultRegion), + ) + assert.Nil(t, err) + client := ssm.NewFromConfig(cfg) - if err != nil { - t.Fatalf("Failed to insert parameter %s with value %s: %s\n", *input.Name, *input.Value, err) - } + _, err = client.PutParameter(ctx, &input) + assert.Nil(t, err) return nil } // Deletes the provided parameters from localstack. 
-func DeleteParameters(t *testing.T, input *ssm.DeleteParametersInput) error { - endpoint := conf.GetEnv("LOCAL_STACK_ENDPOINT") - - config := aws.Config{ - Region: aws.String("us-east-1"), - S3ForcePathStyle: aws.Bool(true), - Endpoint: &endpoint, - } - - sess, err := session.NewSessionWithOptions(session.Options{ - Config: config, - }) - - if err != nil { - t.Fatalf("Failed to create new session for SSM: %s", err.Error()) - } - - fmt.Printf("Deleting parameters from parameter store\n") +func deleteParameters(t *testing.T, input ssm.DeleteParametersInput) error { + ctx := context.Background() - svc := ssm.New(sess) - _, err = svc.DeleteParameters(input) + cfg, err := config.LoadDefaultConfig(ctx, + config.WithRegion(constants.DefaultRegion), + ) + assert.Nil(t, err) + client := ssm.NewFromConfig(cfg) - if err != nil { - t.Fatalf("Failed to delete parameters: %s", err) - } + _, err = client.DeleteParameters(ctx, &input) + assert.Nil(t, err) return nil } -type AwsParameter struct { - Name string - Value string - Type string -} - // Insert all given parameters into localstack and return a method for deferring cleanup. 
-func SetParameters(t *testing.T, params []AwsParameter) func() { - var paramKeys []*string - - for _, paramInput := range params { - err := PutParameter(t, &ssm.PutParameterInput{ - Name: ¶mInput.Name, - Value: ¶mInput.Value, - Type: ¶mInput.Type, - }) - - assert.Nil(t, err) - - name := paramInput.Name - paramKeys = append(paramKeys, &name) - } +func SetParameter(t *testing.T, name, value string) func() { + err := putParameter(t, ssm.PutParameterInput{ + Name: &name, + Value: &value, + Type: "String", + }) + assert.Nil(t, err) cleanup := func() { - if len(paramKeys) > 0 { - for _, paramInput := range paramKeys { - fmt.Printf("Deleting %s\n", *paramInput) - } - - err := DeleteParameters(t, &ssm.DeleteParametersInput{Names: paramKeys}) - assert.Nil(t, err) - } + err := deleteParameters(t, ssm.DeleteParametersInput{Names: []string{name}}) + assert.Nil(t, err) } return cleanup diff --git a/bcdaworker/queueing/manager.go b/bcdaworker/queueing/manager.go index af94123ad..6b69449b6 100644 --- a/bcdaworker/queueing/manager.go +++ b/bcdaworker/queueing/manager.go @@ -2,16 +2,21 @@ package queueing import ( "context" + "database/sql" goerrors "errors" "fmt" "time" + bcdaaws "github.com/CMSgov/bcda-app/bcda/aws" "github.com/CMSgov/bcda-app/bcda/models" "github.com/CMSgov/bcda-app/bcda/utils" "github.com/CMSgov/bcda-app/bcdaworker/queueing/worker_types" "github.com/CMSgov/bcda-app/bcdaworker/repository" "github.com/CMSgov/bcda-app/bcdaworker/worker" + "github.com/CMSgov/bcda-app/conf" "github.com/CMSgov/bcda-app/log" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/cloudwatch/types" "github.com/ccoveille/go-safecast" pgxv5 "github.com/jackc/pgx/v5" "github.com/pkg/errors" @@ -134,3 +139,32 @@ func checkIfCancelled( } } } + +// Update the AWS Cloudwatch Metric for job queue count +func updateJobQueueCountCloudwatchMetric(ctx context.Context, db *sql.DB, log logrus.FieldLogger) { + cloudWatchEnv := conf.GetEnv("DEPLOYMENT_TARGET") + if cloudWatchEnv 
!= "" { + err := bcdaaws.PutMetricSample( + ctx, + "JobQueueCount", + "BCDA", + "Count", + getQueueJobCount(db, log), + []types.Dimension{{Name: aws.String("Environment"), Value: aws.String(cloudWatchEnv)}}, + ) + if err != nil { + log.Error(err) + } + } +} + +func getQueueJobCount(db *sql.DB, log logrus.FieldLogger) float64 { + row := db.QueryRow(`SELECT COUNT(*) FROM river_job WHERE state NOT IN ('completed', 'cancelled', 'discarded');`) + + var count int + if err := row.Scan(&count); err != nil { + log.Error(err) + } + + return float64(count) +} diff --git a/bcdaworker/queueing/river.go b/bcdaworker/queueing/river.go index d1931c68b..629d95cc3 100644 --- a/bcdaworker/queueing/river.go +++ b/bcdaworker/queueing/river.go @@ -18,7 +18,6 @@ package queueing import ( "context" "database/sql" - "fmt" "log/slog" "os" "path/filepath" @@ -27,7 +26,6 @@ import ( bcdaaws "github.com/CMSgov/bcda-app/bcda/aws" "github.com/CMSgov/bcda-app/bcda/constants" "github.com/CMSgov/bcda-app/bcda/database" - "github.com/CMSgov/bcda-app/bcda/metrics" "github.com/CMSgov/bcda-app/bcda/utils" "github.com/CMSgov/bcda-app/bcdaworker/queueing/worker_types" "github.com/CMSgov/bcda-app/bcdaworker/repository/postgres" @@ -147,36 +145,6 @@ func (q queue) StopRiver() { } } -// TODO: once we remove que library and upgrade to pgx5 we can move the below functions into manager -// Update the AWS Cloudwatch Metric for job queue count -func updateJobQueueCountCloudwatchMetric(db *sql.DB, log logrus.FieldLogger) { - cloudWatchEnv := conf.GetEnv("DEPLOYMENT_TARGET") - if cloudWatchEnv != "" { - sampler, err := metrics.NewSampler("BCDA", "Count") - if err != nil { - fmt.Println("Warning: failed to create new metric sampler...") - } else { - err := sampler.PutSample("JobQueueCount", getQueueJobCount(db, log), []metrics.Dimension{ - {Name: "Environment", Value: cloudWatchEnv}, - }) - if err != nil { - log.Error(err) - } - } - } -} - -func getQueueJobCount(db *sql.DB, log logrus.FieldLogger) float64 { - row 
:= db.QueryRow(`SELECT COUNT(*) FROM river_job WHERE state NOT IN ('completed', 'cancelled', 'discarded');`) - - var count int - if err := row.Scan(&count); err != nil { - log.Error(err) - } - - return float64(count) -} - func getCutOffTime() time.Time { cutoff := time.Now().Add(-time.Hour * time.Duration(utils.GetEnvInt("ARCHIVE_THRESHOLD_HR", 24))) return cutoff diff --git a/bcdaworker/queueing/river_test.go b/bcdaworker/queueing/river_test.go index cd0832836..c9d35f1d4 100644 --- a/bcdaworker/queueing/river_test.go +++ b/bcdaworker/queueing/river_test.go @@ -206,7 +206,7 @@ func TestGetAWSParams(t *testing.T) { expectedToken := "local-token" conf.SetEnv(t, "workflow-alerts", expectedToken) - token, err := getAWSParams() + token, err := getAWSParams(t.Context()) assert.NoError(t, err) assert.Equal(t, expectedToken, token) }) diff --git a/bcdaworker/queueing/worker_process_job.go b/bcdaworker/queueing/worker_process_job.go index 8f933e382..728a7eee1 100644 --- a/bcdaworker/queueing/worker_process_job.go +++ b/bcdaworker/queueing/worker_process_job.go @@ -36,7 +36,7 @@ func (w *JobWorker) Work(ctx context.Context, rjob *river.Job[worker_types.JobEn workerInstance := worker.NewWorker(mainDB) repo := postgres.NewRepository(mainDB) - defer updateJobQueueCountCloudwatchMetric(mainDB, logger) + defer updateJobQueueCountCloudwatchMetric(ctx, mainDB, logger) exportJob, err, ackJob := validateJob(ctx, ValidateJobConfig{ WorkerInstance: workerInstance, diff --git a/conf/config.go b/conf/config.go index 45fec3b90..7fe122fa7 100644 --- a/conf/config.go +++ b/conf/config.go @@ -99,7 +99,6 @@ func LoadLambdaEnvVars(env string) { var configPaths []string configPaths = append(configPaths, envPath) envVars, state = loadConfigs(configPaths...) - } // This is the private helper function that sets up viper. 
This function is diff --git a/docker-compose.test.yml b/docker-compose.test.yml index 996abb5fc..3b51d7867 100644 --- a/docker-compose.test.yml +++ b/docker-compose.test.yml @@ -44,7 +44,7 @@ services: environment: # - AWS_ENDPOINT_URL=http://localstack:4566 # - GATEWAY_LISTEN=0.0.0.0:4566 - - SERVICES=s3,ssm,sts,iam + - SERVICES=s3,ssm,sts,iam,cloudwatch - DEBUG=1 ports: - "127.0.0.1:4566:4566" # LocalStack Gateway diff --git a/go.mod b/go.mod index 3ece4c539..20088d9d3 100644 --- a/go.mod +++ b/go.mod @@ -40,9 +40,10 @@ require ( ) require ( - github.com/aws/aws-sdk-go-v2 v1.39.3 + github.com/aws/aws-sdk-go-v2 v1.39.4 github.com/aws/aws-sdk-go-v2/config v1.31.14 github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.19.13 + github.com/aws/aws-sdk-go-v2/service/cloudwatch v1.51.4 github.com/aws/aws-sdk-go-v2/service/s3 v1.88.6 github.com/aws/aws-sdk-go-v2/service/ssm v1.66.1 github.com/ccoveille/go-safecast v1.6.1 @@ -61,8 +62,8 @@ require ( github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.2 // indirect github.com/aws/aws-sdk-go-v2/credentials v1.18.18 // indirect github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.10 // indirect - github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.10 // indirect - github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.10 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.11 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.11 // indirect github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.10 // indirect github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.2 // indirect diff --git a/go.sum b/go.sum index 6ccdb8af5..9cac29e1e 100644 --- a/go.sum +++ b/go.sum @@ -116,8 +116,8 @@ github.com/aws/aws-lambda-go v1.49.0/go.mod h1:dpMpZgvWx5vuQJfBt0zqBha60q7Dd7Rfg github.com/aws/aws-sdk-go v1.28.8/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go v1.55.8 
h1:JRmEUbU52aJQZ2AjX4q4Wu7t4uZjOu71uyNmaWlUkJQ= github.com/aws/aws-sdk-go v1.55.8/go.mod h1:ZkViS9AqA6otK+JBBNH2++sx1sgxrPKcSzPPvQkUtXk= -github.com/aws/aws-sdk-go-v2 v1.39.3 h1:h7xSsanJ4EQJXG5iuW4UqgP7qBopLpj84mpkNx3wPjM= -github.com/aws/aws-sdk-go-v2 v1.39.3/go.mod h1:yWSxrnioGUZ4WVv9TgMrNUeLV3PFESn/v+6T/Su8gnM= +github.com/aws/aws-sdk-go-v2 v1.39.4 h1:qTsQKcdQPHnfGYBBs+Btl8QwxJeoWcOcPcixK90mRhg= +github.com/aws/aws-sdk-go-v2 v1.39.4/go.mod h1:yWSxrnioGUZ4WVv9TgMrNUeLV3PFESn/v+6T/Su8gnM= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.2 h1:t9yYsydLYNBk9cJ73rgPhPWqOh/52fcWDQB5b1JsKSY= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.2/go.mod h1:IusfVNTmiSN3t4rhxWFaBAqn+mcNdwKtPcV16eYdgko= github.com/aws/aws-sdk-go-v2/config v1.31.14 h1:kj/KpDqvt0UqcEL3WOvCykE9QUpBb6b23hQdnXe+elo= @@ -128,14 +128,16 @@ github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.10 h1:UuGVOX48oP4vgQ36oiKmW9 github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.10/go.mod h1:vM/Ini41PzvudT4YkQyE/+WiQJiQ6jzeDyU8pQKwCac= github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.19.13 h1:9XV2TkOvCs6Fis10b4scQbv/eDPhklhU/65GikPxXAA= github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.19.13/go.mod h1:X5gq64GsjuOIJRIUzR3x3Du96zUF+U1if3Qw/qNx1k8= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.10 h1:mj/bdWleWEh81DtpdHKkw41IrS+r3uw1J/VQtbwYYp8= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.10/go.mod h1:7+oEMxAZWP8gZCyjcm9VicI0M61Sx4DJtcGfKYv2yKQ= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.10 h1:wh+/mn57yhUrFtLIxyFPh2RgxgQz/u+Yrf7hiHGHqKY= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.10/go.mod h1:7zirD+ryp5gitJJ2m1BBux56ai8RIRDykXZrJSp540w= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.11 h1:7AANQZkF3ihM8fbdftpjhken0TP9sBzFbV/Ze/Y4HXA= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.11/go.mod h1:NTF4QCGkm6fzVwncpkFQqoquQyOolcyXfbpC98urj+c= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.11 
h1:ShdtWUZT37LCAA4Mw2kJAJtzaszfSHFb5n25sdcv4YE= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.11/go.mod h1:7bUb2sSr2MZ3M/N+VyETLTQtInemHXb/Fl3s8CLzm0Y= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 h1:WKuaxf++XKWlHWu9ECbMlha8WOEGm0OUEZqm4K/Gcfk= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4/go.mod h1:ZWy7j6v1vWGmPReu0iSGvRiise4YI5SkR3OHKTZ6Wuc= github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.10 h1:FHw90xCTsofzk6vjU808TSuDtDfOOKPNdz5Weyc3tUI= github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.10/go.mod h1:n8jdIE/8F3UYkg8O4IGkQpn2qUmapg/1K1yl29/uf/c= +github.com/aws/aws-sdk-go-v2/service/cloudwatch v1.51.4 h1:/XGR3fYTRE1zQiepHO1NIIMVN8u/WR/uei41rh7IEMw= +github.com/aws/aws-sdk-go-v2/service/cloudwatch v1.51.4/go.mod h1:Gt6Vp7huej9kFI8bmZd0ZkPeFn29GrQPkJoFN2b7h3A= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.2 h1:xtuxji5CS0JknaXoACOunXOYOQzgfTvGAc9s2QdCJA4= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.2/go.mod h1:zxwi0DIR0rcRcgdbl7E2MSOvxDyyXGBlScvBkARFaLQ= github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.1 h1:ne+eepnDB2Wh5lHKzELgEncIqeVlQ1rSF9fEa4r5I+A= diff --git a/ops/services/root.tofu.tf b/ops/services/root.tofu.tf index fb48b7b24..15fff42b8 100644 --- a/ops/services/root.tofu.tf +++ b/ops/services/root.tofu.tf @@ -11,7 +11,7 @@ variable "env" { } variable "region" { - default = "us-east-1" + default = constants.DefaultRegion nullable = false type = string } diff --git a/optout/file_handler.go b/optout/file_handler.go index 5ee4ad312..389cddd17 100644 --- a/optout/file_handler.go +++ b/optout/file_handler.go @@ -2,6 +2,7 @@ package optout import ( "bufio" + "context" ) // File handlers can load opt out files from a given source and can optionally clean them up afterwards. @@ -11,10 +12,10 @@ type OptOutFileHandler interface { // // Return a list of metadata parsed from valid filenames, // and the number of files skipped due to unknown filenames. 
- LoadOptOutFiles(path string) (suppressList *[]*OptOutFilenameMetadata, skipped int, err error) + LoadOptOutFiles(ctx context.Context, path string) (suppressList *[]*OptOutFilenameMetadata, skipped int, err error) // Cleanup any opt out files that were successfully imported, and handle // any files that failed to be imported. - CleanupOptOutFiles(suppressList []*OptOutFilenameMetadata) error + CleanupOptOutFiles(ctx context.Context, suppressList []*OptOutFilenameMetadata) error // Open a given opt out file, specified by the metadata struct. - OpenFile(metadata *OptOutFilenameMetadata) (*bufio.Scanner, func(), error) + OpenFile(ctx context.Context, metadata *OptOutFilenameMetadata) (*bufio.Scanner, func(), error) } diff --git a/optout/local_file_handler.go b/optout/local_file_handler.go index 1c5220fd6..53afb5b5a 100644 --- a/optout/local_file_handler.go +++ b/optout/local_file_handler.go @@ -2,12 +2,14 @@ package optout import ( "bufio" + "context" "fmt" - "github.com/ccoveille/go-safecast" "os" "path/filepath" "time" + "github.com/ccoveille/go-safecast" + "github.com/pkg/errors" "github.com/sirupsen/logrus" ) @@ -20,13 +22,13 @@ type LocalFileHandler struct { FileArchiveThresholdHr uint } -func (handler *LocalFileHandler) LoadOptOutFiles(path string) (suppressList *[]*OptOutFilenameMetadata, skipped int, err error) { +func (handler *LocalFileHandler) LoadOptOutFiles(ctx context.Context, path string) (suppressList *[]*OptOutFilenameMetadata, skipped int, err error) { var result []*OptOutFilenameMetadata - err = filepath.Walk(path, handler.getOptOutFileMetadata(&result, &skipped)) + err = filepath.Walk(path, handler.getOptOutFileMetadata(ctx, &result, &skipped)) return &result, skipped, err } -func (handler *LocalFileHandler) getOptOutFileMetadata(suppresslist *[]*OptOutFilenameMetadata, skipped *int) filepath.WalkFunc { +func (handler *LocalFileHandler) getOptOutFileMetadata(ctx context.Context, suppresslist *[]*OptOutFilenameMetadata, skipped *int) 
filepath.WalkFunc { return func(path string, info os.FileInfo, err error) error { if err != nil { var fileName = "nil" @@ -78,7 +80,7 @@ func (handler *LocalFileHandler) getOptOutFileMetadata(suppresslist *[]*OptOutFi } } -func (handler *LocalFileHandler) OpenFile(metadata *OptOutFilenameMetadata) (*bufio.Scanner, func(), error) { +func (handler *LocalFileHandler) OpenFile(ctx context.Context, metadata *OptOutFilenameMetadata) (*bufio.Scanner, func(), error) { f, err := os.Open(metadata.FilePath) if err != nil { fmt.Printf("Could not read file %s.\n", metadata) @@ -95,7 +97,7 @@ func (handler *LocalFileHandler) OpenFile(metadata *OptOutFilenameMetadata) (*bu }, nil } -func (handler *LocalFileHandler) CleanupOptOutFiles(suppresslist []*OptOutFilenameMetadata) error { +func (handler *LocalFileHandler) CleanupOptOutFiles(ctx context.Context, suppresslist []*OptOutFilenameMetadata) error { errCount := 0 for _, suppressionFile := range suppresslist { fmt.Printf("Cleaning up file %s.\n", suppressionFile) diff --git a/optout/s3_file_handler.go b/optout/s3_file_handler.go index eb33573aa..cf114bfda 100644 --- a/optout/s3_file_handler.go +++ b/optout/s3_file_handler.go @@ -8,25 +8,20 @@ import ( "time" "github.com/aws/aws-sdk-go-v2/aws" - "github.com/aws/aws-sdk-go-v2/config" "github.com/aws/aws-sdk-go-v2/feature/s3/manager" "github.com/aws/aws-sdk-go-v2/service/s3" s3types "github.com/aws/aws-sdk-go-v2/service/s3/types" - "github.com/aws/aws-sdk-go/aws/session" "github.com/sirupsen/logrus" ) // S3FileHandler manages files located on AWS S3. type S3FileHandler struct { - Ctx context.Context Client *s3.Client Logger logrus.FieldLogger // Optional S3 endpoint to use for connection. Endpoint string // Optional role to assume when connecting to S3. AssumeRoleArn string - // AWS session, created once and cached here. 
- Session *session.Session } // Define logger functions to ensure that logs get sent to: @@ -44,12 +39,11 @@ func (handler *S3FileHandler) Errorf(format string, rest ...interface{}) { handler.Logger.Errorf(format, rest...) } -func (handler *S3FileHandler) LoadOptOutFiles(path string) (suppressList *[]*OptOutFilenameMetadata, skipped int, err error) { +func (handler *S3FileHandler) LoadOptOutFiles(ctx context.Context, path string) (suppressList *[]*OptOutFilenameMetadata, skipped int, err error) { var result []*OptOutFilenameMetadata bucket, prefix := ParseS3Uri(path) - s3Objects, err := handler.ListFiles(bucket, prefix) - + s3Objects, err := handler.ListFiles(ctx, bucket, prefix) if err != nil { return &result, skipped, err } @@ -72,17 +66,10 @@ func (handler *S3FileHandler) LoadOptOutFiles(path string) (suppressList *[]*Opt return &result, skipped, err } -func (handler *S3FileHandler) ListFiles(bucket, prefix string) (objects []s3types.Object, err error) { - cfg, err := config.LoadDefaultConfig(handler.Ctx) - if err != nil { - return nil, err - } - - client := s3.NewFromConfig(cfg) - +func (handler *S3FileHandler) ListFiles(ctx context.Context, bucket, prefix string) (objects []s3types.Object, err error) { handler.Infof("Listing objects in bucket %s, prefix %s\n", bucket, prefix) - resp, err := client.ListObjects(handler.Ctx, &s3.ListObjectsInput{ + resp, err := handler.Client.ListObjects(ctx, &s3.ListObjectsInput{ Bucket: aws.String(bucket), Prefix: aws.String(prefix), }) @@ -95,9 +82,8 @@ func (handler *S3FileHandler) ListFiles(bucket, prefix string) (objects []s3type return resp.Contents, nil } -func (handler *S3FileHandler) OpenFile(metadata *OptOutFilenameMetadata) (*bufio.Scanner, func(), error) { - byte_arr, err := handler.OpenFileBytes(metadata.FilePath) - +func (handler *S3FileHandler) OpenFile(ctx context.Context, metadata *OptOutFilenameMetadata) (*bufio.Scanner, func(), error) { + byte_arr, err := handler.OpenFileBytes(ctx, metadata.FilePath) if err != 
nil { handler.Errorf("Failed to download %s\n", metadata.FilePath) return nil, nil, err @@ -107,15 +93,16 @@ func (handler *S3FileHandler) OpenFile(metadata *OptOutFilenameMetadata) (*bufio return sc, func() {}, err } -func (handler *S3FileHandler) OpenFileBytes(filePath string) ([]byte, error) { +func (handler *S3FileHandler) OpenFileBytes(ctx context.Context, filePath string) ([]byte, error) { handler.Infof("Opening file %s\n", filePath) bucket, file := ParseS3Uri(filePath) input := &s3.HeadObjectInput{ Bucket: aws.String(bucket), - Key: aws.String(filePath), + Key: aws.String(file), } - output, err := handler.Client.HeadObject(handler.Ctx, input) + + output, err := handler.Client.HeadObject(ctx, input) if err != nil { return nil, err } @@ -124,11 +111,10 @@ func (handler *S3FileHandler) OpenFileBytes(filePath string) ([]byte, error) { w := manager.NewWriteAtBuffer(buff) downloader := manager.NewDownloader(handler.Client) - numBytes, err := downloader.Download(handler.Ctx, w, &s3.GetObjectInput{ + numBytes, err := downloader.Download(ctx, w, &s3.GetObjectInput{ Bucket: aws.String(bucket), Key: aws.String(file), }) - if err != nil { return nil, err } @@ -138,8 +124,9 @@ func (handler *S3FileHandler) OpenFileBytes(filePath string) ([]byte, error) { return buff, err } -func (handler *S3FileHandler) CleanupOptOutFiles(suppresslist []*OptOutFilenameMetadata) error { +func (handler *S3FileHandler) CleanupOptOutFiles(ctx context.Context, suppresslist []*OptOutFilenameMetadata) error { errCount := 0 + for _, suppressionFile := range suppresslist { if !suppressionFile.Imported { // Don't do anything. 
The S3 bucket should have a retention policy that @@ -149,7 +136,7 @@ func (handler *S3FileHandler) CleanupOptOutFiles(suppresslist []*OptOutFilenameM } handler.Infof("Cleaning up file %s\n", suppressionFile) - err := handler.Delete(suppressionFile.FilePath) + err := handler.Delete(ctx, suppressionFile.FilePath) if err != nil { errCount++ @@ -166,10 +153,10 @@ func (handler *S3FileHandler) CleanupOptOutFiles(suppresslist []*OptOutFilenameM return nil } -func (handler *S3FileHandler) Delete(filePath string) error { +func (handler *S3FileHandler) Delete(ctx context.Context, filePath string) error { bucket, path := ParseS3Uri(filePath) - _, err := handler.Client.DeleteObject(handler.Ctx, &s3.DeleteObjectInput{ + _, err := handler.Client.DeleteObject(ctx, &s3.DeleteObjectInput{ Bucket: aws.String(bucket), Key: aws.String(path), }) @@ -178,7 +165,7 @@ func (handler *S3FileHandler) Delete(filePath string) error { return err } else { err = s3.NewObjectNotExistsWaiter(handler.Client).Wait( - handler.Ctx, + ctx, &s3.HeadObjectInput{ Bucket: aws.String(bucket), Key: aws.String(path), From 32a41450dc1fa774d8c3454e82ce30e67b6c5297 Mon Sep 17 00:00:00 2001 From: carlpartridge Date: Tue, 28 Oct 2025 13:27:08 -0400 Subject: [PATCH 07/16] Cleanup --- Makefile | 10 ++++----- bcda/constants/test_constants.go | 3 ++- bcda/database/config_test.go | 38 -------------------------------- bcda/lambda/cclf/main_test.go | 1 - docker-compose.test.yml | 3 --- 5 files changed, 7 insertions(+), 48 deletions(-) diff --git a/Makefile b/Makefile index cdc4cf77b..b70e4681a 100644 --- a/Makefile +++ b/Makefile @@ -151,11 +151,11 @@ load-synthetic-cclf-data: docker compose run --rm api sh -c "bcda import-synthetic-cclf-package --acoSize='$$IMPROVED_SIZE' --environment='improved' --fileType='runout' " ; \ done -load-synthetic-suppression-data: - docker compose run api sh -c 'bcda import-suppression-directory --directory=../shared_files/synthetic1800MedicareFiles' - # Update the suppression entries to 
guarantee there are qualified entries when searching for suppressed benes. - # See postgres#GetSuppressedMBIs for more information - docker compose exec -T db sh -c 'PGPASSWORD=$$POSTGRES_PASSWORD psql -v ON_ERROR_STOP=1 $$POSTGRES_DB postgres -c "UPDATE suppressions SET effective_date = now(), preference_indicator = '"'"'N'"'"' WHERE effective_date = (SELECT max(effective_date) FROM suppressions);"' +# load-synthetic-suppression-data: +# docker compose run api sh -c 'bcda import-suppression-directory --directory=../shared_files/synthetic1800MedicareFiles' +# # Update the suppression entries to guarantee there are qualified entries when searching for suppressed benes. +# # See postgres#GetSuppressedMBIs for more information +# docker compose exec -T db sh -c 'PGPASSWORD=$$POSTGRES_PASSWORD psql -v ON_ERROR_STOP=1 $$POSTGRES_DB postgres -c "UPDATE suppressions SET effective_date = now(), preference_indicator = '"'"'N'"'"' WHERE effective_date = (SELECT max(effective_date) FROM suppressions);"' load-fixtures-ssas: docker compose up -d db diff --git a/bcda/constants/test_constants.go b/bcda/constants/test_constants.go index dca78579e..638464a85 100644 --- a/bcda/constants/test_constants.go +++ b/bcda/constants/test_constants.go @@ -61,7 +61,8 @@ const CreateACOID = "create-aco" const GenClientCred = "generate-client-credentials" const ResetClientCred = "reset-client-credentials" // #nosec - G101 credentials for unit testing const ArchJobFiles = "archive-job-files" -const ImportSupDir = "import-suppression-directory" + +// const ImportSupDir = "import-suppression-directory" const DirectoryArg = "--directory" const FileSourceArg = "--filesource" const S3EndpointArg = "--s3endpoint" diff --git a/bcda/database/config_test.go b/bcda/database/config_test.go index 0f77540ef..e80364616 100644 --- a/bcda/database/config_test.go +++ b/bcda/database/config_test.go @@ -43,41 +43,3 @@ func (s *DatabaseConfigSuite) TestLoadConfigMissingDatabaseUrl() { assert.Nil(cfg) 
assert.Contains(err.Error(), "invalid config, DatabaseURL must be set") } - -// func (s *DatabaseConfigSuite) TestLoadConfigFromParameterStoreSuccess() { -// assert := assert.New(s.T()) - -// env := uuid.NewUUID() -// cleanupEnv := testUtils.SetEnvVars(s.T(), []testUtils.EnvVar{ -// {Name: "ENV", Value: env.String()}, -// {Name: "DATABASE_URL", Value: ""}, -// }) -// defer cleanupEnv() - -// cleanupParams := testUtils.SetParameter(s.T(), fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env), "my-super-secure-database-url") -// // {Name: , Value: "my-super-secure-database-url", Type: "SecureString"}, -// // }) -// defer cleanupParams() - -// cfg, err := LoadConfig() -// assert.Nil(err) -// assert.Equal("my-super-secure-database-url", cfg.DatabaseURL) -// } - -// func (s *DatabaseConfigSuite) TestLoadConfigFromParameterStoreMissingDatabaseUrl() { -// assert := assert.New(s.T()) - -// env := uuid.NewUUID() -// cleanupEnv := testUtils.SetEnvVars(s.T(), []testUtils.EnvVar{ -// {Name: "ENV", Value: env.String()}, -// {Name: "DATABASE_URL", Value: ""}, -// }) -// defer cleanupEnv() - -// cleanupParams := testUtils.SetParameter(s.T(), "", "") -// defer cleanupParams() - -// cfg, err := LoadConfig() -// assert.Nil(cfg) -// assert.Contains(err.Error(), fmt.Sprintf("invalid parameters error: /bcda/%s/api/DATABASE_URL", env)) -// } diff --git a/bcda/lambda/cclf/main_test.go b/bcda/lambda/cclf/main_test.go index da15a32ad..745761970 100644 --- a/bcda/lambda/cclf/main_test.go +++ b/bcda/lambda/cclf/main_test.go @@ -68,7 +68,6 @@ func (s *AttributionImportMainSuite) TestImportCCLFDirectory() { } for _, tc := range tests { - fmt.Printf("\n----- path: %s\n", tc.path) postgrestest.DeleteCCLFFilesByCMSID(s.T(), s.db, targetACO) defer postgrestest.DeleteCCLFFilesByCMSID(s.T(), s.db, targetACO) diff --git a/docker-compose.test.yml b/docker-compose.test.yml index 3b51d7867..201cd883e 100644 --- a/docker-compose.test.yml +++ b/docker-compose.test.yml @@ -15,7 +15,6 @@ services: - 
AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-foobar} - AWS_DEFAULT_REGION=us-east-1 - AWS_ENDPOINT_URL=http://localstack:4566 - # - LOCAL_STACK_ENDPOINT=${LOCAL_STACK_ENDPOINT:-http://localstack:4566} - BFD_S3_ENDPOINT=${BFD_S3_ENDPOINT:-http://localstack:4566} - ENV=local - DATABASE_URL=postgresql://postgres:toor@db-unit-test:5432/bcda_test?sslmode=disable @@ -42,8 +41,6 @@ services: localstack: image: localstack/localstack:latest environment: - # - AWS_ENDPOINT_URL=http://localstack:4566 - # - GATEWAY_LISTEN=0.0.0.0:4566 - SERVICES=s3,ssm,sts,iam,cloudwatch - DEBUG=1 ports: From 194520a91cd95b802747cd2c2f0a2699196b2d59 Mon Sep 17 00:00:00 2001 From: carlpartridge Date: Tue, 28 Oct 2025 14:13:34 -0400 Subject: [PATCH 08/16] Cleanup --- bcda/lambda/admin_aco_deny/main.go | 6 +++--- bcda/lambda/admin_create_aco/main.go | 6 +++--- bcda/lambda/admin_create_aco_creds/aws.go | 6 +++--- bcda/lambda/admin_create_group/main.go | 10 +++++----- bcda/suppression/suppression_s3_test.go | 3 +-- go.mod | 2 -- go.sum | 6 ------ go.work.sum | 24 ++++------------------- optout/s3_file_handler.go | 12 ++++++------ optout/utils.go | 2 +- 10 files changed, 26 insertions(+), 51 deletions(-) diff --git a/bcda/lambda/admin_aco_deny/main.go b/bcda/lambda/admin_aco_deny/main.go index ef75deafa..f3e39d47c 100644 --- a/bcda/lambda/admin_aco_deny/main.go +++ b/bcda/lambda/admin_aco_deny/main.go @@ -104,12 +104,12 @@ func getAWSParams(ctx context.Context) (awsParams, error) { ssmClient := ssm.NewFromConfig(cfg) dbURLName := fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env) - slackTokenName := "/slack/token/workflow-alerts" - paramNames := []string{slackTokenName, dbURLName} + slackParamName := "/slack/token/workflow-alerts" + paramNames := []string{slackParamName, dbURLName} params, err := bcdaaws.GetParameters(ctx, ssmClient, paramNames) if err != nil { return awsParams{}, err } - return awsParams{params[dbURLName], params[slackTokenName]}, nil + return awsParams{params[dbURLName], 
params[slackParamName]}, nil } diff --git a/bcda/lambda/admin_create_aco/main.go b/bcda/lambda/admin_create_aco/main.go index 832ff9692..50703ed8d 100644 --- a/bcda/lambda/admin_create_aco/main.go +++ b/bcda/lambda/admin_create_aco/main.go @@ -150,12 +150,12 @@ func getAWSParams(ctx context.Context) (awsParams, error) { ssmClient := ssm.NewFromConfig(cfg) dbURLName := fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env) - slackTokenName := "/slack/token/workflow-alerts" - paramNames := []string{slackTokenName, dbURLName} + slackParamName := "/slack/token/workflow-alerts" + paramNames := []string{slackParamName, dbURLName} params, err := bcdaaws.GetParameters(ctx, ssmClient, paramNames) if err != nil { return awsParams{}, err } - return awsParams{params[dbURLName], params[slackTokenName]}, nil + return awsParams{params[dbURLName], params[slackParamName]}, nil } diff --git a/bcda/lambda/admin_create_aco_creds/aws.go b/bcda/lambda/admin_create_aco_creds/aws.go index eba214c14..7d7a91a7e 100644 --- a/bcda/lambda/admin_create_aco_creds/aws.go +++ b/bcda/lambda/admin_create_aco_creds/aws.go @@ -25,7 +25,7 @@ func getAWSParams(ctx context.Context) (awsParams, error) { return awsParams{}, nil } - slackTokenName := "/slack/token/workflow-alerts" + slackParamName := "/slack/token/workflow-alerts" ssasURLName := fmt.Sprintf("/bcda/%s/api/SSAS_URL", env) clientIDName := fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_CLIENT_ID", env) clientSecretName := fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_SECRET", env) @@ -33,7 +33,7 @@ func getAWSParams(ctx context.Context) (awsParams, error) { credsBucketName := fmt.Sprintf("/bcda/%s/aco_creds_bucket", env) paramNames := []string{ - slackTokenName, + slackParamName, ssasURLName, clientIDName, clientSecretName, @@ -53,7 +53,7 @@ func getAWSParams(ctx context.Context) (awsParams, error) { } return awsParams{ - params[slackTokenName], + params[slackParamName], params[ssasURLName], params[clientIDName], params[clientSecretName], diff --git 
a/bcda/lambda/admin_create_group/main.go b/bcda/lambda/admin_create_group/main.go index 77922906e..831713166 100644 --- a/bcda/lambda/admin_create_group/main.go +++ b/bcda/lambda/admin_create_group/main.go @@ -148,13 +148,13 @@ func setupEnv(ctx context.Context) (string, error) { } ssmClient := ssm.NewFromConfig(cfg) - slackTokenName := "/slack/token/workflow-alerts" + slackParamName := "/slack/token/workflow-alerts" ssasURLName := fmt.Sprintf("/bcda/%s/api/SSAS_URL", env) ssasClientName := fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_CLIENT_ID", env) ssasSecretName := fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_SECRET", env) caFileName := fmt.Sprintf("/bcda/%s/api/BCDA_CA_FILE.pem", env) paramNames := []string{ - slackTokenName, + slackParamName, ssasURLName, ssasClientName, ssasSecretName, @@ -165,9 +165,9 @@ func setupEnv(ctx context.Context) (string, error) { return "", err } - err = os.Setenv(slackTokenName, params[slackTokenName]) + err = os.Setenv(slackParamName, params[slackParamName]) if err != nil { - log.Errorf("Error setting slackTokenName env var: %+v", err) + log.Errorf("Error setting slackParamName env var: %+v", err) return "", err } err = os.Setenv(ssasURLName, params[ssasURLName]) @@ -209,5 +209,5 @@ func setupEnv(ctx context.Context) (string, error) { return "", err } - return params[slackTokenName], nil + return params[slackParamName], nil } diff --git a/bcda/suppression/suppression_s3_test.go b/bcda/suppression/suppression_s3_test.go index f1488f562..b2497486c 100644 --- a/bcda/suppression/suppression_s3_test.go +++ b/bcda/suppression/suppression_s3_test.go @@ -294,10 +294,9 @@ func (s *SuppressionS3TestSuite) TestCleanupSuppression() { assert.Nil(err) client := testUtils.TestS3Client(s.T(), testUtils.TestAWSConfig(s.T())) - output, err := client.ListObjectsV2(s.T().Context(), &s3.ListObjectsV2Input{ + output, _ := client.ListObjectsV2(s.T().Context(), &s3.ListObjectsV2Input{ Bucket: aws.String(bucketName), }) - assert.Nil(s.T(), err) 
assert.True(len(output.Contents) == 0) } diff --git a/go.mod b/go.mod index 20088d9d3..645f57a2f 100644 --- a/go.mod +++ b/go.mod @@ -6,7 +6,6 @@ require ( github.com/BurntSushi/toml v0.4.1 github.com/DATA-DOG/go-sqlmock v1.5.0 github.com/aws/aws-lambda-go v1.49.0 - github.com/aws/aws-sdk-go v1.55.8 github.com/cenkalti/backoff/v4 v4.2.1 github.com/go-chi/chi/v5 v5.2.3 github.com/go-chi/render v1.0.1 @@ -147,7 +146,6 @@ require ( github.com/hashicorp/hcl v1.0.0 // indirect github.com/huandu/xstrings v1.3.2 // indirect github.com/influxdata/tdigest v0.0.1 // indirect - github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/jonboulle/clockwork v0.1.0 // indirect github.com/josharian/intern v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // indirect diff --git a/go.sum b/go.sum index 9cac29e1e..4b900db83 100644 --- a/go.sum +++ b/go.sum @@ -114,8 +114,6 @@ github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:l github.com/aws/aws-lambda-go v1.49.0 h1:z4VhTqkFZPM3xpEtTqWqRqsRH4TZBMJqTkRiBPYLqIQ= github.com/aws/aws-lambda-go v1.49.0/go.mod h1:dpMpZgvWx5vuQJfBt0zqBha60q7Dd7RfgJv23DymV8A= github.com/aws/aws-sdk-go v1.28.8/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= -github.com/aws/aws-sdk-go v1.55.8 h1:JRmEUbU52aJQZ2AjX4q4Wu7t4uZjOu71uyNmaWlUkJQ= -github.com/aws/aws-sdk-go v1.55.8/go.mod h1:ZkViS9AqA6otK+JBBNH2++sx1sgxrPKcSzPPvQkUtXk= github.com/aws/aws-sdk-go-v2 v1.39.4 h1:qTsQKcdQPHnfGYBBs+Btl8QwxJeoWcOcPcixK90mRhg= github.com/aws/aws-sdk-go-v2 v1.39.4/go.mod h1:yWSxrnioGUZ4WVv9TgMrNUeLV3PFESn/v+6T/Su8gnM= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.2 h1:t9yYsydLYNBk9cJ73rgPhPWqOh/52fcWDQB5b1JsKSY= @@ -592,10 +590,6 @@ github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jmespath/go-jmespath 
v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= -github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= -github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= -github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= -github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc= github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= github.com/jonboulle/clockwork v0.1.0 h1:VKV+ZcuP6l3yW9doeqz6ziZGgcynBVQO+obU0+0hcPo= diff --git a/go.work.sum b/go.work.sum index f44f4f68e..08482f338 100644 --- a/go.work.sum +++ b/go.work.sum @@ -472,32 +472,14 @@ github.com/armon/go-metrics v0.0.0-20190430140413-ec5e00d3c878 h1:EFSB7Zo9Eg91v7 github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a h1:idn718Q4B6AGu/h5Sxe66HYVdqdGu2l9Iebqhi/AEoA= +github.com/aws/aws-sdk-go v1.49.6 h1:yNldzF5kzLBRvKlKz1S0bkvc2+04R1kt13KfBWQBfFA= +github.com/aws/aws-sdk-go v1.49.6/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk= github.com/aws/aws-sdk-go-v2 v1.16.16 h1:M1fj4FE2lB4NzRb9Y0xdWsn2P0+2UHVxwKyOa4YJNjk= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.2 h1:t9yYsydLYNBk9cJ73rgPhPWqOh/52fcWDQB5b1JsKSY= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.2/go.mod h1:IusfVNTmiSN3t4rhxWFaBAqn+mcNdwKtPcV16eYdgko= -github.com/aws/aws-sdk-go-v2/config v1.31.14 h1:kj/KpDqvt0UqcEL3WOvCykE9QUpBb6b23hQdnXe+elo= -github.com/aws/aws-sdk-go-v2/config v1.31.14/go.mod h1:X5PaY6QCzViihn/ru7VxnIamcJQrG9NSeTxuSKm2YtU= github.com/aws/aws-sdk-go-v2/credentials v1.12.20 
h1:9+ZhlDY7N9dPnUmf7CDfW9In4sW5Ff3bh7oy4DzS1IE= -github.com/aws/aws-sdk-go-v2/credentials v1.18.18 h1:5AfxTvDN0AJoA7rg/yEc0sHhl6/B9fZ+NtiQuOjWGQM= -github.com/aws/aws-sdk-go-v2/credentials v1.18.18/go.mod h1:m9mE1mJ1s7zI6rrt7V3RQU2SCgUbNaphlfqEksLp+Fs= -github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.19.13 h1:9XV2TkOvCs6Fis10b4scQbv/eDPhklhU/65GikPxXAA= -github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.19.13/go.mod h1:X5gq64GsjuOIJRIUzR3x3Du96zUF+U1if3Qw/qNx1k8= github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.23 h1:s4g/wnzMf+qepSNgTvaQQHNxyMLKSawNhKCPNy++2xY= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.17 h1:/K482T5A3623WJgWT8w1yRAFK4RzGzEl7y39yhtn9eA= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.10 h1:FHw90xCTsofzk6vjU808TSuDtDfOOKPNdz5Weyc3tUI= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.10/go.mod h1:n8jdIE/8F3UYkg8O4IGkQpn2qUmapg/1K1yl29/uf/c= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.9 h1:Lh1AShsuIJTwMkoxVCAYPJgNG5H+eN6SmoUn8nOZ5wE= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.1 h1:ne+eepnDB2Wh5lHKzELgEncIqeVlQ1rSF9fEa4r5I+A= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.1/go.mod h1:u0Jkg0L+dcG1ozUq21uFElmpbmjBnhHR5DELHIme4wg= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.17 h1:Jrd/oMh0PKQc6+BowB+pLEwLIgaQF29eYbe7E1Av9Ug= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.10 h1:DA+Hl5adieRyFvE7pCvBWm3VOZTRexGVkXw33SUqNoY= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.10/go.mod h1:L+A89dH3/gr8L4ecrdzuXUYd1znoko6myzndVGZx/DA= -github.com/aws/aws-sdk-go-v2/service/s3 v1.88.6 h1:Hcb4yllr4GTOHC/BKjEklxWhciWMHIqzeCI9oYf1OIk= -github.com/aws/aws-sdk-go-v2/service/s3 v1.88.6/go.mod h1:N/iojY+8bW3MYol9NUMuKimpSbPEur75cuI1SmtonFM= -github.com/aws/aws-sdk-go-v2/service/ssm v1.66.1 h1:snE061FIWFZv4v8c9iJZ3Cvyu21wYDWy9oNmNHCd+Fc= -github.com/aws/aws-sdk-go-v2/service/ssm v1.66.1/go.mod h1:L5XWT5tckol5yKkYc8O2+jZBZgF/tFzVQ5QE00PJUjU= 
-github.com/aws/aws-sdk-go-v2/service/sts v1.38.8 h1:xSL4IV19pKDASL2fjWXRfTGmZddPiPPZNPpbv6uZQZY= -github.com/aws/aws-sdk-go-v2/service/sts v1.38.8/go.mod h1:L1xxV3zAdB+qVrVW/pBIrIAnHFWHo6FBbFe4xOGsG/o= github.com/aws/smithy-go v1.13.3 h1:l7LYxGuzK6/K+NzJ2mC+VvLUbae0sL3bXU//04MkmnA= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/bgentry/que-go v1.0.1 h1:M/cEPOU66X/YewE1rD1IdHjfM79jClXl0BHNWiF+l44= @@ -766,6 +748,8 @@ github.com/jackc/puddle v1.1.0 h1:musOWczZC/rSbqut475Vfcczg7jJsdUQf0D6oKPLgNU= github.com/jackc/puddle v1.3.0 h1:eHK/5clGOatcjX3oWGBO/MpxpbHzSwud5EWTSCI+MX0= github.com/jackc/puddle v1.3.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA= +github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmoiron/sqlx v1.2.0 h1:41Ip0zITnmWNR/vHV+S4m+VoUivnWY5E4OJfLZjCJMA= github.com/jstemmer/go-junit-report v0.9.1 h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o= github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= diff --git a/optout/s3_file_handler.go b/optout/s3_file_handler.go index cf114bfda..7f075dcb3 100644 --- a/optout/s3_file_handler.go +++ b/optout/s3_file_handler.go @@ -55,7 +55,7 @@ func (handler *S3FileHandler) LoadOptOutFiles(ctx context.Context, path string) if err != nil { // Skip files with a bad name. An unknown file in this dir isn't a blocker - handler.Warningf("Unknown file found: %s. Skipping.\n", metadata) + handler.Warningf("Unknown file found: %s. 
Skipping", metadata) skipped = skipped + 1 continue } @@ -75,7 +75,7 @@ func (handler *S3FileHandler) ListFiles(ctx context.Context, bucket, prefix stri }) if err != nil { - handler.Errorf("Failed to list objects in S3 bucket %s, prefix %s: %s\n", bucket, prefix, err) + handler.Errorf("Failed to list objects in S3 bucket %s, prefix %s: %s", bucket, prefix, err) return nil, err } @@ -119,7 +119,7 @@ func (handler *S3FileHandler) OpenFileBytes(ctx context.Context, filePath string return nil, err } - handler.Logger.WithField("file_size_bytes", numBytes).Infof("file downloaded: size=%d\n", numBytes) + handler.Logger.WithField("file_size_bytes", numBytes).Infof("file downloaded: size=%d", numBytes) return buff, err } @@ -131,7 +131,7 @@ func (handler *S3FileHandler) CleanupOptOutFiles(ctx context.Context, suppressli if !suppressionFile.Imported { // Don't do anything. The S3 bucket should have a retention policy that // automatically cleans up files after a specified period of time, - handler.Warningf("File %s was not imported successfully. Skipping cleanup.\n", suppressionFile) + handler.Warningf("File %s was not imported successfully. 
Skipping cleanup", suppressionFile) continue } @@ -143,11 +143,11 @@ func (handler *S3FileHandler) CleanupOptOutFiles(ctx context.Context, suppressli continue } - handler.Infof("File %s successfully ingested and deleted from S3.\n", suppressionFile) + handler.Infof("File %s successfully ingested and deleted from S3", suppressionFile) } if errCount > 0 { - return fmt.Errorf("%d files could not be cleaned up\n", errCount) + return fmt.Errorf("%d files could not be cleaned up", errCount) } return nil diff --git a/optout/utils.go b/optout/utils.go index 0600eb19e..e763da189 100644 --- a/optout/utils.go +++ b/optout/utils.go @@ -33,7 +33,7 @@ func ParseMetadata(filename string) (OptOutFilenameMetadata, error) { // ignore files for different environments if !IsForCurrentEnv(filename) { - return metadata, fmt.Errorf("Skipping file for different environment: %s", filename) + return metadata, fmt.Errorf("skipping file for different environment: %s", filename) } filenameDate := matches[3] From 5411a8fa69a049f3eae98785085b914e9bd9e948 Mon Sep 17 00:00:00 2001 From: carlpartridge Date: Tue, 28 Oct 2025 15:29:14 -0400 Subject: [PATCH 09/16] Fix tests --- bcda/testUtils/utils.go | 35 +++++++++++++++++++++-------------- optout/utils_test.go | 2 +- 2 files changed, 22 insertions(+), 15 deletions(-) diff --git a/bcda/testUtils/utils.go b/bcda/testUtils/utils.go index 5641021d0..5600cba9f 100644 --- a/bcda/testUtils/utils.go +++ b/bcda/testUtils/utils.go @@ -237,14 +237,17 @@ func CopyToS3(t *testing.T, src string) (string, func()) { for _, obj := range output.Contents { objIds = append(objIds, types.ObjectIdentifier{Key: obj.Key}) } - input := s3.DeleteObjectsInput{ - Bucket: aws.String(tempBucket), - Delete: &types.Delete{ - Objects: objIds, - Quiet: aws.Bool(true), - }, + if len(objIds) > 0 { + input := s3.DeleteObjectsInput{ + Bucket: aws.String(tempBucket), + Delete: &types.Delete{ + Objects: objIds, + Quiet: aws.Bool(true), + }, + } + _, err = client.DeleteObjects(ctx, &input) 
+ assert.Nil(t, err) } - client.DeleteObjects(ctx, &input) //nolint:errcheck } return tempBucket, cleanup @@ -301,14 +304,18 @@ func CreateZipsInS3(t *testing.T, zipInputs ...ZipInput) (string, func()) { for _, obj := range output.Contents { objIds = append(objIds, types.ObjectIdentifier{Key: obj.Key}) } - input := s3.DeleteObjectsInput{ - Bucket: aws.String(tempBucket), - Delete: &types.Delete{ - Objects: objIds, - Quiet: aws.Bool(true), - }, + + if len(objIds) > 0 { + input := s3.DeleteObjectsInput{ + Bucket: aws.String(tempBucket), + Delete: &types.Delete{ + Objects: objIds, + Quiet: aws.Bool(true), + }, + } + _, err = client.DeleteObjects(ctx, &input) + assert.Nil(t, err) } - client.DeleteObjects(ctx, &input) //nolint:errcheck } return tempBucket, cleanup diff --git a/optout/utils_test.go b/optout/utils_test.go index db9f5ea50..9d1314a40 100644 --- a/optout/utils_test.go +++ b/optout/utils_test.go @@ -66,7 +66,7 @@ func (s *OptOutTestSuite) TestParseMetadata_DifferentEnv() { }() _, err := ParseMetadata("blah/not-someenv/T#EFT.ON.ACO.NGD1800.DPRF.D191220.T2109420") - assert.EqualError(err, "Skipping file for different environment: blah/not-someenv/T#EFT.ON.ACO.NGD1800.DPRF.D191220.T2109420") + assert.EqualError(err, "skipping file for different environment: blah/not-someenv/T#EFT.ON.ACO.NGD1800.DPRF.D191220.T2109420") } func (s *OptOutTestSuite) TestParseMetadata_InvalidData() { From 928ce6e0fbf9651966bca8abf16bc00b0dafeb4d Mon Sep 17 00:00:00 2001 From: carlpartridge Date: Tue, 28 Oct 2025 16:46:55 -0400 Subject: [PATCH 10/16] Make sure db url is available for functions --- bcda/lambda/admin_create_group/main.go | 6 ++++++ bcda/lambda/cclf/main.go | 17 ++++++++++++++--- bcda/lambda/optout/main.go | 15 ++++++++++++++- docker-compose.test.yml | 2 +- 4 files changed, 35 insertions(+), 5 deletions(-) diff --git a/bcda/lambda/admin_create_group/main.go b/bcda/lambda/admin_create_group/main.go index 831713166..92e829b6b 100644 --- 
a/bcda/lambda/admin_create_group/main.go +++ b/bcda/lambda/admin_create_group/main.go @@ -149,6 +149,7 @@ func setupEnv(ctx context.Context) (string, error) { ssmClient := ssm.NewFromConfig(cfg) slackParamName := "/slack/token/workflow-alerts" + dbURLName := fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env) ssasURLName := fmt.Sprintf("/bcda/%s/api/SSAS_URL", env) ssasClientName := fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_CLIENT_ID", env) ssasSecretName := fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_SECRET", env) @@ -170,6 +171,11 @@ func setupEnv(ctx context.Context) (string, error) { log.Errorf("Error setting slackParamName env var: %+v", err) return "", err } + err = os.Setenv(dbURLName, params[dbURLName]) + if err != nil { + log.Errorf("Error setting dbURLName env var: %+v", err) + return "", err + } err = os.Setenv(ssasURLName, params[ssasURLName]) if err != nil { log.Errorf("Error setting ssasURLName env var: %+v", err) diff --git a/bcda/lambda/cclf/main.go b/bcda/lambda/cclf/main.go index 1ea9c3872..b8b8db17c 100644 --- a/bcda/lambda/cclf/main.go +++ b/bcda/lambda/cclf/main.go @@ -35,9 +35,8 @@ func attributionImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (st logger := configureLogger(env, appName) s3Event, err := bcdaaws.ParseSQSEvent(sqsEvent) - if err != nil { - logger.Errorf("Failed to parse S3 event: %v", err) + logger.Errorf("failed to parse S3 event: %v", err) return "", err } else if s3Event == nil { logger.Info("No S3 event found, skipping safely.") @@ -46,12 +45,24 @@ func attributionImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (st cfg, err := config.LoadDefaultConfig(ctx) if err != nil { - logger.Error("Failed to load Default Config") + logger.Error("failed to load Default Config") return "", err } ssmClient := ssm.NewFromConfig(cfg) s3Client := s3.NewFromConfig(cfg) + dbURL, err := bcdaaws.GetParameter(ctx, ssmClient, fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env)) + if err != nil { + logger.Error("failed to load DB URL") + return "", 
err + } + + err = os.Setenv("DATABASE_URL", dbURL) + if err != nil { + logger.Errorf("error setting dbURL env var: %+v", err) + return "", err + } + // Create pgx pool for bulk operations pool := database.ConnectPool() defer pool.Close() diff --git a/bcda/lambda/optout/main.go b/bcda/lambda/optout/main.go index 6d1dd4dcb..3f270a8a2 100644 --- a/bcda/lambda/optout/main.go +++ b/bcda/lambda/optout/main.go @@ -34,7 +34,6 @@ func optOutImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (string, env := conf.GetEnv("ENV") appName := conf.GetEnv("APP_NAME") logger := configureLogger(env, appName) - db := database.Connect() s3Event, err := bcdaaws.ParseSQSEvent(sqsEvent) if err != nil { @@ -53,6 +52,20 @@ func optOutImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (string, ssmClient := ssm.NewFromConfig(cfg) s3Client := s3.NewFromConfig(cfg) + dbURL, err := bcdaaws.GetParameter(ctx, ssmClient, fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env)) + if err != nil { + logger.Error("failed to load DB URL") + return "", err + } + + err = os.Setenv("DATABASE_URL", dbURL) + if err != nil { + logger.Errorf("error setting dbURL env var: %+v", err) + return "", err + } + + db := database.Connect() + for _, e := range s3Event.Records { if strings.Contains(e.EventName, "ObjectCreated") { dir := bcdaaws.ParseS3Directory(e.S3.Bucket.Name, e.S3.Object.Key) diff --git a/docker-compose.test.yml b/docker-compose.test.yml index 3ccf5bfbe..0a778a504 100644 --- a/docker-compose.test.yml +++ b/docker-compose.test.yml @@ -5,7 +5,7 @@ services: environment: - AWS_DEFAULT_REGION=us-east-1 - GATEWAY_LISTEN=0.0.0.0:4566 - - SERVICES=s3,ssm,sts,iam + - SERVICES=s3,ssm,sts,iam,cloudwatch - DEBUG=1 volumes: - "./.localstack_volume:/var/lib/localstack" From 348b03fae449d1f80fb1402a28cc85de48525e72 Mon Sep 17 00:00:00 2001 From: carlpartridge Date: Wed, 29 Oct 2025 12:13:58 -0400 Subject: [PATCH 11/16] Add s3 assume role for lambdas --- .golangci.yml | 5 ---- bcda/lambda/cclf/main.go | 47 
+++++++++++++++------------------ bcda/lambda/cclf/main_test.go | 20 +++----------- bcda/lambda/optout/main.go | 31 +++++++++++++--------- bcda/lambda/optout/main_test.go | 25 ++++-------------- docker-compose.yml | 2 +- go.mod | 4 +-- optout/s3_file_handler.go | 2 -- 8 files changed, 51 insertions(+), 85 deletions(-) diff --git a/.golangci.yml b/.golangci.yml index 4d614f33b..52035aaa5 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -23,11 +23,6 @@ linters: - linters: - errcheck text: "conf.UnsetEnv|conf.SetEnv" # these are used and unchecked in over 280 test files - # disable package deprecation for aws-sdk-go until https://jira.cms.gov/browse/BCDA-9484 is finished - - linters: - - staticcheck - text: "aws-sdk-go" - #- end disable for BCDA-9484 formatters: enable: - gofmt diff --git a/bcda/lambda/cclf/main.go b/bcda/lambda/cclf/main.go index b8b8db17c..678979a08 100644 --- a/bcda/lambda/cclf/main.go +++ b/bcda/lambda/cclf/main.go @@ -21,8 +21,10 @@ import ( "github.com/CMSgov/bcda-app/conf" "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/credentials/stscreds" "github.com/aws/aws-sdk-go-v2/service/s3" "github.com/aws/aws-sdk-go-v2/service/ssm" + "github.com/aws/aws-sdk-go-v2/service/sts" ) func main() { @@ -49,7 +51,18 @@ func attributionImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (st return "", err } ssmClient := ssm.NewFromConfig(cfg) - s3Client := s3.NewFromConfig(cfg) + + s3AssumeRoleArn, err := bcdaaws.GetParameter(ctx, ssmClient, fmt.Sprintf("/cclf-import/bcda/%s/bfd-bucket-role-arn", env)) + if err != nil { + logger.Errorf("error getting param: %+v", err) + return "", err + } + stsClient := sts.NewFromConfig(cfg) + appCreds := stscreds.NewAssumeRoleProvider(stsClient, s3AssumeRoleArn) + + s3Client := s3.NewFromConfig(cfg, func(o *s3.Options) { + o.Credentials = appCreds + }) dbURL, err := bcdaaws.GetParameter(ctx, ssmClient, fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env)) if err != nil { @@ -74,9 +87,9 @@ func 
attributionImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (st filepath := fmt.Sprintf("%s/%s", e.S3.Bucket.Name, e.S3.Object.Key) logger.Infof("Reading %s event for file %s", e.EventName, filepath) if cclf.CheckIfAttributionCSVFile(e.S3.Object.Key) { - return handleCSVImport(ctx, pool, s3Client, ssmClient, filepath) + return handleCSVImport(ctx, pool, s3Client, filepath) } else { - return handleCclfImport(ctx, pool, s3Client, ssmClient, filepath) + return handleCclfImport(ctx, pool, s3Client, filepath) } } } @@ -85,7 +98,7 @@ func attributionImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (st return "", nil } -func handleCSVImport(ctx context.Context, pool *pgxpool.Pool, s3Client *s3.Client, ssmClient *ssm.Client, s3ImportPath string) (string, error) { +func handleCSVImport(ctx context.Context, pool *pgxpool.Pool, s3Client *s3.Client, s3ImportPath string) (string, error) { env := conf.GetEnv("ENV") appName := conf.GetEnv("APP_NAME") logger := configureLogger(env, appName) @@ -96,21 +109,13 @@ func handleCSVImport(ctx context.Context, pool *pgxpool.Pool, s3Client *s3.Clien return "", err } - s3AssumeRoleArn, err := bcdaaws.GetParameter(ctx, ssmClient, fmt.Sprintf("/cclf-import/bcda/%s/bfd-bucket-role-arn", env)) - if err != nil { - logger.Errorf("error getting param: %+v", err) - return "", err - } - importer := cclf.CSVImporter{ Logger: logger, PgxPool: pool, FileProcessor: &cclf.S3FileProcessor{ Handler: optout.S3FileHandler{ - Client: s3Client, - Logger: logger, - Endpoint: os.Getenv("LOCAL_STACK_ENDPOINT"), - AssumeRoleArn: s3AssumeRoleArn, + Client: s3Client, + Logger: logger, }, }, } @@ -127,7 +132,7 @@ func handleCSVImport(ctx context.Context, pool *pgxpool.Pool, s3Client *s3.Clien return result, nil } -func handleCclfImport(ctx context.Context, pool *pgxpool.Pool, s3Client *s3.Client, ssmClient *ssm.Client, s3ImportPath string) (string, error) { +func handleCclfImport(ctx context.Context, pool *pgxpool.Pool, s3Client *s3.Client, 
s3ImportPath string) (string, error) { env := conf.GetEnv("ENV") appName := conf.GetEnv("APP_NAME") logger := configureLogger(env, appName) @@ -138,18 +143,10 @@ func handleCclfImport(ctx context.Context, pool *pgxpool.Pool, s3Client *s3.Clie return "", err } - s3AssumeRoleArn, err := bcdaaws.GetParameter(ctx, ssmClient, fmt.Sprintf("/cclf-import/bcda/%s/bfd-bucket-role-arn", env)) - if err != nil { - logger.Errorf("error getting param: %+v", err) - return "", err - } - fileProcessor := cclf.S3FileProcessor{ Handler: optout.S3FileHandler{ - Client: s3Client, - Logger: logger, - Endpoint: os.Getenv("LOCAL_STACK_ENDPOINT"), - AssumeRoleArn: s3AssumeRoleArn, + Client: s3Client, + Logger: logger, }, } diff --git a/bcda/lambda/cclf/main_test.go b/bcda/lambda/cclf/main_test.go index 745761970..ecdcdb217 100644 --- a/bcda/lambda/cclf/main_test.go +++ b/bcda/lambda/cclf/main_test.go @@ -39,7 +39,6 @@ func (s *AttributionImportMainSuite) TestImportCCLFDirectory() { assert := assert.New(s.T()) cfg := testUtils.TestAWSConfig(s.T()) s3Client := testUtils.TestS3Client(s.T(), cfg) - ssmClient := testUtils.TestSSMClient(s.T(), cfg) pool := database.ConnectPool() env := uuid.NewUUID() @@ -49,10 +48,8 @@ func (s *AttributionImportMainSuite) TestImportCCLFDirectory() { }) defer cleanupEnv() - cleanupParam1 := testUtils.SetParameter(s.T(), fmt.Sprintf("/cclf-import/bcda/%s/bfd-bucket-role-arn", env), "arn:aws:iam::000000000000:user/fake-arn") - cleanupParam2 := testUtils.SetParameter(s.T(), fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env), "postgresql://postgres:toor@db-unit-test:5432/bcda_test?sslmode=disable") - defer cleanupParam1() - defer cleanupParam2() + cleanupParam := testUtils.SetParameter(s.T(), fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env), "postgresql://postgres:toor@db-unit-test:5432/bcda_test?sslmode=disable") + defer cleanupParam() type test struct { path string @@ -74,7 +71,7 @@ func (s *AttributionImportMainSuite) TestImportCCLFDirectory() { path, cleanup := 
testUtils.CopyToS3(s.T(), tc.path) defer cleanup() - res, err := handleCclfImport(context.Background(), pool, s3Client, ssmClient, path) + res, err := handleCclfImport(context.Background(), pool, s3Client, path) if tc.err == nil { assert.Nil(err) @@ -88,14 +85,3 @@ func (s *AttributionImportMainSuite) TestImportCCLFDirectory() { } } } - -func (s *AttributionImportMainSuite) TestHandlerMissingS3AssumeRoleArn() { - assert := assert.New(s.T()) - cfg := testUtils.TestAWSConfig(s.T()) - s3Client := testUtils.TestS3Client(s.T(), cfg) - ssmClient := testUtils.TestSSMClient(s.T(), cfg) - pool := database.ConnectPool() - - _, err := handleCclfImport(context.Background(), pool, s3Client, ssmClient, "asdf") - assert.Contains(err.Error(), "error retrieving parameter /cclf-import/bcda/local/bfd-bucket-role-arn from parameter store") -} diff --git a/bcda/lambda/optout/main.go b/bcda/lambda/optout/main.go index 3f270a8a2..b7da3abc4 100644 --- a/bcda/lambda/optout/main.go +++ b/bcda/lambda/optout/main.go @@ -22,8 +22,10 @@ import ( "github.com/CMSgov/bcda-app/conf" "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/credentials/stscreds" "github.com/aws/aws-sdk-go-v2/service/s3" "github.com/aws/aws-sdk-go-v2/service/ssm" + "github.com/aws/aws-sdk-go-v2/service/sts" ) func main() { @@ -50,7 +52,18 @@ func optOutImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (string, return "", err } ssmClient := ssm.NewFromConfig(cfg) - s3Client := s3.NewFromConfig(cfg) + + s3AssumeRoleArn, err := bcdaaws.GetParameter(ctx, ssmClient, fmt.Sprintf("/cclf-import/bcda/%s/bfd-bucket-role-arn", env)) + if err != nil { + logger.Errorf("error getting param: %+v", err) + return "", err + } + stsClient := sts.NewFromConfig(cfg) + appCreds := stscreds.NewAssumeRoleProvider(stsClient, s3AssumeRoleArn) + + s3Client := s3.NewFromConfig(cfg, func(o *s3.Options) { + o.Credentials = appCreds + }) dbURL, err := bcdaaws.GetParameter(ctx, ssmClient, 
fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env)) if err != nil { @@ -70,7 +83,7 @@ func optOutImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (string, if strings.Contains(e.EventName, "ObjectCreated") { dir := bcdaaws.ParseS3Directory(e.S3.Bucket.Name, e.S3.Object.Key) logger.Infof("Reading %s event for directory %s", e.EventName, dir) - return handleOptOutImport(ctx, db, s3Client, ssmClient, dir) + return handleOptOutImport(ctx, db, s3Client, dir) } } @@ -78,24 +91,16 @@ func optOutImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (string, return "", nil } -func handleOptOutImport(ctx context.Context, db *sql.DB, s3Client *s3.Client, ssmClient *ssm.Client, s3ImportPath string) (string, error) { +func handleOptOutImport(ctx context.Context, db *sql.DB, s3Client *s3.Client, s3ImportPath string) (string, error) { env := conf.GetEnv("ENV") appName := conf.GetEnv("APP_NAME") logger := configureLogger(env, appName) repo := postgres.NewRepository(db) - s3AssumeRoleArn, err := bcdaaws.GetParameter(ctx, ssmClient, fmt.Sprintf("/opt-out-import/bcda/%s/bfd-bucket-role-arn", env)) - if err != nil { - logger.Errorf("error getting param: %+v", err) - return "", err - } - importer := suppression.OptOutImporter{ FileHandler: &optout.S3FileHandler{ - Client: s3Client, - Logger: logger, - Endpoint: os.Getenv("LOCAL_STACK_ENDPOINT"), - AssumeRoleArn: s3AssumeRoleArn, + Client: s3Client, + Logger: logger, }, Saver: &suppression.BCDASaver{ Repo: repo, diff --git a/bcda/lambda/optout/main_test.go b/bcda/lambda/optout/main_test.go index 1f2849894..47eb7266b 100644 --- a/bcda/lambda/optout/main_test.go +++ b/bcda/lambda/optout/main_test.go @@ -32,7 +32,6 @@ func (s *OptOutImportMainSuite) TestHandleOptOutImportSuccess() { assert := assert.New(s.T()) cfg := testUtils.TestAWSConfig(s.T()) s3Client := testUtils.TestS3Client(s.T(), cfg) - ssmClient := testUtils.TestSSMClient(s.T(), cfg) path, cleanup := testUtils.CopyToS3(s.T(), 
"../../../shared_files/synthetic1800MedicareFiles/test2/") defer cleanup() @@ -41,12 +40,10 @@ func (s *OptOutImportMainSuite) TestHandleOptOutImportSuccess() { cleanupEnv := testUtils.SetEnvVars(s.T(), []testUtils.EnvVar{{Name: "ENV", Value: env.String()}}) defer cleanupEnv() - cleanupParam1 := testUtils.SetParameter(s.T(), fmt.Sprintf("/opt-out-import/bcda/%s/bfd-bucket-role-arn", env), "arn:aws:iam::000000000000:user/fake-arn") - cleanupParam2 := testUtils.SetParameter(s.T(), fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env), "postgresql://postgres:toor@db-unit-test:5432/bcda_test?sslmode=disable") - defer cleanupParam1() - defer cleanupParam2() + cleanupParam := testUtils.SetParameter(s.T(), fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env), "postgresql://postgres:toor@db-unit-test:5432/bcda_test?sslmode=disable") + defer cleanupParam() - res, err := handleOptOutImport(context.Background(), s.db, s3Client, ssmClient, path) + res, err := handleOptOutImport(context.Background(), s.db, s3Client, path) assert.Nil(err) assert.Contains(res, constants.CompleteMedSupDataImp) assert.Contains(res, "Files imported: 2") @@ -68,7 +65,6 @@ func (s *OptOutImportMainSuite) TestImportSuppressionDirectory_Skipped() { assert := assert.New(s.T()) cfg := testUtils.TestAWSConfig(s.T()) s3Client := testUtils.TestS3Client(s.T(), cfg) - ssmClient := testUtils.TestSSMClient(s.T(), cfg) path, cleanup := testUtils.CopyToS3(s.T(), "../../../shared_files/suppressionfile_BadFileNames/") defer cleanup() @@ -82,7 +78,7 @@ func (s *OptOutImportMainSuite) TestImportSuppressionDirectory_Skipped() { defer cleanupParam1() defer cleanupParam2() - res, err := handleOptOutImport(context.Background(), s.db, s3Client, ssmClient, path) + res, err := handleOptOutImport(context.Background(), s.db, s3Client, path) assert.Nil(err) assert.Contains(res, constants.CompleteMedSupDataImp) assert.Contains(res, "Files imported: 0") @@ -94,7 +90,6 @@ func (s *OptOutImportMainSuite) TestImportSuppressionDirectory_Failed() { 
assert := assert.New(s.T()) cfg := testUtils.TestAWSConfig(s.T()) s3Client := testUtils.TestS3Client(s.T(), cfg) - ssmClient := testUtils.TestSSMClient(s.T(), cfg) path, cleanup := testUtils.CopyToS3(s.T(), "../../../shared_files/suppressionfile_BadHeader/") defer cleanup() @@ -108,20 +103,10 @@ func (s *OptOutImportMainSuite) TestImportSuppressionDirectory_Failed() { defer cleanupParam1() defer cleanupParam2() - res, err := handleOptOutImport(context.Background(), s.db, s3Client, ssmClient, path) + res, err := handleOptOutImport(context.Background(), s.db, s3Client, path) assert.EqualError(err, "one or more suppression files failed to import correctly") assert.Contains(res, constants.CompleteMedSupDataImp) assert.Contains(res, "Files imported: 0") assert.Contains(res, "Files failed: 1") assert.Contains(res, "Files skipped: 0") } - -func (s *OptOutImportMainSuite) TestHandlerMissingS3AssumeRoleArn() { - assert := assert.New(s.T()) - cfg := testUtils.TestAWSConfig(s.T()) - s3Client := testUtils.TestS3Client(s.T(), cfg) - ssmClient := testUtils.TestSSMClient(s.T(), cfg) - - _, err := handleOptOutImport(context.Background(), s.db, s3Client, ssmClient, "asdf") - assert.Contains(err.Error(), "error retrieving parameter /opt-out-import/bcda/local/bfd-bucket-role-arn from parameter store") -} diff --git a/docker-compose.yml b/docker-compose.yml index 60a17ca3a..7e55563ce 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -14,7 +14,7 @@ services: environment: - AWS_DEFAULT_REGION=us-east-1 - GATEWAY_LISTEN=0.0.0.0:4566 - - SERVICES=s3,ssm,sts,iam + - SERVICES=s3,ssm,sts,iam,cloudwatch - DEBUG=1 ports: - "4566-4583:4566-4583" diff --git a/go.mod b/go.mod index 645f57a2f..b0b7c3f7b 100644 --- a/go.mod +++ b/go.mod @@ -41,10 +41,12 @@ require ( require ( github.com/aws/aws-sdk-go-v2 v1.39.4 github.com/aws/aws-sdk-go-v2/config v1.31.14 + github.com/aws/aws-sdk-go-v2/credentials v1.18.18 github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.19.13 
github.com/aws/aws-sdk-go-v2/service/cloudwatch v1.51.4 github.com/aws/aws-sdk-go-v2/service/s3 v1.88.6 github.com/aws/aws-sdk-go-v2/service/ssm v1.66.1 + github.com/aws/aws-sdk-go-v2/service/sts v1.38.8 github.com/ccoveille/go-safecast v1.6.1 github.com/pashagolub/pgxmock/v4 v4.5.0 github.com/riverqueue/river v0.20.2 @@ -59,7 +61,6 @@ require ( github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect github.com/Microsoft/go-winio v0.6.2 // indirect github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.2 // indirect - github.com/aws/aws-sdk-go-v2/credentials v1.18.18 // indirect github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.10 // indirect github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.11 // indirect github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.11 // indirect @@ -71,7 +72,6 @@ require ( github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.10 // indirect github.com/aws/aws-sdk-go-v2/service/sso v1.29.7 // indirect github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.2 // indirect - github.com/aws/aws-sdk-go-v2/service/sts v1.38.8 // indirect github.com/aws/smithy-go v1.23.1 // indirect github.com/containerd/errdefs v1.0.0 // indirect github.com/containerd/errdefs/pkg v0.3.0 // indirect diff --git a/optout/s3_file_handler.go b/optout/s3_file_handler.go index 7f075dcb3..aed87dc6e 100644 --- a/optout/s3_file_handler.go +++ b/optout/s3_file_handler.go @@ -20,8 +20,6 @@ type S3FileHandler struct { Logger logrus.FieldLogger // Optional S3 endpoint to use for connection. Endpoint string - // Optional role to assume when connecting to S3. 
- AssumeRoleArn string } // Define logger functions to ensure that logs get sent to: From 4f17c08b7570e3957e396a2c98c9329488427374 Mon Sep 17 00:00:00 2001 From: carlpartridge Date: Wed, 29 Oct 2025 15:39:42 -0400 Subject: [PATCH 12/16] Add various missing test coverage --- bcda/lambda/admin_aco_deny/db_test.go | 10 ++++ bcda/lambda/admin_aco_deny/main.go | 4 -- bcda/lambda/admin_create_aco/db_test.go | 8 +-- bcda/lambda/admin_create_aco/main.go | 8 +-- bcda/lambda/admin_create_aco/main_test.go | 26 +++++++-- bcda/lambda/admin_create_aco_creds/aws.go | 5 +- .../lambda/admin_create_aco_creds/aws_test.go | 35 +++++++++++- bcda/lambda/admin_create_group/main.go | 21 ++----- bcda/lambda/admin_create_group/main_test.go | 57 +++++++++++++++++++ bcda/lambda/cclf/main_test.go | 12 ++++ bcdaworker/queueing/river.go | 6 -- bcdaworker/queueing/river_test.go | 24 +++----- shared_files/csv/valid.csv | 3 + 13 files changed, 154 insertions(+), 65 deletions(-) create mode 100644 shared_files/csv/valid.csv diff --git a/bcda/lambda/admin_aco_deny/db_test.go b/bcda/lambda/admin_aco_deny/db_test.go index b64dfbec1..f04159449 100644 --- a/bcda/lambda/admin_aco_deny/db_test.go +++ b/bcda/lambda/admin_aco_deny/db_test.go @@ -4,10 +4,14 @@ import ( "context" "encoding/json" "errors" + "fmt" + "os" "testing" "time" "github.com/CMSgov/bcda-app/bcda/models" + "github.com/CMSgov/bcda-app/bcda/testUtils" + "github.com/CMSgov/bcda-app/conf" "github.com/google/uuid" "github.com/jackc/pgx/v5" "github.com/pashagolub/pgxmock/v4" @@ -53,6 +57,12 @@ func TestDenyACOsQueryFailure(t *testing.T) { func TestDenyACOs_Integration(t *testing.T) { ctx := context.Background() + env := conf.GetEnv("ENV") + + cleanupParam1 := testUtils.SetParameter(t, fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env), os.Getenv("DATABASE_URL")) + t.Cleanup(func() { cleanupParam1() }) + cleanupParam2 := testUtils.SetParameter(t, "/slack/token/workflow-alerts", "test-slack-token") + t.Cleanup(func() { cleanupParam2() }) params, err 
:= getAWSParams(ctx) assert.Nil(t, err) diff --git a/bcda/lambda/admin_aco_deny/main.go b/bcda/lambda/admin_aco_deny/main.go index f3e39d47c..8807d4740 100644 --- a/bcda/lambda/admin_aco_deny/main.go +++ b/bcda/lambda/admin_aco_deny/main.go @@ -93,10 +93,6 @@ func handleACODenies(ctx context.Context, conn PgxConnection, data payload) erro func getAWSParams(ctx context.Context) (awsParams, error) { env := conf.GetEnv("ENV") - if env == "local" { - return awsParams{conf.GetEnv("DATABASE_URL"), ""}, nil - } - cfg, err := config.LoadDefaultConfig(ctx) if err != nil { return awsParams{}, err diff --git a/bcda/lambda/admin_create_aco/db_test.go b/bcda/lambda/admin_create_aco/db_test.go index 091fd6e50..84ff8a9da 100644 --- a/bcda/lambda/admin_create_aco/db_test.go +++ b/bcda/lambda/admin_create_aco/db_test.go @@ -7,6 +7,7 @@ import ( "testing" "github.com/CMSgov/bcda-app/bcda/models" + "github.com/CMSgov/bcda-app/conf" "github.com/jackc/pgx/v5" "github.com/pborman/uuid" @@ -25,12 +26,7 @@ type CreateACOTestSuite struct { func (c *CreateACOTestSuite) SetupTest() { c.ctx = context.Background() - params, err := getAWSParams(c.ctx) - if err != nil { - assert.FailNow(c.T(), "Failed to get AWS Params") - } - - conn, err := pgx.Connect(c.ctx, params.dbURL) + conn, err := pgx.Connect(c.ctx, conf.GetEnv("DATABASE_URL")) if err != nil { assert.FailNow(c.T(), "Failed to setup pgx connection") } diff --git a/bcda/lambda/admin_create_aco/main.go b/bcda/lambda/admin_create_aco/main.go index 50703ed8d..8a2558868 100644 --- a/bcda/lambda/admin_create_aco/main.go +++ b/bcda/lambda/admin_create_aco/main.go @@ -71,7 +71,6 @@ func handler(ctx context.Context, event json.RawMessage) error { id := uuid.NewRandom() if data.CleanUp == nil { - // run the regular logic (non-rollback transaction) err = handleCreateACO(ctx, conn, data, id) if err != nil { @@ -108,7 +107,6 @@ func handler(ctx context.Context, event json.RawMessage) error { } func handleCreateACO(ctx context.Context, conn 
PgxConnection, data payload, id uuid.UUID) error { - if data.Name == "" { return errors.New("ACO name must be provided") } @@ -128,7 +126,7 @@ func handleCreateACO(ctx context.Context, conn PgxConnection, data payload, id u aco := models.ACO{Name: data.Name, CMSID: cmsIDPt, UUID: id, ClientID: id.String()} - err := createACO(context.Background(), conn, aco) + err := createACO(ctx, conn, aco) if err != nil { return err } @@ -139,10 +137,6 @@ func handleCreateACO(ctx context.Context, conn PgxConnection, data payload, id u func getAWSParams(ctx context.Context) (awsParams, error) { env := conf.GetEnv("ENV") - if env == "local" { - return awsParams{conf.GetEnv("DATABASE_URL"), ""}, nil - } - cfg, err := config.LoadDefaultConfig(ctx) if err != nil { return awsParams{}, err diff --git a/bcda/lambda/admin_create_aco/main_test.go b/bcda/lambda/admin_create_aco/main_test.go index f884d15e4..662f8f73a 100644 --- a/bcda/lambda/admin_create_aco/main_test.go +++ b/bcda/lambda/admin_create_aco/main_test.go @@ -3,8 +3,12 @@ package main import ( "context" "errors" + "fmt" + "os" "testing" + "github.com/CMSgov/bcda-app/bcda/testUtils" + "github.com/CMSgov/bcda-app/conf" "github.com/jackc/pgx/v5" "github.com/pashagolub/pgxmock/v4" "github.com/pborman/uuid" @@ -22,12 +26,7 @@ type HandleCreateACOTestSuite struct { func (c *HandleCreateACOTestSuite) SetupTest() { c.ctx = context.Background() - params, err := getAWSParams(c.ctx) - if err != nil { - assert.FailNow(c.T(), "Failed to get AWS Params") - } - - conn, err := pgx.Connect(c.ctx, params.dbURL) + conn, err := pgx.Connect(c.ctx, os.Getenv("DATABASE_URL")) if err != nil { assert.FailNow(c.T(), "Failed to setup pgx connection") } @@ -141,3 +140,18 @@ func (c *HandleCreateACOTestSuite) TestHandleCreateACOMissingCMSID() { err := handleCreateACO(c.ctx, c.tx, data, id) assert.ErrorContains(c.T(), err, "CMSID must be provided") } + +func TestGetAWSParams(t *testing.T) { + env := conf.GetEnv("ENV") + + cleanupParam1 := 
testUtils.SetParameter(t, fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env), "test-db-url") + t.Cleanup(func() { cleanupParam1() }) + cleanupParam2 := testUtils.SetParameter(t, "/slack/token/workflow-alerts", "test-slack-token") + t.Cleanup(func() { cleanupParam2() }) + + params, err := getAWSParams(context.Background()) + + assert.Nil(t, err) + assert.Equal(t, "test-db-url", params.dbURL) + assert.Equal(t, "test-slack-token", params.slackToken) +} diff --git a/bcda/lambda/admin_create_aco_creds/aws.go b/bcda/lambda/admin_create_aco_creds/aws.go index 7d7a91a7e..112516718 100644 --- a/bcda/lambda/admin_create_aco_creds/aws.go +++ b/bcda/lambda/admin_create_aco_creds/aws.go @@ -21,9 +21,6 @@ var pemFilePath = "/tmp/BCDA_CA_FILE.pem" func getAWSParams(ctx context.Context) (awsParams, error) { env := adjustedEnv() - if env == "local" { - return awsParams{}, nil - } slackParamName := "/slack/token/workflow-alerts" ssasURLName := fmt.Sprintf("/bcda/%s/api/SSAS_URL", env) @@ -123,7 +120,7 @@ func putObject(ctx context.Context, client *s3.Client, acoID, creds, credsBucket func adjustedEnv() string { env := conf.GetEnv("ENV") if env == "sbx" { - env = "opensbx" + env = "sandbox" } return env } diff --git a/bcda/lambda/admin_create_aco_creds/aws_test.go b/bcda/lambda/admin_create_aco_creds/aws_test.go index d2be10d35..7430a985b 100644 --- a/bcda/lambda/admin_create_aco_creds/aws_test.go +++ b/bcda/lambda/admin_create_aco_creds/aws_test.go @@ -1,6 +1,8 @@ package main import ( + "context" + "fmt" "os" "testing" @@ -25,6 +27,33 @@ func TestPutObject(t *testing.T) { assert.Equal(t, result, "test-bucket/test-filename-creds") } +func TestGetAWSParams(t *testing.T) { + env := conf.GetEnv("ENV") + + cleanupParam1 := testUtils.SetParameter(t, "/slack/token/workflow-alerts", "slack-val") + t.Cleanup(func() { cleanupParam1() }) + cleanupParam2 := testUtils.SetParameter(t, fmt.Sprintf("/bcda/%s/aco_creds_bucket", env), "test-CREDS_BUCKET") + t.Cleanup(func() { cleanupParam2() }) + 
cleanupParam3 := testUtils.SetParameter(t, fmt.Sprintf("/bcda/%s/api/SSAS_URL", env), "test-SSAS_URL") + t.Cleanup(func() { cleanupParam3() }) + cleanupParam4 := testUtils.SetParameter(t, fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_CLIENT_ID", env), "test-BCDA_SSAS_CLIENT_ID") + t.Cleanup(func() { cleanupParam4() }) + cleanupParam5 := testUtils.SetParameter(t, fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_SECRET", env), "test-BCDA_SSAS_SECRET") + t.Cleanup(func() { cleanupParam5() }) + cleanupParam6 := testUtils.SetParameter(t, fmt.Sprintf("/bcda/%s/api/BCDA_CA_FILE.pem", env), "test-BCDA_CA_FILE") + t.Cleanup(func() { cleanupParam6() }) + + params, err := getAWSParams(context.Background()) + assert.Nil(t, err) + + assert.Equal(t, "slack-val", params.slackToken) + assert.Equal(t, "test-CREDS_BUCKET", params.credsBucket) + assert.Equal(t, "test-SSAS_URL", params.ssasURL) + assert.Equal(t, "test-BCDA_SSAS_CLIENT_ID", params.clientID) + assert.Equal(t, "test-BCDA_SSAS_SECRET", params.clientSecret) + assert.Equal(t, "test-BCDA_CA_FILE", params.ssasPEM) +} + func TestAdjustedEnv(t *testing.T) { origEnv := conf.GetEnv("ENV") t.Cleanup(func() { @@ -41,11 +70,15 @@ func TestAdjustedEnv(t *testing.T) { conf.SetEnv(t, "ENV", "sbx") resultEnv = adjustedEnv() - assert.Equal(t, resultEnv, "opensbx") + assert.Equal(t, resultEnv, "sandbox") conf.SetEnv(t, "ENV", "prod") resultEnv = adjustedEnv() assert.Equal(t, resultEnv, "prod") + + conf.SetEnv(t, "ENV", "asdf") + resultEnv = adjustedEnv() + assert.Equal(t, resultEnv, "asdf") } func TestSetupEnvironment(t *testing.T) { diff --git a/bcda/lambda/admin_create_group/main.go b/bcda/lambda/admin_create_group/main.go index 92e829b6b..8ebdaa8e3 100644 --- a/bcda/lambda/admin_create_group/main.go +++ b/bcda/lambda/admin_create_group/main.go @@ -166,37 +166,26 @@ func setupEnv(ctx context.Context) (string, error) { return "", err } - err = os.Setenv(slackParamName, params[slackParamName]) - if err != nil { - log.Errorf("Error setting slackParamName env 
var: %+v", err) - return "", err - } - err = os.Setenv(dbURLName, params[dbURLName]) + err = os.Setenv("DATABASE_URL", params[dbURLName]) if err != nil { log.Errorf("Error setting dbURLName env var: %+v", err) return "", err } - err = os.Setenv(ssasURLName, params[ssasURLName]) + err = os.Setenv("SSAS_URL", params[ssasURLName]) if err != nil { log.Errorf("Error setting ssasURLName env var: %+v", err) return "", err } - err = os.Setenv(ssasClientName, params[ssasClientName]) + err = os.Setenv("BCDA_SSAS_CLIENT_ID", params[ssasClientName]) if err != nil { log.Errorf("Error setting ssasClientName env var: %+v", err) return "", err } - err = os.Setenv(ssasSecretName, params[ssasSecretName]) + err = os.Setenv("BCDA_SSAS_SECRET", params[ssasSecretName]) if err != nil { log.Errorf("Error setting ssasSecretName env var: %+v", err) return "", err } - err = os.Setenv(caFileName, params[caFileName]) - if err != nil { - log.Errorf("Error setting caFileName env var: %+v", err) - return "", err - } - err = os.Setenv("BCDA_CA_FILE", "/tmp/BCDA_CA_FILE.pem") if err != nil { log.Errorf("Error setting SSAS_USE_TLS env var: %+v", err) @@ -210,7 +199,7 @@ func setupEnv(ctx context.Context) (string, error) { return "", err } defer f.Close() - _, err = f.Write([]byte(conf.GetEnv("BCDA_CA_FILE.pem"))) + _, err = f.Write([]byte(params[caFileName])) if err != nil { return "", err } diff --git a/bcda/lambda/admin_create_group/main_test.go b/bcda/lambda/admin_create_group/main_test.go index 1dafbb428..aa84ba960 100644 --- a/bcda/lambda/admin_create_group/main_test.go +++ b/bcda/lambda/admin_create_group/main_test.go @@ -1,10 +1,15 @@ package main import ( + "context" + "fmt" + "os" "testing" "github.com/CMSgov/bcda-app/bcda/database/databasetest" "github.com/CMSgov/bcda-app/bcda/models/postgres" + "github.com/CMSgov/bcda-app/bcda/testUtils" + "github.com/CMSgov/bcda-app/conf" "github.com/go-testfixtures/testfixtures/v3" "github.com/stretchr/testify/assert" ) @@ -58,3 +63,55 @@ func 
TestHandleCreateGroup(t *testing.T) { }) } } + +func TestSetupEnvironment(t *testing.T) { + env := conf.GetEnv("ENV") + + // store env vars to restore later + origDBURL := os.Getenv("DATABASE_URL") + origSSASURL := os.Getenv("SSAS_URL") + origBCDASSASClientID := os.Getenv("BCDA_SSAS_CLIENT_ID") + origBCDASSASSecret := os.Getenv("BCDA_SSAS_SECRET") + origSSASUseTLS := os.Getenv("SSAS_USE_TLS") + origBCDACAFile := os.Getenv("BCDA_CA_FILE") + t.Cleanup(func() { + // restore original env vars + err := os.Setenv("DATABASE_URL", origDBURL) + assert.Nil(t, err) + err = os.Setenv("SSAS_URL", origSSASURL) + assert.Nil(t, err) + err = os.Setenv("BCDA_SSAS_CLIENT_ID", origBCDASSASClientID) + assert.Nil(t, err) + err = os.Setenv("BCDA_SSAS_SECRET", origBCDASSASSecret) + assert.Nil(t, err) + err = os.Setenv("SSAS_USE_TLS", origSSASUseTLS) + assert.Nil(t, err) + err = os.Setenv("BCDA_CA_FILE", origBCDACAFile) + assert.Nil(t, err) + }) + + cleanupParam1 := testUtils.SetParameter(t, "/slack/token/workflow-alerts", "slack-val") + t.Cleanup(func() { cleanupParam1() }) + cleanupParam2 := testUtils.SetParameter(t, fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env), "test-DB_URL") + t.Cleanup(func() { cleanupParam2() }) + cleanupParam3 := testUtils.SetParameter(t, fmt.Sprintf("/bcda/%s/api/SSAS_URL", env), "test-SSAS_URL") + t.Cleanup(func() { cleanupParam3() }) + cleanupParam4 := testUtils.SetParameter(t, fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_CLIENT_ID", env), "test-BCDA_SSAS_CLIENT_ID") + t.Cleanup(func() { cleanupParam4() }) + cleanupParam5 := testUtils.SetParameter(t, fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_SECRET", env), "test-BCDA_SSAS_SECRET") + t.Cleanup(func() { cleanupParam5() }) + cleanupParam6 := testUtils.SetParameter(t, fmt.Sprintf("/bcda/%s/api/BCDA_CA_FILE.pem", env), "test-BCDA_CA_FILE") + t.Cleanup(func() { cleanupParam6() }) + + slackName, err := setupEnv(context.Background()) + assert.Nil(t, err) + + assert.Equal(t, "slack-val", slackName) + assert.Equal(t, "true", 
os.Getenv("SSAS_USE_TLS")) + assert.Equal(t, "test-SSAS_URL", os.Getenv("SSAS_URL")) + assert.Equal(t, "test-BCDA_SSAS_CLIENT_ID", os.Getenv("BCDA_SSAS_CLIENT_ID")) + assert.Equal(t, "test-BCDA_SSAS_SECRET", os.Getenv("BCDA_SSAS_SECRET")) + assert.Equal(t, "true", os.Getenv("SSAS_USE_TLS")) + assert.Equal(t, "/tmp/BCDA_CA_FILE.pem", os.Getenv("BCDA_CA_FILE")) + assert.FileExists(t, "/tmp/BCDA_CA_FILE.pem") +} diff --git a/bcda/lambda/cclf/main_test.go b/bcda/lambda/cclf/main_test.go index ecdcdb217..7363a24e1 100644 --- a/bcda/lambda/cclf/main_test.go +++ b/bcda/lambda/cclf/main_test.go @@ -85,3 +85,15 @@ func (s *AttributionImportMainSuite) TestImportCCLFDirectory() { } } } + +func TestHandleCSVImport_NoACOConfig(t *testing.T) { + cfg := testUtils.TestAWSConfig(t) + s3Client := testUtils.TestS3Client(t, cfg) + pool := database.ConnectPool() + + path, cleanup := testUtils.CopyToS3(t, "../../../shared_files/csv/valid.csv") + defer cleanup() + + _, err := handleCSVImport(context.Background(), pool, s3Client, path) + assert.ErrorContains(t, err, "CSV Attribution metadata invalid: No ACO configs found") +} diff --git a/bcdaworker/queueing/river.go b/bcdaworker/queueing/river.go index 629d95cc3..9722bd66c 100644 --- a/bcdaworker/queueing/river.go +++ b/bcdaworker/queueing/river.go @@ -151,12 +151,6 @@ func getCutOffTime() time.Time { } func getAWSParams(ctx context.Context) (string, error) { - env := conf.GetEnv("ENV") - - if env == "local" { - return conf.GetEnv("workflow-alerts"), nil - } - cfg, err := config.LoadDefaultConfig(ctx) if err != nil { return "", err diff --git a/bcdaworker/queueing/river_test.go b/bcdaworker/queueing/river_test.go index c9d35f1d4..df288e3c5 100644 --- a/bcdaworker/queueing/river_test.go +++ b/bcdaworker/queueing/river_test.go @@ -128,6 +128,9 @@ func TestCleanupJobWorker_Work(t *testing.T) { var logger = logrus.New() client.SetLogger(logger) + cleanupParam1 := testUtils.SetParameter(t, "/slack/token/workflow-alerts", "slack-val") + 
t.Cleanup(func() { cleanupParam1() }) + // Create mock objects mockCleanupJob := new(MockCleanupJob) mockArchiveExpiring := new(MockArchiveExpiring) @@ -195,21 +198,12 @@ func TestGetCutOffTime(t *testing.T) { } func TestGetAWSParams(t *testing.T) { - defer func(env, workflowAlerts, localStackEndpoint string) { - conf.SetEnv(t, "ENV", env) - conf.SetEnv(t, "workflow-alerts", workflowAlerts) - os.Setenv("LOCAL_STACK_ENDPOINT", localStackEndpoint) - }(conf.GetEnv("ENV"), conf.GetEnv("workflow-alerts"), os.Getenv("LOCAL_STACK_ENDPOINT")) - - t.Run("Local Environment", func(t *testing.T) { - conf.SetEnv(t, "ENV", "local") - expectedToken := "local-token" - conf.SetEnv(t, "workflow-alerts", expectedToken) - - token, err := getAWSParams(t.Context()) - assert.NoError(t, err) - assert.Equal(t, expectedToken, token) - }) + cleanupParam1 := testUtils.SetParameter(t, "/slack/token/workflow-alerts", "slack-val") + t.Cleanup(func() { cleanupParam1() }) + + token, err := getAWSParams(context.Background()) + assert.NoError(t, err) + assert.Equal(t, "slack-val", token) } func TestNewCleanupJobWorker(t *testing.T) { diff --git a/shared_files/csv/valid.csv b/shared_files/csv/valid.csv new file mode 100644 index 000000000..216c66eb9 --- /dev/null +++ b/shared_files/csv/valid.csv @@ -0,0 +1,3 @@ +1111111 +2222222 +3333333 \ No newline at end of file From 4c5f5adbd784d866fcb144f28d768869f3f36340 Mon Sep 17 00:00:00 2001 From: carlpartridge Date: Wed, 29 Oct 2025 19:05:05 -0400 Subject: [PATCH 13/16] Fix aco creds tests, PR review changes --- bcda/cclf/cclf_test.go | 3 +-- bcda/cclf/csv_test.go | 3 +-- bcda/lambda/admin_create_aco_creds/aws.go | 8 ++++++++ bcda/lambda/admin_create_aco_creds/aws_test.go | 8 ++++++++ bcda/lambda/admin_create_aco_creds/main.go | 1 + 5 files changed, 19 insertions(+), 4 deletions(-) diff --git a/bcda/cclf/cclf_test.go b/bcda/cclf/cclf_test.go index 742f0891e..f34d0b857 100644 --- a/bcda/cclf/cclf_test.go +++ b/bcda/cclf/cclf_test.go @@ -376,8 +376,7 @@ func 
createTemporaryCCLF8ZipFile(t *testing.T, data string) (fileName, cclfName } func buildZipMetadata(t *testing.T, processor CclfFileProcessor, cmsID, zipName, cclf0Name, cclf8Name string, fileType models.CCLFFileType) (*cclfZipMetadata, func()) { - ctx := context.Background() - zipReader, zipCloser, err := processor.OpenZipArchive(ctx, zipName) + zipReader, zipCloser, err := processor.OpenZipArchive(context.Background(), zipName) assert.Nil(t, err) metadata := cclfZipMetadata{ diff --git a/bcda/cclf/csv_test.go b/bcda/cclf/csv_test.go index e72603e48..841d68c08 100644 --- a/bcda/cclf/csv_test.go +++ b/bcda/cclf/csv_test.go @@ -103,7 +103,6 @@ func TestCSVTestSuite(t *testing.T) { } func (s *CSVTestSuite) TestImportCSV_Integration() { - ctx := context.Background() conf.SetEnv(s.T(), "CCLF_REF_DATE", "181201") tests := []struct { name string @@ -121,7 +120,7 @@ func (s *CSVTestSuite) TestImportCSV_Integration() { for _, test := range tests { s.T().Run(test.name, func(tt *testing.T) { filename := filepath.Clean(test.filepath) - err := s.importer.ImportCSV(ctx, test.filepath) + err := s.importer.ImportCSV(context.Background(), test.filepath) if test.err == nil { assert.Nil(s.T(), err) } else { diff --git a/bcda/lambda/admin_create_aco_creds/aws.go b/bcda/lambda/admin_create_aco_creds/aws.go index 112516718..7f2a4251f 100644 --- a/bcda/lambda/admin_create_aco_creds/aws.go +++ b/bcda/lambda/admin_create_aco_creds/aws.go @@ -23,6 +23,7 @@ func getAWSParams(ctx context.Context) (awsParams, error) { env := adjustedEnv() slackParamName := "/slack/token/workflow-alerts" + dbURLName := fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env) ssasURLName := fmt.Sprintf("/bcda/%s/api/SSAS_URL", env) clientIDName := fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_CLIENT_ID", env) clientSecretName := fmt.Sprintf("/bcda/%s/api/BCDA_SSAS_SECRET", env) @@ -31,6 +32,7 @@ func getAWSParams(ctx context.Context) (awsParams, error) { paramNames := []string{ slackParamName, + dbURLName, ssasURLName, clientIDName, 
clientSecretName, @@ -51,6 +53,7 @@ func getAWSParams(ctx context.Context) (awsParams, error) { return awsParams{ params[slackParamName], + params[dbURLName], params[ssasURLName], params[clientIDName], params[clientSecretName], @@ -66,6 +69,11 @@ func setupEnvironment(params awsParams) error { log.Errorf("Error setting SSAS_URL env var: %+v", err) return err } + err = os.Setenv("DATABASE_URL", params.dbURL) + if err != nil { + log.Errorf("Error setting DATABASE_URL env var: %+v", err) + return err + } err = os.Setenv("BCDA_SSAS_CLIENT_ID", params.clientID) if err != nil { log.Errorf("Error setting BCDA_SSAS_CLIENT_ID env var: %+v", err) diff --git a/bcda/lambda/admin_create_aco_creds/aws_test.go b/bcda/lambda/admin_create_aco_creds/aws_test.go index 7430a985b..6e04f0dee 100644 --- a/bcda/lambda/admin_create_aco_creds/aws_test.go +++ b/bcda/lambda/admin_create_aco_creds/aws_test.go @@ -42,6 +42,8 @@ func TestGetAWSParams(t *testing.T) { t.Cleanup(func() { cleanupParam5() }) cleanupParam6 := testUtils.SetParameter(t, fmt.Sprintf("/bcda/%s/api/BCDA_CA_FILE.pem", env), "test-BCDA_CA_FILE") t.Cleanup(func() { cleanupParam6() }) + cleanupParam7 := testUtils.SetParameter(t, fmt.Sprintf("/bcda/%s/api/DATABASE_URL", env), "test-DB_URL") + t.Cleanup(func() { cleanupParam7() }) params, err := getAWSParams(context.Background()) assert.Nil(t, err) @@ -52,6 +54,7 @@ func TestGetAWSParams(t *testing.T) { assert.Equal(t, "test-BCDA_SSAS_CLIENT_ID", params.clientID) assert.Equal(t, "test-BCDA_SSAS_SECRET", params.clientSecret) assert.Equal(t, "test-BCDA_CA_FILE", params.ssasPEM) + assert.Equal(t, "test-DB_URL", params.dbURL) } func TestAdjustedEnv(t *testing.T) { @@ -84,6 +87,7 @@ func TestAdjustedEnv(t *testing.T) { func TestSetupEnvironment(t *testing.T) { // store env vars to restore later origSSASURL := os.Getenv("SSAS_URL") + origDBURL := os.Getenv("DATABASE_URL") origBCDASSASClientID := os.Getenv("BCDA_SSAS_CLIENT_ID") origBCDASSASSecret := os.Getenv("BCDA_SSAS_SECRET") 
origSSASUseTLS := os.Getenv("SSAS_USE_TLS") @@ -93,6 +97,8 @@ func TestSetupEnvironment(t *testing.T) { // restore original env vars err := os.Setenv("SSAS_URL", origSSASURL) assert.Nil(t, err) + err = os.Setenv("DATABASE_URL", origDBURL) + assert.Nil(t, err) err = os.Setenv("BCDA_SSAS_CLIENT_ID", origBCDASSASClientID) assert.Nil(t, err) err = os.Setenv("BCDA_SSAS_SECRET", origBCDASSASSecret) @@ -105,12 +111,14 @@ func TestSetupEnvironment(t *testing.T) { err := setupEnvironment(awsParams{ ssasURL: "test-SSAS_URL", + dbURL: "test-DB_URL", clientID: "test-BCDA_SSAS_CLIENT_ID", clientSecret: "test-BCDA_SSAS_SECRET", }) assert.Nil(t, err) assert.Equal(t, "test-SSAS_URL", os.Getenv("SSAS_URL")) + assert.Equal(t, "test-DB_URL", os.Getenv("DATABASE_URL")) assert.Equal(t, "test-BCDA_SSAS_CLIENT_ID", os.Getenv("BCDA_SSAS_CLIENT_ID")) assert.Equal(t, "test-BCDA_SSAS_SECRET", os.Getenv("BCDA_SSAS_SECRET")) assert.Equal(t, "true", os.Getenv("SSAS_USE_TLS")) diff --git a/bcda/lambda/admin_create_aco_creds/main.go b/bcda/lambda/admin_create_aco_creds/main.go index 012eb850d..706f5d753 100644 --- a/bcda/lambda/admin_create_aco_creds/main.go +++ b/bcda/lambda/admin_create_aco_creds/main.go @@ -27,6 +27,7 @@ type payload struct { type awsParams struct { slackToken string + dbURL string ssasURL string clientID string clientSecret string From 31da1d48911d4b5efcffc6b5dc18779ed520f731 Mon Sep 17 00:00:00 2001 From: carlpartridge Date: Thu, 30 Oct 2025 16:59:20 -0400 Subject: [PATCH 14/16] Add back in still used cli script --- .vscode/settings.json | 1 - Makefile | 10 +- bcda/bcdacli/cli.go | 115 ++++++++------------ bcda/bcdacli/cli_test.go | 177 +++++-------------------------- bcda/constants/test_constants.go | 2 +- docker-compose.test.yml | 4 +- 6 files changed, 77 insertions(+), 232 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index 920ee7cbf..7bfcc3624 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -2,7 +2,6 @@ "go.testEnvVars": { 
"ENV": "local", "AWS_ENDPOINT_URL": "http://localhost:4566", - "LOCAL_STACK_ENDPOINT": "http://localhost:4566", "DB": "postgresql://postgres:toor@localhost:15432", "DB_HOST_URL": "postgresql://postgres:toor@localhost:15432?sslmode=disable", "TEST_DB_URL": "postgresql://postgres:toor@localhost:15432/bcda_test?sslmode=disable", diff --git a/Makefile b/Makefile index 0e4e1828c..32fed35c8 100644 --- a/Makefile +++ b/Makefile @@ -154,11 +154,11 @@ load-synthetic-cclf-data: docker compose exec api sh -c "bcda import-synthetic-cclf-package --acoSize='$$IMPROVED_SIZE' --environment='improved' --fileType='runout' " ; \ done -# load-synthetic-suppression-data: -# docker compose exec api sh -c 'bcda import-suppression-directory --directory=../shared_files/synthetic1800MedicareFiles' -# # Update the suppression entries to guarantee there are qualified entries when searching for suppressed benes. -# # See postgres#GetSuppressedMBIs for more information -# docker compose exec -T db sh -c 'PGPASSWORD=$$POSTGRES_PASSWORD psql -v ON_ERROR_STOP=1 $$POSTGRES_DB postgres -c "UPDATE suppressions SET effective_date = now(), preference_indicator = '"'"'N'"'"' WHERE effective_date = (SELECT max(effective_date) FROM suppressions);"' +load-synthetic-suppression-data: + docker compose exec api sh -c 'bcda import-suppression-directory --directory=../shared_files/synthetic1800MedicareFiles' + # Update the suppression entries to guarantee there are qualified entries when searching for suppressed benes. 
+ # See postgres#GetSuppressedMBIs for more information + docker compose exec -T db sh -c 'PGPASSWORD=$$POSTGRES_PASSWORD psql -v ON_ERROR_STOP=1 $$POSTGRES_DB postgres -c "UPDATE suppressions SET effective_date = now(), preference_indicator = '"'"'N'"'"' WHERE effective_date = (SELECT max(effective_date) FROM suppressions);"' load-fixtures-ssas: docker compose up -d db diff --git a/bcda/bcdacli/cli.go b/bcda/bcdacli/cli.go index a311483bd..7bb459296 100644 --- a/bcda/bcdacli/cli.go +++ b/bcda/bcdacli/cli.go @@ -19,6 +19,10 @@ import ( "github.com/CMSgov/bcda-app/bcda/auth" authclient "github.com/CMSgov/bcda-app/bcda/auth/client" + "github.com/CMSgov/bcda-app/bcda/suppression" + "github.com/CMSgov/bcda-app/conf" + "github.com/CMSgov/bcda-app/optout" + "github.com/ccoveille/go-safecast" cclfUtils "github.com/CMSgov/bcda-app/bcda/cclf/utils" "github.com/CMSgov/bcda-app/bcda/constants" @@ -66,11 +70,10 @@ func setUpApp() *cli.App { log.API.Info(fmt.Sprintf(`Auth is made possible by %T`, provider)) return nil } - // var hours, err = safecast.ToUint(utils.GetEnvInt("FILE_ARCHIVE_THRESHOLD_HR", 72)) - // if err != nil { - // fmt.Println("Error converting FILE_ARCHIVE_THRESHOLD_HR to uint", err) - // } - // var acoName, acoCMSID, acoID, accessToken, acoSize, filePath, fileSource, s3Endpoint, assumeRoleArn, environment, groupID, groupName, ips, fileType string + var hours, err = safecast.ToUint(utils.GetEnvInt("FILE_ARCHIVE_THRESHOLD_HR", 72)) + if err != nil { + fmt.Println("Error converting FILE_ARCHIVE_THRESHOLD_HR to uint", err) + } var acoName, acoCMSID, acoID, accessToken, acoSize, filePath, environment, groupID, groupName, ips, fileType string var httpPort, httpsPort int app.Commands = []cli.Command{ @@ -371,74 +374,42 @@ func setUpApp() *cli.App { return nil }, }, - // I dont believe we import-suppression-directory anymore. We now use the cclf lambda. 
- // { - // Name: "import-suppression-directory", - // Category: constants.CliDataImpCategory, - // Usage: "Import all 1-800-MEDICARE suppression data files from the specified directory", - // Flags: []cli.Flag{ - // cli.StringFlag{ - // Name: "directory", - // Usage: "Directory where suppression files are located", - // Destination: &filePath, - // }, - // cli.StringFlag{ - // Name: "filesource", - // Usage: "Source of files. Must be one of 'local', 's3'. Defaults to 'local'", - // Destination: &fileSource, - // }, - // cli.StringFlag{ - // Name: "s3endpoint", - // Usage: "Custom S3 endpoint", - // Destination: &s3Endpoint, - // }, - // cli.StringFlag{ - // Name: "assume-role-arn", - // Usage: "Optional IAM role ARN to assume for S3", - // Destination: &assumeRoleArn, - // }, - // }, - // Action: func(c *cli.Context) error { - // ignoreSignals() - // r := postgres.NewRepository(db) - - // var file_handler optout.OptOutFileHandler - // cfg, err := config.LoadDefaultConfig(context.Background()) - // if err != nil { - // log.API.Error("error loading default config: ", err) - // return err - // } - // s3Client := s3.NewFromConfig(cfg) - - // if fileSource == "s3" { - // file_handler = &optout.S3FileHandler{ - // Client: s3Client, - // Logger: log.API, - // Endpoint: s3Endpoint, - // AssumeRoleArn: assumeRoleArn, - // } - // } else { - // file_handler = &optout.LocalFileHandler{ - // Logger: log.API, - // PendingDeletionDir: conf.GetEnv("PENDING_DELETION_DIR"), - // FileArchiveThresholdHr: hours, - // } - // } + { + Name: "import-suppression-directory", + Category: constants.CliDataImpCategory, + Usage: "Import all 1-800-MEDICARE suppression data files from the specified directory", + Flags: []cli.Flag{ + cli.StringFlag{ + Name: "directory", + Usage: "Directory where suppression files are located", + Destination: &filePath, + }, + }, + Action: func(c *cli.Context) error { + ignoreSignals() + r := postgres.NewRepository(db) + + var file_handler optout.OptOutFileHandler 
+ file_handler = &optout.LocalFileHandler{ + Logger: log.API, + PendingDeletionDir: conf.GetEnv("PENDING_DELETION_DIR"), + FileArchiveThresholdHr: hours, + } - // importer := suppression.OptOutImporter{ - // FileHandler: file_handler, - // Saver: &suppression.BCDASaver{ - // Repo: r, - // }, - // Logger: log.API, - // ImportStatusInterval: utils.GetEnvInt("SUPPRESS_IMPORT_STATUS_RECORDS_INTERVAL", 1000), - // } - // ctx := context.Background() - // s, f, sk, err := importer.ImportSuppressionDirectory(ctx, filePath) - // fmt.Fprintf(app.Writer, "Completed 1-800-MEDICARE suppression data import.\nFiles imported: %v\nFiles failed: %v\nFiles skipped: %v\n", s, f, sk) - // return err - // }, - // }, + importer := suppression.OptOutImporter{ + FileHandler: file_handler, + Saver: &suppression.BCDASaver{ + Repo: r, + }, + Logger: log.API, + ImportStatusInterval: utils.GetEnvInt("SUPPRESS_IMPORT_STATUS_RECORDS_INTERVAL", 1000), + } + ctx := context.Background() + s, f, sk, err := importer.ImportSuppressionDirectory(ctx, filePath) + fmt.Fprintf(app.Writer, "Completed 1-800-MEDICARE suppression data import.\nFiles imported: %v\nFiles failed: %v\nFiles skipped: %v\n", s, f, sk) + return err + }, + }, { Name: "import-synthetic-cclf-package", Category: constants.CliDataImpCategory, diff --git a/bcda/bcdacli/cli_test.go b/bcda/bcdacli/cli_test.go index 1c49202d0..d2560757a 100644 --- a/bcda/bcdacli/cli_test.go +++ b/bcda/bcdacli/cli_test.go @@ -379,157 +379,32 @@ func (s *CLITestSuite) TestCreateACO() { buf.Reset() } -// func (s *CLITestSuite) TestImportCCLFDirectory() { -// oldVal := conf.GetEnv("LOG_TO_STD_OUT") -// conf.SetEnv(s.T(), "LOG_TO_STD_OUT", "false") -// s.T().Cleanup(func() { conf.SetEnv(s.T(), "LOG_TO_STD_OUT", oldVal) }) - -// targetACO := "A0002" -// assert := assert.New(s.T()) - -// type test struct { -// path string -// err error -// expectedLogs []string -// } - -// tests := []test{ -// { -// path: "../../shared_files/cclf/archives/valid/", -// err: 
errors.New("files skipped or failed import. See logs for more details"), -// expectedLogs: []string{"Successfully imported 6 files.", "Failed to import 0 files.", "Skipped 0 files."}, -// }, -// { -// path: "../../shared_files/cclf/archives/invalid_bcd/", -// err: errors.New("failed to import 1 files"), -// expectedLogs: []string{"missing CCLF0 or CCLF8 file in zip"}, -// }, -// { -// path: "../../shared_files/cclf/archives/skip/", -// err: errors.New("files failed to import or no files were imported. See logs for more details."), -// expectedLogs: []string{"Successfully imported 0 files.", "Failed to import 0 files.", "Skipped 0 files."}, -// }, -// } - -// for _, tc := range tests { -// postgrestest.DeleteCCLFFilesByCMSID(s.T(), s.db, targetACO) -// defer postgrestest.DeleteCCLFFilesByCMSID(s.T(), s.db, targetACO) -// path, cleanup := testUtils.CopyToTemporaryDirectory(s.T(), tc.path) -// defer cleanup() - -// // reset global logs to clean up any previous log entries -// logger.SetupLoggers() - -// args := []string{"bcda", "import-cclf-directory", constants.DirectoryArg, path} -// err := s.testApp.Run(args) -// if tc.err == nil { -// assert.Nil(err) -// } - -// var failed bool -// content, err := os.ReadFile(os.Getenv("BCDA_ERROR_LOG")) -// assert.Nil(err) - -// // go through each expected log and make sure it exists in all logs -// for _, expectedLog := range tc.expectedLogs { -// if !strings.Contains(string(content), expectedLog) { -// failed = true -// } -// } - -// assert.False(failed) -// } -// } - -// func (s *CLITestSuite) TestImportSuppressionDirectoryFromLocal() { -// assert := assert.New(s.T()) - -// buf := new(bytes.Buffer) -// s.testApp.Writer = buf - -// path, cleanup := testUtils.CopyToTemporaryDirectory(s.T(), "../../shared_files/synthetic1800MedicareFiles/test2/") -// defer cleanup() - -// args := []string{"bcda", constants.ImportSupDir, constants.DirectoryArg, path} -// err := s.testApp.Run(args) -// assert.Nil(err) -// 
assert.Contains(buf.String(), constants.CompleteMedSupDataImp) -// assert.Contains(buf.String(), "Files imported: 2") -// assert.Contains(buf.String(), "Files failed: 0") -// assert.Contains(buf.String(), "Files skipped: 0") - -// fs := postgrestest.GetSuppressionFileByName(s.T(), s.db, -// "T#EFT.ON.ACO.NGD1800.DPRF.D181120.T1000010", -// "T#EFT.ON.ACO.NGD1800.DPRF.D190816.T0241391") - -// assert.Len(fs, 2) -// for _, f := range fs { -// postgrestest.DeleteSuppressionFileByID(s.T(), s.db, f.ID) -// } -// } - -// func (s *CLITestSuite) TestImportSuppressionDirectoryFromS3() { -// assert := assert.New(s.T()) - -// buf := new(bytes.Buffer) -// s.testApp.Writer = buf - -// path, cleanup := testUtils.CopyToS3(s.T(), "../../shared_files/synthetic1800MedicareFiles/test2/") -// defer cleanup() - -// args := []string{"bcda", constants.ImportSupDir, constants.DirectoryArg, path, constants.FileSourceArg, "s3", constants.S3EndpointArg, conf.GetEnv("BFD_S3_ENDPOINT")} -// err := s.testApp.Run(args) -// assert.Nil(err) -// assert.Contains(buf.String(), constants.CompleteMedSupDataImp) -// assert.Contains(buf.String(), "Files imported: 2") -// assert.Contains(buf.String(), "Files failed: 0") -// assert.Contains(buf.String(), "Files skipped: 0") - -// fs := postgrestest.GetSuppressionFileByName(s.T(), s.db, -// "T#EFT.ON.ACO.NGD1800.DPRF.D181120.T1000010", -// "T#EFT.ON.ACO.NGD1800.DPRF.D190816.T0241391") - -// assert.Len(fs, 2) -// for _, f := range fs { -// postgrestest.DeleteSuppressionFileByID(s.T(), s.db, f.ID) -// } -// } - -// func (s *CLITestSuite) TestImportSuppressionDirectory_Skipped() { -// assert := assert.New(s.T()) - -// buf := new(bytes.Buffer) -// s.testApp.Writer = buf - -// path, cleanup := testUtils.CopyToTemporaryDirectory(s.T(), "../../shared_files/suppressionfile_BadFileNames/") -// defer cleanup() - -// args := []string{"bcda", constants.ImportSupDir, constants.DirectoryArg, path} -// err := s.testApp.Run(args) -// assert.Nil(err) -// 
assert.Contains(buf.String(), constants.CompleteMedSupDataImp) -// assert.Contains(buf.String(), "Files imported: 0") -// assert.Contains(buf.String(), "Files failed: 0") -// assert.Contains(buf.String(), "Files skipped: 2") -// } - -// func (s *CLITestSuite) TestImportSuppressionDirectory_Failed() { -// assert := assert.New(s.T()) - -// buf := new(bytes.Buffer) -// s.testApp.Writer = buf - -// path, cleanup := testUtils.CopyToTemporaryDirectory(s.T(), "../../shared_files/suppressionfile_BadHeader/") -// defer cleanup() - -// args := []string{"bcda", constants.ImportSupDir, constants.DirectoryArg, path} -// err := s.testApp.Run(args) -// assert.EqualError(err, "one or more suppression files failed to import correctly") -// assert.Contains(buf.String(), constants.CompleteMedSupDataImp) -// assert.Contains(buf.String(), "Files imported: 0") -// assert.Contains(buf.String(), "Files failed: 1") -// assert.Contains(buf.String(), "Files skipped: 0") -// } +func (s *CLITestSuite) TestImportSuppressionDirectoryFromLocal() { + assert := assert.New(s.T()) + + buf := new(bytes.Buffer) + s.testApp.Writer = buf + + path, cleanup := testUtils.CopyToTemporaryDirectory(s.T(), "../../shared_files/synthetic1800MedicareFiles/test2/") + defer cleanup() + + args := []string{"bcda", constants.ImportSupDir, constants.DirectoryArg, path} + err := s.testApp.Run(args) + assert.Nil(err) + assert.Contains(buf.String(), constants.CompleteMedSupDataImp) + assert.Contains(buf.String(), "Files imported: 2") + assert.Contains(buf.String(), "Files failed: 0") + assert.Contains(buf.String(), "Files skipped: 0") + + fs := postgrestest.GetSuppressionFileByName(s.T(), s.db, + "T#EFT.ON.ACO.NGD1800.DPRF.D181120.T1000010", + "T#EFT.ON.ACO.NGD1800.DPRF.D190816.T0241391") + + assert.Len(fs, 2) + for _, f := range fs { + postgrestest.DeleteSuppressionFileByID(s.T(), s.db, f.ID) + } +} func (s *CLITestSuite) TestDenylistACO() { denylistedCMSID := testUtils.RandomHexID()[0:4] diff --git 
a/bcda/constants/test_constants.go b/bcda/constants/test_constants.go index 638464a85..64772a948 100644 --- a/bcda/constants/test_constants.go +++ b/bcda/constants/test_constants.go @@ -62,7 +62,7 @@ const GenClientCred = "generate-client-credentials" const ResetClientCred = "reset-client-credentials" // #nosec - G101 credentials for unit testing const ArchJobFiles = "archive-job-files" -// const ImportSupDir = "import-suppression-directory" +const ImportSupDir = "import-suppression-directory" const DirectoryArg = "--directory" const FileSourceArg = "--filesource" const S3EndpointArg = "--s3endpoint" diff --git a/docker-compose.test.yml b/docker-compose.test.yml index 0a778a504..f527b4741 100644 --- a/docker-compose.test.yml +++ b/docker-compose.test.yml @@ -27,8 +27,8 @@ services: - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-foobar} - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-foobar} - AWS_DEFAULT_REGION=us-east-1 - - AWS_ENDPOINT_URL=http://localstack:4566 - - BFD_S3_ENDPOINT=${BFD_S3_ENDPOINT:-http://localstack:4566} + - AWS_ENDPOINT_URL=http://localstack-unit-test:4566 + - BFD_S3_ENDPOINT=${BFD_S3_ENDPOINT:-http://localstack-unit-test:4566} - ENV=local - DATABASE_URL=postgresql://postgres:toor@db-unit-test:5432/bcda_test?sslmode=disable - GOLANGCI_LINT_CACHE=/root/.cache/go-build From f45bbf097b7fbc477dd615627a86e484525ed9e5 Mon Sep 17 00:00:00 2001 From: carlpartridge Date: Thu, 30 Oct 2025 16:59:34 -0400 Subject: [PATCH 15/16] Add back in still used cli script --- bcda/bcdacli/cli.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bcda/bcdacli/cli.go b/bcda/bcdacli/cli.go index 7bb459296..1d42315bc 100644 --- a/bcda/bcdacli/cli.go +++ b/bcda/bcdacli/cli.go @@ -389,8 +389,7 @@ func setUpApp() *cli.App { ignoreSignals() r := postgres.NewRepository(db) - var file_handler optout.OptOutFileHandler - file_handler = &optout.LocalFileHandler{ + var file_handler optout.OptOutFileHandler = &optout.LocalFileHandler{ Logger: log.API, 
PendingDeletionDir: conf.GetEnv("PENDING_DELETION_DIR"), FileArchiveThresholdHr: hours, From d8e5d198482da1150e6889bce96ce610bbf457e5 Mon Sep 17 00:00:00 2001 From: carlpartridge Date: Thu, 30 Oct 2025 17:03:36 -0400 Subject: [PATCH 16/16] Remove commented out code --- bcda/bcdacli/cli.go | 74 --------------------------------------------- 1 file changed, 74 deletions(-) diff --git a/bcda/bcdacli/cli.go b/bcda/bcdacli/cli.go index 1d42315bc..715f93188 100644 --- a/bcda/bcdacli/cli.go +++ b/bcda/bcdacli/cli.go @@ -279,80 +279,6 @@ func setUpApp() *cli.App { return nil }, }, - // I dont believe we import-cclf-directory anymore. We now use the cclf lambda. - // { - // Name: "import-cclf-directory", - // Category: constants.CliDataImpCategory, - // Usage: "Import all CCLF files from the specified directory", - // Flags: []cli.Flag{ - // cli.StringFlag{ - // Name: "directory", - // Usage: "Directory where CCLF files are located", - // Destination: &filePath, - // }, - // cli.StringFlag{ - // Name: "filesource", - // Usage: "Source of files. Must be one of 'local', 's3'. 
Defaults to 'local'", - // Destination: &fileSource, - // }, - // cli.StringFlag{ - // Name: "s3endpoint", - // Usage: "Custom S3 endpoint", - // Destination: &s3Endpoint, - // }, - // cli.StringFlag{ - // Name: "assume-role-arn", - // Usage: "Optional IAM role ARN to assume for S3", - // Destination: &assumeRoleArn, - // }, - // }, - // Action: func(c *cli.Context) error { - // ignoreSignals() - // var file_processor cclf.CclfFileProcessor - - // cfg, err := config.LoadDefaultConfig(context.Background()) - // if err != nil { - // log.API.Error("error loading default config: ", err) - // return err - // } - // s3Client := s3.NewFromConfig(cfg) - - // if fileSource == "s3" { - // file_processor = &cclf.S3FileProcessor{ - // Handler: optout.S3FileHandler{ - // Client: s3Client, - // Logger: log.API, - // Endpoint: s3Endpoint, - // AssumeRoleArn: assumeRoleArn, - // }, - // } - // } else { - // file_processor = &cclf.LocalFileProcessor{ - // Handler: optout.LocalFileHandler{ - // Logger: log.API, - // PendingDeletionDir: conf.GetEnv("PENDING_DELETION_DIR"), - // FileArchiveThresholdHr: hours, - // }, - // } - // } - - // importer := cclf.NewCclfImporter(log.API, file_processor, pool) - - // success, failure, skipped, err := importer.ImportCCLFDirectory(filePath) - // if err != nil { - // log.API.Error("error returned from ImportCCLFDirectory: ", err) - // return err - // } - // if failure > 0 || skipped > 0 { - // log.API.Errorf("Successfully imported %v files. Failed to import %v files. Skipped %v files. See logs for more details.", success, failure, skipped, err) - // err = errors.New("files skipped or failed import. See logs for more details") - // return err - // } - // log.API.Infof("Completed CCLF import. Successfully imported %v files. Failed to import %v files. Skipped %v files. See logs for more details.", success, failure, skipped) - // fmt.Fprintf(app.Writer, "Completed CCLF import. Successfully imported %v files. Failed to import %v files. 
Skipped %v files. See logs for more details.", success, failure, skipped) - // return err - // }, - // }, { Name: "generate-cclf-runout-files", Category: constants.CliDataImpCategory,