diff --git a/auth.go b/auth.go
index e5910b266f9..f8be8380f05 100644
--- a/auth.go
+++ b/auth.go
@@ -20,7 +20,8 @@ var ErrUnableToCreateToken = &errors.Error{
 // Authorization is an authorization. 🎉
 type Authorization struct {
 	ID          platform.ID `json:"id"`
-	Token       string      `json:"token"`
+	Token       string      `json:"token,omitempty"`
+	HashedToken string      `json:"hashedToken,omitempty"`
 	Status      Status      `json:"status"`
 	Description string      `json:"description"`
 	OrgID       platform.ID `json:"orgID"`
@@ -35,7 +36,60 @@ type AuthorizationUpdate struct {
 	Description *string `json:"description,omitempty"`
 }
 
+const (
+	// authTokenClearValue is used to indicate that Token or HashedToken is cleared (not set).
+	authTokenClearValue = ""
+)
+
+// IsAuthTokenSet returns true if token is considered set. It applies
+// to both unhashed tokens (Authorization.Token) and
+// hashed tokens (Authorization.HashedToken).
+func IsAuthTokenSet(token string) bool {
+	return token != authTokenClearValue
+}
+
+// IsTokenSet returns true if Token is set.
+func (a *Authorization) IsTokenSet() bool {
+	return IsAuthTokenSet(a.Token)
+}
+
+// IsTokenClear returns true if Token is unset.
+func (a *Authorization) IsTokenClear() bool {
+	return !a.IsTokenSet()
+}
+
+// ClearToken clears Token.
+func (a *Authorization) ClearToken() {
+	a.Token = authTokenClearValue
+}
+
+// IsHashedTokenSet returns true if HashedToken is set.
+func (a *Authorization) IsHashedTokenSet() bool {
+	return IsAuthTokenSet(a.HashedToken)
+}
+
+// IsHashedTokenClear returns true if HashedToken is unset.
+func (a *Authorization) IsHashedTokenClear() bool {
+	return !a.IsHashedTokenSet()
+}
+
+// ClearHashedToken clears HashedToken.
+func (a *Authorization) ClearHashedToken() {
+	a.HashedToken = authTokenClearValue
+}
+
+// NoTokensSet returns true if neither Token nor HashedToken is set.
+func (a *Authorization) NoTokensSet() bool {
+	return a.IsTokenClear() && a.IsHashedTokenClear()
+}
+
+// BothTokensSet returns true if both Token and HashedToken are set.
+func (a *Authorization) BothTokensSet() bool {
+	return a.IsTokenSet() && a.IsHashedTokenSet()
+}
+
 // Valid ensures that the authorization is valid.
+// Valid does not check whether tokens are set properly.
 func (a *Authorization) Valid() error {
 	for _, p := range a.Permissions {
 		if p.Resource.OrgID != nil && *p.Resource.OrgID != a.OrgID {
diff --git a/authorization/error.go b/authorization/error.go
index 2aec2ac32ee..aeb5b79a73e 100644
--- a/authorization/error.go
+++ b/authorization/error.go
@@ -50,12 +50,12 @@ func ErrInvalidAuthIDError(err error) *errors.Error {
 	}
 }
 
-// UnexpectedAuthIndexError is used when the error comes from an internal system.
-func UnexpectedAuthIndexError(err error) *errors.Error {
+// UnexpectedAuthBucketError is used when the error comes from an internal system.
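// A hedged sketch of the intended call pattern (it mirrors getNamedAuthBucket later in this
// change, where a failed bucket lookup is wrapped with the bucket name for context):
//
//	b, err := tx.Bucket(authBucketName)
//	if err != nil {
//		return nil, UnexpectedAuthBucketError(authBucketName, err)
//	}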
+func UnexpectedAuthBucketError(index []byte, err error) *errors.Error { var e *errors.Error if !errors2.As(err, &e) { e = &errors.Error{ - Msg: fmt.Sprintf("unexpected error retrieving auth index; Err: %v", err), + Msg: fmt.Sprintf("unexpected error retrieving auth bucket %q; Err: %v", index, err), Code: errors.EInternal, Err: err, } diff --git a/authorization/hasher.go b/authorization/hasher.go new file mode 100644 index 00000000000..26b2e0c55cd --- /dev/null +++ b/authorization/hasher.go @@ -0,0 +1,148 @@ +package authorization + +import ( + "errors" + "fmt" + + "github.com/go-crypt/crypt" + "github.com/go-crypt/crypt/algorithm" + influxdb2_algo "github.com/influxdata/influxdb/v2/pkg/crypt/algorithm/influxdb2" +) + +var ( + ErrNoDecoders = errors.New("no authorization decoders specified") +) + +type AuthorizationHasher struct { + // hasher encodes tokens into hashed PHC-encoded tokens. + hasher algorithm.Hash + + // decoder decodes hashed PHC-encoded tokens into crypt.Digest objects. + decoder *crypt.Decoder + + // allHashers is the list of all hashers which could be used for hashed index lookup. + allHashers []algorithm.Hash +} + +const ( + DefaultHashVariant = influxdb2_algo.VariantSHA256 + DefaultHashVariantName = influxdb2_algo.VariantIdentifierSHA256 + + // HashVariantNameUnknown is the placeholder name used for unknown or unsupported hash variants. + HashVariantNameUnknown = "N/A" +) + +type authorizationHasherOptions struct { + hasherVariant influxdb2_algo.Variant + decoderVariants []influxdb2_algo.Variant +} + +type AuthorizationHasherOption func(o *authorizationHasherOptions) + +func WithHasherVariant(variant influxdb2_algo.Variant) AuthorizationHasherOption { + return func(o *authorizationHasherOptions) { + o.hasherVariant = variant + } +} + +func WithDecoderVariants(variants []influxdb2_algo.Variant) AuthorizationHasherOption { + return func(o *authorizationHasherOptions) { + o.decoderVariants = variants + } +} + +// NewAuthorizationHasher creates an AuthorizationHasher for influxdb2 algorithm hashed tokens. +// variantName specifies which token hashing variant to use, with blank indicating to use the default +// hashing variant. By default, all variants of the influxdb2 hashing scheme are supported for +// maximal compatibility. +func NewAuthorizationHasher(opts ...AuthorizationHasherOption) (*AuthorizationHasher, error) { + options := authorizationHasherOptions{ + hasherVariant: DefaultHashVariant, + decoderVariants: influxdb2_algo.AllVariants, + } + + for _, o := range opts { + o(&options) + } + + if len(options.decoderVariants) == 0 { + return nil, fmt.Errorf("error in NewAuthorizationHasher: %w", ErrNoDecoders) + } + + // Create the hasher used for hashing new tokens before storage. + hasher, err := influxdb2_algo.New(influxdb2_algo.WithVariant(options.hasherVariant)) + if err != nil { + return nil, fmt.Errorf("creating hasher %s for AuthorizationHasher: %w", options.hasherVariant.Prefix(), err) + } + + // Create decoder and register all requested decoder variants. + decoder := crypt.NewDecoder() + for _, variant := range options.decoderVariants { + if err := variant.RegisterDecoder(decoder); err != nil { + return nil, fmt.Errorf("registering variant %s with decoder: %w", variant.Prefix(), err) + } + } + + // Create all variant hashers needed for requested decoder variants. This is required for operations where + // all potential variations of a raw token must be hashed, such as looking up a hash in the hashed token index. 
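	// For example, GetAuthorizationByToken (elsewhere in this change) probes the hashed index
	// with every candidate digest returned by AllHashes, since it cannot know which variant
	// produced the stored hash.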
+ var allHashers []algorithm.Hash + for _, variant := range options.decoderVariants { + h, err := influxdb2_algo.New(influxdb2_algo.WithVariant(variant)) + if err != nil { + return nil, fmt.Errorf("creating hasher %s for authorization service index lookups: %w", variant.Prefix(), err) + } + allHashers = append(allHashers, h) + } + + return &AuthorizationHasher{ + hasher: hasher, + decoder: decoder, + allHashers: allHashers, + }, nil +} + +// Hash generates a PHC-encoded hash of token using the selected hash algorithm variant. +func (h *AuthorizationHasher) Hash(token string) (string, error) { + digest, err := h.hasher.Hash(token) + if err != nil { + return "", fmt.Errorf("hashing raw token failed: %w", err) + } + return digest.Encode(), nil +} + +// AllHashes generates a list of PHC-encoded hashes of token for all deterministic (i.e. non-salted) supported hashes. +func (h *AuthorizationHasher) AllHashes(token string) ([]string, error) { + hashes := make([]string, len(h.allHashers)) + for idx, hasher := range h.allHashers { + digest, err := hasher.Hash(token) + if err != nil { + variantName := HashVariantNameUnknown + if influxdb_hasher, ok := hasher.(*influxdb2_algo.Hasher); ok { + variantName = influxdb_hasher.Variant().Prefix() + } + return nil, fmt.Errorf("hashing raw token failed (variant=%s): %w", variantName, err) + } + hashes[idx] = digest.Encode() + } + return hashes, nil +} + +// AllHashesCount returns the number of hash variants available through AllHashes. +func (h *AuthorizationHasher) AllHashesCount() int { + return len(h.allHashers) +} + +// Decode decodes a PHC-encoded hash into a Digest object that can be matched. +func (h *AuthorizationHasher) Decode(phc string) (algorithm.Digest, error) { + return h.decoder.Decode(phc) +} + +// Match determines if a raw token matches a PHC-encoded token. 
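// A minimal usage sketch, assuming the default options and eliding error handling
// (the token value is hypothetical):
//
//	h, _ := NewAuthorizationHasher()
//	phc, _ := h.Hash("my-raw-token")      // PHC-encoded digest suitable for storage
//	ok, _ := h.Match(phc, "my-raw-token") // ok == true for the original token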
+func (h *AuthorizationHasher) Match(phc string, token string) (bool, error) { + digest, err := h.Decode(phc) + if err != nil { + return false, err + } + + return digest.MatchAdvanced(token) +} diff --git a/authorization/hasher_test.go b/authorization/hasher_test.go new file mode 100644 index 00000000000..3eac4534e44 --- /dev/null +++ b/authorization/hasher_test.go @@ -0,0 +1,45 @@ +package authorization_test + +import ( + "testing" + + "github.com/go-crypt/crypt/algorithm" + "github.com/influxdata/influxdb/v2/authorization" + influxdb2_algo "github.com/influxdata/influxdb/v2/pkg/crypt/algorithm/influxdb2" + "github.com/stretchr/testify/require" +) + +func Test_NewAuthorizationHasher_EmptyDecoderVariants(t *testing.T) { + hasher, err := authorization.NewAuthorizationHasher( + authorization.WithDecoderVariants([]influxdb2_algo.Variant{}), + ) + + require.ErrorIs(t, err, authorization.ErrNoDecoders) + require.Nil(t, hasher) +} + +func TestNewAuthorizationHasher_WithInvalidDecoderVariant(t *testing.T) { + // Test that using an invalid decoder variant returns an error + hasher, err := authorization.NewAuthorizationHasher( + authorization.WithDecoderVariants([]influxdb2_algo.Variant{ + influxdb2_algo.Variant(-1), // Invalid variant + }), + ) + + // Should return an error and nil hasher + require.ErrorIs(t, err, algorithm.ErrParameterInvalid) + require.Contains(t, err.Error(), "registering variant") + require.Nil(t, hasher) +} + +func TestNewAuthorizationHasher_WithHasherVariantInvalid(t *testing.T) { + // Test that using VariantNone returns an error + hasher, err := authorization.NewAuthorizationHasher( + authorization.WithHasherVariant(influxdb2_algo.Variant(-1)), + ) + + // Should return an error and nil hasher + require.ErrorIs(t, err, algorithm.ErrParameterInvalid) + require.ErrorContains(t, err, "creating hasher") + require.Nil(t, hasher) +} diff --git a/authorization/http_server_test.go b/authorization/http_server_test.go index c2cf01fe741..d9d29b7ce2b 100644 --- a/authorization/http_server_test.go +++ b/authorization/http_server_test.go @@ -144,65 +144,57 @@ func TestService_handlePostAuthorization(t *testing.T) { } for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Helper() + for _, useHashedTokens := range []bool{false, true} { + t.Run(fmt.Sprintf("%s/TokenHashing=%t", tt.name, useHashedTokens), func(t *testing.T) { + s := itesting.NewTestInmemStore(t) - s := itesting.NewTestInmemStore(t) - storage, err := NewStore(s) - if err != nil { - t.Fatal(err) - } + storage, err := NewStore(context.Background(), s, useHashedTokens) + require.NoError(t, err) - svc := NewService(storage, tt.fields.TenantService) + svc := NewService(storage, tt.fields.TenantService) - handler := NewHTTPAuthHandler(zaptest.NewLogger(t), svc, tt.fields.TenantService) - router := chi.NewRouter() - router.Mount(handler.Prefix(), handler) + handler := NewHTTPAuthHandler(zaptest.NewLogger(t), svc, tt.fields.TenantService) + router := chi.NewRouter() + router.Mount(handler.Prefix(), handler) - req, err := newPostAuthorizationRequest(tt.args.authorization) - if err != nil { - t.Fatalf("failed to create new authorization request: %v", err) - } - b, err := json.Marshal(req) - if err != nil { - t.Fatalf("failed to unmarshal authorization: %v", err) - } + req, err := newPostAuthorizationRequest(tt.args.authorization) + require.NoError(t, err) - r := httptest.NewRequest("GET", "http://any.url", bytes.NewReader(b)) - r = r.WithContext(context.WithValue( - context.Background(), - httprouter.ParamsKey, - 
httprouter.Params{ - { - Key: "userID", - Value: fmt.Sprintf("%d", tt.args.session.UserID), - }, - })) + b, err := json.Marshal(req) + require.NoError(t, err) - w := httptest.NewRecorder() + r := httptest.NewRequest("GET", "http://any.url", bytes.NewReader(b)) + r = r.WithContext(context.WithValue( + context.Background(), + httprouter.ParamsKey, + httprouter.Params{ + { + Key: "userID", + Value: fmt.Sprintf("%d", tt.args.session.UserID), + }, + })) - ctx := icontext.SetAuthorizer(context.Background(), tt.args.session) - r = r.WithContext(ctx) + w := httptest.NewRecorder() - handler.handlePostAuthorization(w, r) + ctx := icontext.SetAuthorizer(context.Background(), tt.args.session) + r = r.WithContext(ctx) - res := w.Result() - content := res.Header.Get("Content-Type") - body, _ := io.ReadAll(res.Body) + handler.handlePostAuthorization(w, r) - if res.StatusCode != tt.wants.statusCode { - t.Logf("headers: %v body: %s", res.Header, body) - t.Errorf("%q. handlePostAuthorization() = %v, want %v", tt.name, res.StatusCode, tt.wants.statusCode) - } - if tt.wants.contentType != "" && content != tt.wants.contentType { - t.Errorf("%q. handlePostAuthorization() = %v, want %v", tt.name, content, tt.wants.contentType) - } - if diff, err := jsonDiff(string(body), tt.wants.body); diff != "" { - t.Errorf("%q. handlePostAuthorization() = ***%s***", tt.name, diff) - } else if err != nil { - t.Errorf("%q, handlePostAuthorization() error: %v", tt.name, err) - } - }) + res := w.Result() + contentType := res.Header.Get("Content-Type") + body, err := io.ReadAll(res.Body) + require.NoError(t, err) + + require.Equalf(t, tt.wants.statusCode, res.StatusCode, "headers: %v body: %s", res.Header, body) + if tt.wants.contentType != "" { + require.Equal(t, tt.wants.contentType, contentType) + } + diff, err := jsonDiff(string(body), tt.wants.body) + require.NoError(t, err, "jsonDiff failed") + require.Empty(t, diff, "authorization endpoint returned unexpected result") + }) + } } } @@ -340,8 +332,6 @@ func TestService_handleGetAuthorization(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - t.Helper() - handler := NewHTTPAuthHandler(zaptest.NewLogger(t), tt.fields.AuthorizationService, tt.fields.TenantService) router := chi.NewRouter() router.Mount(handler.Prefix(), handler) @@ -356,21 +346,17 @@ func TestService_handleGetAuthorization(t *testing.T) { handler.handleGetAuthorization(w, r) res := w.Result() - content := res.Header.Get("Content-Type") - body, _ := io.ReadAll(res.Body) + contentType := res.Header.Get("Content-Type") + body, err := io.ReadAll(res.Body) + require.NoError(t, err) - if res.StatusCode != tt.wants.statusCode { - t.Logf("headers: %v body: %s", res.Header, body) - t.Errorf("%q. handleGetAuthorization() = %v, want %v", tt.name, res.StatusCode, tt.wants.statusCode) - } - if tt.wants.contentType != "" && content != tt.wants.contentType { - t.Errorf("%q. handleGetAuthorization() = %v, want %v", tt.name, content, tt.wants.contentType) - } - if diff, err := jsonDiff(string(body), tt.wants.body); err != nil { - t.Errorf("%q, handleGetAuthorization. error unmarshalling json %v", tt.name, err) - } else if tt.wants.body != "" && diff != "" { - t.Errorf("%q. 
handleGetAuthorization() = -got/+want %s**", tt.name, diff) + require.Equalf(t, tt.wants.statusCode, res.StatusCode, "headers: %v body: %s", res.Header, body) + if tt.wants.contentType != "" { + require.Equal(t, tt.wants.contentType, contentType) } + diff, err := jsonDiff(string(body), tt.wants.body) + require.NoError(t, err, "jsonDiff failed") + require.Empty(t, diff, "authorization endpoint returned unexpected result") }) } } @@ -715,52 +701,49 @@ func TestService_handleGetAuthorizations(t *testing.T) { } for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Helper() + for _, useHashedTokens := range []bool{false, true} { + t.Run(tt.name, func(t *testing.T) { + t.Helper() - s := itesting.NewTestInmemStore(t) - storage, err := NewStore(s) - if err != nil { - t.Fatal(err) - } + s := itesting.NewTestInmemStore(t) - svc := NewService(storage, tt.fields.TenantService) + storage, err := NewStore(context.Background(), s, useHashedTokens) + require.NoError(t, err) - handler := NewHTTPAuthHandler(zaptest.NewLogger(t), svc, tt.fields.TenantService) - router := chi.NewRouter() - router.Mount(handler.Prefix(), handler) + svc := NewService(storage, tt.fields.TenantService) - r := httptest.NewRequest("GET", "http://any.url", nil) + handler := NewHTTPAuthHandler(zaptest.NewLogger(t), svc, tt.fields.TenantService) + router := chi.NewRouter() + router.Mount(handler.Prefix(), handler) - qp := r.URL.Query() - for k, vs := range tt.args.queryParams { - for _, v := range vs { - qp.Add(k, v) - } - } - r.URL.RawQuery = qp.Encode() + r := httptest.NewRequest("GET", "http://any.url", nil) - w := httptest.NewRecorder() + qp := r.URL.Query() + for k, vs := range tt.args.queryParams { + for _, v := range vs { + qp.Add(k, v) + } + } + r.URL.RawQuery = qp.Encode() - handler.handleGetAuthorizations(w, r) + w := httptest.NewRecorder() - res := w.Result() - content := res.Header.Get("Content-Type") - body, _ := io.ReadAll(res.Body) + handler.handleGetAuthorizations(w, r) - if res.StatusCode != tt.wants.statusCode { - t.Errorf("%q. handleGetAuthorizations() = %v, want %v", tt.name, res.StatusCode, tt.wants.statusCode) - } - if tt.wants.contentType != "" && content != tt.wants.contentType { - t.Errorf("%q. handleGetAuthorizations() = %v, want %v", tt.name, content, tt.wants.contentType) - } - if diff, err := jsonDiff(string(body), tt.wants.body); diff != "" { - t.Errorf("%q. handleGetAuthorizations() = ***%s***", tt.name, diff) - } else if err != nil { - t.Errorf("%q, handleGetAuthorizations() error: %v", tt.name, err) - } + res := w.Result() + contentType := res.Header.Get("Content-Type") + body, err := io.ReadAll(res.Body) + require.NoError(t, err) - }) + require.Equal(t, tt.wants.statusCode, res.StatusCode) + if tt.wants.contentType != "" { + require.Equal(t, tt.wants.contentType, contentType) + } + diff, err := jsonDiff(string(body), tt.wants.body) + require.NoError(t, err, "jsonDiff failed") + require.Empty(t, diff, "authorization endpoint returned unexpected results") + }) + } } } @@ -846,22 +829,19 @@ func TestService_handleDeleteAuthorization(t *testing.T) { handler.handleDeleteAuthorization(w, r) res := w.Result() - content := res.Header.Get("Content-Type") - body, _ := io.ReadAll(res.Body) + contentType := res.Header.Get("Content-Type") + body, err := io.ReadAll(res.Body) + require.NoError(t, err) - if res.StatusCode != tt.wants.statusCode { - t.Errorf("%q. 
handleDeleteAuthorization() = %v, want %v", tt.name, res.StatusCode, tt.wants.statusCode) - } - if tt.wants.contentType != "" && content != tt.wants.contentType { - t.Errorf("%q. handleDeleteAuthorization() = %v, want %v", tt.name, content, tt.wants.contentType) + require.Equal(t, tt.wants.statusCode, res.StatusCode) + if tt.wants.contentType != "" { + require.Equal(t, tt.wants.contentType, contentType, "handleDeleteAuthorization") } if tt.wants.body != "" { - if diff, err := jsonDiff(string(body), tt.wants.body); err != nil { - t.Errorf("%q, handleDeleteAuthorization(). error unmarshalling json %v", tt.name, err) - } else if diff != "" { - t.Errorf("%q. handleDeleteAuthorization() = ***%s***", tt.name, diff) - } + diff, err := jsonDiff(string(body), tt.wants.body) + require.NoError(t, err, "jsonDiff failed") + require.Empty(t, diff, "authorization endpoint returned unexpected results") } }) } @@ -895,7 +875,7 @@ func jsonDiff(s1, s2 string) (diff string, err error) { var authorizationCmpOptions = cmp.Options{ cmpopts.EquateEmpty(), - cmpopts.IgnoreFields(influxdb.Authorization{}, "ID", "Token", "CreatedAt", "UpdatedAt"), + cmpopts.IgnoreFields(influxdb.Authorization{}, "ID", "Token", "HashedToken", "CreatedAt", "UpdatedAt"), cmp.Comparer(func(x, y []byte) bool { return bytes.Equal(x, y) }), diff --git a/authorization/service.go b/authorization/service.go index 196c4a7b9e4..007a0283b4b 100644 --- a/authorization/service.go +++ b/authorization/service.go @@ -19,6 +19,7 @@ type Service struct { tenantService TenantService } +// NewService creates a new authorization service with a given Store and TenantService. func NewService(st *Store, ts TenantService) influxdb.AuthorizationService { return &Service{ store: st, @@ -52,7 +53,7 @@ func (s *Service) CreateAuthorization(ctx context.Context, a *influxdb.Authoriza return ErrTokenAlreadyExistsError } - if a.Token == "" { + if a.NoTokensSet() { token, err := s.tokenGenerator.Token() if err != nil { return &errors.Error{ @@ -137,6 +138,7 @@ func (s *Service) FindAuthorizations(ctx context.Context, filter influxdb.Author if filter.Token != nil { var auth *influxdb.Authorization err := s.store.View(ctx, func(tx kv.Tx) error { + // GetAuthorizationsByToken also looks for equivalent hashed tokens. 
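			// A hedged sketch (raw is a hypothetical plaintext token): callers can filter on the
			// raw token even when only its hash is stored:
			//
			//	auths, _, err := svc.FindAuthorizations(ctx, influxdb.AuthorizationFilter{Token: &raw})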
a, e := s.store.GetAuthorizationByToken(ctx, tx, *filter.Token) if e != nil { return e diff --git a/authorization/service_test.go b/authorization/service_test.go index c729227dddd..8b4e1ec966a 100644 --- a/authorization/service_test.go +++ b/authorization/service_test.go @@ -11,23 +11,24 @@ import ( influxdbtesting "github.com/influxdata/influxdb/v2/testing" ) -func initBoltAuthService(f influxdbtesting.AuthorizationFields, t *testing.T) (influxdb.AuthorizationService, string, func()) { +func initBoltAuthService(f influxdbtesting.AuthorizationFields, useHashedTokens bool, t *testing.T) (influxdb.AuthorizationService, string, func()) { s, closeBolt := influxdbtesting.NewTestBoltStore(t) - svc, closeSvc := initAuthService(s, f, t) + svc, closeSvc := initAuthService(s, f, useHashedTokens, t) return svc, "service_auth", func() { closeSvc() closeBolt() } } -func initAuthService(s kv.Store, f influxdbtesting.AuthorizationFields, t *testing.T) (influxdb.AuthorizationService, func()) { +func initAuthService(s kv.Store, f influxdbtesting.AuthorizationFields, useHashedTokens bool, t *testing.T) (influxdb.AuthorizationService, func()) { st := tenant.NewStore(s) if f.OrgIDGenerator != nil { st.OrgIDGen = f.OrgIDGenerator } ts := tenant.NewService(st) - storage, err := authorization.NewStore(s) + + storage, err := authorization.NewStore(context.Background(), s, useHashedTokens) if err != nil { t.Fatal(err) } @@ -63,5 +64,10 @@ func initAuthService(s kv.Store, f influxdbtesting.AuthorizationFields, t *testi func TestBoltAuthService(t *testing.T) { t.Parallel() - influxdbtesting.AuthorizationService(initBoltAuthService, t) + for _, useHashedTokens := range []bool{false, true} { + init := func(f influxdbtesting.AuthorizationFields, t *testing.T) (influxdb.AuthorizationService, string, func()) { + return initBoltAuthService(f, useHashedTokens, t) + } + influxdbtesting.AuthorizationService(init, t) + } } diff --git a/authorization/storage.go b/authorization/storage.go index 4b8c8fdec3b..15e7d01bb28 100644 --- a/authorization/storage.go +++ b/authorization/storage.go @@ -2,33 +2,336 @@ package authorization import ( "context" + goerrors "errors" + "fmt" + "maps" + "slices" + "github.com/go-crypt/crypt" + "github.com/influxdata/influxdb/v2" "github.com/influxdata/influxdb/v2/kit/platform" "github.com/influxdata/influxdb/v2/kit/platform/errors" "github.com/influxdata/influxdb/v2/kit/tracing" "github.com/influxdata/influxdb/v2/kv" + influxdb2_algo "github.com/influxdata/influxdb/v2/pkg/crypt/algorithm/influxdb2" "github.com/influxdata/influxdb/v2/snowflake" + "go.uber.org/zap" +) + +/*--- +Token storage and verification + +Storage of hashed tokens has been added as an optional feature. This stores only the hash of a token +in BoltDB. Token hashing is enabled with the `--use-hashed-tokens` option. + +Upgrading the BoltDB schema is automatic on startup when using a new version of InfluxDB with token hashing support. +Additionally, raw tokens are automatically migrated to hashed tokens if `--use-hashed-tokens` is configured. +Due to the schema changes, to use a version of InfluxDB without hashed token support, a manual downgrade using +`influxd downgrade` must be run. Any tokens stored as hashed tokens will be unusable by the old version of InfluxDB +and must be reset or recreated. + +The implementation has the following behaviors under different scenarios: +* Token hashing is enabled. + * On startup and upgrade, any raw tokens in BoltDB are automatically hashed. 
The hashed token is
+    stored and the raw token is removed. No raw tokens remain in BoltDB.
+  * On downgrade, hashed tokens are deleted from BoltDB along with their indices. User confirmation
+    is required to proceed if hashed tokens are present.
+  * Token verification is performed by hashing the token and finding the hashed token in BoltDB.
+  * New tokens generated are stored only as hashes.
+  * When creating a backup, only hashed tokens are exported.
+  * When restoring a backup, any raw tokens are converted to hashed tokens and stored only as hashes.
+    No raw tokens are imported into BoltDB.
+  * When listing tokens, the hash algorithm used (e.g. `SHA-512`) is returned instead of the hashed token value.
+* Token hashing is disabled.
+  * On upgrade and startup, no user-visible action is taken. Any hashed tokens in BoltDB remain unchanged.
+    The BoltDB store is updated to support hashed tokens, but no existing tokens are migrated.
+  * On downgrade, hashed tokens are deleted from BoltDB along with their indices. User confirmation
+    is required to proceed if hashed tokens are present.
+  * Token verification is performed by looking up the raw token value if the provided token is not in PHC format.
+    If the raw token value is not found, then the hashed token value is calculated and token lookup attempted again.
+  * New tokens generated are stored as raw tokens.
+  * When creating a backup, tokens are exported in the format found in BoltDB (raw or hashed).
+  * When restoring a backup, both raw tokens and hashed tokens are restored unchanged.
+  * When listing tokens, raw tokens in BoltDB are returned. Hashed tokens in BoltDB are returned as
+    the hash algorithm (e.g. `SHA-512`) instead of the hashed token value.
+* Downgrading to an older InfluxDB without hashed token support with a BoltDB containing hashed tokens.
+  * Downgrading requires a manual `influxd downgrade` command. If hashed tokens are found in the
+    BoltDB, user confirmation is required. The user can also list impacted tokens. When downgrade is
+    complete, all hashed tokens have been deleted from BoltDB along with their indices. The tokens with
+    deleted hashes are no longer usable.
+  * Operations are as usual in InfluxDB 2.7.
+
+The hashed tokens in `Authorization.HashedToken` are stored in PHC format. PHC allows specifying the
+algorithm used and any parameters. This allows gradual token algorithm transitions if new token hashing
+algorithms are added in the future. PHC is used over MCF (Modular Crypt Format) because PHC is more
+flexible and MCF does not support our chosen hash scheme.
+
+When token hashing is enabled, on every startup (not just upgrade) InfluxDB scans the BoltDB for raw tokens in
+the `Authorization.Token` field. When found, a hash is immediately stored in the `Authorization.HashedToken`
+field and the `Authorization.Token` field is cleared. The token index is also updated to use the hashed
+token value instead of the raw token value. This migration must occur on every startup and not just upgrades
+because hashed tokens can be turned on and off with configuration.
+
+When a backup is made, the format stored in BoltDB is exported as-is. If hashed tokens are enabled, only hashed
+tokens are exported since only hashed tokens are stored. Without enabling hashed tokens, a mix of raw and
+hashed tokens may be present in the backup.
+
+When token hashing is enabled and a backup is restored, raw tokens are hashed before importing
+into BoltDB. Raw tokens are not stored.
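As a rough illustration of the PHC encoding mentioned above (a hedged sketch; the exact
identifier and fields come from the influxdb2 variant's Prefix() and encoder, not from this
document), a PHC string has the general shape:

    $<identifier>[$<param>=<value>,...][$<salt>][$<hash>]

For the deterministic (unsalted) influxdb2 variants, only the identifier and the encoded digest
portions carry information.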
+
+To verify tokens when hashed tokens are enabled, the presented token's hash is calculated and used
+for token index lookup. The rest of the authorization flow is unchanged.
+
+The hashed token index is separate from the raw token index. In addition, a token presented to the API
+is rejected before authorization starts if it is a PHC-encoded hash. The separate index and the rejection
+of PHC-encoded tokens at the API prevent the following attack:
+1. Hashed token is extracted from BoltDB.
+2. Token hashing is disabled.
+3. The hashed token is presented to the API, which will misinterpret it as a raw token and allow access.
+This attack is not possible due to the independent indices for raw and hashed tokens. When the hashed token
+is presented, it will not match any tokens in the raw index. The hashed token will then be hashed again
+before lookup in the hashed index, and will not match any tokens.
+
+The token hashing algorithm is SHA-512. This provides a good level of security and is allowed by FIPS 140-2.
+Because the token hashes must be usable as index lookups, salted password hashes (e.g. bcrypt, PBKDF2, Argon2)
+cannot be used.
+
+A potential future security enhancement would be optionally storing "peppered" hashes. This would require retrieving
+the pepper key from outside of BoltDB, for example from Vault.
+
+When listing tokens, hashed tokens are listed as "REDACTED" instead of the hashed
+token value. Raw token values are returned as in previous versions.
+
+---*/
+
+const (
+	// TokenRedactedMessage is the user-facing message used when a hashed token is redacted.
+	TokenRedactedMessage = "REDACTED"
+
+	// TokenNotAvailableMessage is the user-facing message used when no token is available, plaintext or hashed.
+	TokenNotAvailableMessage = "N/A"
+)
 
 const MaxIDGenerationN = 100
 const ReservedIDs = 1000
 
 var (
-	authBucket = []byte("authorizationsv1")
-	authIndex  = []byte("authorizationindexv1")
+	ErrReadOnly = goerrors.New("authorization store is read-only")
+)
+
+var (
+	authBucketName      = []byte("authorizationsv1")
+	authIndexName       = []byte("authorizationindexv1")
+	hashedAuthIndexName = []byte("authorizationhashedindexv1")
 )
 
 type Store struct {
 	kvStore kv.Store
 	IDGen   platform.IDGenerator
+	hasher  *AuthorizationHasher
+
+	// Indicates if tokens should be stored in hashed PHC format.
+	useHashedTokens bool
+
+	// Logger
+	log *zap.Logger
+
+	// Indicates if Store is read-only.
+	readOnly bool
+
+	// ignoreMissingHashIndex indicates if missing hash indices in the store should be ignored.
+	// This is almost exclusively for testing.
+ ignoreMissingHashIndex bool } -func NewStore(kvStore kv.Store) (*Store, error) { - st := &Store{ - kvStore: kvStore, - IDGen: snowflake.NewDefaultIDGenerator(), +type storePlusOptions struct { + *Store + hasherVariantName string +} + +type StoreOption func(*storePlusOptions) + +func WithAuthorizationHasher(hasher *AuthorizationHasher) StoreOption { + return func(s *storePlusOptions) { + s.hasher = hasher + } +} + +func WithAuthorizationHashVariantName(name string) StoreOption { + return func(s *storePlusOptions) { + s.hasherVariantName = name + } +} + +func WithIgnoreMissingHashIndex(allowMissing bool) StoreOption { + return func(s *storePlusOptions) { + s.ignoreMissingHashIndex = allowMissing + } +} + +func WithLogger(log *zap.Logger) StoreOption { + return func(s *storePlusOptions) { + s.log = log } - return st, st.setup() +} + +func WithReadOnly(readOnly bool) StoreOption { + return func(s *storePlusOptions) { + s.readOnly = readOnly + } +} + +// NewStore creates a new authorization.Store object. kvStore is the underlying key-value store. +func NewStore(ctx context.Context, kvStore kv.Store, useHashedTokens bool, opts ...StoreOption) (*Store, error) { + s := &storePlusOptions{ + Store: &Store{ + kvStore: kvStore, + IDGen: snowflake.NewDefaultIDGenerator(), + useHashedTokens: useHashedTokens, + }, + hasherVariantName: DefaultHashVariantName, + } + + for _, o := range opts { + o(s) + } + + if s.log == nil { + s.log = zap.NewNop() + } + + if err := s.setup(ctx); err != nil { + return nil, fmt.Errorf("error during authorization store setup: %w", err) + } + + foundVariants, err := s.findHashVariants(ctx) + if err != nil { + return nil, fmt.Errorf("error creating authorization store during findVariants: %w", err) + } + if len(foundVariants) > 0 && !useHashedTokens { + s.log.Warn("Token hashing is disabled, but hashed tokens found in authorization store. Was influxd accidentally started without --use-hashed-tokens?") + } + + if s.hasher == nil { + hasher, err := s.autogenerateHasher(ctx, foundVariants, s.hasherVariantName) + if err != nil { + return nil, fmt.Errorf("error creating authorization store during autogenerateHasher: %w", err) + } + s.hasher = hasher + } + + // Perform hashed token migration if needed. This can not be performed by the migration service + // because it requires configuration, and the migration service is more concerned with schema + // and does not have configuration. + if err := s.hashedTokenMigration(ctx); err != nil { + return nil, fmt.Errorf("error during hashed token migration: %w", err) + } + + return s.Store, nil +} + +// findHashVariants scans all authorizations and returns a list of hash variants found. +func (s *Store) findHashVariants(ctx context.Context) ([]influxdb2_algo.Variant, error) { + // Determine which variants are present in the store. 
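	// The approach below: decode each stored HashedToken with a temporary decoder for the
	// influxdb2 algorithm and collect the variant reported by each digest; records that fail
	// to decode are logged and skipped rather than treated as fatal.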
+ tempDecoder := crypt.NewDecoder() + if err := influxdb2_algo.RegisterDecoder(tempDecoder); err != nil { + return nil, fmt.Errorf("error registering test decoders for authorization store: %w", err) + } + + var auths []*influxdb.Authorization + err := s.View(ctx, func(tx kv.Tx) error { + as, err := s.ListAuthorizations(ctx, tx, influxdb.AuthorizationFilter{}) + if err != nil { + return err + } + auths = as + return nil + }) + if err != nil { + return nil, fmt.Errorf("error fetching authorization records for hash variant inventory: %w", err) + } + + foundVariants := make(map[influxdb2_algo.Variant]struct{}) + for _, a := range auths { + if a.IsHashedTokenSet() { + digest, err := tempDecoder.Decode(a.HashedToken) + if err == nil { + if influxdbDigest, ok := digest.(*influxdb2_algo.Digest); ok { + foundVariants[influxdbDigest.Variant] = struct{}{} + } + } else { + s.log.Warn("error decoding hash variant for token during hash variant inventory", zap.Error(err), zap.Uint64("tokenID", uint64(a.ID))) + } + } + } + + return slices.Collect(maps.Keys(foundVariants)), nil +} + +// autogenerateHasher generates an AuthorizationHasher that hashes using variantName. +// The decoders include variantName plus any other variants that are included in +// foundVariants. +func (s *Store) autogenerateHasher(ctx context.Context, foundVariants []influxdb2_algo.Variant, variantName string) (*AuthorizationHasher, error) { + var decoderVariants []influxdb2_algo.Variant + // Make sure we have the hasher variant we will make in there and that it is first in the list, + // so that it is the first one we try to lookup a given token. + hasherVariant := influxdb2_algo.NewVariant(variantName) + decoderVariants = append(decoderVariants, hasherVariant) + for _, variant := range foundVariants { + // Avoid having 2 hasherVariant decoders. + if variant != hasherVariant { + decoderVariants = append(decoderVariants, variant) + } + } + + hasher, err := NewAuthorizationHasher(WithHasherVariant(hasherVariant), WithDecoderVariants(decoderVariants)) + if err != nil { + return nil, fmt.Errorf("error creating authorization hasher for authorization store: %w", err) + } + + return hasher, nil +} + +// hashedTokenMigration migrates any unhashed tokens in the store to hashed tokens. +func (s *Store) hashedTokenMigration(ctx context.Context) error { + if !s.useHashedTokens || s.readOnly { + return nil + } + + // Figure out which authorization records need to be updated. + var authsNeedingUpdate []*influxdb.Authorization + err := s.View(ctx, func(tx kv.Tx) error { + return s.forEachAuthorization(ctx, tx, nil, func(a *influxdb.Authorization) bool { + if a.IsHashedTokenClear() { + if a.IsTokenSet() { + authsNeedingUpdate = append(authsNeedingUpdate, a) + } else { + s.log.Warn("during hashed token migration, found authorization without any token set", zap.Uint64("ID", uint64(a.ID)), zap.String("description", a.Description)) + } + } + return true + }) + }) + if err != nil { + return err + } + + for batch := range slices.Chunk(authsNeedingUpdate, 100) { + err := s.Update(ctx, func(tx kv.Tx) error { + // Now update them. This really seems too simple, but s.UpdateAuthorization() is magical. 
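			// Concretely (per the code elsewhere in this change): UpdateAuthorization re-runs
			// transformToken, which hashes Token into HashedToken when hashing is enabled; the
			// raw token is then redacted on encode, and the stale raw-token index entry is
			// removed via deleteIndices.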
+ for _, a := range batch { + if _, err := s.UpdateAuthorization(ctx, tx, a.ID, a); err != nil { + return fmt.Errorf("failed to update authorization for %d (%s): %w", a.ID, a.Description, err) + } + } + return nil + }) + if err != nil { + return fmt.Errorf("error migrating hashed tokens: %w", err) + } + } + + return nil } // View opens up a transaction that will not write to any data. Implementing interfaces @@ -39,17 +342,29 @@ func (s *Store) View(ctx context.Context, fn func(kv.Tx) error) error { // Update opens up a transaction that will mutate data. func (s *Store) Update(ctx context.Context, fn func(kv.Tx) error) error { + if s.readOnly { + return ErrReadOnly + } return s.kvStore.Update(ctx, fn) } -func (s *Store) setup() error { - return s.Update(context.Background(), func(tx kv.Tx) error { - if _, err := tx.Bucket(authBucket); err != nil { +func (s *Store) setup(ctx context.Context) error { + return s.View(ctx, func(tx kv.Tx) error { + if _, err := authBucket(tx); err != nil { return err } if _, err := authIndexBucket(tx); err != nil { return err } + if _, err := hashedAuthIndexBucket(tx); err != nil { + if goerrors.Is(err, kv.ErrBucketNotFound) { + if !s.ignoreMissingHashIndex || (s.useHashedTokens && !s.readOnly) { + return fmt.Errorf("missing required index, upgrade required: %w", err) + } + } else { + return err + } + } return nil }) @@ -93,7 +408,7 @@ func (s *Store) uniqueID(ctx context.Context, tx kv.Tx, bucket []byte, id platfo } } - b, err := tx.Bucket(bucket) + b, err := getNamedAuthBucket(tx, bucket) if err != nil { return err } diff --git a/authorization/storage_authorization.go b/authorization/storage_authorization.go index 59637d10370..ce48fb57890 100644 --- a/authorization/storage_authorization.go +++ b/authorization/storage_authorization.go @@ -2,7 +2,11 @@ package authorization import ( "context" + "crypto/subtle" "encoding/json" + goerrors "errors" + "fmt" + "slices" "github.com/buger/jsonparser" "github.com/influxdata/influxdb/v2" @@ -10,22 +14,45 @@ import ( "github.com/influxdata/influxdb/v2/kit/platform/errors" "github.com/influxdata/influxdb/v2/kv" jsonp "github.com/influxdata/influxdb/v2/pkg/jsonparser" + "go.uber.org/zap" ) +var ( + ErrNilAuthorization = goerrors.New("authorization cannot be nil") + ErrHashedTokenMismatch = goerrors.New("HashedToken does not match Token") + ErrNoTokenAvailable = goerrors.New("no token available for authorization") +) + +func getNamedAuthBucket(tx kv.Tx, bucketName []byte) (kv.Bucket, error) { + b, err := tx.Bucket(bucketName) + if err != nil { + return nil, UnexpectedAuthBucketError(bucketName, err) + } + + return b, nil +} + +func authBucket(tx kv.Tx) (kv.Bucket, error) { + return getNamedAuthBucket(tx, authBucketName) +} + func authIndexKey(n string) []byte { return []byte(n) } func authIndexBucket(tx kv.Tx) (kv.Bucket, error) { - b, err := tx.Bucket([]byte(authIndex)) - if err != nil { - return nil, UnexpectedAuthIndexError(err) - } + return getNamedAuthBucket(tx, authIndexName) +} - return b, nil +func hashedAuthIndexKey(n string) []byte { + return []byte(n) +} + +func hashedAuthIndexBucket(tx kv.Tx) (kv.Bucket, error) { + return getNamedAuthBucket(tx, hashedAuthIndexName) } -func encodeAuthorization(a *influxdb.Authorization) ([]byte, error) { +func (s *Store) encodeAuthorization(a *influxdb.Authorization) ([]byte, error) { switch a.Status { case influxdb.Active, influxdb.Inactive: case "": @@ -33,16 +60,35 @@ func encodeAuthorization(a *influxdb.Authorization) ([]byte, error) { default: return nil, &errors.Error{ 
Code: errors.EInvalid, - Msg: "unknown authorization status", + Msg: "encodeAuthorization: unknown authorization status", } } - return json.Marshal(a) + // Redact Token, if needed. This is done at the lowest level so it is impossible to serialize + // raw tokens if hashing is enabled. + if s.useHashedTokens { + // Redact a copy, not the original. The raw Token value is still needed by the caller in some cases. + // Note that this is an empty string, not TokenRedactedMessage. TokenRedactedMessage is only used for + // user-facing output. The empty string signals that the plaintext token is not available and that + // the hashed token should be used instead. + redactedAuth := *a + redactedAuth.ClearToken() + a = &redactedAuth + } + if d, err := json.Marshal(a); err == nil { + return d, nil + } else { + return nil, &errors.Error{ + Code: errors.EInvalid, + Msg: "encodeAuthorization: marshalling error", + Err: err, + } + } } func decodeAuthorization(b []byte, a *influxdb.Authorization) error { if err := json.Unmarshal(b, a); err != nil { - return err + return fmt.Errorf("decodeAuthorization: %w", err) } if a.Status == "" { a.Status = influxdb.Active @@ -50,63 +96,79 @@ func decodeAuthorization(b []byte, a *influxdb.Authorization) error { return nil } +// transformToken updates a.Token and a.HashedToken to match configuration state, +// if needed. If needed, transformToken generates the a.HashedToken from a.Token when +// token hashing is enabled. transformToken will also clear a.HashedToken if token +// hashing is turned off and a.Token is set to the matching token. If a.HashedToken and +// a.Token are both set but do not match (a.HashedToken is a hash of a.Token), then an +// error is returned. +func (s *Store) transformToken(a *influxdb.Authorization) error { + // Verify Token and HashedToken match if both are set. + if a.BothTokensSet() { + match, err := s.hasher.Match(a.HashedToken, a.Token) + if err != nil { + return fmt.Errorf("transformToken: error matching tokens: %w", err) + } + if !match { + return fmt.Errorf("transformToken: %w", ErrHashedTokenMismatch) + } + } + + if a.IsTokenSet() { + if s.useHashedTokens { + // Need to generate HashedToken from Token. Redaction of the hashed token takes + // place when the record is written to the KV store. In some cases the client + // code that triggered commit needs access to the raw Token, such as when a + // token is initially created so it can be shown to the user. + // Note that even if a.HashedToken is set, we will regenerate it here. This ensures + // that a.HashedToken will be stored using the currently configured hashing algorithm. + if hashedToken, err := s.hasher.Hash(a.Token); err != nil { + return fmt.Errorf("transformToken: error hashing token for token %d (%s): %w", a.ID, a.Description, err) + } else { + a.HashedToken = hashedToken + } + } else { + // Token hashing disabled, a.Token is available, clear a.HashedToken if set. + a.ClearHashedToken() + } + } + + return nil +} + // CreateAuthorization takes an Authorization object and saves it in storage using its token -// using its token property as an index +// using its token property as an index. The contents of a should be considered invalid if an +// error occurs. 
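// A hedged usage sketch (store, orgID, userID, and the token value are hypothetical; error
// handling elided):
//
//	err := store.Update(ctx, func(tx kv.Tx) error {
//		return store.CreateAuthorization(ctx, tx, &influxdb.Authorization{
//			OrgID:  orgID,
//			UserID: userID,
//			Token:  "raw-token",
//			Status: influxdb.Active,
//		})
//	})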
func (s *Store) CreateAuthorization(ctx context.Context, tx kv.Tx, a *influxdb.Authorization) (retErr error) { defer func() { retErr = errors.ErrInternalServiceError(retErr, errors.WithErrorOp(influxdb.OpCreateAuthorization)) }() + + if a == nil { + return ErrNilAuthorization + } + // if the provided ID is invalid, or already maps to an existing Auth, then generate a new one if !a.ID.Valid() { - id, err := s.generateSafeID(ctx, tx, authBucket) + id, err := s.generateSafeID(ctx, tx, authBucketName) if err != nil { return nil } a.ID = id } else if err := uniqueID(ctx, tx, a.ID); err != nil { - id, err := s.generateSafeID(ctx, tx, authBucket) + id, err := s.generateSafeID(ctx, tx, authBucketName) if err != nil { return nil } a.ID = id } + // Token must be unique to create authorization. if err := s.uniqueAuthToken(ctx, tx, a); err != nil { - return ErrTokenAlreadyExistsError - } - - v, err := encodeAuthorization(a) - if err != nil { - return &errors.Error{ - Code: errors.EInvalid, - Err: err, - } - } - - encodedID, err := a.ID.Encode() - if err != nil { - return ErrInvalidAuthIDError(err) - } - - idx, err := authIndexBucket(tx) - if err != nil { - return err - } - - if err := idx.Put(authIndexKey(a.Token), encodedID); err != nil { - return err - } - - b, err := tx.Bucket(authBucket) - if err != nil { return err } - if err := b.Put(encodedID, v); err != nil { - return err - } - - return nil + return s.commitAuthorization(ctx, tx, a) } // GetAuthorization gets an authorization by its ID from the auth bucket in kv @@ -119,7 +181,7 @@ func (s *Store) GetAuthorizationByID(ctx context.Context, tx kv.Tx, id platform. return nil, ErrInvalidAuthID } - b, err := tx.Bucket(authBucket) + b, err := authBucket(tx) if err != nil { return nil, err } @@ -141,6 +203,27 @@ func (s *Store) GetAuthorizationByID(ctx context.Context, tx kv.Tx, id platform. return a, nil } +// validateToken checks if token matches that token stored in auth. If auth.Token is set, that is +// compared first. Otherwise, auth.HashedToken is used to verify token. If neither field in auth is set, then +// the comparison fails. +func (s *Store) validateToken(auth *influxdb.Authorization, token string) (bool, error) { + if auth.IsTokenSet() { + return subtle.ConstantTimeCompare([]byte(auth.Token), []byte(token)) == 1, nil + } + + if auth.IsHashedTokenSet() { + match, err := s.hasher.Match(auth.HashedToken, token) + if err != nil { + return false, fmt.Errorf("error matching hashed token %d (%s) for validation: %w", auth.ID, auth.Description, err) + } + return match, nil + } + + return false, ErrNoTokenAvailable +} + +// GetAuthorizationsByToken searches for an authorization by its raw (unhashed) token value. It will also search +// for entires with equivalent hashed tokens if the raw token is not directly found. func (s *Store) GetAuthorizationByToken(ctx context.Context, tx kv.Tx, token string) (auth *influxdb.Authorization, retErr error) { defer func() { retErr = errors.ErrInternalServiceError(retErr, errors.WithErrorOp(influxdb.OpFindAuthorizationByToken)) @@ -153,10 +236,53 @@ func (s *Store) GetAuthorizationByToken(ctx context.Context, tx kv.Tx, token str // use the token to look up the authorization's ID idKey, err := idx.Get(authIndexKey(token)) if kv.IsNotFound(err) { - return nil, &errors.Error{ + authNotFoundErr := &errors.Error{ Code: errors.ENotFound, Msg: "authorization not found", } + + // Look for hashed token in hashed index. 
We have to do this even if hashed token storage is + // currently turned off, because it may have been enabled previously, which means the token + // could still be indexed by the hash. + hashIdx, err := hashedAuthIndexBucket(tx) + if err != nil { + if s.ignoreMissingHashIndex && goerrors.Is(err, kv.ErrBucketNotFound) { + return nil, authNotFoundErr + } else { + return nil, err + } + } + + // Try to look up token in hashed index. We have to do the lookup for all potential hash variants. + // We also have to do this even if hashed token storage is off, because we might have indexed by + // the hash when it previously enabled. + allHashes, err := s.hasher.AllHashes(token) + if err != nil { + return nil, err + } else if len(allHashes) == 0 { + // No hashed tokens to lookup (shouldn't happen, but just in case it does). + return nil, authNotFoundErr + } + found := false // found shouldn't really be needed since we know allHashes is not empty, but it's nice for extra safety. + for _, hashedToken := range allHashes { + // Very important we update the existing idKey and err variables and don't create new ones here. + idKey, err = hashIdx.Get(hashedAuthIndexKey(hashedToken)) + if err == nil { + // We found it! Stop looking. err will be nil after loop. + found = true + break + } else { + // Keep looking if we got a not found error. + if !kv.IsNotFound(err) { + return nil, err + } + } + } + if !found || kv.IsNotFound(err) { + return nil, authNotFoundErr + } else if err != nil { + return nil, err + } } var id platform.ID @@ -167,7 +293,26 @@ func (s *Store) GetAuthorizationByToken(ctx context.Context, tx kv.Tx, token str } } - return s.GetAuthorizationByID(ctx, tx, id) + // Verify that the token stored in auth matches the requested token. This should be superfluous check, but + // we will just in case somehow the authorization record got out of sync with the index. + auth, err = s.GetAuthorizationByID(ctx, tx, id) + if err != nil { + return nil, &errors.Error{ + Code: errors.EInternal, + Err: err, + } + } + match, err := s.validateToken(auth, token) + if err != nil { + return nil, &errors.Error{ + Code: errors.EInternal, + Err: err, + } + } + if !match { + return nil, errors.EIncorrectPassword + } + return auth, nil } // ListAuthorizations returns all the authorizations matching a set of FindOptions. This function is used for @@ -177,8 +322,8 @@ func (s *Store) ListAuthorizations(ctx context.Context, tx kv.Tx, f influxdb.Aut retErr = errors.ErrInternalServiceError(retErr, errors.WithErrorOp(influxdb.OpFindAuthorizations)) }() var as []*influxdb.Authorization - pred := authorizationsPredicateFn(f) - filterFn := filterAuthorizationsFn(f) + pred := s.authorizationsPredicateFn(f) + filterFn := s.filterAuthorizationsFn(f) err := s.forEachAuthorization(ctx, tx, pred, func(a *influxdb.Authorization) bool { if filterFn(a) { as = append(as, a) @@ -194,7 +339,7 @@ func (s *Store) ListAuthorizations(ctx context.Context, tx kv.Tx, f influxdb.Aut // forEachAuthorization will iterate through all authorizations while fn returns true. 
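// A hedged sketch of the iteration contract (pred may be nil; returning false from fn stops
// the iteration early):
//
//	_ = s.forEachAuthorization(ctx, tx, nil, func(a *influxdb.Authorization) bool {
//		return a.Status == influxdb.Active // stop at the first non-active authorization
//	})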
func (s *Store) forEachAuthorization(ctx context.Context, tx kv.Tx, pred kv.CursorPredicateFunc, fn func(*influxdb.Authorization) bool) error { - b, err := tx.Bucket(authBucket) + b, err := authBucket(tx) if err != nil { return err } @@ -226,41 +371,129 @@ func (s *Store) forEachAuthorization(ctx context.Context, tx kv.Tx, pred kv.Curs return nil } -// UpdateAuthorization updates the status and description only of an authorization -func (s *Store) UpdateAuthorization(ctx context.Context, tx kv.Tx, id platform.ID, a *influxdb.Authorization) (auth *influxdb.Authorization, retErr error) { - defer func() { - retErr = errors.ErrInternalServiceError(retErr, errors.WithErrorOp(influxdb.OpUpdateAuthorization)) - }() - v, err := encodeAuthorization(a) +// commitAuthorization performs pre-commit checks and updates to an authorization record, commits it, +// and makes sure indices point to it. It does not delete any indices. The updated authorization is +// returned on success. +func (s *Store) commitAuthorization(ctx context.Context, tx kv.Tx, a *influxdb.Authorization) error { + if err := s.transformToken(a); err != nil { + return errors.ErrInternalServiceError(err, errors.WithErrorCode(errors.EInternal)) + } + + // Sanity check that a is actually set. Shouldn't be possible during normal operation. + if a.NoTokensSet() { + return fmt.Errorf("commitAuthorization: %w", ErrNoTokenAvailable) + } + + v, err := s.encodeAuthorization(a) if err != nil { - return nil, errors.ErrInternalServiceError(err, errors.WithErrorCode(errors.EInvalid)) + return errors.ErrInternalServiceError(err, errors.WithErrorCode(errors.EInvalid)) } encodedID, err := a.ID.Encode() if err != nil { - return nil, errors.ErrInternalServiceError(err, errors.WithErrorCode(errors.ENotFound)) + return errors.ErrInternalServiceError(err, errors.WithErrorCode(errors.ENotFound)) } - idx, err := authIndexBucket(tx) + if !s.useHashedTokens && a.IsTokenSet() { + idx, err := authIndexBucket(tx) + if err != nil { + return errors.ErrInternalServiceError(err, errors.WithErrorCode(errors.EInternal)) + } + + if err := idx.Put(authIndexKey(a.Token), encodedID); err != nil { + return errors.ErrInternalServiceError(err, errors.WithErrorCode(errors.EInternal)) + } + } + + // If we have a hashed token, we need to add it to the index even if hashed tokens are not + // available. This is because if hashed tokens are enabled and then disabled, we will + // only have hashed tokens available for some authorization records. They would be unusable + // if we did not maintain their hashed indices. + if a.IsHashedTokenSet() { + idx, err := hashedAuthIndexBucket(tx) + // Don't ignore a missing index here, we want an error. + if err != nil { + return errors.ErrInternalServiceError(err, errors.WithErrorCode(errors.EInternal)) + } + + if err := idx.Put(hashedAuthIndexKey(a.HashedToken), encodedID); err != nil { + return errors.ErrInternalServiceError(err, errors.WithErrorCode(errors.EInternal)) + } + } + + b, err := authBucket(tx) if err != nil { - return nil, err + return err // authBucket already wraps the error } - if err := idx.Put(authIndexKey(a.Token), encodedID); err != nil { - return nil, err + if err := b.Put(encodedID, v); err != nil { + return errors.ErrInternalServiceError(err, errors.WithErrorCode(errors.EInternal)) } - b, err := tx.Bucket(authBucket) + return nil +} + +// deleteIndices removes indices for the given token and hashedToken. 
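// Either argument may be empty; empty values are skipped. A hedged sketch of the call pattern
// (as used by DeleteAuthorization in this change):
//
//	if err := s.deleteIndices(ctx, tx, a.Token, a.HashedToken); err != nil {
//		return err
//	}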
+func (s *Store) deleteIndices(ctx context.Context, tx kv.Tx, token, hashedToken string) error { + authIdx, err := authIndexBucket(tx) + if err != nil { + return err + } + + hashedAuthIdx, err := hashedAuthIndexBucket(tx) + // Don't ignore missing index during an update. if err != nil { + return err + } + + if influxdb.IsAuthTokenSet(token) { + if err := authIdx.Delete([]byte(token)); err != nil { + return fmt.Errorf("deleteIndices: error deleting from authIndex: %w", err) + } + } + + if influxdb.IsAuthTokenSet(hashedToken) { + if err := hashedAuthIdx.Delete([]byte(hashedToken)); err != nil { + return fmt.Errorf("deleteIndices: error deleting from hashedAuthIndex: %w", err) + } + } + + return nil +} + +// UpdateAuthorization updates the status and description only of an authorization +func (s *Store) UpdateAuthorization(ctx context.Context, tx kv.Tx, id platform.ID, a *influxdb.Authorization) (auth *influxdb.Authorization, retErr error) { + defer func() { + retErr = errors.ErrInternalServiceError(retErr, errors.WithErrorOp(influxdb.OpUpdateAuthorization)) + }() + + if a == nil { + return nil, ErrNilAuthorization + } + + initialToken := a.Token + initialHashedToken := a.HashedToken + + if err := s.commitAuthorization(ctx, tx, a); err != nil { return nil, err } - if err := b.Put(encodedID, v); err != nil { + // Delete dangling indices from old raw tokens or hashed tokens. + var removedToken string + if influxdb.IsAuthTokenSet(initialToken) && (a.Token != initialToken || s.useHashedTokens) { + removedToken = initialToken + } + + var removedHashedToken string + if influxdb.IsAuthTokenSet(initialHashedToken) && a.HashedToken != initialHashedToken { + removedHashedToken = initialHashedToken + } + + if err := s.deleteIndices(ctx, tx, removedToken, removedHashedToken); err != nil { return nil, err } return a, nil - } // DeleteAuthorization removes an authorization from storage @@ -278,17 +511,12 @@ func (s *Store) DeleteAuthorization(ctx context.Context, tx kv.Tx, id platform.I return ErrInvalidAuthID } - idx, err := authIndexBucket(tx) - if err != nil { - return err - } - - b, err := tx.Bucket(authBucket) + b, err := authBucket(tx) if err != nil { return err } - if err := idx.Delete([]byte(a.Token)); err != nil { + if err := s.deleteIndices(ctx, tx, a.Token, a.HashedToken); err != nil { return err } @@ -299,22 +527,57 @@ func (s *Store) DeleteAuthorization(ctx context.Context, tx kv.Tx, id platform.I return nil } -func (s *Store) uniqueAuthToken(ctx context.Context, tx kv.Tx, a *influxdb.Authorization) error { - err := unique(ctx, tx, authIndex, authIndexKey(a.Token)) - if err == kv.NotUniqueError { +func (s *Store) uniqueAuthTokenByIndex(ctx context.Context, tx kv.Tx, index, key []byte) error { + if err := unique(ctx, tx, index, key); err == nil { + return nil + } else if err == kv.NotUniqueError { // by returning a generic error we are trying to hide when // a token is non-unique. return influxdb.ErrUnableToCreateToken + } else { + // otherwise, this is some sort of internal server error and we + // should provide some debugging information. + return fmt.Errorf("error in uniqueAuthTokenByIndex for index %q: %w", index, err) + } +} + +func (s *Store) uniqueAuthToken(ctx context.Context, tx kv.Tx, a *influxdb.Authorization) error { + // Check if the raw token is unique. 
+ if a.IsTokenSet() { + if err := s.uniqueAuthTokenByIndex(ctx, tx, authIndexName, authIndexKey(a.Token)); err != nil { + return err + } + } + + // If Token is available, check for the uniqueness of the hashed version of Token using all + // potential hashing schemes. If HashedToken was directly given, we must also check for it. + allHashedTokens := make([]string, 0, s.hasher.AllHashesCount()+1) + if a.IsHashedTokenSet() { + allHashedTokens = append(allHashedTokens, a.HashedToken) + } + if a.IsTokenSet() { + allRawHashes, err := s.hasher.AllHashes(a.Token) + if err != nil { + return err + } + allHashedTokens = append(allHashedTokens, allRawHashes...) + } + + for _, hashedToken := range allHashedTokens { + if err := s.uniqueAuthTokenByIndex(ctx, tx, hashedAuthIndexName, hashedAuthIndexKey(hashedToken)); err != nil { + if !s.ignoreMissingHashIndex || !goerrors.Is(err, kv.ErrBucketNotFound) { + return err + } + } } - // otherwise, this is some sort of internal server error and we - // should provide some debugging information. - return err + + return nil } func unique(ctx context.Context, tx kv.Tx, indexBucket, indexKey []byte) error { - bucket, err := tx.Bucket(indexBucket) + bucket, err := getNamedAuthBucket(tx, indexBucket) if err != nil { - return kv.UnexpectedIndexError(err) + return err } _, err = bucket.Get(indexKey) @@ -339,9 +602,9 @@ func uniqueID(ctx context.Context, tx kv.Tx, id platform.ID) error { return ErrInvalidAuthID } - b, err := tx.Bucket(authBucket) + b, err := authBucket(tx) if err != nil { - return errors.ErrInternalServiceError(err) + return err // authBucket already wraps the error } _, err = b.Get(encodedID) @@ -358,7 +621,7 @@ func uniqueID(ctx context.Context, tx kv.Tx, id platform.ID) error { return kv.UnexpectedIndexError(err) } -func authorizationsPredicateFn(f influxdb.AuthorizationFilter) kv.CursorPredicateFunc { +func (s *Store) authorizationsPredicateFn(f influxdb.AuthorizationFilter) kv.CursorPredicateFunc { // if any errors occur reading the JSON data, the predicate will always return true // to ensure the value is included and handled higher up. @@ -374,14 +637,36 @@ func authorizationsPredicateFn(f influxdb.AuthorizationFilter) kv.CursorPredicat } if f.Token != nil { - exp := *f.Token + token := *f.Token + allHashes, err := s.hasher.AllHashes(token) + if err != nil { + s.log.Error("error generating hashes in authorizationsPredicateFn", zap.Error(err)) + // On error, continue onward. allHashes is empty and we'll effectively ignore hashedToken, + // but we'll still look at the unhashed Token if it is available. + } return func(_, value []byte) bool { - // it is assumed that token never has escaped string data - got, _, _, err := jsonparser.Get(value, "token") - if err != nil { - return true + // Check if "token" matches. It is assumed that token never has escaped string data. + if got, _, _, err := jsonparser.Get(value, "token"); err == nil { + if len(got) > 0 { + return string(got) == token + } + } else { + return true // predicate must return true on errors + } + + // Check if "hashedToken" matches, if applicable. + if len(allHashes) > 0 { + if got, _, _, err := jsonparser.Get(value, "hashedToken"); err == nil { + if len(got) > 0 { + return slices.Contains(allHashes, string(got)) + } + } else { + return true // predicate must return true on errors + } } - return string(got) == exp + + // No match on "token" or "hashedToken", do not include this record. 
+ return false } } @@ -411,7 +696,7 @@ func authorizationsPredicateFn(f influxdb.AuthorizationFilter) kv.CursorPredicat return pred } -func filterAuthorizationsFn(filter influxdb.AuthorizationFilter) func(a *influxdb.Authorization) bool { +func (s *Store) filterAuthorizationsFn(filter influxdb.AuthorizationFilter) func(a *influxdb.Authorization) bool { if filter.ID != nil { return func(a *influxdb.Authorization) bool { return a.ID == *filter.ID @@ -419,8 +704,19 @@ func filterAuthorizationsFn(filter influxdb.AuthorizationFilter) func(a *influxd } if filter.Token != nil { + token := *filter.Token + allHashes, err := s.hasher.AllHashes(token) + if err != nil { + s.log.Error("error generating hashes in filterPredicateFn", zap.Error(err)) + // On error, continue onward. allHashes is empty and we'll effectively ignore hashedToken, + // but we'll still look at the unhashed Token if it is available. + } + return func(a *influxdb.Authorization) bool { - return a.Token == *filter.Token + if subtle.ConstantTimeCompare([]byte(a.Token), []byte(token)) == 1 { + return true + } + return slices.Contains(allHashes, a.HashedToken) } } diff --git a/authorization/storage_authorization_test.go b/authorization/storage_authorization_test.go index 79406a2d249..f0af9a4bb83 100644 --- a/authorization/storage_authorization_test.go +++ b/authorization/storage_authorization_test.go @@ -3,7 +3,6 @@ package authorization_test import ( "context" "fmt" - "reflect" "testing" "github.com/influxdata/influxdb/v2" @@ -12,214 +11,583 @@ import ( "github.com/influxdata/influxdb/v2/kit/platform" "github.com/influxdata/influxdb/v2/kv" "github.com/influxdata/influxdb/v2/kv/migration/all" + influxdb2_algo "github.com/influxdata/influxdb/v2/pkg/crypt/algorithm/influxdb2" + "github.com/stretchr/testify/require" "go.uber.org/zap/zaptest" ) +const ( + authIndexName = "authorizationindexv1" + hashedAuthIndexName = "authorizationhashedindexv1" +) + func TestAuth(t *testing.T) { - setup := func(t *testing.T, store *authorization.Store, tx kv.Tx) { - for i := 1; i <= 10; i++ { + const initialTokenCount = 10 + generateToken := func(i int) string { return fmt.Sprintf("randomtoken%d", i) } + + checkIndexCounts := func(t *testing.T, tx kv.Tx, expAuthIndexCount, expHashedAuthIndexCount int) { + t.Helper() + + indexCount := make(map[string]int) + for _, indexName := range []string{authIndexName, hashedAuthIndexName} { + index, err := tx.Bucket([]byte(indexName)) + require.NoError(t, err) + cur, err := index.Cursor() + require.NoError(t, err) + for k, _ := cur.First(); k != nil; k, _ = cur.Next() { + indexCount[indexName]++ + } + } + + require.Equal(t, expAuthIndexCount, indexCount[authIndexName]) + require.Equal(t, expHashedAuthIndexCount, indexCount[hashedAuthIndexName]) + } + + setup := func(t *testing.T, useHashedTokens bool, store *authorization.Store, hasher *authorization.AuthorizationHasher, tx kv.Tx) { + for i := 1; i <= initialTokenCount; i++ { err := store.CreateAuthorization(context.Background(), tx, &influxdb.Authorization{ ID: platform.ID(i), - Token: fmt.Sprintf("randomtoken%d", i), + Token: generateToken(i), OrgID: platform.ID(i), UserID: platform.ID(i), Status: influxdb.Active, }) + require.NoError(t, err) + } - if err != nil { - t.Fatal(err) + // Perform sanity checks on Token vs HashedToken and indices. 
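Stepping back from the test helpers for a moment, the filterAuthorizationsFn change above matches a candidate token against the stored raw token in constant time and against the precomputed candidate hashes by membership. A minimal standalone sketch of that matching, assuming the candidate hashes were already produced by the store's hasher (names here are ad hoc):

package example

import (
	"crypto/subtle"
	"slices"
)

// tokenMatches reports whether the candidate token matches either the stored raw
// token (compared in constant time) or one of the precomputed candidate hashes
// (compared by plain membership, as in the filter above).
func tokenMatches(storedToken, storedHashedToken, candidate string, candidateHashes []string) bool {
	if subtle.ConstantTimeCompare([]byte(storedToken), []byte(candidate)) == 1 {
		return true
	}
	return slices.Contains(candidateHashes, storedHashedToken)
}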
+ for i := 1; i <= initialTokenCount; i++ { + expToken := generateToken(i) + a, err := store.GetAuthorizationByToken(context.Background(), tx, expToken) + require.NoError(t, err) + if useHashedTokens { + require.Empty(t, a.Token) + hashedToken, err := hasher.Hash(expToken) + require.NoError(t, err) + require.Equal(t, hashedToken, a.HashedToken) + } else { + require.Equal(t, expToken, a.Token) + require.Empty(t, a.HashedToken) } } + + var expAuthIndexCount, expHashedAuthIndexCount int + if useHashedTokens { + expHashedAuthIndexCount = initialTokenCount + } else { + expAuthIndexCount = initialTokenCount + } + checkIndexCounts(t, tx, expAuthIndexCount, expHashedAuthIndexCount) } tt := []struct { name string - setup func(*testing.T, *authorization.Store, kv.Tx) - update func(*testing.T, *authorization.Store, kv.Tx) - results func(*testing.T, *authorization.Store, kv.Tx) + setup func(*testing.T, bool, *authorization.Store, *authorization.AuthorizationHasher, kv.Tx) + update func(*testing.T, *authorization.Store, *authorization.AuthorizationHasher, kv.Tx) + results func(*testing.T, bool, *authorization.Store, *authorization.AuthorizationHasher, kv.Tx) }{ { - name: "create", + name: "create duplicate token", setup: setup, - results: func(t *testing.T, store *authorization.Store, tx kv.Tx) { + update: func(t *testing.T, store *authorization.Store, hasher *authorization.AuthorizationHasher, tx kv.Tx) { + // should not be able to create two authorizations with identical tokens + err := store.CreateAuthorization(context.Background(), tx, &influxdb.Authorization{ + ID: platform.ID(1), + Token: generateToken(1), + OrgID: platform.ID(1), + UserID: platform.ID(1), + }) + require.ErrorIs(t, err, influxdb.ErrUnableToCreateToken) + }, + results: func(t *testing.T, useHashedTokens bool, store *authorization.Store, hasher *authorization.AuthorizationHasher, tx kv.Tx) { auths, err := store.ListAuthorizations(context.Background(), tx, influxdb.AuthorizationFilter{}) - if err != nil { - t.Fatal(err) - } - - if len(auths) != 10 { - t.Fatalf("expected 10 authorizations, got: %d", len(auths)) - } + require.NoError(t, err) + require.Len(t, auths, initialTokenCount) expected := []*influxdb.Authorization{} - for i := 1; i <= 10; i++ { - expected = append(expected, &influxdb.Authorization{ + for i := 1; i <= initialTokenCount; i++ { + a := &influxdb.Authorization{ ID: platform.ID(i), - Token: fmt.Sprintf("randomtoken%d", i), + Token: generateToken(i), OrgID: platform.ID(i), UserID: platform.ID(i), Status: "active", - }) - } - if !reflect.DeepEqual(auths, expected) { - t.Fatalf("expected identical authorizations: \n%+v\n%+v", auths, expected) + } + if useHashedTokens { + hashedToken, err := hasher.Hash(a.Token) + require.NoError(t, err) + a.HashedToken = hashedToken + a.ClearToken() + } + expected = append(expected, a) } + require.Equal(t, auths, expected) - // should not be able to create two authorizations with identical tokens - err = store.CreateAuthorization(context.Background(), tx, &influxdb.Authorization{ - ID: platform.ID(1), - Token: fmt.Sprintf("randomtoken%d", 1), - OrgID: platform.ID(1), - UserID: platform.ID(1), - }) - if err == nil { - t.Fatalf("expected to be unable to create authorizations with identical tokens") + var expAuthIndexCount, expHashedAuthIndexCount int + if useHashedTokens { + expHashedAuthIndexCount = initialTokenCount + } else { + expAuthIndexCount = initialTokenCount } + checkIndexCounts(t, tx, expAuthIndexCount, expHashedAuthIndexCount) }, }, { name: "read", setup: setup, - results: 
func(t *testing.T, store *authorization.Store, tx kv.Tx) { - for i := 1; i <= 10; i++ { + results: func(t *testing.T, useHashedTokens bool, store *authorization.Store, hasher *authorization.AuthorizationHasher, tx kv.Tx) { + for i := 1; i <= initialTokenCount; i++ { expectedAuth := &influxdb.Authorization{ ID: platform.ID(i), - Token: fmt.Sprintf("randomtoken%d", i), + Token: generateToken(i), OrgID: platform.ID(i), UserID: platform.ID(i), Status: influxdb.Active, } - - authByID, err := store.GetAuthorizationByID(context.Background(), tx, platform.ID(i)) - if err != nil { - t.Fatalf("Unexpectedly could not acquire Authorization by ID [Error]: %v", err) + if useHashedTokens { + hashedToken, err := hasher.Hash(expectedAuth.Token) + require.NoError(t, err) + expectedAuth.HashedToken = hashedToken + expectedAuth.ClearToken() } - if !reflect.DeepEqual(authByID, expectedAuth) { - t.Fatalf("ID TEST: expected identical authorizations:\n[Expected]: %+#v\n[Got]: %+#v", expectedAuth, authByID) - } - - authByToken, err := store.GetAuthorizationByToken(context.Background(), tx, fmt.Sprintf("randomtoken%d", i)) - if err != nil { - t.Fatalf("cannot get authorization by Token [Error]: %v", err) - } + authByID, err := store.GetAuthorizationByID(context.Background(), tx, platform.ID(i)) + require.NoError(t, err) + require.Equal(t, expectedAuth, authByID) - if !reflect.DeepEqual(authByToken, expectedAuth) { - t.Fatalf("TOKEN TEST: expected identical authorizations:\n[Expected]: %+#v\n[Got]: %+#v", expectedAuth, authByToken) - } + authByToken, err := store.GetAuthorizationByToken(context.Background(), tx, generateToken(i)) + require.NoError(t, err) + require.Equal(t, expectedAuth, authByToken) } + var expAuthIndexCount, expHashedAuthIndexCount int + if useHashedTokens { + expHashedAuthIndexCount = initialTokenCount + } else { + expAuthIndexCount = initialTokenCount + } + checkIndexCounts(t, tx, expAuthIndexCount, expHashedAuthIndexCount) }, }, { name: "update", setup: setup, - update: func(t *testing.T, store *authorization.Store, tx kv.Tx) { - for i := 1; i <= 10; i++ { + update: func(t *testing.T, store *authorization.Store, hasher *authorization.AuthorizationHasher, tx kv.Tx) { + for i := 1; i <= initialTokenCount; i++ { auth, err := store.GetAuthorizationByID(context.Background(), tx, platform.ID(i)) - if err != nil { - t.Fatalf("Could not get authorization [Error]: %v", err) - } + require.NoError(t, err) auth.Status = influxdb.Inactive + copyAuth := *auth - _, err = store.UpdateAuthorization(context.Background(), tx, platform.ID(i), auth) - if err != nil { - t.Fatalf("Could not get updated authorization [Error]: %v", err) - } + updatedAuth, err := store.UpdateAuthorization(context.Background(), tx, platform.ID(i), auth) + require.NoError(t, err) + require.Equal(t, auth, updatedAuth) /* should be the same pointer */ + require.Equal(t, copyAuth, *auth) /* should be the same contents */ } }, - results: func(t *testing.T, store *authorization.Store, tx kv.Tx) { + results: func(t *testing.T, useHashedTokens bool, store *authorization.Store, hasher *authorization.AuthorizationHasher, tx kv.Tx) { - for i := 1; i <= 10; i++ { + for i := 1; i <= initialTokenCount; i++ { auth, err := store.GetAuthorizationByID(context.Background(), tx, platform.ID(i)) - if err != nil { - t.Fatalf("Could not get authorization [Error]: %v", err) - } + require.NoError(t, err) expectedAuth := &influxdb.Authorization{ ID: platform.ID(i), - Token: fmt.Sprintf("randomtoken%d", i), + Token: generateToken(i), OrgID: platform.ID(i), UserID: 
platform.ID(i), Status: influxdb.Inactive, } - - if !reflect.DeepEqual(auth, expectedAuth) { - t.Fatalf("expected identical authorizations:\n[Expected] %+#v\n[Got] %+#v", expectedAuth, auth) + if useHashedTokens { + hashedToken, err := hasher.Hash(expectedAuth.Token) + require.NoError(t, err) + expectedAuth.HashedToken = hashedToken + expectedAuth.ClearToken() } + + require.Equal(t, expectedAuth, auth) } + var expAuthIndexCount, expHashedAuthIndexCount int + if useHashedTokens { + expHashedAuthIndexCount = initialTokenCount + } else { + expAuthIndexCount = initialTokenCount + } + checkIndexCounts(t, tx, expAuthIndexCount, expHashedAuthIndexCount) }, }, { name: "delete", setup: setup, - update: func(t *testing.T, store *authorization.Store, tx kv.Tx) { - for i := 1; i <= 10; i++ { + update: func(t *testing.T, store *authorization.Store, hasher *authorization.AuthorizationHasher, tx kv.Tx) { + for i := 1; i <= initialTokenCount; i++ { err := store.DeleteAuthorization(context.Background(), tx, platform.ID(i)) - if err != nil { - t.Fatalf("Could not delete authorization [Error]: %v", err) - } + require.NoError(t, err) } }, - results: func(t *testing.T, store *authorization.Store, tx kv.Tx) { - for i := 1; i <= 10; i++ { - _, err := store.GetAuthorizationByID(context.Background(), tx, platform.ID(i)) - if err == nil { - t.Fatal("Authorization was not deleted correctly") + results: func(t *testing.T, useHashedTokens bool, store *authorization.Store, hasher *authorization.AuthorizationHasher, tx kv.Tx) { + for i := 1; i <= initialTokenCount; i++ { + a, err := store.GetAuthorizationByID(context.Background(), tx, platform.ID(i)) + require.ErrorIs(t, err, authorization.ErrAuthNotFound) + require.Nil(t, a) + } + checkIndexCounts(t, tx, 0, 0) + }, + }, + { + // This is an artificial test to set both Token and HashedToken. This should not occur in normal operation, but + // we want to make sure we have the correct behavior. + name: "set Token and HashedToken", + setup: setup, + update: func(t *testing.T, store *authorization.Store, hasher *authorization.AuthorizationHasher, tx kv.Tx) { + for i := 1; i <= initialTokenCount; i++ { + auth, err := store.GetAuthorizationByID(context.Background(), tx, platform.ID(i)) + require.NoError(t, err) + + auth.Token = generateToken(i) + hashedToken, err := hasher.Hash(auth.Token) + require.NoError(t, err) + auth.HashedToken = hashedToken + + newAuth, err := store.UpdateAuthorization(context.Background(), tx, platform.ID(i), auth) + require.NoError(t, err) + require.NotNil(t, newAuth) + + // Make sure update fails if tokens mismatch. 
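(Context for the mismatch case that follows: the store rejects an update that supplies both a raw token and a hashed token that do not correspond, surfacing authorization.ErrHashedTokenMismatch. The sketch below only illustrates that consistency rule and is not the store's actual implementation; verifyTokenPair and hashFn are hypothetical names, and it assumes a deterministic hash, as these tests do when comparing hasher.Hash output directly.)

package example

import "errors"

// errHashedTokenMismatch stands in for authorization.ErrHashedTokenMismatch.
var errHashedTokenMismatch = errors.New("hashed token does not match token")

// verifyTokenPair checks the rule exercised below: when both a raw token and a
// hashed token are supplied, hashing the raw token must reproduce the supplied
// hashed token. hashFn stands in for the store's configured hasher.
func verifyTokenPair(token, hashedToken string, hashFn func(string) (string, error)) error {
	if token == "" || hashedToken == "" {
		return nil // nothing to cross-check when only one form is present
	}
	h, err := hashFn(token)
	if err != nil {
		return err
	}
	if h != hashedToken {
		return errHashedTokenMismatch
	}
	return nil
}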
+ auth.Token = "Hadouken" + badHashedToken, err := hasher.Hash("Shoryuken") + require.NoError(t, err) + auth.HashedToken = badHashedToken + newAuth, err = store.UpdateAuthorization(context.Background(), tx, platform.ID(i), auth) + require.ErrorIs(t, err, authorization.ErrHashedTokenMismatch) + require.Nil(t, newAuth) + } + }, + results: func(t *testing.T, useHashedTokens bool, store *authorization.Store, hasher *authorization.AuthorizationHasher, tx kv.Tx) { + for i := 1; i <= initialTokenCount; i++ { + authByID, err := store.GetAuthorizationByID(context.Background(), tx, platform.ID(i)) + require.NoError(t, err) + if !useHashedTokens { + require.Equal(t, generateToken(i), authByID.Token) + require.Empty(t, authByID.HashedToken) + } else { + require.Empty(t, authByID.Token) + hashedToken, err := hasher.Hash(generateToken(i)) + require.NoError(t, err) + require.Equal(t, hashedToken, authByID.HashedToken) } + + // Should get the exact same record when fetching by the token. + authByToken, err := store.GetAuthorizationByToken(context.Background(), tx, generateToken(i)) + require.NoError(t, err) + require.Equal(t, *authByID, *authByToken) + } + + if !useHashedTokens { + // All unhashed index entries. + checkIndexCounts(t, tx, initialTokenCount, 0) + } else { + // All hashed index entries. + checkIndexCounts(t, tx, 0, initialTokenCount) } }, }, } for _, testScenario := range tt { - t.Run(testScenario.name, func(t *testing.T) { - store := inmem.NewKVStore() - if err := all.Up(context.Background(), zaptest.NewLogger(t), store); err != nil { - t.Fatal(err) - } + for _, useHashedTokens := range []bool{false, true} { - ts, err := authorization.NewStore(store) - if err != nil { - t.Fatal(err) - } + t.Run(testScenario.name, func(t *testing.T) { + store := inmem.NewKVStore() + err := all.Up(context.Background(), zaptest.NewLogger(t), store) + require.NoError(t, err) - // setup - if testScenario.setup != nil { - err := ts.Update(context.Background(), func(tx kv.Tx) error { - testScenario.setup(t, ts, tx) - return nil - }) + hasher, err := authorization.NewAuthorizationHasher() + require.NoError(t, err) + require.NotNil(t, hasher) + + ts, err := authorization.NewStore(context.Background(), store, useHashedTokens, authorization.WithAuthorizationHasher(hasher)) + require.NoError(t, err) + require.NotNil(t, ts) - if err != nil { - t.Fatal(err) + // setup + if testScenario.setup != nil { + err := ts.Update(context.Background(), func(tx kv.Tx) error { + testScenario.setup(t, useHashedTokens, ts, hasher, tx) + return nil + }) + require.NoError(t, err) } - } - // update - if testScenario.update != nil { - err := ts.Update(context.Background(), func(tx kv.Tx) error { - testScenario.update(t, ts, tx) - return nil - }) + // update + if testScenario.update != nil { + err := ts.Update(context.Background(), func(tx kv.Tx) error { + testScenario.update(t, ts, hasher, tx) + return nil + }) + require.NoError(t, err) + } - if err != nil { - t.Fatal(err) + // results + if testScenario.results != nil { + err := ts.View(context.Background(), func(tx kv.Tx) error { + testScenario.results(t, useHashedTokens, ts, hasher, tx) + return nil + }) + require.NoError(t, err) + } + }) + } + } +} + +func TestAuthorizationStore_HashingConfigChanges(t *testing.T) { + sha256, err := influxdb2_algo.New(influxdb2_algo.WithVariant(influxdb2_algo.VariantSHA256)) + require.NoError(t, err) + sha512, err := influxdb2_algo.New(influxdb2_algo.WithVariant(influxdb2_algo.VariantSHA512)) + require.NoError(t, err) + + type authData struct { + ID 
platform.ID + Token string + HashedToken string + } + type testConfig struct { + enabled bool + algo string + } + type testCase struct { + desc string + config testConfig + action func(t *testing.T, ctx context.Context, store *authorization.Store, tx kv.Tx) + exp []authData + hashedTokens []string // tokens which only exists as hashes + } + cases := []testCase{ + { + desc: "initial unhashed", + config: testConfig{enabled: false}, + action: func(t *testing.T, ctx context.Context, store *authorization.Store, tx kv.Tx) { + a := &influxdb.Authorization{ + ID: platform.ID(1), + OrgID: platform.ID(1), + UserID: platform.ID(1), + Token: "Token#1", + } + require.NoError(t, store.CreateAuthorization(ctx, tx, a)) + }, + exp: []authData{ + {ID: platform.ID(1), Token: "Token#1"}, + }, + }, + { + desc: "upgrade hashed #1", // update hash and indices + config: testConfig{enabled: true, algo: influxdb2_algo.VariantIdentifierSHA256}, + exp: []authData{ + {ID: platform.ID(1), HashedToken: sha256.MustHash("Token#1").Encode()}, + }, + hashedTokens: []string{"Token#1"}, + }, + { + desc: "downgrade hashed #1", // can't unhash + config: testConfig{enabled: false, algo: influxdb2_algo.VariantIdentifierSHA256}, + action: func(t *testing.T, ctx context.Context, store *authorization.Store, tx kv.Tx) { + a := &influxdb.Authorization{ + ID: platform.ID(2), + OrgID: platform.ID(2), + UserID: platform.ID(2), + Token: "Token#2", + } + require.NoError(t, store.CreateAuthorization(ctx, tx, a)) + }, + exp: []authData{ + {ID: platform.ID(1), HashedToken: sha256.MustHash("Token#1").Encode()}, + {ID: platform.ID(2), Token: "Token#2"}, + }, + hashedTokens: []string{"Token#1"}, + }, + { + desc: "upgrade hashed sha512", // can't rehash existing, use new algo for new auths + config: testConfig{enabled: true, algo: influxdb2_algo.VariantIdentifierSHA512}, + action: func(t *testing.T, ctx context.Context, store *authorization.Store, tx kv.Tx) { + a := &influxdb.Authorization{ + ID: platform.ID(3), + OrgID: platform.ID(3), + UserID: platform.ID(3), + Token: "Token#3", + } + require.NoError(t, store.CreateAuthorization(ctx, tx, a)) + }, + exp: []authData{ + {ID: platform.ID(1), HashedToken: sha256.MustHash("Token#1").Encode()}, + {ID: platform.ID(2), HashedToken: sha512.MustHash("Token#2").Encode()}, + {ID: platform.ID(3), HashedToken: sha512.MustHash("Token#3").Encode()}, + }, + hashedTokens: []string{"Token#1", "Token#2", "Token#3"}, + }, + + // The following tests are artificial tests intended to check proper operation when both + // Token and HashedToken are set on an update. This should not occur in normal operation because, + // we do not alter tokens like this. However, there is nothing to prevent this so we want to make sure + // it works properly. + { + desc: "set Token and HashedToken with hashing enabled", + config: testConfig{enabled: true, algo: influxdb2_algo.VariantIdentifierSHA512}, + action: func(t *testing.T, ctx context.Context, store *authorization.Store, tx kv.Tx) { + for i := 1; i <= 3; i++ { + token := fmt.Sprintf("Token#%d", i) + auth, err := store.GetAuthorizationByToken(ctx, tx, token) + require.NoError(t, err) + require.Empty(t, auth.Token, "only HashedToken should be stored") + + // Set Token and update. 
+ auth.Token = token + newAuth, err := store.UpdateAuthorization(ctx, tx, platform.ID(i), auth) + require.NoError(t, err) + + // newAuth.Token should not have been saved to BoltDB, but newAuth.Token should still be present + require.Equal(t, token, newAuth.Token) + } + }, + // NOTE: All hashes should be updated to the currently configured algorithm. + exp: []authData{ + {ID: platform.ID(1), HashedToken: sha512.MustHash("Token#1").Encode()}, + {ID: platform.ID(2), HashedToken: sha512.MustHash("Token#2").Encode()}, + {ID: platform.ID(3), HashedToken: sha512.MustHash("Token#3").Encode()}, + }, + hashedTokens: []string{"Token#1", "Token#2", "Token#3"}, + }, + { + desc: "set Token and HashedToken with hashing disabled", + config: testConfig{enabled: false}, + action: func(t *testing.T, ctx context.Context, store *authorization.Store, tx kv.Tx) { + for i := 1; i <= 3; i++ { + token := fmt.Sprintf("Token#%d", i) + auth, err := store.GetAuthorizationByToken(ctx, tx, token) + require.NoError(t, err) + require.Empty(t, auth.Token, "only HashedToken should be stored") + require.NotEmpty(t, auth.HashedToken, "HashedToken should be set") + + // Set Token and update. + auth.Token = token + newAuth, err := store.UpdateAuthorization(ctx, tx, platform.ID(i), auth) + require.NoError(t, err) + + // newAuth.Token should be set, but newAuth.HashedToken should be cleared. + require.Equal(t, token, newAuth.Token) + require.Empty(t, newAuth.HashedToken) + } + }, + exp: []authData{ + {ID: platform.ID(1), Token: "Token#1"}, + {ID: platform.ID(2), Token: "Token#2"}, + {ID: platform.ID(3), Token: "Token#3"}, + }, + hashedTokens: []string{}, + }, + } + + ctx := context.Background() + + // The underlying kv store persists across tests cases. This allows for testing how opening with + // new authentication configurations impacts the data. + kvStore := inmem.NewKVStore() + err = all.Up(ctx, zaptest.NewLogger(t), kvStore) + require.NoError(t, err) + + for _, tc := range cases { + t.Run(tc.desc, func(t *testing.T) { + // Create new authorization.Store for test cases using existing kvStore. + variantName := tc.config.algo + if variantName == "" { + if !tc.config.enabled { + variantName = authorization.DefaultHashVariantName + } else { + require.Fail(t, "Must specific algo if hashing is enabled for test case") } } + store, err := authorization.NewStore(ctx, kvStore, tc.config.enabled, authorization.WithAuthorizationHashVariantName(variantName)) + require.NoError(t, err) + require.NotNil(t, store) - // results - if testScenario.results != nil { - err := ts.View(context.Background(), func(tx kv.Tx) error { - testScenario.results(t, ts, tx) + // Execute action, if given. Simply opening the store with a different configuration may be the "action". + if tc.action != nil { + err = kvStore.Update(ctx, func(tx kv.Tx) error { + tc.action(t, ctx, store, tx) return nil }) + require.NoError(t, err) + } - if err != nil { - t.Fatal(err) + // Check results. + err = kvStore.View(ctx, func(tx kv.Tx) error { + // Collect all authorization data from store. + storedAuths, err := store.ListAuthorizations(ctx, tx, influxdb.AuthorizationFilter{}) + require.NoError(t, err) + + // Collect auth data from data currently in store + actualAuthData := make([]authData, 0, len(storedAuths)) + for _, sa := range storedAuths { + ad := authData{ID: sa.ID, Token: sa.Token, HashedToken: sa.HashedToken} + actualAuthData = append(actualAuthData, ad) } - } + + // Check that authData matches exp. 
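As an aside on how the expected values above are built: the test derives the exact PHC-encoded string a stored HashedToken should take by running the same influxdb2 variant the store is configured with. A minimal sketch of that derivation for the SHA-256 variant (error handling is collapsed with MustHash, as in the test itself):

package example

import (
	influxdb2_algo "github.com/influxdata/influxdb/v2/pkg/crypt/algorithm/influxdb2"
)

// expectedSHA256Hash returns the PHC-encoded form a token is expected to be stored
// as when the store uses the SHA-256 variant of the influxdb2 hashing scheme.
func expectedSHA256Hash(token string) (string, error) {
	hasher, err := influxdb2_algo.New(influxdb2_algo.WithVariant(influxdb2_algo.VariantSHA256))
	if err != nil {
		return "", err
	}
	return hasher.MustHash(token).Encode(), nil
}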
+ require.ElementsMatch(t, tc.exp, actualAuthData) + + // Collect data from kvStore's token index. + collectIndex := func(indexName string) map[string]platform.ID { + indexMap := make(map[string]platform.ID) + index, err := tx.Bucket([]byte(indexName)) + require.NoError(t, err) + cursor, err := index.Cursor() + require.NoError(t, err) + for k, v := cursor.First(); k != nil; k, v = cursor.Next() { + var id platform.ID + require.NoError(t, id.Decode(v)) + indexMap[string(k)] = id + } + return indexMap + } + actualTokenIndex := collectIndex(authIndexName) + actualHashedIndex := collectIndex(hashedAuthIndexName) + + // Collect expected token and hashed indices. + expTokenIndex := make(map[string]platform.ID) + expHashedIndex := make(map[string]platform.ID) + for _, d := range tc.exp { + if d.Token != "" { + expTokenIndex[d.Token] = d.ID + } + if d.HashedToken != "" { + expHashedIndex[d.HashedToken] = d.ID + } + } + + // Compare indices. + require.Equal(t, expTokenIndex, actualTokenIndex) + require.Equal(t, expHashedIndex, actualHashedIndex) + + // Make sure we can lookup all tokens. + var allTokens []string + for _, d := range tc.exp { + if d.Token != "" { + allTokens = append(allTokens, d.Token) + } + } + allTokens = append(allTokens, tc.hashedTokens...) + + for _, token := range allTokens { + auth, err := store.GetAuthorizationByToken(ctx, tx, token) + require.NoError(t, err, "error looking up token %q", token) + require.NotNil(t, auth) + } + + return nil + }) + require.NoError(t, err) }) + } } diff --git a/authorizer/task_test.go b/authorizer/task_test.go index 2b504bc5210..359ad4aa622 100644 --- a/authorizer/task_test.go +++ b/authorizer/task_test.go @@ -2,6 +2,7 @@ package authorizer_test import ( "context" + "fmt" "testing" "time" @@ -19,38 +20,39 @@ import ( "github.com/influxdata/influxdb/v2/task/taskmodel" "github.com/influxdata/influxdb/v2/tenant" "github.com/pkg/errors" + "github.com/stretchr/testify/require" "go.uber.org/zap/zaptest" ) func TestOnboardingValidation(t *testing.T) { - _, onboard := setup(t) - - ts := authorizer.NewTaskService(zaptest.NewLogger(t), mockTaskService(3, 2, 1)) - - r, err := onboard.OnboardInitialUser(context.Background(), &influxdb.OnboardingRequest{ - User: "Setec Astronomy", - Password: "too many secrets", - Org: "thing", - Bucket: "holder", - RetentionPeriodSeconds: 1, - }) - if err != nil { - t.Fatal(err) - } - - ctx := pctx.SetAuthorizer(context.Background(), r.Auth) - - _, err = ts.CreateTask(ctx, taskmodel.TaskCreate{ - OrganizationID: r.Org.ID, - OwnerID: r.Auth.GetUserID(), - Flux: `option task = { + for _, useHashedTokens := range []bool{false, true} { + t.Run(fmt.Sprintf("TestOnboardingValidation/HashedTokens=%t", useHashedTokens), func(t *testing.T) { + _, onboard := setup(t, useHashedTokens) + + ts := authorizer.NewTaskService(zaptest.NewLogger(t), mockTaskService(3, 2, 1)) + + r, err := onboard.OnboardInitialUser(context.Background(), &influxdb.OnboardingRequest{ + User: "Setec Astronomy", + Password: "too many secrets", + Org: "thing", + Bucket: "holder", + RetentionPeriodSeconds: 1, + }) + require.NoError(t, err, "OnboardInitialUser failed") + + ctx := pctx.SetAuthorizer(context.Background(), r.Auth) + + _, err = ts.CreateTask(ctx, taskmodel.TaskCreate{ + OrganizationID: r.Org.ID, + OwnerID: r.Auth.GetUserID(), + Flux: `option task = { name: "my_task", every: 1s, } from(bucket:"holder") |> range(start:-5m) |> to(bucket:"holder", org:"thing")`, - }) - if err != nil { - t.Fatal(err) + }) + require.NoError(t, err) + }) } } @@ -119,13 +121,19 
@@ from(bucket:"holder") |> range(start:-5m) |> to(bucket:"holder", org:"thing")`, } func TestValidations(t *testing.T) { + for _, useHashedTokens := range []bool{false, true} { + runTestValidations(useHashedTokens, t) + } +} + +func runTestValidations(useHashedTokens bool, t *testing.T) { var ( taskID = platform.ID(0x7456) runID = platform.ID(0x402) otherOrg = &influxdb.Organization{Name: "other_org"} ) - svc, onboard := setup(t) + svc, onboard := setup(t, useHashedTokens) r, err := onboard.OnboardInitialUser(context.Background(), &influxdb.OnboardingRequest{ User: "Setec Astronomy", @@ -134,22 +142,16 @@ func TestValidations(t *testing.T) { Bucket: "holder", RetentionPeriodSeconds: 1, }) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "OnboardInitialUser failed") - if err := svc.CreateOrganization(context.Background(), otherOrg); err != nil { - t.Fatal(err) - } + require.NoError(t, svc.CreateOrganization(context.Background(), otherOrg)) otherBucket := &influxdb.Bucket{ Name: "other_bucket", OrgID: otherOrg.ID, } - if err = svc.CreateBucket(context.Background(), otherBucket); err != nil { - t.Fatal(err) - } + require.NoError(t, svc.CreateBucket(context.Background(), otherBucket)) var ( orgID = r.Org.ID @@ -565,7 +567,7 @@ from(bucket:"holder") |> range(start:-5m) |> to(bucket:"holder", org:"thing")` } for _, test := range tests { - t.Run(test.name, func(t *testing.T) { + t.Run(fmt.Sprintf("%s/HashedTokens=%t", test.name, useHashedTokens), func(t *testing.T) { ctx := pctx.SetAuthorizer(context.Background(), test.auth) if err := test.check(ctx, validTaskService); err != nil { if aerr, ok := err.(http.AuthzError); ok { @@ -577,17 +579,15 @@ from(bucket:"holder") |> range(start:-5m) |> to(bucket:"holder", org:"thing")` } } -func setup(t *testing.T) (*tenant.Service, influxdb.OnboardingService) { +func setup(t *testing.T, useHashedTokens bool) (*tenant.Service, influxdb.OnboardingService) { t.Helper() store := newStore(t) svc := tenant.NewService(tenant.NewStore(store)) - authStore, err := authorization.NewStore(store) - if err != nil { - t.Fatal(err) - } + authStore, err := authorization.NewStore(context.Background(), store, useHashedTokens) + require.NoError(t, err) authSvc := authorization.NewService(authStore, svc) @@ -601,9 +601,7 @@ func newStore(t *testing.T) kv.Store { store := inmem.NewKVStore() - if err := all.Up(context.Background(), zaptest.NewLogger(t), store); err != nil { - t.Fatal(err) - } + require.NoError(t, all.Up(context.Background(), zaptest.NewLogger(t), store)) return store } diff --git a/cmd/influxd/downgrade/downgrade.go b/cmd/influxd/downgrade/downgrade.go index 4e343f7c85b..8e9ec2ecd49 100644 --- a/cmd/influxd/downgrade/downgrade.go +++ b/cmd/influxd/downgrade/downgrade.go @@ -75,7 +75,7 @@ influxd binary to boot successfully. The target version of the downgrade must be specified, i.e. "influxd downgrade 2.0". 
`, ValidArgs: validDowngradeTargets, - Args: cobra.ExactValidArgs(1), + Args: cobra.MatchAll(cobra.ExactArgs(1), cobra.OnlyValidArgs), RunE: func(cmd *cobra.Command, args []string) error { logconf := &influxlogger.Config{ Format: "auto", diff --git a/cmd/influxd/inspect/verify_tsm/verify_tsm_test.go b/cmd/influxd/inspect/verify_tsm/verify_tsm_test.go index 6584e67b6f3..f2a31317d3b 100644 --- a/cmd/influxd/inspect/verify_tsm/verify_tsm_test.go +++ b/cmd/influxd/inspect/verify_tsm/verify_tsm_test.go @@ -18,6 +18,7 @@ func TestInvalidChecksum(t *testing.T) { verify := NewTSMVerifyCommand() b := bytes.NewBufferString("") verify.SetOut(b) + verify.SetErr(b) verify.SetArgs([]string{"--engine-path", path}) require.NoError(t, verify.Execute()) @@ -33,6 +34,7 @@ func TestValidChecksum(t *testing.T) { verify := NewTSMVerifyCommand() b := bytes.NewBufferString("") verify.SetOut(b) + verify.SetErr(b) verify.SetArgs([]string{"--engine-path", path}) require.NoError(t, verify.Execute()) @@ -58,6 +60,7 @@ func TestValidUTF8(t *testing.T) { verify := NewTSMVerifyCommand() b := bytes.NewBufferString("") verify.SetOut(b) + verify.SetErr(b) verify.SetArgs([]string{"--engine-path", path, "--check-utf8"}) require.NoError(t, verify.Execute()) diff --git a/cmd/influxd/inspect/verify_wal/verify_wal_test.go b/cmd/influxd/inspect/verify_wal/verify_wal_test.go index b39c0e9285f..4228e7b6478 100644 --- a/cmd/influxd/inspect/verify_wal/verify_wal_test.go +++ b/cmd/influxd/inspect/verify_wal/verify_wal_test.go @@ -85,6 +85,7 @@ func runCommand(args testInfo) { b := bytes.NewBufferString("") verify.SetOut(b) + verify.SetErr(b) if args.withStdErr { verify.SetErr(b) } diff --git a/cmd/influxd/launcher/backup_restore_test.go b/cmd/influxd/launcher/backup_restore_test.go index 868e75593e3..ae545701589 100644 --- a/cmd/influxd/launcher/backup_restore_test.go +++ b/cmd/influxd/launcher/backup_restore_test.go @@ -2,6 +2,7 @@ package launcher_test import ( "context" + "fmt" "testing" "github.com/influxdata/influx-cli/v2/clients/backup" @@ -13,7 +14,26 @@ import ( "go.uber.org/zap" ) +func runBackupRestoreTests(t *testing.T, name string, testFunc func(bool, bool, *testing.T)) { + t.Helper() + for _, backupHashedTokens := range []bool{false, true} { + for _, restoreHashedTokens := range []bool{false, true} { + t.Run(fmt.Sprintf("%s/BackupHashedTokens=%t/RestoreHashedTokens=%t", name, backupHashedTokens, restoreHashedTokens), + func() func(*testing.T) { + return func(t *testing.T) { + testFunc(backupHashedTokens, restoreHashedTokens, t) + } + }()) + } + } +} + func TestBackupRestore_Full(t *testing.T) { + t.Helper() + runBackupRestoreTests(t, "TestBackupRestore_Full", runTestBackupRestore_Full) +} + +func runTestBackupRestore_Full(backupHashedTokens, restoreHashedTokens bool, t *testing.T) { t.Parallel() ctx := context.Background() @@ -24,6 +44,7 @@ func TestBackupRestore_Full(t *testing.T) { o.StoreType = "bolt" o.Testing = false o.LogLevel = zap.InfoLevel + o.UseHashedTokens = backupHashedTokens }) originalAuth := *l1.Auth l1.WritePointsOrFail(t, "m,k=v1 f=100i 946684800000000000\nm,k=v2 f=200i 946684800000000001") @@ -49,6 +70,7 @@ func TestBackupRestore_Full(t *testing.T) { o.StoreType = "bolt" o.Testing = false o.LogLevel = zap.InfoLevel + o.UseHashedTokens = restoreHashedTokens }) defer l2.ShutdownOrFail(t, ctx) @@ -73,7 +95,11 @@ func TestBackupRestore_Full(t *testing.T) { }, "m,k=v5 f=100i 946684800000000005\nm,k=v7 f=200i 946684800000000006") // Perform a full restore from the previous backups. 
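A quick illustration of the downgrade command change above: the argument validation is now composed from cobra.ExactArgs(1) and cobra.OnlyValidArgs via cobra.MatchAll, which enforces exactly one positional argument drawn from ValidArgs, the same behaviour the old ExactValidArgs(1) provided. A sketch with a hypothetical command (validTargets stands in for validDowngradeTargets):

package example

import "github.com/spf13/cobra"

// newTargetedCommand accepts exactly one positional argument, and that argument
// must be one of the listed valid targets.
func newTargetedCommand(validTargets []string) *cobra.Command {
	return &cobra.Command{
		Use:       "downgrade <target-version>",
		ValidArgs: validTargets,
		Args:      cobra.MatchAll(cobra.ExactArgs(1), cobra.OnlyValidArgs),
		RunE: func(cmd *cobra.Command, args []string) error {
			return nil // a real command would perform the downgrade here
		},
	}
}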
- l2.RestoreOrFail(t, ctx, restore.Params{Path: backupDir, Full: true}) + restoreParams := restore.Params{Path: backupDir, Full: true} + if backupHashedTokens { + restoreParams.OperatorToken = originalAuth.Token + } + l2.RestoreOrFail(t, ctx, restoreParams) // A full restore also restores the original token l2.Auth = &originalAuth @@ -110,6 +136,11 @@ func TestBackupRestore_Full(t *testing.T) { } func TestBackupRestore_Partial(t *testing.T) { + t.Helper() + runBackupRestoreTests(t, "TestBackupRestore_Full", runTestBackupRestore_Partial) +} + +func runTestBackupRestore_Partial(backupHashedTokens, restoreHashedTokens bool, t *testing.T) { t.Parallel() ctx := context.Background() @@ -120,6 +151,7 @@ func TestBackupRestore_Partial(t *testing.T) { o.StoreType = "bolt" o.Testing = false o.LogLevel = zap.InfoLevel + o.UseHashedTokens = backupHashedTokens }) l1.WritePointsOrFail(t, "m,k=v1 f=100i 946684800000000000\nm,k=v2 f=200i 946684800000000001") l1.BackupOrFail(t, ctx, backup.Params{Path: backupDir}) @@ -144,6 +176,7 @@ func TestBackupRestore_Partial(t *testing.T) { o.StoreType = "bolt" o.Testing = false o.LogLevel = zap.InfoLevel + o.UseHashedTokens = restoreHashedTokens }) defer l2.ShutdownOrFail(t, ctx) diff --git a/cmd/influxd/launcher/cmd.go b/cmd/influxd/launcher/cmd.go index 5fd1ffbb6f1..cdd28603d39 100644 --- a/cmd/influxd/launcher/cmd.go +++ b/cmd/influxd/launcher/cmd.go @@ -198,6 +198,7 @@ type InfluxdOpts struct { // TemplateFileUrlsDisabled disables file protocol URIs in templates. TemplateFileUrlsDisabled bool StrongPasswords bool + UseHashedTokens bool } // NewOpts constructs options with default values. @@ -702,6 +703,12 @@ func (o *InfluxdOpts) BindCliOpts() []cli.Opt { Default: o.StrongPasswords, Desc: "enable password strength enforcement", }, + { + DestP: &o.UseHashedTokens, + Flag: "use-hashed-tokens", + Default: o.UseHashedTokens, + Desc: "enable storing hashed API tokens on disk (improves security, but prevents downgrades to < 2.8)", + }, } } diff --git a/cmd/influxd/launcher/launcher.go b/cmd/influxd/launcher/launcher.go index 115a1495b23..31b0af88b03 100644 --- a/cmd/influxd/launcher/launcher.go +++ b/cmd/influxd/launcher/launcher.go @@ -292,9 +292,11 @@ func (m *Launcher) run(ctx context.Context, opts *InfluxdOpts) (err error) { var authSvc platform.AuthorizationService { - authStore, err := authorization.NewStore(m.kvStore) + hasherVariantName := authorization.DefaultHashVariantName // This value could come from opts in the future. 
+ authStoreLogger := m.log.With(zap.String("store", "auth")) + authStore, err := authorization.NewStore(ctx, m.kvStore, opts.UseHashedTokens, authorization.WithAuthorizationHashVariantName(hasherVariantName), authorization.WithLogger(authStoreLogger)) if err != nil { - m.log.Error("Failed creating new authorization store", zap.Error(err)) + m.log.Error("Failed creating new authorization store", zap.Error(err), zap.Bool("UseHashedTokens", opts.UseHashedTokens), zap.String("hasherVariant", hasherVariantName)) return err } authSvc = authorization.NewService(authStore, ts) diff --git a/cmd/influxd/recovery/auth/auth.go b/cmd/influxd/recovery/auth/auth.go index f868303addd..0e6d6fda55b 100644 --- a/cmd/influxd/recovery/auth/auth.go +++ b/cmd/influxd/recovery/auth/auth.go @@ -2,6 +2,7 @@ package auth import ( "context" + "errors" "fmt" "io" "os" @@ -66,19 +67,29 @@ func NewAuthListCommand() *cobra.Command { return cmd } -func (cmd *authListCommand) run() error { +func (cmd *authListCommand) run() (rErr error) { ctx := context.Background() store := bolt.NewKVStore(cmd.logger.With(zap.String("system", "bolt-kvstore")), cmd.boltPath) if err := store.Open(ctx); err != nil { return err } - defer store.Close() + defer func() { + rErr = errors.Join(store.Close(), rErr) + }() tenantStore := tenant.NewStore(store) tenantService := tenant.NewService(tenantStore) - authStore, err := authorization.NewStore(store) + + // Giving an already extant AuthorizationHasher avoids scanning the existing hashes in the store. + hasher, err := authorization.NewAuthorizationHasher() + if err != nil { + return err + } + // Create authStore read-only since we're not properly configuring if hashed tokens are enabled. + authStore, err := authorization.NewStore(ctx, store, false, authorization.WithReadOnly(true), authorization.WithAuthorizationHasher(hasher), authorization.WithLogger(cmd.logger)) if err != nil { return err } + // The value of useHashedTokens doesn't matter since authStore is read-only. 
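The deferred Close handling introduced above is a general pattern worth seeing in isolation: the error from Close is joined with the function's named return error instead of being dropped. A minimal sketch with a generic io.Closer (withClose is an ad hoc name):

package example

import (
	"errors"
	"io"
)

// withClose runs work against a Closer and folds the Close error into the
// returned error, mirroring the defer used in the recovery commands above.
func withClose(c io.Closer, work func() error) (rErr error) {
	defer func() {
		rErr = errors.Join(c.Close(), rErr)
	}()
	return work()
}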
auth := authorization.NewService(authStore, tenantService) filter := influxdb.AuthorizationFilter{} auths, _, err := auth.FindAuthorizations(ctx, filter) @@ -95,6 +106,8 @@ type authCreateCommand struct { out io.Writer username string org string + + useTokenHashing bool } func NewAuthCreateCommand() *cobra.Command { @@ -120,20 +133,27 @@ func NewAuthCreateCommand() *cobra.Command { cmd.Flags().StringVar(&authCmd.boltPath, "bolt-path", defaultPath, "Path to the BoltDB file") cmd.Flags().StringVar(&authCmd.username, "username", "", "Name of the user") cmd.Flags().StringVar(&authCmd.org, "org", "", "Name of the org") + cmd.Flags().BoolVar(&authCmd.useTokenHashing, "store-token-hashes", false, "Store token hashes") return cmd } -func (cmd *authCreateCommand) run() error { +func (cmd *authCreateCommand) run() (rErr error) { ctx := context.Background() store := bolt.NewKVStore(cmd.logger.With(zap.String("system", "bolt-kvstore")), cmd.boltPath) if err := store.Open(ctx); err != nil { return err } - defer store.Close() + defer func() { + rErr = errors.Join(store.Close(), rErr) + }() + tenantStore := tenant.NewStore(store) tenantService := tenant.NewService(tenantStore) - authStore, err := authorization.NewStore(store) + hashVariantName := authorization.DefaultHashVariantName // In the future this could come from cmd + ignoreMissingHashIndex := !cmd.useTokenHashing // we can ignore a missing index only if the user did not request token hashing + authStore, err := authorization.NewStore(ctx, store, cmd.useTokenHashing, + authorization.WithAuthorizationHashVariantName(hashVariantName), authorization.WithIgnoreMissingHashIndex(ignoreMissingHashIndex), authorization.WithLogger(cmd.logger)) if err != nil { return err } @@ -197,12 +217,20 @@ func PrintAuth(ctx context.Context, w io.Writer, v []*influxdb.Authorization, us if err == nil && user != nil { userName = user.Name } + var token string + if t.Token != "" { + token = t.Token + } else if t.HashedToken != "" { + token = authorization.TokenRedactedMessage + } else { + token = authorization.TokenNotAvailableMessage + } row := map[string]interface{}{ "ID": t.ID, "Description": t.Description, "User Name": userName, "User ID": t.UserID, - "Token": t.Token, + "Token": token, "Permissions": t.Permissions, } rows = append(rows, row) diff --git a/cmd/influxd/recovery/auth/auth_test.go b/cmd/influxd/recovery/auth/auth_test.go index 28d66477b88..f26b2ea146c 100644 --- a/cmd/influxd/recovery/auth/auth_test.go +++ b/cmd/influxd/recovery/auth/auth_test.go @@ -4,30 +4,30 @@ import ( "testing" "github.com/influxdata/influxdb/v2/cmd/influxd/recovery/testhelper" - "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func Test_Auth_Basic(t *testing.T) { db := testhelper.NewTestBoltDb(t) defer db.Close() - assert.Equal(t, ""+ + require.Equal(t, ""+ `ID User Name User ID Description Token Permissions`+"\n"+ - `08371db24dcc8000 testuser 08371db1dd8c8000 testuser's Token A9Ovdl8SmP-rfp8wQ2vJoPUsZoQQJ3EochD88SlJcgrcLw4HBwgUqpSHQxc9N9Drg0_aY6Lp1jutBRcKhbV7aQ== [read:authorizations write:authorizations read:buckets write:buckets read:dashboards write:dashboards read:orgs write:orgs read:sources write:sources read:tasks write:tasks read:telegrafs write:telegrafs read:users write:users read:variables write:variables read:scrapers write:scrapers read:secrets write:secrets read:labels write:labels read:views write:views read:documents write:documents read:notificationRules write:notificationRules read:notificationEndpoints write:notificationEndpoints 
read:checks write:checks read:dbrp write:dbrp read:notebooks write:notebooks read:annotations write:annotations]`+"\n"+ + `08371db24dcc8000 testuser 08371db1dd8c8000 testuser's Token A9Ovdl8SmP-rfp8wQ2vJoPUsZoQQJ3EochD88SlJcgrcLw4HBwgUqpSHQxc9N9Drg0_aY6Lp1jutBRcKhbV7aQ== [read:annotations write:annotations read:authorizations write:authorizations read:buckets write:buckets read:checks write:checks read:dashboards write:dashboards read:dbrp write:dbrp read:documents write:documents read:labels write:labels read:notebooks write:notebooks read:notificationEndpoints write:notificationEndpoints read:notificationRules write:notificationRules read:orgs write:orgs read:scrapers write:scrapers read:secrets write:secrets read:sources write:sources read:tasks write:tasks read:telegrafs write:telegrafs read:users write:users read:variables write:variables read:views write:views read:remotes write:remotes read:replications write:replications]`+"\n"+ `08371deae98c8000 testuser 08371db1dd8c8000 testuser's read buckets token 4-pZrlm84u9uiMVrPBeITe46KxfdEnvTX5H2CZh38BtAsXX4O47b8QwZ9jHL_Cek2w-VbVfRxDpo0Mu8ORiqyQ== [read:orgs/dd7cd2292f6e974a/buckets]`+"\n", testhelper.MustRunCommand(t, NewAuthCommand(), "list", "--bolt-path", db.Name())) // org name not created - assert.EqualError(t, testhelper.RunCommand(t, NewAuthCommand(), "create-operator", "--bolt-path", db.Name(), "--org", "not-exist", "--username", "testuser"), "could not find org \"not-exist\": organization name \"not-exist\" not found") + require.EqualError(t, testhelper.RunCommand(t, NewAuthCommand(), "create-operator", "--bolt-path", db.Name(), "--org", "not-exist", "--username", "testuser"), "could not find org \"not-exist\": organization name \"not-exist\" not found") // user not created - assert.EqualError(t, testhelper.RunCommand(t, NewAuthCommand(), "create-operator", "--bolt-path", db.Name(), "--org", "myorg", "--username", "testuser2"), "could not find user \"testuser2\": user not found") + require.EqualError(t, testhelper.RunCommand(t, NewAuthCommand(), "create-operator", "--bolt-path", db.Name(), "--org", "myorg", "--username", "testuser2"), "could not find user \"testuser2\": user not found") // existing user creates properly - assert.NoError(t, testhelper.RunCommand(t, NewAuthCommand(), "create-operator", "--bolt-path", db.Name(), "--username", "testuser", "--org", "myorg")) + require.NoError(t, testhelper.RunCommand(t, NewAuthCommand(), "create-operator", "--bolt-path", db.Name(), "--username", "testuser", "--org", "myorg")) - assert.Regexp(t, ""+ + require.Regexp(t, ""+ `ID User Name User ID Description Token Permissions`+"\n"+ - `08371db24dcc8000 testuser 08371db1dd8c8000 testuser's Token A9Ovdl8SmP-rfp8wQ2vJoPUsZoQQJ3EochD88SlJcgrcLw4HBwgUqpSHQxc9N9Drg0_aY6Lp1jutBRcKhbV7aQ== \[read:authorizations write:authorizations read:buckets write:buckets read:dashboards write:dashboards read:orgs write:orgs read:sources write:sources read:tasks write:tasks read:telegrafs write:telegrafs read:users write:users read:variables write:variables read:scrapers write:scrapers read:secrets write:secrets read:labels write:labels read:views write:views read:documents write:documents read:notificationRules write:notificationRules read:notificationEndpoints write:notificationEndpoints read:checks write:checks read:dbrp write:dbrp read:notebooks write:notebooks read:annotations write:annotations\]`+"\n"+ + `08371db24dcc8000 testuser 08371db1dd8c8000 testuser's Token A9Ovdl8SmP-rfp8wQ2vJoPUsZoQQJ3EochD88SlJcgrcLw4HBwgUqpSHQxc9N9Drg0_aY6Lp1jutBRcKhbV7aQ== 
\[read:annotations write:annotations read:authorizations write:authorizations read:buckets write:buckets read:checks write:checks read:dashboards write:dashboards read:dbrp write:dbrp read:documents write:documents read:labels write:labels read:notebooks write:notebooks read:notificationEndpoints write:notificationEndpoints read:notificationRules write:notificationRules read:orgs write:orgs read:scrapers write:scrapers read:secrets write:secrets read:sources write:sources read:tasks write:tasks read:telegrafs write:telegrafs read:users write:users read:variables write:variables read:views write:views read:remotes write:remotes read:replications write:replications]`+"\n"+ `08371deae98c8000 testuser 08371db1dd8c8000 testuser's read buckets token 4-pZrlm84u9uiMVrPBeITe46KxfdEnvTX5H2CZh38BtAsXX4O47b8QwZ9jHL_Cek2w-VbVfRxDpo0Mu8ORiqyQ== \[read:orgs/dd7cd2292f6e974a/buckets\]`+"\n"+ `[^\t]* testuser [^\t]* testuser's Recovery Token [^\t]* \[read:authorizations write:authorizations read:buckets write:buckets read:dashboards write:dashboards read:orgs write:orgs read:sources write:sources read:tasks write:tasks read:telegrafs write:telegrafs read:users write:users read:variables write:variables read:scrapers write:scrapers read:secrets write:secrets read:labels write:labels read:views write:views read:documents write:documents read:notificationRules write:notificationRules read:notificationEndpoints write:notificationEndpoints read:checks write:checks read:dbrp write:dbrp read:notebooks write:notebooks read:annotations write:annotations read:remotes write:remotes read:replications write:replications\]`+"\n", testhelper.MustRunCommand(t, NewAuthCommand(), "list", "--bolt-path", db.Name())) diff --git a/cmd/influxd/recovery/testhelper/influxd.bolt.testdata b/cmd/influxd/recovery/testhelper/influxd.bolt.testdata index ea5ea17d3d5..9f98cf8588d 100644 Binary files a/cmd/influxd/recovery/testhelper/influxd.bolt.testdata and b/cmd/influxd/recovery/testhelper/influxd.bolt.testdata differ diff --git a/cmd/influxd/upgrade/security_test.go b/cmd/influxd/upgrade/security_test.go index 1c779e90c68..2809a19af04 100644 --- a/cmd/influxd/upgrade/security_test.go +++ b/cmd/influxd/upgrade/security_test.go @@ -5,18 +5,15 @@ import ( "errors" "fmt" "reflect" - "sort" "testing" "unsafe" - "github.com/google/go-cmp/cmp" "github.com/influxdata/influxdb/v2" "github.com/influxdata/influxdb/v2/authorization" "github.com/influxdata/influxdb/v2/inmem" "github.com/influxdata/influxdb/v2/kit/platform" "github.com/influxdata/influxdb/v2/kv/migration" "github.com/influxdata/influxdb/v2/kv/migration/all" - "github.com/influxdata/influxdb/v2/pkg/testing/assert" "github.com/influxdata/influxdb/v2/tenant" authv1 "github.com/influxdata/influxdb/v2/v1/authorization" "github.com/influxdata/influxdb/v2/v1/services/meta" @@ -143,165 +140,144 @@ func TestUpgradeSecurity(t *testing.T) { } for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(t *testing.T) { // better do not run in parallel - ctx := context.Background() - log := zaptest.NewLogger(t) + for _, useHashedTokens := range []bool{false, true} { + tc := tc + t.Run(fmt.Sprintf("%s/HashedTokens=%t", tc.name, useHashedTokens), func(t *testing.T) { // better to not run in parallel + ctx := context.Background() + log := zaptest.NewLogger(t) - // mock v1 meta - v1 := &influxDBv1{ - meta: &meta.Client{}, - } - data := &meta.Data{ - Users: tc.users, - } - f := reflect.ValueOf(v1.meta).Elem().Field(4) - f = reflect.NewAt(f.Type(), unsafe.Pointer(f.UnsafeAddr())).Elem() - 
f.Set(reflect.ValueOf(data)) + // mock v1 meta + v1 := &influxDBv1{ + meta: &meta.Client{}, + } + data := &meta.Data{ + Users: tc.users, + } + f := reflect.ValueOf(v1.meta).Elem().Field(4) + f = reflect.NewAt(f.Type(), unsafe.Pointer(f.UnsafeAddr())).Elem() + f.Set(reflect.ValueOf(data)) - // mock v2 meta - kvStore := inmem.NewKVStore() - migrator, err := migration.NewMigrator(zap.NewNop(), kvStore, all.Migrations[:]...) - require.NoError(t, err) - err = migrator.Up(ctx) - require.NoError(t, err) + // mock v2 meta + kvStore := inmem.NewKVStore() + migrator, err := migration.NewMigrator(zap.NewNop(), kvStore, all.Migrations[:]...) + require.NoError(t, err) + err = migrator.Up(ctx) + require.NoError(t, err) - authStoreV1, err := authv1.NewStore(kvStore) - require.NoError(t, err) + authStoreV1, err := authv1.NewStore(kvStore) + require.NoError(t, err) - tenantStore := tenant.NewStore(kvStore) - tenantSvc := tenant.NewService(tenantStore) + tenantStore := tenant.NewStore(kvStore) + tenantSvc := tenant.NewService(tenantStore) - authStoreV2, err := authorization.NewStore(kvStore) - require.NoError(t, err) + authStoreV2, err := authorization.NewStore(ctx, kvStore, useHashedTokens) + require.NoError(t, err) - v2 := &influxDBv2{ - authSvc: authv1.NewService(authStoreV1, tenantSvc), - onboardSvc: tenant.NewOnboardService( - tenantSvc, - authorization.NewService(authStoreV2, tenantSvc), - ), - } + v2 := &influxDBv2{ + authSvc: authv1.NewService(authStoreV1, tenantSvc), + onboardSvc: tenant.NewOnboardService( + tenantSvc, + authorization.NewService(authStoreV2, tenantSvc), + ), + } - // onboard admin - oReq := &influxdb.OnboardingRequest{ - User: "admin", - Password: "12345678", - Org: "testers", - Bucket: "def", - RetentionPeriodSeconds: influxdb.InfiniteRetention, - } - oResp, err := setupAdmin(ctx, v2, oReq) - require.NoError(t, err) + // onboard admin + oReq := &influxdb.OnboardingRequest{ + User: "admin", + Password: "12345678", + Org: "testers", + Bucket: "def", + RetentionPeriodSeconds: influxdb.InfiniteRetention, + } + oResp, err := setupAdmin(ctx, v2, oReq) + require.NoError(t, err) - // target options - targetOptions := optionsV2{ - userName: oReq.User, - orgName: oReq.Org, - token: oResp.Auth.Token, - orgID: oResp.Auth.OrgID, - userID: oResp.Auth.UserID, - } + // target options + targetOptions := optionsV2{ + userName: oReq.User, + orgName: oReq.Org, + token: oResp.Auth.Token, + orgID: oResp.Auth.OrgID, + userID: oResp.Auth.UserID, + } - for k, v := range tc.db2ids { - for i, id := range v { - b := &influxdb.Bucket{ - ID: id, - Name: fmt.Sprintf("%s_%d", k, id), - OrgID: targetOptions.orgID, + for k, v := range tc.db2ids { + for i, id := range v { + b := &influxdb.Bucket{ + ID: id, + Name: fmt.Sprintf("%s_%d", k, id), + OrgID: targetOptions.orgID, + } + err := tenantSvc.CreateBucket(context.Background(), b) + require.NoError(t, err) + tc.db2ids[k][i] = b.ID } - err := tenantSvc.CreateBucket(context.Background(), b) - require.NoError(t, err) - tc.db2ids[k][i] = b.ID } - } - // fill in expected permissions now that we know IDs - for _, want := range tc.want { - for _, user := range tc.users { - if want.Token == user.Name { // v1 username is v2 token - var permissions []influxdb.Permission - for db, privilege := range user.Privileges { - ids, ok := tc.db2ids[db] - require.True(t, ok) - for _, id := range ids { - id := id - resource := influxdb.Resource{ - Type: influxdb.BucketsResourceType, - OrgID: &targetOptions.orgID, - ID: &id, - } - switch privilege { - case influxql.ReadPrivilege: - 
permissions = append(permissions, influxdb.Permission{ - Action: influxdb.ReadAction, - Resource: resource, - }) - case influxql.WritePrivilege: - permissions = append(permissions, influxdb.Permission{ - Action: influxdb.WriteAction, - Resource: resource, - }) - case influxql.AllPrivileges: - permissions = append(permissions, influxdb.Permission{ - Action: influxdb.ReadAction, - Resource: resource, - }) - permissions = append(permissions, influxdb.Permission{ - Action: influxdb.WriteAction, - Resource: resource, - }) + // fill in expected permissions now that we know IDs + for _, want := range tc.want { + for _, user := range tc.users { + if want.Token == user.Name { // v1 username is v2 token + var permissions []influxdb.Permission + for db, privilege := range user.Privileges { + ids, ok := tc.db2ids[db] + require.True(t, ok) + for _, id := range ids { + id := id + resource := influxdb.Resource{ + Type: influxdb.BucketsResourceType, + OrgID: &targetOptions.orgID, + ID: &id, + } + switch privilege { + case influxql.ReadPrivilege: + permissions = append(permissions, influxdb.Permission{ + Action: influxdb.ReadAction, + Resource: resource, + }) + case influxql.WritePrivilege: + permissions = append(permissions, influxdb.Permission{ + Action: influxdb.WriteAction, + Resource: resource, + }) + case influxql.AllPrivileges: + permissions = append(permissions, influxdb.Permission{ + Action: influxdb.ReadAction, + Resource: resource, + }) + permissions = append(permissions, influxdb.Permission{ + Action: influxdb.WriteAction, + Resource: resource, + }) + } } } + want.Permissions = permissions } - want.Permissions = permissions } } - } - // command execution - n, err := upgradeUsers(ctx, v1, v2, &targetOptions, tc.db2ids, log) - assert.Equal(t, len(tc.want), n, "Upgraded count must match") - if err != nil { + // command execution + n, err := upgradeUsers(ctx, v1, v2, &targetOptions, tc.db2ids, log) if tc.wantErr != nil { - if diff := cmp.Diff(tc.wantErr.Error(), err.Error()); diff != "" { - t.Fatal(diff) - } + require.Error(t, err, "upgradeUsers should return an error for this test case") + require.EqualError(t, err, tc.wantErr.Error(), "upgradeUsers returned the wrong error for this test case") } else { - t.Fatal(err) - } - } else if tc.wantErr != nil { - t.Fatalf("should have failed with %v", tc.wantErr) - } - for _, want := range tc.want { - actual, err := v2.authSvc.FindAuthorizationByToken(ctx, want.Token) - require.NoError(t, err) - if diff := cmp.Diff(targetOptions.orgID, actual.OrgID); diff != "" { - t.Fatal(diff) - } - if diff := cmp.Diff(targetOptions.userID, actual.UserID); diff != "" { - t.Fatal(diff) + require.NoError(t, err, "upgradeUsers should not return an error for this test case") } - if diff := cmp.Diff(want.Token, actual.Token); diff != "" { - t.Fatal(diff) - } - if diff := cmp.Diff(want.Description, actual.Description); diff != "" { - t.Fatal(diff) - } - if diff := cmp.Diff(want.Status, actual.Status); diff != "" { - t.Fatal(diff) - } - sort.Slice(want.Permissions, func(i, j int) bool { - return *(want.Permissions[i].Resource.ID) < *(want.Permissions[j].Resource.ID) - }) - sort.Slice(actual.Permissions, func(i, j int) bool { - return *(actual.Permissions[i].Resource.ID) < *(actual.Permissions[j].Resource.ID) - }) - if diff := cmp.Diff(want.Permissions, actual.Permissions); diff != "" { - t.Logf("permissions mismatch for user %s", want.Token) - t.Fatal(diff) + require.Equal(t, len(tc.want), n, "Upgraded count must match") + + for _, want := range tc.want { + actual, err := 
v2.authSvc.FindAuthorizationByToken(ctx, want.Token) + require.NoError(t, err) + require.Equal(t, targetOptions.orgID, actual.OrgID) + require.Equal(t, targetOptions.userID, actual.UserID) + require.Equal(t, want.Token, actual.Token) + require.Equal(t, want.Description, actual.Description) + require.Equal(t, want.Status, actual.Status) + require.ElementsMatch(t, want.Permissions, actual.Permissions) } - } - }) + }) + } } } diff --git a/cmd/influxd/upgrade/upgrade.go b/cmd/influxd/upgrade/upgrade.go index 6e53f053949..920968135d5 100644 --- a/cmd/influxd/upgrade/upgrade.go +++ b/cmd/influxd/upgrade/upgrade.go @@ -88,12 +88,13 @@ func (o *optionsV1) populateDirs() { } type optionsV2 struct { - boltPath string - cliConfigsPath string - enginePath string - cqPath string - configPath string - rmConflicts bool + boltPath string + cliConfigsPath string + enginePath string + cqPath string + configPath string + rmConflicts bool + useHashedTokens bool userName string password string @@ -200,6 +201,12 @@ func NewCommand(ctx context.Context, v *viper.Viper) (*cobra.Command, error) { Default: filepath.Join(homeOrAnyDir(), "continuous_queries.txt"), Desc: "path for exported 1.x continuous queries", }, + { + DestP: &options.target.useHashedTokens, + Flag: "use-hashed-tokens", + Default: options.target.useHashedTokens, + Desc: "enable token hashing", + }, { DestP: &options.target.userName, Flag: "username", @@ -653,7 +660,8 @@ func newInfluxDBv2(ctx context.Context, opts *optionsV2, log *zap.Logger) (svc * svc.ts.BucketService = storage.NewBucketService(log, svc.ts.BucketService, engine) - authStoreV2, err := authorization.NewStore(svc.store) + hashVariantName := authorization.DefaultHashVariantName // In the future this could come from opts. + authStoreV2, err := authorization.NewStore(ctx, svc.store, opts.useHashedTokens, authorization.WithAuthorizationHashVariantName(hashVariantName), authorization.WithLogger(log)) if err != nil { return nil, err } diff --git a/context/token.go b/context/token.go index 39a1a5cd347..cb72dd314bc 100644 --- a/context/token.go +++ b/context/token.go @@ -40,25 +40,25 @@ func GetAuthorizer(ctx context.Context) (influxdb.Authorizer, error) { return a, nil } -// GetToken retrieves a token from the context; errors if no token. -func GetToken(ctx context.Context) (string, error) { +// HasToken determines if a context has a token. Return is nil if token found from the context; errors if no token. +func HasToken(ctx context.Context) error { a, ok := ctx.Value(authorizerCtxKey).(influxdb.Authorizer) if !ok { - return "", &errors.Error{ + return &errors.Error{ Msg: "authorizer not found on context", Code: errors.EInternal, } } - auth, ok := a.(*influxdb.Authorization) + _, ok = a.(*influxdb.Authorization) if !ok { - return "", &errors.Error{ + return &errors.Error{ Msg: fmt.Sprintf("authorizer not an authorization but a %T", a), Code: errors.EInternal, } } - return auth.Token, nil + return nil } // GetUserID retrieves the user ID from the authorizer on the context. 
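To make the GetToken to HasToken migration above concrete: call sites that only needed to confirm a token was present now do so without ever receiving the token value, which is no longer retrievable through this helper. A small sketch of a migrated call site (requireToken is a hypothetical caller):

package example

import (
	"context"

	icontext "github.com/influxdata/influxdb/v2/context"
)

// requireToken rejects requests whose context carries no *influxdb.Authorization
// (and therefore no token), without ever touching the token value itself.
func requireToken(ctx context.Context) error {
	return icontext.HasToken(ctx)
}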
diff --git a/context/token_test.go b/context/token_test.go index bc1eaa3ab37..7444114678b 100644 --- a/context/token_test.go +++ b/context/token_test.go @@ -7,6 +7,7 @@ import ( "github.com/influxdata/influxdb/v2" icontext "github.com/influxdata/influxdb/v2/context" "github.com/influxdata/influxdb/v2/kit/platform" + "github.com/stretchr/testify/require" ) func TestGetAuthorizer(t *testing.T) { @@ -24,19 +25,19 @@ func TestGetAuthorizer(t *testing.T) { } } -func TestGetToken(t *testing.T) { +func TestHasToken(t *testing.T) { + { + ctx := context.Background() + require.Error(t, icontext.HasToken(ctx)) + } + ctx := context.Background() ctx = icontext.SetAuthorizer(ctx, &influxdb.Authorization{ Token: "howdy", }) - got, err := icontext.GetToken(ctx) - if err != nil { - t.Errorf("unexpected error while retrieving token: %v", err) - } - - if want := "howdy"; got != want { - t.Errorf("GetToken() want %s, got %s", want, got) - } + require.NoError(t, icontext.HasToken(ctx)) + err := icontext.HasToken(ctx) + require.NoError(t, err) } func TestGetUserID(t *testing.T) { diff --git a/go.mod b/go.mod index fc121a516a0..18785571f99 100644 --- a/go.mod +++ b/go.mod @@ -11,7 +11,7 @@ require ( github.com/RoaringBitmap/roaring v0.4.16 github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 github.com/apache/arrow-go/v18 v18.2.0 - github.com/benbjohnson/clock v0.0.0-20161215174838-7dc76406b6d3 + github.com/benbjohnson/clock v1.1.0 github.com/benbjohnson/tmpl v1.0.0 github.com/buger/jsonparser v1.1.1 github.com/cespare/xxhash/v2 v2.3.0 @@ -21,6 +21,7 @@ require ( github.com/editorconfig-checker/editorconfig-checker v0.0.0-20190819115812-1474bdeaf2a2 github.com/elazarl/go-bindata-assetfs v1.0.1 github.com/go-chi/chi v4.1.0+incompatible + github.com/go-crypt/crypt v0.3.2 github.com/go-stack/stack v1.8.0 github.com/golang-jwt/jwt/v4 v4.5.2 github.com/golang/gddo v0.0.0-20181116215533-9bd4a3295021 @@ -35,7 +36,7 @@ require ( github.com/influxdata/cron v0.0.0-20201006132531-4bb0a200dcbe github.com/influxdata/flux v0.197.0 github.com/influxdata/httprouter v1.3.1-0.20191122104820-ee83e2772f69 - github.com/influxdata/influx-cli/v2 v2.2.1-0.20221028161653-3285a03e9e28 + github.com/influxdata/influx-cli/v2 v2.7.1-0.20250130214939-76d1c4d9b777 github.com/influxdata/influxql v1.2.0 github.com/influxdata/line-protocol v0.0.0-20200327222509-2487e7298839 github.com/influxdata/pkg-config v0.3.0 @@ -55,7 +56,7 @@ require ( github.com/prometheus/common v0.53.0 github.com/retailnext/hllpp v1.0.1-0.20180308014038-101a6d2f8b52 github.com/spf13/cast v1.3.0 - github.com/spf13/cobra v1.0.0 + github.com/spf13/cobra v1.7.0 github.com/spf13/pflag v1.0.6 github.com/spf13/viper v1.6.1 github.com/stretchr/testify v1.10.0 @@ -65,7 +66,7 @@ require ( github.com/xlab/treeprint v1.0.0 github.com/yudai/gojsondiff v1.0.0 go.etcd.io/bbolt v1.3.6 - go.uber.org/multierr v1.10.0 + go.uber.org/multierr v1.11.0 go.uber.org/zap v1.27.0 golang.org/x/crypto v0.39.0 golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 @@ -155,6 +156,7 @@ require ( github.com/gabriel-vasile/mimetype v1.4.4 // indirect github.com/glycerine/go-unsnap-stream v0.0.0-20181221182339-f9677308dec2 // indirect github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31 // indirect + github.com/go-crypt/x v0.3.2 // indirect github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-sql-driver/mysql v1.6.0 // indirect @@ -180,7 +182,7 @@ require ( github.com/hashicorp/vault/sdk v0.1.8 // indirect github.com/huandu/xstrings v1.0.0 // 
indirect github.com/imdario/mergo v0.3.12 // indirect - github.com/inconshreveable/mousetrap v1.0.0 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/influxdata/gosnowflake v1.9.0 // indirect github.com/influxdata/influxdb-client-go/v2 v2.3.1-0.20210518120617-5d1fff431040 // indirect github.com/influxdata/line-protocol/v2 v2.2.1 // indirect @@ -241,8 +243,8 @@ require ( go.opentelemetry.io/otel/sdk v1.34.0 // indirect go.opentelemetry.io/otel/sdk/metric v1.34.0 // indirect go.opentelemetry.io/otel/trace v1.34.0 // indirect - go.uber.org/atomic v1.7.0 // indirect - golang.org/x/exp/typeparams v0.0.0-20221208152030-732eee02a75a // indirect + go.uber.org/atomic v1.10.0 // indirect + golang.org/x/exp/typeparams v0.0.0-20230321023759-10a507213a29 // indirect golang.org/x/mod v0.25.0 // indirect golang.org/x/net v0.41.0 // indirect golang.org/x/oauth2 v0.27.0 // indirect diff --git a/go.sum b/go.sum index 210cff31657..23dabca0f60 100644 --- a/go.sum +++ b/go.sum @@ -189,8 +189,8 @@ github.com/aws/aws-sdk-go-v2/service/sts v1.28.11 h1:HYS0csS7UJxdYRoG+bGgUYrSwVn github.com/aws/aws-sdk-go-v2/service/sts v1.28.11/go.mod h1:QXnthRM35zI92048MMwfFChjFmoufTdhtHmouwNfhhU= github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q= github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= -github.com/benbjohnson/clock v0.0.0-20161215174838-7dc76406b6d3 h1:wOysYcIdqv3WnvwqFFzrYCFALPED7qkUGaLXu359GSc= -github.com/benbjohnson/clock v0.0.0-20161215174838-7dc76406b6d3/go.mod h1:UMqtWQTnOe4byzwe7Zhwh8f8s+36uszN51sJrSIZlTE= +github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= +github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/benbjohnson/immutable v0.4.3 h1:GYHcksoJ9K6HyAUpGxwZURrbTkXA0Dh4otXGqbhdrjA= github.com/benbjohnson/immutable v0.4.3/go.mod h1:qJIKKSmdqz1tVzNtst1DZzvaqOU1onk1rc03IeM3Owk= github.com/benbjohnson/tmpl v1.0.0 h1:T5QPGJD0W6JJxyEEAlVnX3co/IkUrfHen1/42nlgAHo= @@ -236,7 +236,7 @@ github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7 github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/creack/pty v1.1.17 h1:QeVUsEDNrLBW4tMgZHvxy18sKtr6VI492kBhUfhDJNI= github.com/creack/pty v1.1.17/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= @@ -324,6 +324,10 @@ github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31/go.mod h1:Ogl1T github.com/go-chi/chi v4.1.0+incompatible h1:ETj3cggsVIY2Xao5ExCu6YhEh5MD6JTfcBzS37R260w= github.com/go-chi/chi v4.1.0+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= github.com/go-chi/chi/v5 v5.0.0/go.mod h1:BBug9lr0cqtdAhsu6R4AAdvufI0/XBzAQSsUqJpoZOs= +github.com/go-crypt/crypt v0.3.2 h1:I4i0u2g8X9bxCXIjvv19BDVXqQbddDQrURCJrOyyJos= +github.com/go-crypt/crypt v0.3.2/go.mod h1:U0YhpCizEtaVC4gVfUUN0qGn1Z6+e3at+B5uLYx/sV0= +github.com/go-crypt/x v0.3.2 h1:m2wn2+8tp28V4yDiW5NSTiyNSXnCoTs1R1+H+cAJA3M= 
+github.com/go-crypt/x v0.3.2/go.mod h1:uelN9rbD2e2eqE8KA26B9R6OQ0TdM6msWdPsoMM1ZFk= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -510,8 +514,8 @@ github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1: github.com/imdario/mergo v0.3.4/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU= github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= -github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= -github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/influxdata/cron v0.0.0-20201006132531-4bb0a200dcbe h1:7j4SdN/BvQwN6WoUq7mv0kg5U9NhnFBxPGMafYRKym0= github.com/influxdata/cron v0.0.0-20201006132531-4bb0a200dcbe/go.mod h1:XabtPPW2qsCg0tl+kjaPU+cFS+CjQXEXbT1VJvHT4og= github.com/influxdata/flux v0.197.0 h1:PCcwQc9e2FSLe0LWt9NxZQDB3o6F23WY1+yboGd8rbk= @@ -520,8 +524,8 @@ github.com/influxdata/gosnowflake v1.9.0 h1:fw6peFfTfJK+jbI98RzEEbte8F1SNBX8a91c github.com/influxdata/gosnowflake v1.9.0/go.mod h1:VYPoQhZtz3I1zh+YIMG4axm/iUxoKCTbTEQl/SYvUNM= github.com/influxdata/httprouter v1.3.1-0.20191122104820-ee83e2772f69 h1:WQsmW0fXO4ZE/lFGIE84G6rIV5SJN3P3sjIXAP1a8eU= github.com/influxdata/httprouter v1.3.1-0.20191122104820-ee83e2772f69/go.mod h1:pwymjR6SrP3gD3pRj9RJwdl1j5s3doEEV8gS4X9qSzA= -github.com/influxdata/influx-cli/v2 v2.2.1-0.20221028161653-3285a03e9e28 h1:brESt4mwZknEUuwrYcGSG6JqjTKC5M+qVKgL73ondFg= -github.com/influxdata/influx-cli/v2 v2.2.1-0.20221028161653-3285a03e9e28/go.mod h1:rvb2oIMqPs+O9gL6r0kqJ2X0tbQ8WRRtteeWfCqdhZU= +github.com/influxdata/influx-cli/v2 v2.7.1-0.20250130214939-76d1c4d9b777 h1:T5fM+L9pG0+QcSJNwL6wS8kuKHacm6SqnaASQGynWvs= +github.com/influxdata/influx-cli/v2 v2.7.1-0.20250130214939-76d1c4d9b777/go.mod h1:0R6klN42p+Vr8M7WQpkNYeOubSQ57M3hx/LxsapGXV8= github.com/influxdata/influxdb-client-go/v2 v2.3.1-0.20210518120617-5d1fff431040 h1:MBLCfcSsUyFPDJp6T7EoHp/Ph3Jkrm4EuUKLD2rUWHg= github.com/influxdata/influxdb-client-go/v2 v2.3.1-0.20210518120617-5d1fff431040/go.mod h1:vLNHdxTJkIf2mSLvGrpj8TCcISApPoXkaxP8g9uRlW8= github.com/influxdata/influxql v1.2.0 h1:EkgnTLCmaXeZKEjA6G+B7a/HH+Gl7GVLO0k2AoZbJMU= @@ -591,7 +595,6 @@ github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 h1:P6pPBnrTSX3DEVR4fDembhR github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0/go.mod h1:vmVJ0l/dxyfGW6FmdpVm2joNMFikkuWg0EoCKLGUMNw= github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0= github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= @@ -735,6 +738,7 @@ 
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk= github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= @@ -763,14 +767,14 @@ github.com/spf13/afero v1.10.0 h1:EaGW2JJh15aKOejeuJ+wpFSHnbd7GE6Wvp3TsNhb6LY= github.com/spf13/afero v1.10.0/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ= github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cobra v1.0.0 h1:6m/oheQuQ13N9ks4hubMG6BnvwOeaJrqSPLahSnczz8= -github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE= +github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I= +github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0= github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk= github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o= github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE= github.com/spf13/viper v1.6.1 h1:VPZzIkznI1YhVMRi6vNFLHSwhnhReBfgTxIPccpfdZk= github.com/spf13/viper v1.6.1/go.mod h1:t3iDnF5Jlj76alVNuyFBk5oUMCvsrkbvZK0WQdfDi5k= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= @@ -862,15 +866,15 @@ go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= -go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= -go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/atomic v1.10.0 h1:9qC72Qh0+3MqyJbAn8YU5xVq1frD8bn3JtD2oXtafVQ= +go.uber.org/atomic v1.10.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= -go.uber.org/multierr v1.10.0 h1:S0h4aNzvfcFsC3dRF1jLoaov7oRaKqRGC/pUEJ2yvPQ= -go.uber.org/multierr v1.10.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +go.uber.org/multierr v1.11.0 
h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= +go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= @@ -905,8 +909,8 @@ golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EH golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk= golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY= -golang.org/x/exp/typeparams v0.0.0-20221208152030-732eee02a75a h1:Jw5wfR+h9mnIYH+OtGT2im5wV1YGGDora5vTv/aa5bE= -golang.org/x/exp/typeparams v0.0.0-20221208152030-732eee02a75a/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= +golang.org/x/exp/typeparams v0.0.0-20230321023759-10a507213a29 h1:e7LhZmJ631l59keHP9ssC3sgSn3/oiEHKHKXDkimURY= +golang.org/x/exp/typeparams v0.0.0-20230321023759-10a507213a29/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= diff --git a/http/auth_test.go b/http/auth_test.go index 316f85a10b0..32022164cb0 100644 --- a/http/auth_test.go +++ b/http/auth_test.go @@ -858,7 +858,7 @@ func TestService_handleDeleteAuthorization(t *testing.T) { } } -func initAuthorizationService(f platformtesting.AuthorizationFields, t *testing.T) (platform.AuthorizationService, string, func()) { +func initAuthorizationService(f platformtesting.AuthorizationFields, useTokenHashing bool, t *testing.T) (platform.AuthorizationService, string, func()) { t.Helper() if t.Name() == "TestAuthorizationService_FindAuthorizations/find_authorization_by_token" { /* @@ -872,12 +872,14 @@ func initAuthorizationService(f platformtesting.AuthorizationFields, t *testing. t.Skip("HTTP authorization service does not required a user id on the authentication struct. We get the user from the session token.") } + ctx := context.Background() + store := platformtesting.NewTestInmemStore(t) tenantStore := tenant.NewStore(store) tenantStore.OrgIDGen = f.OrgIDGenerator tenantService := tenant.NewService(tenantStore) - authStore, err := authorization.NewStore(store) + authStore, err := authorization.NewStore(ctx, store, useTokenHashing) if err != nil { t.Fatal(err) } @@ -888,8 +890,6 @@ func initAuthorizationService(f platformtesting.AuthorizationFields, t *testing. svc.TokenGenerator = f.TokenGenerator svc.TimeGenerator = f.TimeGenerator - ctx := context.Background() - for _, u := range f.Users { if err := tenantService.CreateUser(ctx, u); err != nil { t.Fatalf("failed to populate users") @@ -953,12 +953,29 @@ func initAuthorizationService(f platformtesting.AuthorizationFields, t *testing. 
return &AuthorizationService{Client: httpClient}, "", done } +func runAuthorizationServiceTest( + name string, + tf func(init func(platformtesting.AuthorizationFields, *testing.T) (platform.AuthorizationService, string, func()), t *testing.T), + initWithHashing func(f platformtesting.AuthorizationFields, useTokenHashing bool, t *testing.T) (platform.AuthorizationService, string, func()), + t *testing.T, +) { + t.Helper() + for _, useHashedTokens := range []bool{false, true} { + init := func(f platformtesting.AuthorizationFields, t *testing.T) (platform.AuthorizationService, string, func()) { + return initWithHashing(f, useHashedTokens, t) + } + t.Run(fmt.Sprintf("%s/TokenHashing=%t", name, useHashedTokens), func(t *testing.T) { + tf(init, t) + }) + } +} + func TestAuthorizationService_CreateAuthorization(t *testing.T) { - platformtesting.CreateAuthorization(initAuthorizationService, t) + runAuthorizationServiceTest("TestAuthorizationService_CreateAuthorization", platformtesting.CreateAuthorization, initAuthorizationService, t) } func TestAuthorizationService_FindAuthorizationByID(t *testing.T) { - platformtesting.FindAuthorizationByID(initAuthorizationService, t) + runAuthorizationServiceTest("TestAuthorizationService_FindAuthorizationByID", platformtesting.FindAuthorizationByID, initAuthorizationService, t) } func TestAuthorizationService_FindAuthorizationByToken(t *testing.T) { @@ -967,19 +984,19 @@ func TestAuthorizationService_FindAuthorizationByToken(t *testing.T) { authorization by token string via headers or something */ t.Skip() - platformtesting.FindAuthorizationByToken(initAuthorizationService, t) + runAuthorizationServiceTest("TestAuthorizationService_FindAuthorizationByToken", platformtesting.FindAuthorizationByToken, initAuthorizationService, t) } func TestAuthorizationService_FindAuthorizations(t *testing.T) { - platformtesting.FindAuthorizations(initAuthorizationService, t) + runAuthorizationServiceTest("TestAuthorizationService_FindAuthorizations", platformtesting.FindAuthorizations, initAuthorizationService, t) } func TestAuthorizationService_DeleteAuthorization(t *testing.T) { - platformtesting.DeleteAuthorization(initAuthorizationService, t) + runAuthorizationServiceTest("TestAuthorizationService_DeleteAuthorization", platformtesting.DeleteAuthorization, initAuthorizationService, t) } func TestAuthorizationService_UpdateAuthorization(t *testing.T) { - platformtesting.UpdateAuthorization(initAuthorizationService, t) + runAuthorizationServiceTest("TestAuthorizationService_UpdateAuthorization", platformtesting.UpdateAuthorization, initAuthorizationService, t) } func MustMarshal(o interface{}) []byte { diff --git a/http/task_service_test.go b/http/task_service_test.go index e896d60f5ba..64851ee7531 100644 --- a/http/task_service_test.go +++ b/http/task_service_test.go @@ -26,6 +26,7 @@ import ( "github.com/influxdata/influxdb/v2/task/taskmodel" "github.com/influxdata/influxdb/v2/tenant" influxdbtesting "github.com/influxdata/influxdb/v2/testing" + "github.com/stretchr/testify/require" "go.uber.org/zap" "go.uber.org/zap/zaptest" ) @@ -1216,15 +1217,30 @@ func TestService_handlePostTaskLabel(t *testing.T) { } } +func runTestWithTokenHashingOptions(name string, testFunc func(bool, *testing.T), t *testing.T) { + t.Helper() + for _, useTokenHashing := range []bool{false, true} { + t.Run(fmt.Sprintf("%s/TokenHashing=%t", name, useTokenHashing), func(t *testing.T) { + testFunc(useTokenHashing, t) + }) + } +} + // Test that org name to org ID translation happens properly in the HTTP 
layer. // Regression test for https://github.com/influxdata/influxdb/issues/12089. func TestTaskHandler_CreateTaskWithOrgName(t *testing.T) { + runTestWithTokenHashingOptions("TestTaskHandler_CreateTaskWithOrgName", runTestTaskHandler_CreateTaskWithOrgName, t) +} + +func runTestTaskHandler_CreateTaskWithOrgName(useTokenHashing bool, t *testing.T) { + ctx := context.Background() + i := influxdbtesting.NewTestInmemStore(t) ts := tenant.NewService(tenant.NewStore(i)) - aStore, _ := authorization.NewStore(i) + aStore, err := authorization.NewStore(ctx, i, useTokenHashing) + require.NoError(t, err) as := authorization.NewService(aStore, ts) - ctx := context.Background() // Set up user and org. u := &influxdb.User{Name: "u"} @@ -1315,7 +1331,14 @@ func TestTaskHandler_CreateTaskWithOrgName(t *testing.T) { } func TestTaskHandler_Sessions(t *testing.T) { + runTestWithTokenHashingOptions("TestTaskHandler_Sessions", runTestTaskHandler_Sessions, t) +} + +func runTestTaskHandler_Sessions(useTokenHashing bool, t *testing.T) { t.Skip("rework these") + + ctx := context.Background() + // Common setup to get a working base for using tasks. st := influxdbtesting.NewTestInmemStore(t) @@ -1328,14 +1351,12 @@ func TestTaskHandler_Sessions(t *testing.T) { } labelService := label.NewService(labelStore) - authStore, err := authorization.NewStore(st) + authStore, err := authorization.NewStore(ctx, st, useTokenHashing) if err != nil { t.Fatal(err) } authService := authorization.NewService(authStore, tSvc) - ctx := context.Background() - // Set up user and org. u := &influxdb.User{Name: "u"} if err := tSvc.CreateUser(ctx, u); err != nil { diff --git a/kv/errors.go b/kv/errors.go index 43742b42150..1e3e0aaf56c 100644 --- a/kv/errors.go +++ b/kv/errors.go @@ -12,6 +12,7 @@ func UnexpectedIndexError(err error) *errors.Error { Code: errors.EInternal, Msg: fmt.Sprintf("unexpected error retrieving index; Err: %v", err), Op: "kv/index", + Err: err, } } diff --git a/kv/migration/all/0003_task_owners_test.go b/kv/migration/all/0003_task_owners_test.go index fe22c1c4d81..579deba77dc 100644 --- a/kv/migration/all/0003_task_owners_test.go +++ b/kv/migration/all/0003_task_owners_test.go @@ -10,10 +10,11 @@ import ( ) func Test_(t *testing.T) { + // Token hashing isn't applicable to this test because we never migrate far enough up. 
ctx, cancelFunc := context.WithCancel(context.Background()) defer cancelFunc() - ts := newService(t, ctx, 2) + ts := newService(t, ctx, 2, false) taskBucket := []byte("tasksv1") id := "05da585043e02000" diff --git a/kv/migration/all/0015_record-shard-group-durations-in-bucket-metadata_test.go b/kv/migration/all/0015_record-shard-group-durations-in-bucket-metadata_test.go index 66dbd40f942..a232a7fdbef 100644 --- a/kv/migration/all/0015_record-shard-group-durations-in-bucket-metadata_test.go +++ b/kv/migration/all/0015_record-shard-group-durations-in-bucket-metadata_test.go @@ -3,6 +3,7 @@ package all import ( "context" "encoding/json" + "fmt" "testing" "time" @@ -18,11 +19,21 @@ func TestMigration_ShardGroupDuration(t *testing.T) { } func testRepairMissingShardGroupDurations(t *testing.T, migrationNum int) { + t.Helper() + for _, useTokenHashing := range []bool{false, true} { + name := fmt.Sprintf("RepairMissingShardGroupDurations/migrationNum=%d/useTokenHashing=%t", migrationNum, useTokenHashing) + t.Run(name, func(t *testing.T) { + testRepairMissingShardGroupDurationsWithTokenHashing(t, migrationNum, useTokenHashing) + }) + } +} + +func testRepairMissingShardGroupDurationsWithTokenHashing(t *testing.T, migrationNum int, useTokenHashing bool) { ctx, cancelFunc := context.WithCancel(context.Background()) defer cancelFunc() // Run up to the migration before the migration-under-test. - ts := newService(t, ctx, migrationNum-2) + ts := newService(t, ctx, migrationNum-2, useTokenHashing) // Seed some buckets. buckets := []*influxdb.Bucket{ diff --git a/kv/migration/all/0016_add-annotations-notebooks-to-oper-token_test.go b/kv/migration/all/0016_add-annotations-notebooks-to-oper-token_test.go index 270b2ebb688..a7e0c750c7a 100644 --- a/kv/migration/all/0016_add-annotations-notebooks-to-oper-token_test.go +++ b/kv/migration/all/0016_add-annotations-notebooks-to-oper-token_test.go @@ -12,11 +12,15 @@ import ( ) func TestMigration_AnnotationsNotebooksOperToken(t *testing.T) { + runTestWithTokenHashing("TestMigration_AnnotationsNotebooksOperToken", runTestMigration_AnnotationsNotebooksOperToken, t) +} + +func runTestMigration_AnnotationsNotebooksOperToken(useTokenHashing bool, t *testing.T) { ctx, cancelFunc := context.WithCancel(context.Background()) defer cancelFunc() // Run up to migration 15. - ts := newService(t, ctx, 15) + ts := newService(t, ctx, 15, useTokenHashing) // Auth bucket contains the authorizations AKA tokens authBucket := []byte("authorizationsv1") diff --git a/kv/migration/all/0017_add-annotations-notebooks-to-all-access-tokens_test.go b/kv/migration/all/0017_add-annotations-notebooks-to-all-access-tokens_test.go index 614db924125..f21e881e8b6 100644 --- a/kv/migration/all/0017_add-annotations-notebooks-to-all-access-tokens_test.go +++ b/kv/migration/all/0017_add-annotations-notebooks-to-all-access-tokens_test.go @@ -13,11 +13,15 @@ import ( ) func TestMigration_AnnotationsNotebooksAllAccessToken(t *testing.T) { + runTestWithTokenHashing("TestMigration_AnnotationsNotebooksAllAccessToken", runTestMigration_AnnotationsNotebooksAllAccessToken, t) +} + +func runTestMigration_AnnotationsNotebooksAllAccessToken(useTokenHashing bool, t *testing.T) { ctx, cancelFunc := context.WithCancel(context.Background()) defer cancelFunc() // Run up to migration 16. 
- ts := newService(t, ctx, 16) + ts := newService(t, ctx, 16, useTokenHashing) // Auth bucket contains the authorizations AKA tokens authBucket := []byte("authorizationsv1") diff --git a/kv/migration/all/0019_add-remotes-replications-to-tokens_test.go b/kv/migration/all/0019_add-remotes-replications-to-tokens_test.go index 54327e16dbf..8f026adfe55 100644 --- a/kv/migration/all/0019_add-remotes-replications-to-tokens_test.go +++ b/kv/migration/all/0019_add-remotes-replications-to-tokens_test.go @@ -12,11 +12,15 @@ import ( ) func TestMigration_RemotesReplicationsOperToken(t *testing.T) { + runTestWithTokenHashing("TestMigration_RemotesReplicationsOperToken", runTestMigration_RemotesReplicationsOperToken, t) +} + +func runTestMigration_RemotesReplicationsOperToken(useTokenHashing bool, t *testing.T) { ctx, cancelFunc := context.WithCancel(context.Background()) defer cancelFunc() // Run up to migration 18. - ts := newService(t, ctx, 18) + ts := newService(t, ctx, 18, useTokenHashing) // Auth bucket contains the authorizations AKA tokens authBucket := []byte("authorizationsv1") @@ -131,11 +135,15 @@ func TestMigration_RemotesReplicationsOperToken(t *testing.T) { } func TestMigration_RemotesReplicationsAllAccessToken(t *testing.T) { + runTestWithTokenHashing("TestMigration_RemotesReplicationsAllAccessToken", runTestMigration_RemotesReplicationsAllAccessToken, t) +} + +func runTestMigration_RemotesReplicationsAllAccessToken(useTokenHashing bool, t *testing.T) { ctx, cancelFunc := context.WithCancel(context.Background()) defer cancelFunc() // Run up to migration 18. - ts := newService(t, ctx, 18) + ts := newService(t, ctx, 18, useTokenHashing) // Auth bucket contains the authorizations AKA tokens authBucket := []byte("authorizationsv1") diff --git a/kv/migration/all/0021_create-authorization-hashed-index-v1.go b/kv/migration/all/0021_create-authorization-hashed-index-v1.go new file mode 100644 index 00000000000..b4a0d48664f --- /dev/null +++ b/kv/migration/all/0021_create-authorization-hashed-index-v1.go @@ -0,0 +1,8 @@ +package all + +import "github.com/influxdata/influxdb/v2/kv/migration" + +var Migration0021_CreateAuthorizationHashedIndexv1 = migration.CreateBuckets( + "create authorizationhashedindexv1 bucket", + []byte("authorizationhashedindexv1"), +) diff --git a/kv/migration/all/all.go b/kv/migration/all/all.go index 2ee8878d21a..c23667e5d75 100644 --- a/kv/migration/all/all.go +++ b/kv/migration/all/all.go @@ -47,5 +47,7 @@ var Migrations = [...]migration.Spec{ Migration0019_AddRemotesReplicationsToTokens, // add_remotes_replications_metrics_buckets Migration0020_Add_remotes_replications_metrics_buckets, + // create bucket for hashed token index + Migration0021_CreateAuthorizationHashedIndexv1, // {{ do_not_edit . 
}} } diff --git a/kv/migration/all/test_service_test.go b/kv/migration/all/test_service_test.go index 1487db26750..998390fcc23 100644 --- a/kv/migration/all/test_service_test.go +++ b/kv/migration/all/test_service_test.go @@ -2,6 +2,7 @@ package all import ( "context" + "fmt" "testing" "github.com/benbjohnson/clock" @@ -11,6 +12,7 @@ import ( "github.com/influxdata/influxdb/v2/kv" "github.com/influxdata/influxdb/v2/kv/migration" "github.com/influxdata/influxdb/v2/tenant" + "github.com/stretchr/testify/require" "go.uber.org/zap/zaptest" ) @@ -23,7 +25,16 @@ type testService struct { Clock clock.Clock } -func newService(t *testing.T, ctx context.Context, endMigration int) *testService { +func runTestWithTokenHashing(name string, testFunc func(bool, *testing.T), t *testing.T) { + t.Helper() + for _, useTokenHashing := range []bool{false, true} { + t.Run(fmt.Sprintf("%s/TokenHashing=%t", name, useTokenHashing), func(t *testing.T) { + testFunc(useTokenHashing, t) + }) + } +} + +func newService(t *testing.T, ctx context.Context, endMigration int, useTokenHashing bool) *testService { t.Helper() var ( @@ -46,10 +57,18 @@ func newService(t *testing.T, ctx context.Context, endMigration int) *testServic store := tenant.NewStore(ts.Store) tenantSvc := tenant.NewService(store) - authStore, err := authorization.NewStore(ts.Store) - if err != nil { - t.Fatal(err) + var ignoreMissingHashIndex bool + if endMigration <= 21 { + ignoreMissingHashIndex = true } + missingHashIndexOption := authorization.WithIgnoreMissingHashIndex(ignoreMissingHashIndex) + authStore, err := authorization.NewStore(ctx, ts.Store, useTokenHashing, missingHashIndexOption) + if useTokenHashing && ignoreMissingHashIndex { + require.ErrorIs(t, err, kv.ErrBucketNotFound) + t.Skipf("migrationLevel=%d with useTokenHashing=%t is not a valid combination", endMigration, useTokenHashing) + } + require.NoError(t, err) + authSvc := authorization.NewService(authStore, tenantSvc) ts.Service = kv.NewService(logger, ts.Store, tenantSvc) diff --git a/kv/task_test.go b/kv/task_test.go index dc9f1326f83..e4beb870fd4 100644 --- a/kv/task_test.go +++ b/kv/task_test.go @@ -4,6 +4,7 @@ import ( "bytes" "context" "encoding/json" + "fmt" "testing" "time" @@ -26,20 +27,34 @@ import ( "go.uber.org/zap/zaptest" ) +func runTestWithTokenHashing(name string, testFunc func(bool, *testing.T), t *testing.T) { + t.Helper() + for _, useTokenHashing := range []bool{false, true} { + t.Run(fmt.Sprintf("%s/TokenHashing=%t", name, useTokenHashing), func(t *testing.T) { + testFunc(useTokenHashing, t) + }) + } +} + func TestBoltTaskService(t *testing.T) { + runTestWithTokenHashing("TestBoltTaskService", runTestBoltTaskService, t) +} + +func runTestBoltTaskService(useTokenHashing bool, t *testing.T) { servicetest.TestTaskService( t, func(t *testing.T) (*servicetest.System, context.CancelFunc) { + ctx, cancelFunc := context.WithCancel(context.Background()) + store, close := itesting.NewTestBoltStore(t) tenantStore := tenant.NewStore(store) ts := tenant.NewService(tenantStore) - authStore, err := authorization.NewStore(store) + authStore, err := authorization.NewStore(ctx, store, useTokenHashing) require.NoError(t, err) authSvc := authorization.NewService(authStore, ts) - ctx, cancelFunc := context.WithCancel(context.Background()) service := kv.NewService(zaptest.NewLogger(t), store, ts, kv.ServiceConfig{ FluxLanguageService: fluxlang.DefaultService, }) @@ -72,7 +87,7 @@ type testService struct { Clock clock.Clock } -func newService(t *testing.T, ctx context.Context, c 
clock.Clock) *testService { +func newService(t *testing.T, ctx context.Context, c clock.Clock, useTokenHashing bool) *testService { t.Helper() if c == nil { @@ -86,16 +101,13 @@ func newService(t *testing.T, ctx context.Context, c clock.Clock) *testService { ) store = itesting.NewTestInmemStore(t) - if err != nil { - t.Fatal("failed to create InmemStore", err) - } ts.Store = store tenantStore := tenant.NewStore(store) tenantSvc := tenant.NewService(tenantStore) - authStore, err := authorization.NewStore(store) + authStore, err := authorization.NewStore(ctx, store, useTokenHashing) require.NoError(t, err) authSvc := authorization.NewService(authStore, tenantSvc) @@ -135,10 +147,14 @@ func newService(t *testing.T, ctx context.Context, c clock.Clock) *testService { } func TestRetrieveTaskWithBadAuth(t *testing.T) { + runTestWithTokenHashing("TestRetrieveTaskWithBadAuth", runTestRetrieveTaskWithBadAuth, t) +} + +func runTestRetrieveTaskWithBadAuth(useTokenHashing bool, t *testing.T) { ctx, cancelFunc := context.WithCancel(context.Background()) defer cancelFunc() - ts := newService(t, ctx, nil) + ts := newService(t, ctx, nil, useTokenHashing) ctx = icontext.SetAuthorizer(ctx, &ts.Auth) @@ -208,13 +224,17 @@ func TestRetrieveTaskWithBadAuth(t *testing.T) { } func TestService_UpdateTask_InactiveToActive(t *testing.T) { + runTestWithTokenHashing("TestService_UpdateTask_InactiveToActive", runTestService_UpdateTask_InactiveToActive, t) +} + +func runTestService_UpdateTask_InactiveToActive(useTokenHashing bool, t *testing.T) { ctx, cancelFunc := context.WithCancel(context.Background()) defer cancelFunc() c := clock.NewMock() c.Set(time.Unix(1000, 0)) - ts := newService(t, ctx, c) + ts := newService(t, ctx, c, useTokenHashing) ctx = icontext.SetAuthorizer(ctx, &ts.Auth) @@ -257,6 +277,10 @@ func TestService_UpdateTask_InactiveToActive(t *testing.T) { } func TestTaskRunCancellation(t *testing.T) { + runTestWithTokenHashing("TestTaskRunCancellation", runTestTaskRunCancellation, t) +} + +func runTestTaskRunCancellation(useTokenHashing bool, t *testing.T) { store, closeSvc := itesting.NewTestBoltStore(t) defer closeSvc() @@ -266,7 +290,7 @@ func TestTaskRunCancellation(t *testing.T) { tenantStore := tenant.NewStore(store) tenantSvc := tenant.NewService(tenantStore) - authStore, err := authorization.NewStore(store) + authStore, err := authorization.NewStore(ctx, store, useTokenHashing) require.NoError(t, err) authSvc := authorization.NewService(authStore, tenantSvc) @@ -332,13 +356,17 @@ func TestTaskRunCancellation(t *testing.T) { } func TestService_UpdateTask_RecordLatestSuccessAndFailure(t *testing.T) { + runTestWithTokenHashing("TestService_UpdateTask_RecordLatestSuccessAndFailure", runTestService_UpdateTask_RecordLatestSuccessAndFailure, t) +} + +func runTestService_UpdateTask_RecordLatestSuccessAndFailure(useTokenHashing bool, t *testing.T) { ctx, cancelFunc := context.WithCancel(context.Background()) defer cancelFunc() c := clock.NewMock() c.Set(time.Unix(1000, 0)) - ts := newService(t, ctx, c) + ts := newService(t, ctx, c, useTokenHashing) ctx = icontext.SetAuthorizer(ctx, &ts.Auth) diff --git a/pkg/crypt/algorithm/influxdb2/const.go b/pkg/crypt/algorithm/influxdb2/const.go new file mode 100644 index 00000000000..6bc3498301a --- /dev/null +++ b/pkg/crypt/algorithm/influxdb2/const.go @@ -0,0 +1,18 @@ +package influxdb2 + +const ( + // EncodingFmt is the encoding format for this algorithm. 
+ EncodingFmt = "$%s$%s" + + // EncodingSections is the number of sections in EncodingFmt delimited by crypt.Delimiter. + EncodingSections = 2 + + // AlgName is the name for this algorithm. + AlgName = "influxdb2" + + // VariantIdentifierSHA256 is the identifier used in SHA256 variants of this algorithm. + VariantIdentifierSHA256 = "influxdb2-sha256" + + // VariantIdentifierSHA512 is the identifier used in SHA512 variants of this algorithm. + VariantIdentifierSHA512 = "influxdb2-sha512" +) diff --git a/pkg/crypt/algorithm/influxdb2/decoder.go b/pkg/crypt/algorithm/influxdb2/decoder.go new file mode 100644 index 00000000000..7e0fbc51523 --- /dev/null +++ b/pkg/crypt/algorithm/influxdb2/decoder.go @@ -0,0 +1,94 @@ +package influxdb2 + +import ( + "fmt" + "strings" + + "github.com/go-crypt/crypt" + "github.com/go-crypt/crypt/algorithm" +) + +// RegisterDecoder registers all influxdb2 decoders. +func RegisterDecoder(r algorithm.DecoderRegister) error { + for _, variant := range AllVariants { + if err := RegisterDecoderVariant(r, variant); err != nil { + return err + } + } + return nil } + +// RegisterDecoderSHA256 registers specifically the SHA256 decoder variant with the algorithm.DecoderRegister. +func RegisterDecoderSHA256(r algorithm.DecoderRegister) (err error) { + return RegisterDecoderVariant(r, VariantSHA256) +} + +// RegisterDecoderSHA512 registers specifically the SHA512 decoder variant with the algorithm.DecoderRegister. +func RegisterDecoderSHA512(r algorithm.DecoderRegister) (err error) { + return RegisterDecoderVariant(r, VariantSHA512) +} + +// RegisterDecoderVariant registers the specified decoder variant. +func RegisterDecoderVariant(r algorithm.DecoderRegister, variant Variant) error { + if err := r.RegisterDecodeFunc(variant.Prefix(), DecodeVariant(variant)); err != nil { + return fmt.Errorf("error registering decoder variant %s: %w", variant.Prefix(), err) + } + return nil +} + +// DecodeVariant decodes the encoded digest into an algorithm.Digest provided it matches the provided influxdb2.Variant. If +// influxdb2.VariantNone is used, all variants can be decoded. +func DecodeVariant(v Variant) func(encodedDigest string) (digest algorithm.Digest, err error) { + return func(encodedDigest string) (digest algorithm.Digest, err error) { + var ( + parts []string + variant Variant + ) + + if variant, parts, err = decoderParts(encodedDigest); err != nil { + return nil, fmt.Errorf(algorithm.ErrFmtDigestDecode, AlgName, err) + } + + if v != VariantNone && v != variant { + return nil, fmt.Errorf(algorithm.ErrFmtDigestDecode, AlgName, fmt.Errorf("the '%s' variant cannot be decoded, only the '%s' variant can be", variant.Prefix(), v.Prefix())) + } + + if digest, err = decode(variant, parts); err != nil { + return nil, fmt.Errorf(algorithm.ErrFmtDigestDecode, AlgName, err) + } + + return digest, nil + } +} + +func decoderParts(encodedDigest string) (Variant, []string, error) { + // First section is empty, hence the +1.
+ parts := strings.SplitN(encodedDigest, crypt.Delimiter, EncodingSections+1) + + if len(parts) != EncodingSections+1 { + return VariantNone, nil, algorithm.ErrEncodedHashInvalidFormat + } + + variant := NewVariant(parts[1]) + if variant == VariantNone { + return variant, nil, fmt.Errorf("hash identifier is not valid for %s digest: %w", AlgName, algorithm.ErrEncodedHashInvalidIdentifier) + } + + return variant, parts[2:], nil +} + +func decode(variant Variant, parts []string) (digest algorithm.Digest, err error) { + decoded := &Digest{ + Variant: variant, + } + + if decoded.key, err = decoded.Variant.Decode(parts[0]); err != nil { + return nil, fmt.Errorf("%w: %w", algorithm.ErrEncodedHashKeyEncoding, err) + } + + if len(decoded.key) == 0 { + return nil, fmt.Errorf("key has 0 bytes: %w", algorithm.ErrEncodedHashKeyEncoding) + } + + return decoded, nil +} diff --git a/pkg/crypt/algorithm/influxdb2/digest.go b/pkg/crypt/algorithm/influxdb2/digest.go new file mode 100644 index 00000000000..097db2eaf43 --- /dev/null +++ b/pkg/crypt/algorithm/influxdb2/digest.go @@ -0,0 +1,82 @@ +package influxdb2 + +import ( + "crypto/subtle" + "errors" + "fmt" + + "github.com/go-crypt/crypt/algorithm" +) + +// ErrDigestInvalid is an error returned when a hash digest has invalid or unsupported properties. It is NOT +// returned on token or password mismatches. It is equivalent to go-crypt's algorithm.ErrPasswordInvalid +// error, but with a message that makes more sense for our usage with tokens. +var ErrDigestInvalid = errors.New("hashed token or password is invalid") + +// NewSHA256Digest creates a new influxdb2.Digest using SHA256 for the hash. +func NewSHA256Digest(password string) (digest Digest) { + digest = Digest{ + Variant: VariantSHA256, + key: []byte(password), + } + + return digest +} + +// Digest is an algorithm.Digest which handles influxdb2 matching. +type Digest struct { + Variant Variant + + key []byte +} + +// Match returns true if the string password matches the current influxdb2.Digest. +func (d *Digest) Match(password string) bool { + return d.MatchBytes([]byte(password)) +} + +// MatchBytes returns true if the []byte passwordBytes matches the current influxdb2.Digest. +func (d *Digest) MatchBytes(passwordBytes []byte) bool { + m, _ := d.MatchBytesAdvanced(passwordBytes) + return m +} + +// MatchAdvanced is the same as Match except if there is an error it returns that as well. +func (d *Digest) MatchAdvanced(password string) (match bool, err error) { + return d.MatchBytesAdvanced([]byte(password)) +} + +// MatchBytesAdvanced is the same as MatchBytes except if there is an error it returns that as well. +func (d *Digest) MatchBytesAdvanced(passwordBytes []byte) (match bool, err error) { + if len(d.key) == 0 { + return false, fmt.Errorf(algorithm.ErrFmtDigestMatch, AlgName, fmt.Errorf("%w: key has 0 bytes", ErrDigestInvalid)) + } + + input := d.Variant.Hash(passwordBytes) + return subtle.ConstantTimeCompare(d.key, input) == 1, nil +} + +// Encode returns the encoded form of this influxdb2.Digest. +func (d *Digest) Encode() string { + return fmt.Sprintf(EncodingFmt, d.Variant.Prefix(), d.Variant.Encode(d.Key())) +} + +// String returns the storable format of the influxdb2.Digest encoded hash.
+func (d *Digest) String() string { + return d.Encode() +} + +func (d *Digest) defaults() { + switch d.Variant { + case VariantSHA256, VariantSHA512: + break + default: + d.Variant = DefaultVariant + } +} + +// Key returns the raw key bytes held by the Digest for situations where the underlying value is required, such as +// the plaintext token of a Digest produced by Hasher.Hash. +func (d *Digest) Key() []byte { + return d.key +} diff --git a/pkg/crypt/algorithm/influxdb2/doc.go b/pkg/crypt/algorithm/influxdb2/doc.go new file mode 100644 index 00000000000..34eb0788e2b --- /dev/null +++ b/pkg/crypt/algorithm/influxdb2/doc.go @@ -0,0 +1,2 @@ +// Package influxdb2 implements github.com/go-crypt/crypt interfaces with variants of InfluxDB 2.x token hashing. +package influxdb2 diff --git a/pkg/crypt/algorithm/influxdb2/hasher.go b/pkg/crypt/algorithm/influxdb2/hasher.go new file mode 100644 index 00000000000..6aebfe42eaa --- /dev/null +++ b/pkg/crypt/algorithm/influxdb2/hasher.go @@ -0,0 +1,76 @@ +package influxdb2 + +import ( + "github.com/go-crypt/crypt/algorithm" +) + +// New returns a *influxdb2.Hasher configured with the provided options. +func New(opts ...Opt) (hasher *Hasher, err error) { + hasher = &Hasher{} + + if err = hasher.WithOptions(opts...); err != nil { + return nil, err + } + + if err = hasher.Validate(); err != nil { + return nil, err + } + + return hasher, nil +} + +// Hasher is a crypt.Hash for influxdb2 token hashing which can be initialized via influxdb2.New using a functional options pattern. +type Hasher struct { + variant Variant +} + +// WithOptions applies the functional options provided as influxdb2.Opt values to the influxdb2.Hasher. +func (h *Hasher) WithOptions(opts ...Opt) (err error) { + for _, opt := range opts { + if err = opt(h); err != nil { + return err + } + } + + return nil +} + +// Variant returns which variant this hasher implements. +func (h *Hasher) Variant() Variant { + return h.variant +} + +// Validate checks the hasher configuration to ensure it's valid. This should be used when the influxdb2.Hasher is going +// to be reused, and it should be used in conjunction with MustHash. +func (h *Hasher) Validate() (err error) { + return nil +} + +// Hash performs the hashing operation on a password or token. +// It then returns an influxdb2.Digest and error. +func (h *Hasher) Hash(password string) (hashed algorithm.Digest, err error) { + d := &Digest{ + Variant: h.variant, + key: []byte(password), + } + + d.defaults() + + return d, nil +} + +// HashWithSalt is an overload of Hasher.Hash that also accepts a salt. The salt is ignored since we can't support +// salted hashes because we need to look up the auth record by the token. +func (h *Hasher) HashWithSalt(password string, salt []byte) (hashed algorithm.Digest, err error) { + return h.Hash(password) +} + +// MustHash overloads the Hash method and panics if the error is not nil. If you use this method, it's recommended to +// call the Validate method first or handle the panic appropriately.
+func (h *Hasher) MustHash(password string) (hashed algorithm.Digest) { + if d, err := h.Hash(password); err != nil { + panic(err) + } else { + return d + } +} diff --git a/pkg/crypt/algorithm/influxdb2/influxdb2_test.go b/pkg/crypt/algorithm/influxdb2/influxdb2_test.go new file mode 100644 index 00000000000..12a59da5140 --- /dev/null +++ b/pkg/crypt/algorithm/influxdb2/influxdb2_test.go @@ -0,0 +1,64 @@ +package influxdb2 + +import ( + "testing" + + "github.com/go-crypt/crypt" + "github.com/go-crypt/crypt/algorithm" + "github.com/stretchr/testify/require" +) + +func TestInfluxDB2Outputs(t *testing.T) { + hasherSha256, err := New(WithVariant(VariantSHA256)) + require.NoError(t, err) + + hasherSha512, err := New(WithVariant(VariantSHA512)) + require.NoError(t, err) + + cases := []struct { + desc string + token string + phc string + hasher *Hasher + }{ + { + "ShouldValidateWithSHA256", + "c27cb2033ab304629d32e1dbcd0ca7186322d3be98af5dd4c329ab800ef85d73", + "$influxdb2-sha256$kMrC1MoFhWvvKSgyqpMaLuo2O3LINv4_XByCSkfV9K0=", + hasherSha256, + }, + { + "ShouldValidateWithSHA512", + "322be1195f22da43a88e5ef1e856b707d6b7d41a6068feec343decbc5d784e50", + "$influxdb2-sha512$ffgzLTTAyWBDczT0kKzwLzLjlemQh6UiFvqIA0CPd-B7qgqAetWEuKRI9qOLeE4ak6mxcxwthyKUO40sHf5V5w==", + hasherSha512, + }, + } + + decoder := crypt.NewDecoder() + require.NoError(t, RegisterDecoder(decoder)) + + for _, tc := range cases { + t.Run(tc.desc, func(t *testing.T) { + { + // Will the token match the PHC format? + var digest algorithm.Digest + digest, err := decoder.Decode(tc.phc) + require.NoError(t, err) + require.True(t, digest.Match(tc.token)) + + // Will an incorrect token not match? + require.False(t, digest.Match("WrongToken")) + } + + { + // Is hashing the token deterministic? + digest, err := tc.hasher.Hash(tc.token) + require.NoError(t, err) + phc := digest.Encode() + require.Equal(t, tc.phc, phc) + + } + }) + } +} diff --git a/pkg/crypt/algorithm/influxdb2/opts.go b/pkg/crypt/algorithm/influxdb2/opts.go new file mode 100644 index 00000000000..9a5a8ebf51a --- /dev/null +++ b/pkg/crypt/algorithm/influxdb2/opts.go @@ -0,0 +1,45 @@ +package influxdb2 + +import ( + "fmt" + + "github.com/go-crypt/crypt/algorithm" +) + +// Opt describes the functional option pattern for the influxdb2.Hasher. +type Opt func(h *Hasher) (err error) + +// WithVariant configures the influxdb2.Variant of the resulting influxdb2.Digest. +// The default is influxdb2.DefaultVariant. +func WithVariant(variant Variant) Opt { + return func(h *Hasher) (err error) { + switch variant { + case VariantNone, VariantSHA256, VariantSHA512: + h.variant = variant + + return nil + default: + return fmt.Errorf(algorithm.ErrFmtHasherValidation, AlgName, fmt.Errorf("%w: variant '%d' is invalid", algorithm.ErrParameterInvalid, variant)) + } + } +} + +// WithVariantName uses the variant name or identifier to configure the influxdb2.Variant of the resulting influxdb2.Digest. +// The default is influxdb2.DefaultVariant.
+func WithVariantName(identifier string) Opt { + return func(h *Hasher) (err error) { + if identifier == "" { + return nil + } + + variant := NewVariant(identifier) + + if variant == VariantNone { + return fmt.Errorf(algorithm.ErrFmtHasherValidation, AlgName, fmt.Errorf("%w: variant identifier '%s' is invalid", algorithm.ErrParameterInvalid, identifier)) + } + + h.variant = variant + + return nil + } +} diff --git a/pkg/crypt/algorithm/influxdb2/parameters.go b/pkg/crypt/algorithm/influxdb2/parameters.go new file mode 100644 index 00000000000..d35243d5e55 --- /dev/null +++ b/pkg/crypt/algorithm/influxdb2/parameters.go @@ -0,0 +1,54 @@ +package influxdb2 + +import ( + "fmt" + "strconv" + "strings" +) + +// Parameter is a key value pair. +type Parameter struct { + Key string + Value string +} + +// Int converts the Value to an int using strconv.Atoi. +func (p Parameter) Int() (int, error) { + return strconv.Atoi(p.Value) +} + +const ( + // ParameterDefaultItemSeparator is the default item separator. + ParameterDefaultItemSeparator = "," + + // ParameterDefaultKeyValueSeparator is the default key value separator. + ParameterDefaultKeyValueSeparator = "=" +) + +// DecodeParameterStr is an alias for DecodeParameterStrAdvanced using item separator and key value separator +// of ',' and '=' respectively. +func DecodeParameterStr(input string) (opts []Parameter, err error) { + return DecodeParameterStrAdvanced(input, ParameterDefaultItemSeparator, ParameterDefaultKeyValueSeparator) +} + +// DecodeParameterStrAdvanced decodes parameter strings into a []Parameter where sepItem separates each parameter, and sepKV separates the key and value. +func DecodeParameterStrAdvanced(input string, sepItem, sepKV string) (opts []Parameter, err error) { + if input == "" { + return nil, fmt.Errorf("empty strings can't be decoded to parameters") + } + + o := strings.Split(input, sepItem) + + opts = make([]Parameter, len(o)) + + for i, joined := range o { + kv := strings.SplitN(joined, sepKV, 2) + if len(kv) != 2 { + return nil, fmt.Errorf("parameter pair '%s' is not properly encoded: does not contain kv separator '%s'", joined, sepKV) + } + + opts[i] = Parameter{Key: kv[0], Value: kv[1]} + } + + return opts, nil +} diff --git a/pkg/crypt/algorithm/influxdb2/variant.go b/pkg/crypt/algorithm/influxdb2/variant.go new file mode 100644 index 00000000000..5f9815b4d77 --- /dev/null +++ b/pkg/crypt/algorithm/influxdb2/variant.go @@ -0,0 +1,95 @@ +package influxdb2 + +import ( + "crypto/sha256" + "crypto/sha512" + "encoding/base64" + "fmt" + + "github.com/go-crypt/crypt/algorithm" +) + +// NewVariant converts an identifier string to a influxdb2.Variant. +func NewVariant(identifier string) (variant Variant) { + switch identifier { + case VariantIdentifierSHA256: + return VariantSHA256 + case VariantIdentifierSHA512: + return VariantSHA512 + default: + return VariantNone + } +} + +// Variant is a variant of the influxdb2.Digest. +type Variant int + +const ( + // VariantNone is a variant of the influxdb2.Digest which is unknown. + VariantNone Variant = iota + + // VariantSHA256 is a variant of the influxdb2.Digest which uses SHA256 as the hash. + VariantSHA256 + + // VariantSHA512 is a variant of the influxdb2.Digest which uses SHA512 as the hash. + VariantSHA512 +) + +const DefaultVariant = VariantSHA256 + +var AllVariants []Variant = []Variant{ + VariantSHA256, + VariantSHA512, +} + +// Prefix returns the influxdb2.Variant prefix identifier. 
+func (v Variant) Prefix() (prefix string) { + switch v { + case VariantSHA256: + return VariantIdentifierSHA256 + case VariantSHA512: + return VariantIdentifierSHA512 + default: + return + } +} + +// RegisterDecoder registers the variant with a decoder. +func (v Variant) RegisterDecoder(r algorithm.DecoderRegister) error { + switch v { + case VariantSHA256: + return RegisterDecoderSHA256(r) + case VariantSHA512: + return RegisterDecoderSHA512(r) + default: + return fmt.Errorf("RegisterDecoder with invalid variant %v: %w", v, algorithm.ErrParameterInvalid) + } +} + +// Decode performs the decode operation for this influxdb2.Variant. +func (v Variant) Decode(src string) (dst []byte, err error) { + switch v { + case VariantSHA256, VariantSHA512: + return base64.URLEncoding.DecodeString(src) + default: + return []byte(src), nil + } +} + +// Encode performs the encode operation for this influxdb2.Variant. +func (v Variant) Encode(src []byte) (dst string) { + return base64.URLEncoding.EncodeToString(v.Hash(src)) +} + +// Hash performs the hashing operation on the input, returning the raw binary. +func (v Variant) Hash(input []byte) []byte { + switch v { + case VariantSHA256: + h := sha256.Sum256(input) + return h[:] + case VariantSHA512: + h := sha512.Sum512(input) + return h[:] + } + return nil +} diff --git a/query/logger.go b/query/logger.go index 7ec205cf69d..58a7e83b4ec 100644 --- a/query/logger.go +++ b/query/logger.go @@ -47,6 +47,7 @@ func (q *Log) Redact() { *auth = *q.ProxyRequest.Request.Authorization // Redact authorization token auth.Token = "" + auth.HashedToken = "" // Apply redacted authorization request.Request.Authorization = auth diff --git a/replications/internal/queue_management_test.go b/replications/internal/queue_management_test.go index dc3b0d63583..23d5cc134ad 100644 --- a/replications/internal/queue_management_test.go +++ b/replications/internal/queue_management_test.go @@ -97,12 +97,14 @@ func TestEnqueueScan(t *testing.T) { } // Check queue position + // The sleep is not ideal, but we need to let the queue process some before closing it and there's not + // currently a great way to check if it has run any. 
+ time.Sleep(50 * time.Millisecond) closeRq(rq) scan, err := rq.queue.NewScanner() t.Cleanup(func() { require.NoError(t, rq.queue.Close()) }) - if tt.writeFuncReturn == nil { require.ErrorIs(t, err, io.EOF) } else { diff --git a/task/backend/analytical_storage_test.go b/task/backend/analytical_storage_test.go index e9bdfd15310..f0a55030880 100644 --- a/task/backend/analytical_storage_test.go +++ b/task/backend/analytical_storage_test.go @@ -2,6 +2,7 @@ package backend_test import ( "context" + "fmt" "os" "testing" "time" @@ -35,7 +36,20 @@ import ( "go.uber.org/zap/zaptest" ) +func runTestWithTokenHashing(name string, testFunc func(bool, *testing.T), t *testing.T) { + t.Helper() + for _, useTokenHashing := range []bool{false, true} { + t.Run(fmt.Sprintf("%s/TokenHashing=%t", name, useTokenHashing), func(t *testing.T) { + testFunc(useTokenHashing, t) + }) + } +} + func TestAnalyticalStore(t *testing.T) { + runTestWithTokenHashing("TestAnalyticalStore", runTestAnalyticalStore, t) +} + +func runTestAnalyticalStore(useTokenHashing bool, t *testing.T) { t.Skip("https://github.com/influxdata/influxdb/issues/22920") servicetest.TestTaskService( t, @@ -50,7 +64,7 @@ func TestAnalyticalStore(t *testing.T) { tenantStore := tenant.NewStore(store) ts := tenant.NewService(tenantStore) - authStore, err := authorization.NewStore(store) + authStore, err := authorization.NewStore(ctx, store, useTokenHashing) require.NoError(t, err) authSvc := authorization.NewService(authStore, ts) diff --git a/task/backend/executor/executor_test.go b/task/backend/executor/executor_test.go index 2c62e1f19c7..a74996c603c 100644 --- a/task/backend/executor/executor_test.go +++ b/task/backend/executor/executor_test.go @@ -56,7 +56,16 @@ type tes struct { tc testCreds } -func taskExecutorSystem(t *testing.T) tes { +func runTestWithTokenHashing(name string, testFunc func(bool, *testing.T), t *testing.T) { + t.Helper() + for _, useTokenHashing := range []bool{false, true} { + t.Run(fmt.Sprintf("%s/TokenHashing=%t", name, useTokenHashing), func(t *testing.T) { + testFunc(useTokenHashing, t) + }) + } +} + +func taskExecutorSystem(useTokenHashing bool, t *testing.T) tes { var ( aqs = newFakeQueryService() qs = query.QueryServiceBridge{ @@ -77,7 +86,7 @@ func taskExecutorSystem(t *testing.T) tes { tenantStore := tenant.NewStore(store) tenantSvc := tenant.NewService(tenantStore) - authStore, err := authorization.NewStore(store) + authStore, err := authorization.NewStore(ctx, store, useTokenHashing) require.NoError(t, err) authSvc := authorization.NewService(authStore, tenantSvc) @@ -100,21 +109,22 @@ func taskExecutorSystem(t *testing.T) tes { } func TestTaskExecutor(t *testing.T) { - t.Run("QuerySuccess", testQuerySuccess) - t.Run("QueryFailure", testQueryFailure) - t.Run("ManualRun", testManualRun) - t.Run("ResumeRun", testResumingRun) - t.Run("WorkerLimit", testWorkerLimit) - t.Run("LimitFunc", testLimitFunc) - t.Run("Metrics", testMetrics) - t.Run("IteratorFailure", testIteratorFailure) - t.Run("ErrorHandling", testErrorHandling) + + runTestWithTokenHashing("QuerySuccess", testQuerySuccess, t) + runTestWithTokenHashing("QueryFailure", testQueryFailure, t) + runTestWithTokenHashing("ManualRun", testManualRun, t) + runTestWithTokenHashing("ResumeRun", testResumingRun, t) + runTestWithTokenHashing("WorkerLimit", testWorkerLimit, t) + runTestWithTokenHashing("LimitFunc", testLimitFunc, t) + runTestWithTokenHashing("Metrics", testMetrics, t) + runTestWithTokenHashing("IteratorFailure", testIteratorFailure, t) + 
runTestWithTokenHashing("ErrorHandling", testErrorHandling, t) } -func testQuerySuccess(t *testing.T) { +func testQuerySuccess(useTokenHashing bool, t *testing.T) { t.Parallel() - tes := taskExecutorSystem(t) + tes := taskExecutorSystem(useTokenHashing, t) var ( script = fmt.Sprintf(fmtTestScript, t.Name()) @@ -179,9 +189,9 @@ func testQuerySuccess(t *testing.T) { } } -func testQueryFailure(t *testing.T) { +func testQueryFailure(useTokenHashing bool, t *testing.T) { t.Parallel() - tes := taskExecutorSystem(t) + tes := taskExecutorSystem(useTokenHashing, t) script := fmt.Sprintf(fmtTestScript, t.Name()) ctx := icontext.SetAuthorizer(context.Background(), tes.tc.Auth) @@ -215,9 +225,9 @@ func testQueryFailure(t *testing.T) { } } -func testManualRun(t *testing.T) { +func testManualRun(useTokenHashing bool, t *testing.T) { t.Parallel() - tes := taskExecutorSystem(t) + tes := taskExecutorSystem(useTokenHashing, t) script := fmt.Sprintf(fmtTestScript, t.Name()) ctx := icontext.SetAuthorizer(context.Background(), tes.tc.Auth) @@ -262,9 +272,9 @@ func testManualRun(t *testing.T) { } } -func testResumingRun(t *testing.T) { +func testResumingRun(useTokenHashing bool, t *testing.T) { t.Parallel() - tes := taskExecutorSystem(t) + tes := taskExecutorSystem(useTokenHashing, t) script := fmt.Sprintf(fmtTestScript, t.Name()) ctx := icontext.SetAuthorizer(context.Background(), tes.tc.Auth) @@ -305,9 +315,9 @@ func testResumingRun(t *testing.T) { } } -func testWorkerLimit(t *testing.T) { +func testWorkerLimit(useTokenHashing bool, t *testing.T) { t.Parallel() - tes := taskExecutorSystem(t) + tes := taskExecutorSystem(useTokenHashing, t) script := fmt.Sprintf(fmtTestScript, t.Name()) ctx := icontext.SetAuthorizer(context.Background(), tes.tc.Auth) @@ -335,9 +345,9 @@ func testWorkerLimit(t *testing.T) { } } -func testLimitFunc(t *testing.T) { +func testLimitFunc(useTokenHashing bool, t *testing.T) { t.Parallel() - tes := taskExecutorSystem(t) + tes := taskExecutorSystem(useTokenHashing, t) script := fmt.Sprintf(fmtTestScript, t.Name()) ctx := icontext.SetAuthorizer(context.Background(), tes.tc.Auth) @@ -374,9 +384,9 @@ func testLimitFunc(t *testing.T) { } } -func testMetrics(t *testing.T) { +func testMetrics(useTokenHashing bool, t *testing.T) { t.Parallel() - tes := taskExecutorSystem(t) + tes := taskExecutorSystem(useTokenHashing, t) metrics := tes.metrics reg := prom.NewRegistry(zaptest.NewLogger(t)) reg.MustRegister(metrics.PrometheusCollectors()...) 
@@ -445,9 +455,9 @@ func testMetrics(t *testing.T) { assert.Greater(t, *m.Histogram.SampleSum, float64(100), "run latency metric unexpectedly small") } -func testIteratorFailure(t *testing.T) { +func testIteratorFailure(useTokenHashing bool, t *testing.T) { t.Parallel() - tes := taskExecutorSystem(t) + tes := taskExecutorSystem(useTokenHashing, t) // replace iterator exhaust function with one which errors tes.ex.workerPool = sync.Pool{New: func() interface{} { @@ -493,9 +503,9 @@ func testIteratorFailure(t *testing.T) { } } -func testErrorHandling(t *testing.T) { +func testErrorHandling(useTokenHashing bool, t *testing.T) { t.Parallel() - tes := taskExecutorSystem(t) + tes := taskExecutorSystem(useTokenHashing, t) metrics := tes.metrics reg := prom.NewRegistry(zaptest.NewLogger(t)) @@ -540,9 +550,13 @@ func testErrorHandling(t *testing.T) { } func TestPromiseFailure(t *testing.T) { + runTestWithTokenHashing("TestPromiseFailure", testPromiseFailure, t) +} + +func testPromiseFailure(useTokenHashing bool, t *testing.T) { t.Parallel() - tes := taskExecutorSystem(t) + tes := taskExecutorSystem(useTokenHashing, t) var ( script = fmt.Sprintf(fmtTestScript, t.Name()) diff --git a/task/backend/executor/limits_test.go b/task/backend/executor/limits_test.go index e8f512a37c6..60bcb7dad0b 100644 --- a/task/backend/executor/limits_test.go +++ b/task/backend/executor/limits_test.go @@ -15,7 +15,11 @@ var ( ) func TestTaskConcurrency(t *testing.T) { - tes := taskExecutorSystem(t) + runTestWithTokenHashing("TestTaskConcurrency", testTaskConcurrency, t) +} + +func testTaskConcurrency(useTokenHashing bool, t *testing.T) { + tes := taskExecutorSystem(useTokenHashing, t) te := tes.ex r1, err := te.tcs.CreateRun(context.Background(), taskWith1Concurrency.ID, time.Now().Add(-4*time.Second), time.Now()) if err != nil { diff --git a/tenant/http_server_onboarding.go b/tenant/http_server_onboarding.go index b6dce718d96..bb63d0d249c 100644 --- a/tenant/http_server_onboarding.go +++ b/tenant/http_server_onboarding.go @@ -129,6 +129,7 @@ func (a *authResponse) toPlatform() *influxdb.Authorization { res := &influxdb.Authorization{ ID: a.ID, Token: a.Token, + HashedToken: a.HashedToken, Status: a.Status, Description: a.Description, OrgID: a.OrgID, diff --git a/tenant/http_server_onboarding_test.go b/tenant/http_server_onboarding_test.go index 7269140c5df..7d4d5d22693 100644 --- a/tenant/http_server_onboarding_test.go +++ b/tenant/http_server_onboarding_test.go @@ -15,21 +15,21 @@ import ( "go.uber.org/zap/zaptest" ) -func initOnboardHttpService(f itesting.OnboardingFields, t *testing.T) (influxdb.OnboardingService, func()) { +func initOnboardHttpService(f itesting.OnboardingFields, useTokenHashing bool, t *testing.T) (influxdb.OnboardingService, func()) { t.Helper() + ctx := context.Background() s := itesting.NewTestInmemStore(t) storage := tenant.NewStore(s) ten := tenant.NewService(storage) - authStore, err := authorization.NewStore(s) + authStore, err := authorization.NewStore(ctx, s, useTokenHashing) require.NoError(t, err) authSvc := authorization.NewService(authStore, ten) svc := tenant.NewOnboardService(ten, authSvc) - ctx := context.Background() if !f.IsOnboarding { // create a dummy so so we can no longer onboard err := ten.CreateUser(ctx, &influxdb.User{Name: "dummy", Status: influxdb.Active}) diff --git a/tenant/service_onboarding_test.go b/tenant/service_onboarding_test.go index 0c85f8649f8..9924762ee16 100644 --- a/tenant/service_onboarding_test.go +++ b/tenant/service_onboarding_test.go @@ -2,6 +2,7 @@ 
package tenant_test import ( "context" + "fmt" "testing" "time" @@ -22,25 +23,25 @@ func TestBoltOnboardingService(t *testing.T) { influxdbtesting.OnboardInitialUser(initBoltOnboardingService, t) } -func initBoltOnboardingService(f influxdbtesting.OnboardingFields, t *testing.T) (influxdb.OnboardingService, func()) { +func initBoltOnboardingService(f influxdbtesting.OnboardingFields, useTokenHashing bool, t *testing.T) (influxdb.OnboardingService, func()) { s := influxdbtesting.NewTestInmemStore(t) - svc := initOnboardingService(s, f, t) + svc := initOnboardingService(s, f, useTokenHashing, t) return svc, func() {} } -func initOnboardingService(s kv.Store, f influxdbtesting.OnboardingFields, t *testing.T) influxdb.OnboardingService { +func initOnboardingService(s kv.Store, f influxdbtesting.OnboardingFields, useTokenHashing bool, t *testing.T) influxdb.OnboardingService { + ctx := context.Background() + storage := tenant.NewStore(s) ten := tenant.NewService(storage) - authStore, err := authorization.NewStore(s) + authStore, err := authorization.NewStore(ctx, s, useTokenHashing) require.NoError(t, err) authSvc := authorization.NewService(authStore, ten) // we will need an auth service as well svc := tenant.NewOnboardService(ten, authSvc) - ctx := context.Background() - t.Logf("Onboarding: %v", f.IsOnboarding) if !f.IsOnboarding { // create a dummy so so we can no longer onboard @@ -53,18 +54,33 @@ func initOnboardingService(s kv.Store, f influxdbtesting.OnboardingFields, t *te return svc } +func runTestWithTokenHashing(name string, testFunc func(bool, *testing.T), t *testing.T) { + t.Helper() + for _, useTokenHashing := range []bool{false, true} { + t.Run(fmt.Sprintf("%s/TokenHashing=%t", name, useTokenHashing), func(t *testing.T) { + testFunc(useTokenHashing, t) + }) + } +} + func TestOnboardURM(t *testing.T) { + runTestWithTokenHashing("TestOnboardURM", testOnboardURM, t) +} + +func testOnboardURM(useTokenHashing bool, t *testing.T) { + ctx := context.Background() + s := influxdbtesting.NewTestInmemStore(t) storage := tenant.NewStore(s) ten := tenant.NewService(storage) - authStore, err := authorization.NewStore(s) + authStore, err := authorization.NewStore(ctx, s, useTokenHashing) require.NoError(t, err) authSvc := authorization.NewService(authStore, ten) svc := tenant.NewOnboardService(ten, authSvc) - ctx := icontext.SetAuthorizer(context.Background(), &influxdb.Authorization{ + ctx = icontext.SetAuthorizer(ctx, &influxdb.Authorization{ UserID: 123, }) @@ -92,17 +108,23 @@ func TestOnboardURM(t *testing.T) { } func TestOnboardAuth(t *testing.T) { + runTestWithTokenHashing("TestOnboardAuth", testOnboardAuth, t) +} + +func testOnboardAuth(useTokenHashing bool, t *testing.T) { + ctx := context.Background() + s := influxdbtesting.NewTestInmemStore(t) storage := tenant.NewStore(s) ten := tenant.NewService(storage) - authStore, err := authorization.NewStore(s) + authStore, err := authorization.NewStore(ctx, s, useTokenHashing) require.NoError(t, err) authSvc := authorization.NewService(authStore, ten) svc := tenant.NewOnboardService(ten, authSvc) - ctx := icontext.SetAuthorizer(context.Background(), &influxdb.Authorization{ + ctx = icontext.SetAuthorizer(ctx, &influxdb.Authorization{ UserID: 123, }) @@ -170,11 +192,17 @@ func TestOnboardAuth(t *testing.T) { } func TestOnboardService_RetentionPolicy(t *testing.T) { + runTestWithTokenHashing("TestOnboardService_RetentionPolicy", testOnboardService_RetentionPolicy, t) +} + +func testOnboardService_RetentionPolicy(useTokenHashing bool, t 
*testing.T) { + ctx := context.Background() + s := influxdbtesting.NewTestInmemStore(t) storage := tenant.NewStore(s) ten := tenant.NewService(storage) - authStore, err := authorization.NewStore(s) + authStore, err := authorization.NewStore(ctx, s, useTokenHashing) require.NoError(t, err) authSvc := authorization.NewService(authStore, ten) @@ -182,7 +210,7 @@ func TestOnboardService_RetentionPolicy(t *testing.T) { // we will need an auth service as well svc := tenant.NewOnboardService(ten, authSvc) - ctx := icontext.SetAuthorizer(context.Background(), &influxdb.Authorization{ + ctx = icontext.SetAuthorizer(ctx, &influxdb.Authorization{ UserID: 123, }) @@ -202,11 +230,17 @@ func TestOnboardService_RetentionPolicy(t *testing.T) { } func TestOnboardService_RetentionPolicyDeprecated(t *testing.T) { + runTestWithTokenHashing("TestOnboardService_RetentionPolicyDeprecated", testOnboardService_RetentionPolicyDeprecated, t) +} + +func testOnboardService_RetentionPolicyDeprecated(useTokenHashing bool, t *testing.T) { + ctx := context.Background() + s := influxdbtesting.NewTestInmemStore(t) storage := tenant.NewStore(s) ten := tenant.NewService(storage) - authStore, err := authorization.NewStore(s) + authStore, err := authorization.NewStore(ctx, s, useTokenHashing) require.NoError(t, err) authSvc := authorization.NewService(authStore, ten) @@ -214,7 +248,7 @@ func TestOnboardService_RetentionPolicyDeprecated(t *testing.T) { // we will need an auth service as well svc := tenant.NewOnboardService(ten, authSvc) - ctx := icontext.SetAuthorizer(context.Background(), &influxdb.Authorization{ + ctx = icontext.SetAuthorizer(ctx, &influxdb.Authorization{ UserID: 123, }) @@ -234,25 +268,29 @@ func TestOnboardService_RetentionPolicyDeprecated(t *testing.T) { } func TestOnboardService_WeakPassword(t *testing.T) { - s := influxdbtesting.NewTestInmemStore(t) - storage := tenant.NewStore(s) - ten := tenant.NewService(storage) - - authStore, err := authorization.NewStore(s) - require.NoError(t, err) - - authSvc := authorization.NewService(authStore, ten) - svc := tenant.NewOnboardService(ten, authSvc) - - ctx := icontext.SetAuthorizer(context.Background(), &influxdb.Authorization{ - UserID: 123, - }) - - _, err = svc.OnboardInitialUser(ctx, &influxdb.OnboardingRequest{ - User: "name", - Password: "short", - Org: "name", - Bucket: "name", - }) - assert2.ErrorIs(t, err, influx_errors.EPasswordLength) + for _, useHashedTokens := range []bool{false, true} { + ctx := context.Background() + + s := influxdbtesting.NewTestInmemStore(t) + storage := tenant.NewStore(s) + ten := tenant.NewService(storage) + + authStore, err := authorization.NewStore(ctx, s, useHashedTokens) + require.NoError(t, err) + + authSvc := authorization.NewService(authStore, ten) + svc := tenant.NewOnboardService(ten, authSvc) + + ctx = icontext.SetAuthorizer(ctx, &influxdb.Authorization{ + UserID: 123, + }) + + _, err = svc.OnboardInitialUser(ctx, &influxdb.OnboardingRequest{ + User: "name", + Password: "short", + Org: "name", + Bucket: "name", + }) + assert2.ErrorIs(t, err, influx_errors.EPasswordLength) + } } diff --git a/testing/auth.go b/testing/auth.go index d1badb8cbc4..0f75ec72706 100644 --- a/testing/auth.go +++ b/testing/auth.go @@ -13,6 +13,7 @@ import ( "github.com/influxdata/influxdb/v2/kit/platform" "github.com/influxdata/influxdb/v2/kit/platform/errors" "github.com/influxdata/influxdb/v2/mock" + "github.com/stretchr/testify/require" ) const ( @@ -24,7 +25,7 @@ const ( var authorizationCmpOptions = cmp.Options{ cmpopts.EquateEmpty(), - 
cmpopts.IgnoreFields(influxdb.Authorization{}, "ID", "Token", "CreatedAt", "UpdatedAt"), + cmpopts.IgnoreFields(influxdb.Authorization{}, "ID", "Token", "HashedToken", "CreatedAt", "UpdatedAt"), cmp.Comparer(func(x, y []byte) bool { return bytes.Equal(x, y) }), @@ -663,7 +664,7 @@ func UpdateAuthorization( err: &errors.Error{ Code: errors.EInvalid, Op: influxdb.OpUpdateAuthorization, - Msg: "unknown authorization status", + Msg: "encodeAuthorization: unknown authorization status", }, }, }, @@ -858,6 +859,13 @@ func FindAuthorizationByToken( if diff := cmp.Diff(authorization, tt.wants.authorization, authorizationCmpOptions...); diff != "" { t.Errorf("authorization is different -got/+want\ndiff %s", diff) } + + // Verify that lookup by the hashed token does not work. + if authorization.IsHashedTokenSet() { + a, err := s.FindAuthorizationByToken(ctx, authorization.HashedToken) + require.ErrorContains(t, err, "authorization not found") + require.Nil(t, a) + } }) } } diff --git a/testing/onboarding.go b/testing/onboarding.go index 03f473ed830..1d9ec80bc46 100644 --- a/testing/onboarding.go +++ b/testing/onboarding.go @@ -2,6 +2,7 @@ package testing import ( "context" + "fmt" "testing" "time" @@ -10,6 +11,7 @@ import ( platform2 "github.com/influxdata/influxdb/v2/kit/platform" "github.com/influxdata/influxdb/v2/kit/platform/errors" "github.com/influxdata/influxdb/v2/mock" + "github.com/stretchr/testify/require" ) var onboardCmpOptions = cmp.Options{ @@ -39,7 +41,7 @@ type OnboardingFields struct { // OnboardInitialUser testing func OnboardInitialUser( - init func(OnboardingFields, *testing.T) (platform.OnboardingService, func()), + init func(OnboardingFields, bool, *testing.T) (platform.OnboardingService, func()), t *testing.T, ) { type args struct { @@ -187,27 +189,22 @@ func OnboardInitialUser( }, } for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - s, done := init(tt.fields, t) - defer done() - ctx := context.Background() - results, err := s.OnboardInitialUser(ctx, tt.args.request) - if (err != nil) != (tt.wants.errCode != "") { - t.Logf("Error: %v", err) - t.Fatalf("expected error code '%s' got '%v'", tt.wants.errCode, err) - } - if err != nil && tt.wants.errCode != "" { - if code := errors.ErrorCode(err); code != tt.wants.errCode { - t.Logf("Error: %v", err) - t.Fatalf("expected error code to match '%s' got '%v'", tt.wants.errCode, code) + for _, useTokenHashing := range []bool{false, true} { + t.Run(fmt.Sprintf("%s/TokenHashing=%t", tt.name, useTokenHashing), func(t *testing.T) { + s, done := init(tt.fields, useTokenHashing, t) + defer done() + ctx := context.Background() + results, err := s.OnboardInitialUser(ctx, tt.args.request) + if tt.wants.errCode == "" { + require.NoError(t, err, "s.OnboardInitialUser") + } else { + require.Equal(t, tt.wants.errCode, errors.ErrorCode(err), "s.OnboardInitialUser") } - } - if diff := cmp.Diff(results, tt.wants.results, onboardCmpOptions); diff != "" { - t.Errorf("onboarding results are different -got/+want\ndiff %s", diff) - } - }) + diff := cmp.Diff(results, tt.wants.results, onboardCmpOptions) + require.Empty(t, diff) + }) + } } - } const (
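
For context on the FindAuthorizationByToken assertion added above: with token hashing enabled, the store looks a token up by its hash, so presenting the already-hashed value is hashed again and cannot match the indexed entry. The sketch below illustrates that mismatch using only the standard library, mirroring the VariantSHA256 Encode/Hash pair shown earlier in this diff (base64url of a SHA-256 digest). It is a simplification under stated assumptions: the real HashedToken is PHC-encoded with a variant prefix, and encodeSHA256 and the token value here are illustrative, not store APIs.

package main

import (
	"crypto/sha256"
	"encoding/base64"
	"fmt"
)

// encodeSHA256 mirrors the influxdb2 VariantSHA256 Encode/Hash pair from this
// diff: base64url(SHA-256(input)). The helper name is illustrative only.
func encodeSHA256(input []byte) string {
	sum := sha256.Sum256(input)
	return base64.URLEncoding.EncodeToString(sum[:])
}

func main() {
	rawToken := "example-raw-token" // illustrative value, not a real credential

	// Roughly what the hashed-token index is keyed on when the authorization
	// is created (the stored value is PHC-encoded; only the digest is shown).
	storedDigest := encodeSHA256([]byte(rawToken))

	// Digest derived if a caller presents the hashed value instead of the raw
	// token: it gets hashed again, so it cannot match the indexed digest.
	digestOfHash := encodeSHA256([]byte(storedDigest))

	fmt.Println(storedDigest == encodeSHA256([]byte(rawToken))) // true: the raw token still resolves
	fmt.Println(storedDigest == digestOfHash)                   // false: the hashed token does not
}
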