diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 3d34e6de..b1c24eba 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -15,18 +15,18 @@ jobs: name: lint runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: actions/setup-go@v4 + - uses: actions/checkout@v5 + - uses: actions/setup-go@v6 with: - go-version: '1.23.8' + go-version: '1.24.6' cache: false - name: golangci-lint - uses: golangci/golangci-lint-action@v3 + uses: golangci/golangci-lint-action@v8 with: # Require: The version of golangci-lint to use. # When `install-mode` is `binary` (default) the value can be v1.2 or v1.2.3 or `latest` to use the latest version. # When `install-mode` is `goinstall` the value can be v1.2.3, `latest`, or the hash of a commit. - version: v1.62.2 + version: v2.6.0 # Optional: working directory, useful for monorepos # working-directory: somedir diff --git a/.golangci.yml b/.golangci.yml index 6e82c2ba..174538a8 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -1,542 +1,437 @@ -linters-settings: - depguard: - # new configuration - rules: - logger: - deny: - # logging is allowed only by logutils.Log, - # logrus is allowed to use only in logutils package. - - pkg: "github.com/sirupsen/logrus" - desc: logging is allowed only by logutils.Log - dupl: - threshold: 100 - funlen: - lines: -1 # the number of lines (code + empty lines) is not a right metric and leads to code without empty line or one-liner. - statements: 60 - goconst: - min-len: 2 - min-occurrences: 3 - gocritic: - enabled-tags: - - diagnostic - - experimental - - opinionated - - performance - - style - disabled-checks: - - dupImport # https://github.com/go-critic/go-critic/issues/845 - - ifElseChain - - octalLiteral - - whyNoLint - gocyclo: - min-complexity: 15 - gofmt: - rewrite-rules: - - pattern: 'interface{}' - replacement: 'any' - gomnd: - # don't include the "operation" and "assign" - checks: - - argument - - case - - condition - - return - ignored-numbers: - - '0' - - '1' - - '2' - - '3' - ignored-functions: - - strings.SplitN - - govet: - check-shadowing: true - settings: - printf: - funcs: - - (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof - - (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf - - (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf - - (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf - lll: - line-length: 140 - misspell: - locale: US - nolintlint: - allow-unused: false # report any unused nolint directives - require-explanation: false # don't require an explanation for nolint directives - require-specific: false # don't require nolint directives to be specific about which linter is being skipped - revive: - # Maximum number of open files at the same time. - # See https://github.com/mgechev/revive#command-line-flags - # Defaults to unlimited. - max-open-files: 2048 - # When set to false, ignores files with "GENERATED" header, similar to golint. - # See https://github.com/mgechev/revive#available-rules for details. - # Default: false - ignore-generated-header: true - # Sets the default severity. - # See https://github.com/mgechev/revive#configuration - # Default: warning - severity: error - # Enable all available rules. - # Default: false - enable-all-rules: true - # Sets the default failure confidence. - # This means that linting errors with less than 0.8 confidence will be ignored. 
- # Default: 0.8 - confidence: 0.1 - rules: - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#add-constant - - name: add-constant - severity: warning - disabled: true - arguments: - - maxLitCount: "3" - allowStrs: '""' - allowInts: "0,1,2" - allowFloats: "0.0,0.,1.0,1.,2.0,2." - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#argument-limit - - name: argument-limit - severity: warning - disabled: false - arguments: [8] - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#atomic - - name: atomic - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#banned-characters - - name: banned-characters - severity: warning - disabled: false - arguments: ["Ω", "Σ", "σ", "7"] - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#bare-return - - name: bare-return - severity: warning - disabled: true - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#blank-imports - - name: blank-imports - severity: warning - disabled: true - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#bool-literal-in-expr - - name: bool-literal-in-expr - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#call-to-gc - - name: call-to-gc - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#cognitive-complexity - - name: cognitive-complexity - severity: warning - disabled: true - arguments: [7] - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#comment-spacings - - name: comment-spacings - severity: warning - disabled: false - arguments: - - mypragma - - otherpragma - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#confusing-naming - - name: confusing-naming - severity: warning - disabled: true - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#confusing-results - - name: confusing-results - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#constant-logical-expr - - name: constant-logical-expr - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#context-as-argument - - name: context-as-argument - severity: warning - disabled: false - arguments: - - allowTypesBefore: "*testing.T,*github.com/user/repo/testing.Harness" - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#context-keys-type - - name: context-keys-type - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#cyclomatic - - name: cyclomatic - severity: warning - disabled: true - arguments: [3] - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#datarace - - name: datarace - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#deep-exit - - name: deep-exit - severity: warning - disabled: true - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#defer - - name: defer - severity: warning - disabled: false - arguments: - - ["call-chain", "loop"] - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#dot-imports - - name: dot-imports - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#duplicated-imports - - name: duplicated-imports - severity: warning - disabled: false - # 
https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#early-return - - name: early-return - severity: warning - disabled: false - arguments: - - "preserveScope" - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#empty-block - - name: empty-block - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#empty-lines - - name: empty-lines - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#enforce-map-style - - name: enforce-map-style - severity: warning - disabled: false - arguments: - - "make" - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#error-naming - - name: error-naming - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#error-return - - name: error-return - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#error-strings - - name: error-strings - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#errorf - - name: errorf - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#exported - - name: exported - severity: warning - disabled: false - arguments: - # - "preserveScope" - - "checkPrivateReceivers" - - "sayRepetitiveInsteadOfStutters" - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#file-header - - name: file-header - severity: warning - disabled: true - arguments: - - This is the text that must appear at the top of source files. - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#flag-parameter - - name: flag-parameter - severity: warning - disabled: true - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#function-result-limit - - name: function-result-limit - severity: warning - disabled: true - arguments: [2] - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#function-length - - name: function-length - severity: warning - disabled: true - arguments: [10, 0] - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#get-return - - name: get-return - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#identical-branches - - name: identical-branches - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#if-return - - name: if-return - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#increment-decrement - - name: increment-decrement - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#indent-error-flow - - name: indent-error-flow - severity: warning - disabled: false - arguments: - - "preserveScope" - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#import-alias-naming - - name: import-alias-naming - severity: warning - disabled: false - arguments: - - "^[a-z][a-z0-9_]{0,}$" - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#imports-blacklist - - name: imports-blacklist - severity: warning - disabled: false - arguments: - - "crypto/md5" - - "crypto/sha1" - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#import-shadowing - - name: import-shadowing - severity: warning - disabled: false - # 
https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#line-length-limit - - name: line-length-limit - severity: warning - disabled: false - arguments: [140] - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#max-public-structs - - name: max-public-structs - severity: warning - disabled: true - arguments: [3] - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#modifies-parameter - - name: modifies-parameter - severity: warning - disabled: true - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#modifies-value-receiver - - name: modifies-value-receiver - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#nested-structs - - name: nested-structs - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#optimize-operands-order - - name: optimize-operands-order - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#package-comments - - name: package-comments - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#range - - name: range - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#range-val-in-closure - - name: range-val-in-closure - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#range-val-address - - name: range-val-address - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#receiver-naming - - name: receiver-naming - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#redundant-import-alias - - name: redundant-import-alias - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#redefines-builtin-id - - name: redefines-builtin-id - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#string-of-int - - name: string-of-int - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#string-format - - name: string-format - severity: warning - disabled: false - arguments: - - - 'core.WriteError[1].Message' - - '/^([^A-Z]|$)/' - - must not start with a capital letter - - - 'fmt.Errorf[0]' - - '/(^|[^\.!?])$/' - - must not end in punctuation - - - panic - - '/^[^\n]*$/' - - must not contain line breaks - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#struct-tag - - name: struct-tag - arguments: - - "json,inline" - - "bson,outline,gnu" - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#superfluous-else - - name: superfluous-else - severity: warning - disabled: false - arguments: - - "preserveScope" - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#time-equal - - name: time-equal - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#time-naming - - name: time-naming - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#var-naming - - name: var-naming - severity: warning - disabled: false - arguments: - - ["ID"] # AllowList - - ["VM"] # DenyList - - - upperCaseConst: true - # 
https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#var-declaration - - name: var-declaration - severity: warning - disabled: false - - name: unchecked-type-assertion - severity: warning - disabled: true - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unconditional-recursion - - name: unconditional-recursion - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unexported-naming - - name: unexported-naming - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unexported-return - - name: unexported-return - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unhandled-error - - name: unhandled-error - severity: warning - disabled: false - arguments: - - "fmt.Printf" - - "fmt.Println" - - "strings.Builder.WriteString" - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unnecessary-stmt - - name: unnecessary-stmt - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unreachable-code - - name: unreachable-code - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unused-parameter - - name: unused-parameter - severity: warning - disabled: false - arguments: - - allowRegex: "^_" - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unused-receiver - - name: unused-receiver - severity: warning - disabled: false - arguments: - - allowRegex: "^_" - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#useless-break - - name: useless-break - severity: warning - disabled: false - # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#waitgroup-by-value - - name: waitgroup-by-value - severity: warning - disabled: false - gci: - # Section configuration to compare against. - # Section names are case-insensitive and may contain parameters in (). - # The default order of sections is `standard > default > custom > blank > dot > alias > localmodule`, - # If `custom-order` is `true`, it follows the order of `sections` option. - # Default: ["standard", "default"] - sections: - - standard # Standard section: captures all standard packages. - - default # Default section: contains all imports that could not be matched to another section type. - - prefix(github.com/ydb-platform/ydb) # Custom section: groups all imports with the specified Prefix. - - localmodule # Local module section: contains all local packages. This section is not present unless explicitly enabled. - # Enable custom order of sections. - # If `true`, make the section order the same as the order of `sections`. - # Default: false - custom-order: true - +version: "2" linters: - fast: false - disable-all: true + default: none enable: - bodyclose - depguard - dogsled - # - dupl - errcheck - - exportloopref - funlen - gocheckcompilerdirectives - # - gochecknoinits - goconst - # - gocritic - gocyclo - - gofmt - - goimports - # - gomnd - goprintffuncname - # - gosec - - gosimple - govet - ineffassign - lll - misspell - nakedret - noctx - # - nolintlint - revive - # - staticcheck - - stylecheck - - typecheck + - staticcheck - unconvert - unparam - unused - whitespace - - wsl - # Gci controls Go package import order and makes it always deterministic. 
+ - wsl_v5 + settings: + depguard: + rules: + logger: + deny: + - pkg: github.com/sirupsen/logrus + desc: logging is allowed only by logutils.Log + dupl: + threshold: 100 + funlen: + lines: -1 + statements: 60 + goconst: + min-len: 2 + min-occurrences: 3 + gocritic: + disabled-checks: + - dupImport + - ifElseChain + - octalLiteral + - whyNoLint + enabled-tags: + - diagnostic + - experimental + - opinionated + - performance + - style + gocyclo: + min-complexity: 15 + govet: + settings: + printf: + funcs: + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf + lll: + line-length: 140 + misspell: + locale: US + mnd: + checks: + - argument + - case + - condition + - return + ignored-numbers: + - "0" + - "1" + - "2" + - "3" + ignored-functions: + - strings.SplitN + nolintlint: + require-explanation: false + require-specific: false + allow-unused: false + revive: + max-open-files: 2048 + confidence: 0.1 + severity: error + enable-all-rules: true + rules: + - name: add-constant + arguments: + - allowFloats: 0.0,0.,1.0,1.,2.0,2. + allowInts: 0,1,2 + allowStrs: '""' + maxLitCount: "3" + severity: warning + disabled: true + - name: argument-limit + arguments: + - 8 + severity: warning + disabled: false + - name: atomic + severity: warning + disabled: false + - name: banned-characters + arguments: + - Ω + - Σ + - σ + - "7" + severity: warning + disabled: false + - name: bare-return + severity: warning + disabled: true + - name: blank-imports + severity: warning + disabled: true + - name: bool-literal-in-expr + severity: warning + disabled: false + - name: call-to-gc + severity: warning + disabled: false + - name: cognitive-complexity + arguments: + - 7 + severity: warning + disabled: true + - name: comment-spacings + arguments: + - mypragma + - otherpragma + severity: warning + disabled: false + - name: confusing-naming + severity: warning + disabled: true + - name: confusing-results + severity: warning + disabled: false + - name: constant-logical-expr + severity: warning + disabled: false + - name: context-as-argument + arguments: + - allowTypesBefore: '*testing.T,*github.com/user/repo/testing.Harness' + severity: warning + disabled: false + - name: context-keys-type + severity: warning + disabled: false + - name: cyclomatic + arguments: + - 3 + severity: warning + disabled: true + - name: datarace + severity: warning + disabled: false + - name: deep-exit + severity: warning + disabled: true + - name: defer + arguments: + - - call-chain + - loop + severity: warning + disabled: false + - name: dot-imports + severity: warning + disabled: false + - name: duplicated-imports + severity: warning + disabled: false + - name: early-return + arguments: + - preserveScope + severity: warning + disabled: false + - name: empty-block + severity: warning + disabled: false + - name: empty-lines + severity: warning + disabled: false + - name: enforce-map-style + arguments: + - make + severity: warning + disabled: false + - name: error-naming + severity: warning + disabled: false + - name: error-return + severity: warning + disabled: false + - name: error-strings + severity: warning + disabled: false + - name: errorf + severity: warning + disabled: false + - name: exported + arguments: + - checkPrivateReceivers + - sayRepetitiveInsteadOfStutters + severity: warning + disabled: false + - name: file-header + arguments: + - 
This is the text that must appear at the top of source files. + severity: warning + disabled: true + - name: flag-parameter + severity: warning + disabled: true + - name: function-result-limit + arguments: + - 2 + severity: warning + disabled: true + - name: function-length + arguments: + - 10 + - 0 + severity: warning + disabled: true + - name: get-return + severity: warning + disabled: false + - name: identical-branches + severity: warning + disabled: false + - name: if-return + severity: warning + disabled: false + - name: increment-decrement + severity: warning + disabled: false + - name: indent-error-flow + arguments: + - preserveScope + severity: warning + disabled: false + - name: import-alias-naming + arguments: + - ^[a-z][a-z0-9_]{0,}$ + severity: warning + disabled: false + - name: imports-blocklist + arguments: + - crypto/md5 + - crypto/sha1 + severity: warning + disabled: false + - name: import-shadowing + severity: warning + disabled: false + - name: line-length-limit + arguments: + - 140 + severity: warning + disabled: false + - name: max-public-structs + arguments: + - 3 + severity: warning + disabled: true + - name: modifies-parameter + severity: warning + disabled: true + - name: modifies-value-receiver + severity: warning + disabled: false + - name: nested-structs + severity: warning + disabled: false + - name: optimize-operands-order + severity: warning + disabled: false + - name: package-comments + severity: warning + disabled: false + - name: range + severity: warning + disabled: false + - name: range-val-in-closure + severity: warning + disabled: false + - name: range-val-address + severity: warning + disabled: false + - name: receiver-naming + severity: warning + disabled: false + - name: redundant-import-alias + severity: warning + disabled: false + - name: redefines-builtin-id + severity: warning + disabled: false + - name: string-of-int + severity: warning + disabled: false + - name: string-format + arguments: + - - core.WriteError[1].Message + - /^([^A-Z]|$)/ + - must not start with a capital letter + - - fmt.Errorf[0] + - /(^|[^\.!?])$/ + - must not end in punctuation + - - panic + - /^[^\n]*$/ + - must not contain line breaks + severity: warning + disabled: false + - name: struct-tag + arguments: + - json,inline + - bson,outline,gnu + severity: warning + disabled: false + - name: superfluous-else + arguments: + - preserveScope + severity: warning + disabled: false + - name: time-equal + severity: warning + disabled: false + - name: time-naming + severity: warning + disabled: false + - name: var-naming + arguments: + - - ID + - - VM + - - upperCaseConst: true + severity: warning + disabled: false + - name: var-declaration + severity: warning + disabled: false + - name: unchecked-type-assertion + severity: warning + disabled: true + - name: unconditional-recursion + severity: warning + disabled: false + - name: unexported-naming + severity: warning + disabled: false + - name: unexported-return + severity: warning + disabled: false + - name: unhandled-error + arguments: + - fmt.Printf + - fmt.Println + - strings.Builder.WriteString + severity: warning + disabled: false + - name: unnecessary-stmt + severity: warning + disabled: false + - name: unreachable-code + severity: warning + disabled: false + - name: unused-parameter + arguments: + - allowRegex: ^_ + severity: warning + disabled: false + - name: unused-receiver + arguments: + - allowRegex: ^_ + severity: warning + disabled: false + - name: useless-break + severity: warning + disabled: false + - name: 
use-errors-new + disabled: true + - name: waitgroup-by-value + severity: warning + disabled: false + wsl_v5: + allow-first-in-block: false + allow-whole-block: false + branch-max-lines: 0 + exclusions: + generated: lax + presets: + - comments + - common-false-positives + - legacy + - std-error-handling + rules: + - linters: + - mnd + path: _test\.go + - path: .go + text: ST1003 + - path: .go + text: 'var-naming: don''t use an underscore in package name' + paths: + - third_party$ + - builtin$ + - examples$ + - tools/clickhouse/* +formatters: + enable: - gci - - # don't enable: - # - asciicheck - # - scopelint - # - gochecknoglobals - # - gocognit - # - godot - # - godox - # - goerr113 - # - interfacer - # - maligned - # - nestif - # - prealloc - # - testpackage - -issues: - # Excluding configuration per-path, per-linter, per-text and per-source - exclude-rules: - - path: _test\.go - linters: - - gomnd - - - path: ".go" - text: "ST1003" - - path: ".go" - text: "var-naming: don't use an underscore in package name" - - -run: - timeout: 5m - skip-dirs: - - test/testdata_etc # test files - - internal/cache # extracted from Go code - - internal/renameio # extracted from Go code - - internal/robustio # extracted from Go code - - library/go/yson # copied from ytsaurus/ytsaurus/yt/go/yson - skip-files: - - app/server/conversion/itoa.go # copied from Go std lib + - gofmt + - goimports + settings: + gci: + sections: + - standard + - default + - prefix(github.com/ydb-platform/ydb) + - localmodule + custom-order: true + gofmt: + rewrite-rules: + - pattern: interface{} + replacement: any + exclusions: + generated: lax + paths: + - third_party$ + - builtin$ + - examples$ diff --git a/api/common/gateways_config.pb.go b/api/common/gateways_config.pb.go index 8a6e03de..3afc864e 100644 --- a/api/common/gateways_config.pb.go +++ b/api/common/gateways_config.pb.go @@ -313,6 +313,53 @@ func (TMongoDbDataSourceOptions_EUnsupportedTypeDisplayMode) EnumDescriptor() ([ return file_yql_essentials_providers_common_proto_gateways_config_proto_rawDescGZIP(), []int{8, 2} } +// EQueryDataFormat determines the format of the data returned by the YDB server +type TYdbDataSourceOptions_EQueryDataFormat int32 + +const ( + TYdbDataSourceOptions_QUERY_DATA_FORMAT_UNSPECIFIED TYdbDataSourceOptions_EQueryDataFormat = 0 + TYdbDataSourceOptions_ARROW TYdbDataSourceOptions_EQueryDataFormat = 1 +) + +// Enum value maps for TYdbDataSourceOptions_EQueryDataFormat. 
+var ( + TYdbDataSourceOptions_EQueryDataFormat_name = map[int32]string{ + 0: "QUERY_DATA_FORMAT_UNSPECIFIED", + 1: "ARROW", + } + TYdbDataSourceOptions_EQueryDataFormat_value = map[string]int32{ + "QUERY_DATA_FORMAT_UNSPECIFIED": 0, + "ARROW": 1, + } +) + +func (x TYdbDataSourceOptions_EQueryDataFormat) Enum() *TYdbDataSourceOptions_EQueryDataFormat { + p := new(TYdbDataSourceOptions_EQueryDataFormat) + *p = x + return p +} + +func (x TYdbDataSourceOptions_EQueryDataFormat) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (TYdbDataSourceOptions_EQueryDataFormat) Descriptor() protoreflect.EnumDescriptor { + return file_yql_essentials_providers_common_proto_gateways_config_proto_enumTypes[5].Descriptor() +} + +func (TYdbDataSourceOptions_EQueryDataFormat) Type() protoreflect.EnumType { + return &file_yql_essentials_providers_common_proto_gateways_config_proto_enumTypes[5] +} + +func (x TYdbDataSourceOptions_EQueryDataFormat) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use TYdbDataSourceOptions_EQueryDataFormat.Descriptor instead. +func (TYdbDataSourceOptions_EQueryDataFormat) EnumDescriptor() ([]byte, []int) { + return file_yql_essentials_providers_common_proto_gateways_config_proto_rawDescGZIP(), []int{12, 0} +} + // TGenericEndpoint represents the network address of a generic data source instance type TGenericEndpoint struct { state protoimpl.MessageState `protogen:"open.v1"` @@ -994,6 +1041,51 @@ func (x *TIcebergDataSourceOptions) GetWarehouse() *TIcebergWarehouse { return nil } +// TYdbDataSourceOptions represents settings specific for YDB data source +type TYdbDataSourceOptions struct { + state protoimpl.MessageState `protogen:"open.v1"` + QueryDataFormat TYdbDataSourceOptions_EQueryDataFormat `protobuf:"varint,1,opt,name=query_data_format,json=queryDataFormat,proto3,enum=NYql.TYdbDataSourceOptions_EQueryDataFormat" json:"query_data_format,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *TYdbDataSourceOptions) Reset() { + *x = TYdbDataSourceOptions{} + mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *TYdbDataSourceOptions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TYdbDataSourceOptions) ProtoMessage() {} + +func (x *TYdbDataSourceOptions) ProtoReflect() protoreflect.Message { + mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[12] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TYdbDataSourceOptions.ProtoReflect.Descriptor instead. +func (*TYdbDataSourceOptions) Descriptor() ([]byte, []int) { + return file_yql_essentials_providers_common_proto_gateways_config_proto_rawDescGZIP(), []int{12} +} + +func (x *TYdbDataSourceOptions) GetQueryDataFormat() TYdbDataSourceOptions_EQueryDataFormat { + if x != nil { + return x.QueryDataFormat + } + return TYdbDataSourceOptions_QUERY_DATA_FORMAT_UNSPECIFIED +} + // TGenericDataSourceInstance helps to identify the instance of a data source to redirect request to. 
type TGenericDataSourceInstance struct { state protoimpl.MessageState `protogen:"open.v1"` @@ -1023,6 +1115,7 @@ type TGenericDataSourceInstance struct { // *TGenericDataSourceInstance_LoggingOptions // *TGenericDataSourceInstance_MongodbOptions // *TGenericDataSourceInstance_IcebergOptions + // *TGenericDataSourceInstance_YdbOptions Options isTGenericDataSourceInstance_Options `protobuf_oneof:"options"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache @@ -1030,7 +1123,7 @@ type TGenericDataSourceInstance struct { func (x *TGenericDataSourceInstance) Reset() { *x = TGenericDataSourceInstance{} - mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[12] + mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1042,7 +1135,7 @@ func (x *TGenericDataSourceInstance) String() string { func (*TGenericDataSourceInstance) ProtoMessage() {} func (x *TGenericDataSourceInstance) ProtoReflect() protoreflect.Message { - mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[12] + mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[13] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1055,7 +1148,7 @@ func (x *TGenericDataSourceInstance) ProtoReflect() protoreflect.Message { // Deprecated: Use TGenericDataSourceInstance.ProtoReflect.Descriptor instead. func (*TGenericDataSourceInstance) Descriptor() ([]byte, []int) { - return file_yql_essentials_providers_common_proto_gateways_config_proto_rawDescGZIP(), []int{12} + return file_yql_essentials_providers_common_proto_gateways_config_proto_rawDescGZIP(), []int{13} } func (x *TGenericDataSourceInstance) GetKind() EGenericDataSourceKind { @@ -1179,6 +1272,15 @@ func (x *TGenericDataSourceInstance) GetIcebergOptions() *TIcebergDataSourceOpti return nil } +func (x *TGenericDataSourceInstance) GetYdbOptions() *TYdbDataSourceOptions { + if x != nil { + if x, ok := x.Options.(*TGenericDataSourceInstance_YdbOptions); ok { + return x.YdbOptions + } + } + return nil +} + type isTGenericDataSourceInstance_Options interface { isTGenericDataSourceInstance_Options() } @@ -1215,6 +1317,10 @@ type TGenericDataSourceInstance_IcebergOptions struct { IcebergOptions *TIcebergDataSourceOptions `protobuf:"bytes,14,opt,name=iceberg_options,json=icebergOptions,proto3,oneof"` } +type TGenericDataSourceInstance_YdbOptions struct { + YdbOptions *TYdbDataSourceOptions `protobuf:"bytes,15,opt,name=ydb_options,json=ydbOptions,proto3,oneof"` +} + func (*TGenericDataSourceInstance_PgOptions) isTGenericDataSourceInstance_Options() {} func (*TGenericDataSourceInstance_ChOptions) isTGenericDataSourceInstance_Options() {} @@ -1231,6 +1337,8 @@ func (*TGenericDataSourceInstance_MongodbOptions) isTGenericDataSourceInstance_O func (*TGenericDataSourceInstance_IcebergOptions) isTGenericDataSourceInstance_Options() {} +func (*TGenericDataSourceInstance_YdbOptions) isTGenericDataSourceInstance_Options() {} + type TGenericCredentials_TBasic struct { state protoimpl.MessageState `protogen:"open.v1"` Username string `protobuf:"bytes,1,opt,name=username,proto3" json:"username,omitempty"` @@ -1241,7 +1349,7 @@ type TGenericCredentials_TBasic struct { func (x *TGenericCredentials_TBasic) Reset() { *x = TGenericCredentials_TBasic{} - mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[13] + mi := 
&file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1253,7 +1361,7 @@ func (x *TGenericCredentials_TBasic) String() string { func (*TGenericCredentials_TBasic) ProtoMessage() {} func (x *TGenericCredentials_TBasic) ProtoReflect() protoreflect.Message { - mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[13] + mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[14] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1293,7 +1401,7 @@ type TGenericCredentials_TToken struct { func (x *TGenericCredentials_TToken) Reset() { *x = TGenericCredentials_TToken{} - mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[14] + mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1305,7 +1413,7 @@ func (x *TGenericCredentials_TToken) String() string { func (*TGenericCredentials_TToken) ProtoMessage() {} func (x *TGenericCredentials_TToken) ProtoReflect() protoreflect.Message { - mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[14] + mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[15] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1344,7 +1452,7 @@ type TIcebergCatalog_THadoop struct { func (x *TIcebergCatalog_THadoop) Reset() { *x = TIcebergCatalog_THadoop{} - mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[15] + mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1356,7 +1464,7 @@ func (x *TIcebergCatalog_THadoop) String() string { func (*TIcebergCatalog_THadoop) ProtoMessage() {} func (x *TIcebergCatalog_THadoop) ProtoReflect() protoreflect.Message { - mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[15] + mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[16] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1384,7 +1492,7 @@ type TIcebergCatalog_THiveMetastore struct { func (x *TIcebergCatalog_THiveMetastore) Reset() { *x = TIcebergCatalog_THiveMetastore{} - mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[16] + mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[17] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1396,7 +1504,7 @@ func (x *TIcebergCatalog_THiveMetastore) String() string { func (*TIcebergCatalog_THiveMetastore) ProtoMessage() {} func (x *TIcebergCatalog_THiveMetastore) ProtoReflect() protoreflect.Message { - mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[16] + mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[17] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1437,7 +1545,7 @@ type TIcebergWarehouse_TS3 struct { func (x *TIcebergWarehouse_TS3) Reset() { *x = TIcebergWarehouse_TS3{} - mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[17] + mi := 
&file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[18] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1449,7 +1557,7 @@ func (x *TIcebergWarehouse_TS3) String() string { func (*TIcebergWarehouse_TS3) ProtoMessage() {} func (x *TIcebergWarehouse_TS3) ProtoReflect() protoreflect.Message { - mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[17] + mi := &file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[18] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1607,86 +1715,101 @@ var file_yql_essentials_providers_common_proto_gateways_config_proto_rawDesc = s 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x12, 0x35, 0x0a, 0x09, 0x77, 0x61, 0x72, 0x65, 0x68, 0x6f, 0x75, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x4e, 0x59, 0x71, 0x6c, 0x2e, 0x54, 0x49, 0x63, 0x65, 0x62, 0x65, 0x72, 0x67, 0x57, 0x61, 0x72, 0x65, 0x68, 0x6f, 0x75, - 0x73, 0x65, 0x52, 0x09, 0x77, 0x61, 0x72, 0x65, 0x68, 0x6f, 0x75, 0x73, 0x65, 0x22, 0xeb, 0x06, - 0x0a, 0x1a, 0x54, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x12, 0x30, 0x0a, 0x04, - 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1c, 0x2e, 0x4e, 0x59, 0x71, - 0x6c, 0x2e, 0x45, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x4b, 0x69, 0x6e, 0x64, 0x52, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x12, 0x32, - 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x16, 0x2e, 0x4e, 0x59, 0x71, 0x6c, 0x2e, 0x54, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, - 0x45, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x08, 0x65, 0x6e, 0x64, 0x70, 0x6f, 0x69, - 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x64, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x12, 0x3b, - 0x0a, 0x0b, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x73, 0x18, 0x04, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x4e, 0x59, 0x71, 0x6c, 0x2e, 0x54, 0x47, 0x65, 0x6e, 0x65, - 0x72, 0x69, 0x63, 0x43, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x73, 0x52, 0x0b, - 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x73, 0x12, 0x17, 0x0a, 0x07, 0x75, - 0x73, 0x65, 0x5f, 0x74, 0x6c, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x75, 0x73, - 0x65, 0x54, 0x6c, 0x73, 0x12, 0x32, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, - 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x4e, 0x59, 0x71, 0x6c, 0x2e, 0x45, 0x47, - 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x52, 0x08, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x12, 0x43, 0x0a, 0x0a, 0x70, 0x67, 0x5f, 0x6f, - 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x4e, - 0x59, 0x71, 0x6c, 0x2e, 0x54, 0x50, 0x6f, 0x73, 0x74, 0x67, 0x72, 0x65, 0x53, 0x51, 0x4c, 0x44, - 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x48, 0x00, 0x52, 0x09, 0x70, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x43, 0x0a, - 0x0a, 0x63, 0x68, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x22, 0x2e, 0x4e, 0x59, 0x71, 0x6c, 0x2e, 0x54, 0x43, 0x6c, 0x69, 
0x63, 0x6b, 0x68, - 0x6f, 0x75, 0x73, 0x65, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4f, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x09, 0x63, 0x68, 0x4f, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x12, 0x3b, 0x0a, 0x0a, 0x73, 0x33, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x4e, 0x59, 0x71, 0x6c, 0x2e, 0x54, 0x53, - 0x33, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x48, 0x00, 0x52, 0x09, 0x73, 0x33, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, - 0x42, 0x0a, 0x0a, 0x67, 0x70, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x0a, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x4e, 0x59, 0x71, 0x6c, 0x2e, 0x54, 0x47, 0x72, 0x65, 0x65, - 0x6e, 0x70, 0x6c, 0x75, 0x6d, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4f, - 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x09, 0x67, 0x70, 0x4f, 0x70, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x12, 0x47, 0x0a, 0x0e, 0x6f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x5f, 0x6f, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x4e, 0x59, - 0x71, 0x6c, 0x2e, 0x54, 0x4f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x0d, 0x6f, - 0x72, 0x61, 0x63, 0x6c, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x4a, 0x0a, 0x0f, - 0x6c, 0x6f, 0x67, 0x67, 0x69, 0x6e, 0x67, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, - 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x4e, 0x59, 0x71, 0x6c, 0x2e, 0x54, 0x4c, 0x6f, - 0x67, 0x67, 0x69, 0x6e, 0x67, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4f, - 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x0e, 0x6c, 0x6f, 0x67, 0x67, 0x69, 0x6e, - 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x4a, 0x0a, 0x0f, 0x6d, 0x6f, 0x6e, 0x67, - 0x6f, 0x64, 0x62, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x1f, 0x2e, 0x4e, 0x59, 0x71, 0x6c, 0x2e, 0x54, 0x4d, 0x6f, 0x6e, 0x67, 0x6f, 0x44, - 0x62, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x48, 0x00, 0x52, 0x0e, 0x6d, 0x6f, 0x6e, 0x67, 0x6f, 0x64, 0x62, 0x4f, 0x70, 0x74, - 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x4a, 0x0a, 0x0f, 0x69, 0x63, 0x65, 0x62, 0x65, 0x72, 0x67, 0x5f, - 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, - 0x4e, 0x59, 0x71, 0x6c, 0x2e, 0x54, 0x49, 0x63, 0x65, 0x62, 0x65, 0x72, 0x67, 0x44, 0x61, 0x74, - 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, - 0x52, 0x0e, 0x69, 0x63, 0x65, 0x62, 0x65, 0x72, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x42, 0x09, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2a, 0xf7, 0x01, 0x0a, 0x16, - 0x45, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x4b, 0x69, 0x6e, 0x64, 0x12, 0x20, 0x0a, 0x1c, 0x44, 0x41, 0x54, 0x41, 0x5f, 0x53, - 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x4b, 0x49, 0x4e, 0x44, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, - 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0e, 0x0a, 0x0a, 0x43, 0x4c, 0x49, 0x43, - 0x4b, 0x48, 0x4f, 0x55, 0x53, 0x45, 0x10, 0x01, 0x12, 0x0e, 0x0a, 0x0a, 0x50, 0x4f, 0x53, 0x54, - 0x47, 0x52, 0x45, 0x53, 0x51, 0x4c, 0x10, 0x02, 0x12, 0x06, 0x0a, 0x02, 0x53, 0x33, 0x10, 0x03, - 0x12, 
0x07, 0x0a, 0x03, 0x59, 0x44, 0x42, 0x10, 0x04, 0x12, 0x09, 0x0a, 0x05, 0x4d, 0x59, 0x53, - 0x51, 0x4c, 0x10, 0x05, 0x12, 0x11, 0x0a, 0x0d, 0x4d, 0x53, 0x5f, 0x53, 0x51, 0x4c, 0x5f, 0x53, - 0x45, 0x52, 0x56, 0x45, 0x52, 0x10, 0x06, 0x12, 0x0d, 0x0a, 0x09, 0x47, 0x52, 0x45, 0x45, 0x4e, - 0x50, 0x4c, 0x55, 0x4d, 0x10, 0x07, 0x12, 0x0a, 0x0a, 0x06, 0x4f, 0x52, 0x41, 0x43, 0x4c, 0x45, - 0x10, 0x08, 0x12, 0x0b, 0x0a, 0x07, 0x4c, 0x4f, 0x47, 0x47, 0x49, 0x4e, 0x47, 0x10, 0x09, 0x12, - 0x0c, 0x0a, 0x08, 0x4d, 0x4f, 0x4e, 0x47, 0x4f, 0x5f, 0x44, 0x42, 0x10, 0x0a, 0x12, 0x09, 0x0a, - 0x05, 0x52, 0x45, 0x44, 0x49, 0x53, 0x10, 0x0b, 0x12, 0x0e, 0x0a, 0x0a, 0x50, 0x52, 0x4f, 0x4d, - 0x45, 0x54, 0x48, 0x45, 0x55, 0x53, 0x10, 0x0c, 0x12, 0x0b, 0x0a, 0x07, 0x49, 0x43, 0x45, 0x42, - 0x45, 0x52, 0x47, 0x10, 0x0d, 0x12, 0x0e, 0x0a, 0x0a, 0x4f, 0x50, 0x45, 0x4e, 0x53, 0x45, 0x41, - 0x52, 0x43, 0x48, 0x10, 0x0e, 0x2a, 0x42, 0x0a, 0x10, 0x45, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, - 0x63, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x12, 0x18, 0x0a, 0x14, 0x50, 0x52, 0x4f, - 0x54, 0x4f, 0x43, 0x4f, 0x4c, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, - 0x44, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x4e, 0x41, 0x54, 0x49, 0x56, 0x45, 0x10, 0x01, 0x12, - 0x08, 0x0a, 0x04, 0x48, 0x54, 0x54, 0x50, 0x10, 0x02, 0x42, 0x34, 0x5a, 0x32, 0x67, 0x69, 0x74, - 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x79, 0x64, 0x62, 0x2d, 0x70, 0x6c, 0x61, 0x74, - 0x66, 0x6f, 0x72, 0x6d, 0x2f, 0x66, 0x71, 0x2d, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x2d, 0x67, 0x6f, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x62, - 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x73, 0x65, 0x52, 0x09, 0x77, 0x61, 0x72, 0x65, 0x68, 0x6f, 0x75, 0x73, 0x65, 0x22, 0xb3, 0x01, + 0x0a, 0x15, 0x54, 0x59, 0x64, 0x62, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x58, 0x0a, 0x11, 0x71, 0x75, 0x65, 0x72, 0x79, + 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0e, 0x32, 0x2c, 0x2e, 0x4e, 0x59, 0x71, 0x6c, 0x2e, 0x54, 0x59, 0x64, 0x62, 0x44, 0x61, + 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, + 0x45, 0x51, 0x75, 0x65, 0x72, 0x79, 0x44, 0x61, 0x74, 0x61, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, + 0x52, 0x0f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x44, 0x61, 0x74, 0x61, 0x46, 0x6f, 0x72, 0x6d, 0x61, + 0x74, 0x22, 0x40, 0x0a, 0x10, 0x45, 0x51, 0x75, 0x65, 0x72, 0x79, 0x44, 0x61, 0x74, 0x61, 0x46, + 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x21, 0x0a, 0x1d, 0x51, 0x55, 0x45, 0x52, 0x59, 0x5f, 0x44, + 0x41, 0x54, 0x41, 0x5f, 0x46, 0x4f, 0x52, 0x4d, 0x41, 0x54, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, + 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x41, 0x52, 0x52, 0x4f, + 0x57, 0x10, 0x01, 0x22, 0xab, 0x07, 0x0a, 0x1a, 0x54, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, + 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, + 0x63, 0x65, 0x12, 0x30, 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, + 0x32, 0x1c, 0x2e, 0x4e, 0x59, 0x71, 0x6c, 0x2e, 0x45, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, + 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4b, 0x69, 0x6e, 0x64, 0x52, 0x04, + 0x6b, 0x69, 0x6e, 0x64, 0x12, 0x32, 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e, 0x74, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x4e, 0x59, 0x71, 0x6c, 0x2e, 
0x54, 0x47, + 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x45, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x08, + 0x65, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x64, 0x61, 0x74, 0x61, + 0x62, 0x61, 0x73, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, + 0x62, 0x61, 0x73, 0x65, 0x12, 0x3b, 0x0a, 0x0b, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, + 0x61, 0x6c, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x4e, 0x59, 0x71, 0x6c, + 0x2e, 0x54, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x43, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, + 0x69, 0x61, 0x6c, 0x73, 0x52, 0x0b, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, + 0x73, 0x12, 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x5f, 0x74, 0x6c, 0x73, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x06, 0x75, 0x73, 0x65, 0x54, 0x6c, 0x73, 0x12, 0x32, 0x0a, 0x08, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x4e, + 0x59, 0x71, 0x6c, 0x2e, 0x45, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x50, 0x72, 0x6f, 0x74, + 0x6f, 0x63, 0x6f, 0x6c, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x12, 0x43, + 0x0a, 0x0a, 0x70, 0x67, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x07, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x4e, 0x59, 0x71, 0x6c, 0x2e, 0x54, 0x50, 0x6f, 0x73, 0x74, 0x67, + 0x72, 0x65, 0x53, 0x51, 0x4c, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4f, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x09, 0x70, 0x67, 0x4f, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x12, 0x43, 0x0a, 0x0a, 0x63, 0x68, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x4e, 0x59, 0x71, 0x6c, 0x2e, 0x54, + 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x68, 0x6f, 0x75, 0x73, 0x65, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x09, 0x63, + 0x68, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x3b, 0x0a, 0x0a, 0x73, 0x33, 0x5f, 0x6f, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x4e, + 0x59, 0x71, 0x6c, 0x2e, 0x54, 0x53, 0x33, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x09, 0x73, 0x33, 0x4f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x42, 0x0a, 0x0a, 0x67, 0x70, 0x5f, 0x6f, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x4e, 0x59, 0x71, 0x6c, + 0x2e, 0x54, 0x47, 0x72, 0x65, 0x65, 0x6e, 0x70, 0x6c, 0x75, 0x6d, 0x44, 0x61, 0x74, 0x61, 0x53, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x09, + 0x67, 0x70, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x47, 0x0a, 0x0e, 0x6f, 0x72, 0x61, + 0x63, 0x6c, 0x65, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1e, 0x2e, 0x4e, 0x59, 0x71, 0x6c, 0x2e, 0x54, 0x4f, 0x72, 0x61, 0x63, 0x6c, 0x65, + 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x48, 0x00, 0x52, 0x0d, 0x6f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x12, 0x4a, 0x0a, 0x0f, 0x6c, 0x6f, 0x67, 0x67, 0x69, 0x6e, 0x67, 0x5f, 0x6f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x4e, 0x59, + 0x71, 0x6c, 0x2e, 0x54, 0x4c, 0x6f, 0x67, 0x67, 0x69, 0x6e, 0x67, 0x44, 0x61, 0x74, 0x61, 0x53, + 0x6f, 0x75, 
0x72, 0x63, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x0e, + 0x6c, 0x6f, 0x67, 0x67, 0x69, 0x6e, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x4a, + 0x0a, 0x0f, 0x6d, 0x6f, 0x6e, 0x67, 0x6f, 0x64, 0x62, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x4e, 0x59, 0x71, 0x6c, 0x2e, 0x54, + 0x4d, 0x6f, 0x6e, 0x67, 0x6f, 0x44, 0x62, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x0e, 0x6d, 0x6f, 0x6e, 0x67, + 0x6f, 0x64, 0x62, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x4a, 0x0a, 0x0f, 0x69, 0x63, + 0x65, 0x62, 0x65, 0x72, 0x67, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x0e, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x4e, 0x59, 0x71, 0x6c, 0x2e, 0x54, 0x49, 0x63, 0x65, 0x62, + 0x65, 0x72, 0x67, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4f, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x0e, 0x69, 0x63, 0x65, 0x62, 0x65, 0x72, 0x67, 0x4f, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x3e, 0x0a, 0x0b, 0x79, 0x64, 0x62, 0x5f, 0x6f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x4e, 0x59, + 0x71, 0x6c, 0x2e, 0x54, 0x59, 0x64, 0x62, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x0a, 0x79, 0x64, 0x62, 0x4f, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x42, 0x09, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x2a, 0xf7, 0x01, 0x0a, 0x16, 0x45, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x44, 0x61, + 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4b, 0x69, 0x6e, 0x64, 0x12, 0x20, 0x0a, 0x1c, + 0x44, 0x41, 0x54, 0x41, 0x5f, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x4b, 0x49, 0x4e, 0x44, + 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0e, + 0x0a, 0x0a, 0x43, 0x4c, 0x49, 0x43, 0x4b, 0x48, 0x4f, 0x55, 0x53, 0x45, 0x10, 0x01, 0x12, 0x0e, + 0x0a, 0x0a, 0x50, 0x4f, 0x53, 0x54, 0x47, 0x52, 0x45, 0x53, 0x51, 0x4c, 0x10, 0x02, 0x12, 0x06, + 0x0a, 0x02, 0x53, 0x33, 0x10, 0x03, 0x12, 0x07, 0x0a, 0x03, 0x59, 0x44, 0x42, 0x10, 0x04, 0x12, + 0x09, 0x0a, 0x05, 0x4d, 0x59, 0x53, 0x51, 0x4c, 0x10, 0x05, 0x12, 0x11, 0x0a, 0x0d, 0x4d, 0x53, + 0x5f, 0x53, 0x51, 0x4c, 0x5f, 0x53, 0x45, 0x52, 0x56, 0x45, 0x52, 0x10, 0x06, 0x12, 0x0d, 0x0a, + 0x09, 0x47, 0x52, 0x45, 0x45, 0x4e, 0x50, 0x4c, 0x55, 0x4d, 0x10, 0x07, 0x12, 0x0a, 0x0a, 0x06, + 0x4f, 0x52, 0x41, 0x43, 0x4c, 0x45, 0x10, 0x08, 0x12, 0x0b, 0x0a, 0x07, 0x4c, 0x4f, 0x47, 0x47, + 0x49, 0x4e, 0x47, 0x10, 0x09, 0x12, 0x0c, 0x0a, 0x08, 0x4d, 0x4f, 0x4e, 0x47, 0x4f, 0x5f, 0x44, + 0x42, 0x10, 0x0a, 0x12, 0x09, 0x0a, 0x05, 0x52, 0x45, 0x44, 0x49, 0x53, 0x10, 0x0b, 0x12, 0x0e, + 0x0a, 0x0a, 0x50, 0x52, 0x4f, 0x4d, 0x45, 0x54, 0x48, 0x45, 0x55, 0x53, 0x10, 0x0c, 0x12, 0x0b, + 0x0a, 0x07, 0x49, 0x43, 0x45, 0x42, 0x45, 0x52, 0x47, 0x10, 0x0d, 0x12, 0x0e, 0x0a, 0x0a, 0x4f, + 0x50, 0x45, 0x4e, 0x53, 0x45, 0x41, 0x52, 0x43, 0x48, 0x10, 0x0e, 0x2a, 0x42, 0x0a, 0x10, 0x45, + 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x12, + 0x18, 0x0a, 0x14, 0x50, 0x52, 0x4f, 0x54, 0x4f, 0x43, 0x4f, 0x4c, 0x5f, 0x55, 0x4e, 0x53, 0x50, + 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x4e, 0x41, 0x54, + 0x49, 0x56, 0x45, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x48, 0x54, 0x54, 0x50, 0x10, 0x02, 0x42, + 0x34, 0x5a, 0x32, 0x67, 0x69, 0x74, 
0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x79, 0x64, + 0x62, 0x2d, 0x70, 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x6d, 0x2f, 0x66, 0x71, 0x2d, 0x63, 0x6f, + 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x2d, 0x67, 0x6f, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x63, + 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, }) var ( @@ -1701,61 +1824,65 @@ func file_yql_essentials_providers_common_proto_gateways_config_proto_rawDescGZI return file_yql_essentials_providers_common_proto_gateways_config_proto_rawDescData } -var file_yql_essentials_providers_common_proto_gateways_config_proto_enumTypes = make([]protoimpl.EnumInfo, 5) -var file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes = make([]protoimpl.MessageInfo, 18) +var file_yql_essentials_providers_common_proto_gateways_config_proto_enumTypes = make([]protoimpl.EnumInfo, 6) +var file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes = make([]protoimpl.MessageInfo, 19) var file_yql_essentials_providers_common_proto_gateways_config_proto_goTypes = []any{ (EGenericDataSourceKind)(0), // 0: NYql.EGenericDataSourceKind (EGenericProtocol)(0), // 1: NYql.EGenericProtocol (TMongoDbDataSourceOptions_EReadingMode)(0), // 2: NYql.TMongoDbDataSourceOptions.EReadingMode (TMongoDbDataSourceOptions_EUnexpectedTypeDisplayMode)(0), // 3: NYql.TMongoDbDataSourceOptions.EUnexpectedTypeDisplayMode (TMongoDbDataSourceOptions_EUnsupportedTypeDisplayMode)(0), // 4: NYql.TMongoDbDataSourceOptions.EUnsupportedTypeDisplayMode - (*TGenericEndpoint)(nil), // 5: NYql.TGenericEndpoint - (*TGenericCredentials)(nil), // 6: NYql.TGenericCredentials - (*TPostgreSQLDataSourceOptions)(nil), // 7: NYql.TPostgreSQLDataSourceOptions - (*TClickhouseDataSourceOptions)(nil), // 8: NYql.TClickhouseDataSourceOptions - (*TS3DataSourceOptions)(nil), // 9: NYql.TS3DataSourceOptions - (*TGreenplumDataSourceOptions)(nil), // 10: NYql.TGreenplumDataSourceOptions - (*TOracleDataSourceOptions)(nil), // 11: NYql.TOracleDataSourceOptions - (*TLoggingDataSourceOptions)(nil), // 12: NYql.TLoggingDataSourceOptions - (*TMongoDbDataSourceOptions)(nil), // 13: NYql.TMongoDbDataSourceOptions - (*TIcebergCatalog)(nil), // 14: NYql.TIcebergCatalog - (*TIcebergWarehouse)(nil), // 15: NYql.TIcebergWarehouse - (*TIcebergDataSourceOptions)(nil), // 16: NYql.TIcebergDataSourceOptions - (*TGenericDataSourceInstance)(nil), // 17: NYql.TGenericDataSourceInstance - (*TGenericCredentials_TBasic)(nil), // 18: NYql.TGenericCredentials.TBasic - (*TGenericCredentials_TToken)(nil), // 19: NYql.TGenericCredentials.TToken - (*TIcebergCatalog_THadoop)(nil), // 20: NYql.TIcebergCatalog.THadoop - (*TIcebergCatalog_THiveMetastore)(nil), // 21: NYql.TIcebergCatalog.THiveMetastore - (*TIcebergWarehouse_TS3)(nil), // 22: NYql.TIcebergWarehouse.TS3 + (TYdbDataSourceOptions_EQueryDataFormat)(0), // 5: NYql.TYdbDataSourceOptions.EQueryDataFormat + (*TGenericEndpoint)(nil), // 6: NYql.TGenericEndpoint + (*TGenericCredentials)(nil), // 7: NYql.TGenericCredentials + (*TPostgreSQLDataSourceOptions)(nil), // 8: NYql.TPostgreSQLDataSourceOptions + (*TClickhouseDataSourceOptions)(nil), // 9: NYql.TClickhouseDataSourceOptions + (*TS3DataSourceOptions)(nil), // 10: NYql.TS3DataSourceOptions + (*TGreenplumDataSourceOptions)(nil), // 11: NYql.TGreenplumDataSourceOptions + (*TOracleDataSourceOptions)(nil), // 12: NYql.TOracleDataSourceOptions + (*TLoggingDataSourceOptions)(nil), // 13: NYql.TLoggingDataSourceOptions + (*TMongoDbDataSourceOptions)(nil), // 14: 
NYql.TMongoDbDataSourceOptions + (*TIcebergCatalog)(nil), // 15: NYql.TIcebergCatalog + (*TIcebergWarehouse)(nil), // 16: NYql.TIcebergWarehouse + (*TIcebergDataSourceOptions)(nil), // 17: NYql.TIcebergDataSourceOptions + (*TYdbDataSourceOptions)(nil), // 18: NYql.TYdbDataSourceOptions + (*TGenericDataSourceInstance)(nil), // 19: NYql.TGenericDataSourceInstance + (*TGenericCredentials_TBasic)(nil), // 20: NYql.TGenericCredentials.TBasic + (*TGenericCredentials_TToken)(nil), // 21: NYql.TGenericCredentials.TToken + (*TIcebergCatalog_THadoop)(nil), // 22: NYql.TIcebergCatalog.THadoop + (*TIcebergCatalog_THiveMetastore)(nil), // 23: NYql.TIcebergCatalog.THiveMetastore + (*TIcebergWarehouse_TS3)(nil), // 24: NYql.TIcebergWarehouse.TS3 } var file_yql_essentials_providers_common_proto_gateways_config_proto_depIdxs = []int32{ - 18, // 0: NYql.TGenericCredentials.basic:type_name -> NYql.TGenericCredentials.TBasic - 19, // 1: NYql.TGenericCredentials.token:type_name -> NYql.TGenericCredentials.TToken + 20, // 0: NYql.TGenericCredentials.basic:type_name -> NYql.TGenericCredentials.TBasic + 21, // 1: NYql.TGenericCredentials.token:type_name -> NYql.TGenericCredentials.TToken 2, // 2: NYql.TMongoDbDataSourceOptions.reading_mode:type_name -> NYql.TMongoDbDataSourceOptions.EReadingMode 3, // 3: NYql.TMongoDbDataSourceOptions.unexpected_type_display_mode:type_name -> NYql.TMongoDbDataSourceOptions.EUnexpectedTypeDisplayMode 4, // 4: NYql.TMongoDbDataSourceOptions.unsupported_type_display_mode:type_name -> NYql.TMongoDbDataSourceOptions.EUnsupportedTypeDisplayMode - 20, // 5: NYql.TIcebergCatalog.hadoop:type_name -> NYql.TIcebergCatalog.THadoop - 21, // 6: NYql.TIcebergCatalog.hive_metastore:type_name -> NYql.TIcebergCatalog.THiveMetastore - 22, // 7: NYql.TIcebergWarehouse.s3:type_name -> NYql.TIcebergWarehouse.TS3 - 14, // 8: NYql.TIcebergDataSourceOptions.catalog:type_name -> NYql.TIcebergCatalog - 15, // 9: NYql.TIcebergDataSourceOptions.warehouse:type_name -> NYql.TIcebergWarehouse - 0, // 10: NYql.TGenericDataSourceInstance.kind:type_name -> NYql.EGenericDataSourceKind - 5, // 11: NYql.TGenericDataSourceInstance.endpoint:type_name -> NYql.TGenericEndpoint - 6, // 12: NYql.TGenericDataSourceInstance.credentials:type_name -> NYql.TGenericCredentials - 1, // 13: NYql.TGenericDataSourceInstance.protocol:type_name -> NYql.EGenericProtocol - 7, // 14: NYql.TGenericDataSourceInstance.pg_options:type_name -> NYql.TPostgreSQLDataSourceOptions - 8, // 15: NYql.TGenericDataSourceInstance.ch_options:type_name -> NYql.TClickhouseDataSourceOptions - 9, // 16: NYql.TGenericDataSourceInstance.s3_options:type_name -> NYql.TS3DataSourceOptions - 10, // 17: NYql.TGenericDataSourceInstance.gp_options:type_name -> NYql.TGreenplumDataSourceOptions - 11, // 18: NYql.TGenericDataSourceInstance.oracle_options:type_name -> NYql.TOracleDataSourceOptions - 12, // 19: NYql.TGenericDataSourceInstance.logging_options:type_name -> NYql.TLoggingDataSourceOptions - 13, // 20: NYql.TGenericDataSourceInstance.mongodb_options:type_name -> NYql.TMongoDbDataSourceOptions - 16, // 21: NYql.TGenericDataSourceInstance.iceberg_options:type_name -> NYql.TIcebergDataSourceOptions - 22, // [22:22] is the sub-list for method output_type - 22, // [22:22] is the sub-list for method input_type - 22, // [22:22] is the sub-list for extension type_name - 22, // [22:22] is the sub-list for extension extendee - 0, // [0:22] is the sub-list for field type_name + 22, // 5: NYql.TIcebergCatalog.hadoop:type_name -> NYql.TIcebergCatalog.THadoop + 23, // 6: 
NYql.TIcebergCatalog.hive_metastore:type_name -> NYql.TIcebergCatalog.THiveMetastore + 24, // 7: NYql.TIcebergWarehouse.s3:type_name -> NYql.TIcebergWarehouse.TS3 + 15, // 8: NYql.TIcebergDataSourceOptions.catalog:type_name -> NYql.TIcebergCatalog + 16, // 9: NYql.TIcebergDataSourceOptions.warehouse:type_name -> NYql.TIcebergWarehouse + 5, // 10: NYql.TYdbDataSourceOptions.query_data_format:type_name -> NYql.TYdbDataSourceOptions.EQueryDataFormat + 0, // 11: NYql.TGenericDataSourceInstance.kind:type_name -> NYql.EGenericDataSourceKind + 6, // 12: NYql.TGenericDataSourceInstance.endpoint:type_name -> NYql.TGenericEndpoint + 7, // 13: NYql.TGenericDataSourceInstance.credentials:type_name -> NYql.TGenericCredentials + 1, // 14: NYql.TGenericDataSourceInstance.protocol:type_name -> NYql.EGenericProtocol + 8, // 15: NYql.TGenericDataSourceInstance.pg_options:type_name -> NYql.TPostgreSQLDataSourceOptions + 9, // 16: NYql.TGenericDataSourceInstance.ch_options:type_name -> NYql.TClickhouseDataSourceOptions + 10, // 17: NYql.TGenericDataSourceInstance.s3_options:type_name -> NYql.TS3DataSourceOptions + 11, // 18: NYql.TGenericDataSourceInstance.gp_options:type_name -> NYql.TGreenplumDataSourceOptions + 12, // 19: NYql.TGenericDataSourceInstance.oracle_options:type_name -> NYql.TOracleDataSourceOptions + 13, // 20: NYql.TGenericDataSourceInstance.logging_options:type_name -> NYql.TLoggingDataSourceOptions + 14, // 21: NYql.TGenericDataSourceInstance.mongodb_options:type_name -> NYql.TMongoDbDataSourceOptions + 17, // 22: NYql.TGenericDataSourceInstance.iceberg_options:type_name -> NYql.TIcebergDataSourceOptions + 18, // 23: NYql.TGenericDataSourceInstance.ydb_options:type_name -> NYql.TYdbDataSourceOptions + 24, // [24:24] is the sub-list for method output_type + 24, // [24:24] is the sub-list for method input_type + 24, // [24:24] is the sub-list for extension type_name + 24, // [24:24] is the sub-list for extension extendee + 0, // [0:24] is the sub-list for field type_name } func init() { file_yql_essentials_providers_common_proto_gateways_config_proto_init() } @@ -1774,7 +1901,7 @@ func file_yql_essentials_providers_common_proto_gateways_config_proto_init() { file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[10].OneofWrappers = []any{ (*TIcebergWarehouse_S3)(nil), } - file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[12].OneofWrappers = []any{ + file_yql_essentials_providers_common_proto_gateways_config_proto_msgTypes[13].OneofWrappers = []any{ (*TGenericDataSourceInstance_PgOptions)(nil), (*TGenericDataSourceInstance_ChOptions)(nil), (*TGenericDataSourceInstance_S3Options)(nil), @@ -1783,14 +1910,15 @@ func file_yql_essentials_providers_common_proto_gateways_config_proto_init() { (*TGenericDataSourceInstance_LoggingOptions)(nil), (*TGenericDataSourceInstance_MongodbOptions)(nil), (*TGenericDataSourceInstance_IcebergOptions)(nil), + (*TGenericDataSourceInstance_YdbOptions)(nil), } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: unsafe.Slice(unsafe.StringData(file_yql_essentials_providers_common_proto_gateways_config_proto_rawDesc), len(file_yql_essentials_providers_common_proto_gateways_config_proto_rawDesc)), - NumEnums: 5, - NumMessages: 18, + NumEnums: 6, + NumMessages: 19, NumExtensions: 0, NumServices: 0, }, diff --git a/app/bench/app.go b/app/bench/app.go index 2ff88336..fb99c4e1 100644 --- a/app/bench/app.go +++ b/app/bench/app.go @@ -2,6 +2,7 @@ package 
bench import ( "context" + "errors" "fmt" "os" "os/signal" @@ -17,15 +18,15 @@ import ( func validateConfig(logger *zap.Logger, cfg *config.TBenchmarkConfig) error { if cfg.GetServerRemote() == nil && cfg.GetServerLocal() == nil { - return fmt.Errorf("you must provide either local or remote configuration for connector") + return errors.New("you must provide either local or remote configuration for connector") } if cfg.GetDataSourceInstance() == nil { - return fmt.Errorf("you must provide data source instance") + return errors.New("you must provide data source instance") } if cfg.GetResultDir() == "" { - return fmt.Errorf("empty result dir") + return errors.New("empty result dir") } if _, err := os.Stat(cfg.GetResultDir()); os.IsNotExist(err) { diff --git a/app/bench/report.go b/app/bench/report.go index ee6f8bfc..0abd28b4 100644 --- a/app/bench/report.go +++ b/app/bench/report.go @@ -23,7 +23,7 @@ func (t *jsonTime) MarshalJSON() ([]byte, error) { return nil, nil } - return []byte(fmt.Sprintf(`"%s"`, t.Time.Format(layout))), nil + return []byte(fmt.Sprintf(`"%s"`, t.Format(layout))), nil } type report struct { diff --git a/app/bench/report_generator.go b/app/bench/report_generator.go index c62b072d..3eee649a 100644 --- a/app/bench/report_generator.go +++ b/app/bench/report_generator.go @@ -28,6 +28,7 @@ type reportGenerator struct { func (agg *reportGenerator) start() { agg.wg.Add(1) + go agg.progress() } @@ -81,6 +82,7 @@ func (agg *reportGenerator) stop() *report { agg.wg.Wait() finalReport := agg.makeReport() + finalReport.StopTime = &jsonTime{time.Now()} agg.logger.Info("FINAL RESULT: " + finalReport.String()) diff --git a/app/bench/test_case_runner.go b/app/bench/test_case_runner.go index f20099b3..58d6c4fa 100644 --- a/app/bench/test_case_runner.go +++ b/app/bench/test_case_runner.go @@ -105,6 +105,7 @@ func (tcr *testCaseRunner) run() error { if err := limiter.Wait(ctx); err != nil { if errors.Is(err, context.DeadlineExceeded) { tcr.logger.Info("load session finished") + return nil } @@ -136,7 +137,6 @@ func (tcr *testCaseRunner) executeScenario() error { }, tcr.cfg.Table, ) - if err != nil { return fmt.Errorf("describe table: %w", err) } @@ -226,7 +226,8 @@ func (tcr *testCaseRunner) readSplits(splits []*api_service_protos.TSplit) error } func (tcr *testCaseRunner) finish() *report { - tcr.srv.Stop() // terminate server + tcr.srv.Stop() // terminate server + return tcr.reportGenerator.stop() // obtain final report } diff --git a/app/client/connector/list_splits.go b/app/client/connector/list_splits.go index c5ac12ce..fbd67715 100644 --- a/app/client/connector/list_splits.go +++ b/app/client/connector/list_splits.go @@ -58,7 +58,7 @@ func listSplits(cmd *cobra.Command, _ []string) error { fmt.Println("Split select: ", common.MustProtobufToJSONString(split.Select, false, "")) // fq-connector-go serializes split descriptions to JSON, so they're always human-readable fmt.Println("Split description: ", string(split.GetDescription())) - fmt.Printf("\n") + fmt.Println("") } } diff --git a/app/client/observation/cmd.go b/app/client/observation/cmd.go index a63e6421..3bdf0400 100644 --- a/app/client/observation/cmd.go +++ b/app/client/observation/cmd.go @@ -147,7 +147,7 @@ func getClient(cmd *cobra.Command) (observation.ObservationServiceClient, *grpc. 
} // Set up a connection to the server - conn, err := grpc.Dial(endpoint, grpc.WithTransportCredentials(insecure.NewCredentials())) + conn, err := grpc.NewClient(endpoint, grpc.WithTransportCredentials(insecure.NewCredentials())) if err != nil { return nil, nil, fmt.Errorf("failed to connect to server: %w", err) } diff --git a/app/client/observation/dump.go b/app/client/observation/dump.go index 10b0e1a4..ed5acf4e 100644 --- a/app/client/observation/dump.go +++ b/app/client/observation/dump.go @@ -41,6 +41,7 @@ func dumpIncomingQueries(cmd *cobra.Command) error { queries, err := fetchIncomingQueries(endpoint, logger) if err != nil { fmt.Printf("Error fetching from %s: %v\n", endpoint, err) + continue } @@ -56,7 +57,7 @@ func dumpIncomingQueries(cmd *cobra.Command) error { } if len(allQueries) == 0 { - return fmt.Errorf("no incoming queries fetched from any endpoint") + return errors.New("no incoming queries fetched from any endpoint") } // Write to CSV file @@ -89,6 +90,7 @@ func dumpOutgoingQueries(cmd *cobra.Command) error { queries, err := fetchOutgoingQueries(endpoint, logger) if err != nil { fmt.Printf("Error fetching from %s: %v\n", endpoint, err) + continue } @@ -104,7 +106,7 @@ func dumpOutgoingQueries(cmd *cobra.Command) error { } if len(allQueries) == 0 { - return fmt.Errorf("no outgoing queries fetched from any endpoint") + return errors.New("no outgoing queries fetched from any endpoint") } // Write to CSV file @@ -127,7 +129,7 @@ func getDumpParams(cmd *cobra.Command) ([]string, string, error) { endpoints := strings.Split(endpointsStr, ",") if len(endpoints) == 0 { - return nil, "", fmt.Errorf("no endpoints provided") + return nil, "", errors.New("no endpoints provided") } // Get output file path @@ -152,7 +154,7 @@ func fetchIncomingQueries(endpoint string, logger *zap.Logger) ([]*observation.I logger.Info("connecting to endpoint for incoming queries", zap.String("endpoint", endpoint)) // Connect to the endpoint - conn, err := grpc.Dial(endpoint, grpc.WithTransportCredentials(insecure.NewCredentials())) + conn, err := grpc.NewClient(endpoint, grpc.WithTransportCredentials(insecure.NewCredentials())) if err != nil { return nil, fmt.Errorf("failed to connect to %s: %w", endpoint, err) } @@ -196,6 +198,7 @@ func fetchIncomingQueries(endpoint string, logger *zap.Logger) ([]*observation.I if resp.Error != nil && resp.Error.Status != 0 { logger.Warn("received error in stream", zap.String("message", resp.Error.Message)) + continue } @@ -235,7 +238,7 @@ func fetchOutgoingQueries(endpoint string, logger *zap.Logger) ([]*observation.O logger.Info("connecting to endpoint for outgoing queries", zap.String("endpoint", endpoint)) // Connect to the endpoint - conn, err := grpc.Dial(endpoint, grpc.WithTransportCredentials(insecure.NewCredentials())) + conn, err := grpc.NewClient(endpoint, grpc.WithTransportCredentials(insecure.NewCredentials())) if err != nil { return nil, fmt.Errorf("failed to connect to %s: %w", endpoint, err) } @@ -279,6 +282,7 @@ func fetchOutgoingQueries(endpoint string, logger *zap.Logger) ([]*observation.O if resp.Error != nil && resp.Error.Status != 0 { logger.Warn("received error in stream", zap.String("message", resp.Error.Message)) + continue } @@ -328,7 +332,7 @@ type outgoingQueryWithEndpoint struct { // writeIncomingQueriesToCSV writes incoming queries to a CSV file func writeIncomingQueriesToCSV(queries []*incomingQueryWithEndpoint, outputPath string) error { if len(queries) == 0 { - return fmt.Errorf("no queries to write") + return errors.New("no queries to 
write") } // Create CSV file @@ -362,11 +366,13 @@ func writeIncomingQueriesToCSV(queries []*incomingQueryWithEndpoint, outputPath // Write data for _, q := range queries { createdAt := "" + if q.CreatedAt != nil { createdAt = q.CreatedAt.AsTime().Format(time.RFC3339Nano) } finishedAt := "" + if q.FinishedAt != nil { finishedAt = q.FinishedAt.AsTime().Format(time.RFC3339Nano) } @@ -376,6 +382,7 @@ func writeIncomingQueriesToCSV(queries []*incomingQueryWithEndpoint, outputPath if q.CreatedAt != nil && q.FinishedAt != nil { elapsedTime := q.FinishedAt.AsTime().Sub(q.CreatedAt.AsTime()) + elapsedTimeMs = strconv.FormatInt(elapsedTime.Milliseconds(), 10) } else { elapsedTimeMs = "" @@ -405,7 +412,7 @@ func writeIncomingQueriesToCSV(queries []*incomingQueryWithEndpoint, outputPath // writeOutgoingQueriesToCSV writes outgoing queries to a CSV file func writeOutgoingQueriesToCSV(queries []*outgoingQueryWithEndpoint, outputPath string) error { if len(queries) == 0 { - return fmt.Errorf("no queries to write") + return errors.New("no queries to write") } // Create CSV file @@ -442,11 +449,13 @@ func writeOutgoingQueriesToCSV(queries []*outgoingQueryWithEndpoint, outputPath // Write data for _, q := range queries { createdAt := "" + if q.CreatedAt != nil { createdAt = q.CreatedAt.AsTime().Format(time.RFC3339Nano) } finishedAt := "" + if q.FinishedAt != nil { finishedAt = q.FinishedAt.AsTime().Format(time.RFC3339Nano) } @@ -456,6 +465,7 @@ func writeOutgoingQueriesToCSV(queries []*outgoingQueryWithEndpoint, outputPath if q.CreatedAt != nil && q.FinishedAt != nil { elapsedTime := q.FinishedAt.AsTime().Sub(q.CreatedAt.AsTime()) + elapsedTimeMs = strconv.FormatInt(elapsedTime.Milliseconds(), 10) } else { elapsedTimeMs = "" diff --git a/app/client/observation/list.go b/app/client/observation/list.go index 303b08ea..bd1925b7 100644 --- a/app/client/observation/list.go +++ b/app/client/observation/list.go @@ -50,6 +50,7 @@ func listIncomingQueries(cmd *cobra.Command, _ []string, state observation.Query if resp.Error != nil && resp.Error.Status != 0 { fmt.Printf("Error: %s\n", resp.Error.Message) + continue } @@ -121,6 +122,7 @@ func listOutgoingQueries(cmd *cobra.Command, _ []string, state observation.Query if resp.Error != nil && resp.Error.Status != 0 { fmt.Printf("Error: %s\n", resp.Error.Message) + continue } diff --git a/app/client/utils/flags.go b/app/client/utils/flags.go index bc397bfb..d0146c81 100644 --- a/app/client/utils/flags.go +++ b/app/client/utils/flags.go @@ -1,4 +1,4 @@ -package utils +package utils //nolint:revive const ( ConfigFlag = "config" diff --git a/app/client/utils/preset.go b/app/client/utils/preset.go index b032014d..08e17c79 100644 --- a/app/client/utils/preset.go +++ b/app/client/utils/preset.go @@ -1,4 +1,4 @@ -package utils +package utils //nolint:revive import ( "fmt" diff --git a/app/client/ydb/column_shard_benchmark_select.go b/app/client/ydb/column_shard_benchmark_select.go index d6e075c9..2c5aa968 100644 --- a/app/client/ydb/column_shard_benchmark_select.go +++ b/app/client/ydb/column_shard_benchmark_select.go @@ -37,13 +37,13 @@ func columnShardBenchmarkSelect(cmd *cobra.Command, _ []string) error { ctx := context.Background() connManager := rdbms_ydb.NewConnectionManager(ydbConfig, rdbms_utils.ConnectionManagerBase{}) + cs, err := connManager.Make(&rdbms_utils.ConnectionParams{ Ctx: ctx, Logger: preset.Logger, DataSourceInstance: preset.Cfg.DataSourceInstance, QueryPhase: rdbms_utils.QueryPhaseReadSplits, }) - if err != nil { return fmt.Errorf("make connection: %v", 
err) } @@ -147,7 +147,6 @@ func columnShardBenchmarkSelectSingleShard( return nil }) - if err != nil { return nil, fmt.Errorf("query do: %w", err) } diff --git a/app/client/ydb/column_shard_data_distribution.go b/app/client/ydb/column_shard_data_distribution.go index 41d871b9..62102953 100644 --- a/app/client/ydb/column_shard_data_distribution.go +++ b/app/client/ydb/column_shard_data_distribution.go @@ -31,13 +31,13 @@ func columnShardsDataDistribution(cmd *cobra.Command, _ []string) error { ctx := context.Background() connManager := ydb.NewConnectionManager(ydbConfig, rdbms_utils.ConnectionManagerBase{}) + cs, err := connManager.Make(&rdbms_utils.ConnectionParams{ Ctx: ctx, Logger: preset.Logger, DataSourceInstance: preset.Cfg.DataSourceInstance, QueryPhase: rdbms_utils.QueryPhaseReadSplits, }) - if err != nil { return fmt.Errorf("make connection: %v", err) } @@ -105,7 +105,6 @@ func columnShardsDataDistribution(cmd *cobra.Command, _ []string) error { return nil }) - if err != nil { resultChan <- rowsCountResult{shardId: shardId, err: err} } @@ -127,7 +126,7 @@ func columnShardsDataDistribution(cmd *cobra.Command, _ []string) error { mean := stat.Mean(totalRowsPerShard, nil) if mean == 0 { - return fmt.Errorf("coefficient of variation is undefined for mean = 0") + return errors.New("coefficient of variation is undefined for mean = 0") } stdDev := stat.StdDev(totalRowsPerShard, nil) diff --git a/app/client/ydb/common.go b/app/client/ydb/common.go index a340a82a..18997941 100644 --- a/app/client/ydb/common.go +++ b/app/client/ydb/common.go @@ -61,7 +61,6 @@ func getColumnShardIDs(ctx context.Context, driver *ydb.Driver, prefix string) ( return nil }) - if err != nil { return nil, fmt.Errorf("query: %w", err) } diff --git a/app/observation/config.go b/app/observation/config.go index 162ade7c..7e7a86a7 100644 --- a/app/observation/config.go +++ b/app/observation/config.go @@ -1,6 +1,7 @@ package observation import ( + "errors" "fmt" "github.com/ydb-platform/fq-connector-go/app/config" @@ -9,7 +10,7 @@ import ( func validateObservationServerConfig(cfg *config.TObservationServerConfig) error { if cfg.Endpoint == nil { - return fmt.Errorf("missing required field `endpoint`") + return errors.New("missing required field `endpoint`") } if err := validateObservationDiscoveryConfig(cfg.Discovery); err != nil { @@ -25,7 +26,7 @@ func validateObservationServerConfig(cfg *config.TObservationServerConfig) error func validateObservationDiscoveryConfig(cfg *config.TObservationDiscoveryConfig) error { if cfg == nil { - return fmt.Errorf("missing required field `discovery`") + return errors.New("missing required field `discovery`") } switch t := cfg.GetPayload().(type) { @@ -46,16 +47,16 @@ func validateObservationDiscoveryConfig(cfg *config.TObservationDiscoveryConfig) func validateObservationDiscoveryStaticConfig(cfg *config.TObservationDiscoveryConfig_TStaticDiscoveryConfig) error { if len(cfg.Endpoints) == 0 { - return fmt.Errorf("missing required field `endpoints`") + return errors.New("missing required field `endpoints`") } for _, endpoint := range cfg.Endpoints { if endpoint.GetHost() == "" { - return fmt.Errorf("missing required field `host`") + return errors.New("missing required field `host`") } if endpoint.GetPort() == 0 { - return fmt.Errorf("missing required field `port`") + return errors.New("missing required field `port`") } } @@ -64,7 +65,7 @@ func validateObservationDiscoveryStaticConfig(cfg *config.TObservationDiscoveryC func validateObservationDiscoveryKubernetesConfig(cfg 
*config.TObservationDiscoveryConfig_TKubernetesDiscoveryConfig) error { if cfg.LabelSelector == "" { - return fmt.Errorf("missing required field `label_selector`") + return errors.New("missing required field `label_selector`") } return nil diff --git a/app/observation/discovery/kubernetes.go b/app/observation/discovery/kubernetes.go index 0b096946..a18094ea 100644 --- a/app/observation/discovery/kubernetes.go +++ b/app/observation/discovery/kubernetes.go @@ -64,6 +64,7 @@ func (k *kubernetesDiscovery) extractEndpointsFromSlice( Host: address, Port: uint32(*port.Port), } + endpoints = append(endpoints, endpoint) } } @@ -107,6 +108,7 @@ func (k *kubernetesDiscovery) GetEndpoints(logger *zap.Logger) ([]*api_common.TG for _, slice := range endpointSlices.Items { endpoints := k.extractEndpointsFromSlice(logger, &slice) + allEndpoints = append(allEndpoints, endpoints...) } diff --git a/app/observation/server.go b/app/observation/server.go index fae900c9..03be9b49 100644 --- a/app/observation/server.go +++ b/app/observation/server.go @@ -76,6 +76,7 @@ func (s *aggregationServer) handleWebSocket(w http.ResponseWriter, r *http.Reque conn, err := s.upgrader.Upgrade(w, r, nil) if err != nil { s.logger.Error("websocket upgrade failed", zap.Error(err)) + return } defer conn.Close() @@ -92,6 +93,7 @@ func (s *aggregationServer) handleWebSocket(w http.ResponseWriter, r *http.Reque duration := time.Since(startTime) totalQueries := 0 + for _, q := range queries { totalQueries += len(q) } @@ -103,6 +105,7 @@ func (s *aggregationServer) handleWebSocket(w http.ResponseWriter, r *http.Reque if err := conn.WriteJSON(queries); err != nil { s.logger.Error("websocket write failed", zap.Error(err)) + return } } @@ -119,6 +122,7 @@ func (s *aggregationServer) pollEndpoints() map[string][]*QueryWithFormattedTime endpoints, err := s.discovery.GetEndpoints(s.logger) if err != nil { s.logger.Error("discover endpoints", zap.Error(err)) + return nil } @@ -136,6 +140,7 @@ func (s *aggregationServer) pollEndpoints() map[string][]*QueryWithFormattedTime if err != nil { s.logger.Error("error polling endpoint", zap.Stringer("endpoint", endpoint), zap.Error(err)) + continue } @@ -204,6 +209,7 @@ func (s *aggregationServer) getOutgoingQueries(endpoint *api_common.TGenericEndp if resp.Query != nil { receivedCount++ + query := &QueryWithFormattedTime{ OutgoingQuery: resp.Query, } diff --git a/app/server/cmd.go b/app/server/cmd.go index c7a70264..cde3cc8f 100644 --- a/app/server/cmd.go +++ b/app/server/cmd.go @@ -59,6 +59,7 @@ func flagToPort(f *pflag.Flag, port *uint32, errs *[]error) { val, err := strconv.Atoi(f.Value.String()) if err != nil { *errs = append(*errs, fmt.Errorf("strconv '%s': %w", f.Value, err)) + return } @@ -86,6 +87,7 @@ func overrideConfigWithFlags(cfg *app_config.TServerConfig, flags *pflag.FlagSet val, ok := app_config.ELogLevel_value[f.Value.String()] if !ok { var buf strings.Builder + for k := range app_config.ELogLevel_value { buf.WriteString(fmt.Sprintf("\"%s\"", k)) buf.WriteString(" ") @@ -102,6 +104,7 @@ func overrideConfigWithFlags(cfg *app_config.TServerConfig, flags *pflag.FlagSet cfg.Logger.LogLevel = app_config.ELogLevel(val) case enableSQLQueryLoggingFlag: cfg.Logger.EnableSqlQueryLogging = enableSQLQueryLogging + default: } }) diff --git a/app/server/config/config.go b/app/server/config/config.go index d6dee8f0..5c536fba 100644 --- a/app/server/config/config.go +++ b/app/server/config/config.go @@ -334,7 +334,7 @@ func validateServerConfig(c *config.TServerConfig) error { func 
validateConnectorServerConfig(c *config.TConnectorServerConfig) error { if c == nil { - return fmt.Errorf("required section is missing") + return errors.New("required section is missing") } if err := validateEndpoint(c.Endpoint); err != nil { @@ -350,7 +350,7 @@ func validateConnectorServerConfig(c *config.TConnectorServerConfig) error { func validateEndpoint(c *api_common.TGenericEndpoint) error { if c == nil { - return fmt.Errorf("required section is missing") + return errors.New("required section is missing") } if c.Host == "" { @@ -389,7 +389,7 @@ func validateReadLimiterConfig(c *config.TReadLimiterConfig) error { // but if it's not nil, one must set limits explicitly if c.GetRows() == 0 { - return fmt.Errorf("invalid value of field `rows`") + return errors.New("invalid value of field `rows`") } return nil @@ -416,16 +416,16 @@ const maxInterconnectMessageSize = 50 * 1024 * 1024 func validatePagingConfig(c *config.TPagingConfig) error { if c == nil { - return fmt.Errorf("required section is missing") + return errors.New("required section is missing") } limitIsSet := c.BytesPerPage != 0 || c.RowsPerPage != 0 if !limitIsSet { - return fmt.Errorf("you must set either `bytes_per_page` or `rows_per_page` or both of them") + return errors.New("you must set either `bytes_per_page` or `rows_per_page` or both of them") } if c.BytesPerPage > maxInterconnectMessageSize { - return fmt.Errorf("`bytes_per_page` limit exceeds the limits of interconnect system used by YDB engine") + return errors.New("`bytes_per_page` limit exceeds the limits of interconnect system used by YDB engine") } return nil @@ -433,7 +433,7 @@ func validatePagingConfig(c *config.TPagingConfig) error { func validateConversionConfig(c *config.TConversionConfig) error { if c == nil { - return fmt.Errorf("required section is missing") + return errors.New("required section is missing") } return nil @@ -441,7 +441,7 @@ func validateConversionConfig(c *config.TConversionConfig) error { func validateDatasourcesConfig(c *config.TDatasourcesConfig) error { if c == nil { - return fmt.Errorf("required section is missing") + return errors.New("required section is missing") } if err := validateRelationalDatasourceConfig(c.Clickhouse); err != nil { @@ -546,11 +546,11 @@ func validateYdbConfig(c *config.TYdbConfig) error { switch c.Mode { case config.TYdbConfig_MODE_QUERY_SERVICE_NATIVE: if c.ResourcePool == "" { - return fmt.Errorf("you must set `resource_pool` if `mode` is `query_service_native`") + return errors.New("you must set `resource_pool` if `mode` is `query_service_native`") } case config.TYdbConfig_MODE_TABLE_SERVICE_STDLIB_SCAN_QUERIES: if c.ResourcePool != "" { - return fmt.Errorf("you must not set `resource_pool` if `mode` is `table_service_stdlib_scan_queries`") + return errors.New("you must not set `resource_pool` if `mode` is `table_service_stdlib_scan_queries`") } default: return fmt.Errorf("invalid `mode` value: %v", c.Mode) @@ -558,7 +558,7 @@ func validateYdbConfig(c *config.TYdbConfig) error { if c.ServiceAccountKeyFileCredentials != "" { if c.IamEndpoint == nil { - return fmt.Errorf("you must set `iam_endpoint` if `service_account_key_file_credentials` is set") + return errors.New("you must set `iam_endpoint` if `service_account_key_file_credentials` is set") } if c.IamEndpoint.Host == "" { @@ -571,7 +571,7 @@ func validateYdbConfig(c *config.TYdbConfig) error { } if c.Splitting == nil { - return fmt.Errorf("you must set `splitting` section") + return errors.New("you must set `splitting` section") } if _, err := 
common.DurationFromString(c.Splitting.QueryTabletIdsTimeout); err != nil { @@ -619,7 +619,7 @@ func validateMongoDBConfig(c *config.TMongoDbConfig) error { } if c.CountDocsToDeduceSchema == 0 { - return fmt.Errorf("validate `count_docs_to_deduce_schema`: can't be zero") + return errors.New("validate `count_docs_to_deduce_schema`: can't be zero") } if c.ObjectIdYqlType == config.TMongoDbConfig_OBJECT_ID_UNSPECIFIED { @@ -643,7 +643,7 @@ func validateRedisConfig(c *config.TRedisConfig) error { } if c.CountDocsToDeduceSchema == 0 { - return fmt.Errorf("validate `count_docs_to_deduce_schema`: can't be zero") + return errors.New("validate `count_docs_to_deduce_schema`: can't be zero") } if err := validateExponentialBackoff(c.ExponentialBackoff); err != nil { @@ -663,11 +663,11 @@ func validateLoggingConfig(c *config.TLoggingConfig) error { } if c.GetStatic() == nil && c.GetDynamic() == nil { - return fmt.Errorf("you should set either `static` or `dynamic` section") + return errors.New("you should set either `static` or `dynamic` section") } if c.GetStatic() != nil && c.GetDynamic() != nil { - return fmt.Errorf("you should set either `static` or `dynamic` section, not both of them") + return errors.New("you should set either `static` or `dynamic` section, not both of them") } if err := validateLoggingResolvingStaticConfig(c.GetStatic()); err != nil { @@ -729,11 +729,11 @@ func validateLoggingResolvingDynamicConfig(c *config.TLoggingConfig_TDynamicReso } if c.LoggingEndpoint.Host == "" { - return fmt.Errorf("missing `logging_endpoint.host`") + return errors.New("missing `logging_endpoint.host`") } if c.LoggingEndpoint.Port == 0 { - return fmt.Errorf("missing `logging_endpoint.port`") + return errors.New("missing `logging_endpoint.port`") } return nil @@ -741,7 +741,7 @@ func validateLoggingResolvingDynamicConfig(c *config.TLoggingConfig_TDynamicReso func validateExponentialBackoff(c *config.TExponentialBackoffConfig) error { if c == nil { - return fmt.Errorf("required section is missing") + return errors.New("required section is missing") } if _, err := common.DurationFromString(c.InitialInterval); err != nil { @@ -785,7 +785,7 @@ func validateOpenSearchConfig(c *config.TOpenSearchConfig) error { } if c.BatchSize == 0 { - return fmt.Errorf("validate `batch_size`, must be greater than zero") + return errors.New("validate `batch_size`, must be greater than zero") } if err := validateExponentialBackoff(c.ExponentialBackoff); err != nil { @@ -813,7 +813,7 @@ func validateObservationConfig(c *config.TObservationConfig) error { func validateObservationServerConfig(c *config.TObservationConfig_TServer) error { if c == nil { - return fmt.Errorf("required section is missing") + return errors.New("required section is missing") } if err := validateEndpoint(c.Endpoint); err != nil { @@ -825,12 +825,12 @@ func validateObservationServerConfig(c *config.TObservationConfig_TServer) error func validateObservationStorageConfig(c *config.TObservationConfig_TStorage) error { if c == nil { - return fmt.Errorf("required section is missing") + return errors.New("required section is missing") } if storage := c.GetSqlite(); storage != nil { if storage.Path == "" { - return fmt.Errorf("empty `sqlite.path`") + return errors.New("empty `sqlite.path`") } if _, err := common.DurationFromString(storage.GcPeriod); err != nil { @@ -899,6 +899,7 @@ func NewConfigFromFile(configPath string) (*config.TServerConfig, error) { if cfg == nil { err := errors.Join(errs...) 
+ return nil, err } diff --git a/app/server/conversion/converters_default.go b/app/server/conversion/converters_default.go index 34569deb..20fc9488 100644 --- a/app/server/conversion/converters_default.go +++ b/app/server/conversion/converters_default.go @@ -78,7 +78,6 @@ type dateConverter struct{} func (dateConverter) Convert(in *time.Time) (uint16, error) { out, err := common.TimeToYDBDate(in) - if err != nil { return 0, fmt.Errorf("convert time to YDB Date: %w", err) } @@ -102,7 +101,6 @@ type datetimeConverter struct{} func (datetimeConverter) Convert(in *time.Time) (uint32, error) { out, err := common.TimeToYDBDatetime(in) - if err != nil { return 0, fmt.Errorf("convert time to YDB Datetime: %w", err) } @@ -120,7 +118,6 @@ type timestampConverter struct{} func (timestampConverter) Convert(in *time.Time) (uint64, error) { out, err := common.TimeToYDBTimestamp(in) - if err != nil { return 0, fmt.Errorf("convert time to YDB Timestamp: %w", err) } diff --git a/app/server/conversion/converters_test.go b/app/server/conversion/converters_test.go index b20f1eed..a1c78c00 100644 --- a/app/server/conversion/converters_test.go +++ b/app/server/conversion/converters_test.go @@ -8,6 +8,7 @@ import ( "github.com/stretchr/testify/require" ) +//nolint:revive func TestDateToStringConverter(t *testing.T) { testCases := []time.Time{ time.Date(math.MaxInt, math.MaxInt, math.MaxInt, math.MaxInt, math.MaxInt, math.MaxInt, math.MaxInt, time.UTC), @@ -36,6 +37,7 @@ func TestDateToStringConverter(t *testing.T) { // Check equivalence of results produced by default and unsafe converters expectedOut, err := converterDefault.Convert(&tc) require.NoError(t, err) + actualOut, err := converterUnsafe.Convert(&tc) require.NoError(t, err) require.Equal(t, expectedOut, actualOut) @@ -91,12 +93,14 @@ func FuzzDateToStringConverter(f *testing.F) { in := time.Date(year, time.Month(month), day, hour, minutes, sec, nsec, time.UTC) expectedOut, err := converterDefault.Convert(&in) require.NoError(t, err) + actualOut, err := converterUnsafe.Convert(&in) require.NoError(t, err) require.Equal(t, expectedOut, actualOut) }) } +//nolint:revive func TestTimestampToStringConverter(t *testing.T) { testCases := []time.Time{ time.Date(math.MaxInt, math.MaxInt, math.MaxInt, math.MaxInt, math.MaxInt, math.MaxInt, math.MaxInt, time.UTC), @@ -128,6 +132,7 @@ func TestTimestampToStringConverter(t *testing.T) { // Check equivalence of results produced by default and unsafe converters expectedOut, err := converterDefault.Convert(&tc) require.NoError(t, err) + actualOut, err := converterUnsafe.Convert(&tc) require.NoError(t, err) require.Equal(t, expectedOut, actualOut) diff --git a/app/server/conversion/converters_unsafe.go b/app/server/conversion/converters_unsafe.go index d88f9d69..252e4f72 100644 --- a/app/server/conversion/converters_unsafe.go +++ b/app/server/conversion/converters_unsafe.go @@ -50,6 +50,7 @@ func (dateToStringConverterUnsafe) Convert(in *time.Time) (string, error) { buf = append(buf, []byte("00")...) 
case absYear < 1000: buf = append(buf, byte('0')) + default: } buf, _ = formatBits(buf, uint64(absYear), 10, false, true) @@ -57,6 +58,7 @@ func (dateToStringConverterUnsafe) Convert(in *time.Time) (string, error) { // month buf = append(buf, byte('-')) + if month < 10 { buf = append(buf, byte('0')) } @@ -66,6 +68,7 @@ func (dateToStringConverterUnsafe) Convert(in *time.Time) (string, error) { // day buf = append(buf, byte('-')) + if day < 10 { buf = append(buf, byte('0')) } @@ -100,6 +103,7 @@ func (timestampToStringConverterUTCUnsafe) Convert(src *time.Time) (string, erro buf = append(buf, []byte("00")...) case absYear < 1000: buf = append(buf, byte('0')) + default: } buf, _ = formatBits(buf, uint64(absYear), 10, false, true) @@ -107,6 +111,7 @@ func (timestampToStringConverterUTCUnsafe) Convert(src *time.Time) (string, erro // month buf = append(buf, byte('-')) + if month < 10 { buf = append(buf, byte('0')) } @@ -116,6 +121,7 @@ func (timestampToStringConverterUTCUnsafe) Convert(src *time.Time) (string, erro // day buf = append(buf, byte('-')) + if day < 10 { buf = append(buf, byte('0')) } diff --git a/app/server/conversion/itoa.go b/app/server/conversion/itoa.go index 5d64413b..f9709796 100644 --- a/app/server/conversion/itoa.go +++ b/app/server/conversion/itoa.go @@ -26,13 +26,16 @@ const digits = "0123456789abcdefghijklmnopqrstuvwxyz" // set, the string is appended to dst and the resulting byte slice is // returned as the first result value; otherwise the string is returned // as the second result value. -func formatBits(dst []byte, u uint64, base int, neg, append_ bool) (d []byte, s string) { +// +//nolint:unparam +func formatBits(dst []byte, u uint64, base int, neg, appending bool) (d []byte, s string) { if base < 2 || base > len(digits) { panic("strconv: illegal AppendInt/FormatInt base") } // 2 <= base && base <= len(digits) var a [64 + 1]byte // +1 for sign of 64bit value in base 2 + i := len(a) if neg { @@ -45,7 +48,6 @@ func formatBits(dst []byte, u uint64, base int, neg, append_ bool) (d []byte, s if base == 10 { // common case: use constants for / because // the compiler can optimize it into a multiply+shift - if host32bit { // convert the lower digits using 32bit operations for u >= 1e9 { @@ -53,11 +55,15 @@ func formatBits(dst []byte, u uint64, base int, neg, append_ bool) (d []byte, s // since 64bit division and modulo operations // are calculated by runtime functions on 32bit machines. q := u / 1e9 + us := uint(u - q*1e9) // u % 1e9 fits into a uint + for j := 4; j > 0; j-- { is := us % 100 * 2 + us /= 100 i -= 2 + a[i+1] = smallsString[is+1] a[i+0] = smallsString[is+0] } @@ -65,6 +71,7 @@ func formatBits(dst []byte, u uint64, base int, neg, append_ bool) (d []byte, s // us < 10, since it contains the last digit // from the initial 9-digit us. i-- + a[i] = smallsString[us*2+1] u = q @@ -76,8 +83,11 @@ func formatBits(dst []byte, u uint64, base int, neg, append_ bool) (d []byte, s us := uint(u) for us >= 100 { is := us % 100 * 2 + us /= 100 + i -= 2 + a[i+1] = smallsString[is+1] a[i+0] = smallsString[is+0] } @@ -85,24 +95,30 @@ func formatBits(dst []byte, u uint64, base int, neg, append_ bool) (d []byte, s // us < 100 is := us * 2 i-- + a[i] = smallsString[is+1] + if us >= 10 { i-- + a[i] = smallsString[is] } - } else if isPowerOfTwo(base) { // Use shifts and masks instead of / and %. 
shift := uint(bits.TrailingZeros(uint(base))) b := uint64(base) m := uint(base) - 1 // == 1<<shift - 1 for u >= b { i-- + a[i] = digits[uint(u)&m] + u >>= shift } // u < base i-- + a[i] = digits[uint(u)] } else { // general case @@ -113,26 +129,32 @@ // since 64bit division and modulo operations // are calculated by runtime functions on 32bit machines. q := u / b + a[i] = digits[uint(u-q*b)] u = q } // u < base i-- + a[i] = digits[uint(u)] } // add sign, if any if neg { i-- + a[i] = '-' } - if append_ { + if appending { d = append(dst, a[i:]...) - return + + return d, s } + s = string(a[i:]) - return + + return d, s } func isPowerOfTwo(x int) bool { diff --git a/app/server/conversion/strftime.go b/app/server/conversion/strftime.go index 976976f4..8f328080 100644 --- a/app/server/conversion/strftime.go +++ b/app/server/conversion/strftime.go @@ -20,17 +20,24 @@ func formatNanoseconds(buf []byte, ns int) []byte { var tmp [9]byte b := ns % 100 * 2 + tmp[8] = tab[b+1] tmp[7] = tab[b] + ns /= 100 + b = ns % 100 * 2 tmp[6] = tab[b+1] tmp[5] = tab[b] + ns /= 100 + b = ns % 100 * 2 tmp[4] = tab[b+1] tmp[3] = tab[b] + ns /= 100 + b = ns % 100 * 2 tmp[2] = tab[b+1] tmp[1] = tab[b] diff --git a/app/server/data_source_collection.go b/app/server/data_source_collection.go index e79cd427..eb920727 100644 --- a/app/server/data_source_collection.go +++ b/app/server/data_source_collection.go @@ -356,6 +356,7 @@ func doReadSplit[T paging.Acceptor]( readStats := sinkFactory.FinalStats() fields := common.SelectToFields(split.Select) + fields = append(fields, zap.Uint64("total_bytes", readStats.GetBytes()), zap.Uint64("total_rows", readStats.GetRows()), @@ -392,7 +393,6 @@ func NewDataSourceCollection( observationStorage, ydbTableMetadataCache, ) - if err != nil { return nil, fmt.Errorf("new data source factory: %w", err) } diff --git a/app/server/datasource/nosql/mongodb/datasource.go b/app/server/datasource/nosql/mongodb/datasource.go index 1ed6e387..41a4f592 100644 --- a/app/server/datasource/nosql/mongodb/datasource.go +++ b/app/server/datasource/nosql/mongodb/datasource.go @@ -3,6 +3,7 @@ package mongodb import ( "context" "crypto/tls" + "errors" "fmt" "go.mongodb.org/mongo-driver/bson" @@ -51,7 +52,7 @@ func (ds *dataSource) DescribeTable( mongoDbOptions := dsi.GetMongodbOptions() if mongoDbOptions == nil { - return nil, fmt.Errorf("TMongoDbDataSourceOptions not provided") + return nil, errors.New("TMongoDbDataSourceOptions not provided") } switch mongoDbOptions.ReadingMode { @@ -72,12 +73,12 @@ func (ds *dataSource) DescribeTable( err = ds.retrierSet.MakeConnection.Run(ctx, logger, func() error { var connErr error + conn, connErr = ds.makeConnection(ctx, logger, dsi) return connErr }, ) - if err != nil { return nil, fmt.Errorf("make connection: %w", err) } @@ -102,6 +103,7 @@ func (ds *dataSource) DescribeTable( }() docs := make([]bson.Raw, 0, ds.cfg.GetCountDocsToDeduceSchema()) + for cursor.Next(ctx) { docs = append(docs, cursor.Current) } @@ -167,7 +169,7 @@ func (ds *dataSource) ReadSplit( mongoDbOptions := dsi.GetMongodbOptions() if mongoDbOptions == nil { - return fmt.Errorf("TMongoDbDataSourceOptions not provided") + return errors.New("TMongoDbDataSourceOptions not provided") } switch mongoDbOptions.ReadingMode { @@ -183,12 +185,12 @@ func (ds *dataSource) ReadSplit( err := ds.retrierSet.MakeConnection.Run(ctx, logger, func() error { var connErr error + conn, connErr = ds.makeConnection(ctx, logger, dsi) return connErr }, ) - if err != 
nil { return fmt.Errorf("make connection: %w", err) } diff --git a/app/server/datasource/nosql/mongodb/document_reader.go b/app/server/datasource/nosql/mongodb/document_reader.go index ca392f8e..d14d77fd 100644 --- a/app/server/datasource/nosql/mongodb/document_reader.go +++ b/app/server/datasource/nosql/mongodb/document_reader.go @@ -171,7 +171,7 @@ func (r *documentReader) accept(doc bson.M) error { if isSerializedDocumentReadingMode(r.readingMode) { if len(r.arrowTypes.Fields()) != 2 { - return fmt.Errorf("unexpected number of accepters for a serialized document reading mode") + return errors.New("unexpected number of accepters for a serialized document reading mode") } for i, f := range r.arrowTypes.Fields() { @@ -275,6 +275,7 @@ func (r *documentReader) acceptSingleField(acceptor any, doc bson.M, fieldName s value, ok := doc[fieldName] if !ok { *a = nil + return nil } @@ -286,6 +287,7 @@ func (r *documentReader) acceptSingleField(acceptor any, doc bson.M, fieldName s if r.unexpectedDisplayMode == api_common.TMongoDbDataSourceOptions_UNEXPECTED_AS_NULL { *a = nil + return nil } } @@ -321,6 +323,7 @@ func (r *documentReader) acceptSingleField(acceptor any, doc bson.M, fieldName s value, ok := doc[fieldName] if !ok { *a = nil + return nil } @@ -368,7 +371,6 @@ func makeTransformer(ydbTypes []*Ydb.Type, cc conversion.Collection) (paging.Row for _, ydbType := range ydbTypes { acceptors, appenders, err = addAcceptorAppender(ydbType, cc, acceptors, appenders) - if err != nil { return nil, fmt.Errorf("addAcceptorAppender: %w", err) } @@ -436,34 +438,38 @@ func addAcceptorAppenderNullable(ydbType *Ydb.Type, cc conversion.Collection, ac acceptorPtr := acceptor.(**any) if *acceptorPtr == nil { builder.AppendNull() + return nil } return yqlStringAppender(**acceptorPtr, builder, cc.Bytes()) }) + default: + return nil, nil, fmt.Errorf("unsupported: %v", ydbType.String()) } case *Ydb.Type_TaggedType: - if t.TaggedType.Tag == objectIdTag { - acceptors = append(acceptors, new(*primitive.ObjectID)) - appenders = append(appenders, func(acceptor any, builder array.Builder) error { - value := acceptor.(**primitive.ObjectID) - if *value == nil { - builder.AppendNull() - return nil - } - - bytes, err := (*value).MarshalText() - if err != nil { - return err - } - - return utils.AppendValueToArrowBuilder[[]byte, []byte, *array.BinaryBuilder](&bytes, builder, cc.Bytes()) - }) - } else { + if t.TaggedType.Tag != objectIdTag { return nil, nil, fmt.Errorf("unknown Tagged tag: %s", t.TaggedType.Tag) } + acceptors = append(acceptors, new(*primitive.ObjectID)) + appenders = append(appenders, func(acceptor any, builder array.Builder) error { + value := acceptor.(**primitive.ObjectID) + if *value == nil { + builder.AppendNull() + + return nil + } + + bytes, err := (*value).MarshalText() + if err != nil { + return err + } + + return utils.AppendValueToArrowBuilder[[]byte, []byte, *array.BinaryBuilder](&bytes, builder, cc.Bytes()) + }) + default: return nil, nil, fmt.Errorf("unsupported: %v", ydbType.String()) } @@ -508,30 +514,32 @@ func addAcceptorAppenderNonNullable(ydbType *Ydb.Type, cc conversion.Collection, acceptorPtr := acceptor.(*any) if acceptorPtr == nil { builder.AppendNull() + return nil } return yqlStringAppender(*acceptorPtr, builder, cc.Bytes()) }) + default: + return nil, nil, fmt.Errorf("unsupported: %v", ydbType.String()) } case *Ydb.Type_TaggedType: - if t.TaggedType.Tag == objectIdTag { - acceptors = append(acceptors, new(primitive.ObjectID)) - appenders = append(appenders, func(acceptor any, 
builder array.Builder) error { - value := acceptor.(*primitive.ObjectID) - - bytes, err := value.MarshalText() - if err != nil { - return fmt.Errorf("marshal text from data in ObjectId: %w", err) - } - - return utils.AppendValueToArrowBuilder[[]byte, []byte, *array.BinaryBuilder](&bytes, builder, cc.Bytes()) - }) - } else { + if t.TaggedType.Tag != objectIdTag { return nil, nil, fmt.Errorf("unknown Tagged tag: %s", t.TaggedType.Tag) } + acceptors = append(acceptors, new(primitive.ObjectID)) + appenders = append(appenders, func(acceptor any, builder array.Builder) error { + value := acceptor.(*primitive.ObjectID) + + bytes, err := value.MarshalText() + if err != nil { + return fmt.Errorf("marshal text from data in ObjectId: %w", err) + } + + return utils.AppendValueToArrowBuilder[[]byte, []byte, *array.BinaryBuilder](&bytes, builder, cc.Bytes()) + }) default: return nil, nil, fmt.Errorf("unsupported: %v", ydbType.String()) } diff --git a/app/server/datasource/nosql/mongodb/filtering.go b/app/server/datasource/nosql/mongodb/filtering.go index ad5ad722..fa7e35b7 100644 --- a/app/server/datasource/nosql/mongodb/filtering.go +++ b/app/server/datasource/nosql/mongodb/filtering.go @@ -2,6 +2,7 @@ package mongodb import ( "encoding/hex" + "errors" "fmt" "go.mongodb.org/mongo-driver/bson" @@ -27,10 +28,11 @@ func makeFilter( if readingMode == api_common.TMongoDbDataSourceOptions_TABLE { what := split.Select.What if what == nil { - return nil, nil, fmt.Errorf("not specified columns to query in Select.What") + return nil, nil, errors.New("not specified columns to query in Select.What") } projection := bson.D{} + for _, item := range what.GetItems() { projection = append(projection, bson.E{Key: item.GetColumn().Name, Value: 1}) } @@ -62,6 +64,7 @@ func makeFilter( api_service_protos.TReadSplitsRequest_FILTERING_OPTIONAL: if common.OptionalFilteringAllowedErrors.Match(err) { logger.Warn("considering pushdown error as acceptable", zap.Error(err)) + return filter, opts, nil } @@ -177,6 +180,7 @@ func getConjunctionFilter( operand, err := makePredicateFilter(logger, op, false) if err != nil { err = fmt.Errorf("unable to format one of the predicates in conjunction: %w", err) + if !suppressErrors { return nil, err } @@ -332,6 +336,7 @@ func getInSetFilter( in *api_service_protos.TPredicate_TIn, ) (bson.D, error) { var fieldName string + switch e := in.Value.Payload.(type) { case *api_service_protos.TExpression_Column: fieldName = e.Column diff --git a/app/server/datasource/nosql/mongodb/type_mapping.go b/app/server/datasource/nosql/mongodb/type_mapping.go index 9052c4d9..5cda32f3 100644 --- a/app/server/datasource/nosql/mongodb/type_mapping.go +++ b/app/server/datasource/nosql/mongodb/type_mapping.go @@ -137,6 +137,7 @@ func bsonToYql(logger *zap.Logger, docs []bson.Raw, omitUnsupported, typeMapIdOn for _, doc := range docs { if typeMapIdOnly { elem := doc.Lookup(idColumn) + err := bsonToYqlColumn( logger, idColumn, @@ -147,7 +148,6 @@ func bsonToYql(logger *zap.Logger, docs []bson.Raw, omitUnsupported, typeMapIdOn omitUnsupported, objectIdType, ) - if err != nil { return nil, fmt.Errorf("bsonToYqlColumn: %w", err) } @@ -171,7 +171,6 @@ func bsonToYql(logger *zap.Logger, docs []bson.Raw, omitUnsupported, typeMapIdOn omitUnsupported, objectIdType, ) - if err != nil { return nil, fmt.Errorf("bsonToYqlColumn: %w", err) } diff --git a/app/server/datasource/nosql/opensearch/datasource.go b/app/server/datasource/nosql/opensearch/datasource.go index fab94030..914aa2fc 100644 --- 
a/app/server/datasource/nosql/opensearch/datasource.go +++ b/app/server/datasource/nosql/opensearch/datasource.go @@ -4,6 +4,7 @@ import ( "context" "crypto/tls" "encoding/json" + "errors" "fmt" "io" "net" @@ -68,6 +69,7 @@ func (ds *dataSource) DescribeTable( err := ds.retrierSet.MakeConnection.Run(ctx, logger, func() error { var err error + client, err = ds.makeConnection(ctx, logger, dsi) return err @@ -78,11 +80,11 @@ func (ds *dataSource) DescribeTable( } indexName := request.Table + res, err := client.Indices.Mapping.Get( ctx, &opensearchapi.MappingGetReq{Indices: []string{indexName}}, ) - if err != nil { return nil, fmt.Errorf("get mapping: %w", err) } @@ -103,7 +105,7 @@ func (ds *dataSource) DescribeTable( mapping, ok := result[indexName].(map[string]any)["mappings"].(map[string]any) if !ok { - return nil, fmt.Errorf("extract mappings: invalid response format") + return nil, errors.New("extract mappings: invalid response format") } columns, err := parseMapping(logger, mapping) @@ -151,6 +153,7 @@ func (ds *dataSource) ReadSplit( err := ds.retrierSet.MakeConnection.Run(ctx, logger, func() error { var err error + client, err = ds.makeConnection(ctx, logger, dsi) return err @@ -204,7 +207,7 @@ func (ds *dataSource) doReadSplitSingleConn( } if searchResp.ScrollID == nil { - return fmt.Errorf("scroll id is nil") + return errors.New("scroll id is nil") } reader, err := prepareDocumentReader(split, ds.cc) @@ -218,6 +221,7 @@ func (ds *dataSource) doReadSplitSingleConn( for { if len(hits.Hits) == 0 { logger.Info("no hits found") + break } @@ -239,6 +243,7 @@ func (ds *dataSource) doReadSplitSingleConn( } closeResponseBody(logger, nextResp.Inspect().Response.Body) + hits = nextResp.Hits } @@ -306,13 +311,11 @@ func prepareDocumentReader( cc conversion.Collection, ) (*documentReader, error) { arrowSchema, err := common.SelectWhatToArrowSchema(split.Select.What) - if err != nil { return nil, fmt.Errorf("select what to Arrow schema: %w", err) } ydbSchema, err := common.SelectWhatToYDBTypes(split.Select.What) - if err != nil { return nil, fmt.Errorf("select what to YDB schema: %w", err) } @@ -369,6 +372,7 @@ func (ds *dataSource) getNextScrollBatch( err := ds.retrierSet.Query.Run(ctx, logger, func() error { var err error + resp, err = client.Scroll.Get(ctx, opensearchapi.ScrollGetReq{ ScrollID: scrollID, Params: opensearchapi.ScrollGetParams{ diff --git a/app/server/datasource/nosql/opensearch/document_reader.go b/app/server/datasource/nosql/opensearch/document_reader.go index ff95b5da..6b16747e 100644 --- a/app/server/datasource/nosql/opensearch/document_reader.go +++ b/app/server/datasource/nosql/opensearch/document_reader.go @@ -83,6 +83,7 @@ func (r *documentReader) accept( value, ok := doc[f.Name] if !ok { *a = nil + continue } @@ -93,6 +94,7 @@ func (r *documentReader) accept( value, ok := doc[f.Name] if !ok || value == nil { *a = nil + continue } @@ -103,6 +105,7 @@ func (r *documentReader) accept( value, ok := doc[f.Name] if !ok || value == nil { *a = nil + continue } @@ -113,6 +116,7 @@ func (r *documentReader) accept( value, ok := doc[f.Name] if !ok || value == nil { *a = nil + continue } @@ -123,6 +127,7 @@ func (r *documentReader) accept( value, ok := doc[f.Name] if !ok { *a = nil + continue } @@ -133,6 +138,7 @@ func (r *documentReader) accept( value, ok := doc[f.Name] if !ok || value == nil { *a = nil + continue } @@ -140,20 +146,20 @@ func (r *documentReader) accept( return fmt.Errorf("convert: %w", err) } case *string: - if f.Name == "_id" { - *a = hit.ID - } else { + if f.Name 
!= "_id" { return fmt.Errorf("unsupported type %T: for field %T, %w", acceptors[i], f.Name, common.ErrDataTypeNotSupported) } + + *a = hit.ID case **string: value, ok := doc[f.Name] if !ok || value == nil { *a = nil + continue } str, err := convertToString(logger, value) - if err != nil { if !errors.Is(err, common.ErrDataTypeNotSupported) { return fmt.Errorf("json to string: %w", err) @@ -165,6 +171,7 @@ func (r *documentReader) accept( value, ok := doc[f.Name] if !ok || value == nil { *a = nil + continue } @@ -178,6 +185,7 @@ func (r *documentReader) accept( value, ok := doc[f.Name] if !ok || value == nil { *a = nil + continue } @@ -212,7 +220,7 @@ func convertToMapStringAny(value any, fieldName string) (*map[string]any, error) func parseTime(value any) (time.Time, error) { if value == nil { - return time.Time{}, fmt.Errorf("time value is nil") + return time.Time{}, errors.New("time value is nil") } switch v := value.(type) { @@ -260,6 +268,7 @@ func parseTime(value any) (time.Time, error) { func convertPtr[INTO any](acceptor **INTO, value any) error { if v, ok := value.(INTO); ok { *acceptor = ptr.T(v) + return nil } @@ -403,6 +412,8 @@ func addAcceptorAppenderNullable( case Ydb.Type_TIMESTAMP: acceptors = append(acceptors, new(*time.Time)) appenders = append(appenders, utils.MakeAppenderNullable[time.Time, uint64, *array.Uint64Builder](cc.Timestamp())) + default: + return nil, nil, fmt.Errorf("unsupported: %v", ydbType.String()) } case *Ydb.Type_StructType: acceptors = append(acceptors, new(*map[string]any)) @@ -434,6 +445,7 @@ func createStructAppender(structType *Ydb.StructType) func(any, array.Builder) e if *pt == nil { structBuilder.AppendNull() + return nil } @@ -449,6 +461,7 @@ func createStructAppender(structType *Ydb.StructType) func(any, array.Builder) e fieldValue := (*data)[fieldName] if fieldValue == nil { fieldBuilder.AppendNull() + continue } diff --git a/app/server/datasource/nosql/opensearch/query_builder.go b/app/server/datasource/nosql/opensearch/query_builder.go index 8b2e5006..511d2710 100644 --- a/app/server/datasource/nosql/opensearch/query_builder.go +++ b/app/server/datasource/nosql/opensearch/query_builder.go @@ -3,6 +3,7 @@ package opensearch import ( "bytes" "encoding/json" + "errors" "fmt" "io" "time" @@ -48,11 +49,12 @@ func (qb *queryBuilder) buildSearchQuery( what := split.Select.GetWhat() if what == nil { - return nil, nil, fmt.Errorf("not specified columns to query in Select.What") + return nil, nil, errors.New("not specified columns to query in Select.What") } // TODO (Test for top to bottom struct projection) var projection []string + for _, item := range what.GetItems() { projection = append(projection, item.GetColumn().Name) } @@ -84,11 +86,11 @@ func (qb *queryBuilder) buildSearchQuery( case api_service_protos.TReadSplitsRequest_FILTERING_MANDATORY: return nil, nil, fmt.Errorf("make predicate filter: %w", err) case api_service_protos.TReadSplitsRequest_FILTERING_OPTIONAL: - if common.OptionalFilteringAllowedErrors.Match(err) { - qb.logger.Warn("considering pushdown error as acceptable", zap.Error(err)) - } else { + if !common.OptionalFilteringAllowedErrors.Match(err) { return nil, nil, fmt.Errorf("encountered an error making a filter: %w", err) } + + qb.logger.Warn("considering pushdown error as acceptable", zap.Error(err)) default: return nil, nil, fmt.Errorf("unknown filtering mode: %d", filtering) } @@ -235,6 +237,7 @@ func (qb *queryBuilder) makeConjunctionFilter( if err != nil { if topLevel { errs = append(errs, fmt.Errorf("operand error: %w", 
err)) + continue } @@ -501,7 +504,7 @@ func (qb *queryBuilder) makeTypedValue(expr *Ydb.TypedValue) (any, error) { ydbType := expr.GetType() if v == nil { - return nil, fmt.Errorf("typed value container is nil") + return nil, errors.New("typed value container is nil") } if v.Value == nil { @@ -513,6 +516,7 @@ func (qb *queryBuilder) makeTypedValue(expr *Ydb.TypedValue) (any, error) { } var value any + switch t := v.Value.(type) { case *Ydb.Value_BoolValue: value = t.BoolValue diff --git a/app/server/datasource/nosql/opensearch/type_mapping.go b/app/server/datasource/nosql/opensearch/type_mapping.go index f79f5695..f1e05a37 100644 --- a/app/server/datasource/nosql/opensearch/type_mapping.go +++ b/app/server/datasource/nosql/opensearch/type_mapping.go @@ -1,6 +1,7 @@ package opensearch import ( + "errors" "fmt" "sort" @@ -22,6 +23,7 @@ func parseMapping( // to the index. The "_meta" property is used during schema construction to identify // which fields should be considered as arrays (lists). meta := make(map[string]any) + if metaSection, ok := mappings["_meta"].(map[string]any); ok { meta = metaSection } else { @@ -131,8 +133,8 @@ func processChildFields( } childQualifiedName := fmt.Sprintf("%s.%s", parentQualifiedName, childFieldName) - childField, err := inferField(logger, childFieldName, childQualifiedName, childProps, meta) + childField, err := inferField(logger, childFieldName, childQualifiedName, childProps, meta) if err != nil { return nil, fmt.Errorf("process child field '%s': %w", childFieldName, err) } @@ -200,7 +202,7 @@ func typeMap( ) (*Ydb.Type, error) { fieldType, ok := mapping["type"].(string) if !ok { - return nil, fmt.Errorf("missing 'type' in mapping") + return nil, errors.New("missing 'type' in mapping") } var ydbType *Ydb.Type diff --git a/app/server/datasource/nosql/redis/bench/native_client.go b/app/server/datasource/nosql/redis/bench/native_client.go index 69a4b5fc..c516a2a3 100644 --- a/app/server/datasource/nosql/redis/bench/native_client.go +++ b/app/server/datasource/nosql/redis/bench/native_client.go @@ -38,7 +38,6 @@ func NewBench(interval time.Duration) (*Bench, error) { } t, err := p.Times() - if err != nil { return nil, err } @@ -78,6 +77,7 @@ func (b *Bench) Stop() { // Add tracks internal byte count and row count func (b *Bench) Add(internalBytes, rowCount int) { b.mu.Lock() + b.bytesInt += int64(internalBytes) b.rows += int64(rowCount) b.mu.Unlock() @@ -185,16 +185,20 @@ func scanAll(b *Bench) { log.Fatalf("cmd result: %v", errRes) } - if t == "string" { + switch t { + case "string": strKeys = append(strKeys, keys[i]) - } else if t == "hash" { + case "hash": hashKeys = append(hashKeys, keys[i]) + default: + log.Fatalf("unexpected type: %s", t) } } // 3) GET pipeline if len(strKeys) > 0 { pipe = rdb.Pipeline() + getCmds := make([]*redis.StringCmd, len(strKeys)) for i, key := range strKeys { @@ -218,6 +222,7 @@ func scanAll(b *Bench) { // 4) HGETALL pipeline if len(hashKeys) > 0 { pipe = rdb.Pipeline() + hgetCmds := make([]*redis.MapStringStringCmd, len(hashKeys)) for i, key := range hashKeys { diff --git a/app/server/datasource/nosql/redis/datasource.go b/app/server/datasource/nosql/redis/datasource.go index da3594f7..96e27c28 100644 --- a/app/server/datasource/nosql/redis/datasource.go +++ b/app/server/datasource/nosql/redis/datasource.go @@ -2,6 +2,7 @@ package redis import ( "context" + "errors" "fmt" "sort" "strings" @@ -71,6 +72,8 @@ func newRedisRowTransformer(items []*api_service_protos.TSelect_TWhat_TItem) (*r t.acceptors[i] = &t.stringVal case 
HashColumnName: t.acceptors[i] = &t.hashVal + default: + return nil, fmt.Errorf("unsupported column name: %s", column.Name) } } @@ -121,7 +124,7 @@ func getHashFields(items []*api_service_protos.TSelect_TWhat_TItem) ([]string, e for _, item := range items { column := item.GetColumn() if column == nil { - return nil, fmt.Errorf("select.what has nil column") + return nil, errors.New("select.what has nil column") } if column.Name == HashColumnName { @@ -180,6 +183,7 @@ func (*dataSource) readKeys( return nil default: logger.Warn("unsupported key type for specific key", zap.String("key", pattern), zap.String("type", typ)) + return nil } } @@ -194,6 +198,7 @@ func (*dataSource) readKeys( case api_service_protos.TPredicate_TComparison_CONTAINS: // LIKE '%foo%' → '*foo*' pattern = "*" + pattern + "*" + default: } var cursor, unsupported uint64 @@ -341,6 +346,7 @@ func processHashKeys( } transformer.key = keys[i] + m := make(map[string]string, len(transformer.hashFields)) for j, field := range transformer.hashFields { @@ -380,11 +386,11 @@ func (ds *dataSource) ReadSplit( err := ds.retrierSet.MakeConnection.Run(ctx, logger, func() error { var err error + client, err = ds.makeConnection(ctx, logger, dsi) return err }) - if err != nil { return fmt.Errorf("make connection: %w", err) } @@ -436,11 +442,11 @@ func (ds *dataSource) DescribeTable( err := ds.retrierSet.MakeConnection.Run(ctx, logger, func() error { var err error + client, err = ds.makeConnection(ctx, logger, dsi) return err }) - if err != nil { return nil, fmt.Errorf("make connection: %w", err) } @@ -456,7 +462,6 @@ func (ds *dataSource) DescribeTable( } allKeys, err := ds.accumulateKeys(ctx, client, request.Table, int(count)) - if err != nil { return nil, fmt.Errorf("accumulate keys: %w", err) } @@ -540,8 +545,8 @@ func (*dataSource) analyzeKeys( res.stringExists = true case TypeHash: res.hashExists = true - fields, err := client.HKeys(ctx, key).Result() + fields, err := client.HKeys(ctx, key).Result() if err != nil { return nil, fmt.Errorf("get hash keys for key %s: %w", key, err) } @@ -571,6 +576,7 @@ func buildSchema(spec keysSpec) []*Ydb.Column { Name: KeyColumnName, Type: common.MakePrimitiveType(Ydb.Type_STRING), } + columns = append(columns, keyColumn) // Add "string_values" column if string keys exist. 
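Aside: the buildSchema hunks above and below assemble the Redis table schema conditionally — the key column is always present, while the string and hash columns appear only when the corresponding key types were observed while sampling. Below is a minimal, self-contained sketch of that pattern (not part of the patch): keysSpecSketch, buildSchemaSketch and the literal column names are stand-ins for the package's keysSpec and KeyColumnName/StringColumnName/HashColumnName constants, and the hash column is simplified to an optional STRING instead of the struct type the real code builds from observed hash fields.

package main

import (
	"fmt"

	"github.com/ydb-platform/ydb-go-genproto/protos/Ydb"

	"github.com/ydb-platform/fq-connector-go/common"
)

// keysSpecSketch mirrors the role of keysSpec: flags describing which Redis
// value kinds were observed while scanning keys.
type keysSpecSketch struct {
	stringExists bool
	hashExists   bool
}

// buildSchemaSketch shows the conditional schema assembly: the key column is
// always present, optional columns are added only for observed value kinds.
func buildSchemaSketch(spec keysSpecSketch) []*Ydb.Column {
	columns := []*Ydb.Column{{
		Name: "key",
		Type: common.MakePrimitiveType(Ydb.Type_STRING),
	}}

	if spec.stringExists {
		columns = append(columns, &Ydb.Column{
			Name: "string_values",
			Type: common.MakeOptionalType(common.MakePrimitiveType(Ydb.Type_STRING)),
		})
	}

	if spec.hashExists {
		// Simplified: the connector itself builds an optional struct type here.
		columns = append(columns, &Ydb.Column{
			Name: "hash_values",
			Type: common.MakeOptionalType(common.MakePrimitiveType(Ydb.Type_STRING)),
		})
	}

	return columns
}

func main() {
	for _, c := range buildSchemaSketch(keysSpecSketch{stringExists: true, hashExists: true}) {
		fmt.Println(c.Name)
	}
}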
@@ -579,6 +585,7 @@ func buildSchema(spec keysSpec) []*Ydb.Column { Name: StringColumnName, Type: common.MakeOptionalType(common.MakePrimitiveType(Ydb.Type_STRING)), } + columns = append(columns, stringColumn) } @@ -615,6 +622,7 @@ func buildSchema(spec keysSpec) []*Ydb.Column { Name: HashColumnName, Type: common.MakeOptionalType(structType), } + columns = append(columns, hashColumn) } @@ -694,6 +702,7 @@ func (t *redisRowTransformer) AppendToArrowBuilders(_ *arrow.Schema, builders [] func (t *redisRowTransformer) appendKey(builderIn array.Builder) error { if builder, ok := builderIn.(*array.BinaryBuilder); ok { builder.Append([]byte(t.key)) + return nil } @@ -722,6 +731,7 @@ func (t *redisRowTransformer) appendHashValue(builderIn array.Builder) error { if t.hashVal == nil { builder.AppendNull() + return nil } diff --git a/app/server/datasource/prometheus/analysis/bench/client.go b/app/server/datasource/prometheus/analysis/bench/client.go index ccec7d9b..a6e2c01c 100644 --- a/app/server/datasource/prometheus/analysis/bench/client.go +++ b/app/server/datasource/prometheus/analysis/bench/client.go @@ -82,14 +82,17 @@ func main() { switch vt { case chunkenc.ValFloat: ts, v := iter.At() + vv += float64(ts) + v fmt.Printf("%s %g %d\n", l, v, ts) case chunkenc.ValHistogram: ts, h := iter.AtHistogram(nil) + vv += float64(ts) + h.Sum fmt.Printf("%s %s %d\n", l, h.String(), ts) case chunkenc.ValFloatHistogram: ts, h := iter.AtFloatHistogram(nil) + vv += float64(ts) + h.Sum fmt.Printf("%s %s %d\n", l, h.String(), ts) default: diff --git a/app/server/datasource/prometheus/metrics_reader.go b/app/server/datasource/prometheus/metrics_reader.go index ce4834ce..1c110fe7 100644 --- a/app/server/datasource/prometheus/metrics_reader.go +++ b/app/server/datasource/prometheus/metrics_reader.go @@ -159,6 +159,7 @@ func (r *metricsReader) accept(l labels.Labels, timestamp int64, val float64) er labelValue := l.Get(f.Name) if labelValue == "" { acceptors[i] = nil + continue } @@ -167,6 +168,7 @@ func (r *metricsReader) accept(l labels.Labels, timestamp int64, val float64) er labelValue := l.Get(f.Name) if labelValue == "" { *a = nil + continue } diff --git a/app/server/datasource/prometheus/promql.go b/app/server/datasource/prometheus/promql.go index 0dcb47cd..e8bc4be7 100644 --- a/app/server/datasource/prometheus/promql.go +++ b/app/server/datasource/prometheus/promql.go @@ -60,11 +60,13 @@ func (p PromQLBuilder) From(from string) PromQLBuilder { func (p PromQLBuilder) WithStartTime(start time.Time) PromQLBuilder { p.startTime = toPromTime(start) + return p } func (p PromQLBuilder) WithEndTime(end time.Time) PromQLBuilder { p.endTime = toPromTime(end) + return p } @@ -111,6 +113,7 @@ func (p PromQLBuilder) applyComparisonPredicate(c *protos.TPredicate_TComparison lv, rv, op := c.GetLeftValue(), c.GetRightValue(), c.GetOperation() if op == protos.TPredicate_TComparison_COMPARISON_OPERATION_UNSPECIFIED || lv == nil || rv == nil { p.predicateErrors = append(p.predicateErrors, fmt.Errorf("get comparison predicate: %w", common.ErrInvalidRequest)) + return p } @@ -132,6 +135,7 @@ func (p PromQLBuilder) applyComparisonPredicate(c *protos.TPredicate_TComparison } default: p.predicateErrors = append(p.predicateErrors, fmt.Errorf("apply comparison predicate: %w", common.ErrUnsupportedExpression)) + return p } } @@ -149,6 +153,7 @@ func (p PromQLBuilder) applyTimestampExpr(op protos.TPredicate_TComparison_EOper if value.GetValue() == nil || value.GetValue().GetUint64Value() == 0 { p.predicateErrors = 
append(p.predicateErrors, fmt.Errorf("get timestamp value: %w", common.ErrInvalidRequest)) + return p } @@ -189,6 +194,7 @@ func (p PromQLBuilder) applyStringExpr(op protos.TPredicate_TComparison_EOperati if value.GetValue() == nil { p.predicateErrors = append(p.predicateErrors, fmt.Errorf("get string value: %w", common.ErrInvalidRequest)) + return p } @@ -225,6 +231,7 @@ func (p PromQLBuilder) matchPredicateErrors(filtering protos.TReadSplitsRequest_ for _, err := range p.predicateErrors { if acceptableErrors.Match(err) { p.logger.Info("considering pushdown error as acceptable", zap.Error(err)) + continue } diff --git a/app/server/datasource/prometheus/promql_test.go b/app/server/datasource/prometheus/promql_test.go index 4d7a78cd..ca1b64b4 100644 --- a/app/server/datasource/prometheus/promql_test.go +++ b/app/server/datasource/prometheus/promql_test.go @@ -135,7 +135,7 @@ func TestWithYdbWhereUnsupportedPredicate(t *testing.T) { // // With filtering mandatory parsing // - builder, err = prometheus.NewPromQLBuilder(logger). + _, err = prometheus.NewPromQLBuilder(logger). WithYdbWhere(where, api_service_protos.TReadSplitsRequest_FILTERING_MANDATORY) assert.ErrorIs(t, err, common.ErrUnimplementedPredicateType) diff --git a/app/server/datasource/prometheus/read_client.go b/app/server/datasource/prometheus/read_client.go index 1ea4e71c..9735cf3d 100644 --- a/app/server/datasource/prometheus/read_client.go +++ b/app/server/datasource/prometheus/read_client.go @@ -69,6 +69,7 @@ func (rc *ReadClient) Read(ctx context.Context, pbQuery *prompb.Query) (storage. timeSeries, err := rc.promClient.Read(ctx, pbQuery, false) if err != nil { cancel() + return nil, nil, fmt.Errorf("client remote read: %w", err) } diff --git a/app/server/datasource/rdbms/clickhouse/connection_http.go b/app/server/datasource/rdbms/clickhouse/connection_http.go index f78b8e1a..41ebdc39 100644 --- a/app/server/datasource/rdbms/clickhouse/connection_http.go +++ b/app/server/datasource/rdbms/clickhouse/connection_http.go @@ -54,10 +54,10 @@ type connectionHTTP struct { tableName string } -func (c *connectionHTTP) Query(params *rdbms_utils.QueryParams) (rdbms_utils.Rows, error) { +func (c *connectionHTTP) Query(params *rdbms_utils.QueryParams) (*rdbms_utils.QueryResult, error) { c.queryLogger.Dump(params.QueryText, params.QueryArgs.Values()...) - out, err := c.DB.QueryContext(params.Ctx, params.QueryText, rewriteQueryArgs(params.QueryArgs.Values())...) + out, err := c.QueryContext(params.Ctx, params.QueryText, rewriteQueryArgs(params.QueryArgs.Values())...) 
if err != nil { return nil, fmt.Errorf("query context: %w", err) } @@ -72,7 +72,11 @@ func (c *connectionHTTP) Query(params *rdbms_utils.QueryParams) (rdbms_utils.Row return nil, fmt.Errorf("rows err: %w", err) } - return &rows{Rows: out}, nil + rows := &rows{Rows: out} + + return &rdbms_utils.QueryResult{ + Rows: rows, + }, nil } func (c *connectionHTTP) DataSourceInstance() *api_common.TGenericDataSourceInstance { diff --git a/app/server/datasource/rdbms/clickhouse/connection_manager.go b/app/server/datasource/rdbms/clickhouse/connection_manager.go index b60d3dac..383c855d 100644 --- a/app/server/datasource/rdbms/clickhouse/connection_manager.go +++ b/app/server/datasource/rdbms/clickhouse/connection_manager.go @@ -2,6 +2,7 @@ package clickhouse import ( "context" + "errors" "fmt" "go.uber.org/zap" @@ -23,7 +24,7 @@ func (c *connectionManager) Make( params *rdbms_utils.ConnectionParams, ) ([]rdbms_utils.Connection, error) { if params.DataSourceInstance.GetCredentials().GetBasic() == nil { - return nil, fmt.Errorf("currently only basic auth is supported") + return nil, errors.New("currently only basic auth is supported") } var ( diff --git a/app/server/datasource/rdbms/clickhouse/connection_native.go b/app/server/datasource/rdbms/clickhouse/connection_native.go index 1d878f7f..e063b434 100644 --- a/app/server/datasource/rdbms/clickhouse/connection_native.go +++ b/app/server/datasource/rdbms/clickhouse/connection_native.go @@ -54,7 +54,7 @@ type connectionNative struct { tableName string } -func (c *connectionNative) Query(params *rdbms_utils.QueryParams) (rdbms_utils.Rows, error) { +func (c *connectionNative) Query(params *rdbms_utils.QueryParams) (*rdbms_utils.QueryResult, error) { c.queryLogger.Dump(params.QueryText, params.QueryArgs.Values()...) out, err := c.Conn.Query(params.Ctx, params.QueryText, rewriteQueryArgs(params.QueryArgs.Values())...) 
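Aside: every data source's Query now returns *rdbms_utils.QueryResult instead of the bare Rows iterator, yet the call sites further down in this patch keep deferring common.LogCloserError on the result. That still compiles because QueryResult gains a Close method (see utils/interface.go later in this patch) and therefore satisfies io.Closer. A minimal sketch of that mechanism, with hypothetical names (fakeResult, closeAndLog), not taken from the patch:

package main

import (
	"errors"
	"io"

	"go.uber.org/zap"
)

// fakeResult stands in for any result wrapper that owns closable resources.
type fakeResult struct{ closed bool }

// Close makes fakeResult an io.Closer, just as QueryResult.Close does in this patch.
func (r *fakeResult) Close() error {
	if r.closed {
		return errors.New("already closed")
	}

	r.closed = true

	return nil
}

// closeAndLog mirrors the role of a "close and log" helper: it only needs an
// io.Closer, so it accepts Rows, a QueryResult-like wrapper, or anything else
// with a Close method.
func closeAndLog(logger *zap.Logger, c io.Closer, msg string) {
	if err := c.Close(); err != nil {
		logger.Error(msg, zap.Error(err))
	}
}

func main() {
	logger := zap.NewExample()
	res := &fakeResult{}

	defer closeAndLog(logger, res, "close query result")

	_ = res // consume the result here
}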
@@ -72,7 +72,11 @@ func (c *connectionNative) Query(params *rdbms_utils.QueryParams) (rdbms_utils.R return nil, fmt.Errorf("rows err: %w", err) } - return &rowsNative{Rows: out}, nil + rows := &rowsNative{Rows: out} + + return &rdbms_utils.QueryResult{ + Rows: rows, + }, nil } func (c *connectionNative) DataSourceInstance() *api_common.TGenericDataSourceInstance { diff --git a/app/server/datasource/rdbms/clickhouse/sql_formatter.go b/app/server/datasource/rdbms/clickhouse/sql_formatter.go index dbdb9430..3a5ce995 100644 --- a/app/server/datasource/rdbms/clickhouse/sql_formatter.go +++ b/app/server/datasource/rdbms/clickhouse/sql_formatter.go @@ -17,7 +17,7 @@ type sqlFormatter struct { cfg *config.TPushdownConfig } -//nolint:gocyclo +//nolint:gocyclo,revive func (f *sqlFormatter) supportsType(typeID Ydb.Type_PrimitiveTypeId) bool { switch typeID { case Ydb.Type_BOOL: @@ -88,6 +88,7 @@ func (sqlFormatter) GetPlaceholder(_ int) string { func (sqlFormatter) SanitiseIdentifier(ident string) string { // https: //clickhouse.com/docs/en/sql-reference/syntax#identifiers sanitizedIdent := strings.ReplaceAll(ident, string([]byte{0}), "") + sanitizedIdent = `"` + strings.ReplaceAll(sanitizedIdent, `"`, `""`) + `"` return sanitizedIdent diff --git a/app/server/datasource/rdbms/clickhouse/sql_formatter_test.go b/app/server/datasource/rdbms/clickhouse/sql_formatter_test.go index ae9cd976..ad31b7c5 100644 --- a/app/server/datasource/rdbms/clickhouse/sql_formatter_test.go +++ b/app/server/datasource/rdbms/clickhouse/sql_formatter_test.go @@ -471,6 +471,7 @@ func TestMakeSelectQuery(t *testing.T) { ) if tc.err != nil { require.True(t, errors.Is(err, tc.err), err, tc.err) + return } diff --git a/app/server/datasource/rdbms/datasource.go b/app/server/datasource/rdbms/datasource.go index 9df3c6d4..1878b563 100644 --- a/app/server/datasource/rdbms/datasource.go +++ b/app/server/datasource/rdbms/datasource.go @@ -2,6 +2,7 @@ package rdbms import ( "context" + "errors" "fmt" "go.uber.org/zap" @@ -111,7 +112,6 @@ func (ds *dataSourceImpl) ReadSplit( return nil }, ) - if err != nil { return fmt.Errorf("make connection: %w", err) } @@ -195,7 +195,7 @@ func (ds *dataSourceImpl) doReadSplitSingleConn( sink paging.Sink[any], conn rdbms_utils.Connection, ) (int64, error) { - var rows rdbms_utils.Rows + var queryResult *rdbms_utils.QueryResult err := ds.retrierSet.Query.Run( ctx, @@ -203,20 +203,50 @@ func (ds *dataSourceImpl) doReadSplitSingleConn( func() error { var queryErr error - if rows, queryErr = conn.Query(&query.QueryParams); queryErr != nil { + if queryResult, queryErr = conn.Query(&query.QueryParams); queryErr != nil { return fmt.Errorf("query error: %w", queryErr) } return nil }, ) - if err != nil { return 0, fmt.Errorf("query: %w", err) } - defer common.LogCloserError(logger, rows, "close rows") + defer common.LogCloserError(logger, queryResult, "close query result") + var ( + rowsRead int64 + processErr error + ) + + // Choose the appropriate processing method based on which field is filled + if queryResult.Rows != nil { + rowsRead, processErr = ds.processRowBasedResult(query, queryResult.Rows, sink) + } else if queryResult.Columns != nil { + rowsRead, processErr = ds.processArrowBasedResult(queryResult.Columns, sink) + } else { + return 0, errors.New("query result contains neither Rows nor Columns") + } + + if processErr != nil { + return 0, processErr + } + + // Notify sink that there will be no more data from this connection. 
+ // Hours lost in attempts to move this call into defer: 2 + sink.Finish() + + return rowsRead, nil +} + +// processRowBasedResult processes row-based results from the database +func (ds *dataSourceImpl) processRowBasedResult( + query *rdbms_utils.SelectQuery, + rows rdbms_utils.Rows, + sink paging.Sink[any], +) (int64, error) { transformer, err := rows.MakeTransformer(query.YdbColumns, ds.converterCollection) if err != nil { return 0, fmt.Errorf("make transformer: %w", err) @@ -248,9 +278,29 @@ func (ds *dataSourceImpl) doReadSplitSingleConn( return 0, fmt.Errorf("rows error: %w", err) } - // Notify sink that there will be no more data from this connection. - // Hours lost in attempts to move this call into defer: 2 - sink.Finish() + return rowsRead, nil +} + +// processArrowBasedResult processes results obtained from the database directly in Arrow format +func (dataSourceImpl) processArrowBasedResult( + columns rdbms_utils.Columns, + sink paging.Sink[any], +) (int64, error) { + rowsRead := int64(0) + + for columns.Next() { + record := columns.Record() + + rowsRead += record.NumRows() + + if err := sink.AddArrowRecord(record); err != nil { + return 0, fmt.Errorf("add arrow record to paging writer: %w", err) + } + } + + if err := columns.Err(); err != nil { + return 0, fmt.Errorf("columns error: %w", err) + } return rowsRead, nil } diff --git a/app/server/datasource/rdbms/datasource_test.go b/app/server/datasource/rdbms/datasource_test.go index df976f3f..50a11383 100644 --- a/app/server/datasource/rdbms/datasource_test.go +++ b/app/server/datasource/rdbms/datasource_test.go @@ -3,7 +3,6 @@ package rdbms import ( "context" "errors" - "fmt" "testing" "github.com/stretchr/testify/mock" @@ -136,6 +135,7 @@ func TestReadSplit(t *testing.T) { dataSource := NewDataSource(logger, preset, converterCollection, observationStorage) queryID := "test-query-id" + err = dataSource.ReadSplit(ctx, logger, queryID, readSplitsRequest, split, sinkFactory) require.NoError(t, err) @@ -175,7 +175,7 @@ func TestReadSplit(t *testing.T) { }, } - scanErr := fmt.Errorf("scan failed") + scanErr := errors.New("scan failed") rows.On("MakeTransformer", []*Ydb.Column{ @@ -207,6 +207,7 @@ func TestReadSplit(t *testing.T) { dataSource := NewDataSource(logger, preset, converterCollection, observationStorage) queryID := "test-query-id" + err = dataSource.ReadSplit(ctx, logger, queryID, readSplitsRequest, split, sinkFactory) require.True(t, errors.Is(err, scanErr)) diff --git a/app/server/datasource/rdbms/logging/resolver_dynamic.go b/app/server/datasource/rdbms/logging/resolver_dynamic.go index 122b8198..aee2b7c2 100644 --- a/app/server/datasource/rdbms/logging/resolver_dynamic.go +++ b/app/server/datasource/rdbms/logging/resolver_dynamic.go @@ -2,6 +2,7 @@ package logging import ( "crypto/tls" + "errors" "fmt" "go.uber.org/zap" @@ -24,7 +25,7 @@ func (r *dynamicResolver) resolve( request *resolveRequest, ) (*resolveResponse, error) { if request.credentials.GetToken().GetValue() == "" { - return nil, fmt.Errorf("IAM token is missing") + return nil, errors.New("IAM token is missing") } md := metadata.Pairs("authorization", fmt.Sprintf("Bearer %s", request.credentials.GetToken().GetValue())) @@ -39,7 +40,6 @@ func (r *dynamicResolver) resolve( FolderId: request.folderId, GroupName: request.logGroupName, }) - if err != nil { return nil, fmt.Errorf("get reading endpoint: %w", err) } @@ -98,7 +98,7 @@ func newResolverDynamic(cfg *config.TLoggingConfig) (Resolver, error) { tlsCfg := &tls.Config{} - grpcConn, err := 
grpc.Dial(endpoint, grpc.WithTransportCredentials(credentials.NewTLS(tlsCfg))) + grpcConn, err := grpc.NewClient(endpoint, grpc.WithTransportCredentials(credentials.NewTLS(tlsCfg))) if err != nil { return nil, fmt.Errorf("GRPC dial: %w", err) } diff --git a/app/server/datasource/rdbms/logging/resolver_static.go b/app/server/datasource/rdbms/logging/resolver_static.go index 203ae5a0..831bfc5c 100644 --- a/app/server/datasource/rdbms/logging/resolver_static.go +++ b/app/server/datasource/rdbms/logging/resolver_static.go @@ -1,6 +1,7 @@ package logging import ( + "errors" "fmt" "html/template" "math/rand" @@ -15,7 +16,7 @@ type staticResolver struct { func (r *staticResolver) resolve(request *resolveRequest) (*resolveResponse, error) { if len(r.cfg.Databases) == 0 { - return nil, fmt.Errorf("no YDB endpoints provided") + return nil, errors.New("no YDB endpoints provided") } // get random YDB endpoint from provided list diff --git a/app/server/datasource/rdbms/logging/split_provider.go b/app/server/datasource/rdbms/logging/split_provider.go index 9b4b44fa..cc6d99af 100644 --- a/app/server/datasource/rdbms/logging/split_provider.go +++ b/app/server/datasource/rdbms/logging/split_provider.go @@ -92,7 +92,6 @@ func (s *splitProviderImpl) handleYDBSource( return nil }, ) - if err != nil { return fmt.Errorf("retry: %w", err) } @@ -104,7 +103,6 @@ func (s *splitProviderImpl) handleYDBSource( params.Logger, cs[0], ) - if err != nil { return fmt.Errorf("get column shard tablet ids: %w", err) } diff --git a/app/server/datasource/rdbms/logging/sql_formatter.go b/app/server/datasource/rdbms/logging/sql_formatter.go index 3d4400ac..37626fdf 100644 --- a/app/server/datasource/rdbms/logging/sql_formatter.go +++ b/app/server/datasource/rdbms/logging/sql_formatter.go @@ -197,8 +197,8 @@ func (sqlFormatter) TransformPredicateComparison( // For the comparison related to `level` field if src.LeftValue.GetColumn() == levelColumnName && src.RightValue.GetTypedValue() != nil { - if !(src.Operation == api_service_protos.TPredicate_TComparison_EQ || - src.Operation == api_service_protos.TPredicate_TComparison_NE) { + if src.Operation != api_service_protos.TPredicate_TComparison_EQ && + src.Operation != api_service_protos.TPredicate_TComparison_NE { return nil, fmt.Errorf("unsupported operation %v for `level` column comparison", src.Operation) } @@ -337,6 +337,7 @@ func checkTimestampFilter(predicate *api_service_protos.TPredicate) (bool, error return true, nil } } + default: } return false, nil diff --git a/app/server/datasource/rdbms/ms_sql_server/connection.go b/app/server/datasource/rdbms/ms_sql_server/connection.go index 6fee45b2..1bcd3d0c 100644 --- a/app/server/datasource/rdbms/ms_sql_server/connection.go +++ b/app/server/datasource/rdbms/ms_sql_server/connection.go @@ -32,12 +32,17 @@ func (c *Connection) TableName() string { return c.tableName } -func (c *Connection) Query(params *rdbms_utils.QueryParams) (rdbms_utils.Rows, error) { +func (c *Connection) Query(params *rdbms_utils.QueryParams) (*rdbms_utils.QueryResult, error) { c.queryLogger.Dump(params.QueryText, params.QueryArgs.Values()...) out, err := c.db.QueryContext(params.Ctx, params.QueryText, params.QueryArgs.Values()...) 
+ if err != nil { + return nil, err + } - return rows{out}, err + return &rdbms_utils.QueryResult{ + Rows: rows{out}, + }, nil } func (c *Connection) Logger() *zap.Logger { diff --git a/app/server/datasource/rdbms/ms_sql_server/connection_manager.go b/app/server/datasource/rdbms/ms_sql_server/connection_manager.go index 9ca3f3c5..5b2c7dfa 100644 --- a/app/server/datasource/rdbms/ms_sql_server/connection_manager.go +++ b/app/server/datasource/rdbms/ms_sql_server/connection_manager.go @@ -58,6 +58,7 @@ func (c *connectionManager) Make( err = db.PingContext(pingCtx) if err != nil { common.LogCloserError(logger, db, "close connection") + return nil, fmt.Errorf("ping: %w", err) } diff --git a/app/server/datasource/rdbms/ms_sql_server/sql_formatter.go b/app/server/datasource/rdbms/ms_sql_server/sql_formatter.go index e9f50b32..47ad784a 100644 --- a/app/server/datasource/rdbms/ms_sql_server/sql_formatter.go +++ b/app/server/datasource/rdbms/ms_sql_server/sql_formatter.go @@ -20,6 +20,7 @@ type sqlFormatter struct { cfg *config.TPushdownConfig } +//nolint:revive func (f *sqlFormatter) supportsType(typeID Ydb.Type_PrimitiveTypeId) bool { switch typeID { case Ydb.Type_BOOL: @@ -83,6 +84,7 @@ func (sqlFormatter) GetPlaceholder(n int) string { func (sqlFormatter) SanitiseIdentifier(ident string) string { sanitizedIdent := strings.ReplaceAll(ident, string([]byte{0}), "") + sanitizedIdent = `"` + strings.ReplaceAll(sanitizedIdent, `"`, `""`) + `"` return sanitizedIdent diff --git a/app/server/datasource/rdbms/ms_sql_server/type_mapping.go b/app/server/datasource/rdbms/ms_sql_server/type_mapping.go index 1df39d4a..953a45fe 100644 --- a/app/server/datasource/rdbms/ms_sql_server/type_mapping.go +++ b/app/server/datasource/rdbms/ms_sql_server/type_mapping.go @@ -58,19 +58,16 @@ func (typeMapper) SQLTypeToYDBColumn( ydbType = common.MakePrimitiveType(Ydb.Type_UTF8) case "date": ydbType, err = common.MakeYdbDateTimeType(Ydb.Type_DATE, rules.GetDateTimeFormat()) - if err != nil { return nil, fmt.Errorf("make YDB date time type: %w", err) } case "smalldatetime": ydbType, err = common.MakeYdbDateTimeType(Ydb.Type_DATETIME, rules.GetDateTimeFormat()) - if err != nil { return nil, fmt.Errorf("make YDB date time type: %w", err) } case "datetime", "datetime2": ydbType, err = common.MakeYdbDateTimeType(Ydb.Type_TIMESTAMP, rules.GetDateTimeFormat()) - if err != nil { return nil, fmt.Errorf("make YDB date time type: %w", err) } @@ -93,6 +90,7 @@ func (typeMapper) SQLTypeToYDBColumn( //nolint:funlen,gocyclo func transformerFromSQLTypes(types []string, ydbTypes []*Ydb.Type, cc conversion.Collection) (paging.RowTransformer[any], error) { _ = ydbTypes + acceptors := make([]any, 0, len(types)) appenders := make([]func(acceptor any, builder array.Builder) error, 0, len(types)) diff --git a/app/server/datasource/rdbms/mysql/connection.go b/app/server/datasource/rdbms/mysql/connection.go index e68eb511..b70ccde9 100644 --- a/app/server/datasource/rdbms/mysql/connection.go +++ b/app/server/datasource/rdbms/mysql/connection.go @@ -42,7 +42,7 @@ func transformArgs(src *rdbms_utils.QueryArgs) []any { return dst } -func (c *connection) Query(params *rdbms_utils.QueryParams) (rdbms_utils.Rows, error) { +func (c *connection) Query(params *rdbms_utils.QueryParams) (*rdbms_utils.QueryResult, error) { c.queryLogger.Dump(params.QueryText, params.QueryArgs.Values()...) 
results := make(chan rowData, c.cfg.ResultChanCapacity) @@ -62,7 +62,7 @@ func (c *connection) Query(params *rdbms_utils.QueryParams) (rdbms_utils.Rows, e stmt, err := c.conn.Prepare(params.QueryText) if err != nil { - return r, fmt.Errorf("mysql: failed to prepare query: %w", err) + return &rdbms_utils.QueryResult{Rows: r}, fmt.Errorf("mysql: failed to prepare query: %w", err) } go func() { @@ -79,6 +79,7 @@ func (c *connection) Query(params *rdbms_utils.QueryParams) (rdbms_utils.Rows, e for i, r := range row { newRow[i].valueType = r.Type + val := r.Value() switch val.(type) { @@ -105,7 +106,9 @@ func (c *connection) Query(params *rdbms_utils.QueryParams) (rdbms_utils.Rows, e ) }() - return r, nil + return &rdbms_utils.QueryResult{ + Rows: r, + }, nil } func (c *connection) Logger() *zap.Logger { diff --git a/app/server/datasource/rdbms/mysql/connection_manager.go b/app/server/datasource/rdbms/mysql/connection_manager.go index 55dda7a6..79474c42 100644 --- a/app/server/datasource/rdbms/mysql/connection_manager.go +++ b/app/server/datasource/rdbms/mysql/connection_manager.go @@ -31,7 +31,7 @@ func (c *connectionManager) Make( optionFuncs := make([]func(c *client.Conn), 0) if dsi.GetCredentials().GetBasic() == nil { - return nil, fmt.Errorf("currently only basic auth is supported") + return nil, errors.New("currently only basic auth is supported") } if dsi.GetUseTls() { diff --git a/app/server/datasource/rdbms/mysql/rows.go b/app/server/datasource/rdbms/mysql/rows.go index 52f9e1a4..c49a1ee4 100644 --- a/app/server/datasource/rdbms/mysql/rows.go +++ b/app/server/datasource/rdbms/mysql/rows.go @@ -2,6 +2,7 @@ package mysql import ( "context" + "errors" "fmt" "io" "sync/atomic" @@ -75,6 +76,7 @@ func (r *rows) maybeInitializeTransformer(fields []*mysql.Field) { for i := range fields { t := fields[i].Type + mySQLTypes = append(mySQLTypes, t) } @@ -82,6 +84,7 @@ func (r *rows) maybeInitializeTransformer(fields []*mysql.Field) { case r.transformerInitChan <- mySQLTypes: case <-r.ctx.Done(): } + close(r.transformerInitChan) } } @@ -225,6 +228,7 @@ func scanDateValue(dest, value any, fieldValueType mysql.FieldValueType) error { if fieldValueType == mysql.FieldValueTypeNull { *out = nil + return nil } @@ -244,6 +248,7 @@ func scanDatetimeValue(dest, value any, fieldValueType mysql.FieldValueType) err if fieldValueType == mysql.FieldValueTypeNull { *out = nil + return nil } @@ -286,7 +291,7 @@ func (r *rows) MakeTransformer(ydbColumns []*Ydb.Column, cc conversion.Collectio select { case mySQLTypes, ok = <-r.transformerInitChan: if !ok { - return nil, fmt.Errorf("mysql types are not ready") + return nil, errors.New("mysql types are not ready") } case err := <-r.errChan: if err != nil { diff --git a/app/server/datasource/rdbms/mysql/sql_formatter.go b/app/server/datasource/rdbms/mysql/sql_formatter.go index a56ae933..86059e81 100644 --- a/app/server/datasource/rdbms/mysql/sql_formatter.go +++ b/app/server/datasource/rdbms/mysql/sql_formatter.go @@ -18,6 +18,7 @@ type sqlFormatter struct { cfg *config.TPushdownConfig } +//nolint:revive func (f *sqlFormatter) supportsType(typeID Ydb.Type_PrimitiveTypeId) bool { switch typeID { case Ydb.Type_BOOL: @@ -80,7 +81,7 @@ func (sqlFormatter) GetPlaceholder(_ int) string { } func (sqlFormatter) SanitiseIdentifier(ident string) string { - return fmt.Sprintf("`%s`", strings.Replace(ident, "`", "``", -1)) + return fmt.Sprintf("`%s`", strings.ReplaceAll(ident, "`", "``")) } func (f sqlFormatter) FormatWhat(what *api_service_protos.TSelect_TWhat, _ string) (string, 
error) { diff --git a/app/server/datasource/rdbms/mysql/type_mapping.go b/app/server/datasource/rdbms/mysql/type_mapping.go index 1ec46fcf..b705ed21 100644 --- a/app/server/datasource/rdbms/mysql/type_mapping.go +++ b/app/server/datasource/rdbms/mysql/type_mapping.go @@ -41,8 +41,8 @@ func (tm *typeMapper) SQLTypeToYDBColumn( if matches := tm.reType.FindStringSubmatch(columnDescription.Type); len(matches) > 0 { typeName = matches[tm.reType.SubexpIndex("type")] - typeSize, err = strconv.ParseUint(matches[tm.reType.SubexpIndex("size")], 10, 64) + typeSize, err = strconv.ParseUint(matches[tm.reType.SubexpIndex("size")], 10, 64) if err != nil { return nil, fmt.Errorf("parse uint: %w", err) } @@ -97,14 +97,12 @@ func (tm *typeMapper) SQLTypeToYDBColumn( } ydbColumn.Type = common.MakePrimitiveType(ydbType) - case typeLongBlob, typeBlob, typeMediumBlob, typeTinyBlob: + case typeLongBlob, typeBlob, typeMediumBlob, typeTinyBlob, + typeBinary, typeVarBinary, + typeText, typeLongText, typeTinyText, typeMediumText: ydbColumn.Type = common.MakePrimitiveType(Ydb.Type_STRING) case typeVarChar, typeChar: ydbColumn.Type = common.MakePrimitiveType(Ydb.Type_UTF8) - case typeBinary, typeVarBinary: - ydbColumn.Type = common.MakePrimitiveType(Ydb.Type_STRING) - case typeText, typeLongText, typeTinyText, typeMediumText: - ydbColumn.Type = common.MakePrimitiveType(Ydb.Type_STRING) case typeDate: ydbColumn.Type, err = common.MakeYdbDateTimeType(Ydb.Type_DATE, typeMapperSettings.GetDateTimeFormat()) if err != nil { diff --git a/app/server/datasource/rdbms/oracle/connection.go b/app/server/datasource/rdbms/oracle/connection.go index 3db52555..39c781fc 100644 --- a/app/server/datasource/rdbms/oracle/connection.go +++ b/app/server/datasource/rdbms/oracle/connection.go @@ -25,7 +25,7 @@ func (c *connection) Close() error { return c.conn.Close() } -func (c *connection) Query(queryParams *rdbms_utils.QueryParams) (rdbms_utils.Rows, error) { +func (c *connection) Query(queryParams *rdbms_utils.QueryParams) (*rdbms_utils.QueryResult, error) { c.queryLogger.Dump(queryParams.QueryText, queryParams.QueryArgs.Values()...) 
valueArgs := make([]driver.NamedValue, queryParams.QueryArgs.Count()) @@ -46,7 +46,9 @@ func (c *connection) Query(queryParams *rdbms_utils.QueryParams) (rdbms_utils.Ro rows := newRows(out) - return rows, nil + return &rdbms_utils.QueryResult{ + Rows: rows, + }, nil } func (c *connection) DataSourceInstance() *api_common.TGenericDataSourceInstance { diff --git a/app/server/datasource/rdbms/oracle/connection_manager.go b/app/server/datasource/rdbms/oracle/connection_manager.go index 11f23ecb..d11cf06f 100644 --- a/app/server/datasource/rdbms/oracle/connection_manager.go +++ b/app/server/datasource/rdbms/oracle/connection_manager.go @@ -31,6 +31,7 @@ func (c *connectionManager) Make( var err error urlOptions := make(map[string]string) + if dsi.UseTls { // more information in YQ-3456 urlOptions["SSL"] = "TRUE" @@ -68,6 +69,7 @@ func (c *connectionManager) Make( err = conn.Ping(pingCtx) if err != nil { conn.Close() + return nil, fmt.Errorf("ping database: %w", err) } diff --git a/app/server/datasource/rdbms/oracle/rows.go b/app/server/datasource/rdbms/oracle/rows.go index 54f98ba6..98ea214b 100644 --- a/app/server/datasource/rdbms/oracle/rows.go +++ b/app/server/datasource/rdbms/oracle/rows.go @@ -2,6 +2,7 @@ package oracle import ( "database/sql/driver" + "errors" "fmt" "io" "strconv" @@ -196,7 +197,7 @@ func (r *rows) MakeTransformer(ydbColumns []*Ydb.Column, cc conversion.Collectio // https://cs.opensource.google/go/go/+/refs/tags/go1.22.5:src/database/sql/sql.go;l=3244 prop, ok := r.rows.(driver.RowsColumnTypeDatabaseTypeName) if !ok { - return nil, fmt.Errorf("can't cast driver.Rows to driver.RowsColumnTypeDatabaseTypeName") + return nil, errors.New("can't cast driver.Rows to driver.RowsColumnTypeDatabaseTypeName") } typeNames := make([]string, 0, len(r.rows.Columns())) diff --git a/app/server/datasource/rdbms/oracle/sql_formatter.go b/app/server/datasource/rdbms/oracle/sql_formatter.go index 75cf2826..0e9b1c22 100644 --- a/app/server/datasource/rdbms/oracle/sql_formatter.go +++ b/app/server/datasource/rdbms/oracle/sql_formatter.go @@ -18,6 +18,7 @@ type sqlFormatter struct { cfg *config.TPushdownConfig } +//nolint:revive func (f *sqlFormatter) supportsType(typeID Ydb.Type_PrimitiveTypeId) bool { switch typeID { // case Ydb.Type_BOOL: // TODO: YQ-3527 @@ -83,6 +84,7 @@ func (sqlFormatter) GetPlaceholder(n int) string { func (sqlFormatter) SanitiseIdentifier(ident string) string { sanitizedIdent := strings.ReplaceAll(ident, string([]byte{0}), "") + sanitizedIdent = `"` + strings.ReplaceAll(sanitizedIdent, `"`, `""`) + `"` return sanitizedIdent diff --git a/app/server/datasource/rdbms/oracle/type_mapping.go b/app/server/datasource/rdbms/oracle/type_mapping.go index f4b1a77c..1ccf661f 100644 --- a/app/server/datasource/rdbms/oracle/type_mapping.go +++ b/app/server/datasource/rdbms/oracle/type_mapping.go @@ -90,6 +90,7 @@ func (tm typeMapper) SQLTypeToYDBColumn( //nolint:gocyclo func transformerFromSQLTypes(types []string, ydbTypes []*Ydb.Type, cc conversion.Collection) (paging.RowTransformer[any], error) { _ = ydbTypes + acceptors := make([]any, 0, len(types)) appenders := make([]func(acceptor any, builder array.Builder) error, 0, len(types)) diff --git a/app/server/datasource/rdbms/postgresql/connection_manager.go b/app/server/datasource/rdbms/postgresql/connection_manager.go index 151d93a5..aea40297 100644 --- a/app/server/datasource/rdbms/postgresql/connection_manager.go +++ b/app/server/datasource/rdbms/postgresql/connection_manager.go @@ -2,6 +2,7 @@ package postgresql import ( 
"context" + "errors" "fmt" "time" @@ -57,7 +58,7 @@ func (c *connection) Close() error { return c.Conn.Close(context.TODO()) } -func (c *connection) Query(params *rdbms_utils.QueryParams) (rdbms_utils.Rows, error) { +func (c *connection) Query(params *rdbms_utils.QueryParams) (*rdbms_utils.QueryResult, error) { c.queryLogger.Dump(params.QueryText, params.QueryArgs.Values()...) out, err := c.Conn.Query(params.Ctx, params.QueryText, params.QueryArgs.Values()...) @@ -65,7 +66,9 @@ func (c *connection) Query(params *rdbms_utils.QueryParams) (rdbms_utils.Rows, e return nil, fmt.Errorf("query error: %w", err) } - return rows{Rows: out}, nil + return &rdbms_utils.QueryResult{ + Rows: rows{Rows: out}, + }, nil } func (c *connection) DataSourceInstance() *api_common.TGenericDataSourceInstance { @@ -93,7 +96,7 @@ func (c *connectionManager) Make( ) ([]rdbms_utils.Connection, error) { dsi, ctx, logger := params.DataSourceInstance, params.Ctx, params.Logger if dsi.GetCredentials().GetBasic() == nil { - return nil, fmt.Errorf("currently only basic auth is supported") + return nil, errors.New("currently only basic auth is supported") } if dsi.Protocol != api_common.EGenericProtocol_NATIVE { @@ -105,6 +108,7 @@ func (c *connectionManager) Make( } connStr := "dbname=DBNAME user=USER password=PASSWORD host=HOST port=5432" + if dsi.UseTls { connStr += " sslmode=verify-full" } else { @@ -152,7 +156,7 @@ func (*connectionManager) Release(_ context.Context, logger *zap.Logger, cs []rd defer cancel() for _, conn := range cs { - if err := conn.(*connection).Conn.DeallocateAll(ctx); err != nil { + if err := conn.(*connection).DeallocateAll(ctx); err != nil { logger.Error("deallocate prepared statements", zap.Error(err)) } diff --git a/app/server/datasource/rdbms/postgresql/split_provider.go b/app/server/datasource/rdbms/postgresql/split_provider.go index 69cdfffd..54613515 100644 --- a/app/server/datasource/rdbms/postgresql/split_provider.go +++ b/app/server/datasource/rdbms/postgresql/split_provider.go @@ -2,6 +2,7 @@ package postgresql import ( "context" + "errors" "fmt" "go.uber.org/zap" @@ -58,7 +59,6 @@ func (s *splitProviderImpl) ListSplits( return nil }, ) - if err != nil { return fmt.Errorf("retry: %w", err) } @@ -75,7 +75,6 @@ func (s *splitProviderImpl) ListSplits( slct.DataSourceInstance.GetPgOptions().Schema, slct.From.Table, ) - if err != nil { return fmt.Errorf("get table physical size: %w", err) } @@ -178,19 +177,20 @@ func (splitProviderImpl) getTablePhysicalSize( QueryArgs: args, } - rows, err := conn.Query(queryParams) + result, err := conn.Query(queryParams) if err != nil { return 0, fmt.Errorf("conn query: %w", err) } - defer rows.Close() + + defer result.Close() var pgTableSize uint64 - if !rows.Next() { - return 0, fmt.Errorf("no rows returned from query") + if !result.Rows.Next() { + return 0, errors.New("no rows returned from query") } - if err := rows.Scan(&pgTableSize); err != nil { + if err := result.Rows.Scan(&pgTableSize); err != nil { return 0, fmt.Errorf("rows scan: %w", err) } @@ -237,16 +237,17 @@ WHERE QueryArgs: args, } - rows, err := conn.Query(queryParams) + result, err := conn.Query(queryParams) if err != nil { return nil, fmt.Errorf("conn query: %w", err) } - defer rows.Close() + defer result.Close() var ( columnName string columnType string results []*primaryKey + rows = result.Rows ) for cont := true; cont; cont = rows.NextResultSet() { @@ -316,12 +317,14 @@ WHERE QueryArgs: args, } - rows, err := conn.Query(queryParams) + result, err := conn.Query(queryParams) if err != 
nil { return nil, fmt.Errorf("conn query: %w", err) } - defer rows.Close() + defer result.Close() + + rows := result.Rows var bounds []T @@ -341,20 +344,20 @@ WHERE logger.Debug("discovered histogram bounds", zap.String("column_name", pk.columnName), zap.Int("total_bounds", len(bounds))) // Now we need to transfer histogram bounds into splits - result := make([]*TSplitDescription_THistogramBounds, 0, len(bounds)+1) + splits := make([]*TSplitDescription_THistogramBounds, 0, len(bounds)+1) // Add first open interval - result = append(result, createHistogramBound(pk.columnName, nil, &bounds[0])) + splits = append(splits, createHistogramBound(pk.columnName, nil, &bounds[0])) // Add intervals between bounds for i := 0; i < len(bounds)-1; i++ { - result = append(result, createHistogramBound(pk.columnName, &bounds[i], &bounds[i+1])) + splits = append(splits, createHistogramBound(pk.columnName, &bounds[i], &bounds[i+1])) } // Add last open interval - result = append(result, createHistogramBound(pk.columnName, &bounds[len(bounds)-1], nil)) + splits = append(splits, createHistogramBound(pk.columnName, &bounds[len(bounds)-1], nil)) - return result, nil + return splits, nil } func createHistogramBound[T int32 | int64 | string](columnName string, lower, upper *T) *TSplitDescription_THistogramBounds { @@ -365,6 +368,7 @@ func createHistogramBound[T int32 | int64 | string](columnName string, lower, up switch any(zeroVal).(type) { case int32: var lowerVal, upperVal *wrapperspb.Int32Value + if lower != nil { lowerVal = wrapperspb.Int32(any(*lower).(int32)) } @@ -381,6 +385,7 @@ func createHistogramBound[T int32 | int64 | string](columnName string, lower, up } case int64: var lowerVal, upperVal *wrapperspb.Int64Value + if lower != nil { lowerVal = wrapperspb.Int64(any(*lower).(int64)) } diff --git a/app/server/datasource/rdbms/postgresql/sql_formatter.go b/app/server/datasource/rdbms/postgresql/sql_formatter.go index 349274c6..a333dba9 100644 --- a/app/server/datasource/rdbms/postgresql/sql_formatter.go +++ b/app/server/datasource/rdbms/postgresql/sql_formatter.go @@ -1,6 +1,7 @@ package postgresql import ( + "errors" "fmt" "strings" @@ -20,6 +21,7 @@ type sqlFormatter struct { cfg *config.TPushdownConfig } +//nolint:revive func (f *sqlFormatter) supportsType(typeID Ydb.Type_PrimitiveTypeId) bool { // TODO Json_document - binary form of json switch typeID { @@ -90,6 +92,7 @@ func (sqlFormatter) SanitiseIdentifier(ident string) string { // https://github.com/jackc/pgx/blob/v5.4.3/conn.go#L93 // https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS sanitizedIdent := strings.ReplaceAll(ident, string([]byte{0}), "") + sanitizedIdent = `"` + strings.ReplaceAll(sanitizedIdent, `"`, `""`) + `"` return sanitizedIdent @@ -205,13 +208,13 @@ func (f sqlFormatter) renderSelectQueryTextWithBoundsHelper( lower, upper any, ) (string, error) { if columnName == "" { - return "", fmt.Errorf("column name is empty") + return "", errors.New("column name is empty") } columnName = f.SanitiseIdentifier(columnName) if lower == nil && upper == nil { - return "", fmt.Errorf("you must fill either lower bounds, either upper bounds, or both of them") + return "", errors.New("you must fill either lower bounds, either upper bounds, or both of them") } if lower == nil && upper != nil { diff --git a/app/server/datasource/rdbms/postgresql/sql_formatter_test.go b/app/server/datasource/rdbms/postgresql/sql_formatter_test.go index 3b02f073..92b67a31 100644 --- 
a/app/server/datasource/rdbms/postgresql/sql_formatter_test.go +++ b/app/server/datasource/rdbms/postgresql/sql_formatter_test.go @@ -6,9 +6,9 @@ import ( "fmt" "testing" - "github.com/golang/protobuf/proto" "github.com/stretchr/testify/require" "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/proto" "google.golang.org/protobuf/types/known/wrapperspb" ydb "github.com/ydb-platform/ydb-go-genproto/protos/Ydb" @@ -588,6 +588,7 @@ func TestMakeSelectQuery(t *testing.T) { if tc.err != nil { require.True(t, errors.Is(err, tc.err), err, tc.err) + return } diff --git a/app/server/datasource/rdbms/postgresql/type_mapping.go b/app/server/datasource/rdbms/postgresql/type_mapping.go index 94dc5e4f..144d8e05 100644 --- a/app/server/datasource/rdbms/postgresql/type_mapping.go +++ b/app/server/datasource/rdbms/postgresql/type_mapping.go @@ -50,7 +50,6 @@ func (tm typeMapper) SQLTypeToYDBColumn( return nil, fmt.Errorf("convert type '%s': %w", columnDescription.Type, common.ErrDataTypeNotSupported) }() - if err != nil { return nil, err } @@ -196,21 +195,13 @@ func transformerFromOIDs(oids []uint32, ydbTypes []*Ydb.Type, cc conversion.Coll return appendValuePtrToArrowBuilder[float64, float64, *array.Float64Builder]( &cast.Float64, builder, cast.Valid, cc.Float64()) }) - case pgtype.TextOID, pgtype.BPCharOID, pgtype.VarcharOID: - acceptors = append(acceptors, new(pgtype.Text)) - appenders = append(appenders, func(acceptor any, builder array.Builder) error { - cast := acceptor.(*pgtype.Text) - - return appendValuePtrToArrowBuilder[string, string, *array.StringBuilder](&cast.String, builder, cast.Valid, cc.String()) - }) - case pgtype.JSONOID: + case pgtype.TextOID, pgtype.BPCharOID, pgtype.VarcharOID, pgtype.JSONOID: acceptors = append(acceptors, new(pgtype.Text)) appenders = append(appenders, func(acceptor any, builder array.Builder) error { cast := acceptor.(*pgtype.Text) return appendValuePtrToArrowBuilder[string, string, *array.StringBuilder](&cast.String, builder, cast.Valid, cc.String()) }) - // TODO: review all pgtype.json* types case pgtype.ByteaOID: acceptors = append(acceptors, new(*[]byte)) appenders = append(appenders, func(acceptor any, builder array.Builder) error { diff --git a/app/server/datasource/rdbms/utils/doc.go b/app/server/datasource/rdbms/utils/doc.go index adebe5d2..1b6e37dd 100644 --- a/app/server/datasource/rdbms/utils/doc.go +++ b/app/server/datasource/rdbms/utils/doc.go @@ -1,3 +1,3 @@ // Package utils contains helper types and functions that can be used by any // relational data source. -package utils +package utils //nolint:revive diff --git a/app/server/datasource/rdbms/utils/interface.go b/app/server/datasource/rdbms/utils/interface.go index 87dfe811..00993a04 100644 --- a/app/server/datasource/rdbms/utils/interface.go +++ b/app/server/datasource/rdbms/utils/interface.go @@ -1,8 +1,9 @@ -package utils +package utils //nolint:revive import ( "context" + "github.com/apache/arrow/go/v13/arrow" "go.uber.org/zap" "github.com/ydb-platform/ydb-go-genproto/protos/Ydb" @@ -16,6 +17,26 @@ import ( "github.com/ydb-platform/fq-connector-go/common" ) +// Rows represents an iterator returning data from a row-oriented storage. +// Each data piece is a row. +type Rows interface { + Close() error + Err() error + Next() bool + NextResultSet() bool + Scan(dest ...any) error + MakeTransformer(columns []*Ydb.Column, cc conversion.Collection) (paging.RowTransformer[any], error) +} + +// Columns represents an iterator returning data from a column-oriented storage. 
+// Each data piece is an Apache Arrow Record. +type Columns interface { + Close() error + Err() error + Next() bool + Record() arrow.Record +} + type QueryParams struct { Ctx context.Context Logger *zap.Logger @@ -23,9 +44,33 @@ type QueryParams struct { QueryArgs *QueryArgs } +type QueryResult struct { + Rows Rows + Columns Columns +} + +// Close implements io.Closer interface +func (qr *QueryResult) Close() error { + var rowsErr, columnsErr error + + if qr.Rows != nil { + rowsErr = qr.Rows.Close() + } + + if qr.Columns != nil { + columnsErr = qr.Columns.Close() + } + + if rowsErr != nil { + return rowsErr + } + + return columnsErr +} + type Connection interface { // Query runs a query on a specific connection. - Query(params *QueryParams) (Rows, error) + Query(params *QueryParams) (*QueryResult, error) // DataSourceInstance comprehensively describing the target of the connection DataSourceInstance() *api_common.TGenericDataSourceInstance // The name of a table that will be read via this connection. @@ -37,15 +82,6 @@ type Connection interface { Close() error } -type Rows interface { - Close() error - Err() error - Next() bool - NextResultSet() bool - Scan(dest ...any) error - MakeTransformer(columns []*Ydb.Column, cc conversion.Collection) (paging.RowTransformer[any], error) -} - //go:generate stringer -type=QueryPhase type QueryPhase int8 diff --git a/app/server/datasource/rdbms/utils/mock.go b/app/server/datasource/rdbms/utils/mock.go index 8b1a2dec..c7e74a18 100644 --- a/app/server/datasource/rdbms/utils/mock.go +++ b/app/server/datasource/rdbms/utils/mock.go @@ -1,4 +1,4 @@ -package utils +package utils //nolint:revive import ( "context" @@ -21,12 +21,21 @@ type ConnectionMock struct { mock.Mock } -func (m *ConnectionMock) Query(params *QueryParams) (Rows, error) { +func (m *ConnectionMock) Query(params *QueryParams) (*QueryResult, error) { called := []any{params.QueryText} + called = append(called, params.QueryArgs.Values()...) + args := m.Called(called...) 
- return args.Get(0).(Rows), args.Error(1) + rows := args.Get(0) + if rows == nil { + return nil, args.Error(1) + } + + return &QueryResult{ + Rows: rows.(Rows), + }, args.Error(1) } func (m *ConnectionMock) Close() error { diff --git a/app/server/datasource/rdbms/utils/predicate_builder.go b/app/server/datasource/rdbms/utils/predicate_builder.go index 40d77c56..2fb85c80 100644 --- a/app/server/datasource/rdbms/utils/predicate_builder.go +++ b/app/server/datasource/rdbms/utils/predicate_builder.go @@ -1,6 +1,7 @@ -package utils +package utils //nolint:revive import ( + "errors" "fmt" "strings" "time" @@ -61,35 +62,43 @@ func (pb *predicateBuilder) formatTypedValue( return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil case *Ydb.Value_Int32Value: pb.args.AddTyped(value.Type, v.Int32Value) + return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil case *Ydb.Value_Uint32Value: pb.args.AddTyped(value.Type, v.Uint32Value) + return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil case *Ydb.Value_Int64Value: switch value.Type.GetTypeId() { case Ydb.Type_INT64: pb.args.AddTyped(value.Type, v.Int64Value) + return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil case Ydb.Type_TIMESTAMP: // YQL Timestamp is always UTC pb.args.AddTyped(value.Type, time.UnixMicro(v.Int64Value).UTC()) + return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil default: return "", fmt.Errorf("unsupported type '%T': %w", v, common.ErrUnimplementedTypedValue) } case *Ydb.Value_Uint64Value: pb.args.AddTyped(value.Type, v.Uint64Value) + return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil case *Ydb.Value_FloatValue: pb.args.AddTyped(value.Type, v.FloatValue) + return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil case *Ydb.Value_DoubleValue: pb.args.AddTyped(value.Type, v.DoubleValue) + return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil case *Ydb.Value_BytesValue: switch t := value.Type.Type.(type) { case *Ydb.Type_TypeId: pb.args.AddTyped(value.Type, v.BytesValue) + return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil case *Ydb.Type_DecimalType: decimalValue := decimal.Deserialize(v.BytesValue, t.DecimalType.Scale) @@ -101,6 +110,7 @@ func (pb *predicateBuilder) formatTypedValue( } case *Ydb.Value_TextValue: pb.args.AddTyped(value.Type, v.TextValue) + return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil case *Ydb.Value_NullFlagValue: placeholder, err := pb.formatNullFlagValue(value) @@ -118,30 +128,39 @@ func (pb *predicateBuilder) formatOptionalValue(value *Ydb.TypedValue) (string, switch v := value.Value.Value.(type) { case *Ydb.Value_BoolValue: pb.args.AddTyped(value.Type, &v.BoolValue) + return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil case *Ydb.Value_Int32Value: pb.args.AddTyped(value.Type, &v.Int32Value) + return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil case *Ydb.Value_Uint32Value: pb.args.AddTyped(value.Type, &v.Uint32Value) + return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil case *Ydb.Value_Int64Value: pb.args.AddTyped(value.Type, &v.Int64Value) + return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil case *Ydb.Value_Uint64Value: pb.args.AddTyped(value.Type, &v.Uint64Value) + return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil case *Ydb.Value_FloatValue: pb.args.AddTyped(value.Type, &v.FloatValue) + return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil case *Ydb.Value_DoubleValue: pb.args.AddTyped(value.Type, &v.DoubleValue) + return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil case 
*Ydb.Value_BytesValue: pb.args.AddTyped(value.Type, &v.BytesValue) + return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil case *Ydb.Value_TextValue: pb.args.AddTyped(value.Type, &v.TextValue) + return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil case *Ydb.Value_NullFlagValue: placeholder, err := pb.formatNullFlagValue(value) @@ -160,6 +179,7 @@ func addTypedNull[ACCEPTOR_TYPE any]( ydbType *Ydb.Type, ) (string, error) { pb.args.AddTyped(ydbType, (*ACCEPTOR_TYPE)(nil)) + return pb.formatter.GetPlaceholder(pb.args.Count() - 1), nil } @@ -440,7 +460,6 @@ func (pb *predicateBuilder) formatConjunction( for _, predicate := range conjunction.Operands { statement, err = pb.formatPredicate(predicate, false, false) - if err != nil { if !topLevel { return "", fmt.Errorf("format predicate: %w", err) @@ -511,7 +530,7 @@ func (pb *predicateBuilder) formatDisjunction( } if cnt == 0 { - return "", fmt.Errorf("no operands") + return "", errors.New("no operands") } if cnt == 1 { @@ -680,6 +699,7 @@ func formatWhereClause( if common.OptionalFilteringAllowedErrors.Match(err) { logger.Warn("considering pushdown error as acceptable", zap.Error(err)) + return clause, pb.args, nil } diff --git a/app/server/datasource/rdbms/utils/queryphase_string.go b/app/server/datasource/rdbms/utils/queryphase_string.go index ba62bbea..ce866b0d 100644 --- a/app/server/datasource/rdbms/utils/queryphase_string.go +++ b/app/server/datasource/rdbms/utils/queryphase_string.go @@ -1,6 +1,6 @@ // Code generated by "stringer -type=QueryPhase"; DO NOT EDIT. -package utils +package utils //nolint:revive import "strconv" diff --git a/app/server/datasource/rdbms/utils/schema_builder.go b/app/server/datasource/rdbms/utils/schema_builder.go index 62e6672a..2c6f3e5e 100644 --- a/app/server/datasource/rdbms/utils/schema_builder.go +++ b/app/server/datasource/rdbms/utils/schema_builder.go @@ -1,4 +1,4 @@ -package utils +package utils //nolint:revive import ( "errors" diff --git a/app/server/datasource/rdbms/utils/schema_provider.go b/app/server/datasource/rdbms/utils/schema_provider.go index c7a114fb..2d7513cf 100644 --- a/app/server/datasource/rdbms/utils/schema_provider.go +++ b/app/server/datasource/rdbms/utils/schema_provider.go @@ -1,4 +1,4 @@ -package utils +package utils //nolint:revive import ( "context" @@ -51,12 +51,12 @@ func (f *defaultSchemaProvider) GetSchema( QueryArgs: args, } - rows, err := conn.Query(queryParams) + queryResult, err := conn.Query(queryParams) if err != nil { return nil, fmt.Errorf("query builder error: %w", err) } - defer func() { common.LogCloserError(logger, rows, "close rows") }() + defer func() { common.LogCloserError(logger, queryResult, "close query result") }() sb := NewSchemaBuilder(f.typeMapper, request.TypeMappingSettings) @@ -67,6 +67,8 @@ func (f *defaultSchemaProvider) GetSchema( scale *int64 ) + rows := queryResult.Rows + for rows.Next() { if err = rows.Scan(&columnName, &typeName, &precision, &scale); err != nil { return nil, fmt.Errorf("rows scan: %w", err) diff --git a/app/server/datasource/rdbms/utils/select_helpers.go b/app/server/datasource/rdbms/utils/select_helpers.go index bda0022e..5f66b1a7 100644 --- a/app/server/datasource/rdbms/utils/select_helpers.go +++ b/app/server/datasource/rdbms/utils/select_helpers.go @@ -1,4 +1,4 @@ -package utils +package utils //nolint:revive import ( "fmt" diff --git a/app/server/datasource/rdbms/utils/select_query.go b/app/server/datasource/rdbms/utils/select_query.go index f5549958..36eaebe2 100644 --- 
a/app/server/datasource/rdbms/utils/select_query.go +++ b/app/server/datasource/rdbms/utils/select_query.go @@ -1,4 +1,4 @@ -package utils +package utils //nolint:revive import ( "context" @@ -53,6 +53,7 @@ func MakeSelectQuery( } var queryArgs *QueryArgs + if split.Select.Where != nil { parts.WhereClause, queryArgs, err = formatWhereClause( logger, @@ -61,7 +62,6 @@ func MakeSelectQuery( split.Select.Where, split.Select.DataSourceInstance.Kind, ) - if err != nil { return nil, fmt.Errorf("format where clause: %w", err) } diff --git a/app/server/datasource/rdbms/utils/select_query_args.go b/app/server/datasource/rdbms/utils/select_query_args.go index 30fb3ef4..eecbfe72 100644 --- a/app/server/datasource/rdbms/utils/select_query_args.go +++ b/app/server/datasource/rdbms/utils/select_query_args.go @@ -1,4 +1,4 @@ -package utils +package utils //nolint:revive import "github.com/ydb-platform/ydb-go-genproto/protos/Ydb" diff --git a/app/server/datasource/rdbms/utils/split_provider.go b/app/server/datasource/rdbms/utils/split_provider.go index c031df37..c7af64b4 100644 --- a/app/server/datasource/rdbms/utils/split_provider.go +++ b/app/server/datasource/rdbms/utils/split_provider.go @@ -1,4 +1,4 @@ -package utils +package utils //nolint:revive import ( "github.com/ydb-platform/fq-connector-go/app/server/datasource" diff --git a/app/server/datasource/rdbms/utils/sql_formatter_default.go b/app/server/datasource/rdbms/utils/sql_formatter_default.go index 5d1b27e8..b4257b45 100644 --- a/app/server/datasource/rdbms/utils/sql_formatter_default.go +++ b/app/server/datasource/rdbms/utils/sql_formatter_default.go @@ -1,4 +1,4 @@ -package utils +package utils //nolint:revive import ( "strings" diff --git a/app/server/datasource/rdbms/utils/unit_test_helpers.go b/app/server/datasource/rdbms/utils/unit_test_helpers.go index 98f93535..4937a53b 100644 --- a/app/server/datasource/rdbms/utils/unit_test_helpers.go +++ b/app/server/datasource/rdbms/utils/unit_test_helpers.go @@ -1,4 +1,4 @@ -package utils +package utils //nolint:revive import ( "fmt" diff --git a/app/server/datasource/rdbms/utils/unit_test_helpers_test.go b/app/server/datasource/rdbms/utils/unit_test_helpers_test.go index f08aaff5..a542ad4d 100644 --- a/app/server/datasource/rdbms/utils/unit_test_helpers_test.go +++ b/app/server/datasource/rdbms/utils/unit_test_helpers_test.go @@ -1,4 +1,4 @@ -package utils +package utils //nolint:revive import ( "fmt" diff --git a/app/server/datasource/rdbms/ydb/connection_database_sql.go b/app/server/datasource/rdbms/ydb/connection_database_sql.go index 8930cfba..986b15fe 100644 --- a/app/server/datasource/rdbms/ydb/connection_database_sql.go +++ b/app/server/datasource/rdbms/ydb/connection_database_sql.go @@ -52,10 +52,10 @@ type connectionDatabaseSQL struct { tableName string } -func (c *connectionDatabaseSQL) Query(params *rdbms_utils.QueryParams) (rdbms_utils.Rows, error) { +func (c *connectionDatabaseSQL) Query(params *rdbms_utils.QueryParams) (*rdbms_utils.QueryResult, error) { c.queryLogger.Dump(params.QueryText, params.QueryArgs.Values()...) - out, err := c.DB.QueryContext( + out, err := c.QueryContext( ydb_sdk.WithQueryMode(params.Ctx, ydb_sdk.ScanQueryMode), params.QueryText, params.QueryArgs.Values()...) 
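Aside: the Columns contract introduced in app/server/datasource/rdbms/utils/interface.go earlier in this patch is small, so a slice-backed implementation is convenient for unit tests that want to exercise the Arrow path without a real database. The sketch below is hypothetical (memColumns is not part of the patch) and assumes the usual module path for the utils package; only the method set mirrors the interface.

package utils_test

import (
	"github.com/apache/arrow/go/v13/arrow"

	rdbms_utils "github.com/ydb-platform/fq-connector-go/app/server/datasource/rdbms/utils"
)

// memColumns iterates over a pre-built slice of Arrow records.
type memColumns struct {
	records []arrow.Record
	pos     int
	current arrow.Record
}

func (c *memColumns) Close() error { return nil }

func (c *memColumns) Err() error { return nil }

// Next advances to the following record, returning false once the slice is exhausted.
func (c *memColumns) Next() bool {
	if c.pos >= len(c.records) {
		return false
	}

	c.current = c.records[c.pos]
	c.pos++

	return true
}

func (c *memColumns) Record() arrow.Record { return c.current }

// Compile-time check that memColumns satisfies the new interface.
var _ rdbms_utils.Columns = (*memColumns)(nil)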
@@ -73,7 +73,11 @@ func (c *connectionDatabaseSQL) Query(params *rdbms_utils.QueryParams) (rdbms_ut return nil, fmt.Errorf("rows err: %w", err) } - return rowsDatabaseSQL{Rows: out}, nil + rows := rowsDatabaseSQL{Rows: out} + + return &rdbms_utils.QueryResult{ + Rows: rows, + }, nil } func (c *connectionDatabaseSQL) Driver() *ydb_sdk.Driver { @@ -122,7 +126,6 @@ func newConnectionDatabaseSQL( ydb_sdk.WithPositionalArgs(), ydb_sdk.WithTablePathPrefix(dsi.Database), ) - if err != nil { return nil, fmt.Errorf("connector error: %w", err) } @@ -136,6 +139,7 @@ func newConnectionDatabaseSQL( if err := conn.PingContext(pingCtx); err != nil { common.LogCloserError(logger, conn, "close YDB connection") + return nil, fmt.Errorf("conn ping: %w", err) } diff --git a/app/server/datasource/rdbms/ydb/connection_manager.go b/app/server/datasource/rdbms/ydb/connection_manager.go index 2da4ee7e..a100ed1f 100644 --- a/app/server/datasource/rdbms/ydb/connection_manager.go +++ b/app/server/datasource/rdbms/ydb/connection_manager.go @@ -96,12 +96,11 @@ func (c *connectionManager) Make( var ydbConn Connection switch c.cfg.Mode { - case config.TYdbConfig_MODE_UNSPECIFIED: - fallthrough - case config.TYdbConfig_MODE_QUERY_SERVICE_NATIVE: + case config.TYdbConfig_MODE_UNSPECIFIED, config.TYdbConfig_MODE_QUERY_SERVICE_NATIVE: logger.Debug("connector will use Native SDK over Query Service") formatter := NewSQLFormatter(config.TYdbConfig_MODE_QUERY_SERVICE_NATIVE, c.cfg.Pushdown) + ydbConn = newConnectionNative( logger, c.QueryLoggerFactory, @@ -110,9 +109,11 @@ func (c *connectionManager) Make( ydbDriver, formatter, c.cfg.ResourcePool, + dsi.GetYdbOptions().GetQueryDataFormat(), ) case config.TYdbConfig_MODE_TABLE_SERVICE_STDLIB_SCAN_QUERIES: logger.Debug("connector will use database/sql SDK with scan queries over Table Service") + ydbConn, err = newConnectionDatabaseSQL(ctx, logger, c.QueryLoggerFactory.Make(logger), c.cfg, dsi, params.TableName, ydbDriver) default: return nil, fmt.Errorf("unknown mode: %v", c.cfg.Mode) diff --git a/app/server/datasource/rdbms/ydb/connection_native.go b/app/server/datasource/rdbms/ydb/connection_native.go index 20a2fe6c..f28a1595 100644 --- a/app/server/datasource/rdbms/ydb/connection_native.go +++ b/app/server/datasource/rdbms/ydb/connection_native.go @@ -8,6 +8,8 @@ import ( "io" "time" + "github.com/apache/arrow/go/v13/arrow" + "github.com/apache/arrow/go/v13/arrow/ipc" "go.uber.org/zap" "github.com/ydb-platform/ydb-go-genproto/protos/Ydb" @@ -38,7 +40,6 @@ func (r *rowsNative) Next() bool { var err error r.lastRow, err = r.lastResultSet.NextRow(r.ctx) - if err != nil { if errors.Is(err, io.EOF) { r.err = nil @@ -79,7 +80,7 @@ func (r *rowsNative) Scan(dest ...any) error { func (r *rowsNative) MakeTransformer(ydbColumns []*Ydb.Column, cc conversion.Collection) (paging.RowTransformer[any], error) { if r.lastResultSet == nil { - return nil, fmt.Errorf("last result set is not ready yet") + return nil, errors.New("last result set is not ready yet") } columnTypes := r.lastResultSet.ColumnTypes() @@ -107,6 +108,91 @@ func (r *rowsNative) Close() error { return nil } +var _ rdbms_utils.Columns = (*columnsNative)(nil) + +type columnsNative struct { + ctx context.Context + err error + arrowResult ydb_sdk_query.ArrowResult + currentPart io.Reader + reader *ipc.Reader + record arrow.Record + closeChan chan struct{} +} + +func (c *columnsNative) Close() error { + if err := c.arrowResult.Close(c.ctx); err != nil { + return fmt.Errorf("arrow result close: %w", err) + } + + return nil +} + 
+func (c *columnsNative) Err() error { + return c.err +} + +func (c *columnsNative) Next() bool { + // If we have a reader and it has more records, get the next one + if c.reader != nil && c.reader.Next() { + c.record = c.reader.Record() + + return true + } + + // Try to get the next part + var part io.Reader + + var err error + + for p, e := range c.arrowResult.Parts(c.ctx) { + if e != nil { + if errors.Is(e, io.EOF) { + c.err = nil + } else { + c.err = fmt.Errorf("next part: %w", e) + } + + return false + } + + part = p + + break + } + + if part == nil { + return false + } + + // Create a new reader for this part + c.currentPart = part + + reader, err := ipc.NewReader(part) + if err != nil { + c.err = fmt.Errorf("create arrow reader: %w", err) + + return false + } + + c.reader = reader + + // Get the first record from this part + if !c.reader.Next() { + c.err = errors.New("no records in arrow part") + + return false + } + + c.record = c.reader.Record() + + return true +} + +func (c *columnsNative) Record() arrow.Record { + return c.record +} + var _ rdbms_utils.Connection = (*connectionNative)(nil) type connectionNative struct { @@ -117,13 +203,19 @@ type connectionNative struct { tableName string formatter rdbms_utils.SQLFormatter resourcePool string + queryDataFormat api_common.TYdbDataSourceOptions_EQueryDataFormat } // nolint: gocyclo,funlen -func (c *connectionNative) Query(params *rdbms_utils.QueryParams) (rdbms_utils.Rows, error) { - // prepare parameter list +func (c *connectionNative) Query(params *rdbms_utils.QueryParams) (*rdbms_utils.QueryResult, error) { paramsBuilder := ydb_sdk.ParamsBuilder() + // modify query with args + queryRewritten, err := c.rewriteQuery(params) + if err != nil { + return nil, fmt.Errorf("rewrite query: %w", err) + } + for i, arg := range params.QueryArgs.Values() { placeholder := c.formatter.GetPlaceholder(i) @@ -200,71 +292,100 @@ func (c *connectionNative) Query(params *rdbms_utils.QueryParams) (rdbms_utils.R } type result struct { - rows rdbms_utils.Rows - err error + result *rdbms_utils.QueryResult + err error } // We cannot use the results of a query from outside of the SDK callback. // See https://github.com/ydb-platform/ydb-go-sdk/issues/1862 for details. resultChan := make(chan result) - // context coming from the connector's clien (federated YDB) + + // context coming from the client (the federated YDB) parentCtx := params.Ctx go func() { finalErr := c.driver.Query().Do( parentCtx, func(ctx context.Context, session ydb_sdk_query.Session) (err error) { - // modify query with args - queryRewritten, err := c.rewriteQuery(params) - if err != nil { - return fmt.Errorf("rewrite query: %w", err) - } - queryLogger := c.queryLoggerFactory.Make(params.Logger, zap.String("resource_pool", c.resourcePool)) queryLogger.Dump(queryRewritten, params.QueryArgs.Values()...) 
- // execute query - streamResult, err := session.Query( - ctx, - queryRewritten, - ydb_sdk_query.WithParameters(paramsBuilder.Build()), - ydb_sdk_query.WithResourcePool(c.resourcePool), - ) - if err != nil { - return fmt.Errorf("session query: %w", err) - } + var queryResult *rdbms_utils.QueryResult + + switch c.queryDataFormat { + case api_common.TYdbDataSourceOptions_QUERY_DATA_FORMAT_UNSPECIFIED: + // execute query + streamResult, err := session.Query( + ctx, + queryRewritten, + ydb_sdk_query.WithParameters(paramsBuilder.Build()), + ydb_sdk_query.WithResourcePool(c.resourcePool), + ) + if err != nil { + return fmt.Errorf("session query: %w", err) + } - defer func() { - if closeErr := streamResult.Close(ctx); closeErr != nil { - params.Logger.Error("close stream result", zap.Error(closeErr)) + defer func() { + if closeErr := streamResult.Close(ctx); closeErr != nil { + params.Logger.Error("close stream result", zap.Error(closeErr)) + } + }() + + // obtain first result set because it's necessary + // to create type transformers + resultSet, err := streamResult.NextResultSet(ctx) + if err != nil { + return fmt.Errorf("next result set: %w", err) } - }() - // obtain first result set because it's necessary - // to create type transformers - resultSet, err := streamResult.NextResultSet(ctx) - if err != nil { - return fmt.Errorf("next result set: %w", err) - } + queryResult = &rdbms_utils.QueryResult{ + Rows: &rowsNative{ + ctx: parentCtx, + streamResult: streamResult, + lastResultSet: resultSet, + closeChan: make(chan struct{}), + }, + } + case api_common.TYdbDataSourceOptions_ARROW: + // execute query + arrowResult, err := session.QueryArrow( + ctx, + queryRewritten, + ydb_sdk_query.WithParameters(paramsBuilder.Build()), + ydb_sdk_query.WithResourcePool(c.resourcePool), + ) + if err != nil { + return fmt.Errorf("session query: %w", err) + } - rows := &rowsNative{ - ctx: parentCtx, - streamResult: streamResult, - lastResultSet: resultSet, - closeChan: make(chan struct{}), + defer func() { + if closeErr := arrowResult.Close(ctx); closeErr != nil { + params.Logger.Error("close stream result", zap.Error(closeErr)) + } + }() + + queryResult = &rdbms_utils.QueryResult{ + Columns: &columnsNative{ + ctx: parentCtx, + arrowResult: arrowResult, + closeChan: make(chan struct{}), + }, + } + default: + return fmt.Errorf("unsupported query data format: %v", c.queryDataFormat) } // push iterator over GRPC stream into the outer space select { - case resultChan <- result{rows: rows}: + case resultChan <- result{result: queryResult}: case <-ctx.Done(): return ctx.Err() } - // Keep waiting until the rowsNative object is closed by a caller. - // The context (and the rowsNative object) will be invalidated otherwise. + // Keep waiting until the rowsNative/columnsNative object is closed by a caller. + // The context (and the objects) will be otherwise invalidated by the SDK. 
select { - case <-rows.closeChan: + case <-extractCloseChan(queryResult): return nil case <-ctx.Done(): return ctx.Err() @@ -289,12 +410,20 @@ func (c *connectionNative) Query(params *rdbms_utils.QueryParams) (rdbms_utils.R return nil, r.err } - return r.rows, nil + return r.result, nil case <-parentCtx.Done(): return nil, parentCtx.Err() } } +func extractCloseChan(queryResult *rdbms_utils.QueryResult) <-chan struct{} { + if queryResult.Rows != nil { + return queryResult.Rows.(*rowsNative).closeChan + } + + return queryResult.Columns.(*columnsNative).closeChan +} + func (c *connectionNative) Driver() *ydb_sdk.Driver { return c.driver } @@ -368,6 +497,7 @@ func newConnectionNative( driver *ydb_sdk.Driver, formatter rdbms_utils.SQLFormatter, resourcePool string, + queryDataFormat api_common.TYdbDataSourceOptions_EQueryDataFormat, ) Connection { return &connectionNative{ driver: driver, @@ -377,5 +507,6 @@ func newConnectionNative( tableName: tableName, formatter: formatter, resourcePool: resourcePool, + queryDataFormat: queryDataFormat, } } diff --git a/app/server/datasource/rdbms/ydb/schema_provider.go b/app/server/datasource/rdbms/ydb/schema_provider.go index 198cda7e..209ba1be 100644 --- a/app/server/datasource/rdbms/ydb/schema_provider.go +++ b/app/server/datasource/rdbms/ydb/schema_provider.go @@ -30,6 +30,7 @@ func (f *schemaProvider) GetSchema( request *api_service_protos.TDescribeTableRequest, ) (*api_service_protos.TSchema, error) { prefix := path.Join(request.DataSourceInstance.Database, request.Table) + logger = logger.With(zap.String("prefix", prefix)) logger.Debug("obtaining table metadata from YDB") diff --git a/app/server/datasource/rdbms/ydb/split_provider.go b/app/server/datasource/rdbms/ydb/split_provider.go index c8b6b6ac..983f6512 100644 --- a/app/server/datasource/rdbms/ydb/split_provider.go +++ b/app/server/datasource/rdbms/ydb/split_provider.go @@ -82,7 +82,6 @@ func (sp SplitProvider) ListSplits( return nil }, ) - if err != nil { return fmt.Errorf("retry: %w", err) } @@ -317,7 +316,6 @@ func (sp SplitProvider) GetColumnShardTabletIDs( }, query.WithIdempotent(), ) - if err != nil { return nil, fmt.Errorf("querying column table tablet ids: %w", err) } diff --git a/app/server/datasource/rdbms/ydb/sql_formatter.go b/app/server/datasource/rdbms/ydb/sql_formatter.go index 7f8408ed..c503e08d 100644 --- a/app/server/datasource/rdbms/ydb/sql_formatter.go +++ b/app/server/datasource/rdbms/ydb/sql_formatter.go @@ -1,6 +1,7 @@ package ydb import ( + "errors" "fmt" "strings" @@ -21,7 +22,7 @@ type SQLFormatter struct { cfg *config.TPushdownConfig } -//nolint:gocyclo +//nolint:gocyclo,revive func (f SQLFormatter) supportsTypeForPushdown(typeID Ydb.Type_PrimitiveTypeId) bool { switch typeID { case Ydb.Type_BOOL: @@ -166,7 +167,7 @@ func (SQLFormatter) RenderSelectQueryTextForColumnShard( case 1: sb.WriteString(fmt.Sprintf(" WITH TabletId='%d'", tabletIDs[0])) default: - return "", fmt.Errorf("column shard split description must contain either 0, or 1 tablet id") + return "", errors.New("column shard split description must contain either 0, or 1 tablet id") } if parts.WhereClause != "" { @@ -217,7 +218,7 @@ func (SQLFormatter) FormatCast(value string, ydbType *Ydb.Type) (string, error) primitiveType := ydbType.GetTypeId() if primitiveType == Ydb.Type_PRIMITIVE_TYPE_ID_UNSPECIFIED { - return "", fmt.Errorf("primitive type is unspecified") + return "", errors.New("primitive type is unspecified") } typeName, err := primitiveYqlTypeName(primitiveType) diff --git 
a/app/server/datasource/rdbms/ydb/sql_formatter_test.go b/app/server/datasource/rdbms/ydb/sql_formatter_test.go index 44c42df3..a24bb5e1 100644 --- a/app/server/datasource/rdbms/ydb/sql_formatter_test.go +++ b/app/server/datasource/rdbms/ydb/sql_formatter_test.go @@ -533,6 +533,7 @@ func TestMakeSelectQuery(t *testing.T) { ) if tc.err != nil { require.True(t, errors.Is(err, tc.err)) + return } diff --git a/app/server/datasource/rdbms/ydb/table_metadata_cache/ristretto.go b/app/server/datasource/rdbms/ydb/table_metadata_cache/ristretto.go index 062c8b42..742e8053 100644 --- a/app/server/datasource/rdbms/ydb/table_metadata_cache/ristretto.go +++ b/app/server/datasource/rdbms/ydb/table_metadata_cache/ristretto.go @@ -83,7 +83,6 @@ func newRistrettoCache(cfg *config.TYdbConfig_TTableMetadataCache) (*ristrettoCa BufferItems: 64, // reasonable default Metrics: true, }) - if err != nil { return nil, fmt.Errorf("ristretto new cache: %w", err) } diff --git a/app/server/datasource/rdbms/ydb/type_mapping.go b/app/server/datasource/rdbms/ydb/type_mapping.go index 0f240560..32cf9376 100644 --- a/app/server/datasource/rdbms/ydb/type_mapping.go +++ b/app/server/datasource/rdbms/ydb/type_mapping.go @@ -92,6 +92,7 @@ func (typeMapper) SQLTypeToYDBColumn( typeName := columnDescription.Type optional := false + if matches := isOptional.FindStringSubmatch(typeName); len(matches) > 0 { optional = true typeName = matches[1] @@ -140,7 +141,8 @@ func makePrimitiveTypeFromString(typeName string) (*Ydb.Type, error) { return common.MakePrimitiveType(Ydb.Type_STRING), nil case typeUtf8: return common.MakePrimitiveType(Ydb.Type_UTF8), nil - case typeJSON: + case typeJSON, typeJSONDocument: + // Both types are mapping to YDB JSON due to KIKIMR-22201 return common.MakePrimitiveType(Ydb.Type_JSON), nil case typeDate: // YDB connector always returns date / time columns in YQL_FORMAT, because it is always fits YDB's date / time type value ranges @@ -149,9 +151,6 @@ func makePrimitiveTypeFromString(typeName string) (*Ydb.Type, error) { return common.MakePrimitiveType(Ydb.Type_DATETIME), nil case typeTimestamp: return common.MakePrimitiveType(Ydb.Type_TIMESTAMP), nil - case typeJSONDocument: - // This inconsistency is due to KIKIMR-22201 - return common.MakePrimitiveType(Ydb.Type_JSON), nil default: return nil, fmt.Errorf("convert type '%s': %w", typeName, common.ErrDataTypeNotSupported) } diff --git a/app/server/embedded.go b/app/server/embedded.go index 7c22dc5d..3e6d907e 100644 --- a/app/server/embedded.go +++ b/app/server/embedded.go @@ -1,6 +1,7 @@ package server import ( + "errors" "fmt" "os" "os/signal" @@ -65,10 +66,10 @@ func (s *serverEmbedded) ClientStreaming() *common.ClientStreaming { return s.cl func (s *serverEmbedded) MetricsSnapshot() (*common.MetricsSnapshot, error) { if s.cfg.MetricsServer == nil { - return nil, fmt.Errorf("metrics server is not initialized") + return nil, errors.New("metrics server is not initialized") } - mp, err := common.NewMetricsSnapshot(s.cfg.MetricsServer.Endpoint, s.cfg.Tls != nil) + mp, err := common.NewMetricsSnapshot(s.cfg.MetricsServer.Endpoint, false) if err != nil { return nil, fmt.Errorf("new metrics provider: %w", err) } @@ -83,12 +84,14 @@ func (s *serverEmbedded) Stop() { if s.operational { s.launcher.Stop() s.clientBuffering.Close() + s.operational = false } } func NewEmbedded(options ...EmbeddedOption) (common.TestingServer, error) { cfg := app_server_config.NewDefaultConfig() + for _, o := range options { o.apply(cfg) } diff --git a/app/server/grpc_metrics.go 
b/app/server/grpc_metrics.go index df34e0f2..d9b9be81 100644 --- a/app/server/grpc_metrics.go +++ b/app/server/grpc_metrics.go @@ -60,6 +60,7 @@ func maybeRegisterStatusCode(statusCount metrics.CounterVec, opName string, stre ydbStatus := eg.GetError().Status var ydbStatusStr string + if ydbStatus != Ydb.StatusIds_SUCCESS { // convert YDB status code to string ydbStatusStr = ydbStatus.String() @@ -202,6 +203,7 @@ func StreamServerMetrics(logger *zap.Logger, registry metrics.Registry) grpc.Str // return transport error to the client err := status.Errorf(codes.Internal, "Server paniced") + _ = ss.SendMsg(err) } } @@ -257,7 +259,6 @@ type serverStreamWithMessagesCount struct { func (s serverStreamWithMessagesCount) SendMsg(m any) error { err := s.ServerStream.SendMsg(m) - if err == nil { s.sentStreamMessages.Inc() s.sentBytes.Add(int64(proto.Size(m.(proto.Message)))) @@ -270,7 +271,6 @@ func (s serverStreamWithMessagesCount) SendMsg(m any) error { func (s serverStreamWithMessagesCount) RecvMsg(m any) error { err := s.ServerStream.RecvMsg(m) - if err == nil { s.receivedStreamMessages.Inc() s.receivedBytes.Add(int64(proto.Size(m.(proto.Message)))) diff --git a/app/server/httppuller.go b/app/server/httppuller.go index eaa6214e..423ad7d3 100644 --- a/app/server/httppuller.go +++ b/app/server/httppuller.go @@ -82,7 +82,6 @@ func (h handler) okSpack(header http.Header) bool { func (h handler) okLZ4Compression(header http.Header) bool { for _, header := range header[headers.AcceptEncodingKey] { encodings, err := headers.ParseAcceptEncoding(header) - if err != nil { h.logger.Warn("Can't parse accept-encoding header", zap.Error(err), zap.String("header", header)) @@ -108,8 +107,8 @@ func (h handler) ServeHTTP(w http.ResponseWriter, r *http.Request) { } w.Header().Set(headers.ContentTypeKey, headers.TypeApplicationXSolomonSpack.String()) - _, err := h.registry.StreamSpack(r.Context(), w, compression) + _, err := h.registry.StreamSpack(r.Context(), w, compression) if err != nil { h.logger.Error("Failed to write compressed spack", zap.Error(err)) } @@ -118,8 +117,8 @@ func (h handler) ServeHTTP(w http.ResponseWriter, r *http.Request) { } w.Header().Set(headers.ContentTypeKey, headers.TypeApplicationJSON.String()) - _, err := h.registry.StreamJSON(r.Context(), w) + _, err := h.registry.StreamJSON(r.Context(), w) if err != nil { h.logger.Error("Failed to write json", zap.Error(err)) } diff --git a/app/server/observation/service.go b/app/server/observation/service.go index 16ac59cf..10ff1527 100644 --- a/app/server/observation/service.go +++ b/app/server/observation/service.go @@ -114,6 +114,7 @@ func (s *serviceImpl) ListIncomingQueries( } if err := stream.Send(response); err != nil { logger.Error("Failed to send query to client", zap.Error(err)) + return status.Errorf(codes.Internal, "failed to send query to client: %v", err) } } @@ -171,6 +172,7 @@ func (s *serviceImpl) ListOutgoingQueries( } if err := stream.Send(response); err != nil { logger.Error("Failed to send query to client", zap.Error(err)) + return status.Errorf(codes.Internal, "failed to send query to client: %v", err) } } @@ -188,8 +190,9 @@ func NewService( ) (utils.Service, error) { // Create a listener addr := common.EndpointToString(cfg.Server.GetEndpoint()) - listener, err := net.Listen("tcp", addr) + //nolint:noctx + listener, err := net.Listen("tcp", addr) if err != nil { return nil, fmt.Errorf("net listen: %w", err) } diff --git a/app/server/observation/storage_sqlite.go b/app/server/observation/storage_sqlite.go index 
3e945af1..d8e5ce0c 100644 --- a/app/server/observation/storage_sqlite.go +++ b/app/server/observation/storage_sqlite.go @@ -82,44 +82,47 @@ func (s *storageSQLite) initialize() error { var err error + ctx, cancel := context.WithTimeout(context.Background(), time.Second*10) + defer cancel() + // Enable foreign key support - _, err = s.db.Exec("PRAGMA foreign_keys = ON;") + _, err = s.db.ExecContext(ctx, "PRAGMA foreign_keys = ON;") if err != nil { return fmt.Errorf("enabling foreign keys: %w", err) } // Create tables - _, err = s.db.Exec(createIncomingTableSQL) + _, err = s.db.ExecContext(ctx, createIncomingTableSQL) if err != nil { return fmt.Errorf("creating incoming_queries table: %w", err) } - _, err = s.db.Exec(createOutgoingTableSQL) + _, err = s.db.ExecContext(ctx, createOutgoingTableSQL) if err != nil { return fmt.Errorf("creating outgoing_queries table: %w", err) } // Prepare statements for better performance - s.createIncomingQueryStmt, err = s.db.Prepare( + s.createIncomingQueryStmt, err = s.db.PrepareContext(ctx, "INSERT INTO incoming_queries (id, data_source_kind, created_at, rows_read, bytes_read, state) VALUES (?, ?, ?, ?, ?, ?)") if err != nil { return fmt.Errorf("preparing create incoming query statement: %w", err) } - s.finishIncomingQueryStmt, err = s.db.Prepare( + s.finishIncomingQueryStmt, err = s.db.PrepareContext(ctx, "UPDATE incoming_queries SET state = ?, finished_at = ?, rows_read = ?, bytes_read = ? WHERE id = ?") if err != nil { return fmt.Errorf("preparing finish incoming query statement: %w", err) } - s.cancelIncomingQueryStmt, err = s.db.Prepare( + s.cancelIncomingQueryStmt, err = s.db.PrepareContext(ctx, "UPDATE incoming_queries SET state = ?, finished_at = ?, error = ?, rows_read = ?, bytes_read = ? WHERE id = ?") if err != nil { return fmt.Errorf("preparing cancel incoming query statement: %w", err) } // Prepare statements for outgoing queries - s.createOutgoingQueryStmt, err = s.db.Prepare(` + s.createOutgoingQueryStmt, err = s.db.PrepareContext(ctx, ` INSERT INTO outgoing_queries (id, incoming_query_id, database_name, database_endpoint, rows_read, query_text, query_args, created_at, state) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`) @@ -127,13 +130,13 @@ func (s *storageSQLite) initialize() error { return fmt.Errorf("preparing create outgoing query statement: %w", err) } - s.finishOutgoingQueryStmt, err = s.db.Prepare( + s.finishOutgoingQueryStmt, err = s.db.PrepareContext(ctx, "UPDATE outgoing_queries SET state = ?, finished_at = ?, rows_read = ? WHERE id = ?") if err != nil { return fmt.Errorf("preparing finish outgoing query statement: %w", err) } - s.cancelOutgoingQueryStmt, err = s.db.Prepare( + s.cancelOutgoingQueryStmt, err = s.db.PrepareContext(ctx, "UPDATE outgoing_queries SET state = ?, finished_at = ?, error = ? 
WHERE id = ?") if err != nil { return fmt.Errorf("preparing cancel outgoing query statement: %w", err) @@ -352,11 +355,13 @@ func (s *storageSQLite) CreateOutgoingQuery( err = tx.QueryRowContext(ctx, "SELECT EXISTS(SELECT 1 FROM incoming_queries WHERE id = ?)", incomingQueryID).Scan(&exists) if err != nil { rollback() + return logger, "", fmt.Errorf("checking incoming query existence: %w", err) } if !exists { rollback() + return logger, "", fmt.Errorf("incoming query not found: %s", incomingQueryID) } @@ -365,19 +370,21 @@ func (s *storageSQLite) CreateOutgoingQuery( // Use the prepared statement for better performance stmt := tx.StmtContext(ctx, s.createOutgoingQueryStmt) + _, err = stmt.ExecContext(ctx, id, incomingQueryID, dsi.Database, common.EndpointToString(dsi.Endpoint), 0, queryText, fmt.Sprint(queryArgs), now, stateToString(observation.QueryState_QUERY_STATE_RUNNING), ) - if err != nil { rollback() + return logger, "", fmt.Errorf("creating outgoing query: %w", err) } // Commit the transaction if err = tx.Commit(); err != nil { rollback() + return logger, "", fmt.Errorf("committing transaction: %w", err) } @@ -438,7 +445,7 @@ func (s *storageSQLite) CancelOutgoingQuery(ctx context.Context, logger *zap.Log } // ListOutgoingQueries retrieves a list of outgoing queries with optional filtering -func (s *storageSQLite) ListOutgoingQueries(_ context.Context, _ *zap.Logger, +func (s *storageSQLite) ListOutgoingQueries(ctx context.Context, _ *zap.Logger, incomingQueryID *string, state *observation.QueryState, limit, offset int, @@ -449,6 +456,7 @@ func (s *storageSQLite) ListOutgoingQueries(_ context.Context, _ *zap.Logger, ) stateStr := "" + if state != nil && *state != observation.QueryState_QUERY_STATE_UNSPECIFIED { stateStr = stateToString(*state) } @@ -487,7 +495,7 @@ func (s *storageSQLite) ListOutgoingQueries(_ context.Context, _ *zap.Logger, args = []any{limit, offset} } - rows, err := s.db.Query(querySQL, args...) + rows, err := s.db.QueryContext(ctx, querySQL, args...) 
if err != nil { return nil, fmt.Errorf("listing outgoing queries: %w", err) } @@ -609,6 +617,7 @@ func (s *storageSQLite) collectGarbage(logger *zap.Logger, ttl time.Duration) { logger.Error("get database size before cleanup", zap.Error(err)) } + //nolint:noctx _, err = s.db.Exec(` DELETE FROM incoming_queries WHERE created_at < ?; DELETE FROM outgoing_queries WHERE created_at < ?; @@ -617,6 +626,7 @@ func (s *storageSQLite) collectGarbage(logger *zap.Logger, ttl time.Duration) { logger.Error("clean up old queries", zap.Error(err)) } + //nolint:noctx _, err = s.db.Exec(` VACUUM; `) if err != nil { logger.Error("vacuum database", zap.Error(err)) @@ -672,8 +682,11 @@ func newStorageSQLite(logger *zap.Logger, cfg *config.TObservationConfig_TStorag "PRAGMA wal_autocheckpoint = 1000", // Checkpoint WAL file after 1000 pages } + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + for _, pragma := range pragmas { - if _, err = db.Exec(pragma); err != nil { + if _, err = db.ExecContext(ctx, pragma); err != nil { return nil, fmt.Errorf("setting pragma %s: %w", pragma, err) } } @@ -688,6 +701,7 @@ func newStorageSQLite(logger *zap.Logger, cfg *config.TObservationConfig_TStorag if err = storage.initialize(); err != nil { common.LogCloserError(logger, db, "close SQLite database") + return nil, fmt.Errorf("initialize: %w", err) } diff --git a/app/server/paging/columnar_buffer_arrow_ipc_streaming_default.go b/app/server/paging/columnar_buffer_arrow_ipc_streaming_default.go index ba485e83..13c90d03 100644 --- a/app/server/paging/columnar_buffer_arrow_ipc_streaming_default.go +++ b/app/server/paging/columnar_buffer_arrow_ipc_streaming_default.go @@ -20,6 +20,8 @@ type columnarBufferArrowIPCStreamingDefault[T Acceptor] struct { builders []array.Builder schema *arrow.Schema logger *zap.Logger + arrowRecord arrow.Record // Store the Arrow Record directly + rowsAdded bool // Track if rows were added via addRow } // AddRow saves a row obtained from the datasource into the buffer @@ -30,23 +32,43 @@ func (cb *columnarBufferArrowIPCStreamingDefault[T]) addRow(transformer RowTrans return fmt.Errorf("append values to arrow builders: %w", err) } + cb.rowsAdded = true + return nil } // ToResponse returns all the accumulated data and clears buffer func (cb *columnarBufferArrowIPCStreamingDefault[T]) ToResponse() (*api_service_protos.TReadSplitsResponse, error) { - // chunk consists of columns - chunk := make([]arrow.Array, 0, len(cb.builders)) - - // prepare arrow record - for _, builder := range cb.builders { - chunk = append(chunk, builder.NewArray()) - } - - record := array.NewRecord(cb.schema, chunk, -1) - - for _, col := range chunk { - col.Release() + var record arrow.Record + + var releaseRecord bool + + // If we have a stored Arrow Record, use it directly + if cb.arrowRecord != nil { + record = cb.arrowRecord + // We'll release our reference to the record at the end + releaseRecord = false + } else if cb.rowsAdded { + // If rows were added, create a new record from the builders + chunk := make([]arrow.Array, 0, len(cb.builders)) + + // prepare arrow record + for _, builder := range cb.builders { + chunk = append(chunk, builder.NewArray()) + } + + record = array.NewRecord(cb.schema, chunk, -1) + + // We need to release the arrays after creating the record + for _, col := range chunk { + col.Release() + } + + // We'll need to release this record after writing it + releaseRecord = true + } else { + // No data to return + return &api_service_protos.TReadSplitsResponse{}, 
nil } // prepare arrow writer @@ -55,13 +77,26 @@ func (cb *columnarBufferArrowIPCStreamingDefault[T]) ToResponse() (*api_service_ writer := ipc.NewWriter(&buf, ipc.WithSchema(cb.schema), ipc.WithAllocator(cb.arrowAllocator)) if err := writer.Write(record); err != nil { + if releaseRecord { + record.Release() + } + return nil, fmt.Errorf("write record: %w", err) } if err := writer.Close(); err != nil { + if releaseRecord { + record.Release() + } + return nil, fmt.Errorf("close arrow writer: %w", err) } + // Release the record if we created it + if releaseRecord { + record.Release() + } + out := &api_service_protos.TReadSplitsResponse{ Payload: &api_service_protos.TReadSplitsResponse_ArrowIpcStreaming{ ArrowIpcStreaming: buf.Bytes(), @@ -71,7 +106,17 @@ func (cb *columnarBufferArrowIPCStreamingDefault[T]) ToResponse() (*api_service_ return out, nil } -func (cb *columnarBufferArrowIPCStreamingDefault[T]) TotalRows() int { return cb.builders[0].Len() } +func (cb *columnarBufferArrowIPCStreamingDefault[T]) TotalRows() int { + if cb.arrowRecord != nil { + return int(cb.arrowRecord.NumRows()) + } + + if len(cb.builders) > 0 { + return cb.builders[0].Len() + } + + return 0 +} // Frees resources if buffer is no longer used func (cb *columnarBufferArrowIPCStreamingDefault[T]) Release() { @@ -79,4 +124,11 @@ func (cb *columnarBufferArrowIPCStreamingDefault[T]) Release() { for _, b := range cb.builders { b.Release() } + + // Release the stored Arrow Record if it exists + if cb.arrowRecord != nil { + cb.arrowRecord.Release() + + cb.arrowRecord = nil + } } diff --git a/app/server/paging/columnar_buffer_factory.go b/app/server/paging/columnar_buffer_factory.go index 0577ff3d..a26a45ec 100644 --- a/app/server/paging/columnar_buffer_factory.go +++ b/app/server/paging/columnar_buffer_factory.go @@ -24,11 +24,7 @@ type columnarBufferFactoryImpl[T Acceptor] struct { func (cbf *columnarBufferFactoryImpl[T]) MakeBuffer() (ColumnarBuffer[T], error) { switch cbf.format { case api_service_protos.TReadSplitsRequest_ARROW_IPC_STREAMING: - builders, err := common.YdbTypesToArrowBuilders(cbf.ydbTypes, cbf.arrowAllocator) - if err != nil { - return nil, fmt.Errorf("convert Select.What to arrow.Schema: %w", err) - } - + // Special case for empty columns if len(cbf.ydbTypes) == 0 { return &columnarBufferArrowIPCStreamingEmptyColumns[T]{ arrowAllocator: cbf.arrowAllocator, @@ -37,6 +33,12 @@ func (cbf *columnarBufferFactoryImpl[T]) MakeBuffer() (ColumnarBuffer[T], error) }, nil } + // Create the default implementation + builders, err := common.YdbTypesToArrowBuilders(cbf.ydbTypes, cbf.arrowAllocator) + if err != nil { + return nil, fmt.Errorf("convert Select.What to arrow.Schema: %w", err) + } + return &columnarBufferArrowIPCStreamingDefault[T]{ arrowAllocator: cbf.arrowAllocator, builders: builders, diff --git a/app/server/paging/interface.go b/app/server/paging/interface.go index f509c97a..cf1d44c5 100644 --- a/app/server/paging/interface.go +++ b/app/server/paging/interface.go @@ -43,6 +43,7 @@ type ColumnarBufferFactory[T Acceptor] interface { // 4. flag marking this stream as completed type ReadResult[T Acceptor] struct { ColumnarBuffer ColumnarBuffer[T] + Data []byte // serialized Arrow Record Stats *api_service_protos.TReadSplitsResponse_TStats Error error IsTerminalMessage bool @@ -54,6 +55,9 @@ type Sink[T Acceptor] interface { // AddRow saves the row obtained from a stream incoming from an external data source. 
AddRow(rowTransformer RowTransformer[T]) error + // AddArrowRecord saves the Arrow block obtained from a stream incoming from an external data source. + AddArrowRecord(record arrow.Record) error + // Finish reports the successful (!) completion of data stream reading. // Never call this method if the request has failed. // This method can be called only once. diff --git a/app/server/paging/mock.go b/app/server/paging/mock.go index d4d9a510..4d33cc00 100644 --- a/app/server/paging/mock.go +++ b/app/server/paging/mock.go @@ -1,6 +1,7 @@ package paging import ( + "github.com/apache/arrow/go/v13/arrow" "github.com/stretchr/testify/mock" "go.uber.org/zap" @@ -19,6 +20,12 @@ func (m *SinkMock) AddRow(transformer RowTransformer[any]) error { return args.Error(0) } +func (m *SinkMock) AddArrowRecord(record arrow.Record) error { + args := m.Called(record) + + return args.Error(0) +} + func (m *SinkMock) AddError(err error) { m.Called(err) } diff --git a/app/server/paging/sink.go b/app/server/paging/sink.go index 7252ab0b..2f8d385d 100644 --- a/app/server/paging/sink.go +++ b/app/server/paging/sink.go @@ -2,9 +2,12 @@ package paging import ( + "bytes" "context" "fmt" + "github.com/apache/arrow/go/v13/arrow" + "github.com/apache/arrow/go/v13/arrow/ipc" "go.uber.org/zap" api_service_protos "github.com/ydb-platform/fq-connector-go/api/service/protos" @@ -70,6 +73,60 @@ func (s *sinkImpl[T]) AddRow(rowTransformer RowTransformer[T]) error { return nil } +// AddArrowRecord saves the Arrow block obtained from a stream incoming from an external data source. +// It directly flushes the record to the resultQueue without using the columnar buffer. +func (s *sinkImpl[T]) AddArrowRecord(record arrow.Record) error { + if s.state != sinkOperational { + panic(s.unexpectedState(sinkOperational)) + } + + if record == nil { + return nil + } + + if record.NumRows() == 0 { + return nil + } + + // Apply read limiter for each row in the record + rowCount := record.NumRows() + for i := int64(0); i < rowCount; i++ { + if err := s.readLimiter.addRow(); err != nil { + return fmt.Errorf("add row to read limiter: %w", err) + } + } + + // Check if we can add the Arrow record without exceeding page size limit + ok, err := s.trafficTracker.tryAddArrowRecord(record) + if err != nil { + return fmt.Errorf("add arrow record to traffic tracker: %w", err) + } + + // If page is already too large, flush buffer to the channel and try again + if !ok { + if err := s.flush(true, false); err != nil { + return fmt.Errorf("flush: %w", err) + } + + // Try again with a fresh buffer + _, err := s.trafficTracker.tryAddArrowRecord(record) + if err != nil { + return fmt.Errorf("add arrow record to traffic tracker: %w", err) + } + } + + // Get stats + stats := s.trafficTracker.DumpStats(false) + + // Send the response directly to the result queue + s.respondWithArrowRecord(record, stats, nil, false) + + // Reset counters for the next record + s.trafficTracker.refreshCounters() + + return nil +} + func (s *sinkImpl[T]) flush(makeNewBuffer bool, isTerminalMessage bool) error { if s.currBuffer.TotalRows() == 0 { return nil @@ -106,6 +163,7 @@ func (s *sinkImpl[T]) Finish() { err := s.flush(false, true) if err != nil { s.respondWith(nil, nil, fmt.Errorf("flush: %w", err), true) + s.state = sinkFailed } else { s.state = sinkFinished @@ -138,6 +196,49 @@ func (s *sinkImpl[T]) respondWith( } } +// respondWithArrowRecord creates a response with an Arrow record and sends it to the result queue +func (s *sinkImpl[T]) respondWithArrowRecord( + record 
arrow.Record, + stats *api_service_protos.TReadSplitsResponse_TStats, + err error, + isTerminalMessage bool) { + // Create a response directly from the Arrow record + var buf bytes.Buffer + + writer := ipc.NewWriter(&buf, ipc.WithSchema(record.Schema())) + + if writeErr := writer.Write(record); writeErr != nil { + s.respondWith(nil, stats, fmt.Errorf("write record: %w", writeErr), isTerminalMessage) + + return + } + + if closeErr := writer.Close(); closeErr != nil { + s.respondWith(nil, stats, fmt.Errorf("close arrow writer: %w", closeErr), isTerminalMessage) + + return + } + + // Get the serialized data from the buffer + serializedData := buf.Bytes() + + // Create a result with the serialized data + result := &ReadResult[T]{ + ColumnarBuffer: nil, + Data: serializedData, + Stats: stats, + Error: err, + IsTerminalMessage: isTerminalMessage, + Logger: s.logger, + } + + // Send the result to the queue + select { + case s.resultQueue <- result: + case <-s.ctx.Done(): + } +} + func (s *sinkImpl[T]) unexpectedState(expected ...sinkState) error { return fmt.Errorf( "unexpected state '%v' (expected are '%v'): %w", diff --git a/app/server/paging/sink_factory.go b/app/server/paging/sink_factory.go index 9e6d115b..f5bdfe28 100644 --- a/app/server/paging/sink_factory.go +++ b/app/server/paging/sink_factory.go @@ -2,6 +2,7 @@ package paging import ( "context" + "errors" "fmt" "go.uber.org/zap" @@ -40,7 +41,7 @@ type sinkFactoryImpl[T Acceptor] struct { // This method can be called only once. func (f *sinkFactoryImpl[T]) MakeSinks(params []*SinkParams) ([]Sink[T], error) { if f.state != sinkFactoryIdle { - return nil, fmt.Errorf("sink factory is already in use") + return nil, errors.New("sink factory is already in use") } f.totalSinks = len(params) @@ -54,11 +55,13 @@ func (f *sinkFactoryImpl[T]) MakeSinks(params []*SinkParams) ([]Sink[T], error) buffer, err := f.bufferFactory.MakeBuffer() if err != nil { f.state = sinkFactoryFailed + return nil, fmt.Errorf("make buffer: %w", err) } // preserve traffic tracker to obtain stats in future trafficTracker := newTrafficTracker[T](f.cfg) + f.trafficTrackers = append(f.trafficTrackers, trafficTracker) sink := &sinkImpl[T]{ @@ -95,6 +98,7 @@ func (f *sinkFactoryImpl[T]) FinalStats() *api_service_protos.TReadSplitsRespons for _, tracker := range f.trafficTrackers { partialStats := tracker.DumpStats(true) + overallStats.Rows += partialStats.Rows overallStats.Bytes += partialStats.Bytes } @@ -120,6 +124,7 @@ func (f *sinkFactoryImpl[T]) sinkTerminationHandler(terminateChan <-chan Sink[T] // notify reader about the end of data f.logger.Info("all sinks terminated") close(f.resultQueue) + f.state = sinkFactoryFinished return diff --git a/app/server/paging/size.go b/app/server/paging/size.go index f452e817..db0433ae 100644 --- a/app/server/paging/size.go +++ b/app/server/paging/size.go @@ -262,6 +262,7 @@ func sizeOfValueBloated(v any) (uint64, acceptorKind, error) { return 12, fixedSize, nil case map[string]string: var size uint64 + for k, v := range t { size += uint64(len(k) + len(v)) } diff --git a/app/server/paging/size_arrow.go b/app/server/paging/size_arrow.go new file mode 100644 index 00000000..cb5cf94a --- /dev/null +++ b/app/server/paging/size_arrow.go @@ -0,0 +1,175 @@ +package paging + +import ( + "fmt" + + "github.com/apache/arrow/go/v13/arrow" + "github.com/apache/arrow/go/v13/arrow/array" +) + +// estimateArrowRecordSize estimates the size of an Arrow Record without serializing it. +// This is an approximation based on the data types and number of rows. 
+func estimateArrowRecordSize(record arrow.Record) (uint64, error) { + if record == nil { + return 0, nil + } + + numRows := record.NumRows() + if numRows == 0 { + return 0, nil + } + + // Start with a base size for the record structure itself + // This includes metadata, schema, and other overhead + size := uint64(64) // Base overhead for record structure + + // Add size for each column + for i := 0; i < int(record.NumCols()); i++ { + col := record.Column(i) + + colSize, err := estimateArrowArraySize(col) + if err != nil { + return 0, fmt.Errorf("estimate column %d size: %w", i, err) + } + + size += colSize + } + + return size, nil +} + +// estimateArrowArraySize estimates the size of an Arrow Array without serializing it. +// nolint:gocyclo,funlen +func estimateArrowArraySize(arr arrow.Array) (uint64, error) { + if arr == nil { + return 0, nil + } + + // Base size for array structure + size := uint64(32) // Base overhead for array structure + + // Add size for validity bitmap (null values) + // This is approximately 1 bit per value, rounded up to bytes + validityBitmapSize := uint64((arr.Len() + 7) / 8) + + size += validityBitmapSize + + // Get the number of non-null values + nonNullCount := arr.Len() - arr.NullN() + + // Add size based on data type and length + switch arr := arr.(type) { + case *array.Boolean: + // For boolean arrays, we need about 1 bit per value (rounded up to bytes) + size += uint64((nonNullCount + 7) / 8) + case *array.Int8: + size += uint64(nonNullCount) // 1 byte per value + case *array.Int16: + size += uint64(nonNullCount * 2) // 2 bytes per value + case *array.Int32: + size += uint64(nonNullCount * 4) // 4 bytes per value + case *array.Int64: + size += uint64(nonNullCount * 8) // 8 bytes per value + case *array.Uint8: + size += uint64(nonNullCount) // 1 byte per value + case *array.Uint16: + size += uint64(nonNullCount * 2) // 2 bytes per value + case *array.Uint32: + size += uint64(nonNullCount * 4) // 4 bytes per value + case *array.Uint64: + size += uint64(nonNullCount * 8) // 8 bytes per value + case *array.Float32: + size += uint64(nonNullCount * 4) // 4 bytes per value + case *array.Float64: + size += uint64(nonNullCount * 8) // 8 bytes per value + case *array.String: + // For string arrays, we need to account for the string data and offsets + // Offsets are int32, so 4 bytes per value plus 1 + size += uint64((arr.Len() + 1) * 4) // Offsets are needed for all positions, even nulls + + // Estimate the string data size + // This is an approximation; we iterate through values to get actual sizes + for i := 0; i < arr.Len(); i++ { + if arr.IsValid(i) { + size += uint64(len(arr.Value(i))) + } + } + case *array.Binary: + // For binary arrays, similar to string arrays + size += uint64((arr.Len() + 1) * 4) // Offsets are needed for all positions, even nulls + + // Estimate the binary data size + for i := 0; i < arr.Len(); i++ { + if arr.IsValid(i) { + size += uint64(len(arr.Value(i))) + } + } + case *array.Timestamp: + size += uint64(nonNullCount * 8) // 8 bytes per timestamp + case *array.Date32: + size += uint64(nonNullCount * 4) // 4 bytes per date + case *array.Date64: + size += uint64(nonNullCount * 8) // 8 bytes per date + case *array.Time32: + size += uint64(nonNullCount * 4) // 4 bytes per time + case *array.Time64: + size += uint64(nonNullCount * 8) // 8 bytes per time + case *array.Decimal128: + size += uint64(nonNullCount * 16) // 16 bytes per decimal + case *array.Decimal256: + size += uint64(nonNullCount * 32) // 32 bytes per decimal + case 
*array.Struct: + // For struct arrays, we need to account for each field + for i := 0; i < arr.NumField(); i++ { + fieldArr := arr.Field(i) + + fieldSize, err := estimateArrowArraySize(fieldArr) + if err != nil { + return 0, fmt.Errorf("estimate struct field %d size: %w", i, err) + } + + size += fieldSize + } + case array.ListLike: + // This case handles all list-like arrays, including Map, List, LargeList, etc. + // For list arrays, we need to account for the offsets and the values + size += uint64((arr.Len() + 1) * 4) // Offsets (int32) are needed for all positions, even nulls + + // For Map arrays, we need to handle keys and items separately + if mapArr, ok := arr.(*array.Map); ok { + // Estimate the key-value pairs size + keyArr := mapArr.Keys() + + keySize, err := estimateArrowArraySize(keyArr) + if err != nil { + return 0, fmt.Errorf("estimate map keys size: %w", err) + } + + size += keySize + + itemArr := mapArr.Items() + + itemSize, err := estimateArrowArraySize(itemArr) + if err != nil { + return 0, fmt.Errorf("estimate map items size: %w", err) + } + + size += itemSize + } else { + // For regular list arrays, estimate the values size + valueArr := arr.ListValues() + + valueSize, err := estimateArrowArraySize(valueArr) + if err != nil { + return 0, fmt.Errorf("estimate list values size: %w", err) + } + + size += valueSize + } + default: + // For other types, return an error + return 0, fmt.Errorf("unsupported arrow array type: %T", arr) + } + + return size, nil +} diff --git a/app/server/paging/size_test.go b/app/server/paging/size_test.go index 28efe54e..3e0ea1ff 100644 --- a/app/server/paging/size_test.go +++ b/app/server/paging/size_test.go @@ -32,8 +32,11 @@ func (tc testCaseSize[Type]) execute(t *testing.T) { t.Run(fnName+"_"+typeName, func(t *testing.T) { x0 := tc.value x1 := new(Type) + *x1 = x0 + x2 := new(*Type) + *x2 = x1 size0, kind0, err := fn(x0) diff --git a/app/server/paging/traffic_tracker.go b/app/server/paging/traffic_tracker.go index 12320d77..120239f4 100644 --- a/app/server/paging/traffic_tracker.go +++ b/app/server/paging/traffic_tracker.go @@ -3,6 +3,8 @@ package paging import ( "fmt" + "github.com/apache/arrow/go/v13/arrow" + api_service_protos "github.com/ydb-platform/fq-connector-go/api/service/protos" "github.com/ydb-platform/fq-connector-go/app/config" "github.com/ydb-platform/fq-connector-go/app/server/utils" @@ -51,12 +53,48 @@ func (tt *trafficTracker[T]) tryAddRow(acceptors []T) (bool, error) { return true, nil } +// tryAddArrowRecord checks if the addition of the Arrow record +// would exceed the limits on the page size. +// If there's enough space in buffer, it returns true and increases the internal counters. +// Otherwise it returns false, but doesn't change internal state. 
+func (tt *trafficTracker[T]) tryAddArrowRecord(record arrow.Record) (bool, error) { + if record == nil { + return true, nil + } + + // Estimate the size of the Arrow record + totalBytes, err := estimateArrowRecordSize(record) + if err != nil { + return false, fmt.Errorf("estimate arrow record size: %w", err) + } + + // Get the number of rows in the record + rowCount := uint64(record.NumRows()) + if rowCount == 0 { + return true, nil + } + + wouldBeEnough, err := tt.checkPageSizeLimit(totalBytes, rowCount) + if err != nil { + return false, fmt.Errorf("check page size limit: %w", err) + } + + if wouldBeEnough { + return false, nil + } + + tt.bytesCurr.Add(totalBytes) + tt.rowsCurr.Add(rowCount) + + return true, nil +} + func (tt *trafficTracker[T]) maybeInit(acceptors []T) error { if tt.sizePattern == nil { // lazy initialization when the first row is ready var err error - tt.sizePattern, err = newSizePattern(acceptors) + tt.sizePattern, err = newSizePattern(acceptors) if err != nil { return fmt.Errorf("new size pattern: %w", err) } diff --git a/app/server/service_connector.go b/app/server/service_connector.go index 25f1a072..723d92ab 100644 --- a/app/server/service_connector.go +++ b/app/server/service_connector.go @@ -3,6 +3,7 @@ package server import ( "context" "crypto/tls" + "errors" "fmt" "net" @@ -34,6 +35,7 @@ type serviceConnector struct { logger *zap.Logger } +//nolint:staticcheck func (*serviceConnector) ListTables(_ *api_service_protos.TListTablesRequest, _ api_service.Connector_ListTablesServer) error { return nil } @@ -43,6 +45,7 @@ func (s *serviceConnector) DescribeTable( request *api_service_protos.TDescribeTableRequest, ) (*api_service_protos.TDescribeTableResponse, error) { logger := utils.LoggerMustFromContext(ctx) + logger = common.AnnotateLoggerWithDataSourceInstance(logger, request.DataSourceInstance) logger.Info("request handling started", zap.String("table", request.GetTable())) @@ -129,8 +132,8 @@ func (s *serviceConnector) ReadSplits( logger.Info("request handling started", zap.Int("total_splits", len(request.Splits))) var err error - logger, err = s.doReadSplits(logger, request, stream) + logger, err = s.doReadSplits(logger, request, stream) if err != nil { logger.Error("request handling failed", zap.Error(err)) @@ -173,7 +176,6 @@ func (s *serviceConnector) doReadSplits( request, split, ) - if err != nil { return splitLogger, fmt.Errorf("read split %d: %w", split.Id, err) } @@ -211,8 +213,6 @@ func makeGRPCOptions(logger *zap.Logger, cfg *config.TServerConfig, registry *so switch { case cfg.GetConnectorServer().GetTls() != nil: tlsConfig = cfg.GetConnectorServer().GetTls() - case cfg.GetTls() != nil: - tlsConfig = cfg.GetTls() default: logger.Warn("server will use insecure connections") @@ -230,6 +230,7 @@ func makeGRPCOptions(logger *zap.Logger, cfg *config.TServerConfig, registry *so // for security reasons we do not allow TLS < 1.2, see YQ-1877 creds := credentials.NewTLS(&tls.Config{Certificates: []tls.Certificate{cert}, MinVersion: tls.VersionTLS12}) + opts = append(opts, grpc.Creds(creds)) return opts, nil @@ -255,14 +256,11 @@ func newServiceConnector( switch { case cfg.GetConnectorServer().GetEndpoint() != nil: endpoint = cfg.ConnectorServer.GetEndpoint() - case cfg.GetEndpoint() != nil: - logger.Warn("Using deprecated field `endpoint` from config. 
Please update your config.") - - endpoint = cfg.GetEndpoint() default: - return nil, fmt.Errorf("invalid config: no endpoint") + return nil, errors.New("invalid config: no endpoint") } + //nolint:noctx listener, err := net.Listen("tcp", common.EndpointToString(endpoint)) if err != nil { return nil, fmt.Errorf("net listen: %w", err) diff --git a/app/server/streaming/list_splits_streamer.go b/app/server/streaming/list_splits_streamer.go index 02213944..d67bfcf8 100644 --- a/app/server/streaming/list_splits_streamer.go +++ b/app/server/streaming/list_splits_streamer.go @@ -76,7 +76,6 @@ func (s *ListSplitsStreamer[T]) sendResultToStream(result *datasource.ListSplitR UseProtoNames: true, EmitUnpopulated: false, }.Marshal(result.Description) - if err != nil { return fmt.Errorf("marshal description to JSON: %w", err) } diff --git a/app/server/streaming/read_splits_streamer.go b/app/server/streaming/read_splits_streamer.go index dde537dd..59edb3fc 100644 --- a/app/server/streaming/read_splits_streamer.go +++ b/app/server/streaming/read_splits_streamer.go @@ -2,6 +2,7 @@ package streaming import ( "context" + "errors" "fmt" "sync" @@ -60,13 +61,29 @@ func (s *ReadSplitsStreamer[T]) writeDataToStream() error { } func (s *ReadSplitsStreamer[T]) sendResultToStream(result *paging.ReadResult[T]) error { - // buffer must be explicitly marked as unused, - // otherwise memory will leak - defer result.ColumnarBuffer.Release() + var resp *api_service_protos.TReadSplitsResponse - resp, err := result.ColumnarBuffer.ToResponse() - if err != nil { - return fmt.Errorf("buffer to response: %w", err) + var err error + + if result.Data != nil { + // Handle the case where we have serialized Arrow data + resp = &api_service_protos.TReadSplitsResponse{ + Payload: &api_service_protos.TReadSplitsResponse_ArrowIpcStreaming{ + ArrowIpcStreaming: result.Data, + }, + } + } else if result.ColumnarBuffer != nil { + // Handle the case where we have a columnar buffer + // buffer must be explicitly marked as unused, + // otherwise memory will leak + defer result.ColumnarBuffer.Release() + + resp, err = result.ColumnarBuffer.ToResponse() + if err != nil { + return fmt.Errorf("buffer to response: %w", err) + } + } else { + return errors.New("result contains neither Data nor ColumnarBuffer") } resp.Stats = result.Stats diff --git a/app/server/streaming/read_splits_streamer_test.go b/app/server/streaming/read_splits_streamer_test.go index d0de7745..6483410d 100644 --- a/app/server/streaming/read_splits_streamer_test.go +++ b/app/server/streaming/read_splits_streamer_test.go @@ -173,8 +173,11 @@ func (tc testCaseStreaming) execute(t *testing.T) { connection.On("Query", `SELECT "col0", "col1" FROM "example_1"`).Return(rows, nil).Once() col0Acceptor := new(*int32) + *col0Acceptor = new(int32) + col1Acceptor := new(*string) + *col1Acceptor = new(string) transformer := &rdbms_utils.RowTransformerMock{ @@ -233,6 +236,7 @@ func (tc testCaseStreaming) execute(t *testing.T) { expectedColumnarBlock := expectedColumnarBlocks[sendCallID] rowsInMessage := tc.rowsPerPage + if sendCallID == totalMessages-1 { rowsInMessage = rowsInLastMessage } @@ -348,10 +352,11 @@ func TestStreaming(t *testing.T) { }) t.Run("scan error", func(t *testing.T) { - scanErr := fmt.Errorf("scan error") + scanErr := errors.New("scan error") for _, tc := range testCases { tc := tc + tc.scanErr = scanErr t.Run(tc.name(), func(t *testing.T) { tc.execute(t) @@ -360,10 +365,11 @@ func TestStreaming(t *testing.T) { }) t.Run("send error", func(t *testing.T) { - sendErr := 
fmt.Errorf("stream send error") + sendErr := errors.New("stream send error") for _, tc := range testCases { tc := tc + tc.sendErr = sendErr t.Run(tc.name(), func(t *testing.T) { tc.execute(t) diff --git a/app/server/utils/arrow_builder.go b/app/server/utils/arrow_builder.go index 32b47979..2370794d 100644 --- a/app/server/utils/arrow_builder.go +++ b/app/server/utils/arrow_builder.go @@ -1,4 +1,4 @@ -package utils +package utils //nolint:revive import ( "errors" diff --git a/app/server/utils/counter.go b/app/server/utils/counter.go index 67690b96..d9d5ced8 100644 --- a/app/server/utils/counter.go +++ b/app/server/utils/counter.go @@ -1,4 +1,4 @@ -package utils +package utils //nolint:revive import "golang.org/x/exp/constraints" diff --git a/app/server/utils/counter_test.go b/app/server/utils/counter_test.go index e8836a1c..add20fa8 100644 --- a/app/server/utils/counter_test.go +++ b/app/server/utils/counter_test.go @@ -1,4 +1,4 @@ -package utils +package utils //nolint:revive import ( "testing" diff --git a/app/server/utils/decimal/deserialize.go b/app/server/utils/decimal/deserialize.go index 646a4dc9..e3dfdabf 100644 --- a/app/server/utils/decimal/deserialize.go +++ b/app/server/utils/decimal/deserialize.go @@ -29,6 +29,7 @@ func Deserialize( if isNegative { // For negative numbers: subtract from 2^{8*blobSize} to get the original negative value twoToThe128 := new(big.Int).Lsh(big.NewInt(1), uint(blobSize*8)) + bigInt = new(big.Int).Sub(twoToThe128, bigInt) bigInt.Neg(bigInt) } diff --git a/app/server/utils/doc.go b/app/server/utils/doc.go index 9925b62f..3ce31dce 100644 --- a/app/server/utils/doc.go +++ b/app/server/utils/doc.go @@ -1,2 +1,2 @@ // Package utils contains various helpers and utility functions -package utils +package utils //nolint:revive diff --git a/app/server/utils/metadata.go b/app/server/utils/metadata.go index 168cc848..71f20b68 100644 --- a/app/server/utils/metadata.go +++ b/app/server/utils/metadata.go @@ -1,4 +1,4 @@ -package utils +package utils //nolint:revive import ( "context" @@ -93,5 +93,6 @@ func insertMetadataToContext(serverContext context.Context, logger *zap.Logger, func LoggerMustFromContext(ctx context.Context) *zap.Logger { logger := ctx.Value(loggerKeyRequest).(*zap.Logger) + return logger } diff --git a/app/server/utils/retry/retry.go b/app/server/utils/retry/retry.go index 53d875d5..3fd444a2 100644 --- a/app/server/utils/retry/retry.go +++ b/app/server/utils/retry/retry.go @@ -31,7 +31,6 @@ func (r *retrierDefault) Run(ctx context.Context, logger *zap.Logger, op Operati attempts++ err := op() - if err != nil { // It's convinient to disable retries for negative tests. // These tests are marked with 'ForbidRetries' flag in GRPC Metadata. @@ -63,6 +62,7 @@ func NewRetrierFromConfig(cfg *config.TExponentialBackoffConfig, retriableErrorC retriableErrorChecker: retriableErrorChecker, backoffFactory: func() *backoff.ExponentialBackOff { b := backoff.NewExponentialBackOff() + b.MaxElapsedTime = common.MustDurationFromString(cfg.MaxElapsedTime) b.InitialInterval = common.MustDurationFromString(cfg.InitialInterval) b.MaxInterval = common.MustDurationFromString(cfg.MaxInterval) diff --git a/app/server/utils/service.go b/app/server/utils/service.go index eb136b28..67fe487e 100644 --- a/app/server/utils/service.go +++ b/app/server/utils/service.go @@ -1,4 +1,4 @@ -package utils +package utils //nolint:revive // Service is an abstract interface representing some internal service // running in a distinct thread. 
diff --git a/app/server/validate.go b/app/server/validate.go index e3625843..5a8063cd 100644 --- a/app/server/validate.go +++ b/app/server/validate.go @@ -131,11 +131,8 @@ func validateDataSourceOptions(dsi *api_common.TGenericDataSourceInstance) error if dsi.GetOracleOptions().GetServiceName() == "" { return fmt.Errorf("service_name field is empty: %w", common.ErrInvalidRequest) } - case api_common.EGenericDataSourceKind_MS_SQL_SERVER: - // TODO: check schema - return nil - - case api_common.EGenericDataSourceKind_GREENPLUM: + case api_common.EGenericDataSourceKind_MS_SQL_SERVER, api_common.EGenericDataSourceKind_GREENPLUM: + // TODO: check schema in MS SQL Server return nil case api_common.EGenericDataSourceKind_LOGGING: if dsi.GetLoggingOptions().GetFolderId() == "" { diff --git a/app/validate/cmd.go b/app/validate/cmd.go index 925eb7e0..39af2b01 100644 --- a/app/validate/cmd.go +++ b/app/validate/cmd.go @@ -2,7 +2,6 @@ package validate import ( "fmt" - "io/ioutil" "os" "github.com/spf13/cobra" @@ -73,7 +72,7 @@ func validateHelmConfigurationFile(cmd *cobra.Command, _ []string) error { return fmt.Errorf("key '%s' not found in YAML file", key) } - tempFile, err := ioutil.TempFile("", "connector-config-*.yaml") + tempFile, err := os.CreateTemp("", "connector-config-*.yaml") if err != nil { return fmt.Errorf("create temp file: %v", err) } @@ -88,7 +87,7 @@ func validateHelmConfigurationFile(cmd *cobra.Command, _ []string) error { } }() - if err = ioutil.WriteFile(tempFile.Name(), []byte(keyPart.(string)), 0644); err != nil { + if err = os.WriteFile(tempFile.Name(), []byte(keyPart.(string)), 0644); err != nil { return fmt.Errorf("write temp file: %v", err) } diff --git a/common/api_helpers.go b/common/api_helpers.go index de1234b9..577e3ef6 100644 --- a/common/api_helpers.go +++ b/common/api_helpers.go @@ -1,4 +1,4 @@ -package common +package common //nolint:revive import ( "bytes" @@ -64,6 +64,7 @@ func ReadResponsesToArrowRecords(responses []*api_service_protos.TReadSplitsResp record := reader.Record() record.Retain() + out = append(out, record) } diff --git a/common/arrow_helpers.go b/common/arrow_helpers.go index 47684245..e935bd82 100644 --- a/common/arrow_helpers.go +++ b/common/arrow_helpers.go @@ -1,4 +1,4 @@ -package common +package common //nolint:revive import ( "fmt" @@ -116,6 +116,7 @@ func ydbTypeToArrowBuilder(ydbType *Ydb.Type, arrowAllocator memory.Allocator) ( } structType := arrow.StructOf(fields...) 
+ builder = array.NewStructBuilder(arrowAllocator, structType) case *Ydb.Type_DecimalType: builder = array.NewFixedSizeBinaryBuilder(arrowAllocator, &arrow.FixedSizeBinaryType{ByteWidth: 16}) @@ -131,7 +132,7 @@ func ydbTypeToArrowBuilder(ydbType *Ydb.Type, arrowAllocator memory.Allocator) ( return builder, nil } -//nolint:gocyclo +//nolint:gocyclo,revive func ydbTypeIdToArrowBuilder(typeID Ydb.Type_PrimitiveTypeId, arrowAllocator memory.Allocator) (array.Builder, error) { var builder array.Builder @@ -247,7 +248,7 @@ func ydbTypeToArrowField(ydbType *Ydb.Type, column *Ydb.Column) (arrow.Field, er return field, nil } -//nolint:gocyclo +//nolint:gocyclo,revive func ydbTypeIdToArrowField(typeID Ydb.Type_PrimitiveTypeId, column *Ydb.Column) (arrow.Field, error) { var field arrow.Field diff --git a/common/call_stack.go b/common/call_stack.go index c655a2fe..97b6e3df 100644 --- a/common/call_stack.go +++ b/common/call_stack.go @@ -1,4 +1,4 @@ -package common +package common //nolint:revive import "runtime" @@ -13,6 +13,7 @@ func GetCallStackFunctionNames() []string { } pc = pc[:n] + frames := runtime.CallersFrames(pc) for { diff --git a/common/client_basic.go b/common/client_basic.go index 2ac031ab..7cacac55 100644 --- a/common/client_basic.go +++ b/common/client_basic.go @@ -1,4 +1,4 @@ -package common +package common //nolint:revive import ( "context" diff --git a/common/client_buffering.go b/common/client_buffering.go index 75023c6f..6b338d9a 100644 --- a/common/client_buffering.go +++ b/common/client_buffering.go @@ -1,7 +1,8 @@ -package common +package common //nolint:revive import ( "context" + "errors" "fmt" "io" @@ -99,7 +100,7 @@ func NewClientBufferingFromServerConfig(logger *zap.Logger, serverCfg *config.TS } if serverCfg.ConnectorServer.Tls != nil { - return nil, fmt.Errorf("TLS connections are not implemented yet") + return nil, errors.New("TLS connections are not implemented yet") } return NewClientBufferingFromClientConfig(logger, clientCfg) diff --git a/common/client_streaming.go b/common/client_streaming.go index 41e8b24e..5b51f96c 100644 --- a/common/client_streaming.go +++ b/common/client_streaming.go @@ -1,7 +1,8 @@ -package common +package common //nolint:revive import ( "context" + "errors" "fmt" "io" "math" @@ -116,7 +117,6 @@ func NewClientStreamingFromClientConfig(logger *zap.Logger, clientCfg *config.TC conn, err := makeConnection(logger, clientCfg, grpc.WithDefaultCallOptions( grpc.MaxCallRecvMsgSize(math.MaxInt64), )) - if err != nil { return nil, fmt.Errorf("make connection: %w", err) } @@ -137,7 +137,7 @@ func NewClientStreamingFromServerConfig(logger *zap.Logger, serverCfg *config.TS } if serverCfg.ConnectorServer.Tls != nil { - return nil, fmt.Errorf("TLS connections are not implemented yet") + return nil, errors.New("TLS connections are not implemented yet") } return NewClientStreamingFromClientConfig(logger, clientCfg) diff --git a/common/config.go b/common/config.go index 5a6222ac..0ab73274 100644 --- a/common/config.go +++ b/common/config.go @@ -1,4 +1,4 @@ -package common +package common //nolint:revive import ( "fmt" diff --git a/common/connection.go b/common/connection.go index 2fb87b21..e4678b84 100644 --- a/common/connection.go +++ b/common/connection.go @@ -1,8 +1,9 @@ -package common +package common //nolint:revive import ( "crypto/tls" "crypto/x509" + "errors" "fmt" "os" @@ -39,7 +40,7 @@ func makeConnection(logger *zap.Logger, cfg *config.TClientConfig, additionalOpt certPool := x509.NewCertPool() if !certPool.AppendCertsFromPEM(caCrt) { - return 
nil, fmt.Errorf("failed to add server CA's certificate") + return nil, errors.New("failed to add server CA's certificate") } tlsCfg.RootCAs = certPool @@ -54,7 +55,7 @@ func makeConnection(logger *zap.Logger, cfg *config.TClientConfig, additionalOpt opts = append(opts, additionalOpts...) - conn, err := grpc.Dial(EndpointToString(cfg.ConnectorServerEndpoint), opts...) + conn, err := grpc.NewClient(EndpointToString(cfg.ConnectorServerEndpoint), opts...) if err != nil { return nil, fmt.Errorf("grpc dial: %w", err) } diff --git a/common/consts.go b/common/consts.go index 28797169..5475834c 100644 --- a/common/consts.go +++ b/common/consts.go @@ -1,4 +1,4 @@ -package common +package common //nolint:revive const ( ForbidRetries = "forbid_retries" diff --git a/common/credentials.go b/common/credentials.go index 656f0dcc..db1b21cb 100644 --- a/common/credentials.go +++ b/common/credentials.go @@ -1,4 +1,4 @@ -package common +package common //nolint:revive import ( "os" diff --git a/common/duration.go b/common/duration.go index b45fca1a..d8962d2b 100644 --- a/common/duration.go +++ b/common/duration.go @@ -1,4 +1,4 @@ -package common +package common //nolint:revive import ( "fmt" diff --git a/common/endpoint.go b/common/endpoint.go index f1d807b4..80431a98 100644 --- a/common/endpoint.go +++ b/common/endpoint.go @@ -1,4 +1,4 @@ -package common +package common //nolint:revive import ( "fmt" diff --git a/common/errors.go b/common/errors.go index 153c5e71..e73e637c 100644 --- a/common/errors.go +++ b/common/errors.go @@ -1,4 +1,4 @@ -package common +package common //nolint:revive import ( "crypto/tls" @@ -27,23 +27,23 @@ import ( ) var ( - ErrTableDoesNotExist = fmt.Errorf("table does not exist") - ErrDataSourceNotSupported = fmt.Errorf("data source not supported") - ErrDataTypeNotSupported = fmt.Errorf("data type not supported") - ErrDataTypeMismatch = fmt.Errorf("data type mismatch") - ErrMethodNotSupported = fmt.Errorf("method not supported") - ErrReadLimitExceeded = fmt.Errorf("read limit exceeded") - ErrInvalidRequest = fmt.Errorf("invalid request") - ErrValueOutOfTypeBounds = fmt.Errorf("value is out of possible range of values for the type") - ErrUnimplementedTypedValue = fmt.Errorf("unimplemented typed value") - ErrUnimplementedExpression = fmt.Errorf("unimplemented expression") - ErrUnsupportedExpression = fmt.Errorf("expression is not supported") - ErrUnimplementedOperation = fmt.Errorf("unimplemented operation") - ErrUnimplementedPredicateType = fmt.Errorf("unimplemented predicate type") - ErrInvariantViolation = fmt.Errorf("implementation error (invariant violation)") - ErrUnimplementedArithmeticalExpression = fmt.Errorf("unimplemented arithmetical expression") - ErrEmptyTableName = fmt.Errorf("empty table name") - ErrPageSizeExceeded = fmt.Errorf("page size exceeded, check service configuration") + ErrTableDoesNotExist = errors.New("table does not exist") + ErrDataSourceNotSupported = errors.New("data source not supported") + ErrDataTypeNotSupported = errors.New("data type not supported") + ErrDataTypeMismatch = errors.New("data type mismatch") + ErrMethodNotSupported = errors.New("method not supported") + ErrReadLimitExceeded = errors.New("read limit exceeded") + ErrInvalidRequest = errors.New("invalid request") + ErrValueOutOfTypeBounds = errors.New("value is out of possible range of values for the type") + ErrUnimplementedTypedValue = errors.New("unimplemented typed value") + ErrUnimplementedExpression = errors.New("unimplemented expression") + ErrUnsupportedExpression = 
errors.New("expression is not supported") + ErrUnimplementedOperation = errors.New("unimplemented operation") + ErrUnimplementedPredicateType = errors.New("unimplemented predicate type") + ErrInvariantViolation = errors.New("implementation error (invariant violation)") + ErrUnimplementedArithmeticalExpression = errors.New("unimplemented arithmetical expression") + ErrEmptyTableName = errors.New("empty table name") + ErrPageSizeExceeded = errors.New("page size exceeded, check service configuration") ) var OptionalFilteringAllowedErrors = NewErrorMatcher( @@ -112,11 +112,9 @@ func newAPIErrorFromPostgreSQLError(err error) *api_service_protos.TError { pgError, ok := pgConnectError.Unwrap().(*pgconn.PgError) if ok { switch pgError.Code { - case pgerrcode.InvalidPassword: - // Invalid password in PostgreSQL 15 - status = ydb_proto.StatusIds_UNAUTHORIZED - case pgerrcode.InvalidAuthorizationSpecification: - // Invalid password in Greenplum 6.25 + // Invalid password in PostgreSQL 15 + // Invalid password in Greenplum 6.25 + case pgerrcode.InvalidPassword, pgerrcode.InvalidAuthorizationSpecification: status = ydb_proto.StatusIds_UNAUTHORIZED default: status = ydb_proto.StatusIds_INTERNAL_ERROR @@ -421,14 +419,12 @@ func NewAPIErrorFromStdError(err error, kind api_common.EGenericDataSourceKind) apiError = newAPIErrorFromPostgreSQLError(err) case api_common.EGenericDataSourceKind_MYSQL: apiError = newAPIErrorFromMySQLError(err) - case api_common.EGenericDataSourceKind_YDB: + case api_common.EGenericDataSourceKind_YDB, api_common.EGenericDataSourceKind_LOGGING: apiError = newAPIErrorFromYdbError(err) case api_common.EGenericDataSourceKind_ORACLE: apiError = newAPIErrorFromOracleError(err) case api_common.EGenericDataSourceKind_MS_SQL_SERVER: apiError = newAPIErrorFromMsSQLServer(err) - case api_common.EGenericDataSourceKind_LOGGING: - apiError = newAPIErrorFromYdbError(err) case api_common.EGenericDataSourceKind_MONGO_DB: apiError = newAPIErrorFromMongoDbError(err) case api_common.EGenericDataSourceKind_REDIS: diff --git a/common/logger.go b/common/logger.go index e4bfafe0..97e510d0 100644 --- a/common/logger.go +++ b/common/logger.go @@ -1,4 +1,4 @@ -package common +package common //nolint:revive import ( "fmt" @@ -57,6 +57,12 @@ func AnnotateLoggerWithDataSourceInstance(l *zap.Logger, dsi *api_common.TGeneri fields = append(fields, zap.String("folder_id", dsi.GetLoggingOptions().GetFolderId())) } + if dsi.GetYdbOptions() != nil { + if dsi.GetYdbOptions().GetQueryDataFormat() != api_common.TYdbDataSourceOptions_QUERY_DATA_FORMAT_UNSPECIFIED { + fields = append(fields, zap.Stringer("query_data_format", dsi.GetYdbOptions().GetQueryDataFormat())) + } + } + return l.With(fields...) 
} @@ -99,6 +105,7 @@ func NewDefaultLogger() *zap.Logger { func newDefaultLoggerConfig() zap.Config { loggerCfg := zap.NewProductionConfig() + loggerCfg.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder loggerCfg.Encoding = "console" loggerCfg.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder @@ -154,6 +161,7 @@ func (ql *QueryLogger) Dump(query string, args ...any) { } logFields := []zap.Field{zap.String("query", query)} + if len(args) > 0 { logFields = append(logFields, zap.Any("args", args)) } diff --git a/common/metrics.go b/common/metrics.go index e5b4a2e8..76bd32f5 100644 --- a/common/metrics.go +++ b/common/metrics.go @@ -1,7 +1,8 @@ -package common +package common //nolint:revive import ( "encoding/json" + "errors" "fmt" "net/url" "strings" @@ -61,7 +62,7 @@ func (ms *MetricsSnapshot) FindStatusSensors(typ, method, name, status string) [ func (ms *MetricsSnapshot) FindFloat64Sensor(name string) (float64, error) { metrics, ok := ms.data["metrics"].([]any) if !ok { - return 0, fmt.Errorf("invalid response: metrics field not found or not an array") + return 0, errors.New("invalid response: metrics field not found or not an array") } for _, itemUntyped := range metrics { diff --git a/common/protobuf.go b/common/protobuf.go index b022a84c..a499c5fb 100644 --- a/common/protobuf.go +++ b/common/protobuf.go @@ -1,4 +1,4 @@ -package common +package common //nolint:revive import ( "fmt" diff --git a/common/stacktrace.go b/common/stacktrace.go index 00086e36..a44e1637 100644 --- a/common/stacktrace.go +++ b/common/stacktrace.go @@ -1,4 +1,4 @@ -package common +package common //nolint:revive import ( "fmt" diff --git a/common/testing_server.go b/common/testing_server.go index e0cfba1a..e029c1d2 100644 --- a/common/testing_server.go +++ b/common/testing_server.go @@ -1,6 +1,7 @@ -package common +package common //nolint:revive import ( + "errors" "fmt" "go.uber.org/zap" @@ -31,7 +32,7 @@ func (ts *testingServerRemote) ClientBuffering() *ClientBuffering { return ts.cl func (ts *testingServerRemote) ClientStreaming() *ClientStreaming { return ts.clientStreaming } func (*testingServerRemote) MetricsSnapshot() (*MetricsSnapshot, error) { - return nil, fmt.Errorf("not implemented") + return nil, errors.New("not implemented") } func (ts *testingServerRemote) Stop() { diff --git a/common/time.go b/common/time.go index e00eac4b..a40c82fe 100644 --- a/common/time.go +++ b/common/time.go @@ -1,4 +1,4 @@ -package common +package common //nolint:revive import ( "fmt" diff --git a/common/time_test.go b/common/time_test.go index c77aba32..eb00cf83 100644 --- a/common/time_test.go +++ b/common/time_test.go @@ -1,4 +1,4 @@ -package common +package common //nolint:revive import ( "errors" @@ -17,22 +17,22 @@ func TestTimeToYDBDate(t *testing.T) { tcs := []testCase{ { - input: time.Date(1970, 01, 01, 00, 00, 00, 00, time.UTC), + input: time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC), output: 0, err: nil, }, { - input: time.Date(1970, 01, 02, 00, 00, 00, 00, time.UTC), + input: time.Date(1970, 1, 2, 0, 0, 0, 0, time.UTC), output: 1, err: nil, }, { - input: time.Date(1969, 12, 31, 23, 59, 00, 00, time.UTC), + input: time.Date(1969, 12, 31, 23, 59, 0, 0, time.UTC), output: 0, err: ErrValueOutOfTypeBounds, }, { - input: time.Date(9999, 01, 01, 00, 00, 00, 00, time.UTC), + input: time.Date(9999, 1, 1, 0, 0, 0, 0, time.UTC), output: 0, err: ErrValueOutOfTypeBounds, }, @@ -63,22 +63,22 @@ func TestTimeToYDBDatetime(t *testing.T) { tcs := []testCase{ { - input: time.Date(1970, 01, 01, 00, 00, 00, 00, 
time.UTC), + input: time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC), output: 0, err: nil, }, { - input: time.Date(1970, 01, 02, 00, 00, 00, 00, time.UTC), + input: time.Date(1970, 1, 2, 0, 0, 0, 0, time.UTC), output: 86400, err: nil, }, { - input: time.Date(1969, 12, 31, 23, 59, 00, 00, time.UTC), + input: time.Date(1969, 12, 31, 23, 59, 0, 0, time.UTC), output: 0, err: ErrValueOutOfTypeBounds, }, { - input: time.Date(9999, 01, 01, 00, 00, 00, 00, time.UTC), + input: time.Date(9999, 1, 1, 0, 0, 0, 0, time.UTC), output: 0, err: ErrValueOutOfTypeBounds, }, @@ -109,22 +109,22 @@ func TestTimeToYDBTimestamp(t *testing.T) { tcs := []testCase{ { - input: time.Date(1970, 01, 01, 00, 00, 00, 00, time.UTC), + input: time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC), output: 0, err: nil, }, { - input: time.Date(1970, 01, 02, 00, 00, 00, 00, time.UTC), + input: time.Date(1970, 1, 2, 0, 0, 0, 0, time.UTC), output: 86400000000, err: nil, }, { - input: time.Date(1969, 12, 31, 23, 59, 00, 00, time.UTC), + input: time.Date(1969, 12, 31, 23, 59, 0, 0, time.UTC), output: 0, err: ErrValueOutOfTypeBounds, }, { - input: time.Date(29427, 01, 01, 00, 00, 00, 00, time.UTC), + input: time.Date(29427, 1, 1, 0, 0, 0, 0, time.UTC), output: 0, err: ErrValueOutOfTypeBounds, }, diff --git a/common/ydb_type_helpers.go b/common/ydb_type_helpers.go index 689c7944..edc8a71f 100644 --- a/common/ydb_type_helpers.go +++ b/common/ydb_type_helpers.go @@ -1,4 +1,4 @@ -package common +package common //nolint:revive import ( "fmt" @@ -137,6 +137,7 @@ func TypesEqual(lhs, rhs *Ydb.Type) bool { return rhs.GetNullType() != structpb.NullValue(0) case *Ydb.Type_OptionalType: rhsType := rhs.GetOptionalType() + return rhsType != nil && TypesEqual(rhsType.Item, lhsType.OptionalType.Item) case *Ydb.Type_DictType: @@ -147,6 +148,7 @@ func TypesEqual(lhs, rhs *Ydb.Type) bool { TypesEqual(rhsType.Payload, lhsType.DictType.Payload) case *Ydb.Type_ListType: rhsType := rhs.GetListType() + return rhsType != nil && TypesEqual(rhsType.Item, lhsType.ListType.Item) case *Ydb.Type_DecimalType: @@ -157,15 +159,19 @@ func TypesEqual(lhs, rhs *Ydb.Type) bool { rhsType.Scale == lhsType.DecimalType.Scale case *Ydb.Type_TupleType: rhsType := rhs.GetTupleType() + return rhsType != nil && tuplesEqual(rhsType, lhsType.TupleType) case *Ydb.Type_StructType: rhsType := rhs.GetStructType() + return rhsType != nil && structsEqual(rhsType, lhsType.StructType) case *Ydb.Type_VariantType: rhsType := rhs.GetVariantType() + return rhsType != nil && variantsEqual(rhsType, lhsType.VariantType) case *Ydb.Type_TaggedType: rhsType := rhs.GetTaggedType() + return rhsType.Tag == lhsType.TaggedType.Tag && TypesEqual(rhsType.Type, lhsType.TaggedType.Type) case *Ydb.Type_VoidType: @@ -176,6 +182,7 @@ func TypesEqual(lhs, rhs *Ydb.Type) bool { return rhs.GetEmptyDictType() != structpb.NullValue(0) case *Ydb.Type_PgType: rhsType := rhs.GetPgType() + return rhsType != nil && rhs.GetPgType().TypeName == lhsType.PgType.TypeName } diff --git a/go.mod b/go.mod index 4cab53dd..a4323603 100644 --- a/go.mod +++ b/go.mod @@ -1,8 +1,6 @@ module github.com/ydb-platform/fq-connector-go -go 1.23.0 - -toolchain go1.23.8 +go 1.24.6 require ( github.com/ClickHouse/ch-go v0.58.2 @@ -41,7 +39,7 @@ require ( github.com/stretchr/testify v1.10.0 github.com/ydb-platform/ydb-go-genproto v0.0.0-20250911135631-b3beddd517d9 // never update to version v3.113.1 or higher: this will break reading from YDB - github.com/ydb-platform/ydb-go-sdk/v3 v3.113.1 + github.com/ydb-platform/ydb-go-sdk/v3 v3.117.2 
github.com/ydb-platform/ydb-go-yc v0.11.0 go.mongodb.org/mongo-driver v1.17.1 go.uber.org/atomic v1.11.0 @@ -70,7 +68,7 @@ require ( github.com/AzureAD/microsoft-authentication-library-for-go v1.3.3 // indirect github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b // indirect github.com/andybalholm/brotli v1.1.0 // indirect - github.com/apache/thrift v0.16.0 // indirect + github.com/apache/thrift v0.17.0 // indirect github.com/aws/aws-sdk-go v1.55.6 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect @@ -90,14 +88,14 @@ require ( github.com/go-openapi/jsonreference v0.21.0 // indirect github.com/go-openapi/swag v0.23.0 // indirect github.com/go-viper/mapstructure/v2 v2.2.1 // indirect - github.com/goccy/go-json v0.10.0 // indirect + github.com/goccy/go-json v0.10.2 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang-jwt/jwt/v4 v4.5.2 // indirect github.com/golang-jwt/jwt/v5 v5.2.1 // indirect github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe // indirect github.com/golang-sql/sqlexp v0.1.0 // indirect github.com/golang/snappy v0.0.4 // indirect - github.com/google/flatbuffers v23.1.21+incompatible // indirect + github.com/google/flatbuffers v23.5.26+incompatible // indirect github.com/google/gnostic-models v0.6.8 // indirect github.com/google/gofuzz v1.2.0 // indirect github.com/google/s2a-go v0.1.9 // indirect @@ -117,7 +115,7 @@ require ( github.com/json-iterator/go v1.1.12 // indirect github.com/klauspost/asmfmt v1.3.2 // indirect github.com/klauspost/compress v1.18.0 // indirect - github.com/klauspost/cpuid/v2 v2.2.3 // indirect + github.com/klauspost/cpuid/v2 v2.2.5 // indirect github.com/knadh/koanf/maps v0.1.1 // indirect github.com/knadh/koanf/providers/confmap v0.1.0 // indirect github.com/knadh/koanf/v2 v2.1.2 // indirect @@ -140,13 +138,12 @@ require ( github.com/pierrec/lz4/v4 v4.1.18 // indirect github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect - github.com/power-devops/perfstat v0.0.0-20221212215047-62379fc7944b // indirect + github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect github.com/prometheus/sigv4 v0.1.2 // indirect github.com/segmentio/asm v1.2.0 // indirect github.com/shoenig/go-m1cpu v0.1.6 // indirect - github.com/shoenig/test v0.6.6 // indirect github.com/siddontang/go v0.0.0-20180604090527-bdc77568d726 // indirect - github.com/siddontang/go-log v0.0.0-20190221022429-1e957dd83bed // indirect + github.com/siddontang/go-log v0.0.0-20180807004314-8d05993dda07 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/tklauser/go-sysconf v0.3.12 // indirect github.com/tklauser/numcpus v0.6.1 // indirect @@ -154,8 +151,8 @@ require ( github.com/xdg-go/pbkdf2 v1.0.0 // indirect github.com/xdg-go/scram v1.1.2 // indirect github.com/xdg-go/stringprep v1.0.4 // indirect - github.com/yandex-cloud/go-genproto v0.0.0-20240425114406-68c9b49389a1 // indirect - github.com/ydb-platform/ydb-go-yc-metadata v0.5.3 // indirect + github.com/yandex-cloud/go-genproto v0.0.0-20211115083454-9ca41db5ed9e // indirect + github.com/ydb-platform/ydb-go-yc-metadata v0.5.2 // indirect github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect github.com/yusufpapurcu/wmi v1.2.4 // indirect github.com/zeebo/xxh3 v1.0.2 // indirect diff --git a/go.sum b/go.sum index f09e2272..83dc53f5 100644 --- a/go.sum +++ b/go.sum @@ -46,8 +46,8 @@ 
github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer5 github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/apache/arrow/go/v13 v13.0.0-20230512153032-cd6e2a4d2b93 h1:1uoAHmSNxtDmpRieflWwA89gJbxCH1sp6drdLrCczxo= github.com/apache/arrow/go/v13 v13.0.0-20230512153032-cd6e2a4d2b93/go.mod h1:/XatdE3kDIBqZKhZ7OBUHwP2jaASDFZHqF4puOWM8po= -github.com/apache/thrift v0.16.0 h1:qEy6UW60iVOlUy+b9ZR0d5WzUWYGOo4HfopoyBaNmoY= -github.com/apache/thrift v0.16.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU= +github.com/apache/thrift v0.17.0 h1:cMd2aj52n+8VoAtvSvLn4kDC3aZ6IAkBuqWQ2IDu7wo= +github.com/apache/thrift v0.17.0/go.mod h1:OLxhMRJxomX+1I/KUw03qoV3mMz16BwaKI+d4fPBx7Q= github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA= github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4= github.com/aws/aws-sdk-go v1.55.6 h1:cSg4pvZ3m8dgYcgqB97MrcdjUmZ1BeMYKUxMMB89IPk= @@ -178,8 +178,8 @@ github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIx github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/go-zookeeper/zk v1.0.4 h1:DPzxraQx7OrPyXq2phlGlNSIyWEsAox0RJmjTseMV6I= github.com/go-zookeeper/zk v1.0.4/go.mod h1:nOB03cncLtlp4t+UAkGSV+9beXP/akpekBwL+UX1Qcw= -github.com/goccy/go-json v0.10.0 h1:mXKd9Qw4NuzShiRlOXKews24ufknHO7gx30lsDyokKA= -github.com/goccy/go-json v0.10.0/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= +github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= @@ -194,7 +194,6 @@ github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= @@ -213,8 +212,8 @@ github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6 github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/google/flatbuffers v23.1.21+incompatible h1:bUqzx/MXCDxuS0hRJL2EfjyZL3uQrPbMocUa8zGqsTA= -github.com/google/flatbuffers v23.1.21+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= +github.com/google/flatbuffers v23.5.26+incompatible h1:M9dgRyhJemaM4Sw8+66GHBu8ioaQmyPLg1b8VwK5WJg= +github.com/google/flatbuffers v23.5.26+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/gnostic-models v0.6.8 
h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I= github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -358,8 +357,8 @@ github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= -github.com/klauspost/cpuid/v2 v2.2.3 h1:sxCkb+qR91z4vsqw4vGGZlDgPz3G7gjaLyK3V8y70BU= -github.com/klauspost/cpuid/v2 v2.2.3/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY= +github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= +github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/knadh/koanf/maps v0.1.1 h1:G5TjmUh2D7G2YWf5SQQqSiHRJEjaicvU0KpypqB3NIs= github.com/knadh/koanf/maps v0.1.1/go.mod h1:npD/QZY3V6ghQDdcQzl1W4ICNVTkohC8E73eI2xW4yI= github.com/knadh/koanf/providers/confmap v0.1.0 h1:gOkxhHkemwG4LezxxN8DMOFopOPghxRVp7JbIvdvqzU= @@ -484,9 +483,8 @@ github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= -github.com/power-devops/perfstat v0.0.0-20221212215047-62379fc7944b h1:0LFwY6Q3gMACTjAbMZBjXAqTOzOwFaj2Ld6cjeQ7Rig= -github.com/power-devops/perfstat v0.0.0-20221212215047-62379fc7944b/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/prometheus/client_golang v1.21.0-rc.0 h1:bR+RxBlwcr4q8hXkgSOA/J18j6n0/qH0Gb0DH+8c+RY= github.com/prometheus/client_golang v1.21.0-rc.0/go.mod h1:U9NM32ykUErtVBxdvD3zfi+EuFkkaBvMb09mIfe0Zgg= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= @@ -523,18 +521,16 @@ github.com/shirou/gopsutil/v3 v3.24.2 h1:kcR0erMbLg5/3LcInpw0X/rrPSqq4CDPyI6A6ZR github.com/shirou/gopsutil/v3 v3.24.2/go.mod h1:tSg/594BcA+8UdQU2XcW803GWYgdtauFFPgJCJKZlVk= github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= +github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU= github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k= -github.com/shoenig/test v0.6.6 h1:Oe8TPH9wAbv++YPNDKJWUnI8Q4PPWCx3UbOfH+FxiMU= -github.com/shoenig/test v0.6.6/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k= github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= github.com/shopspring/decimal v1.3.1/go.mod 
h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/siddontang/go v0.0.0-20180604090527-bdc77568d726 h1:xT+JlYxNGqyT+XcU8iUrN18JYed2TvG9yN5ULG2jATM= github.com/siddontang/go v0.0.0-20180604090527-bdc77568d726/go.mod h1:3yhqj7WBBfRhbBlzyOC3gUxftwsU0u8gqevxwIHQpMw= +github.com/siddontang/go-log v0.0.0-20180807004314-8d05993dda07 h1:oI+RNwuC9jF2g2lP0u0cVEEZrc/AYBCuFdvwrLWM/6Q= github.com/siddontang/go-log v0.0.0-20180807004314-8d05993dda07/go.mod h1:yFdBgwXP24JziuRl2NMUahT7nGLNOKi1SIiFxMttVD4= -github.com/siddontang/go-log v0.0.0-20190221022429-1e957dd83bed h1:KMgQoLJGCq1IoZpLZE3AIffh9veYWoVlsvA4ib55TMM= -github.com/siddontang/go-log v0.0.0-20190221022429-1e957dd83bed/go.mod h1:yFdBgwXP24JziuRl2NMUahT7nGLNOKi1SIiFxMttVD4= github.com/sijms/go-ora/v2 v2.8.19 h1:7LoKZatDYGi18mkpQTR/gQvG9yOdtc7hPAex96Bqisc= github.com/sijms/go-ora/v2 v2.8.19/go.mod h1:EHxlY6x7y9HAsdfumurRfTd+v8NrEOTR3Xl4FWlH6xk= github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= @@ -590,20 +586,18 @@ github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3k github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8= github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= +github.com/yandex-cloud/go-genproto v0.0.0-20211115083454-9ca41db5ed9e h1:9LPdmD1vqadsDQUva6t2O9MbnyvoOgo8nFNPaOIH5U8= github.com/yandex-cloud/go-genproto v0.0.0-20211115083454-9ca41db5ed9e/go.mod h1:HEUYX/p8966tMUHHT+TsS0hF/Ca/NYwqprC5WXSDMfE= -github.com/yandex-cloud/go-genproto v0.0.0-20240425114406-68c9b49389a1 h1:VDGcTxVXpQ6N2sKdKVzSrt1Rp6xm4thrCH5TeqMoWtY= -github.com/yandex-cloud/go-genproto v0.0.0-20240425114406-68c9b49389a1/go.mod h1:HEUYX/p8966tMUHHT+TsS0hF/Ca/NYwqprC5WXSDMfE= github.com/ydb-platform/ydb-go-genproto v0.0.0-20220203104745-929cf9c248bc/go.mod h1:cc138nptTn9eKptCQl/grxP6pBKpo/bnXDiOxuVZtps= github.com/ydb-platform/ydb-go-genproto v0.0.0-20250911135631-b3beddd517d9 h1:SKqSRP6/ocY2Z4twOqKEKxpmawVTHTvQiom7hrU6jt0= github.com/ydb-platform/ydb-go-genproto v0.0.0-20250911135631-b3beddd517d9/go.mod h1:Er+FePu1dNUieD+XTMDduGpQuCPssK5Q4BjF+IIXJ3I= github.com/ydb-platform/ydb-go-sdk/v3 v3.25.3/go.mod h1:PFizF/vJsdAgEwjK3DVSBD52kdmRkWfSIS2q2pA+e88= -github.com/ydb-platform/ydb-go-sdk/v3 v3.113.1 h1:VRRUtl0JlovbiZOEwqpreVYJNixY7IdgGvEkXRO2mK0= -github.com/ydb-platform/ydb-go-sdk/v3 v3.113.1/go.mod h1:Pp1w2xxUoLQ3NCNAwV7pvDq0TVQOdtAqs+ZiC+i8r14= +github.com/ydb-platform/ydb-go-sdk/v3 v3.117.2 h1:GHWxnXbKLQR8h3wvOy0pUPxANJJ/q/PwkoGzdVRWwvI= +github.com/ydb-platform/ydb-go-sdk/v3 v3.117.2/go.mod h1:UEMMk+JMunUveo2j+zlJEJ5I7ntf2+MbimciVNJYnNs= github.com/ydb-platform/ydb-go-yc v0.11.0 h1:DwrjZ+yCUqWhhCQOHKk4HnIt1CiWKgVYXKMiDNi5QUY= github.com/ydb-platform/ydb-go-yc v0.11.0/go.mod h1:uZ5l31+K3rnIeJAi6pzSkEQYT83Ozgxvr3UY/AV1L4w= +github.com/ydb-platform/ydb-go-yc-metadata v0.5.2 h1:nMtixUijP0Z7iHJNT9fOL+dbmEzZxqU6Xk87ll7hqXg= github.com/ydb-platform/ydb-go-yc-metadata v0.5.2/go.mod h1:82SQ4L3PewiEmFW4oTMc1sfPjODasIYxD/SKGsbK74s= -github.com/ydb-platform/ydb-go-yc-metadata v0.5.3 h1:rA1U3ocLyxX1h2Jfwlgphs03kc1ItXxN619QCI/oGBk= -github.com/ydb-platform/ydb-go-yc-metadata v0.5.3/go.mod h1:82SQ4L3PewiEmFW4oTMc1sfPjODasIYxD/SKGsbK74s= github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 
h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM= github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI= @@ -773,9 +767,9 @@ golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= diff --git a/scripts/debug/kqprun/scheme.YQ-4416.txt b/scripts/debug/kqprun/scheme.YQ-4416.txt new file mode 100644 index 00000000..3e06c192 --- /dev/null +++ b/scripts/debug/kqprun/scheme.YQ-4416.txt @@ -0,0 +1,10 @@ +CREATE OBJECT ydb_local_password (TYPE SECRET) WITH (value = password); + +CREATE EXTERNAL DATA SOURCE external_datasource WITH ( + SOURCE_TYPE="Ydb", + LOCATION="localhost:2136", + AUTH_METHOD="BASIC", + LOGIN="admin", + DATABASE_NAME="/Root", + PASSWORD_SECRET_NAME="ydb_local_password" +); diff --git a/scripts/debug/kqprun/schema.redis.local.txt b/scripts/debug/kqprun/scheme.redis.local.txt similarity index 100% rename from scripts/debug/kqprun/schema.redis.local.txt rename to scripts/debug/kqprun/scheme.redis.local.txt diff --git a/scripts/debug/kqprun/script.YQ-4416.txt b/scripts/debug/kqprun/script.YQ-4416.txt new file mode 100644 index 00000000..5fc94ef8 --- /dev/null +++ b/scripts/debug/kqprun/script.YQ-4416.txt @@ -0,0 +1,21 @@ +PRAGMA generic.UsePredicatePushdown="true"; + +SELECT + MIN(l_comment), + MIN(l_commitdate), + MIN(l_discount), + MIN(l_extendedprice), + MIN(l_linenumber), + MIN(l_linestatus), + MIN(l_orderkey), + MIN(l_partkey), + MIN(l_quantity), + MIN(l_receiptdate), + MIN(l_returnflag), + MIN(l_shipdate), + MIN(l_shipinstruct), + MIN(l_shipmode), + MIN(l_suppkey), + MIN(l_tax) +FROM (SELECT * FROM external_datasource.olap_lineitem_s10 LIMIT 10000000); + diff --git a/tests/infra/datasource/clickhouse/datasource.go b/tests/infra/datasource/clickhouse/datasource.go index d6b088f1..7719c0bf 100644 --- a/tests/infra/datasource/clickhouse/datasource.go +++ b/tests/infra/datasource/clickhouse/datasource.go @@ -38,6 +38,7 @@ func deriveDataSourceFromDockerCompose(ed *docker_compose.EndpointDeterminer) (* ) dsiNative := proto.Clone(dsi).(*api_common.TGenericDataSourceInstance) + dsiNative.Protocol = api_common.EGenericProtocol_NATIVE dsiNative.Endpoint, err = ed.GetEndpoint(serviceName, internalPortNative) @@ -46,6 +47,7 @@ func deriveDataSourceFromDockerCompose(ed *docker_compose.EndpointDeterminer) (* } dsiHTTP := proto.Clone(dsi).(*api_common.TGenericDataSourceInstance) + dsiHTTP.Protocol = api_common.EGenericProtocol_HTTP dsiHTTP.Endpoint, err = ed.GetEndpoint(serviceName, internalPortHTTP) diff --git a/tests/infra/datasource/clickhouse/tables.go 
b/tests/infra/datasource/clickhouse/tables.go index 86102ae7..1c612951 100644 --- a/tests/infra/datasource/clickhouse/tables.go +++ b/tests/infra/datasource/clickhouse/tables.go @@ -96,25 +96,25 @@ var tables = map[string]*test_utils.Table[int32, *array.Int32Builder]{ ptr.Uint16( common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(1988, 11, 20, 0, 0, 0, 0, time.UTC))), ptr.Uint16( - common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(2023, 03, 21, 0, 0, 0, 0, time.UTC))), + common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(2023, 3, 21, 0, 0, 0, 0, time.UTC))), }, "col_15_date32": []*uint16{ ptr.Uint16(common.MustTimeToYDBType[uint16]( common.TimeToYDBDate, time.Date(1988, 11, 20, 0, 0, 0, 0, time.UTC))), ptr.Uint16(common.MustTimeToYDBType[uint16]( - common.TimeToYDBDate, time.Date(2023, 03, 21, 0, 0, 0, 0, time.UTC))), + common.TimeToYDBDate, time.Date(2023, 3, 21, 0, 0, 0, 0, time.UTC))), }, "col_16_datetime": []*uint32{ ptr.Uint32(common.MustTimeToYDBType[uint32]( common.TimeToYDBDatetime, time.Date(1988, 11, 20, 12, 55, 28, 0, time.UTC))), ptr.Uint32(common.MustTimeToYDBType[uint32]( - common.TimeToYDBDatetime, time.Date(2023, 03, 21, 11, 21, 31, 0, time.UTC))), + common.TimeToYDBDatetime, time.Date(2023, 3, 21, 11, 21, 31, 0, time.UTC))), }, "col_17_datetime64": []*uint64{ ptr.Uint64(common.MustTimeToYDBType[uint64]( common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 123000000, time.UTC))), ptr.Uint64(common.MustTimeToYDBType[uint64]( - common.TimeToYDBTimestamp, time.Date(2023, 03, 21, 11, 21, 31, 456000000, time.UTC))), + common.TimeToYDBTimestamp, time.Date(2023, 3, 21, 11, 21, 31, 456000000, time.UTC))), }, }, }, @@ -174,28 +174,28 @@ var tables = map[string]*test_utils.Table[int32, *array.Int32Builder]{ }, "col_14_date": []*uint16{ ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(1988, 11, 20, 0, 0, 0, 0, time.UTC))), - ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(2023, 03, 21, 0, 0, 0, 0, time.UTC))), + ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(2023, 3, 21, 0, 0, 0, 0, time.UTC))), nil, }, "col_15_date32": []*uint16{ ptr.Uint16( common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(1988, 11, 20, 0, 0, 0, 0, time.UTC))), ptr.Uint16( - common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(2023, 03, 21, 0, 0, 0, 0, time.UTC))), + common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(2023, 3, 21, 0, 0, 0, 0, time.UTC))), nil, }, "col_16_datetime": []*uint32{ ptr.Uint32(common.MustTimeToYDBType[uint32]( common.TimeToYDBDatetime, time.Date(1988, 11, 20, 12, 55, 28, 0, time.UTC))), ptr.Uint32(common.MustTimeToYDBType[uint32]( - common.TimeToYDBDatetime, time.Date(2023, 03, 21, 11, 21, 31, 0, time.UTC))), + common.TimeToYDBDatetime, time.Date(2023, 3, 21, 11, 21, 31, 0, time.UTC))), nil, }, "col_17_datetime64": []*uint64{ ptr.Uint64(common.MustTimeToYDBType[uint64]( common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 123000000, time.UTC))), ptr.Uint64(common.MustTimeToYDBType[uint64]( - common.TimeToYDBTimestamp, time.Date(2023, 03, 21, 11, 21, 31, 456000000, time.UTC))), + common.TimeToYDBTimestamp, time.Date(2023, 3, 21, 11, 21, 31, 456000000, time.UTC))), nil, }, }, @@ -225,29 +225,29 @@ var tables = map[string]*test_utils.Table[int32, *array.Int32Builder]{ Columns: map[string]any{ "id": []int32{1, 2, 3}, "col_01_date": []*uint16{ - ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, 
time.Date(1970, 01, 01, 0, 0, 0, 0, time.UTC))), + ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC))), ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(1988, 11, 20, 0, 0, 0, 0, time.UTC))), - ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(2023, 03, 21, 0, 0, 0, 0, time.UTC))), + ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(2023, 3, 21, 0, 0, 0, 0, time.UTC))), }, "col_02_date32": []*uint16{ nil, ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(1988, 11, 20, 0, 0, 0, 0, time.UTC))), - ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(2023, 03, 21, 0, 0, 0, 0, time.UTC))), + ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(2023, 3, 21, 0, 0, 0, 0, time.UTC))), }, "col_03_datetime": []*uint32{ ptr.Uint32(common.MustTimeToYDBType[uint32]( - common.TimeToYDBDatetime, time.Date(1970, 01, 01, 0, 0, 0, 0, time.UTC))), + common.TimeToYDBDatetime, time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC))), ptr.Uint32(common.MustTimeToYDBType[uint32]( common.TimeToYDBDatetime, time.Date(1988, 11, 20, 12, 55, 28, 0, time.UTC))), ptr.Uint32(common.MustTimeToYDBType[uint32]( - common.TimeToYDBDatetime, time.Date(2023, 03, 21, 11, 21, 31, 0, time.UTC))), + common.TimeToYDBDatetime, time.Date(2023, 3, 21, 11, 21, 31, 0, time.UTC))), }, "col_04_datetime64": []*uint64{ nil, ptr.Uint64(common.MustTimeToYDBType[uint64]( common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 123456000, time.UTC))), ptr.Uint64(common.MustTimeToYDBType[uint64]( - common.TimeToYDBTimestamp, time.Date(2023, 03, 21, 11, 21, 31, 987654320, time.UTC))), + common.TimeToYDBTimestamp, time.Date(2023, 3, 21, 11, 21, 31, 987654320, time.UTC))), }, }, }, @@ -594,21 +594,21 @@ var tables = map[string]*test_utils.Table[int32, *array.Int32Builder]{ {}, { ptr.Uint32(common.MustTimeToYDBType[uint32]( - common.TimeToYDBDatetime, time.Date(1970, 01, 01, 0, 0, 0, 0, time.UTC))), + common.TimeToYDBDatetime, time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC))), }, { ptr.Uint32(common.MustTimeToYDBType[uint32]( - common.TimeToYDBDatetime, time.Date(1970, 01, 01, 0, 0, 0, 0, time.UTC))), + common.TimeToYDBDatetime, time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC))), ptr.Uint32(common.MustTimeToYDBType[uint32]( common.TimeToYDBDatetime, time.Date(1988, 11, 20, 12, 55, 28, 0, time.UTC))), }, { ptr.Uint32(common.MustTimeToYDBType[uint32]( - common.TimeToYDBDatetime, time.Date(1970, 01, 01, 0, 0, 0, 0, time.UTC))), + common.TimeToYDBDatetime, time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC))), ptr.Uint32(common.MustTimeToYDBType[uint32]( common.TimeToYDBDatetime, time.Date(1988, 11, 20, 12, 55, 28, 0, time.UTC))), ptr.Uint32(common.MustTimeToYDBType[uint32]( - common.TimeToYDBDatetime, time.Date(2023, 03, 21, 11, 21, 31, 0, time.UTC))), + common.TimeToYDBDatetime, time.Date(2023, 3, 21, 11, 21, 31, 0, time.UTC))), }, }, // "col": [][]time.Time{{}, {time.Now()}, {time.Now(), time.Now()}, {time.Now(), time.Now(), time.Now()}}, diff --git a/tests/infra/datasource/docker-compose.yaml b/tests/infra/datasource/docker-compose.yaml index 42b75e1b..c4e192dc 100644 --- a/tests/infra/datasource/docker-compose.yaml +++ b/tests/infra/datasource/docker-compose.yaml @@ -34,7 +34,7 @@ services: - ./postgresql/init:/docker-entrypoint-initdb.d ydb: - image: ghcr.io/ydb-platform/local-ydb:24.3.11.13 + image: ghcr.io/ydb-platform/local-ydb:stable-25-3-1-fix-openssl-certs 
container_name: ${USER}-fq-connector-go-tests-ydb hostname: localhost ports: diff --git a/tests/infra/datasource/greenplum/datasource.go b/tests/infra/datasource/greenplum/datasource.go index 6884816b..e13eecff 100644 --- a/tests/infra/datasource/greenplum/datasource.go +++ b/tests/infra/datasource/greenplum/datasource.go @@ -39,8 +39,8 @@ func deriveDataSourceFromDockerCompose(ed *docker_compose.EndpointDeterminer) (* } var err error - dsi.Endpoint, err = ed.GetEndpoint(serviceName, internalPort) + dsi.Endpoint, err = ed.GetEndpoint(serviceName, internalPort) if err != nil { return nil, fmt.Errorf("derive endpoint: %w", err) } diff --git a/tests/infra/datasource/greenplum/tables.go b/tests/infra/datasource/greenplum/tables.go index 096316b6..c571c2b6 100644 --- a/tests/infra/datasource/greenplum/tables.go +++ b/tests/infra/datasource/greenplum/tables.go @@ -188,14 +188,14 @@ var tables = map[string]*test_utils.Table[int32, *array.Int32Builder]{ ptr.Uint64(common.MustTimeToYDBType[uint64]( common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 123000000, time.UTC))), ptr.Uint64(common.MustTimeToYDBType[uint64]( - common.TimeToYDBTimestamp, time.Date(2023, 03, 21, 11, 21, 31, 456000000, time.UTC))), + common.TimeToYDBTimestamp, time.Date(2023, 3, 21, 11, 21, 31, 456000000, time.UTC))), nil, }, "col_24_date": []*uint16{ ptr.Uint16(common.MustTimeToYDBType[uint16]( common.TimeToYDBDate, time.Date(1988, 11, 20, 12, 55, 28, 0, time.UTC))), ptr.Uint16(common.MustTimeToYDBType[uint16]( - common.TimeToYDBDate, time.Date(2023, 03, 21, 11, 21, 31, 0, time.UTC))), + common.TimeToYDBDate, time.Date(2023, 3, 21, 11, 21, 31, 0, time.UTC))), nil, }, }, @@ -227,12 +227,12 @@ var tables = map[string]*test_utils.Table[int32, *array.Int32Builder]{ ptr.Uint64(common.MustTimeToYDBType[uint64]( common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 123000000, time.UTC))), ptr.Uint64(common.MustTimeToYDBType[uint64]( - common.TimeToYDBTimestamp, time.Date(2023, 03, 21, 11, 21, 31, 456000000, time.UTC))), + common.TimeToYDBTimestamp, time.Date(2023, 3, 21, 11, 21, 31, 456000000, time.UTC))), }, "col_02_date": []*uint16{ nil, ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(1988, 11, 20, 0, 0, 0, 0, time.UTC))), - ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(2023, 03, 21, 0, 0, 0, 0, time.UTC))), + ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(2023, 3, 21, 0, 0, 0, 0, time.UTC))), }, }, }, diff --git a/tests/infra/datasource/mongodb/datasource.go b/tests/infra/datasource/mongodb/datasource.go index 3b63e772..c951ec4a 100644 --- a/tests/infra/datasource/mongodb/datasource.go +++ b/tests/infra/datasource/mongodb/datasource.go @@ -62,8 +62,8 @@ func deriveDataSourceFromDockerCompose(ed *docker_compose.EndpointDeterminer) (* } var err error - dsi.Endpoint, err = ed.GetEndpoint(serviceName, internalPort) + dsi.Endpoint, err = ed.GetEndpoint(serviceName, internalPort) if err != nil { return nil, fmt.Errorf("derive endpoint: %w", err) } diff --git a/tests/infra/datasource/mongodb/suite.go b/tests/infra/datasource/mongodb/suite.go index 1d97edaf..d7634878 100644 --- a/tests/infra/datasource/mongodb/suite.go +++ b/tests/infra/datasource/mongodb/suite.go @@ -501,6 +501,7 @@ func (s *Suite) TestPushdownWithCoalesce() { func hexEncoded(s string) []byte { bytes, _ := hex.DecodeString(s) + return bytes } diff --git a/tests/infra/datasource/ms_sql_server/datasource.go b/tests/infra/datasource/ms_sql_server/datasource.go index 
95c7778e..45f50684 100644 --- a/tests/infra/datasource/ms_sql_server/datasource.go +++ b/tests/infra/datasource/ms_sql_server/datasource.go @@ -33,8 +33,8 @@ func deriveDataSourceFromDockerCompose(ed *docker_compose.EndpointDeterminer) (* } var err error - dsi.Endpoint, err = ed.GetEndpoint(serviceName, internalPort) + dsi.Endpoint, err = ed.GetEndpoint(serviceName, internalPort) if err != nil { return nil, fmt.Errorf("derive endpoint: %w", err) } diff --git a/tests/infra/datasource/ms_sql_server/tables.go b/tests/infra/datasource/ms_sql_server/tables.go index efd62314..890a71d9 100644 --- a/tests/infra/datasource/ms_sql_server/tables.go +++ b/tests/infra/datasource/ms_sql_server/tables.go @@ -111,28 +111,28 @@ var tables = map[string]*test_utils.Table[int32, *array.Int32Builder]{ "col_17_date": []*uint16{ ptr.Uint16(common.MustTimeToYDBType(common.TimeToYDBDate, time.Date(1988, 11, 20, 0, 0, 0, 0, time.UTC))), nil, - ptr.Uint16(common.MustTimeToYDBType(common.TimeToYDBDate, time.Date(2023, 03, 21, 0, 0, 0, 0, time.UTC))), + ptr.Uint16(common.MustTimeToYDBType(common.TimeToYDBDate, time.Date(2023, 3, 21, 0, 0, 0, 0, time.UTC))), }, "col_18_smalldatetime": []*uint32{ ptr.Uint32(common.MustTimeToYDBType(common.TimeToYDBDatetime, - time.Date(1988, 11, 20, 12, 55, 00, 0, time.UTC))), + time.Date(1988, 11, 20, 12, 55, 0, 0, time.UTC))), nil, ptr.Uint32(common.MustTimeToYDBType(common.TimeToYDBDatetime, - time.Date(2023, 03, 21, 11, 21, 00, 0, time.UTC))), + time.Date(2023, 3, 21, 11, 21, 0, 0, time.UTC))), }, "col_19_datetime": []*uint64{ ptr.Uint64(common.MustTimeToYDBType(common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 123000000, time.UTC))), nil, ptr.Uint64(common.MustTimeToYDBType(common.TimeToYDBTimestamp, - time.Date(2023, 03, 21, 11, 21, 31, 0, time.UTC))), + time.Date(2023, 3, 21, 11, 21, 31, 0, time.UTC))), }, "col_20_datetime2": []*uint64{ ptr.Uint64(common.MustTimeToYDBType(common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 123123000, time.UTC))), nil, ptr.Uint64(common.MustTimeToYDBType(common.TimeToYDBTimestamp, - time.Date(2023, 03, 21, 11, 21, 31, 0, time.UTC))), + time.Date(2023, 3, 21, 11, 21, 31, 0, time.UTC))), }, }, }, @@ -165,28 +165,28 @@ var tables = map[string]*test_utils.Table[int32, *array.Int32Builder]{ ptr.Uint16(common.MustTimeToYDBType[uint16]( common.TimeToYDBDate, time.Date(1988, 11, 20, 0, 0, 0, 0, time.UTC))), ptr.Uint16(common.MustTimeToYDBType[uint16]( - common.TimeToYDBDate, time.Date(2023, 03, 21, 0, 0, 0, 0, time.UTC))), + common.TimeToYDBDate, time.Date(2023, 3, 21, 0, 0, 0, 0, time.UTC))), }, "col_02_smalldatetime": []*uint32{ nil, ptr.Uint32(common.MustTimeToYDBType[uint32]( common.TimeToYDBDatetime, time.Date(1988, 11, 20, 12, 55, 0, 0, time.UTC))), ptr.Uint32(common.MustTimeToYDBType[uint32]( - common.TimeToYDBDatetime, time.Date(2023, 03, 21, 11, 21, 0, 0, time.UTC))), + common.TimeToYDBDatetime, time.Date(2023, 3, 21, 11, 21, 0, 0, time.UTC))), }, "col_03_datetime": []*uint64{ nil, ptr.Uint64(common.MustTimeToYDBType[uint64]( common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 123000000, time.UTC))), ptr.Uint64(common.MustTimeToYDBType[uint64]( - common.TimeToYDBTimestamp, time.Date(2023, 03, 21, 11, 21, 31, 0, time.UTC))), + common.TimeToYDBTimestamp, time.Date(2023, 3, 21, 11, 21, 31, 0, time.UTC))), }, "col_04_datetime2": []*uint64{ nil, ptr.Uint64(common.MustTimeToYDBType[uint64]( common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 123123000, time.UTC))), 
ptr.Uint64(common.MustTimeToYDBType[uint64]( - common.TimeToYDBTimestamp, time.Date(2023, 03, 21, 11, 21, 31, 0, time.UTC))), + common.TimeToYDBTimestamp, time.Date(2023, 3, 21, 11, 21, 31, 0, time.UTC))), }, }, }, diff --git a/tests/infra/datasource/mysql/datasource.go b/tests/infra/datasource/mysql/datasource.go index cc2bd3e0..a1d47a27 100644 --- a/tests/infra/datasource/mysql/datasource.go +++ b/tests/infra/datasource/mysql/datasource.go @@ -34,8 +34,8 @@ func deriveDataSourceFromDockerCompose(ed *docker_compose.EndpointDeterminer) (* } var err error - dsi.Endpoint, err = ed.GetEndpoint(serviceName, internalPort) + dsi.Endpoint, err = ed.GetEndpoint(serviceName, internalPort) if err != nil { return nil, fmt.Errorf("derive endpoint: %w", err) } diff --git a/tests/infra/datasource/mysql/tables.go b/tests/infra/datasource/mysql/tables.go index 96c59d13..a2067147 100644 --- a/tests/infra/datasource/mysql/tables.go +++ b/tests/infra/datasource/mysql/tables.go @@ -104,21 +104,21 @@ var tables = map[string]*test_utils.Table[int32, *array.Int32Builder]{ "col_13_date": []*uint16{ ptr.Uint16(common.MustTimeToYDBType(common.TimeToYDBDate, time.Date(1988, 11, 20, 0, 0, 0, 0, time.UTC))), nil, - ptr.Uint16(common.MustTimeToYDBType(common.TimeToYDBDate, time.Date(2024, 07, 01, 0, 0, 0, 0, time.UTC))), + ptr.Uint16(common.MustTimeToYDBType(common.TimeToYDBDate, time.Date(2024, 7, 1, 0, 0, 0, 0, time.UTC))), }, "col_14_datetime": []*uint64{ ptr.Uint64(common.MustTimeToYDBType(common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 34, 56, 777777000, time.UTC))), nil, ptr.Uint64(common.MustTimeToYDBType(common.TimeToYDBTimestamp, - time.Date(2024, 07, 01, 01, 02, 03, 444444000, time.UTC))), + time.Date(2024, 7, 1, 1, 2, 3, 444444000, time.UTC))), }, "col_15_timestamp": []*uint64{ ptr.Uint64(common.MustTimeToYDBType(common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 34, 56, 777777000, time.UTC))), nil, ptr.Uint64(common.MustTimeToYDBType(common.TimeToYDBTimestamp, - time.Date(2024, 07, 01, 01, 02, 03, 444444000, time.UTC))), + time.Date(2024, 7, 1, 1, 2, 3, 444444000, time.UTC))), }, "col_16_char": []*string{ptr.String("az"), nil, ptr.String("буки")}, "col_17_varchar": []*string{ptr.String("az"), nil, ptr.String("буки")}, @@ -168,21 +168,21 @@ var tables = map[string]*test_utils.Table[int32, *array.Int32Builder]{ "col_01_date": []*uint16{ nil, ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(1988, 11, 20, 0, 0, 0, 0, time.UTC))), - ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(2023, 03, 21, 0, 0, 0, 0, time.UTC))), + ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(2023, 3, 21, 0, 0, 0, 0, time.UTC))), }, "col_02_datetime": []*uint64{ nil, ptr.Uint64(common.MustTimeToYDBType[uint64]( common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 123000000, time.UTC))), ptr.Uint64(common.MustTimeToYDBType[uint64]( - common.TimeToYDBTimestamp, time.Date(2023, 03, 21, 11, 21, 31, 0, time.UTC))), + common.TimeToYDBTimestamp, time.Date(2023, 3, 21, 11, 21, 31, 0, time.UTC))), }, "col_03_timestamp": []*uint64{ nil, ptr.Uint64(common.MustTimeToYDBType[uint64]( common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 123000000, time.UTC))), ptr.Uint64(common.MustTimeToYDBType[uint64]( - common.TimeToYDBTimestamp, time.Date(2023, 03, 21, 11, 21, 31, 0, time.UTC))), + common.TimeToYDBTimestamp, time.Date(2023, 3, 21, 11, 21, 31, 0, time.UTC))), }, }, }, diff --git a/tests/infra/datasource/opensearch/datasource.go 
b/tests/infra/datasource/opensearch/datasource.go index f2eb0bb4..61db0816 100644 --- a/tests/infra/datasource/opensearch/datasource.go +++ b/tests/infra/datasource/opensearch/datasource.go @@ -33,8 +33,8 @@ func deriveDataSourceFromDockerCompose(ed *docker_compose.EndpointDeterminer) (* } var err error - dsi.Endpoint, err = ed.GetEndpoint(serviceName, internalPort) + dsi.Endpoint, err = ed.GetEndpoint(serviceName, internalPort) if err != nil { return nil, fmt.Errorf("derive endpoint: %w", err) } diff --git a/tests/infra/datasource/oracle/datasource.go b/tests/infra/datasource/oracle/datasource.go index 0e0d25c5..da115fcf 100644 --- a/tests/infra/datasource/oracle/datasource.go +++ b/tests/infra/datasource/oracle/datasource.go @@ -39,8 +39,8 @@ func deriveDataSourceFromDockerCompose(ed *docker_compose.EndpointDeterminer) (* } var err error - dsi.Endpoint, err = ed.GetEndpoint(endpointServiceName, internalPort) + dsi.Endpoint, err = ed.GetEndpoint(endpointServiceName, internalPort) if err != nil { return nil, fmt.Errorf("derive endpoint: %w", err) } diff --git a/tests/infra/datasource/oracle/init/scripts/startup/01_init.sh b/tests/infra/datasource/oracle/init/scripts/startup/01_init.sh index 29eee405..4f1ac66e 100755 --- a/tests/infra/datasource/oracle/init/scripts/startup/01_init.sh +++ b/tests/infra/datasource/oracle/init/scripts/startup/01_init.sh @@ -73,7 +73,7 @@ col_17_date, col_18_timestamp, col_19_timestamp_w_timezone, col_20_timestamp_w_local_timezone, col_21_json) VALUES (1, 1, 1.1, 123, 1.1, 1.1, 'varchar', N'варчар', 'c', 'cha', N'ч', N'чар', 'clob', N'клоб', utl_raw.cast_to_raw('ABCD'), utl_raw.cast_to_raw('EF'), - TO_DATE('01 01, 1970, 00:00:00', 'mm dd, YYYY, HH24:MI:SS'), + TO_DATE('01 1, 1970, 00:00:00', 'mm dd, YYYY, HH24:MI:SS'), TO_TIMESTAMP('1970-01-01 01:01:01.111111', 'YYYY-mm-dd HH24:MI:SS.FF'), TO_TIMESTAMP_TZ('1970-01-01 01:01:01.111111 -1:00', 'YYYY-mm-dd HH24:MI:SS.FF TZH:TZM'), TO_TIMESTAMP_TZ('1970-01-01 01:01:01.111111 -1:11', 'YYYY-mm-dd HH24:MI:SS.FF TZH:TZM'), @@ -81,7 +81,7 @@ VALUES ), (2, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL), (3, -1, -1.1, -123, -1.1, -1.1, 'varchar', N'варчар', 'c', 'cha', N'ч', N'чар', 'clob', N'клоб', utl_raw.cast_to_raw('1234'), utl_raw.cast_to_raw('5678'), - TO_DATE('01 01, 1970, 00:00:00', 'mm dd, YYYY, HH24:MI:SS'), + TO_DATE('01 1, 1970, 00:00:00', 'mm dd, YYYY, HH24:MI:SS'), TO_TIMESTAMP('1970-01-01 01:01:01.111111', 'YYYY-mm-dd HH24:MI:SS.FF'), TO_TIMESTAMP_TZ('1970-01-01 01:01:01.111111 -1:00', 'YYYY-mm-dd HH24:MI:SS.FF TZH:TZM'), TO_TIMESTAMP_TZ('1970-01-01 01:01:01.111111 -1:11', 'YYYY-mm-dd HH24:MI:SS.FF TZH:TZM'), diff --git a/tests/infra/datasource/oracle/tables.go b/tests/infra/datasource/oracle/tables.go index b2ecf549..851d16c1 100644 --- a/tests/infra/datasource/oracle/tables.go +++ b/tests/infra/datasource/oracle/tables.go @@ -161,31 +161,31 @@ var tables = map[string]*test_utils.Table[int64, *array.Int64Builder]{ }, "COL_17_DATE": []*uint32{ ptr.Uint32(common.MustTimeToYDBType(common.TimeToYDBDatetime, - time.Date(1970, 01, 01, 00, 00, 00, 000000000, time.UTC))), + time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC))), nil, ptr.Uint32(common.MustTimeToYDBType(common.TimeToYDBDatetime, - time.Date(1970, 01, 01, 00, 00, 00, 000000000, time.UTC))), + time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC))), }, "COL_18_TIMESTAMP": []*uint64{ ptr.Uint64(common.MustTimeToYDBType(common.TimeToYDBTimestamp, - time.Date(1970, 01, 01, 01, 01, 01, 111111000, 
time.UTC))), + time.Date(1970, 1, 1, 1, 1, 1, 111111000, time.UTC))), nil, ptr.Uint64(common.MustTimeToYDBType(common.TimeToYDBTimestamp, - time.Date(1970, 01, 01, 01, 01, 01, 111111000, time.UTC))), + time.Date(1970, 1, 1, 1, 1, 1, 111111000, time.UTC))), }, "COL_19_TIMESTAMP_W_TIMEZONE": []*uint64{ ptr.Uint64(common.MustTimeToYDBType(common.TimeToYDBTimestamp, - time.Date(1970, 01, 01, 02, 01, 01, 111111000, time.UTC))), + time.Date(1970, 1, 1, 2, 1, 1, 111111000, time.UTC))), nil, ptr.Uint64(common.MustTimeToYDBType(common.TimeToYDBTimestamp, - time.Date(1970, 01, 01, 02, 01, 01, 111111000, time.UTC))), + time.Date(1970, 1, 1, 2, 1, 1, 111111000, time.UTC))), }, "COL_20_TIMESTAMP_W_LOCAL_TIMEZONE": []*uint64{ ptr.Uint64(common.MustTimeToYDBType(common.TimeToYDBTimestamp, - time.Date(1970, 01, 01, 02, 12, 01, 111111000, time.UTC))), + time.Date(1970, 1, 1, 2, 12, 1, 111111000, time.UTC))), nil, ptr.Uint64(common.MustTimeToYDBType(common.TimeToYDBTimestamp, - time.Date(1970, 01, 01, 02, 12, 01, 111111000, time.UTC))), + time.Date(1970, 1, 1, 2, 12, 1, 111111000, time.UTC))), }, "COL_21_JSON": []*string{ ptr.String("{\"friends\":" + @@ -275,14 +275,14 @@ var tables = map[string]*test_utils.Table[int64, *array.Int64Builder]{ ptr.Uint32(common.MustTimeToYDBType[uint32]( common.TimeToYDBDatetime, time.Date(1988, 11, 20, 12, 55, 28, 0, time.UTC))), ptr.Uint32(common.MustTimeToYDBType[uint32]( - common.TimeToYDBDatetime, time.Date(2023, 03, 21, 11, 21, 31, 0, time.UTC))), + common.TimeToYDBDatetime, time.Date(2023, 3, 21, 11, 21, 31, 0, time.UTC))), }, "COL_02_TIMESTAMP": []*uint64{ nil, ptr.Uint64(common.MustTimeToYDBType[uint64]( common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 123000000, time.UTC))), ptr.Uint64(common.MustTimeToYDBType[uint64]( - common.TimeToYDBTimestamp, time.Date(2023, 03, 21, 11, 21, 31, 0, time.UTC))), + common.TimeToYDBTimestamp, time.Date(2023, 3, 21, 11, 21, 31, 0, time.UTC))), }, }, }, @@ -349,37 +349,37 @@ var tables = map[string]*test_utils.Table[int64, *array.Int64Builder]{ "COL_01_TIMESTAMP_0": []*uint64{ nil, ptr.Uint64(common.MustTimeToYDBType[uint64]( - common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 000000000, time.UTC))), + common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 0, time.UTC))), ptr.Uint64(common.MustTimeToYDBType[uint64]( - common.TimeToYDBTimestamp, time.Date(2023, 03, 21, 11, 21, 32, 000000000, time.UTC))), + common.TimeToYDBTimestamp, time.Date(2023, 3, 21, 11, 21, 32, 0, time.UTC))), }, "COL_02_TIMESTAMP_1": []*uint64{ nil, ptr.Uint64(common.MustTimeToYDBType[uint64]( common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 100000000, time.UTC))), ptr.Uint64(common.MustTimeToYDBType[uint64]( - common.TimeToYDBTimestamp, time.Date(2023, 03, 21, 11, 21, 31, 900000000, time.UTC))), + common.TimeToYDBTimestamp, time.Date(2023, 3, 21, 11, 21, 31, 900000000, time.UTC))), }, "COL_03_TIMESTAMP_6": []*uint64{ nil, ptr.Uint64(common.MustTimeToYDBType[uint64]( common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 123123000, time.UTC))), ptr.Uint64(common.MustTimeToYDBType[uint64]( - common.TimeToYDBTimestamp, time.Date(2023, 03, 21, 11, 21, 31, 888889000, time.UTC))), + common.TimeToYDBTimestamp, time.Date(2023, 3, 21, 11, 21, 31, 888889000, time.UTC))), }, "COL_04_TIMESTAMP_7": []*uint64{ nil, ptr.Uint64(common.MustTimeToYDBType[uint64]( common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 123123100, time.UTC))), ptr.Uint64(common.MustTimeToYDBType[uint64]( - common.TimeToYDBTimestamp, 
time.Date(2023, 03, 21, 11, 21, 31, 888888900, time.UTC))),
+			common.TimeToYDBTimestamp, time.Date(2023, 3, 21, 11, 21, 31, 888888900, time.UTC))),
 	},
 	"COL_05_TIMESTAMP_9": []*uint64{
 		nil,
 		ptr.Uint64(common.MustTimeToYDBType[uint64](
 			common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 123123123, time.UTC))),
 		ptr.Uint64(common.MustTimeToYDBType[uint64](
-			common.TimeToYDBTimestamp, time.Date(2023, 03, 21, 11, 21, 31, 888888888, time.UTC))),
+			common.TimeToYDBTimestamp, time.Date(2023, 3, 21, 11, 21, 31, 888888888, time.UTC))),
 	},
 },
},
diff --git a/tests/infra/datasource/postgresql/datasource.go b/tests/infra/datasource/postgresql/datasource.go
index dd968b64..8e049e5a 100644
--- a/tests/infra/datasource/postgresql/datasource.go
+++ b/tests/infra/datasource/postgresql/datasource.go
@@ -39,8 +39,8 @@ func deriveDataSourceFromDockerCompose(ed *docker_compose.EndpointDeterminer) (*
 	}

 	var err error
-	dsi.Endpoint, err = ed.GetEndpoint(serviceName, internalPort)
+	dsi.Endpoint, err = ed.GetEndpoint(serviceName, internalPort)

 	if err != nil {
 		return nil, fmt.Errorf("derive endpoint: %w", err)
 	}
diff --git a/tests/infra/datasource/postgresql/tables.go b/tests/infra/datasource/postgresql/tables.go
index d39d182e..929e8f37 100644
--- a/tests/infra/datasource/postgresql/tables.go
+++ b/tests/infra/datasource/postgresql/tables.go
@@ -193,14 +193,14 @@ var tablesIDInt32 = map[string]*test_utils.Table[int32, *array.Int32Builder]{
 			ptr.Uint64(common.MustTimeToYDBType[uint64](
 				common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 123000000, time.UTC))),
 			ptr.Uint64(common.MustTimeToYDBType[uint64](
-				common.TimeToYDBTimestamp, time.Date(2023, 03, 21, 11, 21, 31, 456000000, time.UTC))),
+				common.TimeToYDBTimestamp, time.Date(2023, 3, 21, 11, 21, 31, 456000000, time.UTC))),
 			nil,
 		},
 		"col_24_date": []*uint16{
 			ptr.Uint16(common.MustTimeToYDBType[uint16](
 				common.TimeToYDBDate, time.Date(1988, 11, 20, 12, 55, 28, 0, time.UTC))),
 			ptr.Uint16(common.MustTimeToYDBType[uint16](
-				common.TimeToYDBDate, time.Date(2023, 03, 21, 11, 21, 31, 0, time.UTC))),
+				common.TimeToYDBDate, time.Date(2023, 3, 21, 11, 21, 31, 0, time.UTC))),
 			nil,
 		},
 		"col_25_json": []*string{
@@ -254,12 +254,12 @@ var tablesIDInt32 = map[string]*test_utils.Table[int32, *array.Int32Builder]{
 			ptr.Uint64(common.MustTimeToYDBType[uint64](
 				common.TimeToYDBTimestamp, time.Date(1988, 11, 20, 12, 55, 28, 123000000, time.UTC))),
 			ptr.Uint64(common.MustTimeToYDBType[uint64](
-				common.TimeToYDBTimestamp, time.Date(2023, 03, 21, 11, 21, 31, 456000000, time.UTC))),
+				common.TimeToYDBTimestamp, time.Date(2023, 3, 21, 11, 21, 31, 456000000, time.UTC))),
 		},
 		"col_02_date": []*uint16{
 			nil,
 			ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(1988, 11, 20, 0, 0, 0, 0, time.UTC))),
-			ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(2023, 03, 21, 0, 0, 0, 0, time.UTC))),
+			ptr.Uint16(common.MustTimeToYDBType[uint16](common.TimeToYDBDate, time.Date(2023, 3, 21, 0, 0, 0, 0, time.UTC))),
 		},
 	},
 },
diff --git a/tests/infra/datasource/redis/suite.go b/tests/infra/datasource/redis/suite.go
index be169bd9..eaad95df 100644
--- a/tests/infra/datasource/redis/suite.go
+++ b/tests/infra/datasource/redis/suite.go
@@ -3,6 +3,7 @@ package redis
 import (
 	"context"
 	"crypto/tls"
+	"errors"
 	"fmt"
 	"time"

@@ -21,7 +22,7 @@ type Suite struct {

 func connectRedisFromDS(ctx context.Context, ds *datasource.DataSource) (*redis.Client, error) {
 	if len(ds.Instances) == 0 {
-		return nil, fmt.Errorf("no data source instances")
+		return nil, errors.New("no data source instances")
 	}

 	dsi := ds.Instances[0]
@@ -119,6 +120,8 @@ func (s *Suite) populateTestDataForCase(caseName string) error {
 			}

 			s.T().Logf("Hash value for %s: %v\n", key, val)
+		default:
+			return fmt.Errorf("unexpected type for key %s: %s", key, typ)
 		}
 	}
diff --git a/tests/infra/datasource/ydb/init/01_basic.sh b/tests/infra/datasource/ydb/init/01_basic.sh
index 2fd66ba2..867dd592 100644
--- a/tests/infra/datasource/ydb/init/01_basic.sh
+++ b/tests/infra/datasource/ydb/init/01_basic.sh
@@ -178,6 +178,12 @@ set -x
         ("6758ddf04f23be19dc7adf08356c697f21dc751aabc1c71b55d340ee920781ca", "a"),
         ("6758ddf04f23be19dc7adf08356c697f21dc751aabc1c71b55d340ee920781cb", NULL);
     COMMIT;
+
+    CREATE TABLE invalid_credentials (
+        id Int32 NOT NULL,
+        PRIMARY KEY (id)
+    );
+    COMMIT;
   '

 # YQ-3494
diff --git a/tests/infra/datasource/ydb/suite.go b/tests/infra/datasource/ydb/suite.go
index 931cf371..bb6cfd59 100644
--- a/tests/infra/datasource/ydb/suite.go
+++ b/tests/infra/datasource/ydb/suite.go
@@ -432,13 +432,13 @@ func (s *Suite) TestMissingDataSource() {

 func (s *Suite) TestInvalidLogin() {
 	for _, dsi := range s.dataSource.Instances {
-		suite.TestInvalidLogin(s.Base, dsi, tables["simple"])
+		suite.TestInvalidLogin(s.Base, dsi, tables["invalid_credentials"])
 	}
 }

 func (s *Suite) TestInvalidPassword() {
 	for _, dsi := range s.dataSource.Instances {
-		suite.TestInvalidPassword(s.Base, dsi, tables["simple"])
+		suite.TestInvalidPassword(s.Base, dsi, tables["invalid_credentials"])
 	}
 }
diff --git a/tests/infra/datasource/ydb/tables.go b/tests/infra/datasource/ydb/tables.go
index 7335dce2..ad3db12e 100644
--- a/tests/infra/datasource/ydb/tables.go
+++ b/tests/infra/datasource/ydb/tables.go
@@ -723,6 +723,16 @@ var tables = map[string]*test_utils.Table[int32, *array.Int32Builder]{
 			},
 		},
 	},
+	"invalid_credentials": {
+		Name:                  "invalid_credentials",
+		IDArrayBuilderFactory: newInt32IDArrayBuilder(memPool),
+		Schema: &test_utils.TableSchema{
+			Columns: map[string]*Ydb.Type{
+				"id": common.MakePrimitiveType(Ydb.Type_INT32),
+			},
+		},
+		Records: []*test_utils.Record[int32, *array.Int32Builder]{},
+	},
 }

 func pushdownSchemaYdb() *test_utils.TableSchema {
diff --git a/tests/infra/docker_compose/port.go b/tests/infra/docker_compose/port.go
index f72666bf..5e69ad91 100644
--- a/tests/infra/docker_compose/port.go
+++ b/tests/infra/docker_compose/port.go
@@ -1,6 +1,7 @@
 package docker_compose

 import (
+	"context"
 	"fmt"
 	"net"
 	"os"
@@ -8,6 +9,7 @@ import (
 	"path/filepath"
 	"strconv"
 	"strings"
+	"time"

 	api_common "github.com/ydb-platform/fq-connector-go/api/common"
 )
@@ -27,9 +29,13 @@ func (ed *EndpointDeterminer) GetEndpoint(service string, internalPort int) (*ap
 		fmt.Sprint(internalPort),
 	}

-	out, err := exec.Command(cmd, args...).CombinedOutput()
+	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+	defer cancel()
+
+	out, err := exec.CommandContext(ctx, cmd, args...).CombinedOutput()
 	if err != nil {
 		cmdStr := fmt.Sprintf("%s %s", cmd, strings.Join(args, " "))
+
 		return nil, fmt.Errorf("exec cmd '%v': %w\n%s", cmdStr, err, string(out))
 	}
diff --git a/tests/main_test.go b/tests/main_test.go
index ef55210d..8ce136bb 100644
--- a/tests/main_test.go
+++ b/tests/main_test.go
@@ -3,7 +3,6 @@ package tests
 import (
 	"flag"
 	"fmt"
-	"log"
 	"testing"

 	"github.com/apache/arrow/go/v13/arrow/array"
@@ -36,7 +35,7 @@ func TestMain(m *testing.M) {

 	state, err = suite.NewState()
 	if err != nil {
-		log.Fatal(err)
+		panic(err)
 	}

 	m.Run()
diff --git a/tests/suite/scenario.go b/tests/suite/scenario.go
index bd8e6b71..c39fb8e3 100644
--- a/tests/suite/scenario.go
+++ b/tests/suite/scenario.go
@@ -95,7 +95,7 @@ func TestInvalidLogin[ID test_utils.TableIDTypes, IDBUILDER test_utils.ArrowIDBu
 	// read some table
 	resp, err := s.Connector.ClientBuffering().DescribeTable(context.Background(), dsi, nil, table.Name)
 	s.Require().NoError(err)
-	s.Require().Equal(Ydb.StatusIds_UNAUTHORIZED, resp.Error.Status)
+	s.Require().Equal(Ydb.StatusIds_UNAUTHORIZED, resp.Error.Status, resp.Error)

 	// get stats snapshot after table reading
 	snapshot2, err := s.Connector.MetricsSnapshot()
diff --git a/tests/suite/state.go b/tests/suite/state.go
index fb921935..c064d120 100644
--- a/tests/suite/state.go
+++ b/tests/suite/state.go
@@ -1,6 +1,7 @@
 package suite

 import (
+	"errors"
 	"flag"
 	"fmt"
 	"log"
@@ -34,6 +35,7 @@ func (s *State) SkipSuiteIfNotEnabled(t *testing.T) {
 	functionNames := common.GetCallStackFunctionNames()
 	if len(functionNames) == 0 {
 		t.FailNow()
+		return
 	}

@@ -56,7 +58,7 @@ func NewState() (*State, error) {
 	flag.Parse()

 	if *projectPath == "" {
-		return nil, fmt.Errorf("empty projectPath parameter")
+		return nil, errors.New("empty projectPath parameter")
 	}

 	projectPathInfo, err := os.Stat(*projectPath)
diff --git a/tests/suite/suite.go b/tests/suite/suite.go
index b975f9de..b4fa1204 100644
--- a/tests/suite/suite.go
+++ b/tests/suite/suite.go
@@ -193,6 +193,7 @@ func (b *Base[ID, IDBUILDER]) doValidateTableMetadata(
 	customOptions ...ValidateTableOption,
 ) {
 	options := newDefaultValidateTableOptions()
+
 	for _, option := range customOptions {
 		option.apply(options)
 	}
@@ -228,6 +229,7 @@ func (b *Base[ID, IDBUILDER]) doValidateTable(
 	customOptions ...ValidateTableOption,
 ) {
 	options := newDefaultValidateTableOptions()
+
 	for _, option := range customOptions {
 		option.apply(options)
 	}
diff --git a/tests/utils/cast_test.go b/tests/utils/cast_test.go
index d669b3c2..dba1d126 100644
--- a/tests/utils/cast_test.go
+++ b/tests/utils/cast_test.go
@@ -1,7 +1,6 @@
 //go:build ignore
-// +build ignore

-package utils
+package utils //nolint:revive

 import "testing"
diff --git a/tests/utils/constraints.go b/tests/utils/constraints.go
index 939e51e2..2474c536 100644
--- a/tests/utils/constraints.go
+++ b/tests/utils/constraints.go
@@ -1,4 +1,4 @@
-package utils
+package utils //nolint:revive

 type TableIDTypes interface {
 	int32 | int64 | []byte | string
diff --git a/tests/utils/context.go b/tests/utils/context.go
index b0b5bdf6..3a97d355 100644
--- a/tests/utils/context.go
+++ b/tests/utils/context.go
@@ -1,4 +1,4 @@
-package utils
+package utils //nolint:revive

 import (
 	"context"
@@ -18,6 +18,7 @@ func getTestName() string {
 	for _, functionName := range functionNames {
 		if strings.Contains(functionName, "*Suite") {
 			split := strings.Split(functionName, ".")
+
 			return split[len(split)-1]
 		}
 	}
@@ -27,5 +28,6 @@ func getTestName() string {

 func NewContextWithTestName() context.Context {
 	md := metadata.Pairs(common.TestName, getTestName())
+
 	return metadata.NewOutgoingContext(context.Background(), md)
 }
diff --git a/tests/utils/id_array_builders.go b/tests/utils/id_array_builders.go
index 49f605a4..7cc51415 100644
--- a/tests/utils/id_array_builders.go
+++ b/tests/utils/id_array_builders.go
@@ -1,4 +1,4 @@
-package utils
+package utils //nolint:revive

 import (
 	"github.com/apache/arrow/go/v13/arrow/array"
diff --git a/tests/utils/matching.go b/tests/utils/matching.go
index d6867b0f..8a4b55af 100644
--- a/tests/utils/matching.go
+++ b/tests/utils/matching.go
@@ -1,4 +1,4 @@
-package utils
+package utils //nolint:revive

 import (
 	"bytes"
@@ -70,6 +70,7 @@ func swapColumns(table arrow.Record, schema *api_service_protos.TSchema) (arrow.
 	for i, field := range table.Schema().Fields() {
 		if field.Name == "id" || field.Name == "ID" || field.Name == "COL_00_ID" || field.Name == "_id" || field.Name == "key" {
 			idIndex = i
+			break
 		}
 	}

@@ -77,21 +78,25 @@ func swapColumns(table arrow.Record, schema *api_service_protos.TSchema) (arrow.
 	// build new record with the correct order of columns
 	newColumns := make([]arrow.Array, table.NumCols())
 	for i := range newColumns {
-		if i == 0 {
+		switch i {
+		case 0:
 			newColumns[i] = table.Column(idIndex)
-		} else if i == idIndex {
+		case idIndex:
 			newColumns[i] = table.Column(0)
-		} else {
+		default:
 			newColumns[i] = table.Column(i)
 		}
 	}

 	fields := table.Schema().Fields()
+
 	fields[0], fields[idIndex] = fields[idIndex], fields[0]
+
 	newTable := array.NewRecord(arrow.NewSchema(fields, nil), newColumns, table.NumRows())

 	// fix order in table schema as well
 	newSchema := proto.Clone(schema).(*api_service_protos.TSchema)
+
 	newSchema.Columns[0], newSchema.Columns[idIndex] = newSchema.Columns[idIndex], newSchema.Columns[0]

 	return newTable, newSchema
@@ -315,6 +320,7 @@ func sortTableByID[ID TableIDTypes, IDBUILDER ArrowIDBuilder[ID]](table arrow.Re
 		case *array.Struct:
 			// Создаем новый StructBuilder на основе существующего типа
 			structType := table.Column(colIdx + 1).DataType().(*arrow.StructType)
+
 			restBuilders[colIdx] = array.NewStructBuilder(pool, structType)
 		default:
 			panic(fmt.Sprintf("UNSUPPORTED TYPE: %T", table.Column(colIdx+1)))
@@ -371,6 +377,7 @@ func sortTableByID[ID TableIDTypes, IDBUILDER ArrowIDBuilder[ID]](table arrow.Re
 				fieldValue := structData[fieldName]
 				if fieldValue == nil {
 					fieldBuilder.AppendNull()
+
 					continue
 				}

@@ -537,6 +544,7 @@ func matchStructArrays(
 		if expectedStructsBytes[i] == nil {
 			require.True(t, actual.IsNull(i),
 				fmt.Sprintf("struct column: %v\nexpected NULL at index %d, got non-NULL\n", columnName, i))
+
 			continue
 		}

@@ -558,6 +566,7 @@ func matchStructArrays(
 			if expectedFieldValue == nil {
 				require.True(t, fieldArray.IsNull(i),
 					fmt.Sprintf("struct field %s: expected NULL at row %d, got non-NULL", fieldName, i))
+
 				continue
 			}
diff --git a/tests/utils/matching_test.go b/tests/utils/matching_test.go
index 73f9a44c..9c970847 100644
--- a/tests/utils/matching_test.go
+++ b/tests/utils/matching_test.go
@@ -1,4 +1,4 @@
-package utils
+package utils //nolint:revive

 import (
 	"testing"
@@ -17,6 +17,7 @@ func TestSortTableByID(t *testing.T) {
 	t.Run("Test single row table", func(t *testing.T) {
 		idBuilder := array.NewInt32Builder(pool)
 		idBuilder.Append(1)
+
 		idArr := idBuilder.NewArray()
 		defer idArr.Release()

@@ -37,12 +38,14 @@ func TestSortTableByID(t *testing.T) {
 	t.Run("Test multiple rows table", func(t *testing.T) {
 		idBuilder := array.NewInt32Builder(pool)
 		idBuilder.AppendValues([]int32{3, 1, 2}, nil)
+
 		idArr := idBuilder.NewArray()
 		defer idArr.Release()

 		stringBuilder := array.NewStringBuilder(pool)
 		stringBuilder.AppendValues([]string{"three", "one", "two"}, nil)
+
 		stringArr := stringBuilder.NewArray()
 		defer stringArr.Release()

@@ -70,18 +73,21 @@ func TestSortTableByID(t *testing.T) {
 	t.Run("Test with different data types", func(t *testing.T) {
 		idBuilder := array.NewInt32Builder(pool)
 		idBuilder.AppendValues([]int32{2, 3, 1}, nil)
+
 		idArr := idBuilder.NewArray()
 		defer idArr.Release()

 		int64Builder := array.NewInt64Builder(pool)
 		int64Builder.AppendValues([]int64{200, 300, 100}, nil)
+
 		int64Arr := int64Builder.NewArray()
 		defer int64Arr.Release()

 		float32Builder := array.NewFloat32Builder(pool)
 		float32Builder.AppendValues([]float32{2.2, 3.3, 1.1}, nil)
+
 		float32Arr := float32Builder.NewArray()
 		defer float32Arr.Release()

@@ -112,12 +118,14 @@ func TestSortTableByID(t *testing.T) {
 	t.Run("Test with optional int values", func(t *testing.T) {
 		idBuilder := array.NewInt32Builder(pool)
 		idBuilder.AppendValues([]int32{3, 1, 2}, nil)
+
 		idArr := idBuilder.NewArray()
 		defer idArr.Release()

 		int32Builder := array.NewInt32Builder(pool)
 		int32Builder.AppendValues([]int32{30, 0, 20}, []bool{true, false, true})
+
 		int32Arr := int32Builder.NewArray()
 		defer int32Arr.Release()
diff --git a/tests/utils/predicate.go b/tests/utils/predicate.go
index df9ae77a..15b9e13b 100644
--- a/tests/utils/predicate.go
+++ b/tests/utils/predicate.go
@@ -1,4 +1,4 @@
-package utils
+package utils //nolint:revive

 import (
 	"github.com/ydb-platform/ydb-go-genproto/protos/Ydb"
diff --git a/tools/docker_compose_update/internal.go b/tools/docker_compose_update/internal.go
index 5fb207a4..fedb3880 100644
--- a/tools/docker_compose_update/internal.go
+++ b/tools/docker_compose_update/internal.go
@@ -5,6 +5,7 @@ import (
 	"bytes"
 	"context"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"io"
 	"log"
@@ -100,6 +101,7 @@ func getChecksum(tag string) (string, error) {
 		line := scanner.Text()
 		if strings.Contains(line, "sha256") {
 			fmt.Println(line)
+
 			line = strings.Split(line, "class=\"Link\">")[1]
 			line = strings.Split(line, "")[0]
 			checksum = line
@@ -109,7 +111,7 @@ func getChecksum(tag string) (string, error) {
 	}

 	if checksum == "" {
-		return "", fmt.Errorf("no checksum found by latest tag")
+		return "", errors.New("no checksum found by latest tag")
 	}

 	return checksum, nil
@@ -173,10 +175,11 @@ func changeDockerCompose(logger *zap.Logger, path string, newImage string) error
 	if services, ok := data["services"].(map[string]any); ok {
 		if _, ok := services["fq-connector-go"].(map[string]any); !ok {
-			return fmt.Errorf("error finding fq-connector-go in services")
+			return errors.New("error finding fq-connector-go in services")
 		}

 		fqConnectorGo := services["fq-connector-go"].(map[string]any)
+
 		fqConnectorGo["image"] = newImage
 	} else {
 		return fmt.Errorf("error finding services in file: %s", path)
@@ -208,6 +211,7 @@ func generateURL(baseURL string, params map[string]string) (string, error) {
 	}

 	q := u.Query()
+
 	for key, value := range params {
 		q.Set(key, value)
 	}
diff --git a/tools/docker_compose_update/main.go b/tools/docker_compose_update/main.go
index 10387ff9..d83baa7c 100644
--- a/tools/docker_compose_update/main.go
+++ b/tools/docker_compose_update/main.go
@@ -24,6 +24,7 @@ func main() {
 func run(logger *zap.Logger) error {
 	path := flag.String("path", "path", "Specify the path to ydb file.")
+
 	flag.Parse()

 	if err := checkFileExistance(*path); err != nil {
diff --git a/tools/version/main.go b/tools/version/main.go
index 2233160a..385ed5e4 100644
--- a/tools/version/main.go
+++ b/tools/version/main.go
@@ -2,12 +2,15 @@ package main

 import (
 	"bytes"
+	"context"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"os"
 	"os/exec"
 	"strings"
 	"text/template"
+	"time"

 	"go.uber.org/zap"

@@ -72,7 +75,7 @@ func run(logger *zap.Logger) error {
 	)

 	if len(os.Args) != 2 {
-		return fmt.Errorf("wrong args")
+		return errors.New("wrong number of args")
 	}

 	switch os.Args[1] {
@@ -137,7 +140,6 @@ func getArcVersion() (versionData, error) {
 	}

 	branch, err := execCommand("bash", "-c", "arc branch | grep \\* | cut -d ' ' -f2")
-
 	if err != nil {
 		return data, fmt.Errorf("branch exec command: %w", err)
 	}
@@ -313,7 +315,10 @@ func getGitVersion() (versionData, error) {

 func execCommand(command string, args ...string) (string, error) {
 	var stderr bytes.Buffer

-	cmd := exec.Command(command, args...)
+	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+	defer cancel()
+
+	cmd := exec.CommandContext(ctx, command, args...)
 	cmd.Stderr = &stderr
diff --git a/tools/ydb/dump_tablet_id_data/main.go b/tools/ydb/dump_tablet_id_data/main.go
index da81a721..4504f9dd 100644
--- a/tools/ydb/dump_tablet_id_data/main.go
+++ b/tools/ydb/dump_tablet_id_data/main.go
@@ -34,7 +34,6 @@ func (r *tabletRows) Next() bool {
 	var err error

 	r.lastRow, err = r.lastResultSet.NextRow(r.ctx)
-
 	if err != nil {
 		if errors.Is(err, io.EOF) {
 			r.err = nil
@@ -57,6 +56,7 @@ func (r *tabletRows) NextResultSet() bool {
 			r.err = nil
 		} else {
 			fmt.Println("obtaining next result", err, r.ctx.Err())
+
 			r.err = fmt.Errorf("next result set: %w", err)
 		}

@@ -151,6 +151,7 @@ func makeDriver(ctx context.Context, logger *zap.Logger, endpoint, database, tok
 	}

 	var scheme string
+
 	if useTLS {
 		scheme = "grpcs"

@@ -166,6 +167,7 @@ func makeDriver(ctx context.Context, logger *zap.Logger, endpoint, database, tok
 	dsn := fmt.Sprintf("%s://%s%s", scheme, endpoint, database)

 	authMethod := "none"
+
 	if token != "" {
 		authMethod = "IAM token"
 	}
@@ -233,7 +235,6 @@ func executeQuery(parentCtx context.Context, logger *zap.Logger, ydbDriver *ydb.
 			return ctx.Err()
 		}
 	}, query.WithIdempotent())
-
 	if finalErr != nil {
 		return nil, fmt.Errorf("execute query: %w", finalErr)
 	}
@@ -290,6 +291,7 @@ func main() {
 	}

 	logger := common.NewDefaultLogger()
+
 	defer func() {
 		_ = logger.Sync()
 	}()
diff --git a/tools/ydb/olap_inconsistency/main.go b/tools/ydb/olap_inconsistency/main.go
index 8479a083..38154672 100644
--- a/tools/ydb/olap_inconsistency/main.go
+++ b/tools/ydb/olap_inconsistency/main.go
@@ -182,7 +182,6 @@ func getTabletIDs(ctx context.Context, logger *zap.Logger, ydbDriver *ydb.Driver

 		return nil
 	}, query.WithIdempotent())
-
 	if err != nil {
 		return nil, fmt.Errorf("querying tablet IDs: %w", err)
 	}
@@ -252,7 +251,7 @@ func parseFlags() (*Config, error) {
 	}

 	if endTime.Before(startTime) {
-		return nil, fmt.Errorf("end time must be after start time")
+		return nil, errors.New("end time must be after start time")
 	}

 	return &Config{
@@ -277,6 +276,7 @@ func main() {
 	}

 	logger := common.NewDefaultLogger()
+
 	defer func() {
 		_ = logger.Sync()
 	}()
@@ -376,6 +376,7 @@ func makeDriver(ctx context.Context, logger *zap.Logger, endpoint, database, tok
 	}

 	var scheme string
+
 	if useTLS {
 		scheme = "grpcs"

@@ -391,6 +392,7 @@ func makeDriver(ctx context.Context, logger *zap.Logger, endpoint, database, tok
 	dsn := fmt.Sprintf("%s://%s%s", scheme, endpoint, database)

 	authMethod := "none"
+
 	if token != "" {
 		authMethod = "IAM token"
 	}
@@ -521,6 +523,7 @@ func executeQuery(
 	queryText := fmt.Sprintf(queryTemplate, table, tabletID)

 	paramsBuilder := ydb.ParamsBuilder()
+
 	paramsBuilder = paramsBuilder.Param("$p0").Timestamp(startTime)
 	paramsBuilder = paramsBuilder.Param("$p1").Timestamp(endTime)

@@ -578,7 +581,6 @@ func executeQuery(

 		return nil
 	}, query.WithIdempotent())
-
 	if err != nil {
 		return nil, fmt.Errorf("execute query: %w", err)
 	}
diff --git a/tools/ydb/query_service_negative/main.go b/tools/ydb/query_service_negative/main.go
index 645a15c5..adc9958e 100644
--- a/tools/ydb/query_service_negative/main.go
+++ b/tools/ydb/query_service_negative/main.go
@@ -116,7 +116,6 @@ func getTableDescription(ctx context.Context, ydbDriver *ydb.Driver) (*options.D
 		},
 		table.WithIdempotent(),
 	)
-
 	if err != nil {
 		return nil, fmt.Errorf("get table description: %w", err)
 	}
@@ -132,6 +131,7 @@ func getData(ctx context.Context, ydbDriver *ydb.Driver) error {
 	`

 	paramsBuilder := ydb.ParamsBuilder()
+
 	paramsBuilder = paramsBuilder.Param("$p0").BeginOptional().Int32(nil).EndOptional()

 	result, err := s.Query(ctx, fmt.Sprintf(queryText, tableName), query.WithParameters(paramsBuilder.Build()))
@@ -150,6 +150,7 @@ func getData(ctx context.Context, ydbDriver *ydb.Driver) error {
 		if err != nil {
 			if errors.Is(err, io.EOF) {
 				fmt.Println("EOF")
+
 				return nil
 			}

@@ -158,7 +159,6 @@ func getData(ctx context.Context, ydbDriver *ydb.Driver) error {

 		return nil
 	})
-
 	if finalErr != nil {
 		return fmt.Errorf("get data: %w", finalErr)
 	}