diff --git a/code/go/internal/validator/semantic/validate_policy_template_path.go b/code/go/internal/validator/semantic/validate_policy_template_path.go new file mode 100644 index 000000000..153aaacfc --- /dev/null +++ b/code/go/internal/validator/semantic/validate_policy_template_path.go @@ -0,0 +1,197 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package semantic + +import ( + "errors" + "fmt" + "io/fs" + "os" + "path" + "strings" + + "gopkg.in/yaml.v3" + + "github.com/elastic/package-spec/v3/code/go/internal/fspath" + "github.com/elastic/package-spec/v3/code/go/pkg/specerrors" +) + +const ( + defaultStreamTemplatePath = "stream.yml.hbs" +) + +var ( + errRequiredTemplatePath = errors.New("template_path is required for input type packages") + errFailedToReadManifest = errors.New("failed to read manifest") + errFailedToParseManifest = errors.New("failed to parse manifest") + errTemplateNotFound = errors.New("template file not found") +) + +type policyTemplateInput struct { + Type string `yaml:"type"` + TemplatePath string `yaml:"template_path"` // optional for integration packages +} + +type policyTemplate struct { + Name string `yaml:"name"` + TemplatePath string `yaml:"template_path"` // input type packages require this field + Inputs []policyTemplateInput `yaml:"inputs"` // integration type packages +} + +type packageManifest struct { // package manifest + Type string `yaml:"type"` // integration or input + PolicyTemplates []policyTemplate `yaml:"policy_templates"` +} + +type stream struct { + Input string `yaml:"input"` + TemplatePath string `yaml:"template_path"` +} + +type streamManifest struct { + Streams []stream `yaml:"streams"` +} + +// ValidatePolicyTemplates validates that all referenced template_path files exist for integration and input policy templates +func ValidatePolicyTemplates(fsys fspath.FS) specerrors.ValidationErrors { + var errs specerrors.ValidationErrors + + manifestPath := "manifest.yml" + data, err := fs.ReadFile(fsys, manifestPath) + if err != nil { + return specerrors.ValidationErrors{ + specerrors.NewStructuredErrorf("file \"%s\" is invalid: %w", fsys.Path(manifestPath), errFailedToReadManifest)} + } + + var manifest packageManifest + err = yaml.Unmarshal(data, &manifest) + if err != nil { + return specerrors.ValidationErrors{ + specerrors.NewStructuredErrorf("file \"%s\" is invalid: %w", fsys.Path(manifestPath), errFailedToParseManifest)} + } + + for _, policyTemplate := range manifest.PolicyTemplates { + switch manifest.Type { + case "integration": + err := validateIntegrationPackagePolicyTemplate(fsys, policyTemplate) + if err != nil { + errs = append(errs, specerrors.NewStructuredErrorf( + "file \"%s\" is invalid: policy template \"%s\" references input template_path: %w", + fsys.Path(manifestPath), policyTemplate.Name, err)) + } + case "input": + err := validateInputPackagePolicyTemplate(fsys, policyTemplate) + if err != nil { + errs = append(errs, specerrors.NewStructuredErrorf( + "file \"%s\" is invalid: policy template \"%s\" references template_path \"%s\": %w", + fsys.Path(manifestPath), policyTemplate.Name, policyTemplate.TemplatePath, err)) + } + } + } + + return errs +} + +// validateInputPackagePolicyTemplate validates the template_path at the policy template level for input type packages +// if the template_path is empty, it returns an error as this
field is required for input type packages +func validateInputPackagePolicyTemplate(fsys fspath.FS, policyTemplate policyTemplate) error { + if policyTemplate.TemplatePath == "" { + return errRequiredTemplatePath + } + return validateAgentInputTemplatePath(fsys, policyTemplate.TemplatePath) +} + +func validateAgentInputTemplatePath(fsys fspath.FS, tmplPath string) error { + templatePath := path.Join("agent", "input", tmplPath) + _, err := fs.Stat(fsys, templatePath) + if err != nil { + if errors.Is(err, os.ErrNotExist) { + return errTemplateNotFound + } + return fmt.Errorf("failed to stat template file %s: %w", fsys.Path(templatePath), err) + } + + return nil +} + +// validateIntegrationPackagePolicyTemplate validates the template_path at the inputs level for integration type packages +// if the template_path is empty, it looks up at the data stream manifest for the stream input that matches the input type of the policy template +// and uses its template_path to look for the corresponding template file at the data stream stream directory +// if no matching stream input is found, it returns an error as at least one stream input must match the input type of the policy template +// if a matching stream input is found but its template_path file does not exist, it returns an error +func validateIntegrationPackagePolicyTemplate(fsys fspath.FS, policyTemplate policyTemplate) error { + for _, input := range policyTemplate.Inputs { + if input.TemplatePath != "" { + err := validateAgentInputTemplatePath(fsys, input.TemplatePath) + if err != nil { + return err + } + continue + } + + var found bool + // when an input.TemplatePath is empty, lookup at the data stream manifest + err := fs.WalkDir( + fsys, + path.Join("data_stream"), + func(p string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + if found { + return fs.SkipAll + } + // read the data stream manifest and look for the stream input that matches the input.type of the policy template + if !d.IsDir() && d.Name() == "manifest.yml" { + data, err := fs.ReadFile(fsys, p) + if err != nil { + return err + } + var sm streamManifest + err = yaml.Unmarshal(data, &sm) + if err != nil { + return err + } + for _, stream := range sm.Streams { + // skip if the stream input type does not match the policy template input type + if stream.Input == input.Type { + streamName := path.Base(path.Dir(p)) + // as template_path is optional at the stream level, default to "stream.yml.hbs" if not set + templatePath := stream.TemplatePath + if templatePath == "" { + templatePath = defaultStreamTemplatePath + } + + // look for the template_path file at the data stream stream directory + err := fs.WalkDir( + fsys, + path.Join("data_stream", streamName, "agent", "stream"), + func(p string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + if !d.IsDir() && d.Name() != "" && strings.HasSuffix(d.Name(), templatePath) { + found = true + return fs.SkipAll + } + return nil + }) + if err != nil { + return err + } + } + } + } + return nil + }) + if err != nil { + return err + } + if !found { + return errTemplateNotFound + } + } + return nil +} diff --git a/code/go/internal/validator/semantic/validate_policy_template_path_test.go b/code/go/internal/validator/semantic/validate_policy_template_path_test.go new file mode 100644 index 000000000..9db2445d2 --- /dev/null +++ b/code/go/internal/validator/semantic/validate_policy_template_path_test.go @@ -0,0 +1,214 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package semantic + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/elastic/package-spec/v3/code/go/internal/fspath" +) + +func TestValidatePolicyTemplates(t *testing.T) { + + t.Run("input_manifest_with_policy_template_success", func(t *testing.T) { + d := t.TempDir() + + err := os.MkdirAll(filepath.Join(d, "agent", "input"), 0o755) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(d, "manifest.yml"), []byte(` +type: input +policy_templates: + - name: udp + template_path: udp.yml.hbs +`), 0o644) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(d, "agent", "input", "udp.yml.hbs"), []byte("# UDP template"), 0o644) + require.NoError(t, err) + + errs := ValidatePolicyTemplates(fspath.DirFS(d)) + require.Empty(t, errs, "expected no validation errors") + + }) + + t.Run("input_manifest_with_policy_template_missing_template_path", func(t *testing.T) { + d := t.TempDir() + + err := os.MkdirAll(filepath.Join(d, "agent", "input"), 0o755) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(d, "manifest.yml"), []byte(` +type: input +policy_templates: + - name: udp +`), 0o644) + require.NoError(t, err) + + errs := ValidatePolicyTemplates(fspath.DirFS(d)) + require.NotEmpty(t, errs, "expected validation errors") + + assert.Len(t, errs, 1) + assert.ErrorIs(t, errs[0], errRequiredTemplatePath) + }) + + t.Run("input_manifest_with_policy_template_missing_template_file", func(t *testing.T) { + d := t.TempDir() + + err := os.MkdirAll(filepath.Join(d, "agent", "input"), 0o755) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(d, "manifest.yml"), []byte(` +type: input +policy_templates: + - name: udp + template_path: missing.yml.hbs +`), 0o644) + require.NoError(t, err) + + errs := ValidatePolicyTemplates(fspath.DirFS(d)) + require.NotEmpty(t, errs, "expected validation errors") + assert.Len(t, errs, 1) + assert.ErrorIs(t, errs[0], errTemplateNotFound) + }) + t.Run("integration_manifest_with_policy_template_success", func(t *testing.T) { + d := t.TempDir() + + err := os.MkdirAll(filepath.Join(d, "agent", "input"), 0o755) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(d, "manifest.yml"), []byte(` +type: integration +policy_templates: + - inputs: + - title: Test UDP + template_path: udp.yml.hbs +`), 0o644) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(d, "agent", "input", "udp.yml.hbs"), []byte("# UDP template"), 0o644) + require.NoError(t, err) + + errs := ValidatePolicyTemplates(fspath.DirFS(d)) + require.Empty(t, errs, "expected no validation errors") + }) + + t.Run("integration_manifest_with_policy_template_invalid", func(t *testing.T) { + d := t.TempDir() + + err := os.MkdirAll(filepath.Join(d, "agent", "input"), 0o755) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(d, "manifest.yml"), []byte(` +type: integration +policy_templates: + - inputs: + - title: Test UDP + template_path: missing.yml.hbs +`), 0o644) + require.NoError(t, err) + + errs := ValidatePolicyTemplates(fspath.DirFS(d)) + require.NotEmpty(t, errs, "expected validation errors") + assert.Len(t, errs, 1) + assert.ErrorIs(t, errs[0], errTemplateNotFound) + }) + + t.Run("integration_manifest_with_policy_template_empty_input_default_stream_success", func(t *testing.T) { + d := t.TempDir() + + err :=
os.MkdirAll(filepath.Join(d, "data_stream", "stream", "agent", "stream"), 0o755) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(d, "data_stream", "stream", "manifest.yml"), []byte(` +streams: + - input: httpjson`), 0o644) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(d, "data_stream", "stream", "agent", "stream", "stream.yml.hbs"), []byte("# HTTP JSON template"), 0o644) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(d, "manifest.yml"), []byte(` +type: integration +policy_templates: + - inputs: + - type: httpjson +`), 0o644) + require.NoError(t, err) + + errs := ValidatePolicyTemplates(fspath.DirFS(d)) + require.Empty(t, errs, "expected no validation errors") + }) + + t.Run("integration_manifest_with_policy_template_empty_input_custom_stream_success", func(t *testing.T) { + d := t.TempDir() + + err := os.MkdirAll(filepath.Join(d, "data_stream", "stream", "agent", "stream"), 0o755) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(d, "data_stream", "stream", "manifest.yml"), []byte(` +streams: + - input: httpjson + template_path: httpjson.yml.hbs`), 0o644) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(d, "data_stream", "stream", "agent", "stream", "httpjson.yml.hbs"), []byte("# HTTP JSON template"), 0o644) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(d, "manifest.yml"), []byte(` +type: integration +policy_templates: + - inputs: + - type: httpjson +`), 0o644) + require.NoError(t, err) + + errs := ValidatePolicyTemplates(fspath.DirFS(d)) + require.Empty(t, errs, "expected no validation errors") + }) + + t.Run("integration_manifest_with_policy_template_empty_input_custom_stream_invalid", func(t *testing.T) { + d := t.TempDir() + + err := os.MkdirAll(filepath.Join(d, "data_stream", "stream", "agent", "stream"), 0o755) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(d, "data_stream", "stream", "manifest.yml"), []byte(` +streams: + - input: httpjson + template_path: httpjson.yml.hbs`), 0o644) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(d, "manifest.yml"), []byte(` +type: integration +policy_templates: + - inputs: + - type: httpjson +`), 0o644) + require.NoError(t, err) + + errs := ValidatePolicyTemplates(fspath.DirFS(d)) + + require.NotEmpty(t, errs, "expected validation errors") + assert.Len(t, errs, 1) + assert.ErrorIs(t, errs[0], errTemplateNotFound) + }) + + t.Run("integration_manifest_with_policy_template_empty_input_default_stream_invalid", func(t *testing.T) { + d := t.TempDir() + + err := os.MkdirAll(filepath.Join(d, "data_stream", "stream", "agent", "stream"), 0o755) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(d, "data_stream", "stream", "manifest.yml"), []byte(` +streams: + - input: httpjson +`), 0o644) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(d, "manifest.yml"), []byte(` +type: integration +policy_templates: + - inputs: + - type: httpjson +`), 0o644) + require.NoError(t, err) + + errs := ValidatePolicyTemplates(fspath.DirFS(d)) + + require.NotEmpty(t, errs, "expected validation errors") + assert.Len(t, errs, 1) + assert.ErrorIs(t, errs[0], errTemplateNotFound) + }) + +} diff --git a/code/go/internal/validator/spec.go b/code/go/internal/validator/spec.go index 0db1c315d..6d1b6231f 100644 --- a/code/go/internal/validator/spec.go +++ b/code/go/internal/validator/spec.go @@ -217,6 +217,7 @@ func (s Spec) rules(pkgType string, rootSpec spectypes.ItemSpec) validationRules {fn: semantic.ValidateDocsStructure}, {fn: 
semantic.ValidateDeploymentModes, types: []string{"integration"}}, {fn: semantic.ValidateDurationVariables, since: semver.MustParse("3.5.0")}, + {fn: semantic.ValidatePolicyTemplates, types: []string{"integration", "input"}}, } var validationRules validationRules diff --git a/code/go/pkg/validator/limits_test.go b/code/go/pkg/validator/limits_test.go index 6b9ff91f2..964440486 100644 --- a/code/go/pkg/validator/limits_test.go +++ b/code/go/pkg/validator/limits_test.go @@ -163,6 +163,7 @@ func (fs *mockFS) Good() *mockFS { newMockFile("_dev/deploy/docker/docker-compose.yml").WithContent("version: 2.3"), newMockFile("data_stream/foo/manifest.yml").WithContent(datastreamManifestYml), newMockFile("data_stream/foo/fields/base-fields.yml").WithContent(fieldsYml), + newMockFile("data_stream/foo/agent/stream/stream.yml.hbs").WithContent("# Stream template"), ) } diff --git a/code/go/pkg/validator/validator_test.go b/code/go/pkg/validator/validator_test.go index 90ff17ece..cb3d040d5 100644 --- a/code/go/pkg/validator/validator_test.go +++ b/code/go/pkg/validator/validator_test.go @@ -310,6 +310,27 @@ func TestValidateFile(t *testing.T) { "field policy_templates.0.input: Must not be present", }, }, + "input_policy_template_valid": {}, + "integration_policy_template_valid": {}, + "stream_templates_valid": {}, + "input_policy_template_invalid": { + invalidPkgFilePath: "manifest.yml", + expectedErrContains: []string{ + "policy template \"sample\" references template_path \"missing.yml.hbs\": template file not found", + }, + }, + "integration_policy_template_invalid": { + invalidPkgFilePath: "manifest.yml", + expectedErrContains: []string{ + "policy template \"sample\" references template_path \"missing.yml.hbs\": template file not found", + }, + }, + "stream_templates_invalid": { + invalidPkgFilePath: "data_stream/test_stream/manifest.yml", + expectedErrContains: []string{ + "stream \"filestream\" references template_path \"missing.yml.hbs\": template file not found", + }, + }, } for pkgName, test := range tests { diff --git a/spec/changelog.yml b/spec/changelog.yml index 5b1a0f8e3..7144162b8 100644 --- a/spec/changelog.yml +++ b/spec/changelog.yml @@ -20,6 +20,9 @@ - description: Add support for script testing in data streams. type: enhancement link: https://github.com/elastic/package-spec/pull/985 + - description: Add validation for template_path in policy templates and data streams. + type: enhancement + link: https://github.com/elastic/package-spec/pull/986 - version: 3.5.0 changes: - description: Add `duration` variable data type with `min_duration` and `max_duration` validation properties. diff --git a/spec/input/manifest.spec.yml b/spec/input/manifest.spec.yml index 926ea0a5b..ecce958dc 100644 --- a/spec/input/manifest.spec.yml +++ b/spec/input/manifest.spec.yml @@ -94,6 +94,7 @@ spec: - description - type - input + - template_path icons: $ref: "../integration/manifest.spec.yml#/definitions/icons" screenshots: diff --git a/spec/integration/data_stream/manifest.spec.yml b/spec/integration/data_stream/manifest.spec.yml index ac3114878..3bd372602 100644 --- a/spec/integration/data_stream/manifest.spec.yml +++ b/spec/integration/data_stream/manifest.spec.yml @@ -586,6 +586,7 @@ spec: template_path: description: "Path to Elasticsearch index template for stream." 
type: string + default: "stream.yml.hbs" required_vars: $ref: "#/definitions/required_vars" vars: diff --git a/spec/integration/manifest.spec.yml b/spec/integration/manifest.spec.yml index 38c233d56..5d99db571 100644 --- a/spec/integration/manifest.spec.yml +++ b/spec/integration/manifest.spec.yml @@ -589,7 +589,7 @@ spec: description: Path of the config template for the input. type: string examples: - - ./agent/input/template.yml.hbs + - template.yml.hbs input_group: description: Name of the input group type: string diff --git a/test/packages/bad_duplicated_fields_input/manifest.yml b/test/packages/bad_duplicated_fields_input/manifest.yml index 1158d13b0..988f863b3 100644 --- a/test/packages/bad_duplicated_fields_input/manifest.yml +++ b/test/packages/bad_duplicated_fields_input/manifest.yml @@ -27,6 +27,7 @@ policy_templates: type: logs input: log_file description: Collect sample logs + template_path: input.yml.hbs vars: - name: paths required: true diff --git a/test/packages/docs_extra_files/data_stream/pe/agent/stream/stream.yml.hbs b/test/packages/docs_extra_files/data_stream/pe/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/good/data_stream/foo/agent/stream/stream.yml.hbs b/test/packages/good/data_stream/foo/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/good/data_stream/k8s_data_stream/agent/stream/stream.yml.hbs b/test/packages/good/data_stream/k8s_data_stream/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/good/data_stream/k8s_data_stream_no_definitions/agent/stream/stream.yml.hbs b/test/packages/good/data_stream/k8s_data_stream_no_definitions/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/good/data_stream/pe/agent/stream/stream.yml.hbs b/test/packages/good/data_stream/pe/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/good_v2/data_stream/foo/agent/stream/stream.yml.hbs b/test/packages/good_v2/data_stream/foo/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/good_v2/data_stream/k8s_data_stream/agent/stream/stream.yml.hbs b/test/packages/good_v2/data_stream/k8s_data_stream/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/good_v2/data_stream/k8s_data_stream_no_definitions/agent/stream/stream.yml.hbs b/test/packages/good_v2/data_stream/k8s_data_stream_no_definitions/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/good_v2/data_stream/pe/agent/stream/stream.yml.hbs b/test/packages/good_v2/data_stream/pe/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/good_v3/agent/input/apache_metrics.yml.hbs b/test/packages/good_v3/agent/input/apache_metrics.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/good_v3/agent/input/aws_s3.yml.hbs b/test/packages/good_v3/agent/input/aws_s3.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/good_v3/agent/input/httpjson.yml.hbs b/test/packages/good_v3/agent/input/httpjson.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/good_v3/data_stream/foo/agent/stream/stream.yml.hbs b/test/packages/good_v3/data_stream/foo/agent/stream/stream.yml.hbs new file mode 100644 index 
000000000..e69de29bb diff --git a/test/packages/good_v3/data_stream/k8s_data_stream/agent/stream/stream.yml.hbs b/test/packages/good_v3/data_stream/k8s_data_stream/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/good_v3/data_stream/k8s_data_stream_no_definitions/agent/stream/stream.yml.hbs b/test/packages/good_v3/data_stream/k8s_data_stream_no_definitions/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/good_v3/data_stream/pe/agent/stream/stream.yml.hbs b/test/packages/good_v3/data_stream/pe/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/good_v3/data_stream/subobjects/agent/stream/stream.yml.hbs b/test/packages/good_v3/data_stream/subobjects/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/good_v3/manifest.yml b/test/packages/good_v3/manifest.yml index cde55e300..f33c6dab2 100644 --- a/test/packages/good_v3/manifest.yml +++ b/test/packages/good_v3/manifest.yml @@ -94,6 +94,7 @@ policy_templates: - http://127.0.0.1 hide_in_deployment_modes: - agentless + template_path: apache_metrics.yml.hbs - type: httpjson title: Collect data via HTTP JSON API description: Collecting data from HTTP JSON API (default only) @@ -110,6 +111,7 @@ policy_templates: min_duration: 10s max_duration: 4h3m2s1ms default: 1m + template_path: httpjson.yml.hbs - name: apache-agentless title: Apache logs and metrics in agentless description: Collect logs and metrics from Apache instances in agentless @@ -143,6 +145,7 @@ policy_templates: show_user: true default: - http://127.0.0.1 + template_path: apache_metrics.yml.hbs - type: aws/s3 title: Collect S3 logs (agentless only) description: Collecting logs from AWS S3 in agentless mode @@ -154,6 +157,7 @@ policy_templates: title: S3 Bucket Name show_user: true required: true + template_path: aws_s3.yml.hbs owner: github: elastic/foobar type: elastic diff --git a/test/packages/input_policy_template_invalid/LICENSE.txt b/test/packages/input_policy_template_invalid/LICENSE.txt new file mode 100644 index 000000000..809108b85 --- /dev/null +++ b/test/packages/input_policy_template_invalid/LICENSE.txt @@ -0,0 +1,93 @@ +Elastic License 2.0 + +URL: https://www.elastic.co/licensing/elastic-license + +## Acceptance + +By using the software, you agree to all of the terms and conditions below. + +## Copyright License + +The licensor grants you a non-exclusive, royalty-free, worldwide, +non-sublicensable, non-transferable license to use, copy, distribute, make +available, and prepare derivative works of the software, in each case subject to +the limitations and conditions below. + +## Limitations + +You may not provide the software to third parties as a hosted or managed +service, where the service provides users with access to any substantial set of +the features or functionality of the software. + +You may not move, change, disable, or circumvent the license key functionality +in the software, and you may not remove or obscure any functionality in the +software that is protected by the license key. + +You may not alter, remove, or obscure any licensing, copyright, or other notices +of the licensor in the software. Any use of the licensor’s trademarks is subject +to applicable law. 
+ +## Patents + +The licensor grants you a license, under any patent claims the licensor can +license, or becomes able to license, to make, have made, use, sell, offer for +sale, import and have imported the software, in each case subject to the +limitations and conditions in this license. This license does not cover any +patent claims that you cause to be infringed by modifications or additions to +the software. If you or your company make any written claim that the software +infringes or contributes to infringement of any patent, your patent license for +the software granted under these terms ends immediately. If your company makes +such a claim, your patent license ends immediately for work on behalf of your +company. + +## Notices + +You must ensure that anyone who gets a copy of any part of the software from you +also gets a copy of these terms. + +If you modify the software, you must include in any modified copies of the +software prominent notices stating that you have modified the software. + +## No Other Rights + +These terms do not imply any licenses other than those expressly granted in +these terms. + +## Termination + +If you use the software in violation of these terms, such use is not licensed, +and your licenses will automatically terminate. If the licensor provides you +with a notice of your violation, and you cease all violation of this license no +later than 30 days after you receive that notice, your licenses will be +reinstated retroactively. However, if you violate these terms after such +reinstatement, any additional violation of these terms will cause your licenses +to terminate automatically and permanently. + +## No Liability + +*As far as the law allows, the software comes as is, without any warranty or +condition, and the licensor will not be liable to you for any damages arising +out of these terms or the use or nature of the software, under any kind of +legal claim.* + +## Definitions + +The **licensor** is the entity offering these terms, and the **software** is the +software the licensor makes available under these terms, including any portion +of it. + +**you** refers to the individual or entity agreeing to these terms. + +**your company** is any legal entity, sole proprietorship, or other kind of +organization that you work for, plus all organizations that have control over, +are under the control of, or are under common control with that +organization. **control** means ownership of substantially all the assets of an +entity, or the power to direct its management and policies by vote, contract, or +otherwise. Control can be direct or indirect. + +**your licenses** are all the licenses granted to you for the software under +these terms. + +**use** means anything you do with the software requiring one of your licenses. + +**trademark** means trademarks, service marks, and similar rights. diff --git a/test/packages/input_policy_template_invalid/_dev/build/docs/README.md b/test/packages/input_policy_template_invalid/_dev/build/docs/README.md new file mode 100644 index 000000000..12bb2d9b0 --- /dev/null +++ b/test/packages/input_policy_template_invalid/_dev/build/docs/README.md @@ -0,0 +1,101 @@ +{{- generatedHeader }} +{{/* +This template can be used as a starting point for writing documentation for your new integration. For each section, fill in the details +described in the comments. 
+ +Find more detailed documentation guidelines in https://www.elastic.co/docs/extend/integrations/documentation-guidelines +*/}} +# New Package Integration for Elastic + +## Overview +{{/* Complete this section with a short summary of what data this integration collects and what use cases it enables */}} +The New Package integration for Elastic enables collection of ... +This integration facilitates ... + +### Compatibility +{{/* Complete this section with information on what 3rd party software or hardware versions this integration is compatible with */}} +This integration is compatible with ... + +### How it works +{{/* Add a high level overview on how this integration works. For example, does it collect data from API calls or recieving data from a network or file.*/}} + +## What data does this integration collect? +{{/* Complete this section with information on what types of data the integration collects, and link to reference documentation if available */}} +The {{.Manifest.Title}} integration collects log messages of the following types: +* ... + +### Supported use cases +{{/* Add details on the use cases that can be enabled by using this integration. Explain why a user would want to install and use this integration. */}} + +## What do I need to use this integration? +{{/* List any vendor-specific prerequisites needed before starting to install the integration. */}} + +## How do I deploy this integration? + +### Agent-based deployment + +Elastic Agent must be installed. For more details, check the Elastic Agent [installation instructions](docs-content://reference/fleet/install-elastic-agents.md). You can install only one Elastic Agent per host. + +Elastic Agent is required to stream data from the syslog or log file receiver and ship the data to Elastic, where the events will then be processed via the integration's ingest pipelines. + +{{/* If agentless is available for this integration, we'll want to include that here as well. +### Agentless deployment + +Agentless deployments are only supported in Elastic Serverless and Elastic Cloud environments. Agentless deployments provide a means to ingest data while avoiding the orchestration, management, and maintenance needs associated with standard ingest infrastructure. Using an agentless deployment makes manual agent deployment unnecessary, allowing you to focus on your data instead of the agent that collects it. + +For more information, refer to [Agentless integrations](https://www.elastic.co/guide/en/serverless/current/security-agentless-integrations.html) and [Agentless integrations FAQ](https://www.elastic.co/guide/en/serverless/current/agentless-integration-troubleshooting.html) +*/}} + +### Onboard / configure +{{/* List the steps that will need to be followed in order to completely set up a working inte completely set up a working integration. +For integrations that support multiple input types, be sure to add steps for all inputs. +*/}} + +### Validation +{{/* How can the user test whether the integration is working? Including example commands or test files if applicable */}} + +## Troubleshooting + +For help with Elastic ingest tools, check [Common problems](https://www.elastic.co/docs/troubleshoot/ingest/fleet/common-problems). +{{/* +Add any vendor specific troubleshooting here. + +Are there common issues or “gotchas” for deploying this integration? If so, how can they be resolved? +If applicable, links to the third-party software’s troubleshooting documentation. 
+*/}} + +## Scaling + +For more information on architectures that can be used for scaling this integration, check the [Ingest Architectures](https://www.elastic.co/docs/manage-data/ingest/ingest-reference-architectures) documentation. +{{/* Add any vendor specific scaling information here */}} + +## Reference +{{/* Repeat for each data stream of the current type +### {Data stream name} + +The `{data stream name}` data stream provides events from {source} of the following types: {list types}. + +For each data_stream_name, include an optional summary of the datastream, the exported fields reference table and the sample event. + +The fields template function will be replaced by a generated list of all fields from the `fields/` directory of the data stream when building the integration. + +#### {data stream name} fields + +To include a generated list of fields from the `fields/` directory, uncomment and use: +{{ fields "data_stream_name" }} + +The event template function will be replace by a sample event, taken from `sample_event.json`, when building this integration. + +To include a sample event from `sample_event.json`, uncomment and use: +{{ event "data_stream_name" }} + +*/}} + +### Inputs used +{{/* All inputs used by this package will be automatically listed here. */}} +{{ inputDocs }} + +### API usage +{{/* For integrations that use APIs to collect data, document all the APIs that are used, and link to relevent information */}} +These APIs are used with this integration: +* ... diff --git a/test/packages/input_policy_template_invalid/agent/input/input.yml.hbs b/test/packages/input_policy_template_invalid/agent/input/input.yml.hbs new file mode 100644 index 000000000..bbfe99b59 --- /dev/null +++ b/test/packages/input_policy_template_invalid/agent/input/input.yml.hbs @@ -0,0 +1,10 @@ +data_stream: + dataset: {{data_stream.dataset}} +paths: +{{#each paths as |path i|}} + - {{path}} +{{/each}} +exclude_files: [".gz$"] +processors: + - add_locale: ~ + diff --git a/test/packages/input_policy_template_invalid/changelog.yml b/test/packages/input_policy_template_invalid/changelog.yml new file mode 100644 index 000000000..bb0320a52 --- /dev/null +++ b/test/packages/input_policy_template_invalid/changelog.yml @@ -0,0 +1,6 @@ +# newer versions go on top +- version: "0.0.1" + changes: + - description: Initial draft of the package + type: enhancement + link: https://github.com/elastic/integrations/pull/1 # FIXME Replace with the real PR link diff --git a/test/packages/input_policy_template_invalid/docs/README.md b/test/packages/input_policy_template_invalid/docs/README.md new file mode 100644 index 000000000..12bb2d9b0 --- /dev/null +++ b/test/packages/input_policy_template_invalid/docs/README.md @@ -0,0 +1,101 @@ +{{- generatedHeader }} +{{/* +This template can be used as a starting point for writing documentation for your new integration. For each section, fill in the details +described in the comments. + +Find more detailed documentation guidelines in https://www.elastic.co/docs/extend/integrations/documentation-guidelines +*/}} +# New Package Integration for Elastic + +## Overview +{{/* Complete this section with a short summary of what data this integration collects and what use cases it enables */}} +The New Package integration for Elastic enables collection of ... +This integration facilitates ... + +### Compatibility +{{/* Complete this section with information on what 3rd party software or hardware versions this integration is compatible with */}} +This integration is compatible with ... 
+ +### How it works +{{/* Add a high level overview on how this integration works. For example, does it collect data from API calls or recieving data from a network or file.*/}} + +## What data does this integration collect? +{{/* Complete this section with information on what types of data the integration collects, and link to reference documentation if available */}} +The {{.Manifest.Title}} integration collects log messages of the following types: +* ... + +### Supported use cases +{{/* Add details on the use cases that can be enabled by using this integration. Explain why a user would want to install and use this integration. */}} + +## What do I need to use this integration? +{{/* List any vendor-specific prerequisites needed before starting to install the integration. */}} + +## How do I deploy this integration? + +### Agent-based deployment + +Elastic Agent must be installed. For more details, check the Elastic Agent [installation instructions](docs-content://reference/fleet/install-elastic-agents.md). You can install only one Elastic Agent per host. + +Elastic Agent is required to stream data from the syslog or log file receiver and ship the data to Elastic, where the events will then be processed via the integration's ingest pipelines. + +{{/* If agentless is available for this integration, we'll want to include that here as well. +### Agentless deployment + +Agentless deployments are only supported in Elastic Serverless and Elastic Cloud environments. Agentless deployments provide a means to ingest data while avoiding the orchestration, management, and maintenance needs associated with standard ingest infrastructure. Using an agentless deployment makes manual agent deployment unnecessary, allowing you to focus on your data instead of the agent that collects it. + +For more information, refer to [Agentless integrations](https://www.elastic.co/guide/en/serverless/current/security-agentless-integrations.html) and [Agentless integrations FAQ](https://www.elastic.co/guide/en/serverless/current/agentless-integration-troubleshooting.html) +*/}} + +### Onboard / configure +{{/* List the steps that will need to be followed in order to completely set up a working inte completely set up a working integration. +For integrations that support multiple input types, be sure to add steps for all inputs. +*/}} + +### Validation +{{/* How can the user test whether the integration is working? Including example commands or test files if applicable */}} + +## Troubleshooting + +For help with Elastic ingest tools, check [Common problems](https://www.elastic.co/docs/troubleshoot/ingest/fleet/common-problems). +{{/* +Add any vendor specific troubleshooting here. + +Are there common issues or “gotchas” for deploying this integration? If so, how can they be resolved? +If applicable, links to the third-party software’s troubleshooting documentation. +*/}} + +## Scaling + +For more information on architectures that can be used for scaling this integration, check the [Ingest Architectures](https://www.elastic.co/docs/manage-data/ingest/ingest-reference-architectures) documentation. +{{/* Add any vendor specific scaling information here */}} + +## Reference +{{/* Repeat for each data stream of the current type +### {Data stream name} + +The `{data stream name}` data stream provides events from {source} of the following types: {list types}. + +For each data_stream_name, include an optional summary of the datastream, the exported fields reference table and the sample event. 
+ +The fields template function will be replaced by a generated list of all fields from the `fields/` directory of the data stream when building the integration. + +#### {data stream name} fields + +To include a generated list of fields from the `fields/` directory, uncomment and use: +{{ fields "data_stream_name" }} + +The event template function will be replace by a sample event, taken from `sample_event.json`, when building this integration. + +To include a sample event from `sample_event.json`, uncomment and use: +{{ event "data_stream_name" }} + +*/}} + +### Inputs used +{{/* All inputs used by this package will be automatically listed here. */}} +{{ inputDocs }} + +### API usage +{{/* For integrations that use APIs to collect data, document all the APIs that are used, and link to relevent information */}} +These APIs are used with this integration: +* ... diff --git a/test/packages/input_policy_template_invalid/fields/base-fields.yml b/test/packages/input_policy_template_invalid/fields/base-fields.yml new file mode 100644 index 000000000..7c798f453 --- /dev/null +++ b/test/packages/input_policy_template_invalid/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: '@timestamp' + type: date + description: Event timestamp. diff --git a/test/packages/input_policy_template_invalid/img/sample-logo.svg b/test/packages/input_policy_template_invalid/img/sample-logo.svg new file mode 100644 index 000000000..6268dd88f --- /dev/null +++ b/test/packages/input_policy_template_invalid/img/sample-logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/test/packages/input_policy_template_invalid/img/sample-screenshot.png b/test/packages/input_policy_template_invalid/img/sample-screenshot.png new file mode 100644 index 000000000..d7a56a3ec Binary files /dev/null and b/test/packages/input_policy_template_invalid/img/sample-screenshot.png differ diff --git a/test/packages/input_policy_template_invalid/manifest.yml b/test/packages/input_policy_template_invalid/manifest.yml new file mode 100644 index 000000000..440aefebe --- /dev/null +++ b/test/packages/input_policy_template_invalid/manifest.yml @@ -0,0 +1,46 @@ +format_version: 3.6.0 +name: input_policy_template_valid +title: "New Package" +version: 0.0.1 +source: + license: "Elastic-2.0" +description: "This is a new package." 
+type: input +categories: + - custom +conditions: + kibana: + version: "^9.1.3" + elastic: + subscription: "basic" +screenshots: + - src: /img/sample-screenshot.png + title: Sample screenshot + size: 600x600 + type: image/png +icons: + - src: /img/sample-logo.svg + title: Sample logo + size: 32x32 + type: image/svg+xml +policy_templates: + - name: sample + type: logs + title: Sample logs + description: Collect sample logs + input: logfile + template_path: missing.yml.hbs + vars: + - name: paths + type: text + title: Paths + multi: true + default: + - /var/log/*.log +elasticsearch: + index_template: + mappings: + subobjects: false +owner: + github: elastic/integrations + type: elastic diff --git a/test/packages/input_policy_template_valid/LICENSE.txt b/test/packages/input_policy_template_valid/LICENSE.txt new file mode 100644 index 000000000..809108b85 --- /dev/null +++ b/test/packages/input_policy_template_valid/LICENSE.txt @@ -0,0 +1,93 @@ +Elastic License 2.0 + +URL: https://www.elastic.co/licensing/elastic-license + +## Acceptance + +By using the software, you agree to all of the terms and conditions below. + +## Copyright License + +The licensor grants you a non-exclusive, royalty-free, worldwide, +non-sublicensable, non-transferable license to use, copy, distribute, make +available, and prepare derivative works of the software, in each case subject to +the limitations and conditions below. + +## Limitations + +You may not provide the software to third parties as a hosted or managed +service, where the service provides users with access to any substantial set of +the features or functionality of the software. + +You may not move, change, disable, or circumvent the license key functionality +in the software, and you may not remove or obscure any functionality in the +software that is protected by the license key. + +You may not alter, remove, or obscure any licensing, copyright, or other notices +of the licensor in the software. Any use of the licensor’s trademarks is subject +to applicable law. + +## Patents + +The licensor grants you a license, under any patent claims the licensor can +license, or becomes able to license, to make, have made, use, sell, offer for +sale, import and have imported the software, in each case subject to the +limitations and conditions in this license. This license does not cover any +patent claims that you cause to be infringed by modifications or additions to +the software. If you or your company make any written claim that the software +infringes or contributes to infringement of any patent, your patent license for +the software granted under these terms ends immediately. If your company makes +such a claim, your patent license ends immediately for work on behalf of your +company. + +## Notices + +You must ensure that anyone who gets a copy of any part of the software from you +also gets a copy of these terms. + +If you modify the software, you must include in any modified copies of the +software prominent notices stating that you have modified the software. + +## No Other Rights + +These terms do not imply any licenses other than those expressly granted in +these terms. + +## Termination + +If you use the software in violation of these terms, such use is not licensed, +and your licenses will automatically terminate. If the licensor provides you +with a notice of your violation, and you cease all violation of this license no +later than 30 days after you receive that notice, your licenses will be +reinstated retroactively. 
However, if you violate these terms after such +reinstatement, any additional violation of these terms will cause your licenses +to terminate automatically and permanently. + +## No Liability + +*As far as the law allows, the software comes as is, without any warranty or +condition, and the licensor will not be liable to you for any damages arising +out of these terms or the use or nature of the software, under any kind of +legal claim.* + +## Definitions + +The **licensor** is the entity offering these terms, and the **software** is the +software the licensor makes available under these terms, including any portion +of it. + +**you** refers to the individual or entity agreeing to these terms. + +**your company** is any legal entity, sole proprietorship, or other kind of +organization that you work for, plus all organizations that have control over, +are under the control of, or are under common control with that +organization. **control** means ownership of substantially all the assets of an +entity, or the power to direct its management and policies by vote, contract, or +otherwise. Control can be direct or indirect. + +**your licenses** are all the licenses granted to you for the software under +these terms. + +**use** means anything you do with the software requiring one of your licenses. + +**trademark** means trademarks, service marks, and similar rights. diff --git a/test/packages/input_policy_template_valid/_dev/build/docs/README.md b/test/packages/input_policy_template_valid/_dev/build/docs/README.md new file mode 100644 index 000000000..12bb2d9b0 --- /dev/null +++ b/test/packages/input_policy_template_valid/_dev/build/docs/README.md @@ -0,0 +1,101 @@ +{{- generatedHeader }} +{{/* +This template can be used as a starting point for writing documentation for your new integration. For each section, fill in the details +described in the comments. + +Find more detailed documentation guidelines in https://www.elastic.co/docs/extend/integrations/documentation-guidelines +*/}} +# New Package Integration for Elastic + +## Overview +{{/* Complete this section with a short summary of what data this integration collects and what use cases it enables */}} +The New Package integration for Elastic enables collection of ... +This integration facilitates ... + +### Compatibility +{{/* Complete this section with information on what 3rd party software or hardware versions this integration is compatible with */}} +This integration is compatible with ... + +### How it works +{{/* Add a high level overview on how this integration works. For example, does it collect data from API calls or recieving data from a network or file.*/}} + +## What data does this integration collect? +{{/* Complete this section with information on what types of data the integration collects, and link to reference documentation if available */}} +The {{.Manifest.Title}} integration collects log messages of the following types: +* ... + +### Supported use cases +{{/* Add details on the use cases that can be enabled by using this integration. Explain why a user would want to install and use this integration. */}} + +## What do I need to use this integration? +{{/* List any vendor-specific prerequisites needed before starting to install the integration. */}} + +## How do I deploy this integration? + +### Agent-based deployment + +Elastic Agent must be installed. For more details, check the Elastic Agent [installation instructions](docs-content://reference/fleet/install-elastic-agents.md). 
You can install only one Elastic Agent per host. + +Elastic Agent is required to stream data from the syslog or log file receiver and ship the data to Elastic, where the events will then be processed via the integration's ingest pipelines. + +{{/* If agentless is available for this integration, we'll want to include that here as well. +### Agentless deployment + +Agentless deployments are only supported in Elastic Serverless and Elastic Cloud environments. Agentless deployments provide a means to ingest data while avoiding the orchestration, management, and maintenance needs associated with standard ingest infrastructure. Using an agentless deployment makes manual agent deployment unnecessary, allowing you to focus on your data instead of the agent that collects it. + +For more information, refer to [Agentless integrations](https://www.elastic.co/guide/en/serverless/current/security-agentless-integrations.html) and [Agentless integrations FAQ](https://www.elastic.co/guide/en/serverless/current/agentless-integration-troubleshooting.html) +*/}} + +### Onboard / configure +{{/* List the steps that will need to be followed in order to completely set up a working inte completely set up a working integration. +For integrations that support multiple input types, be sure to add steps for all inputs. +*/}} + +### Validation +{{/* How can the user test whether the integration is working? Including example commands or test files if applicable */}} + +## Troubleshooting + +For help with Elastic ingest tools, check [Common problems](https://www.elastic.co/docs/troubleshoot/ingest/fleet/common-problems). +{{/* +Add any vendor specific troubleshooting here. + +Are there common issues or “gotchas” for deploying this integration? If so, how can they be resolved? +If applicable, links to the third-party software’s troubleshooting documentation. +*/}} + +## Scaling + +For more information on architectures that can be used for scaling this integration, check the [Ingest Architectures](https://www.elastic.co/docs/manage-data/ingest/ingest-reference-architectures) documentation. +{{/* Add any vendor specific scaling information here */}} + +## Reference +{{/* Repeat for each data stream of the current type +### {Data stream name} + +The `{data stream name}` data stream provides events from {source} of the following types: {list types}. + +For each data_stream_name, include an optional summary of the datastream, the exported fields reference table and the sample event. + +The fields template function will be replaced by a generated list of all fields from the `fields/` directory of the data stream when building the integration. + +#### {data stream name} fields + +To include a generated list of fields from the `fields/` directory, uncomment and use: +{{ fields "data_stream_name" }} + +The event template function will be replace by a sample event, taken from `sample_event.json`, when building this integration. + +To include a sample event from `sample_event.json`, uncomment and use: +{{ event "data_stream_name" }} + +*/}} + +### Inputs used +{{/* All inputs used by this package will be automatically listed here. */}} +{{ inputDocs }} + +### API usage +{{/* For integrations that use APIs to collect data, document all the APIs that are used, and link to relevent information */}} +These APIs are used with this integration: +* ... 
diff --git a/test/packages/input_policy_template_valid/agent/input/input.yml.hbs b/test/packages/input_policy_template_valid/agent/input/input.yml.hbs new file mode 100644 index 000000000..bbfe99b59 --- /dev/null +++ b/test/packages/input_policy_template_valid/agent/input/input.yml.hbs @@ -0,0 +1,10 @@ +data_stream: + dataset: {{data_stream.dataset}} +paths: +{{#each paths as |path i|}} + - {{path}} +{{/each}} +exclude_files: [".gz$"] +processors: + - add_locale: ~ + diff --git a/test/packages/input_policy_template_valid/changelog.yml b/test/packages/input_policy_template_valid/changelog.yml new file mode 100644 index 000000000..bb0320a52 --- /dev/null +++ b/test/packages/input_policy_template_valid/changelog.yml @@ -0,0 +1,6 @@ +# newer versions go on top +- version: "0.0.1" + changes: + - description: Initial draft of the package + type: enhancement + link: https://github.com/elastic/integrations/pull/1 # FIXME Replace with the real PR link diff --git a/test/packages/input_policy_template_valid/docs/README.md b/test/packages/input_policy_template_valid/docs/README.md new file mode 100644 index 000000000..12bb2d9b0 --- /dev/null +++ b/test/packages/input_policy_template_valid/docs/README.md @@ -0,0 +1,101 @@ +{{- generatedHeader }} +{{/* +This template can be used as a starting point for writing documentation for your new integration. For each section, fill in the details +described in the comments. + +Find more detailed documentation guidelines in https://www.elastic.co/docs/extend/integrations/documentation-guidelines +*/}} +# New Package Integration for Elastic + +## Overview +{{/* Complete this section with a short summary of what data this integration collects and what use cases it enables */}} +The New Package integration for Elastic enables collection of ... +This integration facilitates ... + +### Compatibility +{{/* Complete this section with information on what 3rd party software or hardware versions this integration is compatible with */}} +This integration is compatible with ... + +### How it works +{{/* Add a high level overview on how this integration works. For example, does it collect data from API calls or recieving data from a network or file.*/}} + +## What data does this integration collect? +{{/* Complete this section with information on what types of data the integration collects, and link to reference documentation if available */}} +The {{.Manifest.Title}} integration collects log messages of the following types: +* ... + +### Supported use cases +{{/* Add details on the use cases that can be enabled by using this integration. Explain why a user would want to install and use this integration. */}} + +## What do I need to use this integration? +{{/* List any vendor-specific prerequisites needed before starting to install the integration. */}} + +## How do I deploy this integration? + +### Agent-based deployment + +Elastic Agent must be installed. For more details, check the Elastic Agent [installation instructions](docs-content://reference/fleet/install-elastic-agents.md). You can install only one Elastic Agent per host. + +Elastic Agent is required to stream data from the syslog or log file receiver and ship the data to Elastic, where the events will then be processed via the integration's ingest pipelines. + +{{/* If agentless is available for this integration, we'll want to include that here as well. +### Agentless deployment + +Agentless deployments are only supported in Elastic Serverless and Elastic Cloud environments. 
Agentless deployments provide a means to ingest data while avoiding the orchestration, management, and maintenance needs associated with standard ingest infrastructure. Using an agentless deployment makes manual agent deployment unnecessary, allowing you to focus on your data instead of the agent that collects it. + +For more information, refer to [Agentless integrations](https://www.elastic.co/guide/en/serverless/current/security-agentless-integrations.html) and [Agentless integrations FAQ](https://www.elastic.co/guide/en/serverless/current/agentless-integration-troubleshooting.html) +*/}} + +### Onboard / configure +{{/* List the steps that will need to be followed in order to completely set up a working inte completely set up a working integration. +For integrations that support multiple input types, be sure to add steps for all inputs. +*/}} + +### Validation +{{/* How can the user test whether the integration is working? Including example commands or test files if applicable */}} + +## Troubleshooting + +For help with Elastic ingest tools, check [Common problems](https://www.elastic.co/docs/troubleshoot/ingest/fleet/common-problems). +{{/* +Add any vendor specific troubleshooting here. + +Are there common issues or “gotchas” for deploying this integration? If so, how can they be resolved? +If applicable, links to the third-party software’s troubleshooting documentation. +*/}} + +## Scaling + +For more information on architectures that can be used for scaling this integration, check the [Ingest Architectures](https://www.elastic.co/docs/manage-data/ingest/ingest-reference-architectures) documentation. +{{/* Add any vendor specific scaling information here */}} + +## Reference +{{/* Repeat for each data stream of the current type +### {Data stream name} + +The `{data stream name}` data stream provides events from {source} of the following types: {list types}. + +For each data_stream_name, include an optional summary of the datastream, the exported fields reference table and the sample event. + +The fields template function will be replaced by a generated list of all fields from the `fields/` directory of the data stream when building the integration. + +#### {data stream name} fields + +To include a generated list of fields from the `fields/` directory, uncomment and use: +{{ fields "data_stream_name" }} + +The event template function will be replace by a sample event, taken from `sample_event.json`, when building this integration. + +To include a sample event from `sample_event.json`, uncomment and use: +{{ event "data_stream_name" }} + +*/}} + +### Inputs used +{{/* All inputs used by this package will be automatically listed here. */}} +{{ inputDocs }} + +### API usage +{{/* For integrations that use APIs to collect data, document all the APIs that are used, and link to relevent information */}} +These APIs are used with this integration: +* ... diff --git a/test/packages/input_policy_template_valid/fields/base-fields.yml b/test/packages/input_policy_template_valid/fields/base-fields.yml new file mode 100644 index 000000000..7c798f453 --- /dev/null +++ b/test/packages/input_policy_template_valid/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: '@timestamp' + type: date + description: Event timestamp. 
diff --git a/test/packages/input_policy_template_valid/img/sample-logo.svg b/test/packages/input_policy_template_valid/img/sample-logo.svg new file mode 100644 index 000000000..6268dd88f --- /dev/null +++ b/test/packages/input_policy_template_valid/img/sample-logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/test/packages/input_policy_template_valid/img/sample-screenshot.png b/test/packages/input_policy_template_valid/img/sample-screenshot.png new file mode 100644 index 000000000..d7a56a3ec Binary files /dev/null and b/test/packages/input_policy_template_valid/img/sample-screenshot.png differ diff --git a/test/packages/input_policy_template_valid/manifest.yml b/test/packages/input_policy_template_valid/manifest.yml new file mode 100644 index 000000000..582096e7c --- /dev/null +++ b/test/packages/input_policy_template_valid/manifest.yml @@ -0,0 +1,46 @@ +format_version: 3.6.0 +name: input_policy_template_valid +title: "New Package" +version: 0.0.1 +source: + license: "Elastic-2.0" +description: "This is a new package." +type: input +categories: + - custom +conditions: + kibana: + version: "^9.1.3" + elastic: + subscription: "basic" +screenshots: + - src: /img/sample-screenshot.png + title: Sample screenshot + size: 600x600 + type: image/png +icons: + - src: /img/sample-logo.svg + title: Sample logo + size: 32x32 + type: image/svg+xml +policy_templates: + - name: sample + type: logs + title: Sample logs + description: Collect sample logs + input: logfile + template_path: input.yml.hbs + vars: + - name: paths + type: text + title: Paths + multi: true + default: + - /var/log/*.log +elasticsearch: + index_template: + mappings: + subobjects: false +owner: + github: elastic/integrations + type: elastic diff --git a/test/packages/input_template/manifest.yml b/test/packages/input_template/manifest.yml index 95095dcd1..897386057 100644 --- a/test/packages/input_template/manifest.yml +++ b/test/packages/input_template/manifest.yml @@ -15,6 +15,6 @@ policy_templates: - type: apm title: Collect traces description: Collect traces - template_path: ./agent/input/template.yml.hbs + template_path: template.yml.hbs owner: github: elastic/foobar \ No newline at end of file diff --git a/test/packages/integration_policy_template_invalid/LICENSE.txt b/test/packages/integration_policy_template_invalid/LICENSE.txt new file mode 100644 index 000000000..809108b85 --- /dev/null +++ b/test/packages/integration_policy_template_invalid/LICENSE.txt @@ -0,0 +1,93 @@ +Elastic License 2.0 + +URL: https://www.elastic.co/licensing/elastic-license + +## Acceptance + +By using the software, you agree to all of the terms and conditions below. + +## Copyright License + +The licensor grants you a non-exclusive, royalty-free, worldwide, +non-sublicensable, non-transferable license to use, copy, distribute, make +available, and prepare derivative works of the software, in each case subject to +the limitations and conditions below. + +## Limitations + +You may not provide the software to third parties as a hosted or managed +service, where the service provides users with access to any substantial set of +the features or functionality of the software. + +You may not move, change, disable, or circumvent the license key functionality +in the software, and you may not remove or obscure any functionality in the +software that is protected by the license key. + +You may not alter, remove, or obscure any licensing, copyright, or other notices +of the licensor in the software. 
Any use of the licensor’s trademarks is subject +to applicable law. + +## Patents + +The licensor grants you a license, under any patent claims the licensor can +license, or becomes able to license, to make, have made, use, sell, offer for +sale, import and have imported the software, in each case subject to the +limitations and conditions in this license. This license does not cover any +patent claims that you cause to be infringed by modifications or additions to +the software. If you or your company make any written claim that the software +infringes or contributes to infringement of any patent, your patent license for +the software granted under these terms ends immediately. If your company makes +such a claim, your patent license ends immediately for work on behalf of your +company. + +## Notices + +You must ensure that anyone who gets a copy of any part of the software from you +also gets a copy of these terms. + +If you modify the software, you must include in any modified copies of the +software prominent notices stating that you have modified the software. + +## No Other Rights + +These terms do not imply any licenses other than those expressly granted in +these terms. + +## Termination + +If you use the software in violation of these terms, such use is not licensed, +and your licenses will automatically terminate. If the licensor provides you +with a notice of your violation, and you cease all violation of this license no +later than 30 days after you receive that notice, your licenses will be +reinstated retroactively. However, if you violate these terms after such +reinstatement, any additional violation of these terms will cause your licenses +to terminate automatically and permanently. + +## No Liability + +*As far as the law allows, the software comes as is, without any warranty or +condition, and the licensor will not be liable to you for any damages arising +out of these terms or the use or nature of the software, under any kind of +legal claim.* + +## Definitions + +The **licensor** is the entity offering these terms, and the **software** is the +software the licensor makes available under these terms, including any portion +of it. + +**you** refers to the individual or entity agreeing to these terms. + +**your company** is any legal entity, sole proprietorship, or other kind of +organization that you work for, plus all organizations that have control over, +are under the control of, or are under common control with that +organization. **control** means ownership of substantially all the assets of an +entity, or the power to direct its management and policies by vote, contract, or +otherwise. Control can be direct or indirect. + +**your licenses** are all the licenses granted to you for the software under +these terms. + +**use** means anything you do with the software requiring one of your licenses. + +**trademark** means trademarks, service marks, and similar rights. diff --git a/test/packages/integration_policy_template_invalid/_dev/build/docs/README.md b/test/packages/integration_policy_template_invalid/_dev/build/docs/README.md new file mode 100644 index 000000000..12bb2d9b0 --- /dev/null +++ b/test/packages/integration_policy_template_invalid/_dev/build/docs/README.md @@ -0,0 +1,101 @@ +{{- generatedHeader }} +{{/* +This template can be used as a starting point for writing documentation for your new integration. For each section, fill in the details +described in the comments. 
+ +Find more detailed documentation guidelines in https://www.elastic.co/docs/extend/integrations/documentation-guidelines +*/}} +# New Package Integration for Elastic + +## Overview +{{/* Complete this section with a short summary of what data this integration collects and what use cases it enables */}} +The New Package integration for Elastic enables collection of ... +This integration facilitates ... + +### Compatibility +{{/* Complete this section with information on what 3rd party software or hardware versions this integration is compatible with */}} +This integration is compatible with ... + +### How it works +{{/* Add a high level overview on how this integration works. For example, does it collect data from API calls or recieving data from a network or file.*/}} + +## What data does this integration collect? +{{/* Complete this section with information on what types of data the integration collects, and link to reference documentation if available */}} +The {{.Manifest.Title}} integration collects log messages of the following types: +* ... + +### Supported use cases +{{/* Add details on the use cases that can be enabled by using this integration. Explain why a user would want to install and use this integration. */}} + +## What do I need to use this integration? +{{/* List any vendor-specific prerequisites needed before starting to install the integration. */}} + +## How do I deploy this integration? + +### Agent-based deployment + +Elastic Agent must be installed. For more details, check the Elastic Agent [installation instructions](docs-content://reference/fleet/install-elastic-agents.md). You can install only one Elastic Agent per host. + +Elastic Agent is required to stream data from the syslog or log file receiver and ship the data to Elastic, where the events will then be processed via the integration's ingest pipelines. + +{{/* If agentless is available for this integration, we'll want to include that here as well. +### Agentless deployment + +Agentless deployments are only supported in Elastic Serverless and Elastic Cloud environments. Agentless deployments provide a means to ingest data while avoiding the orchestration, management, and maintenance needs associated with standard ingest infrastructure. Using an agentless deployment makes manual agent deployment unnecessary, allowing you to focus on your data instead of the agent that collects it. + +For more information, refer to [Agentless integrations](https://www.elastic.co/guide/en/serverless/current/security-agentless-integrations.html) and [Agentless integrations FAQ](https://www.elastic.co/guide/en/serverless/current/agentless-integration-troubleshooting.html) +*/}} + +### Onboard / configure +{{/* List the steps that will need to be followed in order to completely set up a working inte completely set up a working integration. +For integrations that support multiple input types, be sure to add steps for all inputs. +*/}} + +### Validation +{{/* How can the user test whether the integration is working? Including example commands or test files if applicable */}} + +## Troubleshooting + +For help with Elastic ingest tools, check [Common problems](https://www.elastic.co/docs/troubleshoot/ingest/fleet/common-problems). +{{/* +Add any vendor specific troubleshooting here. + +Are there common issues or “gotchas” for deploying this integration? If so, how can they be resolved? +If applicable, links to the third-party software’s troubleshooting documentation. 
+*/}} + +## Scaling + +For more information on architectures that can be used for scaling this integration, check the [Ingest Architectures](https://www.elastic.co/docs/manage-data/ingest/ingest-reference-architectures) documentation. +{{/* Add any vendor specific scaling information here */}} + +## Reference +{{/* Repeat for each data stream of the current type +### {Data stream name} + +The `{data stream name}` data stream provides events from {source} of the following types: {list types}. + +For each data_stream_name, include an optional summary of the datastream, the exported fields reference table and the sample event. + +The fields template function will be replaced by a generated list of all fields from the `fields/` directory of the data stream when building the integration. + +#### {data stream name} fields + +To include a generated list of fields from the `fields/` directory, uncomment and use: +{{ fields "data_stream_name" }} + +The event template function will be replace by a sample event, taken from `sample_event.json`, when building this integration. + +To include a sample event from `sample_event.json`, uncomment and use: +{{ event "data_stream_name" }} + +*/}} + +### Inputs used +{{/* All inputs used by this package will be automatically listed here. */}} +{{ inputDocs }} + +### API usage +{{/* For integrations that use APIs to collect data, document all the APIs that are used, and link to relevent information */}} +These APIs are used with this integration: +* ... diff --git a/test/packages/integration_policy_template_invalid/agent/input/template.yml.hbs b/test/packages/integration_policy_template_invalid/agent/input/template.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/integration_policy_template_invalid/changelog.yml b/test/packages/integration_policy_template_invalid/changelog.yml new file mode 100644 index 000000000..bb0320a52 --- /dev/null +++ b/test/packages/integration_policy_template_invalid/changelog.yml @@ -0,0 +1,6 @@ +# newer versions go on top +- version: "0.0.1" + changes: + - description: Initial draft of the package + type: enhancement + link: https://github.com/elastic/integrations/pull/1 # FIXME Replace with the real PR link diff --git a/test/packages/integration_policy_template_invalid/docs/README.md b/test/packages/integration_policy_template_invalid/docs/README.md new file mode 100644 index 000000000..12bb2d9b0 --- /dev/null +++ b/test/packages/integration_policy_template_invalid/docs/README.md @@ -0,0 +1,101 @@ +{{- generatedHeader }} +{{/* +This template can be used as a starting point for writing documentation for your new integration. For each section, fill in the details +described in the comments. + +Find more detailed documentation guidelines in https://www.elastic.co/docs/extend/integrations/documentation-guidelines +*/}} +# New Package Integration for Elastic + +## Overview +{{/* Complete this section with a short summary of what data this integration collects and what use cases it enables */}} +The New Package integration for Elastic enables collection of ... +This integration facilitates ... + +### Compatibility +{{/* Complete this section with information on what 3rd party software or hardware versions this integration is compatible with */}} +This integration is compatible with ... + +### How it works +{{/* Add a high level overview on how this integration works. For example, does it collect data from API calls or recieving data from a network or file.*/}} + +## What data does this integration collect? 
+{{/* Complete this section with information on what types of data the integration collects, and link to reference documentation if available */}} +The {{.Manifest.Title}} integration collects log messages of the following types: +* ... + +### Supported use cases +{{/* Add details on the use cases that can be enabled by using this integration. Explain why a user would want to install and use this integration. */}} + +## What do I need to use this integration? +{{/* List any vendor-specific prerequisites needed before starting to install the integration. */}} + +## How do I deploy this integration? + +### Agent-based deployment + +Elastic Agent must be installed. For more details, check the Elastic Agent [installation instructions](docs-content://reference/fleet/install-elastic-agents.md). You can install only one Elastic Agent per host. + +Elastic Agent is required to stream data from the syslog or log file receiver and ship the data to Elastic, where the events will then be processed via the integration's ingest pipelines. + +{{/* If agentless is available for this integration, we'll want to include that here as well. +### Agentless deployment + +Agentless deployments are only supported in Elastic Serverless and Elastic Cloud environments. Agentless deployments provide a means to ingest data while avoiding the orchestration, management, and maintenance needs associated with standard ingest infrastructure. Using an agentless deployment makes manual agent deployment unnecessary, allowing you to focus on your data instead of the agent that collects it. + +For more information, refer to [Agentless integrations](https://www.elastic.co/guide/en/serverless/current/security-agentless-integrations.html) and [Agentless integrations FAQ](https://www.elastic.co/guide/en/serverless/current/agentless-integration-troubleshooting.html) +*/}} + +### Onboard / configure +{{/* List the steps that will need to be followed in order to completely set up a working inte completely set up a working integration. +For integrations that support multiple input types, be sure to add steps for all inputs. +*/}} + +### Validation +{{/* How can the user test whether the integration is working? Including example commands or test files if applicable */}} + +## Troubleshooting + +For help with Elastic ingest tools, check [Common problems](https://www.elastic.co/docs/troubleshoot/ingest/fleet/common-problems). +{{/* +Add any vendor specific troubleshooting here. + +Are there common issues or “gotchas” for deploying this integration? If so, how can they be resolved? +If applicable, links to the third-party software’s troubleshooting documentation. +*/}} + +## Scaling + +For more information on architectures that can be used for scaling this integration, check the [Ingest Architectures](https://www.elastic.co/docs/manage-data/ingest/ingest-reference-architectures) documentation. +{{/* Add any vendor specific scaling information here */}} + +## Reference +{{/* Repeat for each data stream of the current type +### {Data stream name} + +The `{data stream name}` data stream provides events from {source} of the following types: {list types}. + +For each data_stream_name, include an optional summary of the datastream, the exported fields reference table and the sample event. + +The fields template function will be replaced by a generated list of all fields from the `fields/` directory of the data stream when building the integration. 
+ +#### {data stream name} fields + +To include a generated list of fields from the `fields/` directory, uncomment and use: +{{ fields "data_stream_name" }} + +The event template function will be replaced by a sample event, taken from `sample_event.json`, when building this integration. + +To include a sample event from `sample_event.json`, uncomment and use: +{{ event "data_stream_name" }} + +*/}} + +### Inputs used +{{/* All inputs used by this package will be automatically listed here. */}} +{{ inputDocs }} + +### API usage +{{/* For integrations that use APIs to collect data, document all the APIs that are used, and link to relevant information */}} +These APIs are used with this integration: +* ... diff --git a/test/packages/integration_policy_template_invalid/img/sample-logo.svg b/test/packages/integration_policy_template_invalid/img/sample-logo.svg new file mode 100644 index 000000000..6268dd88f --- /dev/null +++ b/test/packages/integration_policy_template_invalid/img/sample-logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/test/packages/integration_policy_template_invalid/img/sample-screenshot.png b/test/packages/integration_policy_template_invalid/img/sample-screenshot.png new file mode 100644 index 000000000..d7a56a3ec Binary files /dev/null and b/test/packages/integration_policy_template_invalid/img/sample-screenshot.png differ diff --git a/test/packages/integration_policy_template_invalid/manifest.yml b/test/packages/integration_policy_template_invalid/manifest.yml new file mode 100644 index 000000000..f4adde658 --- /dev/null +++ b/test/packages/integration_policy_template_invalid/manifest.yml @@ -0,0 +1,37 @@ +format_version: 3.6.0 +name: integration_policy_template_invalid +title: "New Package" +version: 0.0.1 +source: + license: "Elastic-2.0" +description: "This is a new package." +type: integration +categories: + - custom +conditions: + kibana: + version: "^9.1.3" + elastic: + subscription: "basic" +screenshots: + - src: /img/sample-screenshot.png + title: Sample screenshot + size: 600x600 + type: image/png +icons: + - src: /img/sample-logo.svg + title: Sample logo + size: 32x32 + type: image/svg+xml +policy_templates: + - name: sample + title: Sample logs + description: Collect sample logs + inputs: + - type: logfile + title: Collect sample logs from instances + description: Collecting sample logs + template_path: missing.yml.hbs +owner: + github: elastic/integrations + type: elastic diff --git a/test/packages/integration_policy_template_invalid/sample_event.json b/test/packages/integration_policy_template_invalid/sample_event.json new file mode 100644 index 000000000..2cf93e50b --- /dev/null +++ b/test/packages/integration_policy_template_invalid/sample_event.json @@ -0,0 +1,3 @@ +{ + "description": "This is an example sample-event for New Package. Replace it with a real sample event. Hint: If system tests exist, running `elastic-package test system --generate` will generate this file." +} diff --git a/test/packages/integration_policy_template_valid/LICENSE.txt b/test/packages/integration_policy_template_valid/LICENSE.txt new file mode 100644 index 000000000..809108b85 --- /dev/null +++ b/test/packages/integration_policy_template_valid/LICENSE.txt @@ -0,0 +1,93 @@ +Elastic License 2.0 + +URL: https://www.elastic.co/licensing/elastic-license + +## Acceptance + +By using the software, you agree to all of the terms and conditions below.
+ +## Copyright License + +The licensor grants you a non-exclusive, royalty-free, worldwide, +non-sublicensable, non-transferable license to use, copy, distribute, make +available, and prepare derivative works of the software, in each case subject to +the limitations and conditions below. + +## Limitations + +You may not provide the software to third parties as a hosted or managed +service, where the service provides users with access to any substantial set of +the features or functionality of the software. + +You may not move, change, disable, or circumvent the license key functionality +in the software, and you may not remove or obscure any functionality in the +software that is protected by the license key. + +You may not alter, remove, or obscure any licensing, copyright, or other notices +of the licensor in the software. Any use of the licensor’s trademarks is subject +to applicable law. + +## Patents + +The licensor grants you a license, under any patent claims the licensor can +license, or becomes able to license, to make, have made, use, sell, offer for +sale, import and have imported the software, in each case subject to the +limitations and conditions in this license. This license does not cover any +patent claims that you cause to be infringed by modifications or additions to +the software. If you or your company make any written claim that the software +infringes or contributes to infringement of any patent, your patent license for +the software granted under these terms ends immediately. If your company makes +such a claim, your patent license ends immediately for work on behalf of your +company. + +## Notices + +You must ensure that anyone who gets a copy of any part of the software from you +also gets a copy of these terms. + +If you modify the software, you must include in any modified copies of the +software prominent notices stating that you have modified the software. + +## No Other Rights + +These terms do not imply any licenses other than those expressly granted in +these terms. + +## Termination + +If you use the software in violation of these terms, such use is not licensed, +and your licenses will automatically terminate. If the licensor provides you +with a notice of your violation, and you cease all violation of this license no +later than 30 days after you receive that notice, your licenses will be +reinstated retroactively. However, if you violate these terms after such +reinstatement, any additional violation of these terms will cause your licenses +to terminate automatically and permanently. + +## No Liability + +*As far as the law allows, the software comes as is, without any warranty or +condition, and the licensor will not be liable to you for any damages arising +out of these terms or the use or nature of the software, under any kind of +legal claim.* + +## Definitions + +The **licensor** is the entity offering these terms, and the **software** is the +software the licensor makes available under these terms, including any portion +of it. + +**you** refers to the individual or entity agreeing to these terms. + +**your company** is any legal entity, sole proprietorship, or other kind of +organization that you work for, plus all organizations that have control over, +are under the control of, or are under common control with that +organization. **control** means ownership of substantially all the assets of an +entity, or the power to direct its management and policies by vote, contract, or +otherwise. Control can be direct or indirect. 
+ +**your licenses** are all the licenses granted to you for the software under +these terms. + +**use** means anything you do with the software requiring one of your licenses. + +**trademark** means trademarks, service marks, and similar rights. diff --git a/test/packages/integration_policy_template_valid/_dev/build/docs/README.md b/test/packages/integration_policy_template_valid/_dev/build/docs/README.md new file mode 100644 index 000000000..12bb2d9b0 --- /dev/null +++ b/test/packages/integration_policy_template_valid/_dev/build/docs/README.md @@ -0,0 +1,101 @@ +{{- generatedHeader }} +{{/* +This template can be used as a starting point for writing documentation for your new integration. For each section, fill in the details +described in the comments. + +Find more detailed documentation guidelines in https://www.elastic.co/docs/extend/integrations/documentation-guidelines +*/}} +# New Package Integration for Elastic + +## Overview +{{/* Complete this section with a short summary of what data this integration collects and what use cases it enables */}} +The New Package integration for Elastic enables collection of ... +This integration facilitates ... + +### Compatibility +{{/* Complete this section with information on what 3rd party software or hardware versions this integration is compatible with */}} +This integration is compatible with ... + +### How it works +{{/* Add a high level overview on how this integration works. For example, does it collect data from API calls or recieving data from a network or file.*/}} + +## What data does this integration collect? +{{/* Complete this section with information on what types of data the integration collects, and link to reference documentation if available */}} +The {{.Manifest.Title}} integration collects log messages of the following types: +* ... + +### Supported use cases +{{/* Add details on the use cases that can be enabled by using this integration. Explain why a user would want to install and use this integration. */}} + +## What do I need to use this integration? +{{/* List any vendor-specific prerequisites needed before starting to install the integration. */}} + +## How do I deploy this integration? + +### Agent-based deployment + +Elastic Agent must be installed. For more details, check the Elastic Agent [installation instructions](docs-content://reference/fleet/install-elastic-agents.md). You can install only one Elastic Agent per host. + +Elastic Agent is required to stream data from the syslog or log file receiver and ship the data to Elastic, where the events will then be processed via the integration's ingest pipelines. + +{{/* If agentless is available for this integration, we'll want to include that here as well. +### Agentless deployment + +Agentless deployments are only supported in Elastic Serverless and Elastic Cloud environments. Agentless deployments provide a means to ingest data while avoiding the orchestration, management, and maintenance needs associated with standard ingest infrastructure. Using an agentless deployment makes manual agent deployment unnecessary, allowing you to focus on your data instead of the agent that collects it. 
+ +For more information, refer to [Agentless integrations](https://www.elastic.co/guide/en/serverless/current/security-agentless-integrations.html) and [Agentless integrations FAQ](https://www.elastic.co/guide/en/serverless/current/agentless-integration-troubleshooting.html) +*/}} + +### Onboard / configure +{{/* List the steps that will need to be followed in order to completely set up a working inte completely set up a working integration. +For integrations that support multiple input types, be sure to add steps for all inputs. +*/}} + +### Validation +{{/* How can the user test whether the integration is working? Including example commands or test files if applicable */}} + +## Troubleshooting + +For help with Elastic ingest tools, check [Common problems](https://www.elastic.co/docs/troubleshoot/ingest/fleet/common-problems). +{{/* +Add any vendor specific troubleshooting here. + +Are there common issues or “gotchas” for deploying this integration? If so, how can they be resolved? +If applicable, links to the third-party software’s troubleshooting documentation. +*/}} + +## Scaling + +For more information on architectures that can be used for scaling this integration, check the [Ingest Architectures](https://www.elastic.co/docs/manage-data/ingest/ingest-reference-architectures) documentation. +{{/* Add any vendor specific scaling information here */}} + +## Reference +{{/* Repeat for each data stream of the current type +### {Data stream name} + +The `{data stream name}` data stream provides events from {source} of the following types: {list types}. + +For each data_stream_name, include an optional summary of the datastream, the exported fields reference table and the sample event. + +The fields template function will be replaced by a generated list of all fields from the `fields/` directory of the data stream when building the integration. + +#### {data stream name} fields + +To include a generated list of fields from the `fields/` directory, uncomment and use: +{{ fields "data_stream_name" }} + +The event template function will be replace by a sample event, taken from `sample_event.json`, when building this integration. + +To include a sample event from `sample_event.json`, uncomment and use: +{{ event "data_stream_name" }} + +*/}} + +### Inputs used +{{/* All inputs used by this package will be automatically listed here. */}} +{{ inputDocs }} + +### API usage +{{/* For integrations that use APIs to collect data, document all the APIs that are used, and link to relevent information */}} +These APIs are used with this integration: +* ... 
diff --git a/test/packages/integration_policy_template_valid/agent/input/template.yml.hbs b/test/packages/integration_policy_template_valid/agent/input/template.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/integration_policy_template_valid/changelog.yml b/test/packages/integration_policy_template_valid/changelog.yml new file mode 100644 index 000000000..bb0320a52 --- /dev/null +++ b/test/packages/integration_policy_template_valid/changelog.yml @@ -0,0 +1,6 @@ +# newer versions go on top +- version: "0.0.1" + changes: + - description: Initial draft of the package + type: enhancement + link: https://github.com/elastic/integrations/pull/1 # FIXME Replace with the real PR link diff --git a/test/packages/integration_policy_template_valid/docs/README.md b/test/packages/integration_policy_template_valid/docs/README.md new file mode 100644 index 000000000..12bb2d9b0 --- /dev/null +++ b/test/packages/integration_policy_template_valid/docs/README.md @@ -0,0 +1,101 @@ +{{- generatedHeader }} +{{/* +This template can be used as a starting point for writing documentation for your new integration. For each section, fill in the details +described in the comments. + +Find more detailed documentation guidelines in https://www.elastic.co/docs/extend/integrations/documentation-guidelines +*/}} +# New Package Integration for Elastic + +## Overview +{{/* Complete this section with a short summary of what data this integration collects and what use cases it enables */}} +The New Package integration for Elastic enables collection of ... +This integration facilitates ... + +### Compatibility +{{/* Complete this section with information on what 3rd party software or hardware versions this integration is compatible with */}} +This integration is compatible with ... + +### How it works +{{/* Add a high level overview on how this integration works. For example, does it collect data from API calls or recieving data from a network or file.*/}} + +## What data does this integration collect? +{{/* Complete this section with information on what types of data the integration collects, and link to reference documentation if available */}} +The {{.Manifest.Title}} integration collects log messages of the following types: +* ... + +### Supported use cases +{{/* Add details on the use cases that can be enabled by using this integration. Explain why a user would want to install and use this integration. */}} + +## What do I need to use this integration? +{{/* List any vendor-specific prerequisites needed before starting to install the integration. */}} + +## How do I deploy this integration? + +### Agent-based deployment + +Elastic Agent must be installed. For more details, check the Elastic Agent [installation instructions](docs-content://reference/fleet/install-elastic-agents.md). You can install only one Elastic Agent per host. + +Elastic Agent is required to stream data from the syslog or log file receiver and ship the data to Elastic, where the events will then be processed via the integration's ingest pipelines. + +{{/* If agentless is available for this integration, we'll want to include that here as well. +### Agentless deployment + +Agentless deployments are only supported in Elastic Serverless and Elastic Cloud environments. Agentless deployments provide a means to ingest data while avoiding the orchestration, management, and maintenance needs associated with standard ingest infrastructure. 
Using an agentless deployment makes manual agent deployment unnecessary, allowing you to focus on your data instead of the agent that collects it. + +For more information, refer to [Agentless integrations](https://www.elastic.co/guide/en/serverless/current/security-agentless-integrations.html) and [Agentless integrations FAQ](https://www.elastic.co/guide/en/serverless/current/agentless-integration-troubleshooting.html) +*/}} + +### Onboard / configure +{{/* List the steps that will need to be followed in order to completely set up a working inte completely set up a working integration. +For integrations that support multiple input types, be sure to add steps for all inputs. +*/}} + +### Validation +{{/* How can the user test whether the integration is working? Including example commands or test files if applicable */}} + +## Troubleshooting + +For help with Elastic ingest tools, check [Common problems](https://www.elastic.co/docs/troubleshoot/ingest/fleet/common-problems). +{{/* +Add any vendor specific troubleshooting here. + +Are there common issues or “gotchas” for deploying this integration? If so, how can they be resolved? +If applicable, links to the third-party software’s troubleshooting documentation. +*/}} + +## Scaling + +For more information on architectures that can be used for scaling this integration, check the [Ingest Architectures](https://www.elastic.co/docs/manage-data/ingest/ingest-reference-architectures) documentation. +{{/* Add any vendor specific scaling information here */}} + +## Reference +{{/* Repeat for each data stream of the current type +### {Data stream name} + +The `{data stream name}` data stream provides events from {source} of the following types: {list types}. + +For each data_stream_name, include an optional summary of the datastream, the exported fields reference table and the sample event. + +The fields template function will be replaced by a generated list of all fields from the `fields/` directory of the data stream when building the integration. + +#### {data stream name} fields + +To include a generated list of fields from the `fields/` directory, uncomment and use: +{{ fields "data_stream_name" }} + +The event template function will be replace by a sample event, taken from `sample_event.json`, when building this integration. + +To include a sample event from `sample_event.json`, uncomment and use: +{{ event "data_stream_name" }} + +*/}} + +### Inputs used +{{/* All inputs used by this package will be automatically listed here. */}} +{{ inputDocs }} + +### API usage +{{/* For integrations that use APIs to collect data, document all the APIs that are used, and link to relevent information */}} +These APIs are used with this integration: +* ... 
diff --git a/test/packages/integration_policy_template_valid/img/sample-logo.svg b/test/packages/integration_policy_template_valid/img/sample-logo.svg new file mode 100644 index 000000000..6268dd88f --- /dev/null +++ b/test/packages/integration_policy_template_valid/img/sample-logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/test/packages/integration_policy_template_valid/img/sample-screenshot.png b/test/packages/integration_policy_template_valid/img/sample-screenshot.png new file mode 100644 index 000000000..d7a56a3ec Binary files /dev/null and b/test/packages/integration_policy_template_valid/img/sample-screenshot.png differ diff --git a/test/packages/integration_policy_template_valid/manifest.yml b/test/packages/integration_policy_template_valid/manifest.yml new file mode 100644 index 000000000..4d690fb12 --- /dev/null +++ b/test/packages/integration_policy_template_valid/manifest.yml @@ -0,0 +1,37 @@ +format_version: 3.6.0 +name: integration_policy_template_valid +title: "New Package" +version: 0.0.1 +source: + license: "Elastic-2.0" +description: "This is a new package." +type: integration +categories: + - custom +conditions: + kibana: + version: "^9.1.3" + elastic: + subscription: "basic" +screenshots: + - src: /img/sample-screenshot.png + title: Sample screenshot + size: 600x600 + type: image/png +icons: + - src: /img/sample-logo.svg + title: Sample logo + size: 32x32 + type: image/svg+xml +policy_templates: + - name: sample + title: Sample logs + description: Collect sample logs + inputs: + - type: logfile + title: Collect sample logs from instances + description: Collecting sample logs + template_path: template.yml.hbs +owner: + github: elastic/integrations + type: elastic diff --git a/test/packages/integration_policy_template_valid/sample_event.json b/test/packages/integration_policy_template_valid/sample_event.json new file mode 100644 index 000000000..2cf93e50b --- /dev/null +++ b/test/packages/integration_policy_template_valid/sample_event.json @@ -0,0 +1,3 @@ +{ + "description": "This is an example sample-event for New Package. Replace it with a real sample event. Hint: If system tests exist, running `elastic-package test system --generate` will generate this file." 
+} diff --git a/test/packages/missing_image_files/data_stream/pe/agent/stream/stream.yml.hbs b/test/packages/missing_image_files/data_stream/pe/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/missing_pipeline_dashes/data_stream/foo/agent/stream/stream.yml.hbs b/test/packages/missing_pipeline_dashes/data_stream/foo/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/missing_required_fields/data_stream/foo/agent/stream/stream.yml.hbs b/test/packages/missing_required_fields/data_stream/foo/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/missing_required_fields_input/manifest.yml b/test/packages/missing_required_fields_input/manifest.yml index b87198d56..feadc48c3 100644 --- a/test/packages/missing_required_fields_input/manifest.yml +++ b/test/packages/missing_required_fields_input/manifest.yml @@ -27,6 +27,7 @@ policy_templates: input: log_file title: Sample logs description: Collect sample logs + template_path: input.yml.hbs vars: - name: paths required: true diff --git a/test/packages/stream_templates_invalid/LICENSE.txt b/test/packages/stream_templates_invalid/LICENSE.txt new file mode 100644 index 000000000..809108b85 --- /dev/null +++ b/test/packages/stream_templates_invalid/LICENSE.txt @@ -0,0 +1,93 @@ +Elastic License 2.0 + +URL: https://www.elastic.co/licensing/elastic-license + +## Acceptance + +By using the software, you agree to all of the terms and conditions below. + +## Copyright License + +The licensor grants you a non-exclusive, royalty-free, worldwide, +non-sublicensable, non-transferable license to use, copy, distribute, make +available, and prepare derivative works of the software, in each case subject to +the limitations and conditions below. + +## Limitations + +You may not provide the software to third parties as a hosted or managed +service, where the service provides users with access to any substantial set of +the features or functionality of the software. + +You may not move, change, disable, or circumvent the license key functionality +in the software, and you may not remove or obscure any functionality in the +software that is protected by the license key. + +You may not alter, remove, or obscure any licensing, copyright, or other notices +of the licensor in the software. Any use of the licensor’s trademarks is subject +to applicable law. + +## Patents + +The licensor grants you a license, under any patent claims the licensor can +license, or becomes able to license, to make, have made, use, sell, offer for +sale, import and have imported the software, in each case subject to the +limitations and conditions in this license. This license does not cover any +patent claims that you cause to be infringed by modifications or additions to +the software. If you or your company make any written claim that the software +infringes or contributes to infringement of any patent, your patent license for +the software granted under these terms ends immediately. If your company makes +such a claim, your patent license ends immediately for work on behalf of your +company. + +## Notices + +You must ensure that anyone who gets a copy of any part of the software from you +also gets a copy of these terms. + +If you modify the software, you must include in any modified copies of the +software prominent notices stating that you have modified the software. 
+ +## No Other Rights + +These terms do not imply any licenses other than those expressly granted in +these terms. + +## Termination + +If you use the software in violation of these terms, such use is not licensed, +and your licenses will automatically terminate. If the licensor provides you +with a notice of your violation, and you cease all violation of this license no +later than 30 days after you receive that notice, your licenses will be +reinstated retroactively. However, if you violate these terms after such +reinstatement, any additional violation of these terms will cause your licenses +to terminate automatically and permanently. + +## No Liability + +*As far as the law allows, the software comes as is, without any warranty or +condition, and the licensor will not be liable to you for any damages arising +out of these terms or the use or nature of the software, under any kind of +legal claim.* + +## Definitions + +The **licensor** is the entity offering these terms, and the **software** is the +software the licensor makes available under these terms, including any portion +of it. + +**you** refers to the individual or entity agreeing to these terms. + +**your company** is any legal entity, sole proprietorship, or other kind of +organization that you work for, plus all organizations that have control over, +are under the control of, or are under common control with that +organization. **control** means ownership of substantially all the assets of an +entity, or the power to direct its management and policies by vote, contract, or +otherwise. Control can be direct or indirect. + +**your licenses** are all the licenses granted to you for the software under +these terms. + +**use** means anything you do with the software requiring one of your licenses. + +**trademark** means trademarks, service marks, and similar rights. diff --git a/test/packages/stream_templates_invalid/_dev/build/docs/README.md b/test/packages/stream_templates_invalid/_dev/build/docs/README.md new file mode 100644 index 000000000..9830e75b7 --- /dev/null +++ b/test/packages/stream_templates_invalid/_dev/build/docs/README.md @@ -0,0 +1,101 @@ +{{- generatedHeader }} +{{/* +This template can be used as a starting point for writing documentation for your new integration. For each section, fill in the details +described in the comments. + +Find more detailed documentation guidelines in https://www.elastic.co/docs/extend/integrations/documentation-guidelines +*/}} +# Stream Templates Valid Test Integration for Elastic + +## Overview +{{/* Complete this section with a short summary of what data this integration collects and what use cases it enables */}} +The Stream Templates Valid Test integration for Elastic enables collection of ... +This integration facilitates ... + +### Compatibility +{{/* Complete this section with information on what 3rd party software or hardware versions this integration is compatible with */}} +This integration is compatible with ... + +### How it works +{{/* Add a high level overview on how this integration works. For example, does it collect data from API calls or recieving data from a network or file.*/}} + +## What data does this integration collect? +{{/* Complete this section with information on what types of data the integration collects, and link to reference documentation if available */}} +The {{.Manifest.Title}} integration collects log messages of the following types: +* ... + +### Supported use cases +{{/* Add details on the use cases that can be enabled by using this integration. 
Explain why a user would want to install and use this integration. */}} + +## What do I need to use this integration? +{{/* List any vendor-specific prerequisites needed before starting to install the integration. */}} + +## How do I deploy this integration? + +### Agent-based deployment + +Elastic Agent must be installed. For more details, check the Elastic Agent [installation instructions](docs-content://reference/fleet/install-elastic-agents.md). You can install only one Elastic Agent per host. + +Elastic Agent is required to stream data from the syslog or log file receiver and ship the data to Elastic, where the events will then be processed via the integration's ingest pipelines. + +{{/* If agentless is available for this integration, we'll want to include that here as well. +### Agentless deployment + +Agentless deployments are only supported in Elastic Serverless and Elastic Cloud environments. Agentless deployments provide a means to ingest data while avoiding the orchestration, management, and maintenance needs associated with standard ingest infrastructure. Using an agentless deployment makes manual agent deployment unnecessary, allowing you to focus on your data instead of the agent that collects it. + +For more information, refer to [Agentless integrations](https://www.elastic.co/guide/en/serverless/current/security-agentless-integrations.html) and [Agentless integrations FAQ](https://www.elastic.co/guide/en/serverless/current/agentless-integration-troubleshooting.html) +*/}} + +### Onboard / configure +{{/* List the steps that will need to be followed in order to completely set up a working inte completely set up a working integration. +For integrations that support multiple input types, be sure to add steps for all inputs. +*/}} + +### Validation +{{/* How can the user test whether the integration is working? Including example commands or test files if applicable */}} + +## Troubleshooting + +For help with Elastic ingest tools, check [Common problems](https://www.elastic.co/docs/troubleshoot/ingest/fleet/common-problems). +{{/* +Add any vendor specific troubleshooting here. + +Are there common issues or “gotchas” for deploying this integration? If so, how can they be resolved? +If applicable, links to the third-party software’s troubleshooting documentation. +*/}} + +## Scaling + +For more information on architectures that can be used for scaling this integration, check the [Ingest Architectures](https://www.elastic.co/docs/manage-data/ingest/ingest-reference-architectures) documentation. +{{/* Add any vendor specific scaling information here */}} + +## Reference +{{/* Repeat for each data stream of the current type +### {Data stream name} + +The `{data stream name}` data stream provides events from {source} of the following types: {list types}. + +For each data_stream_name, include an optional summary of the datastream, the exported fields reference table and the sample event. + +The fields template function will be replaced by a generated list of all fields from the `fields/` directory of the data stream when building the integration. + +#### {data stream name} fields + +To include a generated list of fields from the `fields/` directory, uncomment and use: +{{ fields "data_stream_name" }} + +The event template function will be replace by a sample event, taken from `sample_event.json`, when building this integration. 
+ +To include a sample event from `sample_event.json`, uncomment and use: +{{ event "data_stream_name" }} + +*/}} + +### Inputs used +{{/* All inputs used by this package will be automatically listed here. */}} +{{ inputDocs }} + +### API usage +{{/* For integrations that use APIs to collect data, document all the APIs that are used, and link to relevent information */}} +These APIs are used with this integration: +* ... diff --git a/test/packages/stream_templates_invalid/agent/input/template.yml.hbs b/test/packages/stream_templates_invalid/agent/input/template.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/stream_templates_invalid/changelog.yml b/test/packages/stream_templates_invalid/changelog.yml new file mode 100644 index 000000000..bb0320a52 --- /dev/null +++ b/test/packages/stream_templates_invalid/changelog.yml @@ -0,0 +1,6 @@ +# newer versions go on top +- version: "0.0.1" + changes: + - description: Initial draft of the package + type: enhancement + link: https://github.com/elastic/integrations/pull/1 # FIXME Replace with the real PR link diff --git a/test/packages/stream_templates_invalid/data_stream/test_stream/agent/stream/filestream.yml.hbs b/test/packages/stream_templates_invalid/data_stream/test_stream/agent/stream/filestream.yml.hbs new file mode 100644 index 000000000..3bede6328 --- /dev/null +++ b/test/packages/stream_templates_invalid/data_stream/test_stream/agent/stream/filestream.yml.hbs @@ -0,0 +1,44 @@ +paths: +{{#each paths as |path|}} + - {{path}} +{{/each}} +{{#if exclude_files}} +prospector.scanner.exclude_files: +{{#each exclude_files as |pattern f|}} + - {{pattern}} +{{/each}} +{{/if}} +{{#if multiline_json}} +multiline.pattern: '^{' +multiline.negate: true +multiline.match: after +multiline.max_lines: 5000 +multiline.timeout: 10 +{{/if}} +{{#if custom}} +{{custom}} +{{/if}} + +{{#if tags.length}} +tags: +{{#each tags as |tag|}} +- {{tag}} +{{/each}} +{{#if preserve_original_event}} +- preserve_original_event +{{/if}} +{{else}} +{{#if preserve_original_event}} +tags: +- preserve_original_event +{{/if}} +{{/if}} + +{{#contains "forwarded" tags}} +publisher_pipeline.disable_host: true +{{/contains}} + +{{#if processors}} +processors: +{{processors}} +{{/if}} \ No newline at end of file diff --git a/test/packages/stream_templates_invalid/data_stream/test_stream/elasticsearch/ingest_pipeline/default.yml b/test/packages/stream_templates_invalid/data_stream/test_stream/elasticsearch/ingest_pipeline/default.yml new file mode 100644 index 000000000..1a308fded --- /dev/null +++ b/test/packages/stream_templates_invalid/data_stream/test_stream/elasticsearch/ingest_pipeline/default.yml @@ -0,0 +1,10 @@ +--- +description: Pipeline for processing sample logs +processors: +- set: + field: sample_field + value: "1" +on_failure: +- set: + field: error.message + value: '{{ _ingest.on_failure_message }}' diff --git a/test/packages/stream_templates_invalid/data_stream/test_stream/fields/base-fields.yml b/test/packages/stream_templates_invalid/data_stream/test_stream/fields/base-fields.yml new file mode 100644 index 000000000..7c798f453 --- /dev/null +++ b/test/packages/stream_templates_invalid/data_stream/test_stream/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. 
+- name: '@timestamp' + type: date + description: Event timestamp. diff --git a/test/packages/stream_templates_invalid/data_stream/test_stream/manifest.yml b/test/packages/stream_templates_invalid/data_stream/test_stream/manifest.yml new file mode 100644 index 000000000..44d372b1e --- /dev/null +++ b/test/packages/stream_templates_invalid/data_stream/test_stream/manifest.yml @@ -0,0 +1,256 @@ +title: "New Data Stream" +type: logs +streams: + - input: filestream + title: "logs via filestream" + description: |- + Collect logs with filestream + template_path: missing.yml.hbs + vars: + - name: paths + type: text + title: "Paths" + multi: true + required: true + show_user: true + default: + - /var/log/*.log + - name: data_stream.dataset + type: text + title: "Dataset name" + description: |- + Dataset to write data to. Changing the dataset will send the data to a different index. You can't use `-` in the name of a dataset and only valid characters for [Elasticsearch index names](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html). + required: true + show_user: true + default: filestream.generic + - name: pipeline + type: text + title: "Ingest Pipeline" + description: |- + The Ingest Node pipeline ID to be used by the integration. + show_user: true + - name: parsers + type: yaml + title: "Parsers" + description: |- + This option expects a list of parsers that the log line has to go through. For more information see [Parsers](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#_parsers) + show_user: true + default: "" + #- ndjson: + # target: "" + # message_key: msg + #- multiline: + # type: count + # count_lines: 3 + - name: exclude_files + type: text + title: "Exclude Files" + description: |- + A list of regular expressions to match the files that you want Elastic Agent to ignore. By default no files are excluded. + multi: true + show_user: true + default: + - \.gz$ + - name: include_files + type: text + title: "Include Files" + description: |- + A list of regular expressions to match the files that you want Elastic Agent to include. If a list of regexes is provided, only the files that are allowed by the patterns are harvested. + multi: true + show_user: true + - name: processors + type: yaml + title: "Processors" + description: |- + Processors are used to reduce the number of fields in the exported event or to enhance the event with metadata. This executes in the agent before the logs are parsed. See [Processors](https://www.elastic.co/guide/en/beats/filebeat/current/filtering-and-enhancing-data.html) for details. + - name: tags + type: text + title: "Tags" + description: |- + Tags to include in the published event + multi: true + show_user: true + - name: encoding + type: text + title: "Encoding" + description: |- + The file encoding to use for reading data that contains international characters. For a full list of valid encodings, see the [Documentation](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#_encoding_2) + - name: recursive_glob + type: bool + title: "Recursive Glob" + description: |- + Enable expanding `**` into recursive glob patterns. With this feature enabled, the rightmost `**` in each path is expanded into a fixed number of glob patterns. For example: `/foo/**` expands to `/foo`, `/foo/*`, `/foo/*/*`, and so on. If enabled it expands a single `**` into a 8-level deep `*` pattern. + This feature is enabled by default. 
Set prospector.scanner.recursive_glob to false to disable it. + default: true + - name: symlinks + type: bool + title: "Enable symlinks" + description: |- + The symlinks option allows Elastic Agent to harvest symlinks in addition to regular files. When harvesting symlinks, Elastic Agent opens and reads the original file even though it reports the path of the symlink. + **Because this option may lead to data loss, it is disabled by default.** + - name: resend_on_touch + type: bool + title: "Resend on touch" + description: |- + If this option is enabled a file is resent if its size has not changed but its modification time has changed to a later time than before. It is disabled by default to avoid accidentally resending files. + - name: check_interval + type: text + title: "Check Interval" + description: |- + How often Elastic Agent checks for new files in the paths that are specified for harvesting. For example Specify 1s to scan the directory as frequently as possible without causing Elastic Agent to scan too frequently. **We do not recommend to set this value <1s.** + - name: ignore_older + type: text + title: "Ignore Older" + description: |- + If this option is enabled, Elastic Agent ignores any files that were modified before the specified timespan. You can use time strings like 2h (2 hours) and 5m (5 minutes). The default is 0, which disables the setting. + You must set Ignore Older to be greater than On State Change Inactive. + For more information, please see the [Documentation](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#filebeat-input-filestream-ignore-older) + - name: ignore_inactive + type: text + title: "Ignore Inactive" + description: |- + If this option is enabled, Elastic Agent ignores every file that has not been updated since the selected time. Possible options are since_first_start and since_last_start. + - name: close_on_state_changed_inactive + type: text + title: "Close on State Changed Inactive" + description: |- + When this option is enabled, Elastic Agent closes the file handle if a file has not been harvested for the specified duration. The counter for the defined period starts when the last log line was read by the harvester. It is not based on the modification time of the file. If the closed file changes again, a new harvester is started and the latest changes will be picked up after Check Interval has elapsed. + - name: close_on_state_changed_renamed + type: bool + title: "Close on State Changed Renamed" + description: |- + **Only use this option if you understand that data loss is a potential side effect.** + When this option is enabled, Elastic Agent closes the file handler when a file is renamed. This happens, for example, when rotating files. By default, the harvester stays open and keeps reading the file because the file handler does not depend on the file name. + - name: close_on_state_changed_removed + type: bool + title: "Close on State Changed Removed" + description: |- + When this option is enabled, Elastic Agent closes the harvester when a file is removed. Normally a file should only be removed after it’s inactive for the duration specified by close.on_state_change.inactive. + - name: close_reader_eof + type: bool + title: "Close Reader EOF" + description: |- + **Only use this option if you understand that data loss is a potential side effect.** + When this option is enabled, Elastic Agent closes a file as soon as the end of a file is reached. 
This is useful when your files are only written once and not updated from time to time. For example, this happens when you are writing every single log event to a new file. This option is disabled by default. + - name: close_reader_after_interval + type: text + title: "Close Reader After Interval" + description: |- + **Only use this option if you understand that data loss is a potential side effect. Another side effect is that multiline events might not be completely sent before the timeout expires.** + This option is particularly useful in case the output is blocked, which makes Elastic Agent keep open file handlers even for files that were deleted from the disk. + For more information see the [documentation](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#filebeat-input-filestream-close-timeout). + - name: clean_inactive + type: text + title: "Clean Inactive" + description: |- + **Only use this option if you understand that data loss is a potential side effect.** + When this option is enabled, Elastic Agent removes the state of a file after the specified period of inactivity has elapsed. + E.g: "30m", Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h". By default cleaning inactive states is disabled, -1 is used to disable it. + default: -1 + - name: clean_removed + type: bool + title: "Clean Removed" + description: |- + When this option is enabled, Elastic Agent cleans files from the registry if they cannot be found on disk anymore under the last known name. + **You must disable this option if you also disable Close Removed.** + - name: harvester_limit + type: integer + title: "Harvester Limit" + description: |- + The harvester_limit option limits the number of harvesters + that are started in parallel for one input. This directly + relates to the maximum number of file handlers that are + opened. The default is 0 (no limit). + default: 0 + - name: backoff_init + type: text + title: "Backoff Init" + description: |- + The backoff option defines how long Elastic Agent waits before checking a file again after EOF is reached. The default is 1s. + - name: backoff_max + type: text + title: "Backoff Max" + description: |- + The maximum time for Elastic Agent to wait before checking a file again after EOF is reached. The default is 10s. + **Requirement: Set Backoff Max to be greater than or equal to Backoff Init and less than or equal to Check Interval (Backoff Init <= Backoff Max <= Check Interval).** + - name: fingerprint + type: bool + title: "File identity: Fingerprint" + description: |- + **Changing file_identity methods between runs may result in + duplicated events in the output.** + Uses a fingerprint generated from the first few bytes (1k is + the default, this can be configured via Fingerprint offset + and length) to identify a file instead inode + device ID. + Refer to https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#_file_identity_2 + for more details. If this option is disabled (and 'Native + file identity is not enabled'), Elastic-Agent < 9.0.0 will + use Native as the file identity, and >= 9.0.0 will use + Fingerprint with the default offset and length. + default: true + - name: fingerprint_offset + type: integer + title: "File identity: Fingerprint offset" + description: |- + Offset from the beginning of the file to start calculating + the fingerprint. The default is 0. 
Only used when the + fingerprint file identity is selected + default: 0 + - name: fingerprint_length + type: integer + title: "File identity: Fingerprint length" + description: |- + The number of bytes used to calculate the fingerprint. The + default is 1024. Only used when the fingerprint file + identity is selected. + default: 1024 + - name: file_identity_native + type: bool + title: "File identity: Native" + description: |- + **Changing file_identity methods between runs may result in + duplicated events in the output.** + Uses a native identifier for files, on most Unix-like + file systems this is the inode + device ID. On file systems + that do not support inode, the native equivalent is used. + If you enable this option you **MUST disable Fingerprint + file identity**. Refer to + https://www.elastic.co/docs/reference/beats/filebeat/filebeat-input-filestream + for more details. + default: false + - name: rotation_external_strategy_copytruncate + type: yaml + title: "Rotation Strategy" + description: "If the log rotating application copies the contents of the active file and then truncates the original file, use these options to help Elastic Agent to read files correctly.\nSet the option suffix_regex so Elastic Agent can tell active and rotated files apart. \nThere are two supported suffix types in the input: numberic and date." + - name: exclude_lines + type: text + title: "Exclude Lines" + description: |- + A list of regular expressions to match the lines that you want Elastic Agent to exclude. Elastic Agent drops any lines that match a regular expression in the list. By default, no lines are dropped. Empty lines are ignored. + multi: true + - name: include_lines + type: text + title: "Include Lines" + description: |- + A list of regular expressions to match the lines that you want Elastic Agent to include. Elastic Agent exports only the lines that match a regular expression in the list. By default, all lines are exported. Empty lines are ignored. + multi: true + - name: buffer_size + type: text + title: "Buffer Size" + description: |- + The size in bytes of the buffer that each harvester uses when fetching a file. The default is 16384. + - name: message_max_bytes + type: text + title: "Message Max Bytes" + description: |- + The maximum number of bytes that a single log message can have. All bytes after mesage_max_bytes are discarded and not sent. The default is 10MB (10485760). + - name: condition + type: text + title: "Condition" + description: |- + Condition to filter when to collect this input. See [Dynamic Input Configuration](https://www.elastic.co/guide/en/fleet/current/dynamic-input-configuration.html) for details. + show_user: true +elasticsearch: + index_template: + mappings: + subobjects: false diff --git a/test/packages/stream_templates_invalid/docs/README.md b/test/packages/stream_templates_invalid/docs/README.md new file mode 100644 index 000000000..9830e75b7 --- /dev/null +++ b/test/packages/stream_templates_invalid/docs/README.md @@ -0,0 +1,101 @@ +{{- generatedHeader }} +{{/* +This template can be used as a starting point for writing documentation for your new integration. For each section, fill in the details +described in the comments. 
+ +Find more detailed documentation guidelines in https://www.elastic.co/docs/extend/integrations/documentation-guidelines +*/}} +# Stream Templates Invalid Test Integration for Elastic + +## Overview +{{/* Complete this section with a short summary of what data this integration collects and what use cases it enables */}} +The Stream Templates Invalid Test integration for Elastic enables collection of ... +This integration facilitates ... + +### Compatibility +{{/* Complete this section with information on what 3rd party software or hardware versions this integration is compatible with */}} +This integration is compatible with ... + +### How it works +{{/* Add a high level overview on how this integration works. For example, does it collect data from API calls or by receiving data from a network or file.*/}} + +## What data does this integration collect? +{{/* Complete this section with information on what types of data the integration collects, and link to reference documentation if available */}} +The {{.Manifest.Title}} integration collects log messages of the following types: +* ... + +### Supported use cases +{{/* Add details on the use cases that can be enabled by using this integration. Explain why a user would want to install and use this integration. */}} + +## What do I need to use this integration? +{{/* List any vendor-specific prerequisites needed before starting to install the integration. */}} + +## How do I deploy this integration? + +### Agent-based deployment + +Elastic Agent must be installed. For more details, check the Elastic Agent [installation instructions](docs-content://reference/fleet/install-elastic-agents.md). You can install only one Elastic Agent per host. + +Elastic Agent is required to stream data from the syslog or log file receiver and ship the data to Elastic, where the events will then be processed via the integration's ingest pipelines. + +{{/* If agentless is available for this integration, we'll want to include that here as well. +### Agentless deployment + +Agentless deployments are only supported in Elastic Serverless and Elastic Cloud environments. Agentless deployments provide a means to ingest data while avoiding the orchestration, management, and maintenance needs associated with standard ingest infrastructure. Using an agentless deployment makes manual agent deployment unnecessary, allowing you to focus on your data instead of the agent that collects it. + +For more information, refer to [Agentless integrations](https://www.elastic.co/guide/en/serverless/current/security-agentless-integrations.html) and [Agentless integrations FAQ](https://www.elastic.co/guide/en/serverless/current/agentless-integration-troubleshooting.html) +*/}} + +### Onboard / configure +{{/* List the steps that will need to be followed in order to completely set up a working integration. +For integrations that support multiple input types, be sure to add steps for all inputs. +*/}} + +### Validation +{{/* How can the user test whether the integration is working? Include example commands or test files if applicable. */}} + +## Troubleshooting + +For help with Elastic ingest tools, check [Common problems](https://www.elastic.co/docs/troubleshoot/ingest/fleet/common-problems). +{{/* +Add any vendor specific troubleshooting here. + +Are there common issues or “gotchas” for deploying this integration? If so, how can they be resolved? +If applicable, links to the third-party software’s troubleshooting documentation.
*/}} + +## Scaling + +For more information on architectures that can be used for scaling this integration, check the [Ingest Architectures](https://www.elastic.co/docs/manage-data/ingest/ingest-reference-architectures) documentation. +{{/* Add any vendor specific scaling information here */}} + +## Reference +{{/* Repeat for each data stream of the current type +### {Data stream name} + +The `{data stream name}` data stream provides events from {source} of the following types: {list types}. + +For each data_stream_name, include an optional summary of the datastream, the exported fields reference table and the sample event. + +The fields template function will be replaced by a generated list of all fields from the `fields/` directory of the data stream when building the integration. + +#### {data stream name} fields + +To include a generated list of fields from the `fields/` directory, uncomment and use: +{{ fields "data_stream_name" }} + +The event template function will be replaced by a sample event, taken from `sample_event.json`, when building this integration. + +To include a sample event from `sample_event.json`, uncomment and use: +{{ event "data_stream_name" }} + +*/}} + +### Inputs used +{{/* All inputs used by this package will be automatically listed here. */}} +{{ inputDocs }} + +### API usage +{{/* For integrations that use APIs to collect data, document all the APIs that are used, and link to relevant information */}} +These APIs are used with this integration: +* ... diff --git a/test/packages/stream_templates_invalid/img/sample-logo.svg b/test/packages/stream_templates_invalid/img/sample-logo.svg new file mode 100644 index 000000000..6268dd88f --- /dev/null +++ b/test/packages/stream_templates_invalid/img/sample-logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/test/packages/stream_templates_invalid/img/sample-screenshot.png b/test/packages/stream_templates_invalid/img/sample-screenshot.png new file mode 100644 index 000000000..d7a56a3ec Binary files /dev/null and b/test/packages/stream_templates_invalid/img/sample-screenshot.png differ diff --git a/test/packages/stream_templates_invalid/manifest.yml b/test/packages/stream_templates_invalid/manifest.yml new file mode 100644 index 000000000..6c633faa2 --- /dev/null +++ b/test/packages/stream_templates_invalid/manifest.yml @@ -0,0 +1,37 @@ +format_version: 3.6.0 +name: stream_templates_invalid +title: "Stream Templates Invalid Test" +version: 0.0.1 +source: + license: "Elastic-2.0" +description: "This is a new package." +type: integration +categories: + - custom +conditions: + kibana: + version: "^9.1.3" + elastic: + subscription: "basic" +screenshots: + - src: /img/sample-screenshot.png + title: Sample screenshot + size: 600x600 + type: image/png +icons: + - src: /img/sample-logo.svg + title: Sample logo + size: 32x32 + type: image/svg+xml +policy_templates: + - name: sample + title: Sample logs + description: Collect sample logs + inputs: + - type: logfile + title: Collect sample logs from instances + description: Collecting sample logs + template_path: template.yml.hbs +owner: + github: elastic/integrations + type: elastic diff --git a/test/packages/stream_templates_invalid/sample_event.json b/test/packages/stream_templates_invalid/sample_event.json new file mode 100644 index 000000000..0ac49cb72 --- /dev/null +++ b/test/packages/stream_templates_invalid/sample_event.json @@ -0,0 +1,3 @@ +{ + "description": "This is an example sample-event for Stream Templates Invalid Test. Replace it with a real sample event.
Hint: If system tests exist, running `elastic-package test system --generate` will generate this file." +} diff --git a/test/packages/stream_templates_valid/LICENSE.txt b/test/packages/stream_templates_valid/LICENSE.txt new file mode 100644 index 000000000..809108b85 --- /dev/null +++ b/test/packages/stream_templates_valid/LICENSE.txt @@ -0,0 +1,93 @@ +Elastic License 2.0 + +URL: https://www.elastic.co/licensing/elastic-license + +## Acceptance + +By using the software, you agree to all of the terms and conditions below. + +## Copyright License + +The licensor grants you a non-exclusive, royalty-free, worldwide, +non-sublicensable, non-transferable license to use, copy, distribute, make +available, and prepare derivative works of the software, in each case subject to +the limitations and conditions below. + +## Limitations + +You may not provide the software to third parties as a hosted or managed +service, where the service provides users with access to any substantial set of +the features or functionality of the software. + +You may not move, change, disable, or circumvent the license key functionality +in the software, and you may not remove or obscure any functionality in the +software that is protected by the license key. + +You may not alter, remove, or obscure any licensing, copyright, or other notices +of the licensor in the software. Any use of the licensor’s trademarks is subject +to applicable law. + +## Patents + +The licensor grants you a license, under any patent claims the licensor can +license, or becomes able to license, to make, have made, use, sell, offer for +sale, import and have imported the software, in each case subject to the +limitations and conditions in this license. This license does not cover any +patent claims that you cause to be infringed by modifications or additions to +the software. If you or your company make any written claim that the software +infringes or contributes to infringement of any patent, your patent license for +the software granted under these terms ends immediately. If your company makes +such a claim, your patent license ends immediately for work on behalf of your +company. + +## Notices + +You must ensure that anyone who gets a copy of any part of the software from you +also gets a copy of these terms. + +If you modify the software, you must include in any modified copies of the +software prominent notices stating that you have modified the software. + +## No Other Rights + +These terms do not imply any licenses other than those expressly granted in +these terms. + +## Termination + +If you use the software in violation of these terms, such use is not licensed, +and your licenses will automatically terminate. If the licensor provides you +with a notice of your violation, and you cease all violation of this license no +later than 30 days after you receive that notice, your licenses will be +reinstated retroactively. However, if you violate these terms after such +reinstatement, any additional violation of these terms will cause your licenses +to terminate automatically and permanently. + +## No Liability + +*As far as the law allows, the software comes as is, without any warranty or +condition, and the licensor will not be liable to you for any damages arising +out of these terms or the use or nature of the software, under any kind of +legal claim.* + +## Definitions + +The **licensor** is the entity offering these terms, and the **software** is the +software the licensor makes available under these terms, including any portion +of it. 
+ +**you** refers to the individual or entity agreeing to these terms. + +**your company** is any legal entity, sole proprietorship, or other kind of +organization that you work for, plus all organizations that have control over, +are under the control of, or are under common control with that +organization. **control** means ownership of substantially all the assets of an +entity, or the power to direct its management and policies by vote, contract, or +otherwise. Control can be direct or indirect. + +**your licenses** are all the licenses granted to you for the software under +these terms. + +**use** means anything you do with the software requiring one of your licenses. + +**trademark** means trademarks, service marks, and similar rights. diff --git a/test/packages/stream_templates_valid/_dev/build/docs/README.md b/test/packages/stream_templates_valid/_dev/build/docs/README.md new file mode 100644 index 000000000..9830e75b7 --- /dev/null +++ b/test/packages/stream_templates_valid/_dev/build/docs/README.md @@ -0,0 +1,101 @@ +{{- generatedHeader }} +{{/* +This template can be used as a starting point for writing documentation for your new integration. For each section, fill in the details +described in the comments. + +Find more detailed documentation guidelines in https://www.elastic.co/docs/extend/integrations/documentation-guidelines +*/}} +# Stream Templates Valid Test Integration for Elastic + +## Overview +{{/* Complete this section with a short summary of what data this integration collects and what use cases it enables */}} +The Stream Templates Valid Test integration for Elastic enables collection of ... +This integration facilitates ... + +### Compatibility +{{/* Complete this section with information on what 3rd party software or hardware versions this integration is compatible with */}} +This integration is compatible with ... + +### How it works +{{/* Add a high level overview on how this integration works. For example, does it collect data from API calls or recieving data from a network or file.*/}} + +## What data does this integration collect? +{{/* Complete this section with information on what types of data the integration collects, and link to reference documentation if available */}} +The {{.Manifest.Title}} integration collects log messages of the following types: +* ... + +### Supported use cases +{{/* Add details on the use cases that can be enabled by using this integration. Explain why a user would want to install and use this integration. */}} + +## What do I need to use this integration? +{{/* List any vendor-specific prerequisites needed before starting to install the integration. */}} + +## How do I deploy this integration? + +### Agent-based deployment + +Elastic Agent must be installed. For more details, check the Elastic Agent [installation instructions](docs-content://reference/fleet/install-elastic-agents.md). You can install only one Elastic Agent per host. + +Elastic Agent is required to stream data from the syslog or log file receiver and ship the data to Elastic, where the events will then be processed via the integration's ingest pipelines. + +{{/* If agentless is available for this integration, we'll want to include that here as well. +### Agentless deployment + +Agentless deployments are only supported in Elastic Serverless and Elastic Cloud environments. Agentless deployments provide a means to ingest data while avoiding the orchestration, management, and maintenance needs associated with standard ingest infrastructure. 
Using an agentless deployment makes manual agent deployment unnecessary, allowing you to focus on your data instead of the agent that collects it. + +For more information, refer to [Agentless integrations](https://www.elastic.co/guide/en/serverless/current/security-agentless-integrations.html) and [Agentless integrations FAQ](https://www.elastic.co/guide/en/serverless/current/agentless-integration-troubleshooting.html) +*/}} + +### Onboard / configure +{{/* List the steps that will need to be followed in order to completely set up a working integration. +For integrations that support multiple input types, be sure to add steps for all inputs. +*/}} + +### Validation +{{/* How can the user test whether the integration is working? Include example commands or test files if applicable. */}} + +## Troubleshooting + +For help with Elastic ingest tools, check [Common problems](https://www.elastic.co/docs/troubleshoot/ingest/fleet/common-problems). +{{/* +Add any vendor specific troubleshooting here. + +Are there common issues or “gotchas” for deploying this integration? If so, how can they be resolved? +If applicable, links to the third-party software’s troubleshooting documentation. +*/}} + +## Scaling + +For more information on architectures that can be used for scaling this integration, check the [Ingest Architectures](https://www.elastic.co/docs/manage-data/ingest/ingest-reference-architectures) documentation. +{{/* Add any vendor specific scaling information here */}} + +## Reference +{{/* Repeat for each data stream of the current type +### {Data stream name} + +The `{data stream name}` data stream provides events from {source} of the following types: {list types}. + +For each data_stream_name, include an optional summary of the datastream, the exported fields reference table and the sample event. + +The fields template function will be replaced by a generated list of all fields from the `fields/` directory of the data stream when building the integration. + +#### {data stream name} fields + +To include a generated list of fields from the `fields/` directory, uncomment and use: +{{ fields "data_stream_name" }} + +The event template function will be replaced by a sample event, taken from `sample_event.json`, when building this integration. + +To include a sample event from `sample_event.json`, uncomment and use: +{{ event "data_stream_name" }} + +*/}} + +### Inputs used +{{/* All inputs used by this package will be automatically listed here. */}} +{{ inputDocs }} + +### API usage +{{/* For integrations that use APIs to collect data, document all the APIs that are used, and link to relevant information */}} +These APIs are used with this integration: +* ...
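Both fixture manifests declare `template_path: template.yml.hbs` on their `logfile` input, and the valid package adds that file under `agent/input/` in the next hunk. As a rough sketch of the input-level lookup these fixtures exercise — the helper name and layout assumptions below are hypothetical, not the validator's actual API — a standalone check could look like this:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// inputTemplateExists reports whether an input-level template_path resolves
// to a file under <pkgRoot>/agent/input/. An empty template_path is treated
// as "nothing to check" in this sketch; the spec may apply stricter rules
// depending on the package type.
func inputTemplateExists(pkgRoot, templatePath string) (bool, error) {
	if templatePath == "" {
		return true, nil
	}
	_, err := os.Stat(filepath.Join(pkgRoot, "agent", "input", templatePath))
	if err == nil {
		return true, nil
	}
	if os.IsNotExist(err) {
		return false, nil
	}
	return false, err
}

func main() {
	ok, err := inputTemplateExists("test/packages/stream_templates_valid", "template.yml.hbs")
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println("agent/input/template.yml.hbs present:", ok)
}
```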
diff --git a/test/packages/stream_templates_valid/agent/input/template.yml.hbs b/test/packages/stream_templates_valid/agent/input/template.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/stream_templates_valid/changelog.yml b/test/packages/stream_templates_valid/changelog.yml new file mode 100644 index 000000000..bb0320a52 --- /dev/null +++ b/test/packages/stream_templates_valid/changelog.yml @@ -0,0 +1,6 @@ +# newer versions go on top +- version: "0.0.1" + changes: + - description: Initial draft of the package + type: enhancement + link: https://github.com/elastic/integrations/pull/1 # FIXME Replace with the real PR link diff --git a/test/packages/stream_templates_valid/data_stream/test_stream/agent/stream/filestream.yml.hbs b/test/packages/stream_templates_valid/data_stream/test_stream/agent/stream/filestream.yml.hbs new file mode 100644 index 000000000..3bede6328 --- /dev/null +++ b/test/packages/stream_templates_valid/data_stream/test_stream/agent/stream/filestream.yml.hbs @@ -0,0 +1,44 @@ +paths: +{{#each paths as |path|}} + - {{path}} +{{/each}} +{{#if exclude_files}} +prospector.scanner.exclude_files: +{{#each exclude_files as |pattern f|}} + - {{pattern}} +{{/each}} +{{/if}} +{{#if multiline_json}} +multiline.pattern: '^{' +multiline.negate: true +multiline.match: after +multiline.max_lines: 5000 +multiline.timeout: 10 +{{/if}} +{{#if custom}} +{{custom}} +{{/if}} + +{{#if tags.length}} +tags: +{{#each tags as |tag|}} +- {{tag}} +{{/each}} +{{#if preserve_original_event}} +- preserve_original_event +{{/if}} +{{else}} +{{#if preserve_original_event}} +tags: +- preserve_original_event +{{/if}} +{{/if}} + +{{#contains "forwarded" tags}} +publisher_pipeline.disable_host: true +{{/contains}} + +{{#if processors}} +processors: +{{processors}} +{{/if}} \ No newline at end of file diff --git a/test/packages/stream_templates_valid/data_stream/test_stream/elasticsearch/ingest_pipeline/default.yml b/test/packages/stream_templates_valid/data_stream/test_stream/elasticsearch/ingest_pipeline/default.yml new file mode 100644 index 000000000..1a308fded --- /dev/null +++ b/test/packages/stream_templates_valid/data_stream/test_stream/elasticsearch/ingest_pipeline/default.yml @@ -0,0 +1,10 @@ +--- +description: Pipeline for processing sample logs +processors: +- set: + field: sample_field + value: "1" +on_failure: +- set: + field: error.message + value: '{{ _ingest.on_failure_message }}' diff --git a/test/packages/stream_templates_valid/data_stream/test_stream/fields/base-fields.yml b/test/packages/stream_templates_valid/data_stream/test_stream/fields/base-fields.yml new file mode 100644 index 000000000..7c798f453 --- /dev/null +++ b/test/packages/stream_templates_valid/data_stream/test_stream/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: '@timestamp' + type: date + description: Event timestamp. 
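The data stream manifest in the next hunk points its `filestream` stream at the `filestream.yml.hbs` added above under `agent/stream/`, while the invalid fixture earlier references a `missing.yml.hbs` that is never added. A minimal sketch of the stream-level lookup this pair of fixtures exercises — the types and function below are hypothetical and use only the standard library plus `gopkg.in/yaml.v3` — might look like:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"

	"gopkg.in/yaml.v3"
)

// dsStream and dsManifest model only the fields this sketch needs from a
// data stream manifest; the real manifest carries many more.
type dsStream struct {
	Input        string `yaml:"input"`
	TemplatePath string `yaml:"template_path"`
}

type dsManifest struct {
	Streams []dsStream `yaml:"streams"`
}

// missingStreamTemplates lists explicit template_path values that do not
// resolve to a file under <dataStreamDir>/agent/stream/.
func missingStreamTemplates(dataStreamDir string) ([]string, error) {
	data, err := os.ReadFile(filepath.Join(dataStreamDir, "manifest.yml"))
	if err != nil {
		return nil, err
	}
	var m dsManifest
	if err := yaml.Unmarshal(data, &m); err != nil {
		return nil, err
	}
	var missing []string
	for _, s := range m.Streams {
		if s.TemplatePath == "" {
			continue // default-name fallback is sketched at the end of this section
		}
		p := filepath.Join(dataStreamDir, "agent", "stream", s.TemplatePath)
		if _, err := os.Stat(p); os.IsNotExist(err) {
			missing = append(missing, s.TemplatePath)
		} else if err != nil {
			return nil, err
		}
	}
	return missing, nil
}

func main() {
	for _, dir := range []string{
		"test/packages/stream_templates_valid/data_stream/test_stream",
		"test/packages/stream_templates_invalid/data_stream/test_stream",
	} {
		missing, err := missingStreamTemplates(dir)
		if err != nil {
			fmt.Println(dir, "error:", err)
			continue
		}
		fmt.Println(dir, "missing stream templates:", missing)
	}
}
```

Run over the two fixtures, this would report no missing templates for the valid package and `missing.yml.hbs` for the invalid one, which is exactly the contrast the pair is meant to provide.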
diff --git a/test/packages/stream_templates_valid/data_stream/test_stream/manifest.yml b/test/packages/stream_templates_valid/data_stream/test_stream/manifest.yml new file mode 100644 index 000000000..251bec6c4 --- /dev/null +++ b/test/packages/stream_templates_valid/data_stream/test_stream/manifest.yml @@ -0,0 +1,256 @@ +title: "New Data Stream" +type: logs +streams: + - input: filestream + title: "logs via filestream" + description: |- + Collect logs with filestream + template_path: filestream.yml.hbs + vars: + - name: paths + type: text + title: "Paths" + multi: true + required: true + show_user: true + default: + - /var/log/*.log + - name: data_stream.dataset + type: text + title: "Dataset name" + description: |- + Dataset to write data to. Changing the dataset will send the data to a different index. You can't use `-` in the name of a dataset and only valid characters for [Elasticsearch index names](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html). + required: true + show_user: true + default: filestream.generic + - name: pipeline + type: text + title: "Ingest Pipeline" + description: |- + The Ingest Node pipeline ID to be used by the integration. + show_user: true + - name: parsers + type: yaml + title: "Parsers" + description: |- + This option expects a list of parsers that the log line has to go through. For more information see [Parsers](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#_parsers) + show_user: true + default: "" + #- ndjson: + # target: "" + # message_key: msg + #- multiline: + # type: count + # count_lines: 3 + - name: exclude_files + type: text + title: "Exclude Files" + description: |- + A list of regular expressions to match the files that you want Elastic Agent to ignore. By default no files are excluded. + multi: true + show_user: true + default: + - \.gz$ + - name: include_files + type: text + title: "Include Files" + description: |- + A list of regular expressions to match the files that you want Elastic Agent to include. If a list of regexes is provided, only the files that are allowed by the patterns are harvested. + multi: true + show_user: true + - name: processors + type: yaml + title: "Processors" + description: |- + Processors are used to reduce the number of fields in the exported event or to enhance the event with metadata. This executes in the agent before the logs are parsed. See [Processors](https://www.elastic.co/guide/en/beats/filebeat/current/filtering-and-enhancing-data.html) for details. + - name: tags + type: text + title: "Tags" + description: |- + Tags to include in the published event + multi: true + show_user: true + - name: encoding + type: text + title: "Encoding" + description: |- + The file encoding to use for reading data that contains international characters. For a full list of valid encodings, see the [Documentation](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#_encoding_2) + - name: recursive_glob + type: bool + title: "Recursive Glob" + description: |- + Enable expanding `**` into recursive glob patterns. With this feature enabled, the rightmost `**` in each path is expanded into a fixed number of glob patterns. For example: `/foo/**` expands to `/foo`, `/foo/*`, `/foo/*/*`, and so on. If enabled it expands a single `**` into a 8-level deep `*` pattern. + This feature is enabled by default. Set prospector.scanner.recursive_glob to false to disable it. 
+ default: true + - name: symlinks + type: bool + title: "Enable symlinks" + description: |- + The symlinks option allows Elastic Agent to harvest symlinks in addition to regular files. When harvesting symlinks, Elastic Agent opens and reads the original file even though it reports the path of the symlink. + **Because this option may lead to data loss, it is disabled by default.** + - name: resend_on_touch + type: bool + title: "Resend on touch" + description: |- + If this option is enabled a file is resent if its size has not changed but its modification time has changed to a later time than before. It is disabled by default to avoid accidentally resending files. + - name: check_interval + type: text + title: "Check Interval" + description: |- + How often Elastic Agent checks for new files in the paths that are specified for harvesting. For example Specify 1s to scan the directory as frequently as possible without causing Elastic Agent to scan too frequently. **We do not recommend to set this value <1s.** + - name: ignore_older + type: text + title: "Ignore Older" + description: |- + If this option is enabled, Elastic Agent ignores any files that were modified before the specified timespan. You can use time strings like 2h (2 hours) and 5m (5 minutes). The default is 0, which disables the setting. + You must set Ignore Older to be greater than On State Change Inactive. + For more information, please see the [Documentation](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#filebeat-input-filestream-ignore-older) + - name: ignore_inactive + type: text + title: "Ignore Inactive" + description: |- + If this option is enabled, Elastic Agent ignores every file that has not been updated since the selected time. Possible options are since_first_start and since_last_start. + - name: close_on_state_changed_inactive + type: text + title: "Close on State Changed Inactive" + description: |- + When this option is enabled, Elastic Agent closes the file handle if a file has not been harvested for the specified duration. The counter for the defined period starts when the last log line was read by the harvester. It is not based on the modification time of the file. If the closed file changes again, a new harvester is started and the latest changes will be picked up after Check Interval has elapsed. + - name: close_on_state_changed_renamed + type: bool + title: "Close on State Changed Renamed" + description: |- + **Only use this option if you understand that data loss is a potential side effect.** + When this option is enabled, Elastic Agent closes the file handler when a file is renamed. This happens, for example, when rotating files. By default, the harvester stays open and keeps reading the file because the file handler does not depend on the file name. + - name: close_on_state_changed_removed + type: bool + title: "Close on State Changed Removed" + description: |- + When this option is enabled, Elastic Agent closes the harvester when a file is removed. Normally a file should only be removed after it’s inactive for the duration specified by close.on_state_change.inactive. + - name: close_reader_eof + type: bool + title: "Close Reader EOF" + description: |- + **Only use this option if you understand that data loss is a potential side effect.** + When this option is enabled, Elastic Agent closes a file as soon as the end of a file is reached. This is useful when your files are only written once and not updated from time to time. 
For example, this happens when you are writing every single log event to a new file. This option is disabled by default. + - name: close_reader_after_interval + type: text + title: "Close Reader After Interval" + description: |- + **Only use this option if you understand that data loss is a potential side effect. Another side effect is that multiline events might not be completely sent before the timeout expires.** + This option is particularly useful in case the output is blocked, which makes Elastic Agent keep open file handlers even for files that were deleted from the disk. + For more information see the [documentation](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#filebeat-input-filestream-close-timeout). + - name: clean_inactive + type: text + title: "Clean Inactive" + description: |- + **Only use this option if you understand that data loss is a potential side effect.** + When this option is enabled, Elastic Agent removes the state of a file after the specified period of inactivity has elapsed. + E.g: "30m", Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h". By default cleaning inactive states is disabled, -1 is used to disable it. + default: -1 + - name: clean_removed + type: bool + title: "Clean Removed" + description: |- + When this option is enabled, Elastic Agent cleans files from the registry if they cannot be found on disk anymore under the last known name. + **You must disable this option if you also disable Close Removed.** + - name: harvester_limit + type: integer + title: "Harvester Limit" + description: |- + The harvester_limit option limits the number of harvesters + that are started in parallel for one input. This directly + relates to the maximum number of file handlers that are + opened. The default is 0 (no limit). + default: 0 + - name: backoff_init + type: text + title: "Backoff Init" + description: |- + The backoff option defines how long Elastic Agent waits before checking a file again after EOF is reached. The default is 1s. + - name: backoff_max + type: text + title: "Backoff Max" + description: |- + The maximum time for Elastic Agent to wait before checking a file again after EOF is reached. The default is 10s. + **Requirement: Set Backoff Max to be greater than or equal to Backoff Init and less than or equal to Check Interval (Backoff Init <= Backoff Max <= Check Interval).** + - name: fingerprint + type: bool + title: "File identity: Fingerprint" + description: |- + **Changing file_identity methods between runs may result in + duplicated events in the output.** + Uses a fingerprint generated from the first few bytes (1k is + the default, this can be configured via Fingerprint offset + and length) to identify a file instead inode + device ID. + Refer to https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#_file_identity_2 + for more details. If this option is disabled (and 'Native + file identity is not enabled'), Elastic-Agent < 9.0.0 will + use Native as the file identity, and >= 9.0.0 will use + Fingerprint with the default offset and length. + default: true + - name: fingerprint_offset + type: integer + title: "File identity: Fingerprint offset" + description: |- + Offset from the beginning of the file to start calculating + the fingerprint. The default is 0. 
Only used when the + fingerprint file identity is selected + default: 0 + - name: fingerprint_length + type: integer + title: "File identity: Fingerprint length" + description: |- + The number of bytes used to calculate the fingerprint. The + default is 1024. Only used when the fingerprint file + identity is selected. + default: 1024 + - name: file_identity_native + type: bool + title: "File identity: Native" + description: |- + **Changing file_identity methods between runs may result in + duplicated events in the output.** + Uses a native identifier for files, on most Unix-like + file systems this is the inode + device ID. On file systems + that do not support inode, the native equivalent is used. + If you enable this option you **MUST disable Fingerprint + file identity**. Refer to + https://www.elastic.co/docs/reference/beats/filebeat/filebeat-input-filestream + for more details. + default: false + - name: rotation_external_strategy_copytruncate + type: yaml + title: "Rotation Strategy" + description: "If the log rotating application copies the contents of the active file and then truncates the original file, use these options to help Elastic Agent to read files correctly.\nSet the option suffix_regex so Elastic Agent can tell active and rotated files apart. \nThere are two supported suffix types in the input: numberic and date." + - name: exclude_lines + type: text + title: "Exclude Lines" + description: |- + A list of regular expressions to match the lines that you want Elastic Agent to exclude. Elastic Agent drops any lines that match a regular expression in the list. By default, no lines are dropped. Empty lines are ignored. + multi: true + - name: include_lines + type: text + title: "Include Lines" + description: |- + A list of regular expressions to match the lines that you want Elastic Agent to include. Elastic Agent exports only the lines that match a regular expression in the list. By default, all lines are exported. Empty lines are ignored. + multi: true + - name: buffer_size + type: text + title: "Buffer Size" + description: |- + The size in bytes of the buffer that each harvester uses when fetching a file. The default is 16384. + - name: message_max_bytes + type: text + title: "Message Max Bytes" + description: |- + The maximum number of bytes that a single log message can have. All bytes after mesage_max_bytes are discarded and not sent. The default is 10MB (10485760). + - name: condition + type: text + title: "Condition" + description: |- + Condition to filter when to collect this input. See [Dynamic Input Configuration](https://www.elastic.co/guide/en/fleet/current/dynamic-input-configuration.html) for details. + show_user: true +elasticsearch: + index_template: + mappings: + subobjects: false diff --git a/test/packages/stream_templates_valid/docs/README.md b/test/packages/stream_templates_valid/docs/README.md new file mode 100644 index 000000000..9830e75b7 --- /dev/null +++ b/test/packages/stream_templates_valid/docs/README.md @@ -0,0 +1,101 @@ +{{- generatedHeader }} +{{/* +This template can be used as a starting point for writing documentation for your new integration. For each section, fill in the details +described in the comments. 
+ +Find more detailed documentation guidelines in https://www.elastic.co/docs/extend/integrations/documentation-guidelines +*/}} +# Stream Templates Valid Test Integration for Elastic + +## Overview +{{/* Complete this section with a short summary of what data this integration collects and what use cases it enables */}} +The Stream Templates Valid Test integration for Elastic enables collection of ... +This integration facilitates ... + +### Compatibility +{{/* Complete this section with information on what 3rd party software or hardware versions this integration is compatible with */}} +This integration is compatible with ... + +### How it works +{{/* Add a high level overview on how this integration works. For example, does it collect data from API calls or by receiving data from a network or file.*/}} + +## What data does this integration collect? +{{/* Complete this section with information on what types of data the integration collects, and link to reference documentation if available */}} +The {{.Manifest.Title}} integration collects log messages of the following types: +* ... + +### Supported use cases +{{/* Add details on the use cases that can be enabled by using this integration. Explain why a user would want to install and use this integration. */}} + +## What do I need to use this integration? +{{/* List any vendor-specific prerequisites needed before starting to install the integration. */}} + +## How do I deploy this integration? + +### Agent-based deployment + +Elastic Agent must be installed. For more details, check the Elastic Agent [installation instructions](docs-content://reference/fleet/install-elastic-agents.md). You can install only one Elastic Agent per host. + +Elastic Agent is required to stream data from the syslog or log file receiver and ship the data to Elastic, where the events will then be processed via the integration's ingest pipelines. + +{{/* If agentless is available for this integration, we'll want to include that here as well. +### Agentless deployment + +Agentless deployments are only supported in Elastic Serverless and Elastic Cloud environments. Agentless deployments provide a means to ingest data while avoiding the orchestration, management, and maintenance needs associated with standard ingest infrastructure. Using an agentless deployment makes manual agent deployment unnecessary, allowing you to focus on your data instead of the agent that collects it. + +For more information, refer to [Agentless integrations](https://www.elastic.co/guide/en/serverless/current/security-agentless-integrations.html) and [Agentless integrations FAQ](https://www.elastic.co/guide/en/serverless/current/agentless-integration-troubleshooting.html) +*/}} + +### Onboard / configure +{{/* List the steps that will need to be followed in order to completely set up a working integration. +For integrations that support multiple input types, be sure to add steps for all inputs. +*/}} + +### Validation +{{/* How can the user test whether the integration is working? Include example commands or test files if applicable. */}} + +## Troubleshooting + +For help with Elastic ingest tools, check [Common problems](https://www.elastic.co/docs/troubleshoot/ingest/fleet/common-problems). +{{/* +Add any vendor specific troubleshooting here. + +Are there common issues or “gotchas” for deploying this integration? If so, how can they be resolved? +If applicable, links to the third-party software’s troubleshooting documentation.
+*/}} + +## Scaling + +For more information on architectures that can be used for scaling this integration, check the [Ingest Architectures](https://www.elastic.co/docs/manage-data/ingest/ingest-reference-architectures) documentation. +{{/* Add any vendor specific scaling information here */}} + +## Reference +{{/* Repeat for each data stream of the current type +### {Data stream name} + +The `{data stream name}` data stream provides events from {source} of the following types: {list types}. + +For each data_stream_name, include an optional summary of the datastream, the exported fields reference table and the sample event. + +The fields template function will be replaced by a generated list of all fields from the `fields/` directory of the data stream when building the integration. + +#### {data stream name} fields + +To include a generated list of fields from the `fields/` directory, uncomment and use: +{{ fields "data_stream_name" }} + +The event template function will be replace by a sample event, taken from `sample_event.json`, when building this integration. + +To include a sample event from `sample_event.json`, uncomment and use: +{{ event "data_stream_name" }} + +*/}} + +### Inputs used +{{/* All inputs used by this package will be automatically listed here. */}} +{{ inputDocs }} + +### API usage +{{/* For integrations that use APIs to collect data, document all the APIs that are used, and link to relevent information */}} +These APIs are used with this integration: +* ... diff --git a/test/packages/stream_templates_valid/img/sample-logo.svg b/test/packages/stream_templates_valid/img/sample-logo.svg new file mode 100644 index 000000000..6268dd88f --- /dev/null +++ b/test/packages/stream_templates_valid/img/sample-logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/test/packages/stream_templates_valid/img/sample-screenshot.png b/test/packages/stream_templates_valid/img/sample-screenshot.png new file mode 100644 index 000000000..d7a56a3ec Binary files /dev/null and b/test/packages/stream_templates_valid/img/sample-screenshot.png differ diff --git a/test/packages/stream_templates_valid/manifest.yml b/test/packages/stream_templates_valid/manifest.yml new file mode 100644 index 000000000..6c633faa2 --- /dev/null +++ b/test/packages/stream_templates_valid/manifest.yml @@ -0,0 +1,37 @@ +format_version: 3.6.0 +name: stream_templates_valid +title: "Stream Templates Valid Test" +version: 0.0.1 +source: + license: "Elastic-2.0" +description: "This is a new package." +type: integration +categories: + - custom +conditions: + kibana: + version: "^9.1.3" + elastic: + subscription: "basic" +screenshots: + - src: /img/sample-screenshot.png + title: Sample screenshot + size: 600x600 + type: image/png +icons: + - src: /img/sample-logo.svg + title: Sample logo + size: 32x32 + type: image/svg+xml +policy_templates: + - name: sample + title: Sample logs + description: Collect sample logs + inputs: + - type: logfile + title: Collect sample logs from instances + description: Collecting sample logs + template_path: template.yml.hbs +owner: + github: elastic/integrations + type: elastic diff --git a/test/packages/stream_templates_valid/sample_event.json b/test/packages/stream_templates_valid/sample_event.json new file mode 100644 index 000000000..0ac49cb72 --- /dev/null +++ b/test/packages/stream_templates_valid/sample_event.json @@ -0,0 +1,3 @@ +{ + "description": "This is an example sample-event for Stream Templates Valid Test. Replace it with a real sample event. 
Hint: If system tests exist, running `elastic-package test system --generate` will generate this file." +} diff --git a/test/packages/with_links/data_stream/foo/agent/stream/stream.yml.hbs b/test/packages/with_links/data_stream/foo/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/with_links/data_stream/k8s_data_stream/agent/stream/stream.yml.hbs b/test/packages/with_links/data_stream/k8s_data_stream/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/with_links/data_stream/k8s_data_stream_no_definitions/agent/stream/stream.yml.hbs b/test/packages/with_links/data_stream/k8s_data_stream_no_definitions/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb diff --git a/test/packages/with_links/data_stream/pe/agent/stream/stream.yml.hbs b/test/packages/with_links/data_stream/pe/agent/stream/stream.yml.hbs new file mode 100644 index 000000000..e69de29bb
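The `with_links` data streams above ship empty `stream.yml.hbs` files, the conventional default template name a stream falls back to when it does not set `template_path` explicitly. A small helper (the function name and hard-coded default below are assumptions of this sketch) makes that fallback explicit:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// effectiveStreamTemplate returns the template file a stream is expected to
// ship: the explicit template_path if one is set, otherwise the conventional
// stream.yml.hbs default used by the with_links fixtures above.
func effectiveStreamTemplate(dataStreamDir, templatePath string) string {
	if templatePath == "" {
		templatePath = "stream.yml.hbs"
	}
	return filepath.Join(dataStreamDir, "agent", "stream", templatePath)
}

func main() {
	p := effectiveStreamTemplate("test/packages/with_links/data_stream/foo", "")
	if _, err := os.Stat(p); err != nil {
		fmt.Println("not found:", p)
		return
	}
	fmt.Println("found:", p)
}
```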