diff --git a/pkg/plugin_packager/decoder/fs.go b/pkg/plugin_packager/decoder/fs.go index 45080cb66..d5766aeae 100644 --- a/pkg/plugin_packager/decoder/fs.go +++ b/pkg/plugin_packager/decoder/fs.go @@ -125,12 +125,72 @@ func (d *FSPluginDecoder) Close() error { return nil } +// secureResolvePath securely resolves a path relative to a root directory. +// +// This function prevents path traversal attacks by validating that the resolved +// path stays within the root directory. It handles both forward slashes and +// OS-specific path separators, making it safe for cross-platform use. +// +// Parameters: +// - root: The base directory path that acts as a security boundary +// - name: A relative path (potentially with forward slashes) to resolve +// +// Returns: +// - The absolute, resolved path if it stays within root +// - An error if the path attempts to escape the root directory +// +// Security: This prevents attacks like "../../../etc/passwd" by computing +// the relative path from root to the target and rejecting any path that +// starts with ".." (indicating an escape attempt). +// +// Algorithm: +// 1. Join root with name, converting forward slashes to OS format +// 2. Clean the joined path to resolve any "." or ".." segments +// 3. Convert both root and target to absolute paths +// 4. Compute the relative path from root to target +// 5. If relative path starts with "..", reject as path traversal +// +// Example: +// root="/app/plugins", name="config/settings.yaml" -> "/app/plugins/config/settings.yaml" +// root="/app/plugins", name="../../../etc/passwd" -> error (path traversal) +func secureResolvePath(root, name string) (string, error) { + p := filepath.Join(root, filepath.FromSlash(name)) + clean := filepath.Clean(p) + rootAbs, err := filepath.Abs(root) + if err != nil { + return "", err + } + cleanAbs, err := filepath.Abs(clean) + if err != nil { + return "", err + } + rel, err := filepath.Rel(rootAbs, cleanAbs) + if err != nil { + return "", err + } + if rel == "." 
{ + return cleanAbs, nil + } + if strings.HasPrefix(rel, "..") { + return "", os.ErrPermission + } + return cleanAbs, nil +} + func (d *FSPluginDecoder) Stat(filename string) (fs.FileInfo, error) { - return os.Stat(filepath.Join(d.root, filename)) + abs, err := secureResolvePath(d.root, filename) + if err != nil { + return nil, err + } + return os.Stat(abs) } func (d *FSPluginDecoder) ReadFile(filename string) ([]byte, error) { - return os.ReadFile(filepath.Join(d.root, filename)) + abs, err := secureResolvePath(d.root, filename) + if err != nil { + return nil, err + } + return os.ReadFile(abs) } func (d *FSPluginDecoder) ReadDir(dirname string) ([]string, error) { @@ -158,7 +218,11 @@ func (d *FSPluginDecoder) ReadDir(dirname string) ([]string, error) { } func (d *FSPluginDecoder) FileReader(filename string) (io.ReadCloser, error) { - return os.Open(filepath.Join(d.root, filename)) + abs, err := secureResolvePath(d.root, filename) + if err != nil { + return nil, err + } + return os.Open(abs) } func (d *FSPluginDecoder) Signature() (string, error) { diff --git a/pkg/plugin_packager/decoder/helper.go b/pkg/plugin_packager/decoder/helper.go index 73d4e76f8..bd4ec38b4 100644 --- a/pkg/plugin_packager/decoder/helper.go +++ b/pkg/plugin_packager/decoder/helper.go @@ -4,11 +4,13 @@ import ( "errors" "fmt" "os" + "path" "path/filepath" "regexp" "strings" "github.com/langgenius/dify-plugin-daemon/pkg/entities/plugin_entities" + "github.com/langgenius/dify-plugin-daemon/pkg/utils/log" "github.com/langgenius/dify-plugin-daemon/pkg/utils/parser" ) @@ -38,8 +40,13 @@ func (p *PluginDecoderHelper) Manifest(decoder PluginDecoder) (plugin_entities.P // try to load plugins plugins := dec.Plugins for _, tool := range plugins.Tools { - // read yaml - pluginYaml, err := decoder.ReadFile(tool) + // read YAML + nTool, err := normalizeLogicalPath(tool) + if err != nil || nTool == "" { + log.Warn("skip invalid tool provider path", "path", tool, "reason", err) + continue + } + pluginYaml, err := decoder.ReadFile(nTool) if err != nil { return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read tool file: %s", tool)) } @@ -50,15 +57,20 @@ func (p *PluginDecoderHelper) Manifest(decoder PluginDecoder) (plugin_entities.P } // read tools - for _, tool_file := range pluginDec.ToolFiles { - toolFileContent, err := decoder.ReadFile(tool_file) + for _, toolFile := range pluginDec.ToolFiles { + nToolFile, err := normalizeLogicalPath(toolFile) + if err != nil || nToolFile == "" { + log.Warn("skip invalid tool file", "path", toolFile, "reason", err) + continue + } + toolFileContent, err := decoder.ReadFile(nToolFile) if err != nil { - return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read tool file: %s", tool_file)) + return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read tool file: %s", toolFile)) } toolFileDec, err := parser.UnmarshalYamlBytes[plugin_entities.ToolDeclaration](toolFileContent) if err != nil { - return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to unmarshal tool file: %s", tool_file)) + return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to unmarshal tool file: %s", toolFile)) } pluginDec.Tools = append(pluginDec.Tools, toolFileDec) @@ -69,7 +81,12 @@ func (p *PluginDecoderHelper) Manifest(decoder PluginDecoder) (plugin_entities.P for _, endpoint := range plugins.Endpoints { // read yaml - pluginYaml, err := decoder.ReadFile(endpoint) + nEndpoint, err := 
normalizeLogicalPath(endpoint) + if err != nil || nEndpoint == "" { + log.Warn("skip invalid endpoint provider path", "path", endpoint, "reason", err) + continue + } + pluginYaml, err := decoder.ReadFile(nEndpoint) if err != nil { return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read endpoint file: %s", endpoint)) } @@ -82,15 +99,20 @@ func (p *PluginDecoderHelper) Manifest(decoder PluginDecoder) (plugin_entities.P // read detailed endpoints endpointsFiles := pluginDec.EndpointFiles - for _, endpoint_file := range endpointsFiles { - endpointFileContent, err := decoder.ReadFile(endpoint_file) + for _, endpointFile := range endpointsFiles { + nEndpointFile, err := normalizeLogicalPath(endpointFile) + if err != nil || nEndpointFile == "" { + log.Warn("skip invalid endpoint file", "path", endpointFile, "reason", err) + continue + } + endpointFileContent, err := decoder.ReadFile(nEndpointFile) if err != nil { - return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read endpoint file: %s", endpoint_file)) + return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read endpoint file: %s", endpointFile)) } endpointFileDec, err := parser.UnmarshalYamlBytes[plugin_entities.EndpointDeclaration](endpointFileContent) if err != nil { - return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to unmarshal endpoint file: %s", endpoint_file)) + return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to unmarshal endpoint file: %s", endpointFile)) } pluginDec.Endpoints = append(pluginDec.Endpoints, endpointFileDec) @@ -101,7 +123,12 @@ func (p *PluginDecoderHelper) Manifest(decoder PluginDecoder) (plugin_entities.P for _, model := range plugins.Models { // read yaml - pluginYaml, err := decoder.ReadFile(model) + nModel, err := normalizeLogicalPath(model) + if err != nil || nModel == "" { + log.Warn("skip invalid model provider path", "path", model, "reason", err) + continue + } + pluginYaml, err := decoder.ReadFile(nModel) if err != nil { return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read model file: %s", model)) } @@ -115,114 +142,125 @@ func (p *PluginDecoderHelper) Manifest(decoder PluginDecoder) (plugin_entities.P if pluginDec.PositionFiles != nil { pluginDec.Position = &plugin_entities.ModelPosition{} - llmFileName, ok := pluginDec.PositionFiles["llm"] - if ok { - llmFile, err := decoder.ReadFile(llmFileName) - if err != nil { - return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read llm position file: %s", llmFileName)) - } - - position, err := parser.UnmarshalYamlBytes[[]string](llmFile) - if err != nil { - return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to unmarshal llm position file: %s", llmFileName)) + if v, ok := pluginDec.PositionFiles["llm"]; ok { + if pth, err := normalizeLogicalPath(v); err != nil || pth == "" { + log.Warn("skip invalid llm position file", "path", v, "reason", err) + } else { + data, err := decoder.ReadFile(pth) + if err != nil { + return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read llm position file: %s", v)) + } + pos, err := parser.UnmarshalYamlBytes[[]string](data) + if err != nil { + return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to unmarshal llm position file: %s", v)) + } + pluginDec.Position.LLM = &pos } - - pluginDec.Position.LLM = &position } - 
textEmbeddingFileName, ok := pluginDec.PositionFiles["text_embedding"] - if ok { - textEmbeddingFile, err := decoder.ReadFile(textEmbeddingFileName) - if err != nil { - return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read text embedding position file: %s", textEmbeddingFileName)) - } - - position, err := parser.UnmarshalYamlBytes[[]string](textEmbeddingFile) - if err != nil { - return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to unmarshal text embedding position file: %s", textEmbeddingFileName)) + if v, ok := pluginDec.PositionFiles["text_embedding"]; ok { + if pth, err := normalizeLogicalPath(v); err != nil || pth == "" { + log.Warn("skip invalid text_embedding position file", "path", v, "reason", err) + } else { + data, err := decoder.ReadFile(pth) + if err != nil { + return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read text embedding position file: %s", v)) + } + pos, err := parser.UnmarshalYamlBytes[[]string](data) + if err != nil { + return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to unmarshal text embedding position file: %s", v)) + } + pluginDec.Position.TextEmbedding = &pos } - - pluginDec.Position.TextEmbedding = &position } - rerankFileName, ok := pluginDec.PositionFiles["rerank"] - if ok { - rerankFile, err := decoder.ReadFile(rerankFileName) - if err != nil { - return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read rerank position file: %s", rerankFileName)) - } - - position, err := parser.UnmarshalYamlBytes[[]string](rerankFile) - if err != nil { - return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to unmarshal rerank position file: %s", rerankFileName)) + if v, ok := pluginDec.PositionFiles["rerank"]; ok { + if pth, err := normalizeLogicalPath(v); err != nil || pth == "" { + log.Warn("skip invalid rerank position file", "path", v, "reason", err) + } else { + data, err := decoder.ReadFile(pth) + if err != nil { + return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read rerank position file: %s", v)) + } + pos, err := parser.UnmarshalYamlBytes[[]string](data) + if err != nil { + return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to unmarshal rerank position file: %s", v)) + } + pluginDec.Position.Rerank = &pos } - - pluginDec.Position.Rerank = &position } - ttsFileName, ok := pluginDec.PositionFiles["tts"] - if ok { - ttsFile, err := decoder.ReadFile(ttsFileName) - if err != nil { - return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read tts position file: %s", ttsFileName)) - } - - position, err := parser.UnmarshalYamlBytes[[]string](ttsFile) - if err != nil { - return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to unmarshal tts position file: %s", ttsFileName)) + if v, ok := pluginDec.PositionFiles["tts"]; ok { + if pth, err := normalizeLogicalPath(v); err != nil || pth == "" { + log.Warn("skip invalid tts position file", "path", v, "reason", err) + } else { + data, err := decoder.ReadFile(pth) + if err != nil { + return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read tts position file: %s", v)) + } + pos, err := parser.UnmarshalYamlBytes[[]string](data) + if err != nil { + return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to unmarshal tts position file: %s", v)) + } + pluginDec.Position.TTS = &pos } - 
- pluginDec.Position.TTS = &position } - speech2textFileName, ok := pluginDec.PositionFiles["speech2text"] - if ok { - speech2textFile, err := decoder.ReadFile(speech2textFileName) - if err != nil { - return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read speech2text position file: %s", speech2textFileName)) - } - - position, err := parser.UnmarshalYamlBytes[[]string](speech2textFile) - if err != nil { - return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to unmarshal speech2text position file: %s", speech2textFileName)) + if v, ok := pluginDec.PositionFiles["speech2text"]; ok { + if pth, err := normalizeLogicalPath(v); err != nil || pth == "" { + log.Warn("skip invalid speech2text position file", "path", v, "reason", err) + } else { + data, err := decoder.ReadFile(pth) + if err != nil { + return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read speech2text position file: %s", v)) + } + pos, err := parser.UnmarshalYamlBytes[[]string](data) + if err != nil { + return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to unmarshal speech2text position file: %s", v)) + } + pluginDec.Position.Speech2text = &pos } - - pluginDec.Position.Speech2text = &position } - moderationFileName, ok := pluginDec.PositionFiles["moderation"] - if ok { - moderationFile, err := decoder.ReadFile(moderationFileName) - if err != nil { - return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read moderation position file: %s", moderationFileName)) - } - - position, err := parser.UnmarshalYamlBytes[[]string](moderationFile) - if err != nil { - return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to unmarshal moderation position file: %s", moderationFileName)) + if v, ok := pluginDec.PositionFiles["moderation"]; ok { + if pth, err := normalizeLogicalPath(v); err != nil || pth == "" { + log.Warn("skip invalid moderation position file", "path", v, "reason", err) + } else { + data, err := decoder.ReadFile(pth) + if err != nil { + return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read moderation position file: %s", v)) + } + pos, err := parser.UnmarshalYamlBytes[[]string](data) + if err != nil { + return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to unmarshal moderation position file: %s", v)) + } + pluginDec.Position.Moderation = &pos } - - pluginDec.Position.Moderation = &position } } // read models if err := decoder.Walk(func(filename, dir string) error { - modelPatterns := pluginDec.ModelFiles - // using glob to match if dir/filename is in models - modelFileName := filepath.Join(dir, filename) - if strings.HasSuffix(modelFileName, "_position.yaml") { + // Normalize walked relative path to forward slashes so matching is OS-independent + rel, _ := normalizeLogicalPath(filepath.ToSlash(filepath.Join(dir, filename))) + if strings.HasSuffix(rel, "_position.yaml") { return nil } - for _, model_pattern := range modelPatterns { - matched, err := filepath.Match(model_pattern, modelFileName) + // Normalize patterns to forward slashes and use POSIX-style matching + for _, modelPattern := range pluginDec.ModelFiles { + pat, err := normalizeLogicalPath(modelPattern) + if err != nil || pat == "" { + log.Warn("skip invalid model pattern", "pattern", modelPattern, "reason", err) + continue + } + matched, err := path.Match(pat, rel) if err != nil { return err } if matched { - // read model file - 
modelFile, err := decoder.ReadFile(modelFileName) + // Read using forward-slash path so both zip and fs decoders work + modelFile, err := decoder.ReadFile(rel) if err != nil { return err } @@ -233,6 +271,7 @@ func (p *PluginDecoderHelper) Manifest(decoder PluginDecoder) (plugin_entities.P } pluginDec.Models = append(pluginDec.Models, modelDec) + break } } @@ -245,8 +284,13 @@ func (p *PluginDecoderHelper) Manifest(decoder PluginDecoder) (plugin_entities.P } for _, agentStrategy := range plugins.AgentStrategies { - // read yaml - pluginYaml, err := decoder.ReadFile(agentStrategy) + // read yaml (manifest logical path) + nAgent, err := normalizeLogicalPath(agentStrategy) + if err != nil || nAgent == "" { + log.Warn("skip invalid agent strategy provider path", "path", agentStrategy, "reason", err) + continue + } + pluginYaml, err := decoder.ReadFile(nAgent) if err != nil { return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read agent strategy file: %s", agentStrategy)) } @@ -257,7 +301,12 @@ func (p *PluginDecoderHelper) Manifest(decoder PluginDecoder) (plugin_entities.P } for _, strategyFile := range pluginDec.StrategyFiles { - strategyFileContent, err := decoder.ReadFile(strategyFile) + nStrategy, err := normalizeLogicalPath(strategyFile) + if err != nil || nStrategy == "" { + log.Warn("skip invalid agent strategy file", "path", strategyFile, "reason", err) + continue + } + strategyFileContent, err := decoder.ReadFile(nStrategy) if err != nil { return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read agent strategy file: %s", strategyFile)) } @@ -274,8 +323,13 @@ func (p *PluginDecoderHelper) Manifest(decoder PluginDecoder) (plugin_entities.P } for _, datasource := range plugins.Datasources { - // read yaml - pluginYaml, err := decoder.ReadFile(datasource) + // read yaml (manifest logical path) + nDS, err := normalizeLogicalPath(datasource) + if err != nil || nDS == "" { + log.Warn("skip invalid datasource provider path", "path", datasource, "reason", err) + continue + } + pluginYaml, err := decoder.ReadFile(nDS) if err != nil { return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read datasource file: %s", datasource)) } @@ -286,7 +340,12 @@ func (p *PluginDecoderHelper) Manifest(decoder PluginDecoder) (plugin_entities.P } for _, datasourceFile := range pluginDec.DatasourceFiles { - datasourceFileContent, err := decoder.ReadFile(datasourceFile) + nDSFile, err := normalizeLogicalPath(datasourceFile) + if err != nil || nDSFile == "" { + log.Warn("skip invalid datasource file", "path", datasourceFile, "reason", err) + continue + } + datasourceFileContent, err := decoder.ReadFile(nDSFile) if err != nil { return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read datasource file: %s", datasourceFile)) } @@ -303,8 +362,13 @@ func (p *PluginDecoderHelper) Manifest(decoder PluginDecoder) (plugin_entities.P } for _, trigger := range plugins.Triggers { - // read yaml - pluginYaml, err := decoder.ReadFile(trigger) + // read yaml (manifest logical path) + nTrig, err := normalizeLogicalPath(trigger) + if err != nil || nTrig == "" { + log.Warn("skip invalid trigger provider path", "path", trigger, "reason", err) + continue + } + pluginYaml, err := decoder.ReadFile(nTrig) if err != nil { return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read trigger file: %s", trigger)) } @@ -315,15 +379,20 @@ func (p *PluginDecoderHelper) Manifest(decoder 
PluginDecoder) (plugin_entities.P } // read events - for _, event_file := range pluginDec.EventFiles { - eventFileContent, err := decoder.ReadFile(event_file) + for _, eventFile := range pluginDec.EventFiles { + nEvent, err := normalizeLogicalPath(eventFile) + if err != nil || nEvent == "" { + log.Warn("skip invalid event file", "path", eventFile, "reason", err) + continue + } + eventFileContent, err := decoder.ReadFile(nEvent) if err != nil { - return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read event file: %s", event_file)) + return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to read event file: %s", eventFile)) } eventFileDec, err := parser.UnmarshalYamlBytes[plugin_entities.EventDeclaration](eventFileContent) if err != nil { - return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to unmarshal event file: %s", event_file)) + return plugin_entities.PluginDeclaration{}, errors.Join(err, fmt.Errorf("failed to unmarshal event file: %s", eventFile)) } pluginDec.Events = append(pluginDec.Events, eventFileDec) diff --git a/pkg/plugin_packager/decoder/helper_fs_fromslash_test.go b/pkg/plugin_packager/decoder/helper_fs_fromslash_test.go new file mode 100644 index 000000000..dbe367c5f --- /dev/null +++ b/pkg/plugin_packager/decoder/helper_fs_fromslash_test.go @@ -0,0 +1,55 @@ +package decoder + +import ( + "io" + "os" + "path/filepath" + "testing" +) + +func TestFSDecoder_FromSlashBoundary(t *testing.T) { + root := t.TempDir() + // Create nested file using OS-native separators + p := filepath.Join(root, "dir", "sub", "file.txt") + if err := os.MkdirAll(filepath.Dir(p), 0o755); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(p, []byte("ok"), 0o644); err != nil { + t.Fatal(err) + } + + dec := &FSPluginDecoder{root: root} + if err := dec.Open(); err != nil { + t.Fatalf("init open: %v", err) + } + + // Use forward-slash logical path; decoder should convert via FromSlash at boundary + b, err := dec.ReadFile("dir/sub/file.txt") + if err != nil { + t.Fatalf("ReadFile with forward slashes: %v", err) + } + if string(b) != "ok" { + t.Fatalf("unexpected content: %q", string(b)) + } + + // Stat also accepts forward slashes + if _, err := dec.Stat("dir/sub/file.txt"); err != nil { + t.Fatalf("Stat with forward slashes: %v", err) + } + + // FileReader also accepts forward slashes + r, err := dec.FileReader("dir/sub/file.txt") + if err != nil { + t.Fatalf("FileReader with forward slashes: %v", err) + } + defer r.Close() + data, _ := io.ReadAll(r) + if string(data) != "ok" { + t.Fatalf("unexpected reader content: %q", string(data)) + } + + // Negative: traversal should fail at boundary + if _, err := dec.ReadFile("../file.txt"); err == nil { + t.Fatalf("expected error for traversal read, got nil") + } +} diff --git a/pkg/plugin_packager/decoder/helper_models_test.go b/pkg/plugin_packager/decoder/helper_models_test.go new file mode 100644 index 000000000..1d1168b85 --- /dev/null +++ b/pkg/plugin_packager/decoder/helper_models_test.go @@ -0,0 +1,134 @@ +package decoder + +import ( + "archive/zip" + "bytes" + "os" + "path/filepath" + "testing" +) + +// writeFile writes a file ensuring parent dirs exist. 
+func writeFile(t *testing.T, root, rel string, data []byte) { + t.Helper() + p := filepath.Join(root, rel) + if err := os.MkdirAll(filepath.Dir(p), 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + if err := os.WriteFile(p, data, 0o644); err != nil { + t.Fatalf("write %s: %v", rel, err) + } +} + +func minimalManifest(modelProviderPath string) []byte { + return []byte("" + + "version: \"0.0.1\"\n" + + "type: plugin\n" + + "author: test\n" + + "name: demo\n" + + "label:\n en_US: demo\n" + + "description:\n en_US: demo\n" + + "icon: icon.svg\n" + + "resource:\n memory: 134217728\n" + + "plugins:\n models:\n - '" + modelProviderPath + "'\n" + + "meta:\n version: \"0.0.1\"\n arch: [amd64]\n runner:\n language: python\n version: \"3.10\"\n entrypoint: run.sh\n") +} + +func minimalProviderYAML(useWindowsPaths bool) []byte { + pattern := "models/llm/*.yaml" + pos := "positions/llm_position.yaml" + if useWindowsPaths { + pattern = "models\\llm\\*.yaml" + pos = "positions\\llm_position.yaml" + } + return []byte("" + + "provider: demo\n" + + "label:\n en_US: demo\n" + + "supported_model_types:\n - llm\n" + + "configurate_methods:\n - predefined-model\n" + + "models:\n llm:\n position: '" + pos + "'\n predefined:\n - '" + pattern + "'\n") +} + +func minimalModel(id string) []byte { + return []byte("" + + "model: " + id + "\n" + + "label:\n en_US: " + id + "\n" + + "model_type: llm\n") +} + +func TestManifestModelDiscovery_FSDecoder_UnixAndWindowsPatterns(t *testing.T) { + root := t.TempDir() + // Common files + writeFile(t, root, "icon.svg", []byte("x")) + + // UNIX-style provider + writeFile(t, root, "manifest.yaml", minimalManifest("provider_unix.yaml")) + writeFile(t, root, "provider_unix.yaml", minimalProviderYAML(false)) + writeFile(t, root, filepath.Join("models", "llm", "a.yaml"), minimalModel("a")) + writeFile(t, root, filepath.Join("models", "llm", "b.yaml"), minimalModel("b")) + writeFile(t, root, filepath.Join("positions", "llm_position.yaml"), []byte("- a\n- b\n")) + + dec, err := NewFSPluginDecoder(root) + if err != nil { + t.Fatalf("FS decoder (unix) init: %v", err) + } + m, err := dec.Manifest() + if err != nil { + t.Fatalf("FS decoder (unix) manifest: %v", err) + } + if m.Model == nil || len(m.Model.Models) != 2 { + t.Fatalf("expected 2 models for unix patterns, got %d", len(m.Model.Models)) + } + + // WINDOWS-style provider (manifest uses backslash in plugins.models entry) + writeFile(t, root, "manifest.yaml", minimalManifest("provider\\win.yaml")) + writeFile(t, root, filepath.Join("provider", "win.yaml"), minimalProviderYAML(true)) + + dec2, err := NewFSPluginDecoder(root) + if err != nil { + t.Fatalf("FS decoder (win) init: %v", err) + } + m2, err := dec2.Manifest() + if err != nil { + t.Fatalf("FS decoder (win) manifest: %v", err) + } + if m2.Model == nil || len(m2.Model.Models) != 2 { + t.Fatalf("expected 2 models for windows patterns, got %d", len(m2.Model.Models)) + } +} + +func TestManifestModelDiscovery_ZipDecoder_WindowsPatterns(t *testing.T) { + // Build an in-memory zip with forward-slash filenames, but provider uses windows backslashes + buf := new(bytes.Buffer) + zw := zip.NewWriter(buf) + add := func(name string, data []byte) { + f, err := zw.Create(name) + if err != nil { + t.Fatalf("zip create %s: %v", name, err) + } + if _, err := f.Write(data); err != nil { + t.Fatalf("zip write %s: %v", name, err) + } + } + add("icon.svg", []byte("x")) + add("manifest.yaml", minimalManifest("provider\\win.yaml")) + add("provider/win.yaml", minimalProviderYAML(true)) // 
windows-style paths in provider + add("models/llm/a.yaml", minimalModel("a")) + add("models/llm/b.yaml", minimalModel("b")) + add("positions/llm_position.yaml", []byte("- a\n- b\n")) + if err := zw.Close(); err != nil { + t.Fatalf("zip close: %v", err) + } + + dec, err := NewZipPluginDecoder(buf.Bytes()) + if err != nil { + t.Fatalf("Zip decoder init: %v", err) + } + m, err := dec.Manifest() + if err != nil { + t.Fatalf("Zip decoder manifest: %v", err) + } + if m.Model == nil || len(m.Model.Models) != 2 { + t.Fatalf("expected 2 models for windows patterns in zip, got %d", len(m.Model.Models)) + } +} diff --git a/pkg/plugin_packager/decoder/helper_providers_test.go b/pkg/plugin_packager/decoder/helper_providers_test.go new file mode 100644 index 000000000..8b492d03c --- /dev/null +++ b/pkg/plugin_packager/decoder/helper_providers_test.go @@ -0,0 +1,248 @@ +package decoder + +import ( + "archive/zip" + "bytes" + "path/filepath" + "testing" +) + +func manifestWith(kind, providerPath string) []byte { + return []byte("" + + "version: \"0.0.1\"\n" + + "type: plugin\n" + + "author: test\n" + + "name: demo\n" + + "label:\n en_US: demo\n" + + "description:\n en_US: demo\n" + + "icon: icon.svg\n" + + "resource:\n memory: 134217728\n" + + "plugins:\n " + kind + ":\n - \"" + providerPath + "\"\n" + + "meta:\n version: \"0.0.1\"\n arch: [amd64]\n runner:\n language: python\n version: \"3.10\"\n entrypoint: run.sh\n") +} + +// ----- Tool provider tests ----- +func toolProviderYAML(win bool) []byte { + p := "tools/tool1.yaml" + if win { + p = "tools\\tool1.yaml" + } + return []byte("" + + "identity:\n author: test\n name: tp\n label:\n en_US: tp\n icon: icon.svg\n" + + "tools:\n - '" + p + "'\n") +} + +func toolYAML() []byte { + return []byte("" + + "identity:\n author: test\n name: t1\n label:\n en_US: t1\n" + + "description:\n human:\n en_US: x\n llm: x\nparameters: []\n") +} + +func TestToolProvider_WindowsPaths_FSAndZip(t *testing.T) { + root := t.TempDir() + writeFile(t, root, "icon.svg", []byte("x")) + // FS unix provider + writeFile(t, root, "manifest.yaml", manifestWith("tools", "tool_provider.yaml")) + writeFile(t, root, "tool_provider.yaml", toolProviderYAML(false)) + writeFile(t, root, filepath.Join("tools", "tool1.yaml"), toolYAML()) + + dec, err := NewFSPluginDecoder(root) + if err != nil { + t.Fatalf("fs unix init: %v", err) + } + m, err := dec.Manifest() + if err != nil { + t.Fatalf("fs unix manifest: %v", err) + } + if m.Tool == nil || len(m.Tool.Tools) != 1 { + t.Fatalf("want 1 tool, got %d", len(m.Tool.Tools)) + } + + // FS windows provider + writeFile(t, root, "manifest.yaml", manifestWith("tools", "tool_provider_win.yaml")) + writeFile(t, root, "tool_provider_win.yaml", toolProviderYAML(true)) + dec2, err := NewFSPluginDecoder(root) + if err != nil { + t.Fatalf("fs win init: %v", err) + } + m2, err := dec2.Manifest() + if err != nil { + t.Fatalf("fs win manifest: %v", err) + } + if m2.Tool == nil || len(m2.Tool.Tools) != 1 { + t.Fatalf("want 1 tool (win), got %d", len(m2.Tool.Tools)) + } + + // ZIP windows provider + buf := new(bytes.Buffer) + zw := zip.NewWriter(buf) + add := func(name string, data []byte) { + f, err := zw.Create(name) + if err != nil { + t.Fatalf("zip create %s: %v", name, err) + } + if _, err := f.Write(data); err != nil { + t.Fatalf("zip write %s: %v", name, err) + } + } + add("icon.svg", []byte("x")) + add("manifest.yaml", manifestWith("tools", "tool_provider.yaml")) + add("tool_provider.yaml", toolProviderYAML(true)) + add("tools/tool1.yaml", toolYAML()) + if 
err := zw.Close(); err != nil { + t.Fatalf("zip close: %v", err) + } + + zdec, err := NewZipPluginDecoder(buf.Bytes()) + if err != nil { + t.Fatalf("zip init: %v", err) + } + zm, err := zdec.Manifest() + if err != nil { + t.Fatalf("zip manifest: %v", err) + } + if zm.Tool == nil || len(zm.Tool.Tools) != 1 { + t.Fatalf("want 1 tool (zip), got %d", len(zm.Tool.Tools)) + } +} + +// ----- Endpoint provider tests ----- +func endpointProviderYAML(win bool) []byte { + p := "endpoints/get.yaml" + if win { + p = "endpoints\\get.yaml" + } + return []byte("endpoints:\n - '" + p + "'\n") +} + +func endpointYAML() []byte { return []byte("path: /hello\nmethod: GET\n") } + +func TestEndpointProvider_WindowsPaths_FSAndZip(t *testing.T) { + root := t.TempDir() + writeFile(t, root, "manifest.yaml", manifestWith("endpoints", "endpoint_provider.yaml")) + writeFile(t, root, "endpoint_provider.yaml", endpointProviderYAML(false)) + writeFile(t, root, filepath.Join("endpoints", "get.yaml"), endpointYAML()) + + dec, err := NewFSPluginDecoder(root) + if err != nil { + t.Fatalf("fs unix init: %v", err) + } + m, err := dec.Manifest() + if err != nil { + t.Fatalf("fs unix manifest: %v", err) + } + if m.Endpoint == nil || len(m.Endpoint.Endpoints) != 1 { + t.Fatalf("want 1 endpoint, got %d", len(m.Endpoint.Endpoints)) + } + + writeFile(t, root, "manifest.yaml", manifestWith("endpoints", "endpoint_provider_win.yaml")) + writeFile(t, root, "endpoint_provider_win.yaml", endpointProviderYAML(true)) + dec2, err := NewFSPluginDecoder(root) + if err != nil { + t.Fatalf("fs win init: %v", err) + } + m2, err := dec2.Manifest() + if err != nil { + t.Fatalf("fs win manifest: %v", err) + } + if m2.Endpoint == nil || len(m2.Endpoint.Endpoints) != 1 { + t.Fatalf("want 1 endpoint (win), got %d", len(m2.Endpoint.Endpoints)) + } + + buf := new(bytes.Buffer) + zw := zip.NewWriter(buf) + add := func(name string, data []byte) { f, _ := zw.Create(name); f.Write(data) } + add("manifest.yaml", manifestWith("endpoints", "endpoint_provider.yaml")) + add("endpoint_provider.yaml", endpointProviderYAML(true)) + add("endpoints/get.yaml", endpointYAML()) + _ = zw.Close() + zdec, err := NewZipPluginDecoder(buf.Bytes()) + if err != nil { + t.Fatalf("zip init: %v", err) + } + zm, err := zdec.Manifest() + if err != nil { + t.Fatalf("zip manifest: %v", err) + } + if zm.Endpoint == nil || len(zm.Endpoint.Endpoints) != 1 { + t.Fatalf("want 1 endpoint (zip), got %d", len(zm.Endpoint.Endpoints)) + } +} + +// ----- Trigger/Datasource/AgentStrategy smoke test with Windows paths ----- +func triggerProviderYAML(win bool) []byte { + p := "triggers/event.yaml" + if win { + p = "triggers\\event.yaml" + } + return []byte("identity:\n author: a\n name: tp\n label:\n en_US: t\n icon: icon.svg\nsubscription_schema: []\n" + + "events:\n - '" + p + "'\n") +} + +func triggerEventYAML() []byte { + return []byte("identity:\n author: a\n name: e\n label:\n en_US: e\n" + + "description:\n en_US: d\n") +} + +func datasourceProviderYAML(win bool) []byte { + p := "datasources/d.yaml" + if win { + p = "datasources\\d.yaml" + } + return []byte("identity:\n author: a\n name: dp\n label:\n en_US: d\n icon: icon.svg\nprovider_type: website_crawl\ncredentials_schema: []\n" + + "datasources:\n - '" + p + "'\n") +} + +func datasourceYAML() []byte { + return []byte("identity:\n author: a\n name: d\n label:\n en_US: d\n" + + "description:\n en_US: d\nparameters: []\n") +} + +func agentProviderYAML(win bool) []byte { + p := "strategies/s.yaml" + if win { + p = "strategies\\s.yaml" + } + 
return []byte("identity:\n author: a\n name: ap\n label:\n en_US: a\n icon: icon.svg\n" + + "strategies:\n - '" + p + "'\n") +} + +func agentStrategyYAML() []byte { + return []byte("identity:\n author: a\n name: s\n label:\n en_US: s\n" + + "description:\n en_US: d\nparameters: []\nfeatures: []\n") +} + +func TestOtherProviders_WindowsPaths_Zip(t *testing.T) { + buf := new(bytes.Buffer) + zw := zip.NewWriter(buf) + add := func(name string, data []byte) { f, _ := zw.Create(name); f.Write(data) } + + add("icon.svg", []byte("x")) + add("manifest.yaml", []byte(""+ + "version: \"0.0.1\"\ntype: plugin\nauthor: test\nname: demo\nlabel:\n en_US: demo\ndescription:\n en_US: demo\nicon: icon.svg\nresource:\n memory: 134217728\nplugins:\n triggers:\n - trigger_provider.yaml\n datasources:\n - datasource_provider.yaml\n agent_strategies:\n - agent_provider.yaml\nmeta:\n version: \"0.0.1\"\n arch: [amd64]\n runner:\n language: python\n version: \"3.10\"\n entrypoint: run.sh\n")) + add("trigger_provider.yaml", triggerProviderYAML(true)) + add("datasource_provider.yaml", datasourceProviderYAML(true)) + add("agent_provider.yaml", agentProviderYAML(true)) + add("triggers/event.yaml", triggerEventYAML()) + add("datasources/d.yaml", datasourceYAML()) + add("strategies/s.yaml", agentStrategyYAML()) + _ = zw.Close() + + dec, err := NewZipPluginDecoder(buf.Bytes()) + if err != nil { + t.Fatalf("zip init: %v", err) + } + m, err := dec.Manifest() + if err != nil { + t.Fatalf("zip manifest: %v", err) + } + if m.Trigger == nil || len(m.Trigger.Events) != 1 { + t.Fatalf("want 1 event, got %d", len(m.Trigger.Events)) + } + if m.Datasource == nil || len(m.Datasource.Datasources) != 1 { + t.Fatalf("want 1 datasource, got %d", len(m.Datasource.Datasources)) + } + if m.AgentStrategy == nil || len(m.AgentStrategy.Strategies) != 1 { + t.Fatalf("want 1 strategy, got %d", len(m.AgentStrategy.Strategies)) + } +} diff --git a/pkg/plugin_packager/decoder/pathutil.go b/pkg/plugin_packager/decoder/pathutil.go new file mode 100644 index 000000000..4ca90d12d --- /dev/null +++ b/pkg/plugin_packager/decoder/pathutil.go @@ -0,0 +1,59 @@ +package decoder + +import ( + "fmt" + "path" + "strings" +) + +// normalizeLogicalPath converts any manifest-provided path (which may contain +// either '/' or '\\', and may include redundant segments) into a normalized +// forward-slash, relative form suitable for OS-independent matching (path.Match) +// and for decoders (Zip requires '/', FS will convert at boundary). +// It returns an error if the input is an absolute path, has a Windows drive letter, +// or attempts to traverse outside the package (.. prefix). +func normalizeLogicalPath(p string) (string, error) { + // Trim surrounding spaces + p = strings.TrimSpace(p) + if p == "" { + return "", nil + } + // Treat backslash as a path separator from manifest authors (Windows habit) + p = strings.ReplaceAll(p, "\\", "/") + orig := p + + // Reject Windows drive-letter paths like C:/... or C:... + if len(p) >= 2 && p[1] == ':' { + c := p[0] + if (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') { + return "", fmt.Errorf("invalid manifest path (drive letter): %q", p) + } + } + + // Clean collapses //, ./, ../ etc using POSIX semantics over '/' + p = path.Clean(p) + + // Reject absolute paths explicitly + if strings.HasPrefix(p, "/") { + return "", fmt.Errorf("invalid manifest path (absolute): %q", p) + } + + // Prevent path traversal: after cleaning, any leading ".." means escaping base + if p == ".." 
|| strings.HasPrefix(p, "../") { + return "", fmt.Errorf("invalid manifest path (traversal): %q", p) + } + + // path.Clean("") => "."; if it collapses to root due to parent refs, reject + if p == "." { + // detect if original contained parent traversal segments + if strings.Contains(orig, "../") || strings.HasSuffix(orig, "/..") || strings.HasPrefix(orig, "../") || orig == ".." || strings.Contains(orig, "/../") { + return "", fmt.Errorf("invalid manifest path (collapses to root via traversal): %q", orig) + } + p = "" + } + + // Keep it relative: drop leading './' + p = strings.TrimPrefix(p, "./") + + return p, nil +} diff --git a/pkg/plugin_packager/decoder/pathutil_test.go b/pkg/plugin_packager/decoder/pathutil_test.go new file mode 100644 index 000000000..e1726059b --- /dev/null +++ b/pkg/plugin_packager/decoder/pathutil_test.go @@ -0,0 +1,41 @@ +package decoder + +import "testing" + +func TestNormalizeLogicalPath(t *testing.T) { + cases := []struct { + in string + out string + wantErr bool + }{ + {"", "", false}, + {" ", "", false}, + {"models\\llm\\*.yaml", "models/llm/*.yaml", false}, + {"models//llm/../llm/a.yaml", "models/llm/a.yaml", false}, + {"./models\\llm\\..\\b.yaml", "models/b.yaml", false}, + {"/absolute/path/file.yaml", "", true}, + {"C:\\models\\x.yaml", "", true}, + {"C:models\\x.yaml", "", true}, + {".\\dir\\file", "dir/file", false}, + {"../outside/file.yaml", "", true}, + {"a/..", "", true}, + {"a/./b", "a/b", false}, + {"a//b///c", "a/b/c", false}, + } + + for i, c := range cases { + got, err := normalizeLogicalPath(c.in) + if c.wantErr { + if err == nil { + t.Fatalf("case %d: expected error for %q, got nil with %q", i, c.in, got) + } + continue + } + if err != nil { + t.Fatalf("case %d: unexpected error for %q: %v", i, c.in, err) + } + if got != c.out { + t.Fatalf("case %d: normalizeLogicalPath(%q) = %q, want %q", i, c.in, got, c.out) + } + } +} diff --git a/pkg/utils/cache/helper/keys.go b/pkg/utils/cache/helper/keys.go index 816d8cc2c..32fd92891 100644 --- a/pkg/utils/cache/helper/keys.go +++ b/pkg/utils/cache/helper/keys.go @@ -22,4 +22,4 @@ func EndpointCacheKey(hookId string) string { }, ":", ) -} \ No newline at end of file +} diff --git a/pkg/utils/mapping/sync.go b/pkg/utils/mapping/sync.go index 58fe48d83..f46b64f1a 100644 --- a/pkg/utils/mapping/sync.go +++ b/pkg/utils/mapping/sync.go @@ -55,7 +55,7 @@ func (m *Map[K, V]) Range(f func(key K, value V) bool) { func (m *Map[K, V]) LoadOrStore(key K, value V) (actual V, loaded bool) { m.mu.Lock() defer m.mu.Unlock() - + v, loaded := m.store.LoadOrStore(key, value) actual = v.(V) if !loaded { diff --git a/pkg/utils/mapping/sync_test.go b/pkg/utils/mapping/sync_test.go index 6676d36cf..be3435d1d 100644 --- a/pkg/utils/mapping/sync_test.go +++ b/pkg/utils/mapping/sync_test.go @@ -56,7 +56,7 @@ func TestConcurrentAccess(t *testing.T) { var wg sync.WaitGroup wg.Add(workers) - + for i := 0; i < workers; i++ { go func(i int) { defer wg.Done() @@ -78,7 +78,7 @@ func TestLoadOrStore(t *testing.T) { m := Map[string, interface{}]{} // First store - val, loaded := m.LoadOrStore("data", []byte{1,2,3}) + val, loaded := m.LoadOrStore("data", []byte{1, 2, 3}) if loaded || val.([]byte)[0] != 1 { t.Error("Initial LoadOrStore failed") } @@ -90,8 +90,6 @@ func TestLoadOrStore(t *testing.T) { } } - - // TestEdgeCases covers special scenarios func TestEdgeCases(t *testing.T) { t.Parallel() @@ -108,4 +106,4 @@ func TestEdgeCases(t *testing.T) { if m.Len() != 0 { t.Error("Clear failed to reset map") } -} \ No newline at end of file +}