diff --git a/.cursor/rules/use-bun-instead-of-node-vite-npm-pnpm.mdc b/.cursor/rules/use-bun-instead-of-node-vite-npm-pnpm.mdc new file mode 100644 index 000000000..b8100b77e --- /dev/null +++ b/.cursor/rules/use-bun-instead-of-node-vite-npm-pnpm.mdc @@ -0,0 +1,111 @@ +--- +description: Use Bun instead of Node.js, npm, pnpm, or vite. +globs: "*.ts, *.tsx, *.html, *.css, *.js, *.jsx, package.json" +alwaysApply: false +--- + +Default to using Bun instead of Node.js. + +- Use `bun ` instead of `node ` or `ts-node ` +- Use `bun test` instead of `jest` or `vitest` +- Use `bun build ` instead of `webpack` or `esbuild` +- Use `bun install` instead of `npm install` or `yarn install` or `pnpm install` +- Use `bun run + + +``` + +With the following `frontend.tsx`: + +```tsx#frontend.tsx +import React from "react"; + +// import .css files directly and it works +import './index.css'; + +import { createRoot } from "react-dom/client"; + +const root = createRoot(document.body); + +export default function Frontend() { + return

    <h1>Hello, world!</h1>
; +} + +root.render(); +``` + +Then, run index.ts + +```sh +bun --hot ./index.ts +``` + +For more information, read the Bun API docs in `node_modules/bun-types/docs/**.md`. diff --git a/.github/scripts/version-bump.sh b/.github/scripts/version-bump.sh index 095d12795..fc3166190 100755 --- a/.github/scripts/version-bump.sh +++ b/.github/scripts/version-bump.sh @@ -192,8 +192,8 @@ main() { # Always run formatter to ensure consistent formatting echo "πŸ”§ Running formatter to ensure consistent formatting..." - if command -v bun >/dev/null 2>&1; then - bun fmt >/dev/null 2>&1 || echo "⚠️ Warning: bun fmt failed, but continuing..." + if command -v bun > /dev/null 2>&1; then + bun fmt > /dev/null 2>&1 || echo "⚠️ Warning: bun fmt failed, but continuing..." else echo "⚠️ Warning: bun not found, skipping formatting" fi diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3b2b4d1e9..c40708dc2 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -394,10 +394,33 @@ Example: `https://github.com/coder/registry/compare/main...your-branch?template= ### Every Template Must Have - `main.tf` - Complete Terraform configuration -- `README.md` - Documentation with frontmatter +- `README.md` - Documentation with required frontmatter and sections Templates don't require test files like modules do, but should be manually tested before submission. +Template README files must include: + +1. Frontmatter with: + ```yaml + --- + display_name: "Template Name" # Required - Name shown on Registry website + description: "What it does" # Required - Brief description + icon: "../../../../.icons/platform.svg" # Required - Icon path + verified: false # Optional - Set by maintainers only + tags: ["platform", "use-case"] # Required - Descriptive tags + platform: "aws" # Required - Infrastructure platform + requirements: ["aws-cli"] # Required - Prerequisites + workload: "development" # Required - Type of workload + --- + ``` + +2. Required sections: + - Prerequisites - Required setup and dependencies + - Infrastructure/Resources - What gets provisioned + - Usage/Examples - How to use the template + - Cost and Permissions - Resource costs and required permissions + - Variables - All configuration options + ### README Frontmatter Module README frontmatter must include: diff --git a/README.md b/README.md index 23746bd61..b3535018b 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,25 @@ More information [about Coder Modules can be found here](https://coder.com/docs/ The easiest way to discover new modules and templates is by visiting [the official Coder Registry website](https://registry.coder.com/). The website is a full mirror of the Coder Registry repo, and it is where .tar versions of the various resources can be downloaded from, for use within your Coder deployment. -Note that while Coder has a baseline set of requirements for allowing an external PR to be published, Coder cannot vouch for the validity or functionality of a resource until that resource has been flagged with the `verified` status. [All modules under the Coder namespace](https://github.com/coder/registry/tree/main/registry/coder) are automatically verified. +### Quality Standards + +All modules and templates must meet our quality standards to be published: + +1. **Validation**: All READMEs must pass automated validation: + - Required frontmatter fields + - Proper document structure + - Required sections based on type (module vs template) + +2. 
**Verification**: While we validate submissions, we can't verify functionality until marked as `verified`: + - [All modules under the Coder namespace](https://github.com/coder/registry/tree/main/registry/coder) are automatically verified + - Community submissions require review and testing + - Look for the `verified` flag in frontmatter + +3. **Documentation**: Clear documentation is required: + - Prerequisites and setup requirements + - Infrastructure details and costs + - Usage examples and variables + - Platform requirements and permissions ### Getting started with modules diff --git a/cmd/readmevalidation/coderresources.go b/cmd/readmevalidation/coderresources.go index d919350fa..d3a66cb36 100644 --- a/cmd/readmevalidation/coderresources.go +++ b/cmd/readmevalidation/coderresources.go @@ -211,6 +211,20 @@ func validateCoderResourceReadme(rm coderResourceReadme) []error { for _, err := range validateCoderResourceIconURL(rm.frontmatter.IconURL) { errs = append(errs, addFilePathToError(rm.filePath, err)) } + + // For templates, perform additional validation + if rm.resourceType == "templates" { + templateReadme, err := parseTemplateReadme(readme{ + filePath: rm.filePath, + rawText: rm.body, + }) + if err != nil { + errs = append(errs, err) + } else { + templateErrs := validateTemplateReadme(templateReadme) + errs = append(errs, templateErrs...) + } + } return errs } diff --git a/cmd/readmevalidation/main.go b/cmd/readmevalidation/main.go index cce66df9d..cafbf6a45 100644 --- a/cmd/readmevalidation/main.go +++ b/cmd/readmevalidation/main.go @@ -31,9 +31,13 @@ func main() { if err != nil { errs = append(errs, err) } - err = validateAllCoderResourceFilesOfType("modules") - if err != nil { - errs = append(errs, err) + + // Validate both modules and templates + for _, resourceType := range supportedResourceTypes { + err = validateAllCoderResourceFilesOfType(resourceType) + if err != nil { + errs = append(errs, err) + } } if len(errs) == 0 { diff --git a/cmd/readmevalidation/templatelinter.go b/cmd/readmevalidation/templatelinter.go new file mode 100644 index 000000000..3415449e2 --- /dev/null +++ b/cmd/readmevalidation/templatelinter.go @@ -0,0 +1,203 @@ +package main + +import ( + "bufio" + "regexp" + "strings" + + "golang.org/x/xerrors" +) + +// sectionContent defines expected content and validation rules for each section +type sectionContent struct { + required bool + minLines int + patterns []string + suggestions []string +} + +// sectionValidation maps section names to their content requirements +var sectionValidation = map[string]sectionContent{ + "Prerequisites": { + required: true, + minLines: 3, + patterns: []string{ + `^[-*]\s+\w+`, // Bullet points + }, + suggestions: []string{ + "List all required tools and dependencies", + "Specify minimum versions if applicable", + "Include links to installation guides", + }, + }, + "Infrastructure": { + required: true, + minLines: 5, + patterns: []string{ + `instance|machine|container|cluster|resource`, // Resource types + `\d+\s*(GB|MB|CPU|core)`, // Resource specifications + }, + suggestions: []string{ + "Detail all infrastructure components", + "Include resource specifications", + "List any dependencies between resources", + }, + }, + "Usage": { + required: true, + minLines: 5, + patterns: []string{ + "```", // Code blocks + `^\d+\.\s+\w+`, // Numbered steps + }, + suggestions: []string{ + "Provide step-by-step instructions", + "Include code examples", + "Show common customization options", + }, + }, + "Cost and Permissions": { + required: 
true, + minLines: 4, + patterns: []string{ + `\$|\bUSD\b|cost`, // Cost references + `permission|role|policy|access`, // Permission references + }, + suggestions: []string{ + "Estimate hourly/monthly costs", + "List required permissions/roles", + "Include cost optimization tips", + }, + }, + "Variables": { + required: true, + minLines: 5, + patterns: []string{ + `^\|\s*\w+\s*\|`, // Table format + `type\s*=|description\s*=`, // Variable definitions + }, + suggestions: []string{ + "Document all variables in a table", + "Include type and description", + "Provide default values", + }, + }, +} + +type lintResult struct { + section string + content string + errors []string + suggestions []string +} + +// lintSection validates the content of a specific section +func lintSection(section, content string) lintResult { + result := lintResult{ + section: section, + content: content, + } + + validation, exists := sectionValidation[section] + if !exists { + return result + } + + // Check minimum length + lines := strings.Split(strings.TrimSpace(content), "\n") + if len(lines) < validation.minLines { + result.errors = append(result.errors, + xerrors.Errorf("section %q must have at least %d lines of content", + section, validation.minLines).Error()) + } + + // Check required patterns + for _, pattern := range validation.patterns { + re := regexp.MustCompile(pattern) + found := false + for _, line := range lines { + if re.MatchString(line) { + found = true + break + } + } + if !found { + result.errors = append(result.errors, + xerrors.Errorf("section %q missing required content matching %q", + section, pattern).Error()) + } + } + + // Add improvement suggestions + result.suggestions = validation.suggestions + + return result +} + +// lintTemplateReadme performs detailed content validation of template README sections +func lintTemplateReadme(body string) []lintResult { + var results []lintResult + + currentSection := "" + var sectionContent strings.Builder + + scanner := bufio.NewScanner(strings.NewReader(body)) + for scanner.Scan() { + line := scanner.Text() + + // Check for section headers + if strings.HasPrefix(line, "## ") { + // Process previous section if exists + if currentSection != "" { + results = append(results, + lintSection(currentSection, sectionContent.String())) + sectionContent.Reset() + } + + currentSection = strings.TrimPrefix(line, "## ") + currentSection = strings.TrimSpace(currentSection) + continue + } + + if currentSection != "" { + sectionContent.WriteString(line + "\n") + } + } + + // Process the last section + if currentSection != "" { + results = append(results, + lintSection(currentSection, sectionContent.String())) + } + + return results +} + +// formatLintResults returns a formatted string of lint results +func formatLintResults(results []lintResult) string { + var output strings.Builder + + output.WriteString("Template README Lint Results:\n\n") + + for _, result := range results { + output.WriteString("## " + result.section + "\n") + + if len(result.errors) > 0 { + output.WriteString("\nErrors:\n") + for _, err := range result.errors { + output.WriteString("- " + err + "\n") + } + } + + if len(result.suggestions) > 0 { + output.WriteString("\nSuggestions:\n") + for _, suggestion := range result.suggestions { + output.WriteString("- " + suggestion + "\n") + } + } + + output.WriteString("\n") + } + + return output.String() +} diff --git a/cmd/readmevalidation/templatelinter_test.go b/cmd/readmevalidation/templatelinter_test.go new file mode 100644 index 000000000..195647e2a 
--- /dev/null +++ b/cmd/readmevalidation/templatelinter_test.go @@ -0,0 +1,158 @@ +package main + +import ( + "strings" + "testing" +) + +func TestLintSection(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + section string + content string + wantErrors int + }{ + { + name: "valid prerequisites section", + section: "Prerequisites", + content: ` +- AWS CLI v2.0 or later +- Terraform v1.0 or later +- Docker installed locally +Links to installation guides: +- AWS CLI: https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html +`, + wantErrors: 0, + }, + { + name: "invalid prerequisites - too short", + section: "Prerequisites", + content: "- AWS CLI\n", + wantErrors: 1, + }, + { + name: "valid infrastructure section", + section: "Infrastructure", + content: ` +This template provisions the following resources: + +* EC2 instance (2 CPU, 8GB RAM) +* EBS volume (100GB) +* Security group for workspace access +* IAM role for workspace permissions + +All resources are created in your specified AWS region. +`, + wantErrors: 0, + }, + { + name: "valid variables section with table", + section: "Variables", + content: ` +| Name | Type | Description | Default | +|------|------|-------------|---------| +| instance_type | string | EC2 instance type | t3.large | +| region | string | AWS region | us-east-1 | +| volume_size | number | Size of root volume in GB | 100 | +`, + wantErrors: 0, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + result := lintSection(tt.section, tt.content) + + if len(result.errors) != tt.wantErrors { + t.Errorf("lintSection() got %d errors, want %d\nerrors: %v", + len(result.errors), tt.wantErrors, result.errors) + } + }) + } +} + +func TestLintTemplateReadme(t *testing.T) { + t.Parallel() + + validReadme := `# AWS Development Template + +Complete development environment on AWS. + +## Prerequisites +- AWS CLI v2.0 or later +- Terraform v1.0 or later +- Docker installed locally + +## Infrastructure +This template provisions: +* EC2 instance (2 CPU, 8GB RAM) +* EBS volume (100GB) +* Security group for workspace access + +## Usage +1. Configure AWS credentials +2. Select your region +3. 
Choose instance type +\`\`\`bash +coder templates push aws-dev +\`\`\` + +## Cost and Permissions +Estimated costs: +- t3.large: $0.20/hour +- EBS volume: $10/month + +Required permissions: +- ec2:RunInstances +- ec2:CreateTags + +## Variables +| Name | Type | Description | Default | +|------|------|-------------|---------| +| instance_type | string | EC2 instance type | t3.large | +| region | string | AWS region | us-east-1 | +` + + results := lintTemplateReadme(validReadme) + + // Should have results for all required sections + wantSections := []string{ + "Prerequisites", + "Infrastructure", + "Usage", + "Cost and Permissions", + "Variables", + } + + gotSections := make([]string, 0, len(results)) + for _, result := range results { + gotSections = append(gotSections, result.section) + } + + // Check if all required sections are present + for _, want := range wantSections { + found := false + for _, got := range gotSections { + if want == got { + found = true + break + } + } + if !found { + t.Errorf("missing required section %q in results", want) + } + } + + // Check that there are no errors in valid readme + for _, result := range results { + if len(result.errors) > 0 { + t.Errorf("unexpected errors in valid section %q: %v", + result.section, result.errors) + } + } +} diff --git a/cmd/readmevalidation/templateresources.go b/cmd/readmevalidation/templateresources.go new file mode 100644 index 000000000..ed79213ec --- /dev/null +++ b/cmd/readmevalidation/templateresources.go @@ -0,0 +1,279 @@ +package main + +import ( + "bufio" + "strings" + + "golang.org/x/xerrors" +) + +// templateResourceFrontmatter extends coderResourceFrontmatter with template-specific fields +type templateResourceFrontmatter struct { + coderResourceFrontmatter `yaml:",inline"` + Platform string `yaml:"platform"` + Requirements []string `yaml:"requirements"` + Workload string `yaml:"workload"` +} + +// templateResourceReadme represents a README specifically for templates +type templateResourceReadme struct { + coderResourceReadme + frontmatter templateResourceFrontmatter +} + +// templateSection defines required content for a template section +type templateSection struct { + name string + required bool + minItems int + requirements []string +} + +// Required sections and their specific requirements +var templateSections = []templateSection{ + { + name: "Prerequisites", + required: true, + minItems: 2, + requirements: []string{ + "Required tools or dependencies with version numbers", + "Installation instructions with working URLs", + "Environment setup steps if applicable", + }, + }, + { + name: "Infrastructure", + required: true, + minItems: 4, + requirements: []string{ + "List of provisioned resources with counts", + "Resource specifications (CPU, RAM, storage, etc.)", + "Architecture diagram in mermaid or similar format", + "Network architecture and security considerations", + }, + }, + { + name: "Usage", + required: true, + minItems: 4, + requirements: []string{ + "Step-by-step setup instructions", + "Example commands with expected output", + "Complete working Terraform configuration", + "Troubleshooting guide for common issues", + }, + }, + { + name: "Cost and Permissions", + required: true, + minItems: 4, + requirements: []string{ + "Detailed cost breakdown per resource", + "Monthly and hourly cost estimates", + "Required IAM policies or permissions in JSON/YAML", + "Cost optimization recommendations", + }, + }, + { + name: "Variables", + required: true, + minItems: 1, + requirements: []string{ + "Markdown table 
with columns: Name, Type, Description, Default, Required", + "Example values for each variable", + "Valid options for enum variables", + }, + }, +} + +// Get list of required section names +var requiredTemplateSections = func() []string { + var names []string + for _, section := range templateSections { + if section.required { + names = append(names, section.name) + } + } + return names +}() + +func validateTemplatePlatform(platform string) error { + if platform == "" { + return xerrors.New("platform field is required for templates") + } + + validPlatforms := []string{ + "aws", "gcp", "azure", "kubernetes", "docker", + "digitalocean", "openstack", "vsphere", "other", + } + + for _, valid := range validPlatforms { + if platform == valid { + return nil + } + } + + return xerrors.Errorf("invalid platform: %q. Must be one of: %v", platform, validPlatforms) +} + +func validateTemplateRequirements(requirements []string) error { + if len(requirements) == 0 { + return xerrors.New("requirements field is required for templates") + } + return nil +} + +func validateTemplateWorkload(workload string) error { + if workload == "" { + return xerrors.New("workload field is required for templates") + } + + validWorkloads := []string{ + "development", "data-science", "devops", "security", + "design", "ml", "other", + } + + for _, valid := range validWorkloads { + if workload == valid { + return nil + } + } + + return xerrors.Errorf("invalid workload: %q. Must be one of: %v", workload, validWorkloads) +} + +func validateTemplateSections(body string) []error { + var errs []error + + // Split content into sections + sections := make(map[string]string) + var currentSection string + var sectionContent strings.Builder + + scanner := bufio.NewScanner(strings.NewReader(body)) + for scanner.Scan() { + line := scanner.Text() + if strings.HasPrefix(line, "## ") { + // Save previous section if it exists + if currentSection != "" { + sections[currentSection] = sectionContent.String() + sectionContent.Reset() + } + currentSection = strings.TrimPrefix(line, "## ") + currentSection = strings.TrimSpace(currentSection) + continue + } + if currentSection != "" { + sectionContent.WriteString(line + "\n") + } + } + // Save last section + if currentSection != "" { + sections[currentSection] = sectionContent.String() + } + + // Validate each required section + for _, reqSection := range templateSections { + content, exists := sections[reqSection.name] + if !exists { + if reqSection.required { + errs = append(errs, xerrors.Errorf("missing required section: %q", reqSection.name)) + } + continue + } + + // Count meaningful items (non-empty lines that aren't just formatting) + var items []string + for _, line := range strings.Split(content, "\n") { + line = strings.TrimSpace(line) + if line != "" && !strings.HasPrefix(line, "#") && !strings.HasPrefix(line, "---") { + items = append(items, line) + } + } + + if len(items) < reqSection.minItems { + errs = append(errs, xerrors.Errorf("section %q must have at least %d items", + reqSection.name, reqSection.minItems)) + } + + // Check for required content patterns + for _, req := range reqSection.requirements { + found := false + for _, item := range items { + if strings.Contains(strings.ToLower(item), strings.ToLower(req)) { + found = true + break + } + } + if !found { + errs = append(errs, xerrors.Errorf("section %q missing required content: %q", + reqSection.name, req)) + } + } + + // Special validations for specific sections + switch reqSection.name { + case "Usage": + if 
!strings.Contains(content, "```") { + errs = append(errs, xerrors.New("Usage section must include code examples")) + } + if !strings.Contains(content, "terraform {") { + errs = append(errs, xerrors.New("Usage section must include Terraform configuration example")) + } + case "Variables": + if !strings.Contains(content, "| Name | Type |") { + errs = append(errs, xerrors.New("Variables section must include a properly formatted table")) + } + case "Infrastructure": + if !strings.Contains(content, "```mermaid") && !strings.Contains(content, "![") { + errs = append(errs, xerrors.New("Infrastructure section should include a diagram")) + } + } + } + + return errs +} + +func validateTemplateReadme(rm templateResourceReadme) []error { + var errs []error + + // First validate base resource requirements + baseErrs := validateCoderResourceReadme(rm.coderResourceReadme) + errs = append(errs, baseErrs...) + + // Validate template-specific frontmatter + if err := validateTemplatePlatform(rm.frontmatter.Platform); err != nil { + errs = append(errs, addFilePathToError(rm.filePath, err)) + } + if err := validateTemplateRequirements(rm.frontmatter.Requirements); err != nil { + errs = append(errs, addFilePathToError(rm.filePath, err)) + } + if err := validateTemplateWorkload(rm.frontmatter.Workload); err != nil { + errs = append(errs, addFilePathToError(rm.filePath, err)) + } + + // Validate template-specific sections + for _, err := range validateTemplateSections(rm.body) { + errs = append(errs, addFilePathToError(rm.filePath, err)) + } + + return errs +} + +func parseTemplateReadme(rm readme) (templateResourceReadme, error) { + base, err := parseCoderResourceReadme("templates", rm) + if err != nil { + return templateResourceReadme{}, err + } + + var frontmatter templateResourceFrontmatter + if err := yaml.Unmarshal([]byte(rm.frontmatter), &frontmatter); err != nil { + return templateResourceReadme{}, addFilePathToError(rm.filePath, + xerrors.Errorf("could not parse YAML frontmatter: %w", err)) + } + + return templateResourceReadme{ + coderResourceReadme: base, + frontmatter: frontmatter, + }, nil +} diff --git a/cmd/readmevalidation/templateresources_test.go b/cmd/readmevalidation/templateresources_test.go new file mode 100644 index 000000000..b34da09b8 --- /dev/null +++ b/cmd/readmevalidation/templateresources_test.go @@ -0,0 +1,288 @@ +package main + +import ( + "testing" +) + +func TestValidateTemplatePlatform(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + platform string + wantErr bool + }{ + { + name: "valid aws platform", + platform: "aws", + wantErr: false, + }, + { + name: "empty platform", + platform: "", + wantErr: true, + }, + { + name: "invalid platform", + platform: "invalid", + wantErr: true, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + err := validateTemplatePlatform(tt.platform) + if (err != nil) != tt.wantErr { + t.Errorf("validateTemplatePlatform() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + +func TestValidateTemplateRequirements(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + requirements []string + wantErr bool + }{ + { + name: "valid requirements", + requirements: []string{"aws-cli", "docker"}, + wantErr: false, + }, + { + name: "empty requirements", + requirements: []string{}, + wantErr: true, + }, + { + name: "nil requirements", + requirements: nil, + wantErr: true, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + 
t.Parallel() + err := validateTemplateRequirements(tt.requirements) + if (err != nil) != tt.wantErr { + t.Errorf("validateTemplateRequirements() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + +func TestValidateTemplateSections(t *testing.T) { + t.Parallel() + + validReadme := `# AWS Development Template + +Complete development environment on AWS. + +## Prerequisites +- AWS CLI v2.0 or later +- Terraform v1.0 or later +- Docker installed locally + +Links to installation guides: +- AWS CLI: https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html +- Terraform: https://developer.hashicorp.com/terraform/install + +## Infrastructure + +This template provisions the following resources: + +* EC2 instance (2 CPU, 8GB RAM) +* EBS volume (100GB SSD) +* Security group for workspace access +* IAM role for workspace permissions + +Architecture diagram: +\`\`\`mermaid +graph TD + A[Coder Workspace] --> B[EC2 Instance] + B --> C[EBS Volume] + B --> D[Security Group] + B --> E[IAM Role] +\`\`\` + +## Usage + +1. Configure AWS credentials: +\`\`\`bash +aws configure +\`\`\` + +2. Create template: +\`\`\`bash +coder templates create aws-dev +\`\`\` + +3. Create workspace: +\`\`\`bash +coder create --template aws-dev mydev +\`\`\` + +Example workspace configuration: +\`\`\`hcl +terraform { + required_providers { + coder = { + source = "coder/coder" + } + aws = { + source = "hashicorp/aws" + } + } +} + +provider "aws" { + region = data.coder_parameter.region.value +} + +data "coder_parameter" "region" { + name = "region" + display_name = "Region" + description = "The region to deploy the workspace in" + default = "us-east-1" + type = "string" + mutable = false +} + +resource "coder_agent" "main" { + arch = "amd64" + os = "linux" + startup_script = <<-EOT + # Install dependencies + sudo apt-get update + sudo apt-get install -y docker.io + EOT +} +\`\`\` + +## Cost and Permissions + +Estimated costs: +- EC2 t3.large: $0.0832/hour ($60/month) +- EBS gp3 100GB: $10/month +Total: ~$70/month + +Required AWS permissions: +\`\`\`json +{ + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "ec2:RunInstances", + "ec2:TerminateInstances", + "iam:CreateRole", + "iam:PutRolePolicy" + ], + "Resource": "*" + } + ] +} +\`\`\` + +Cost optimization tips: +- Use spot instances for dev environments +- Enable auto-shutdown during off-hours +- Use EBS snapshots for faster startup + +## Variables + +| Name | Type | Description | Default | Required | +|------|------|-------------|---------|----------| +| region | string | AWS region | us-east-1 | Yes | +| instance_type | string | EC2 instance type | t3.large | No | +| volume_size | number | Size of root volume in GB | 100 | No | +| additional_tags | map(string) | Additional resource tags | {} | No | +` + + missingSection := `# Template Name +Description + +## Prerequisites +Required setup + +## Infrastructure +Resources used` + + noResourceSpecs := strings.ReplaceAll(validReadme, + "EC2 instance (2 CPU, 8GB RAM)", + "EC2 instance") + + noArchDiagram := strings.ReplaceAll(validReadme, + "\nArchitecture diagram:\n```mermaid\ngraph TD\n A[Coder Workspace] --> B[EC2 Instance]\n B --> C[EBS Volume]\n B --> D[Security Group]\n B --> E[IAM Role]\n```", + "") + + noCosts := strings.ReplaceAll(validReadme, + "EC2 t3.large: $0.0832/hour ($60/month)", + "EC2 t3.large instance") + + poorVariables := strings.ReplaceAll(validReadme, + "| Name | Type | Description | Default | Required |\n|------|------|-------------|---------|----------|\n|", 
+ "Variables:\n- ") + + tests := []struct { + name string + body string + wantErr bool + errMsg string + }{ + { + name: "valid template readme", + body: validReadme, + wantErr: false, + }, + { + name: "missing required sections", + body: missingSection, + wantErr: true, + errMsg: "missing required section", + }, + { + name: "no resource specifications", + body: noResourceSpecs, + wantErr: true, + errMsg: "Infrastructure section must include resource specifications", + }, + { + name: "no architecture diagram", + body: noArchDiagram, + wantErr: true, + errMsg: "Infrastructure section should include a diagram", + }, + { + name: "missing cost estimates", + body: noCosts, + wantErr: true, + errMsg: "Cost and Permissions section must include specific cost estimates", + }, + { + name: "poorly formatted variables", + body: poorVariables, + wantErr: true, + errMsg: "Variables section must include a properly formatted table", + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + errs := validateTemplateSections(tt.body) + if (len(errs) > 0) != tt.wantErr { + t.Errorf("validateTemplateSections() errors = %v, wantErr %v", errs, tt.wantErr) + } + }) + } +} diff --git a/cmd/templatelint/README.md b/cmd/templatelint/README.md new file mode 100644 index 000000000..42f747de2 --- /dev/null +++ b/cmd/templatelint/README.md @@ -0,0 +1,114 @@ +# Template Linter + +A command-line tool for validating Coder template README files. + +## Features + +- Validates README structure and required sections +- Checks content quality and completeness +- Provides improvement suggestions +- Supports single file or directory scanning +- JSON output option for CI/CD integration + +## Installation + +```bash +go install github.com/coder/registry/cmd/templatelint@latest +``` + +## Usage + +```bash +# Lint a single README +templatelint -path ./registry/myuser/templates/mytemplate/README.md + +# Lint all templates in a directory +templatelint -path ./registry/myuser/templates + +# Lint and attempt to fix issues +templatelint -path ./README.md -fix + +# Output results in JSON format +templatelint -path ./README.md -json +``` + +## Validation Rules + +The linter checks: + +1. Frontmatter requirements: + - Required fields: display_name, description, icon, tags, platform, requirements, workload + - Valid platform values + - Non-empty requirements list + +2. Required sections: + - Prerequisites + - Infrastructure/Resources + - Usage/Examples + - Cost and Permissions + - Variables + +3. Section content: + - Minimum content length + - Required patterns and keywords + - Proper formatting + - Complete information + +## Example Output + +``` +Linting ./registry/myuser/templates/mytemplate/README.md: + +[ERROR] Prerequisites: + - Section must have at least 3 lines of content + - Missing bullet points for requirements + +[SUGGESTIONS] Infrastructure: + - Detail all infrastructure components + - Include resource specifications + - List any dependencies between resources + +βœ… All other sections look good! 
+``` + +## Integration + +### GitHub Actions + +```yaml +name: Lint Template READMEs + +on: + pull_request: + paths: + - 'registry/*/templates/*/README.md' + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-go@v2 + with: + go-version: '1.21' + - name: Install templatelint + run: go install github.com/coder/registry/cmd/templatelint@latest + - name: Lint READMEs + run: templatelint -path ./registry -json +``` + +### Pre-commit Hook + +Add to `.git/hooks/pre-commit`: + +```bash +#!/bin/sh +files=$(git diff --cached --name-only | grep 'templates/.*/README.md$') +if [ -n "$files" ]; then + templatelint -path "$files" +fi +``` + +## Contributing + +Contributions are welcome! Please see the main repository's [CONTRIBUTING.md](../../CONTRIBUTING.md) for guidelines. diff --git a/cmd/templatelint/main.go b/cmd/templatelint/main.go new file mode 100644 index 000000000..3f5c94da3 --- /dev/null +++ b/cmd/templatelint/main.go @@ -0,0 +1,204 @@ +package main + +import ( + "flag" + "fmt" + "os" + "path/filepath" + "strings" + + "cdr.dev/slog" + "cdr.dev/slog/sloggers/sloghuman" + "golang.org/x/xerrors" +) + +var logger = slog.Make(sloghuman.Sink(os.Stdout)) + +func main() { + var ( + path string + fixIssues bool + showHelp bool + jsonOutput bool + ) + + flag.StringVar(&path, "path", "", "Path to template README.md file or directory containing templates") + flag.BoolVar(&fixIssues, "fix", false, "Attempt to fix common issues automatically") + flag.BoolVar(&showHelp, "help", false, "Show help message") + flag.BoolVar(&jsonOutput, "json", false, "Output results in JSON format") + flag.Parse() + + if showHelp { + printHelp() + os.Exit(0) + } + + if path == "" { + logger.Error(nil, "path is required") + printHelp() + os.Exit(1) + } + + // Get list of files to check + files, err := getReadmeFiles(path) + if err != nil { + logger.Error(nil, "failed to get README files", "error", err) + os.Exit(1) + } + + hasErrors := false + for _, file := range files { + errs := lintFile(file, fixIssues, jsonOutput) + if len(errs) > 0 { + hasErrors = true + } + } + + if hasErrors { + os.Exit(1) + } +} + +func printHelp() { + fmt.Println(`Template README Linter + +Usage: + templatelint [options] -path + +Options: + -path string Path to template README.md file or directory containing templates + -fix Attempt to fix common issues automatically + -json Output results in JSON format + -help Show this help message + +Examples: + # Lint a single README + templatelint -path ./registry/myuser/templates/mytemplate/README.md + + # Lint all templates in a directory + templatelint -path ./registry/myuser/templates + + # Lint and attempt to fix issues + templatelint -path ./README.md -fix + + # Output results in JSON format + templatelint -path ./README.md -json`) +} + +func getReadmeFiles(path string) ([]string, error) { + var files []string + + fileInfo, err := os.Stat(path) + if err != nil { + return nil, xerrors.Errorf("failed to stat path: %w", err) + } + + if !fileInfo.IsDir() { + // Single file mode + if !strings.EqualFold(filepath.Base(path), "README.md") { + return nil, xerrors.New("specified file must be named README.md") + } + return []string{path}, nil + } + + // Directory mode - walk and find README.md files + err = filepath.Walk(path, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if !info.IsDir() && strings.EqualFold(info.Name(), "README.md") { + // Only include files that are in a templates directory + if strings.Contains(path, 
"templates") { + files = append(files, path) + } + } + return nil + }) + + if err != nil { + return nil, xerrors.Errorf("failed to walk directory: %w", err) + } + + return files, nil +} + +func lintFile(path string, fix, jsonOutput bool) []error { + content, err := os.ReadFile(path) + if err != nil { + return []error{xerrors.Errorf("failed to read file: %w", err)} + } + + // Create a readme struct + rm := readme{ + filePath: path, + rawText: string(content), + } + + // Parse as template readme + templateReadme, err := parseTemplateReadme(rm) + if err != nil { + return []error{xerrors.Errorf("failed to parse template README: %w", err)} + } + + // Get lint results + results := lintTemplateReadme(templateReadme.body) + + if jsonOutput { + printJsonResults(path, results) + return nil + } + + // Print results + fmt.Printf("\nLinting %s:\n", path) + hasErrors := false + + for _, result := range results { + if len(result.errors) > 0 { + hasErrors = true + fmt.Printf("\n[ERROR] %s:\n", result.section) + for _, err := range result.errors { + fmt.Printf(" - %s\n", err) + } + } + + if len(result.suggestions) > 0 { + fmt.Printf("\n[SUGGESTIONS] %s:\n", result.section) + for _, suggestion := range result.suggestions { + fmt.Printf(" - %s\n", suggestion) + } + } + } + + if !hasErrors { + fmt.Printf("\nβœ… No errors found!\n") + return nil + } + + if fix { + // TODO: Implement auto-fixing + fmt.Println("\nAuto-fix not yet implemented") + } + + var errs []error + for _, result := range results { + for _, err := range result.errors { + errs = append(errs, xerrors.New(err)) + } + } + return errs +} + +func printJsonResults(path string, results []lintResult) { + // TODO: Implement JSON output format + fmt.Printf("{\n \"path\": %q,\n \"results\": [\n", path) + for i, result := range results { + fmt.Printf(" {\n \"section\": %q,\n", result.section) + fmt.Printf(" \"errors\": %#v,\n", result.errors) + fmt.Printf(" \"suggestions\": %#v\n }", result.suggestions) + if i < len(results)-1 { + fmt.Print(",") + } + fmt.Println() + } + fmt.Println(" ]\n}") +} diff --git a/cmd/templatelint/main_test.go b/cmd/templatelint/main_test.go new file mode 100644 index 000000000..01b99784f --- /dev/null +++ b/cmd/templatelint/main_test.go @@ -0,0 +1,205 @@ +package main + +import ( + "os" + "path/filepath" + "testing" +) + +func TestGetReadmeFiles(t *testing.T) { + t.Parallel() + + // Create a temporary test directory + tmpDir, err := os.MkdirTemp("", "templatelint-test") + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(tmpDir) + + // Create test directory structure + dirs := []string{ + filepath.Join(tmpDir, "registry/user/templates/template1"), + filepath.Join(tmpDir, "registry/user/templates/template2"), + filepath.Join(tmpDir, "registry/user/modules/module1"), + } + + files := map[string]string{ + filepath.Join(dirs[0], "README.md"): "# Template 1", + filepath.Join(dirs[1], "README.md"): "# Template 2", + filepath.Join(dirs[2], "README.md"): "# Module 1", + } + + // Create directories and files + for _, dir := range dirs { + if err := os.MkdirAll(dir, 0755); err != nil { + t.Fatal(err) + } + } + + for path, content := range files { + if err := os.WriteFile(path, []byte(content), 0644); err != nil { + t.Fatal(err) + } + } + + tests := []struct { + name string + path string + wantCount int + wantErr bool + }{ + { + name: "single file", + path: filepath.Join(dirs[0], "README.md"), + wantCount: 1, + wantErr: false, + }, + { + name: "templates directory", + path: filepath.Join(tmpDir, "registry/user/templates"), + 
wantCount: 2, + wantErr: false, + }, + { + name: "non-existent path", + path: filepath.Join(tmpDir, "nonexistent"), + wantCount: 0, + wantErr: true, + }, + { + name: "wrong file name", + path: filepath.Join(tmpDir, "wrong.md"), + wantCount: 0, + wantErr: true, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + got, err := getReadmeFiles(tt.path) + if (err != nil) != tt.wantErr { + t.Errorf("getReadmeFiles() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !tt.wantErr && len(got) != tt.wantCount { + t.Errorf("getReadmeFiles() got %d files, want %d", len(got), tt.wantCount) + } + }) + } +} + +func TestLintFile(t *testing.T) { + t.Parallel() + + validContent := `--- +display_name: "Valid Template" +description: "A valid template" +icon: "../../../../.icons/platform.svg" +verified: false +tags: ["test"] +platform: "aws" +requirements: ["aws-cli"] +workload: "development" +--- + +# Valid Template + +A valid template description. + +## Prerequisites +- AWS CLI v2.0 or later +- Terraform v1.0 or later +- Docker installed locally + +## Infrastructure +This template provisions: +* EC2 instance (2 CPU, 8GB RAM) +* EBS volume (100GB) +* Security group for workspace access + +## Usage +1. Configure AWS credentials +2. Select your region +3. Choose instance type + +## Cost and Permissions +Estimated costs: +- t3.large: $0.20/hour +- EBS volume: $10/month + +Required permissions: +- ec2:RunInstances +- ec2:CreateTags + +## Variables +| Name | Type | Description | Default | +|------|------|-------------|---------| +| instance_type | string | EC2 instance type | t3.large | +` + + invalidContent := `--- +display_name: "Invalid Template" +--- + +# Invalid Template + +Missing required sections. +` + + // Create temporary test files + tmpDir, err := os.MkdirTemp("", "templatelint-test") + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(tmpDir) + + validFile := filepath.Join(tmpDir, "valid-README.md") + invalidFile := filepath.Join(tmpDir, "invalid-README.md") + + if err := os.WriteFile(validFile, []byte(validContent), 0644); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(invalidFile, []byte(invalidContent), 0644); err != nil { + t.Fatal(err) + } + + tests := []struct { + name string + path string + fix bool + json bool + wantErrs bool + }{ + { + name: "valid readme", + path: validFile, + wantErrs: false, + }, + { + name: "invalid readme", + path: invalidFile, + wantErrs: true, + }, + { + name: "json output", + path: validFile, + json: true, + wantErrs: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + errs := lintFile(tt.path, tt.fix, tt.json) + if (len(errs) > 0) != tt.wantErrs { + t.Errorf("lintFile() got %d errors, wantErrs %v", len(errs), tt.wantErrs) + } + }) + } +} diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 000000000..a6077b1d5 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,326 @@ +{ + "name": "registry", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "registry", + "devDependencies": { + "@types/bun": "^1.2.18", + "bun-types": "^1.2.18", + "dedent": "^1.6.0", + "gray-matter": "^4.0.3", + "marked": "^16.0.0", + "prettier": "^3.6.2", + "prettier-plugin-sh": "^0.18.0", + "prettier-plugin-terraform-formatter": "^1.2.1" + }, + "peerDependencies": { + "typescript": "^5.8.3" + } + }, + "node_modules/@reteps/dockerfmt": { + "version": "0.3.6", + "resolved": 
"https://registry.npmjs.org/@reteps/dockerfmt/-/dockerfmt-0.3.6.tgz", + "integrity": "sha512-Tb5wIMvBf/nLejTQ61krK644/CEMB/cpiaIFXqGApfGqO3GwcR3qnI0DbmkFVCl2OyEp8LnLX3EkucoL0+tbFg==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^v12.20.0 || ^14.13.0 || >=16.0.0" + } + }, + "node_modules/@types/bun": { + "version": "1.2.19", + "resolved": "https://registry.npmjs.org/@types/bun/-/bun-1.2.19.tgz", + "integrity": "sha512-d9ZCmrH3CJ2uYKXQIUuZ/pUnTqIvLDS0SK7pFmbx8ma+ziH/FRMoAq5bYpRG7y+w1gl+HgyNZbtqgMq4W4e2Lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "bun-types": "1.2.19" + } + }, + "node_modules/@types/node": { + "version": "24.1.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.1.0.tgz", + "integrity": "sha512-ut5FthK5moxFKH2T1CUOC6ctR67rQRvvHdFLCD2Ql6KXmMuCrjsSsRI9UsLCm9M18BMwClv4pn327UvB7eeO1w==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~7.8.0" + } + }, + "node_modules/@types/react": { + "version": "19.1.9", + "resolved": "https://registry.npmjs.org/@types/react/-/react-19.1.9.tgz", + "integrity": "sha512-WmdoynAX8Stew/36uTSVMcLJJ1KRh6L3IZRx1PZ7qJtBqT3dYTgyDTx8H1qoRghErydW7xw9mSJ3wS//tCRpFA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "csstype": "^3.0.2" + } + }, + "node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/bun-types": { + "version": "1.2.19", + "resolved": "https://registry.npmjs.org/bun-types/-/bun-types-1.2.19.tgz", + "integrity": "sha512-uAOTaZSPuYsWIXRpj7o56Let0g/wjihKCkeRqUBhlLVM/Bt+Fj9xTo+LhC1OV1XDaGkz4hNC80et5xgy+9KTHQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + }, + "peerDependencies": { + "@types/react": "^19" + } + }, + "node_modules/csstype": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", + "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/dedent": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.6.0.tgz", + "integrity": "sha512-F1Z+5UCFpmQUzJa11agbyPVMbpgT/qA3/SKyJ1jyBgm7dUcUEa8v9JwDkerSQXfakBwFljIxhOJqGkjUwZ9FSA==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "babel-plugin-macros": "^3.1.0" + }, + "peerDependenciesMeta": { + "babel-plugin-macros": { + "optional": true + } + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "license": "BSD-2-Clause", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/gray-matter": { + 
"version": "4.0.3", + "resolved": "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz", + "integrity": "sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "js-yaml": "^3.13.1", + "kind-of": "^6.0.2", + "section-matter": "^1.0.0", + "strip-bom-string": "^1.0.0" + }, + "engines": { + "node": ">=6.0" + } + }, + "node_modules/is-extendable": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", + "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/marked": { + "version": "16.1.1", + "resolved": "https://registry.npmjs.org/marked/-/marked-16.1.1.tgz", + "integrity": "sha512-ij/2lXfCRT71L6u0M29tJPhP0bM5shLL3u5BePhFwPELj2blMJ6GDtD7PfJhRLhJ/c2UwrK17ySVcDzy2YHjHQ==", + "dev": true, + "license": "MIT", + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/prettier": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.2.tgz", + "integrity": "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==", + "dev": true, + "license": "MIT", + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/prettier-plugin-sh": { + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/prettier-plugin-sh/-/prettier-plugin-sh-0.18.0.tgz", + "integrity": "sha512-cW1XL27FOJQ/qGHOW6IHwdCiNWQsAgK+feA8V6+xUTaH0cD3Mh+tFAtBvEEWvuY6hTDzRV943Fzeii+qMOh7nQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@reteps/dockerfmt": "^0.3.6", + "sh-syntax": "^0.5.8" + }, + "engines": { + "node": ">=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts" + }, + "peerDependencies": { + "prettier": "^3.6.0" + } + }, + "node_modules/prettier-plugin-terraform-formatter": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prettier-plugin-terraform-formatter/-/prettier-plugin-terraform-formatter-1.2.1.tgz", + "integrity": "sha512-rdzV61Bs/Ecnn7uAS/vL5usTX8xUWM+nQejNLZxt3I1kJH5WSeLEmq7LYu1wCoEQF+y7Uv1xGvPRfl3lIe6+tA==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "prettier": ">= 1.16.0" + }, + "peerDependenciesMeta": { + "prettier": { + "optional": true + } + } + }, + "node_modules/section-matter": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz", + "integrity": 
"sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==", + "dev": true, + "license": "MIT", + "dependencies": { + "extend-shallow": "^2.0.1", + "kind-of": "^6.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/sh-syntax": { + "version": "0.5.8", + "resolved": "https://registry.npmjs.org/sh-syntax/-/sh-syntax-0.5.8.tgz", + "integrity": "sha512-JfVoxf4FxQI5qpsPbkHhZo+n6N9YMJobyl4oGEUBb/31oQYlgTjkXQD8PBiafS2UbWoxrTO0Z5PJUBXEPAG1Zw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.8.1" + }, + "engines": { + "node": ">=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/sh-syntax" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/strip-bom-string": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz", + "integrity": "sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true, + "license": "0BSD" + }, + "node_modules/typescript": { + "version": "5.9.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.2.tgz", + "integrity": "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==", + "license": "Apache-2.0", + "peer": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.8.0.tgz", + "integrity": "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw==", + "dev": true, + "license": "MIT" + } + } +} diff --git a/package.json b/package.json index 7ca9f2ec2..66da802b1 100644 --- a/package.json +++ b/package.json @@ -1,10 +1,10 @@ { "name": "registry", "scripts": { - "fmt": "bun x prettier --write **/*.sh **/*.ts **/*.md *.md && terraform fmt -recursive -diff", - "fmt:ci": "bun x prettier --check **/*.sh **/*.ts **/*.md *.md && terraform fmt -check -recursive -diff", + "fmt": "npx prettier --write **/*.sh **/*.ts **/*.md *.md && terraform fmt -recursive -diff", + "fmt:ci": "npx prettier --check **/*.sh **/*.ts **/*.md *.md && terraform fmt -check -recursive -diff", "terraform-validate": "./scripts/terraform_validate.sh", - "test": "bun test", + "test": "npx jest", "update-version": "./update-version.sh" }, "devDependencies": { @@ -25,5 +25,6 @@ "prettier-plugin-sh", "prettier-plugin-terraform-formatter" ] - } + }, + "private": true } diff --git a/registry/anomaly/modules/tmux/main.test.ts b/registry/anomaly/modules/tmux/main.test.ts index 802147dbc..4f436a1fe 100644 --- a/registry/anomaly/modules/tmux/main.test.ts +++ b/registry/anomaly/modules/tmux/main.test.ts @@ -28,7 +28,9 @@ describe("tmux module", async () => { // check that the script contains expected lines expect(scriptResource.script).toContain("Installing tmux"); - 
expect(scriptResource.script).toContain("Installing Tmux Plugin Manager (TPM)"); + expect(scriptResource.script).toContain( + "Installing Tmux Plugin Manager (TPM)", + ); expect(scriptResource.script).toContain("tmux configuration created at"); expect(scriptResource.script).toContain("βœ… tmux setup complete!"); }); diff --git a/registry/anomaly/modules/tmux/scripts/run.sh b/registry/anomaly/modules/tmux/scripts/run.sh index 90c0d84b9..5d6b7b560 100755 --- a/registry/anomaly/modules/tmux/scripts/run.sh +++ b/registry/anomaly/modules/tmux/scripts/run.sh @@ -8,75 +8,75 @@ TMUX_CONFIG="${TMUX_CONFIG}" # Function to install tmux install_tmux() { - printf "Checking for tmux installation\n" - - if command -v tmux &> /dev/null; then - printf "tmux is already installed \n\n" - return 0 - fi - - printf "Installing tmux \n\n" - - # Detect package manager and install tmux - if command -v apt-get &> /dev/null; then - sudo apt-get update - sudo apt-get install -y tmux - elif command -v yum &> /dev/null; then - sudo yum install -y tmux - elif command -v dnf &> /dev/null; then - sudo dnf install -y tmux - elif command -v zypper &> /dev/null; then - sudo zypper install -y tmux - elif command -v apk &> /dev/null; then - sudo apk add tmux - elif command -v brew &> /dev/null; then - brew install tmux - else - printf "No supported package manager found. Please install tmux manually. \n" - exit 1 - fi - - printf "tmux installed successfully \n" + printf "Checking for tmux installation\n" + + if command -v tmux &> /dev/null; then + printf "tmux is already installed \n\n" + return 0 + fi + + printf "Installing tmux \n\n" + + # Detect package manager and install tmux + if command -v apt-get &> /dev/null; then + sudo apt-get update + sudo apt-get install -y tmux + elif command -v yum &> /dev/null; then + sudo yum install -y tmux + elif command -v dnf &> /dev/null; then + sudo dnf install -y tmux + elif command -v zypper &> /dev/null; then + sudo zypper install -y tmux + elif command -v apk &> /dev/null; then + sudo apk add tmux + elif command -v brew &> /dev/null; then + brew install tmux + else + printf "No supported package manager found. Please install tmux manually. \n" + exit 1 + fi + + printf "tmux installed successfully \n" } # Function to install Tmux Plugin Manager (TPM) install_tpm() { - local tpm_dir="$HOME/.tmux/plugins/tpm" - - if [ -d "$tpm_dir" ]; then - printf "TPM is already installed" - return 0 - fi - - printf "Installing Tmux Plugin Manager (TPM) \n" - - # Create plugins directory - mkdir -p "$HOME/.tmux/plugins" - - # Clone TPM repository - if command -v git &> /dev/null; then - git clone https://github.com/tmux-plugins/tpm "$tpm_dir" - printf "TPM installed successfully" - else - printf "Git is not installed. Please install git to use tmux plugins. \n" - exit 1 - fi + local tpm_dir="$HOME/.tmux/plugins/tpm" + + if [ -d "$tpm_dir" ]; then + printf "TPM is already installed" + return 0 + fi + + printf "Installing Tmux Plugin Manager (TPM) \n" + + # Create plugins directory + mkdir -p "$HOME/.tmux/plugins" + + # Clone TPM repository + if command -v git &> /dev/null; then + git clone https://github.com/tmux-plugins/tpm "$tpm_dir" + printf "TPM installed successfully" + else + printf "Git is not installed. Please install git to use tmux plugins. 
\n" + exit 1 + fi } # Function to create tmux configuration setup_tmux_config() { - printf "Setting up tmux configuration \n" + printf "Setting up tmux configuration \n" - local config_dir="$HOME/.tmux" - local config_file="$HOME/.tmux.conf" + local config_dir="$HOME/.tmux" + local config_file="$HOME/.tmux.conf" - mkdir -p "$config_dir" + mkdir -p "$config_dir" - if [ -n "$TMUX_CONFIG" ]; then - printf "$TMUX_CONFIG" > "$config_file" - printf "$${BOLD}Custom tmux configuration applied at {$config_file} \n\n" - else - cat > "$config_file" << EOF + if [ -n "$TMUX_CONFIG" ]; then + printf "$TMUX_CONFIG" > "$config_file" + printf "$${BOLD}Custom tmux configuration applied at {$config_file} \n\n" + else + cat > "$config_file" << EOF # Tmux Configuration File # ============================================================================= @@ -106,48 +106,48 @@ bind C-r run-shell "~/.tmux/plugins/tmux-resurrect/scripts/restore.sh" # Initialize TMUX plugin manager (keep this line at the very bottom of tmux.conf) run '~/.tmux/plugins/tpm/tpm' EOF - printf "tmux configuration created at {$config_file} \n\n" - fi + printf "tmux configuration created at {$config_file} \n\n" + fi } # Function to install tmux plugins install_plugins() { - printf "Installing tmux plugins" + printf "Installing tmux plugins" - # Check if TPM is installed - if [ ! -d "$HOME/.tmux/plugins/tpm" ]; then - printf "TPM is not installed. Cannot install plugins. \n" - return 1 - fi + # Check if TPM is installed + if [ ! -d "$HOME/.tmux/plugins/tpm" ]; then + printf "TPM is not installed. Cannot install plugins. \n" + return 1 + fi - # Install plugins using TPM - "$HOME/.tmux/plugins/tpm/bin/install_plugins" + # Install plugins using TPM + "$HOME/.tmux/plugins/tpm/bin/install_plugins" - printf "tmux plugins installed successfully \n" + printf "tmux plugins installed successfully \n" } # Main execution main() { - printf "$${BOLD} πŸ› οΈSetting up tmux with session persistence! \n\n" - printf "" + printf "$${BOLD} πŸ› οΈSetting up tmux with session persistence! \n\n" + printf "" - # Install dependencies - install_tmux - install_tpm + # Install dependencies + install_tmux + install_tpm - # Setup tmux configuration - setup_tmux_config + # Setup tmux configuration + setup_tmux_config - # Install plugins - install_plugins + # Install plugins + install_plugins - printf "$${BOLD}βœ… tmux setup complete! \n\n" + printf "$${BOLD}βœ… tmux setup complete! \n\n" - printf "$${BOLD} Attempting to restore sessions\n" - tmux new-session -d \; source-file ~/.tmux.conf \; run-shell '~/.tmux/plugins/tmux-resurrect/scripts/restore.sh' - printf "$${BOLD} Sessions restored: -> %s\n" "$(tmux ls)" + printf "$${BOLD} Attempting to restore sessions\n" + tmux new-session -d \; source-file ~/.tmux.conf \; run-shell '~/.tmux/plugins/tmux-resurrect/scripts/restore.sh' + printf "$${BOLD} Sessions restored: -> %s\n" "$(tmux ls)" } # Run main function -main \ No newline at end of file +main diff --git a/registry/anomaly/modules/tmux/scripts/start.sh b/registry/anomaly/modules/tmux/scripts/start.sh index 4638c8f7d..84e546ed0 100755 --- a/registry/anomaly/modules/tmux/scripts/start.sh +++ b/registry/anomaly/modules/tmux/scripts/start.sh @@ -16,7 +16,7 @@ handle_session() { local session_name="$1" # Check if the session exists - if tmux has-session -t "$session_name" 2>/dev/null; then + if tmux has-session -t "$session_name" 2> /dev/null; then echo "Session '$session_name' exists, attaching to it..." 
tmux attach-session -t "$session_name" else diff --git a/registry/coder-labs/modules/gemini/README.md b/registry/coder-labs/modules/gemini/README.md index 65b9d9bc6..3f3feaab4 100644 --- a/registry/coder-labs/modules/gemini/README.md +++ b/registry/coder-labs/modules/gemini/README.md @@ -8,7 +8,14 @@ tags: [agent, gemini, ai, google, tasks] # Gemini CLI -Run [Gemini CLI](https://ai.google.dev/gemini-api/docs/cli) in your workspace to access Google's Gemini AI models, and custom pre/post install scripts. This module integrates with [AgentAPI](https://github.com/coder/agentapi) for Coder Tasks compatibility. +Run [Gemini CLI](https://ai.google.com/docs/gemini/tools/cli) in your workspace to access Google's Gemini AI models, and custom pre/post install scripts. This module integrates with [AgentAPI](https://github.com/coder/agentapi) for Coder Tasks compatibility. + +## Getting Started + +1. **Get a Gemini API Key**: + - Visit [Google AI Studio](https://makersuite.google.com/app/apikey) + - Create a new API key or use an existing one + - The API key starts with "AIza..." ```tf module "gemini" { @@ -44,10 +51,13 @@ module "gemini" { source = "registry.coder.com/coder-labs/gemini/coder" version = "1.0.0" agent_id = coder_agent.example.id - gemini_api_key = var.gemini_api_key # we recommend providing this parameter inorder to have a smoother experience (i.e. no google sign-in) + gemini_api_key = var.gemini_api_key # Required for automated setup gemini_model = "gemini-2.5-flash" - install_gemini = true - gemini_version = "latest" + install_gemini = true + gemini_version = "latest" + auto_approve = true # Automatically approve API key usage + yolo_mode = true # Enable faster responses without confirmations + folder = "/home/coder/project" # Custom working directory gemini_instruction_prompt = "Start every response with `Gemini says:`" } ``` @@ -64,7 +74,11 @@ module "gemini" { - If Gemini CLI is not found, ensure `install_gemini = true` and your API key is valid - Node.js and npm are installed automatically if missing (using NVM) - Check logs in `/home/coder/.gemini-module/` for install/start output -- We highly recommend using the `gemini_api_key` variable, this also ensures smooth tasks running without needing to sign in to Google. +- We highly recommend using the `gemini_api_key` variable, this also ensures smooth tasks running without needing to sign in to Google +- If experiencing prompts for approval or confirmation: + - Set `auto_approve = true` to automatically approve API key usage + - Set `yolo_mode = true` to enable faster responses without confirmation prompts + - These settings are configured in `~/.gemini/settings.json` automatically > [!IMPORTANT] > To use tasks with Gemini CLI, ensure you have the `gemini_api_key` variable set, and **you pass the `AI Prompt` Parameter**. 
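The `auto_approve` and `yolo_mode` defaults described above end up in `~/.gemini/settings.json`. As a quick sanity check, the minimal sketch below (assuming `jq` is available in the workspace) reads back the fields that `scripts/install.sh` merges in and tails the module logs mentioned in the troubleshooting notes:

```sh
# Minimal sketch: inspect the settings written by the Gemini module (assumes jq is installed).
# Field names follow the jq merge in scripts/install.sh; adjust paths if you override them.
jq '{selectedAuthType, autoApproveApiKey, geminicodeassist}' ~/.gemini/settings.json

# Install and start logs live under the module directory referenced above.
tail -n 50 ~/.gemini-module/install.log ~/.gemini-module/agentapi-start.log
```

If those fields are missing, re-run with `auto_approve = true` and look for the `append_extensions_to_settings_json` messages in the install log.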
diff --git a/registry/coder-labs/modules/gemini/main.test.ts b/registry/coder-labs/modules/gemini/main.test.ts index 181b61146..eaa5cc57b 100644 --- a/registry/coder-labs/modules/gemini/main.test.ts +++ b/registry/coder-labs/modules/gemini/main.test.ts @@ -102,7 +102,10 @@ describe("gemini", async () => { }, }); await execModuleScript(id); - const resp = await readFileContainer(id, "/home/coder/.gemini/settings.json"); + const resp = await readFileContainer( + id, + "/home/coder/.gemini/settings.json", + ); expect(resp).toContain("foo"); expect(resp).toContain("bar"); }); @@ -116,7 +119,10 @@ describe("gemini", async () => { }); await execModuleScript(id); - const resp = await readFileContainer(id, "/home/coder/.gemini-module/agentapi-start.log"); + const resp = await readFileContainer( + id, + "/home/coder/.gemini-module/agentapi-start.log", + ); expect(resp).toContain("gemini_api_key provided !"); }); @@ -128,8 +134,11 @@ describe("gemini", async () => { }, }); await execModuleScript(id); - const resp = await readFileContainer(id, "/home/coder/.gemini-module/install.log"); - expect(resp).toContain('GOOGLE_GENAI_USE_VERTEXAI=\'true\''); + const resp = await readFileContainer( + id, + "/home/coder/.gemini-module/install.log", + ); + expect(resp).toContain("GOOGLE_GENAI_USE_VERTEXAI='true'"); }); test("gemini-model", async () => { @@ -141,7 +150,10 @@ describe("gemini", async () => { }, }); await execModuleScript(id); - const resp = await readFileContainer(id, "/home/coder/.gemini-module/install.log"); + const resp = await readFileContainer( + id, + "/home/coder/.gemini-module/install.log", + ); expect(resp).toContain(model); }); @@ -153,9 +165,15 @@ describe("gemini", async () => { }, }); await execModuleScript(id); - const preInstallLog = await readFileContainer(id, "/home/coder/.gemini-module/pre_install.log"); + const preInstallLog = await readFileContainer( + id, + "/home/coder/.gemini-module/pre_install.log", + ); expect(preInstallLog).toContain("pre-install-script"); - const postInstallLog = await readFileContainer(id, "/home/coder/.gemini-module/post_install.log"); + const postInstallLog = await readFileContainer( + id, + "/home/coder/.gemini-module/post_install.log", + ); expect(postInstallLog).toContain("post-install-script"); }); @@ -168,7 +186,10 @@ describe("gemini", async () => { }, }); await execModuleScript(id); - const resp = await readFileContainer(id, "/home/coder/.gemini-module/install.log"); + const resp = await readFileContainer( + id, + "/home/coder/.gemini-module/install.log", + ); expect(resp).toContain(folder); }); @@ -180,7 +201,10 @@ describe("gemini", async () => { }, }); await execModuleScript(id); - const resp = await readFileContainer(id, "/home/coder/.gemini/settings.json"); + const resp = await readFileContainer( + id, + "/home/coder/.gemini/settings.json", + ); expect(resp).toContain("custom"); expect(resp).toContain("enabled"); }); @@ -200,7 +224,11 @@ describe("gemini", async () => { test("start-without-prompt", async () => { const { id } = await setup(); await execModuleScript(id); - const prompt = await execContainer(id, ["ls", "-l", "/home/coder/GEMINI.md"]); + const prompt = await execContainer(id, [ + "ls", + "-l", + "/home/coder/GEMINI.md", + ]); expect(prompt.exitCode).not.toBe(0); expect(prompt.stderr).toContain("No such file or directory"); }); diff --git a/registry/coder-labs/modules/gemini/main.tf b/registry/coder-labs/modules/gemini/main.tf index ab4fa9450..846c28b27 100644 --- a/registry/coder-labs/modules/gemini/main.tf +++ 
b/registry/coder-labs/modules/gemini/main.tf @@ -33,7 +33,7 @@ variable "group" { variable "icon" { type = string description = "The icon to use for the app." - default = "/icon/gemini.svg" + default = "../../../../.icons/gemini.svg" } variable "folder" { @@ -42,6 +42,18 @@ variable "folder" { default = "/home/coder" } +variable "auto_approve" { + type = bool + description = "Whether to automatically approve Gemini API key usage." + default = true +} + +variable "yolo_mode" { + type = bool + description = "Whether to enable YOLO mode for faster responses without confirmation prompts." + default = true +} + variable "install_gemini" { type = bool description = "Whether to install Gemini." diff --git a/registry/coder-labs/modules/gemini/scripts/install.sh b/registry/coder-labs/modules/gemini/scripts/install.sh index a800dbd29..d3bf4582f 100644 --- a/registry/coder-labs/modules/gemini/scripts/install.sh +++ b/registry/coder-labs/modules/gemini/scripts/install.sh @@ -4,7 +4,7 @@ BOLD='\033[0;1m' # Function to check if a command exists command_exists() { - command -v "$1" >/dev/null 2>&1 + command -v "$1" > /dev/null 2>&1 } set -o nounset @@ -24,30 +24,30 @@ set +o nounset function install_node() { # borrowed from claude-code module - if ! command_exists npm; then - printf "npm not found, checking for Node.js installation...\n" - if ! command_exists node; then - printf "Node.js not found, installing Node.js via NVM...\n" - export NVM_DIR="$HOME/.nvm" - if [ ! -d "$NVM_DIR" ]; then - mkdir -p "$NVM_DIR" - curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash - [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" - else - [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" - fi - - nvm install --lts - nvm use --lts - nvm alias default node - - printf "Node.js installed: %s\n" "$(node --version)" - printf "npm installed: %s\n" "$(npm --version)" + if ! command_exists npm; then + printf "npm not found, checking for Node.js installation...\n" + if ! command_exists node; then + printf "Node.js not found, installing Node.js via NVM...\n" + export NVM_DIR="$HOME/.nvm" + if [ ! -d "$NVM_DIR" ]; then + mkdir -p "$NVM_DIR" + curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash + [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" else - printf "Node.js is installed but npm is not available. Please install npm manually.\n" - exit 1 + [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" fi + + nvm install --lts + nvm use --lts + nvm alias default node + + printf "Node.js installed: %s\n" "$(node --version)" + printf "npm installed: %s\n" "$(npm --version)" + else + printf "Node.js is installed but npm is not available. Please install npm manually.\n" + exit 1 fi + fi } function install_gemini() { @@ -55,8 +55,8 @@ function install_gemini() { # we need node to install and run gemini-cli install_node - # If nvm does not exist, we will create a global npm directory (this os to prevent the possibility of EACCESS issues on npm -g) - if ! command_exists nvm; then + # If nvm does not exist, we will create a global npm directory (this os to prevent the possibility of EACCESS issues on npm -g) + if ! command_exists nvm; then printf "which node: %s\n" "$(which node)" printf "which npm: %s\n" "$(which npm)" @@ -71,7 +71,7 @@ function install_gemini() { # Add to shell profile for future sessions if ! 
grep -q "export PATH=$HOME/.npm-global/bin:\$PATH" ~/.bashrc; then - echo "export PATH=$HOME/.npm-global/bin:\$PATH" >> ~/.bashrc + echo "export PATH=$HOME/.npm-global/bin:\$PATH" >> ~/.bashrc fi fi @@ -87,89 +87,91 @@ function install_gemini() { } function populate_settings_json() { - if [ "${ARG_GEMINI_CONFIG}" != "" ]; then - SETTINGS_PATH="$HOME/.gemini/settings.json" - mkdir -p "$(dirname "$SETTINGS_PATH")" - printf "Custom gemini_config is provided !\n" - echo "${ARG_GEMINI_CONFIG}" > "$HOME/.gemini/settings.json" - else - printf "No custom gemini_config provided, using default settings.json.\n" - append_extensions_to_settings_json - fi -} - -function append_extensions_to_settings_json() { + if [ "${ARG_GEMINI_CONFIG}" != "" ]; then SETTINGS_PATH="$HOME/.gemini/settings.json" mkdir -p "$(dirname "$SETTINGS_PATH")" - printf "[append_extensions_to_settings_json] Starting extension merge process...\n" - if [ -z "${BASE_EXTENSIONS:-}" ]; then - printf "[append_extensions_to_settings_json] BASE_EXTENSIONS is empty, skipping merge.\n" - return - fi - if [ ! -f "$SETTINGS_PATH" ]; then - printf "%s does not exist. Creating with merged mcpServers structure.\n" "$SETTINGS_PATH" - # If ADDITIONAL_EXTENSIONS is not set or empty, use '{}' - ADD_EXT_JSON='{}' - if [ -n "${ADDITIONAL_EXTENSIONS:-}" ]; then - ADD_EXT_JSON="$ADDITIONAL_EXTENSIONS" - fi - printf '{"mcpServers":%s}\n' "$(jq -s 'add' <(echo "$BASE_EXTENSIONS") <(echo "$ADD_EXT_JSON"))" > "$SETTINGS_PATH" - fi - - # Prepare temp files - TMP_SETTINGS=$(mktemp) + printf "Custom gemini_config is provided !\n" + echo "${ARG_GEMINI_CONFIG}" > "$HOME/.gemini/settings.json" + else + printf "No custom gemini_config provided, using default settings.json.\n" + append_extensions_to_settings_json + fi +} +function append_extensions_to_settings_json() { + SETTINGS_PATH="$HOME/.gemini/settings.json" + mkdir -p "$(dirname "$SETTINGS_PATH")" + printf "[append_extensions_to_settings_json] Starting extension merge process...\n" + if [ -z "${BASE_EXTENSIONS:-}" ]; then + printf "[append_extensions_to_settings_json] BASE_EXTENSIONS is empty, skipping merge.\n" + return + fi + if [ ! -f "$SETTINGS_PATH" ]; then + printf "%s does not exist. 
Creating with merged mcpServers structure.\n" "$SETTINGS_PATH" # If ADDITIONAL_EXTENSIONS is not set or empty, use '{}' ADD_EXT_JSON='{}' if [ -n "${ADDITIONAL_EXTENSIONS:-}" ]; then - printf "[append_extensions_to_settings_json] ADDITIONAL_EXTENSIONS is set.\n" ADD_EXT_JSON="$ADDITIONAL_EXTENSIONS" - else - printf "[append_extensions_to_settings_json] ADDITIONAL_EXTENSIONS is empty or not set.\n" fi + printf '{"mcpServers":%s}\n' "$(jq -s 'add' <(echo "$BASE_EXTENSIONS") <(echo "$ADD_EXT_JSON"))" > "$SETTINGS_PATH" + fi + + # Prepare temp files + TMP_SETTINGS=$(mktemp) + + # If ADDITIONAL_EXTENSIONS is not set or empty, use '{}' + ADD_EXT_JSON='{}' + if [ -n "${ADDITIONAL_EXTENSIONS:-}" ]; then + printf "[append_extensions_to_settings_json] ADDITIONAL_EXTENSIONS is set.\n" + ADD_EXT_JSON="$ADDITIONAL_EXTENSIONS" + else + printf "[append_extensions_to_settings_json] ADDITIONAL_EXTENSIONS is empty or not set.\n" + fi - printf "[append_extensions_to_settings_json] Merging BASE_EXTENSIONS and ADDITIONAL_EXTENSIONS into mcpServers...\n" - jq --argjson base "$BASE_EXTENSIONS" --argjson add "$ADD_EXT_JSON" \ - '.mcpServers = (.mcpServers // {} + $base + $add)' \ - "$SETTINGS_PATH" > "$TMP_SETTINGS" && mv "$TMP_SETTINGS" "$SETTINGS_PATH" + printf "[append_extensions_to_settings_json] Merging BASE_EXTENSIONS and ADDITIONAL_EXTENSIONS into mcpServers...\n" + jq --argjson base "$BASE_EXTENSIONS" --argjson add "$ADD_EXT_JSON" \ + '.mcpServers = (.mcpServers // {} + $base + $add)' \ + "$SETTINGS_PATH" > "$TMP_SETTINGS" && mv "$TMP_SETTINGS" "$SETTINGS_PATH" - # Add theme and selectedAuthType fields - jq '.theme = "Default" | .selectedAuthType = "gemini-api-key"' "$SETTINGS_PATH" > "$TMP_SETTINGS" && mv "$TMP_SETTINGS" "$SETTINGS_PATH" + # Add theme, selectedAuthType, and Gemini settings + jq '.theme = "Default" | + .selectedAuthType = "gemini-api-key" | + .autoApproveApiKey = true | + .geminicodeassist.agentYoloMode = true | + .geminicodeassist.autoConfirm = true' "$SETTINGS_PATH" > "$TMP_SETTINGS" && mv "$TMP_SETTINGS" "$SETTINGS_PATH" - printf "[append_extensions_to_settings_json] Merge complete.\n" + printf "[append_extensions_to_settings_json] Merge complete.\n" } function add_instruction_prompt_if_exists() { - if [ -n "${GEMINI_INSTRUCTION_PROMPT:-}" ]; then - if [ -d "${GEMINI_START_DIRECTORY}" ]; then - printf "Directory '%s' exists. Changing to it.\\n" "${GEMINI_START_DIRECTORY}" - cd "${GEMINI_START_DIRECTORY}" || { - printf "Error: Could not change to directory '%s'.\\n" "${GEMINI_START_DIRECTORY}" - exit 1 - } - else - printf "Directory '%s' does not exist. Creating and changing to it.\\n" "${GEMINI_START_DIRECTORY}" - mkdir -p "${GEMINI_START_DIRECTORY}" || { - printf "Error: Could not create directory '%s'.\\n" "${GEMINI_START_DIRECTORY}" - exit 1 - } - cd "${GEMINI_START_DIRECTORY}" || { - printf "Error: Could not change to directory '%s'.\\n" "${GEMINI_START_DIRECTORY}" - exit 1 - } - fi - touch GEMINI.md - printf "Setting GEMINI.md\n" - echo "${GEMINI_INSTRUCTION_PROMPT}" > GEMINI.md + if [ -n "${GEMINI_INSTRUCTION_PROMPT:-}" ]; then + if [ -d "${GEMINI_START_DIRECTORY}" ]; then + printf "Directory '%s' exists. Changing to it.\\n" "${GEMINI_START_DIRECTORY}" + cd "${GEMINI_START_DIRECTORY}" || { + printf "Error: Could not change to directory '%s'.\\n" "${GEMINI_START_DIRECTORY}" + exit 1 + } else - printf "GEMINI.md is not set.\n" + printf "Directory '%s' does not exist. 
Creating and changing to it.\\n" "${GEMINI_START_DIRECTORY}" + mkdir -p "${GEMINI_START_DIRECTORY}" || { + printf "Error: Could not create directory '%s'.\\n" "${GEMINI_START_DIRECTORY}" + exit 1 + } + cd "${GEMINI_START_DIRECTORY}" || { + printf "Error: Could not change to directory '%s'.\\n" "${GEMINI_START_DIRECTORY}" + exit 1 + } fi + touch GEMINI.md + printf "Setting GEMINI.md\n" + echo "${GEMINI_INSTRUCTION_PROMPT}" > GEMINI.md + else + printf "GEMINI.md is not set.\n" + fi } - # Install Gemini install_gemini gemini --version populate_settings_json add_instruction_prompt_if_exists - diff --git a/registry/coder-labs/modules/gemini/scripts/start.sh b/registry/coder-labs/modules/gemini/scripts/start.sh index 00e0b5edb..f3065d30f 100644 --- a/registry/coder-labs/modules/gemini/scripts/start.sh +++ b/registry/coder-labs/modules/gemini/scripts/start.sh @@ -4,7 +4,7 @@ source "$HOME"/.bashrc command_exists() { - command -v "$1" >/dev/null 2>&1 + command -v "$1" > /dev/null 2>&1 } if [ -f "$HOME/.nvm/nvm.sh" ]; then @@ -18,45 +18,45 @@ printf "Version: %s\n" "$(gemini --version)" GEMINI_TASK_PROMPT=$(echo -n "$GEMINI_TASK_PROMPT" | base64 -d) if command_exists gemini; then - printf "Gemini is installed\n" + printf "Gemini is installed\n" else - printf "Error: Gemini is not installed. Please enable install_gemini or install it manually :)\n" - exit 1 + printf "Error: Gemini is not installed. Please enable install_gemini or install it manually :)\n" + exit 1 fi if [ -d "${GEMINI_START_DIRECTORY}" ]; then - printf "Directory '%s' exists. Changing to it.\\n" "${GEMINI_START_DIRECTORY}" - cd "${GEMINI_START_DIRECTORY}" || { - printf "Error: Could not change to directory '%s'.\\n" "${GEMINI_START_DIRECTORY}" - exit 1 - } + printf "Directory '%s' exists. Changing to it.\\n" "${GEMINI_START_DIRECTORY}" + cd "${GEMINI_START_DIRECTORY}" || { + printf "Error: Could not change to directory '%s'.\\n" "${GEMINI_START_DIRECTORY}" + exit 1 + } else - printf "Directory '%s' does not exist. Creating and changing to it.\\n" "${GEMINI_START_DIRECTORY}" - mkdir -p "${GEMINI_START_DIRECTORY}" || { - printf "Error: Could not create directory '%s'.\\n" "${GEMINI_START_DIRECTORY}" - exit 1 - } - cd "${GEMINI_START_DIRECTORY}" || { - printf "Error: Could not change to directory '%s'.\\n" "${GEMINI_START_DIRECTORY}" - exit 1 - } + printf "Directory '%s' does not exist. Creating and changing to it.\\n" "${GEMINI_START_DIRECTORY}" + mkdir -p "${GEMINI_START_DIRECTORY}" || { + printf "Error: Could not create directory '%s'.\\n" "${GEMINI_START_DIRECTORY}" + exit 1 + } + cd "${GEMINI_START_DIRECTORY}" || { + printf "Error: Could not change to directory '%s'.\\n" "${GEMINI_START_DIRECTORY}" + exit 1 + } fi if [ -n "$GEMINI_TASK_PROMPT" ]; then - printf "Running the task prompt %s\n" "$GEMINI_TASK_PROMPT" - PROMPT="Every step of the way, report tasks to Coder with proper descriptions and statuses. Your task at hand: $GEMINI_TASK_PROMPT" - GEMINI_ARGS=(--prompt-interactive "$PROMPT") + printf "Running the task prompt %s\n" "$GEMINI_TASK_PROMPT" + PROMPT="Every step of the way, report tasks to Coder with proper descriptions and statuses. 
Your task at hand: $GEMINI_TASK_PROMPT" + GEMINI_ARGS=(--prompt-interactive "$PROMPT") else - printf "No task prompt given.\n" - GEMINI_ARGS=() + printf "No task prompt given.\n" + GEMINI_ARGS=() fi if [ -n "$GEMINI_API_KEY" ]; then - printf "gemini_api_key provided !\n" + printf "gemini_api_key provided !\n" else - printf "gemini_api_key not provided\n" + printf "gemini_api_key not provided\n" fi # use low width to fit in the tasks UI sidebar. height is adjusted so that width x height ~= 80x1000 characters # are visible in the terminal screen by default. -agentapi server --term-width 67 --term-height 1190 -- gemini "${GEMINI_ARGS[@]}" \ No newline at end of file +agentapi server --term-width 67 --term-height 1190 -- gemini "${GEMINI_ARGS[@]}" diff --git a/registry/coder-labs/modules/gemini/testdata/gemini-mock.sh b/registry/coder-labs/modules/gemini/testdata/gemini-mock.sh index 53c9c41de..3ba8b081c 100644 --- a/registry/coder-labs/modules/gemini/testdata/gemini-mock.sh +++ b/registry/coder-labs/modules/gemini/testdata/gemini-mock.sh @@ -9,6 +9,6 @@ fi set -e while true; do - echo "$(date) - gemini-mock" - sleep 15 -done \ No newline at end of file + echo "$(date) - gemini-mock" + sleep 15 +done diff --git a/registry/coder/modules/agentapi/scripts/agentapi-wait-for-start.sh b/registry/coder/modules/agentapi/scripts/agentapi-wait-for-start.sh index 7430e9ecd..6e18332f9 100644 --- a/registry/coder/modules/agentapi/scripts/agentapi-wait-for-start.sh +++ b/registry/coder/modules/agentapi/scripts/agentapi-wait-for-start.sh @@ -11,22 +11,22 @@ agentapi_started=false echo "Waiting for agentapi server to start on port $port..." for i in $(seq 1 150); do - for j in $(seq 1 3); do - sleep 0.1 - if curl -fs -o /dev/null "http://localhost:$port/status"; then - echo "agentapi response received ($j/3)" - else - echo "agentapi server not responding ($i/15)" - continue 2 - fi - done - agentapi_started=true - break + for j in $(seq 1 3); do + sleep 0.1 + if curl -fs -o /dev/null "http://localhost:$port/status"; then + echo "agentapi response received ($j/3)" + else + echo "agentapi server not responding ($i/15)" + continue 2 + fi + done + agentapi_started=true + break done if [ "$agentapi_started" != "true" ]; then - echo "Error: agentapi server did not start on port $port after 15 seconds." - exit 1 + echo "Error: agentapi server did not start on port $port after 15 seconds." + exit 1 fi echo "agentapi server started on port $port." diff --git a/registry/coder/modules/agentapi/scripts/main.sh b/registry/coder/modules/agentapi/scripts/main.sh index f7a5caab7..3b485d27d 100644 --- a/registry/coder/modules/agentapi/scripts/main.sh +++ b/registry/coder/modules/agentapi/scripts/main.sh @@ -16,81 +16,81 @@ AGENTAPI_PORT="$ARG_AGENTAPI_PORT" set +o nounset command_exists() { - command -v "$1" >/dev/null 2>&1 + command -v "$1" > /dev/null 2>&1 } module_path="$HOME/${MODULE_DIR_NAME}" mkdir -p "$module_path/scripts" if [ ! -d "${WORKDIR}" ]; then - echo "Warning: The specified folder '${WORKDIR}' does not exist." - echo "Creating the folder..." - mkdir -p "${WORKDIR}" - echo "Folder created successfully." + echo "Warning: The specified folder '${WORKDIR}' does not exist." + echo "Creating the folder..." + mkdir -p "${WORKDIR}" + echo "Folder created successfully." fi if [ -n "${PRE_INSTALL_SCRIPT}" ]; then - echo "Running pre-install script..." 
- echo -n "${PRE_INSTALL_SCRIPT}" >"$module_path/pre_install.sh" - chmod +x "$module_path/pre_install.sh" - "$module_path/pre_install.sh" 2>&1 | tee "$module_path/pre_install.log" + echo "Running pre-install script..." + echo -n "${PRE_INSTALL_SCRIPT}" > "$module_path/pre_install.sh" + chmod +x "$module_path/pre_install.sh" + "$module_path/pre_install.sh" 2>&1 | tee "$module_path/pre_install.log" fi echo "Running install script..." -echo -n "${INSTALL_SCRIPT}" >"$module_path/install.sh" +echo -n "${INSTALL_SCRIPT}" > "$module_path/install.sh" chmod +x "$module_path/install.sh" "$module_path/install.sh" 2>&1 | tee "$module_path/install.log" # Install AgentAPI if enabled if [ "${INSTALL_AGENTAPI}" = "true" ]; then - echo "Installing AgentAPI..." - arch=$(uname -m) - if [ "$arch" = "x86_64" ]; then - binary_name="agentapi-linux-amd64" - elif [ "$arch" = "aarch64" ]; then - binary_name="agentapi-linux-arm64" - else - echo "Error: Unsupported architecture: $arch" - exit 1 - fi - if [ "${AGENTAPI_VERSION}" = "latest" ]; then - # for the latest release the download URL pattern is different than for tagged releases - # https://docs.github.com/en/repositories/releasing-projects-on-github/linking-to-releases - download_url="https://github.com/coder/agentapi/releases/latest/download/$binary_name" - else - download_url="https://github.com/coder/agentapi/releases/download/${AGENTAPI_VERSION}/$binary_name" - fi - curl \ - --retry 5 \ - --retry-delay 5 \ - --fail \ - --retry-all-errors \ - -L \ - -C - \ - -o agentapi \ - "$download_url" - chmod +x agentapi - sudo mv agentapi /usr/local/bin/agentapi + echo "Installing AgentAPI..." + arch=$(uname -m) + if [ "$arch" = "x86_64" ]; then + binary_name="agentapi-linux-amd64" + elif [ "$arch" = "aarch64" ]; then + binary_name="agentapi-linux-arm64" + else + echo "Error: Unsupported architecture: $arch" + exit 1 + fi + if [ "${AGENTAPI_VERSION}" = "latest" ]; then + # for the latest release the download URL pattern is different than for tagged releases + # https://docs.github.com/en/repositories/releasing-projects-on-github/linking-to-releases + download_url="https://github.com/coder/agentapi/releases/latest/download/$binary_name" + else + download_url="https://github.com/coder/agentapi/releases/download/${AGENTAPI_VERSION}/$binary_name" + fi + curl \ + --retry 5 \ + --retry-delay 5 \ + --fail \ + --retry-all-errors \ + -L \ + -C - \ + -o agentapi \ + "$download_url" + chmod +x agentapi + sudo mv agentapi /usr/local/bin/agentapi fi if ! command_exists agentapi; then - echo "Error: AgentAPI is not installed. Please enable install_agentapi or install it manually." - exit 1 + echo "Error: AgentAPI is not installed. Please enable install_agentapi or install it manually." + exit 1 fi -echo -n "${START_SCRIPT}" >"$module_path/scripts/agentapi-start.sh" -echo -n "${WAIT_FOR_START_SCRIPT}" >"$module_path/scripts/agentapi-wait-for-start.sh" +echo -n "${START_SCRIPT}" > "$module_path/scripts/agentapi-start.sh" +echo -n "${WAIT_FOR_START_SCRIPT}" > "$module_path/scripts/agentapi-wait-for-start.sh" chmod +x "$module_path/scripts/agentapi-start.sh" chmod +x "$module_path/scripts/agentapi-wait-for-start.sh" if [ -n "${POST_INSTALL_SCRIPT}" ]; then - echo "Running post-install script..." - echo -n "${POST_INSTALL_SCRIPT}" >"$module_path/post_install.sh" - chmod +x "$module_path/post_install.sh" - "$module_path/post_install.sh" 2>&1 | tee "$module_path/post_install.log" + echo "Running post-install script..." 
+ echo -n "${POST_INSTALL_SCRIPT}" > "$module_path/post_install.sh" + chmod +x "$module_path/post_install.sh" + "$module_path/post_install.sh" 2>&1 | tee "$module_path/post_install.log" fi export LANG=en_US.UTF-8 export LC_ALL=en_US.UTF-8 cd "${WORKDIR}" -nohup "$module_path/scripts/agentapi-start.sh" true "${AGENTAPI_PORT}" &>"$module_path/agentapi-start.log" & +nohup "$module_path/scripts/agentapi-start.sh" true "${AGENTAPI_PORT}" &> "$module_path/agentapi-start.log" & "$module_path/scripts/agentapi-wait-for-start.sh" "${AGENTAPI_PORT}" diff --git a/registry/coder/modules/agentapi/testdata/agentapi-start.sh b/registry/coder/modules/agentapi/testdata/agentapi-start.sh index 1564fe032..cf55e7a17 100644 --- a/registry/coder/modules/agentapi/testdata/agentapi-start.sh +++ b/registry/coder/modules/agentapi/testdata/agentapi-start.sh @@ -8,9 +8,9 @@ port=${2:-3284} module_path="$HOME/.agentapi-module" log_file_path="$module_path/agentapi.log" -echo "using prompt: $use_prompt" >>/home/coder/test-agentapi-start.log -echo "using port: $port" >>/home/coder/test-agentapi-start.log +echo "using prompt: $use_prompt" >> /home/coder/test-agentapi-start.log +echo "using port: $port" >> /home/coder/test-agentapi-start.log agentapi server --port "$port" --term-width 67 --term-height 1190 -- \ - bash -c aiagent \ - >"$log_file_path" 2>&1 + bash -c aiagent \ + > "$log_file_path" 2>&1 diff --git a/registry/coder/modules/amazon-q/main.tf b/registry/coder/modules/amazon-q/main.tf index dcc03156a..d2961b75f 100644 --- a/registry/coder/modules/amazon-q/main.tf +++ b/registry/coder/modules/amazon-q/main.tf @@ -69,7 +69,7 @@ variable "experiment_use_tmux" { variable "experiment_report_tasks" { type = bool description = "Whether to enable task reporting." - default = false + default = true } variable "experiment_pre_install_script" { @@ -125,13 +125,32 @@ variable "ai_prompt" { locals { encoded_pre_install_script = var.experiment_pre_install_script != null ? base64encode(var.experiment_pre_install_script) : "" encoded_post_install_script = var.experiment_post_install_script != null ? base64encode(var.experiment_post_install_script) : "" - full_prompt = <<-EOT + ${var.system_prompt} Your first task is: ${var.ai_prompt} EOT + + module_dir_name = ".amazon-q-module" +} + +module "agentapi" { + source = "registry.coder.com/coder/agentapi/coder" + version = "1.0.0" + + agent_id = var.agent_id + web_app_slug = "amazon-q" + web_app_order = var.order + web_app_group = var.group + web_app_icon = var.icon + web_app_display_name = "Amazon Q" + cli_app = true + cli_app_slug = "amazon-q-cli" + cli_app_display_name = "Amazon Q CLI" + module_dir_name = local.module_dir_name + install_agentapi = true } resource "coder_script" "amazon_q" { @@ -207,86 +226,34 @@ resource "coder_script" "amazon_q" { /tmp/post_install.sh fi - if [ "${var.experiment_use_tmux}" = "true" ] && [ "${var.experiment_use_screen}" = "true" ]; then - echo "Error: Both experiment_use_tmux and experiment_use_screen cannot be true simultaneously." - echo "Please set only one of them to true." + exit 1 fi - if [ "${var.experiment_use_tmux}" = "true" ]; then - echo "Running Amazon Q in the background with tmux..." - - if ! command_exists tmux; then - echo "Error: tmux is not installed. Please install tmux manually." 
- exit 1 - fi - - touch "$HOME/.amazon-q.log" - - export LANG=en_US.UTF-8 - export LC_ALL=en_US.UTF-8 - - tmux new-session -d -s amazon-q -c "${var.folder}" "q chat --trust-all-tools | tee -a "$HOME/.amazon-q.log" && exec bash" - - tmux send-keys -t amazon-q "${local.full_prompt}" - sleep 5 - tmux send-keys -t amazon-q Enter + if [ -n "${local.full_prompt}" ]; then + mkdir -p "${HOME}/${local.module_dir_name}" + echo "${local.full_prompt}" > "${HOME}/${local.module_dir_name}/prompt.txt" + Q_ARGS=(chat --trust-all-tools --message "$(cat ${HOME}/${local.module_dir_name}/prompt.txt)") + else + echo "Starting without a prompt" + Q_ARGS=(chat --trust-all-tools) fi - if [ "${var.experiment_use_screen}" = "true" ]; then - echo "Running Amazon Q in the background..." - - if ! command_exists screen; then - echo "Error: screen is not installed. Please install screen manually." - exit 1 - fi - - touch "$HOME/.amazon-q.log" - - if [ ! -f "$HOME/.screenrc" ]; then - echo "Creating ~/.screenrc and adding multiuser settings..." | tee -a "$HOME/.amazon-q.log" - echo -e "multiuser on\nacladd $(whoami)" > "$HOME/.screenrc" - fi - - if ! grep -q "^multiuser on$" "$HOME/.screenrc"; then - echo "Adding 'multiuser on' to ~/.screenrc..." | tee -a "$HOME/.amazon-q.log" - echo "multiuser on" >> "$HOME/.screenrc" - fi + export LANG=en_US.UTF-8 + export LC_ALL=en_US.UTF-8 - if ! grep -q "^acladd $(whoami)$" "$HOME/.screenrc"; then - echo "Adding 'acladd $(whoami)' to ~/.screenrc..." | tee -a "$HOME/.amazon-q.log" - echo "acladd $(whoami)" >> "$HOME/.screenrc" - fi - export LANG=en_US.UTF-8 - export LC_ALL=en_US.UTF-8 - - screen -U -dmS amazon-q bash -c ' - cd ${var.folder} - q chat --trust-all-tools | tee -a "$HOME/.amazon-q.log - exec bash - ' - # Extremely hacky way to send the prompt to the screen session - # This will be fixed in the future, but `amazon-q` was not sending MCP - # tasks when an initial prompt is provided. - screen -S amazon-q -X stuff "${local.full_prompt}" - sleep 5 - screen -S amazon-q -X stuff "^M" - else - if ! command_exists q; then - echo "Error: Amazon Q is not installed. Please enable install_amazon_q or install it manually." 
- exit 1 - fi - fi + cd "${var.folder}" + agentapi server --term-width 67 --term-height 1190 -- \ + bash -c "$(printf '%q ' "q" "${Q_ARGS[@]}")" EOT run_on_start = true } -resource "coder_app" "amazon_q" { - slug = "amazon-q" - display_name = "Amazon Q" - agent_id = var.agent_id - command = <<-EOT - #!/bin/bash +resource "coder_ai_task" "amazon_q" { + sidebar_app { + id = module.agentapi.web_app_id + } +} set -e export LANG=en_US.UTF-8 diff --git a/registry/coder/modules/claude-code/install.tf b/registry/coder/modules/claude-code/install.tf new file mode 100644 index 000000000..0d097ea43 --- /dev/null +++ b/registry/coder/modules/claude-code/install.tf @@ -0,0 +1,15 @@ +resource "coder_script" "install_claude_code" { + agent_id = var.agent_id + display_name = "Install Claude Code" + icon = var.icon + script = file("${path.module}/scripts/install.sh") + run_on_start = true + + env = { + ARG_ENABLE_SUBAGENTS = tostring(var.enable_subagents) + ARG_SUBAGENTS_VERSION = var.subagents_version + ARG_CUSTOM_SUBAGENTS_PATH = var.custom_subagents_path + ARG_ENABLED_SUBAGENTS = jsonencode(var.enabled_subagents) + ARG_DEFAULT_SUBAGENT_MODEL = var.default_subagent_model + } +} diff --git a/registry/coder/modules/claude-code/main.tf b/registry/coder/modules/claude-code/main.tf index b5dfc7c87..a659a1866 100644 --- a/registry/coder/modules/claude-code/main.tf +++ b/registry/coder/modules/claude-code/main.tf @@ -48,6 +48,36 @@ variable "install_claude_code" { default = true } +variable "enable_subagents" { + type = bool + description = "Whether to enable Claude Code subagents for specialized tasks." + default = false +} + +variable "subagents_version" { + type = string + description = "The version of subagents to install. Set to 'latest' for the most recent version." + default = "latest" +} + +variable "custom_subagents_path" { + type = string + description = "Path to custom subagents directory. If not set, will use the default agents from wshobson/agents." + default = "" +} + +variable "enabled_subagents" { + type = list(string) + description = "List of subagents to enable. If empty, all subagents will be enabled when enable_subagents is true." + default = [] +} + +variable "default_subagent_model" { + type = string + description = "Default Claude model to use for subagents that don't specify a model. Options: claude-3-5-haiku-20241022, claude-sonnet-4-20250514, claude-opus-4-20250514" + default = "claude-sonnet-4-20250514" +} + variable "claude_code_version" { type = string description = "The version of Claude Code to install." 
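The new `install.tf` wires these subagent variables through to `registry/coder/modules/claude-code/scripts/install.sh` as `ARG_*` environment variables. A rough, hand-run equivalent of what that script does when `enable_subagents = true`, assuming the default `wshobson/agents` source and the script's `~/.claude` layout, looks like this (the two enabled agent names below are placeholders, not names taken from that repository):

```sh
# Rough sketch of the subagent setup performed by scripts/install.sh
# (assumes git is available; the enabled agent names are placeholders).
CLAUDE_DIR="$HOME/.claude"
mkdir -p "$CLAUDE_DIR"

# Default source repository; a custom_subagents_path is copied in instead when set.
git clone https://github.com/wshobson/agents.git "$CLAUDE_DIR/agents"

# Limit which subagents are active and set the model used when one is not specified.
mkdir -p "$CLAUDE_DIR/config"
cat > "$CLAUDE_DIR/config/agents.json" << 'EOF'
{ "enabledAgents": ["example-reviewer", "example-debugger"], "defaultModel": "claude-sonnet-4-20250514" }
EOF
```

Driving the same behaviour from Terraform only requires setting `enable_subagents`, `enabled_subagents`, and `default_subagent_model` on the module, since `install.tf` maps them one-to-one onto those environment variables.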
diff --git a/registry/coder/modules/claude-code/scripts/agentapi-start.sh b/registry/coder/modules/claude-code/scripts/agentapi-start.sh index c66b7f359..4f447c335 100644 --- a/registry/coder/modules/claude-code/scripts/agentapi-start.sh +++ b/registry/coder/modules/claude-code/scripts/agentapi-start.sh @@ -9,14 +9,14 @@ log_file_path="$module_path/agentapi.log" # if the first argument is not empty, start claude with the prompt if [ -n "$1" ]; then - cp "$module_path/prompt.txt" /tmp/claude-code-prompt + cp "$module_path/prompt.txt" /tmp/claude-code-prompt else - rm -f /tmp/claude-code-prompt + rm -f /tmp/claude-code-prompt fi # if the log file already exists, archive it if [ -f "$log_file_path" ]; then - mv "$log_file_path" "$log_file_path"".$(date +%s)" + mv "$log_file_path" "$log_file_path"".$(date +%s)" fi # see the remove-last-session-id.js script for details @@ -28,14 +28,14 @@ node "$scripts_dir/remove-last-session-id.js" "$(pwd)" || true set +o errexit function start_agentapi() { - local continue_flag="$1" - local prompt_subshell='"$(cat /tmp/claude-code-prompt)"' - - # use low width to fit in the tasks UI sidebar. height is adjusted so that width x height ~= 80x1000 characters - # visible in the terminal screen by default. - agentapi server --term-width 67 --term-height 1190 -- \ - bash -c "claude $continue_flag --dangerously-skip-permissions $prompt_subshell" \ - > "$log_file_path" 2>&1 + local continue_flag="$1" + local prompt_subshell='"$(cat /tmp/claude-code-prompt)"' + + # use low width to fit in the tasks UI sidebar. height is adjusted so that width x height ~= 80x1000 characters + # visible in the terminal screen by default. + agentapi server --term-width 67 --term-height 1190 -- \ + bash -c "claude $continue_flag --dangerously-skip-permissions $prompt_subshell" \ + > "$log_file_path" 2>&1 } echo "Starting AgentAPI..." @@ -47,15 +47,15 @@ exit_code=$? echo "First AgentAPI exit code: $exit_code" if [ $exit_code -eq 0 ]; then - exit 0 + exit 0 fi # if there was no conversation to continue, claude exited with an error. # start claude without the --continue flag. if grep -q "No conversation found to continue" "$log_file_path"; then - echo "AgentAPI with --continue flag failed, starting claude without it." - start_agentapi - exit_code=$? + echo "AgentAPI with --continue flag failed, starting claude without it." + start_agentapi + exit_code=$? fi echo "Second AgentAPI exit code: $exit_code" diff --git a/registry/coder/modules/claude-code/scripts/agentapi-wait-for-start.sh b/registry/coder/modules/claude-code/scripts/agentapi-wait-for-start.sh index 2eb849756..b9e76d362 100644 --- a/registry/coder/modules/claude-code/scripts/agentapi-wait-for-start.sh +++ b/registry/coder/modules/claude-code/scripts/agentapi-wait-for-start.sh @@ -9,22 +9,22 @@ agentapi_started=false echo "Waiting for agentapi server to start on port 3284..." for i in $(seq 1 150); do - for j in $(seq 1 3); do - sleep 0.1 - if curl -fs -o /dev/null "http://localhost:3284/status"; then - echo "agentapi response received ($j/3)" - else - echo "agentapi server not responding ($i/15)" - continue 2 - fi - done - agentapi_started=true - break + for j in $(seq 1 3); do + sleep 0.1 + if curl -fs -o /dev/null "http://localhost:3284/status"; then + echo "agentapi response received ($j/3)" + else + echo "agentapi server not responding ($i/15)" + continue 2 + fi + done + agentapi_started=true + break done if [ "$agentapi_started" != "true" ]; then - echo "Error: agentapi server did not start on port 3284 after 15 seconds." 
- exit 1 + echo "Error: agentapi server did not start on port 3284 after 15 seconds." + exit 1 fi echo "agentapi server started on port 3284." diff --git a/registry/coder/modules/claude-code/scripts/install.sh b/registry/coder/modules/claude-code/scripts/install.sh new file mode 100644 index 000000000..ad3c982d3 --- /dev/null +++ b/registry/coder/modules/claude-code/scripts/install.sh @@ -0,0 +1,61 @@ +#!/bin/bash + +set -euo pipefail + +BOLD='\033[0;1m' + +# Parse arguments +ARG_ENABLE_SUBAGENTS="${ARG_ENABLE_SUBAGENTS:-false}" +ARG_SUBAGENTS_VERSION="${ARG_SUBAGENTS_VERSION:-latest}" +ARG_CUSTOM_SUBAGENTS_PATH="${ARG_CUSTOM_SUBAGENTS_PATH:-}" +ARG_ENABLED_SUBAGENTS="${ARG_ENABLED_SUBAGENTS:-}" +ARG_DEFAULT_SUBAGENT_MODEL="${ARG_DEFAULT_SUBAGENT_MODEL:-claude-sonnet-4-20250514}" + +# Create Claude config directory +CLAUDE_DIR="$HOME/.claude" +mkdir -p "$CLAUDE_DIR" + +# Install subagents if enabled +if [ "$ARG_ENABLE_SUBAGENTS" = "true" ]; then + printf "%s Installing Claude Code subagents...\n" "${BOLD}" + + if [ -n "$ARG_CUSTOM_SUBAGENTS_PATH" ]; then + # Use custom subagents path + printf "Using custom subagents from: %s\n" "$ARG_CUSTOM_SUBAGENTS_PATH" + mkdir -p "$CLAUDE_DIR/agents" + cp -r "$ARG_CUSTOM_SUBAGENTS_PATH"/* "$CLAUDE_DIR/agents/" + else + # Clone the default agents repository + AGENTS_DIR="$CLAUDE_DIR/agents" + if [ ! -d "$AGENTS_DIR" ]; then + git clone https://github.com/wshobson/agents.git "$AGENTS_DIR" + fi + cd "$AGENTS_DIR" + + if [ "$ARG_SUBAGENTS_VERSION" = "latest" ]; then + git pull origin main + else + git checkout "$ARG_SUBAGENTS_VERSION" + fi + fi + + # Configure enabled subagents + if [ -n "$ARG_ENABLED_SUBAGENTS" ]; then + printf "Configuring enabled subagents: %s\n" "$ARG_ENABLED_SUBAGENTS" + mkdir -p "$CLAUDE_DIR/config" + echo "{\"enabledAgents\": $ARG_ENABLED_SUBAGENTS, \"defaultModel\": \"$ARG_DEFAULT_SUBAGENT_MODEL\"}" > "$CLAUDE_DIR/config/agents.json" + fi + + printf "%s Claude Code subagents installed successfully\n" "${BOLD}" +fi + +# Install Claude Code +printf "%s Installing Claude Code...\n" "${BOLD}" +if command -v npm &> /dev/null; then + npm install -g @anthropic/claude-code +else + echo "npm not found. Please install Node.js and npm first." + exit 1 +fi + +printf "%s Claude Code installation complete\n" "${BOLD}" diff --git a/registry/coder/modules/devcontainers-cli/run.sh b/registry/coder/modules/devcontainers-cli/run.sh index f7bf852c6..bd3c1b1dc 100644 --- a/registry/coder/modules/devcontainers-cli/run.sh +++ b/registry/coder/modules/devcontainers-cli/run.sh @@ -1,55 +1,55 @@ #!/usr/bin/env sh # If @devcontainers/cli is already installed, we can skip -if command -v devcontainer >/dev/null 2>&1; then - echo "πŸ₯³ @devcontainers/cli is already installed into $(which devcontainer)!" - exit 0 +if command -v devcontainer > /dev/null 2>&1; then + echo "πŸ₯³ @devcontainers/cli is already installed into $(which devcontainer)!" + exit 0 fi # Check if docker is installed -if ! command -v docker >/dev/null 2>&1; then - echo "WARNING: Docker was not found but is required to use @devcontainers/cli, please make sure it is available." +if ! command -v docker > /dev/null 2>&1; then + echo "WARNING: Docker was not found but is required to use @devcontainers/cli, please make sure it is available." 
fi # Determine the package manager to use: npm, pnpm, or yarn -if command -v yarn >/dev/null 2>&1; then - PACKAGE_MANAGER="yarn" -elif command -v npm >/dev/null 2>&1; then - PACKAGE_MANAGER="npm" -elif command -v pnpm >/dev/null 2>&1; then - PACKAGE_MANAGER="pnpm" +if command -v yarn > /dev/null 2>&1; then + PACKAGE_MANAGER="yarn" +elif command -v npm > /dev/null 2>&1; then + PACKAGE_MANAGER="npm" +elif command -v pnpm > /dev/null 2>&1; then + PACKAGE_MANAGER="pnpm" else - echo "ERROR: No supported package manager (npm, pnpm, yarn) is installed. Please install one first." 1>&2 - exit 1 + echo "ERROR: No supported package manager (npm, pnpm, yarn) is installed. Please install one first." 1>&2 + exit 1 fi install() { - echo "Installing @devcontainers/cli using $PACKAGE_MANAGER..." - if [ "$PACKAGE_MANAGER" = "npm" ]; then - npm install -g @devcontainers/cli - elif [ "$PACKAGE_MANAGER" = "pnpm" ]; then - # Check if PNPM_HOME is set, if not, set it to the script's bin directory - # pnpm needs this to be set to install binaries - # coder agent ensures this part is part of the PATH - # so that the devcontainer command is available - if [ -z "$PNPM_HOME" ]; then - PNPM_HOME="$CODER_SCRIPT_BIN_DIR" - export M_HOME - fi - pnpm add -g @devcontainers/cli - elif [ "$PACKAGE_MANAGER" = "yarn" ]; then - yarn global add @devcontainers/cli --prefix "$(dirname "$CODER_SCRIPT_BIN_DIR")" + echo "Installing @devcontainers/cli using $PACKAGE_MANAGER..." + if [ "$PACKAGE_MANAGER" = "npm" ]; then + npm install -g @devcontainers/cli + elif [ "$PACKAGE_MANAGER" = "pnpm" ]; then + # Check if PNPM_HOME is set, if not, set it to the script's bin directory + # pnpm needs this to be set to install binaries + # coder agent ensures this part is part of the PATH + # so that the devcontainer command is available + if [ -z "$PNPM_HOME" ]; then + PNPM_HOME="$CODER_SCRIPT_BIN_DIR" + export M_HOME fi + pnpm add -g @devcontainers/cli + elif [ "$PACKAGE_MANAGER" = "yarn" ]; then + yarn global add @devcontainers/cli --prefix "$(dirname "$CODER_SCRIPT_BIN_DIR")" + fi } if ! install; then - echo "Failed to install @devcontainers/cli" >&2 - exit 1 + echo "Failed to install @devcontainers/cli" >&2 + exit 1 fi -if ! command -v devcontainer >/dev/null 2>&1; then - echo "Installation completed but 'devcontainer' command not found in PATH" >&2 - exit 1 +if ! command -v devcontainer > /dev/null 2>&1; then + echo "Installation completed but 'devcontainer' command not found in PATH" >&2 + exit 1 fi echo "πŸ₯³ @devcontainers/cli has been installed into $(which devcontainer)!" diff --git a/registry/coder/modules/filebrowser/run.sh b/registry/coder/modules/filebrowser/run.sh index ea4b857a3..ed34e2a2a 100644 --- a/registry/coder/modules/filebrowser/run.sh +++ b/registry/coder/modules/filebrowser/run.sh @@ -7,7 +7,7 @@ BOLD='\033[[0;1m' printf "$${BOLD}Installing filebrowser \n\n" # Check if filebrowser is installed -if ! command -v filebrowser &>/dev/null; then +if ! command -v filebrowser &> /dev/null; then curl -fsSL https://raw.githubusercontent.com/filebrowser/get/master/get.sh | bash fi @@ -34,6 +34,6 @@ printf "πŸ‘· Starting filebrowser in background... 
\n\n" printf "πŸ“‚ Serving $${ROOT_DIR} at http://localhost:${PORT} \n\n" -filebrowser >>${LOG_PATH} 2>&1 & +filebrowser >> ${LOG_PATH} 2>&1 & printf "πŸ“ Logs at ${LOG_PATH} \n\n" diff --git a/registry/coder/modules/goose/scripts/install.sh b/registry/coder/modules/goose/scripts/install.sh index 28fc923ad..9bca8086b 100644 --- a/registry/coder/modules/goose/scripts/install.sh +++ b/registry/coder/modules/goose/scripts/install.sh @@ -2,7 +2,7 @@ # Function to check if a command exists command_exists() { - command -v "$1" >/dev/null 2>&1 + command -v "$1" > /dev/null 2>&1 } set -o nounset @@ -18,40 +18,40 @@ echo "--------------------------------" set +o nounset if [ "${ARG_INSTALL}" = "true" ]; then - echo "Installing Goose..." - parsed_version="${ARG_GOOSE_VERSION}" - if [ "${ARG_GOOSE_VERSION}" = "stable" ]; then - parsed_version="" - fi - curl -fsSL https://github.com/block/goose/releases/download/stable/download_cli.sh | GOOSE_VERSION="${parsed_version}" CONFIGURE=false bash - echo "Goose installed" + echo "Installing Goose..." + parsed_version="${ARG_GOOSE_VERSION}" + if [ "${ARG_GOOSE_VERSION}" = "stable" ]; then + parsed_version="" + fi + curl -fsSL https://github.com/block/goose/releases/download/stable/download_cli.sh | GOOSE_VERSION="${parsed_version}" CONFIGURE=false bash + echo "Goose installed" else - echo "Skipping Goose installation" + echo "Skipping Goose installation" fi if [ "${ARG_GOOSE_CONFIG}" != "" ]; then - echo "Configuring Goose..." - mkdir -p "$HOME/.config/goose" - echo "GOOSE_PROVIDER: $ARG_PROVIDER" >"$HOME/.config/goose/config.yaml" - echo "GOOSE_MODEL: $ARG_MODEL" >>"$HOME/.config/goose/config.yaml" - echo "$ARG_GOOSE_CONFIG" >>"$HOME/.config/goose/config.yaml" + echo "Configuring Goose..." + mkdir -p "$HOME/.config/goose" + echo "GOOSE_PROVIDER: $ARG_PROVIDER" > "$HOME/.config/goose/config.yaml" + echo "GOOSE_MODEL: $ARG_MODEL" >> "$HOME/.config/goose/config.yaml" + echo "$ARG_GOOSE_CONFIG" >> "$HOME/.config/goose/config.yaml" else - echo "Skipping Goose configuration" + echo "Skipping Goose configuration" fi if [ "${GOOSE_SYSTEM_PROMPT}" != "" ]; then - echo "Setting Goose system prompt..." - mkdir -p "$HOME/.config/goose" - echo "$GOOSE_SYSTEM_PROMPT" >"$HOME/.config/goose/.goosehints" + echo "Setting Goose system prompt..." + mkdir -p "$HOME/.config/goose" + echo "$GOOSE_SYSTEM_PROMPT" > "$HOME/.config/goose/.goosehints" else - echo "Goose system prompt not set. use the GOOSE_SYSTEM_PROMPT environment variable to set it." + echo "Goose system prompt not set. use the GOOSE_SYSTEM_PROMPT environment variable to set it." fi if command_exists goose; then - GOOSE_CMD=goose + GOOSE_CMD=goose elif [ -f "$HOME/.local/bin/goose" ]; then - GOOSE_CMD="$HOME/.local/bin/goose" + GOOSE_CMD="$HOME/.local/bin/goose" else - echo "Error: Goose is not installed. Please enable install_goose or install it manually." - exit 1 + echo "Error: Goose is not installed. Please enable install_goose or install it manually." 
+ exit 1 fi diff --git a/registry/coder/modules/goose/scripts/start.sh b/registry/coder/modules/goose/scripts/start.sh index 314a41d0f..737138ba6 100644 --- a/registry/coder/modules/goose/scripts/start.sh +++ b/registry/coder/modules/goose/scripts/start.sh @@ -4,16 +4,16 @@ set -o errexit set -o pipefail command_exists() { - command -v "$1" >/dev/null 2>&1 + command -v "$1" > /dev/null 2>&1 } if command_exists goose; then - GOOSE_CMD=goose + GOOSE_CMD=goose elif [ -f "$HOME/.local/bin/goose" ]; then - GOOSE_CMD="$HOME/.local/bin/goose" + GOOSE_CMD="$HOME/.local/bin/goose" else - echo "Error: Goose is not installed. Please enable install_goose or install it manually." - exit 1 + echo "Error: Goose is not installed. Please enable install_goose or install it manually." + exit 1 fi # this must be kept up to date with main.tf @@ -21,15 +21,15 @@ MODULE_DIR="$HOME/.goose-module" mkdir -p "$MODULE_DIR" if [ ! -z "$GOOSE_TASK_PROMPT" ]; then - echo "Starting with a prompt" - PROMPT="Review your goosehints. Every step of the way, report tasks to Coder with proper descriptions and statuses. Your task at hand: $GOOSE_TASK_PROMPT" - PROMPT_FILE="$MODULE_DIR/prompt.txt" - echo -n "$PROMPT" >"$PROMPT_FILE" - GOOSE_ARGS=(run --interactive --instructions "$PROMPT_FILE") + echo "Starting with a prompt" + PROMPT="Review your goosehints. Every step of the way, report tasks to Coder with proper descriptions and statuses. Your task at hand: $GOOSE_TASK_PROMPT" + PROMPT_FILE="$MODULE_DIR/prompt.txt" + echo -n "$PROMPT" > "$PROMPT_FILE" + GOOSE_ARGS=(run --interactive --instructions "$PROMPT_FILE") else - echo "Starting without a prompt" - GOOSE_ARGS=() + echo "Starting without a prompt" + GOOSE_ARGS=() fi agentapi server --term-width 67 --term-height 1190 -- \ - bash -c "$(printf '%q ' "$GOOSE_CMD" "${GOOSE_ARGS[@]}")" + bash -c "$(printf '%q ' "$GOOSE_CMD" "${GOOSE_ARGS[@]}")" diff --git a/registry/coder/modules/goose/testdata/goose-mock.sh b/registry/coder/modules/goose/testdata/goose-mock.sh index 4d7d3931e..b6b3e38c9 100644 --- a/registry/coder/modules/goose/testdata/goose-mock.sh +++ b/registry/coder/modules/goose/testdata/goose-mock.sh @@ -3,6 +3,6 @@ set -e while true; do - echo "$(date) - goose-mock" - sleep 15 + echo "$(date) - goose-mock" + sleep 15 done diff --git a/registry/coder/modules/jfrog-maven-token/README.md b/registry/coder/modules/jfrog-maven-token/README.md new file mode 100644 index 000000000..818b4ebd0 --- /dev/null +++ b/registry/coder/modules/jfrog-maven-token/README.md @@ -0,0 +1,121 @@ +--- +display_name: JFrog Maven (Token) +description: Install the JF CLI and configure Maven with Artifactory using Artifactory terraform provider. +icon: ../../../../.icons/jfrog.svg +verified: true +tags: [integration, jfrog, maven] +--- + +# JFrog Maven + +Install the JF CLI and configure Maven with Artifactory using Artifactory terraform provider. + +```tf +module "jfrog_maven" { + source = "registry.coder.com/coder/jfrog-maven-token/coder" + version = "1.0.0" + agent_id = coder_agent.example.id + jfrog_url = "https://XXXX.jfrog.io" + artifactory_access_token = var.artifactory_access_token + maven_repositories = ["maven-local", "maven-remote", "maven-virtual"] +} +``` + +For detailed instructions, please see this [guide](https://coder.com/docs/v2/latest/guides/artifactory-integration#jfrog-token) on the Coder documentation. + +> Note +> This module does not install Maven but only configures it. You need to handle the installation of Maven yourself. 
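Since the module only configures Maven, make sure Maven itself is present in the workspace image or installed at startup; a minimal sketch for a Debian/Ubuntu-based image:

```sh
# Minimal sketch: install Maven yourself (Debian/Ubuntu example); the module only configures it.
sudo apt-get update && sudo apt-get install -y maven
mvn --version
```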
+ +![JFrog Maven](../../.images/jfrog-maven.png) + +## Examples + +### Configure Maven with Artifactory local repositories + +```tf +module "jfrog_maven" { + source = "registry.coder.com/coder/jfrog-maven-token/coder" + version = "1.0.0" + agent_id = coder_agent.example.id + jfrog_url = "https://YYYY.jfrog.io" + artifactory_access_token = var.artifactory_access_token # An admin access token + maven_repositories = ["maven-local", "maven-remote"] +} +``` + +You should now be able to use Maven with Artifactory repositories: + +```shell +jf mvn clean install +``` + +```shell +mvn clean install +``` + +### Configure code-server with JFrog extension + +The [JFrog extension](https://open-vsx.org/extension/JFrog/jfrog-vscode-extension) for VS Code allows you to interact with Artifactory from within the IDE. + +```tf +module "jfrog_maven" { + source = "registry.coder.com/coder/jfrog-maven-token/coder" + version = "1.0.0" + agent_id = coder_agent.example.id + jfrog_url = "https://XXXX.jfrog.io" + artifactory_access_token = var.artifactory_access_token + configure_code_server = true # Add JFrog extension configuration for code-server + maven_repositories = ["maven-local"] +} +``` + +### Add a custom token description + +```tf +data "coder_workspace" "me" {} + +module "jfrog_maven" { + source = "registry.coder.com/coder/jfrog-maven-token/coder" + version = "1.0.0" + agent_id = coder_agent.example.id + jfrog_url = "https://XXXX.jfrog.io" + artifactory_access_token = var.artifactory_access_token + token_description = "Token for Coder workspace: ${data.coder_workspace_owner.me.name}/${data.coder_workspace.me.name}" + maven_repositories = ["maven-local"] +} +``` + +### Using the access token in other terraform resources + +JFrog Access token is also available as a terraform output. You can use it in other terraform resources. + +```tf +output "jfrog_access_token" { + value = module.jfrog_maven.access_token + sensitive = true +} +``` + +## Variables + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| `agent_id` | The ID of a Coder agent. | `string` | n/a | yes | +| `jfrog_url` | JFrog instance URL. e.g. https://myartifactory.jfrog.io | `string` | n/a | yes | +| `artifactory_access_token` | The admin-level access token to use for JFrog. | `string` | n/a | yes | +| `maven_repositories` | List of Maven repository keys to configure. | `list(string)` | `[]` | no | +| `username_field` | The field to use for the artifactory username. | `string` | `"username"` | no | +| `username` | Username to use for Artifactory. | `string` | `null` | no | +| `jfrog_server_id` | The server ID of the JFrog instance for JFrog CLI configuration. | `string` | `"0"` | no | +| `token_description` | Free text token description. | `string` | `"Token for Coder workspace"` | no | +| `check_license` | Toggle for pre-flight checking of Artifactory license. | `bool` | `true` | no | +| `refreshable` | Is this token refreshable? | `bool` | `false` | no | +| `expires_in` | The amount of time, in seconds, it would take for the token to expire. | `number` | `null` | no | +| `configure_code_server` | Set to true to configure code-server to use JFrog. 
| `bool` | `false` | no | + +## Outputs + +| Name | Description | +|------|-------------| +| `access_token` | The JFrog access token | +| `username` | The JFrog username | \ No newline at end of file diff --git a/registry/coder/modules/jfrog-maven-token/main.test.ts b/registry/coder/modules/jfrog-maven-token/main.test.ts new file mode 100644 index 000000000..01e635346 --- /dev/null +++ b/registry/coder/modules/jfrog-maven-token/main.test.ts @@ -0,0 +1,91 @@ +import { serve } from "bun"; +import { describe, expect, it } from "bun:test"; +import { + createJSONResponse, + findResourceInstance, + runTerraformInit, + runTerraformApply, + testRequiredVariables, +} from "~test"; + +describe("jfrog-maven-token", async () => { + type TestVariables = { + agent_id: string; + jfrog_url: string; + artifactory_access_token: string; + maven_repositories?: string; + + token_description?: string; + check_license?: boolean; + refreshable?: boolean; + expires_in?: number; + username_field?: string; + username?: string; + jfrog_server_id?: string; + configure_code_server?: boolean; + }; + + await runTerraformInit(import.meta.dir); + + // Run a fake JFrog server so the provider can initialize + // correctly. This saves us from having to make remote requests! + const fakeFrogHost = serve({ + fetch: (req) => { + const url = new URL(req.url); + // See https://jfrog.com/help/r/jfrog-rest-apis/license-information + if (url.pathname === "/artifactory/api/system/license") + return createJSONResponse({ + type: "Commercial", + licensedTo: "JFrog inc.", + validThrough: "May 15, 2036", + }); + if (url.pathname === "/access/api/v1/tokens") + return createJSONResponse({ + token_id: "xxx", + access_token: "xxx", + scopes: "any", + }); + return createJSONResponse({}); + }, + port: 0, + }); + + const fakeFrogApi = `${fakeFrogHost.hostname}:${fakeFrogHost.port}/artifactory/api`; + const fakeFrogUrl = `http://${fakeFrogHost.hostname}:${fakeFrogHost.port}`; + const user = "default"; + const token = "xxx"; + + it("can run apply with required variables", async () => { + testRequiredVariables(import.meta.dir, { + agent_id: "some-agent-id", + jfrog_url: fakeFrogUrl, + artifactory_access_token: "XXXX", + }); + }); + + it("configures maven with multiple repos", async () => { + const state = await runTerraformApply(import.meta.dir, { + agent_id: "some-agent-id", + jfrog_url: fakeFrogUrl, + artifactory_access_token: "XXXX", + maven_repositories: JSON.stringify(["maven-local", "maven-remote", "maven-virtual"]), + }); + const coderScript = findResourceInstance(state, "coder_script"); + expect(coderScript.script).toContain( + 'jf mvc --global --repo-resolve "maven-local"', + ); + expect(coderScript.script).toContain("mkdir -p ~/.m2"); + expect(coderScript.script).toContain("cat << EOF > ~/.m2/settings.xml"); + }); + + it("skips maven configuration when no repos provided", async () => { + const state = await runTerraformApply(import.meta.dir, { + agent_id: "some-agent-id", + jfrog_url: fakeFrogUrl, + artifactory_access_token: "XXXX", + maven_repositories: JSON.stringify([]), + }); + const coderScript = findResourceInstance(state, "coder_script"); + expect(coderScript.script).toContain("no Maven repositories are set, skipping Maven configuration"); + }); +}); \ No newline at end of file diff --git a/registry/coder/modules/jfrog-maven-token/main.tf b/registry/coder/modules/jfrog-maven-token/main.tf new file mode 100644 index 000000000..47882ef71 --- /dev/null +++ b/registry/coder/modules/jfrog-maven-token/main.tf @@ -0,0 +1,176 @@ +terraform 
{ + required_version = ">= 1.0" + + required_providers { + coder = { + source = "coder/coder" + version = ">= 0.23" + } + artifactory = { + source = "registry.terraform.io/jfrog/artifactory" + version = "~> 10.0.2" + } + } +} + +variable "jfrog_url" { + type = string + description = "JFrog instance URL. e.g. https://myartifactory.jfrog.io" + validation { + condition = can(regex("^(https|http)://", var.jfrog_url)) + error_message = "jfrog_url must be a valid URL starting with either 'https://' or 'http://'" + } +} + +variable "jfrog_server_id" { + type = string + description = "The server ID of the JFrog instance for JFrog CLI configuration" + default = "0" +} + +variable "artifactory_access_token" { + type = string + description = "The admin-level access token to use for JFrog." +} + +variable "token_description" { + type = string + description = "Free text token description. Useful for filtering and managing tokens." + default = "Token for Coder workspace" +} + +variable "check_license" { + type = bool + description = "Toggle for pre-flight checking of Artifactory license. Default to `true`." + default = true +} + +variable "refreshable" { + type = bool + description = "Is this token refreshable? Default is `false`." + default = false +} + +variable "expires_in" { + type = number + description = "The amount of time, in seconds, it would take for the token to expire." + default = null +} + +variable "username_field" { + type = string + description = "The field to use for the artifactory username. Default `username`." + default = "username" + validation { + condition = can(regex("^(email|username)$", var.username_field)) + error_message = "username_field must be either 'email' or 'username'" + } +} + +variable "username" { + type = string + description = "Username to use for Artifactory. Overrides the field specified in `username_field`" + default = null +} + +variable "agent_id" { + type = string + description = "The ID of a Coder agent." +} + +variable "maven_repositories" { + type = list(string) + description = "List of Maven repository keys to configure. e.g. ['maven-local', 'maven-remote', 'maven-virtual']" + default = [] +} + +variable "configure_code_server" { + type = bool + description = "Set to true to configure code-server to use JFrog." + default = false +} + +locals { + # The username to use for artifactory + username = coalesce(var.username, var.username_field == "email" ? data.coder_workspace_owner.me.email : data.coder_workspace_owner.me.name) + jfrog_host = split("://", var.jfrog_url)[1] + common_values = { + JFROG_URL = var.jfrog_url + JFROG_HOST = local.jfrog_host + JFROG_SERVER_ID = var.jfrog_server_id + ARTIFACTORY_USERNAME = local.username + ARTIFACTORY_EMAIL = data.coder_workspace_owner.me.email + ARTIFACTORY_ACCESS_TOKEN = artifactory_scoped_token.me.access_token + } + maven_settings = templatefile( + "${path.module}/settings.xml.tftpl", merge(local.common_values, { REPOS = var.maven_repositories }) + ) +} + +# Configure the Artifactory provider +provider "artifactory" { + url = join("/", [var.jfrog_url, "artifactory"]) + access_token = var.artifactory_access_token + check_license = var.check_license +} + +resource "artifactory_scoped_token" "me" { + # This is hacky, but on terraform plan the data source gives empty strings, + # which fails validation. + username = length(local.username) > 0 ? 
local.username : "dummy" + scopes = ["applied-permissions/user"] + refreshable = var.refreshable + expires_in = var.expires_in + description = var.token_description +} + +data "coder_workspace" "me" {} +data "coder_workspace_owner" "me" {} + +resource "coder_script" "jfrog_maven" { + agent_id = var.agent_id + display_name = "jfrog-maven" + icon = "/icon/jfrog.svg" + script = templatefile("${path.module}/run.sh", merge( + local.common_values, + { + CONFIGURE_CODE_SERVER = var.configure_code_server + HAS_MAVEN = length(var.maven_repositories) == 0 ? "" : "YES" + MAVEN_SETTINGS = local.maven_settings + REPOSITORY_MAVEN = try(element(var.maven_repositories, 0), "") + } + )) + run_on_start = true +} + +resource "coder_env" "jfrog_ide_url" { + count = var.configure_code_server ? 1 : 0 + agent_id = var.agent_id + name = "JFROG_IDE_URL" + value = var.jfrog_url +} + +resource "coder_env" "jfrog_ide_access_token" { + count = var.configure_code_server ? 1 : 0 + agent_id = var.agent_id + name = "JFROG_IDE_ACCESS_TOKEN" + value = artifactory_scoped_token.me.access_token +} + +resource "coder_env" "jfrog_ide_store_connection" { + count = var.configure_code_server ? 1 : 0 + agent_id = var.agent_id + name = "JFROG_IDE_STORE_CONNECTION" + value = true +} + +output "access_token" { + description = "value of the JFrog access token" + value = artifactory_scoped_token.me.access_token + sensitive = true +} + +output "username" { + description = "value of the JFrog username" + value = local.username +} \ No newline at end of file diff --git a/registry/coder/modules/jfrog-maven-token/run.sh b/registry/coder/modules/jfrog-maven-token/run.sh new file mode 100644 index 000000000..95ab60e06 --- /dev/null +++ b/registry/coder/modules/jfrog-maven-token/run.sh @@ -0,0 +1,94 @@ +#!/usr/bin/env bash + +BOLD='\033[0;1m' + +not_configured() { + echo "πŸ€” no Maven repositories are set, skipping Maven configuration." + echo "You can configure Maven repositories by providing a list for 'maven_repositories' input." +} + +config_complete() { + echo "πŸ₯³ Maven configuration complete!" +} + +# check if JFrog CLI is already installed +if command -v jf > /dev/null 2>&1; then + echo "βœ… JFrog CLI is already installed, skipping installation." +else + echo "πŸ“¦ Installing JFrog CLI..." + curl -fL https://install-cli.jfrog.io | sudo sh + sudo chmod 755 /usr/local/bin/jf +fi + +# The jf CLI checks $CI when determining whether to use interactive flows. +export CI=true +# Authenticate JFrog CLI with Artifactory. +echo "${ARTIFACTORY_ACCESS_TOKEN}" | jf c add --access-token-stdin --url "${JFROG_URL}" --overwrite "${JFROG_SERVER_ID}" +# Set the configured server as the default. +jf c use "${JFROG_SERVER_ID}" + +# Configure Maven to use the Artifactory repositories. +if [ -z "${HAS_MAVEN}" ]; then + not_configured +else + if command -v mvn > /dev/null 2>&1; then + echo "β˜• Configuring Maven..." + jf mvc --global --repo-resolve "${REPOSITORY_MAVEN}" + mkdir -p ~/.m2 + cat << EOF > ~/.m2/settings.xml +${MAVEN_SETTINGS} +EOF + config_complete + else + echo "πŸ€” no maven is installed, skipping maven configuration." + fi +fi + +# Install the JFrog vscode extension for code-server. +if [ "${CONFIGURE_CODE_SERVER}" == "true" ]; then + while ! [ -x /tmp/code-server/bin/code-server ]; do + counter=0 + if [ $counter -eq 60 ]; then + echo "Timed out waiting for /tmp/code-server/bin/code-server to be installed." + exit 1 + fi + echo "Waiting for /tmp/code-server/bin/code-server to be installed..." 
+ sleep 1 + ((counter++)) + done + echo "πŸ“¦ Installing JFrog extension..." + /tmp/code-server/bin/code-server --install-extension jfrog.jfrog-vscode-extension + echo "πŸ₯³ JFrog extension installed!" +else + echo "πŸ€” Skipping JFrog extension installation. Set configure_code_server to true to install the JFrog extension." +fi + +# Configure the JFrog CLI completion +echo "πŸ“¦ Configuring JFrog CLI completion..." +# Get the user's shell +SHELLNAME=$(grep "^$USER" /etc/passwd | awk -F':' '{print $7}' | awk -F'/' '{print $NF}') +# Generate the completion script +jf completion $SHELLNAME --install +begin_stanza="# BEGIN: jf CLI shell completion (added by coder module jfrog-maven)" +# Add the completion script to the user's shell profile +if [ "$SHELLNAME" == "bash" ] && [ -f ~/.bashrc ]; then + if ! grep -q "$begin_stanza" ~/.bashrc; then + printf "%s\n" "$begin_stanza" >> ~/.bashrc + echo 'source "$HOME/.jfrog/jfrog_bash_completion"' >> ~/.bashrc + echo "# END: jf CLI shell completion" >> ~/.bashrc + else + echo "πŸ₯³ ~/.bashrc already contains jf CLI shell completion configuration, skipping." + fi +elif [ "$SHELLNAME" == "zsh" ] && [ -f ~/.zshrc ]; then + if ! grep -q "$begin_stanza" ~/.zshrc; then + printf "\n%s\n" "$begin_stanza" >> ~/.zshrc + echo "autoload -Uz compinit" >> ~/.zshrc + echo "compinit" >> ~/.zshrc + echo 'source "$HOME/.jfrog/jfrog_zsh_completion"' >> ~/.zshrc + echo "# END: jf CLI shell completion" >> ~/.zshrc + else + echo "πŸ₯³ ~/.zshrc already contains jf CLI shell completion configuration, skipping." + fi +else + echo "πŸ€” ~/.bashrc or ~/.zshrc does not exist, skipping jf CLI shell completion configuration." +fi \ No newline at end of file diff --git a/registry/coder/modules/jfrog-maven-token/settings.xml.tftpl b/registry/coder/modules/jfrog-maven-token/settings.xml.tftpl new file mode 100644 index 000000000..587cc1c0e --- /dev/null +++ b/registry/coder/modules/jfrog-maven-token/settings.xml.tftpl @@ -0,0 +1,57 @@ + + + + +%{ for REPO in REPOS ~} + + ${REPO} + ${ARTIFACTORY_USERNAME} + ${ARTIFACTORY_ACCESS_TOKEN} + +%{ endfor ~} + + + +%{ for REPO in REPOS ~} + + ${REPO} + + + ${REPO} + ${REPO} + ${JFROG_URL}/artifactory/${REPO} + + true + + + true + + + + + + ${REPO} + ${REPO} + ${JFROG_URL}/artifactory/${REPO} + + true + + + true + + + + +%{ endfor ~} + + + +%{ for REPO in REPOS ~} + ${REPO} +%{ endfor ~} + + + \ No newline at end of file diff --git a/registry/coder/modules/jfrog-maven/README.md b/registry/coder/modules/jfrog-maven/README.md new file mode 100644 index 000000000..a07bf2c8c --- /dev/null +++ b/registry/coder/modules/jfrog-maven/README.md @@ -0,0 +1,109 @@ +--- +display_name: JFrog Maven +description: Install the JF CLI and configure Maven with Artifactory using OAuth. +icon: ../../../../.icons/jfrog.svg +verified: true +tags: [integration, jfrog, maven, helper] +--- + +# JFrog Maven + +Install the JF CLI and configure Maven with Artifactory using OAuth configured via the Coder [`external-auth`](https://coder.com/docs/v2/latest/admin/external-auth) feature. 
+ +![JFrog Maven](../../.images/jfrog-maven.png) + +```tf +module "jfrog_maven" { + count = data.coder_workspace.me.start_count + source = "registry.coder.com/coder/jfrog-maven/coder" + version = "1.0.0" + agent_id = coder_agent.example.id + jfrog_url = "https://example.jfrog.io" + username_field = "username" # If you are using GitHub to login to both Coder and Artifactory, use username_field = "username" + + maven_repositories = ["maven-local", "maven-remote", "maven-virtual"] +} +``` + +> Note +> This module does not install Maven but only configures it. You need to handle the installation of Maven yourself. + +## Prerequisites + +This module is usable by JFrog self-hosted (on-premises) Artifactory as it requires configuring a custom integration. This integration benefits from Coder's [external-auth](https://coder.com/docs/v2/latest/admin/external-auth) feature and allows each user to authenticate with Artifactory using an OAuth flow and issues user-scoped tokens to each user. For configuration instructions, see this [guide](https://coder.com/docs/v2/latest/guides/artifactory-integration#jfrog-oauth) on the Coder documentation. + +## Examples + +### Configure Maven with Artifactory repositories + +Configure Maven to fetch dependencies from Artifactory while mapping the Coder username to the Artifactory username. + +```tf +module "jfrog_maven" { + count = data.coder_workspace.me.start_count + source = "registry.coder.com/coder/jfrog-maven/coder" + version = "1.0.0" + agent_id = coder_agent.example.id + jfrog_url = "https://example.jfrog.io" + username_field = "username" + + maven_repositories = ["maven-local", "maven-remote"] +} +``` + +You should now be able to use Maven with Artifactory repositories: + +```shell +jf mvn clean install +``` + +```shell +mvn clean install +``` + +### Configure code-server with JFrog extension + +The [JFrog extension](https://open-vsx.org/extension/JFrog/jfrog-vscode-extension) for VS Code allows you to interact with Artifactory from within the IDE. + +```tf +module "jfrog_maven" { + count = data.coder_workspace.me.start_count + source = "registry.coder.com/coder/jfrog-maven/coder" + version = "1.0.0" + agent_id = coder_agent.example.id + jfrog_url = "https://example.jfrog.io" + username_field = "username" + configure_code_server = true # Add JFrog extension configuration for code-server + maven_repositories = ["maven-local"] +} +``` + +### Using the access token in other terraform resources + +JFrog Access token is also available as a terraform output. You can use it in other terraform resources. + +```tf +output "jfrog_access_token" { + value = module.jfrog_maven[0].access_token + sensitive = true +} +``` + +## Variables + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| `agent_id` | The ID of a Coder agent. | `string` | n/a | yes | +| `jfrog_url` | JFrog instance URL. e.g. https://myartifactory.jfrog.io | `string` | n/a | yes | +| `maven_repositories` | List of Maven repository keys to configure. | `list(string)` | `[]` | no | +| `username_field` | The field to use for the artifactory username. | `string` | `"username"` | no | +| `jfrog_server_id` | The server ID of the JFrog instance for JFrog CLI configuration. | `string` | `"0"` | no | +| `external_auth_id` | JFrog external auth ID. | `string` | `"jfrog"` | no | +| `configure_code_server` | Set to true to configure code-server to use JFrog. 
| `bool` | `false` | no | + +## Outputs + +| Name | Description | +|------|-------------| +| `access_token` | The JFrog access token | +| `username` | The JFrog username | \ No newline at end of file diff --git a/registry/coder/modules/jfrog-maven/main.test.ts b/registry/coder/modules/jfrog-maven/main.test.ts new file mode 100644 index 000000000..779ed1cb1 --- /dev/null +++ b/registry/coder/modules/jfrog-maven/main.test.ts @@ -0,0 +1,57 @@ +import { describe, expect, it } from "bun:test"; +import { + findResourceInstance, + runTerraformInit, + runTerraformApply, + testRequiredVariables, +} from "~test"; + +describe("jfrog-maven", async () => { + type TestVariables = { + agent_id: string; + jfrog_url: string; + maven_repositories?: string; + + username_field?: string; + jfrog_server_id?: string; + external_auth_id?: string; + configure_code_server?: boolean; + }; + + await runTerraformInit(import.meta.dir); + + const fakeFrogApi = "localhost:8081/artifactory/api"; + const fakeFrogUrl = "http://localhost:8081"; + const user = "default"; + + it("can run apply with required variables", async () => { + testRequiredVariables(import.meta.dir, { + agent_id: "some-agent-id", + jfrog_url: fakeFrogUrl, + }); + }); + + it("configures maven with multiple repos", async () => { + const state = await runTerraformApply(import.meta.dir, { + agent_id: "some-agent-id", + jfrog_url: fakeFrogUrl, + maven_repositories: JSON.stringify(["maven-local", "maven-remote", "maven-virtual"]), + }); + const coderScript = findResourceInstance(state, "coder_script"); + expect(coderScript.script).toContain( + 'jf mvc --global --repo-resolve "maven-local"', + ); + expect(coderScript.script).toContain("mkdir -p ~/.m2"); + expect(coderScript.script).toContain("cat << EOF > ~/.m2/settings.xml"); + }); + + it("skips maven configuration when no repos provided", async () => { + const state = await runTerraformApply(import.meta.dir, { + agent_id: "some-agent-id", + jfrog_url: fakeFrogUrl, + maven_repositories: JSON.stringify([]), + }); + const coderScript = findResourceInstance(state, "coder_script"); + expect(coderScript.script).toContain("no Maven repositories are set, skipping Maven configuration"); + }); +}); \ No newline at end of file diff --git a/registry/coder/modules/jfrog-maven/main.tf b/registry/coder/modules/jfrog-maven/main.tf new file mode 100644 index 000000000..74e773703 --- /dev/null +++ b/registry/coder/modules/jfrog-maven/main.tf @@ -0,0 +1,130 @@ +terraform { + required_version = ">= 1.0" + + required_providers { + coder = { + source = "coder/coder" + version = ">= 0.23" + } + } +} + +variable "jfrog_url" { + type = string + description = "JFrog instance URL. e.g. https://myartifactory.jfrog.io" + validation { + condition = can(regex("^(https|http)://", var.jfrog_url)) + error_message = "jfrog_url must be a valid URL starting with either 'https://' or 'http://'" + } +} + +variable "jfrog_server_id" { + type = string + description = "The server ID of the JFrog instance for JFrog CLI configuration" + default = "0" +} + +variable "username_field" { + type = string + description = "The field to use for the artifactory username. i.e. Coder username or email." + default = "username" + validation { + condition = can(regex("^(email|username)$", var.username_field)) + error_message = "username_field must be either 'email' or 'username'" + } +} + +variable "external_auth_id" { + type = string + description = "JFrog external auth ID. 
Default: 'jfrog'" + default = "jfrog" +} + +variable "agent_id" { + type = string + description = "The ID of a Coder agent." +} + +variable "maven_repositories" { + type = list(string) + description = "List of Maven repository keys to configure. e.g. ['maven-local', 'maven-remote', 'maven-virtual']" + default = [] +} + +variable "configure_code_server" { + type = bool + description = "Set to true to configure code-server to use JFrog." + default = false +} + +locals { + # The username field to use for artifactory + username = var.username_field == "email" ? data.coder_workspace_owner.me.email : data.coder_workspace_owner.me.name + jfrog_host = split("://", var.jfrog_url)[1] + common_values = { + JFROG_URL = var.jfrog_url + JFROG_HOST = local.jfrog_host + JFROG_SERVER_ID = var.jfrog_server_id + ARTIFACTORY_USERNAME = local.username + ARTIFACTORY_EMAIL = data.coder_workspace_owner.me.email + ARTIFACTORY_ACCESS_TOKEN = data.coder_external_auth.jfrog.access_token + } + maven_settings = templatefile( + "${path.module}/settings.xml.tftpl", merge(local.common_values, { REPOS = var.maven_repositories }) + ) +} + +data "coder_workspace" "me" {} +data "coder_workspace_owner" "me" {} + +data "coder_external_auth" "jfrog" { + id = var.external_auth_id +} + +resource "coder_script" "jfrog_maven" { + agent_id = var.agent_id + display_name = "jfrog-maven" + icon = "/icon/jfrog.svg" + script = templatefile("${path.module}/run.sh", merge( + local.common_values, + { + CONFIGURE_CODE_SERVER = var.configure_code_server + HAS_MAVEN = length(var.maven_repositories) == 0 ? "" : "YES" + MAVEN_SETTINGS = local.maven_settings + REPOSITORY_MAVEN = try(element(var.maven_repositories, 0), "") + } + )) + run_on_start = true +} + +resource "coder_env" "jfrog_ide_url" { + count = var.configure_code_server ? 1 : 0 + agent_id = var.agent_id + name = "JFROG_IDE_URL" + value = var.jfrog_url +} + +resource "coder_env" "jfrog_ide_access_token" { + count = var.configure_code_server ? 1 : 0 + agent_id = var.agent_id + name = "JFROG_IDE_ACCESS_TOKEN" + value = data.coder_external_auth.jfrog.access_token +} + +resource "coder_env" "jfrog_ide_store_connection" { + count = var.configure_code_server ? 1 : 0 + agent_id = var.agent_id + name = "JFROG_IDE_STORE_CONNECTION" + value = true +} + +output "access_token" { + description = "value of the JFrog access token" + value = data.coder_external_auth.jfrog.access_token + sensitive = true +} + +output "username" { + description = "value of the JFrog username" + value = local.username +} \ No newline at end of file diff --git a/registry/coder/modules/jfrog-maven/run.sh b/registry/coder/modules/jfrog-maven/run.sh new file mode 100644 index 000000000..95ab60e06 --- /dev/null +++ b/registry/coder/modules/jfrog-maven/run.sh @@ -0,0 +1,94 @@ +#!/usr/bin/env bash + +BOLD='\033[0;1m' + +not_configured() { + echo "πŸ€” no Maven repositories are set, skipping Maven configuration." + echo "You can configure Maven repositories by providing a list for 'maven_repositories' input." +} + +config_complete() { + echo "πŸ₯³ Maven configuration complete!" +} + +# check if JFrog CLI is already installed +if command -v jf > /dev/null 2>&1; then + echo "βœ… JFrog CLI is already installed, skipping installation." +else + echo "πŸ“¦ Installing JFrog CLI..." + curl -fL https://install-cli.jfrog.io | sudo sh + sudo chmod 755 /usr/local/bin/jf +fi + +# The jf CLI checks $CI when determining whether to use interactive flows. +export CI=true +# Authenticate JFrog CLI with Artifactory. 
+echo "${ARTIFACTORY_ACCESS_TOKEN}" | jf c add --access-token-stdin --url "${JFROG_URL}" --overwrite "${JFROG_SERVER_ID}" +# Set the configured server as the default. +jf c use "${JFROG_SERVER_ID}" + +# Configure Maven to use the Artifactory repositories. +if [ -z "${HAS_MAVEN}" ]; then + not_configured +else + if command -v mvn > /dev/null 2>&1; then + echo "β˜• Configuring Maven..." + jf mvc --global --repo-resolve "${REPOSITORY_MAVEN}" + mkdir -p ~/.m2 + cat << EOF > ~/.m2/settings.xml +${MAVEN_SETTINGS} +EOF + config_complete + else + echo "πŸ€” no maven is installed, skipping maven configuration." + fi +fi + +# Install the JFrog vscode extension for code-server. +if [ "${CONFIGURE_CODE_SERVER}" == "true" ]; then + while ! [ -x /tmp/code-server/bin/code-server ]; do + counter=0 + if [ $counter -eq 60 ]; then + echo "Timed out waiting for /tmp/code-server/bin/code-server to be installed." + exit 1 + fi + echo "Waiting for /tmp/code-server/bin/code-server to be installed..." + sleep 1 + ((counter++)) + done + echo "πŸ“¦ Installing JFrog extension..." + /tmp/code-server/bin/code-server --install-extension jfrog.jfrog-vscode-extension + echo "πŸ₯³ JFrog extension installed!" +else + echo "πŸ€” Skipping JFrog extension installation. Set configure_code_server to true to install the JFrog extension." +fi + +# Configure the JFrog CLI completion +echo "πŸ“¦ Configuring JFrog CLI completion..." +# Get the user's shell +SHELLNAME=$(grep "^$USER" /etc/passwd | awk -F':' '{print $7}' | awk -F'/' '{print $NF}') +# Generate the completion script +jf completion $SHELLNAME --install +begin_stanza="# BEGIN: jf CLI shell completion (added by coder module jfrog-maven)" +# Add the completion script to the user's shell profile +if [ "$SHELLNAME" == "bash" ] && [ -f ~/.bashrc ]; then + if ! grep -q "$begin_stanza" ~/.bashrc; then + printf "%s\n" "$begin_stanza" >> ~/.bashrc + echo 'source "$HOME/.jfrog/jfrog_bash_completion"' >> ~/.bashrc + echo "# END: jf CLI shell completion" >> ~/.bashrc + else + echo "πŸ₯³ ~/.bashrc already contains jf CLI shell completion configuration, skipping." + fi +elif [ "$SHELLNAME" == "zsh" ] && [ -f ~/.zshrc ]; then + if ! grep -q "$begin_stanza" ~/.zshrc; then + printf "\n%s\n" "$begin_stanza" >> ~/.zshrc + echo "autoload -Uz compinit" >> ~/.zshrc + echo "compinit" >> ~/.zshrc + echo 'source "$HOME/.jfrog/jfrog_zsh_completion"' >> ~/.zshrc + echo "# END: jf CLI shell completion" >> ~/.zshrc + else + echo "πŸ₯³ ~/.zshrc already contains jf CLI shell completion configuration, skipping." + fi +else + echo "πŸ€” ~/.bashrc or ~/.zshrc does not exist, skipping jf CLI shell completion configuration." 
+fi \ No newline at end of file diff --git a/registry/coder/modules/jfrog-maven/settings.xml.tftpl b/registry/coder/modules/jfrog-maven/settings.xml.tftpl new file mode 100644 index 000000000..587cc1c0e --- /dev/null +++ b/registry/coder/modules/jfrog-maven/settings.xml.tftpl @@ -0,0 +1,57 @@ + + + + +%{ for REPO in REPOS ~} + + ${REPO} + ${ARTIFACTORY_USERNAME} + ${ARTIFACTORY_ACCESS_TOKEN} + +%{ endfor ~} + + + +%{ for REPO in REPOS ~} + + ${REPO} + + + ${REPO} + ${REPO} + ${JFROG_URL}/artifactory/${REPO} + + true + + + true + + + + + + ${REPO} + ${REPO} + ${JFROG_URL}/artifactory/${REPO} + + true + + + true + + + + +%{ endfor ~} + + + +%{ for REPO in REPOS ~} + ${REPO} +%{ endfor ~} + + + \ No newline at end of file diff --git a/registry/coder/modules/jfrog-oauth/README.md b/registry/coder/modules/jfrog-oauth/README.md index 5d149832f..9d0268a97 100644 --- a/registry/coder/modules/jfrog-oauth/README.md +++ b/registry/coder/modules/jfrog-oauth/README.md @@ -26,6 +26,7 @@ module "jfrog" { go = ["go", "another-go-repo"] pypi = ["pypi", "extra-index-pypi"] docker = ["example-docker-staging.jfrog.io", "example-docker-production.jfrog.io"] + maven = ["maven-local", "maven-remote", "maven-virtual"] } } ``` @@ -66,6 +67,35 @@ jf pip install requests pip install requests ``` +### Configure Maven + +Configure the Maven package manager to fetch dependencies from Artifactory. + +```tf +module "jfrog" { + count = data.coder_workspace.me.start_count + source = "registry.coder.com/coder/jfrog-oauth/coder" + version = "1.0.31" + agent_id = coder_agent.example.id + jfrog_url = "https://example.jfrog.io" + username_field = "username" + + package_managers = { + maven = ["maven-local", "maven-remote"] + } +} +``` + +You should now be able to use Maven with Artifactory repositories: + +```shell +jf mvn clean install +``` + +```shell +mvn clean install +``` + ### Configure code-server with JFrog extension The [JFrog extension](https://open-vsx.org/extension/JFrog/jfrog-vscode-extension) for VS Code allows you to interact with Artifactory from within the IDE. 
@@ -80,9 +110,10 @@ module "jfrog" { username_field = "username" # If you are using GitHub to login to both Coder and Artifactory, use username_field = "username" configure_code_server = true # Add JFrog extension configuration for code-server package_managers = { - npm = ["npm"] - go = ["go"] - pypi = ["pypi"] + npm = ["npm"] + go = ["go"] + pypi = ["pypi"] + maven = ["maven-local"] } } ``` diff --git a/registry/coder/modules/jfrog-oauth/main.test.ts b/registry/coder/modules/jfrog-oauth/main.test.ts index 20ace6971..6288048f0 100644 --- a/registry/coder/modules/jfrog-oauth/main.test.ts +++ b/registry/coder/modules/jfrog-oauth/main.test.ts @@ -126,4 +126,23 @@ EOF`; 'if [ -z "YES" ]; then\n not_configured go', ); }); + + it("configures maven with multiple repos", async () => { + const state = await runTerraformApply(import.meta.dir, { + agent_id: "some-agent-id", + jfrog_url: fakeFrogUrl, + package_managers: JSON.stringify({ + maven: ["maven-local", "maven-remote", "maven-virtual"], + }), + }); + const coderScript = findResourceInstance(state, "coder_script"); + expect(coderScript.script).toContain( + 'jf mvc --global --repo-resolve "maven-local"', + ); + expect(coderScript.script).toContain( + 'if [ -z "YES" ]; then\n not_configured maven', + ); + expect(coderScript.script).toContain("mkdir -p ~/.m2"); + expect(coderScript.script).toContain("cat << EOF > ~/.m2/settings.xml"); + }); }); diff --git a/registry/coder/modules/jfrog-oauth/main.tf b/registry/coder/modules/jfrog-oauth/main.tf index 0bc22568b..028062d3e 100644 --- a/registry/coder/modules/jfrog-oauth/main.tf +++ b/registry/coder/modules/jfrog-oauth/main.tf @@ -58,6 +58,7 @@ variable "package_managers" { go = optional(list(string), []) pypi = optional(list(string), []) docker = optional(list(string), []) + maven = optional(list(string), []) }) description = <<-EOF A map of package manager names to their respective artifactory repositories. Unused package managers can be omitted. @@ -67,6 +68,7 @@ variable "package_managers" { go = ["YOUR_GO_REPO_KEY", "ANOTHER_GO_REPO_KEY"] pypi = ["YOUR_PYPI_REPO_KEY", "ANOTHER_PYPI_REPO_KEY"] docker = ["YOUR_DOCKER_REPO_KEY", "ANOTHER_DOCKER_REPO_KEY"] + maven = ["YOUR_MAVEN_REPO_KEY", "ANOTHER_MAVEN_REPO_KEY"] } EOF } @@ -98,6 +100,9 @@ locals { pip_conf = templatefile( "${path.module}/pip.conf.tftpl", merge(local.common_values, { REPOS = var.package_managers.pypi }) ) + maven_settings = templatefile( + "${path.module}/settings.xml.tftpl", merge(local.common_values, { REPOS = var.package_managers.maven }) + ) } data "coder_workspace" "me" {} @@ -125,6 +130,9 @@ resource "coder_script" "jfrog" { REPOSITORY_PYPI = try(element(var.package_managers.pypi, 0), "") HAS_DOCKER = length(var.package_managers.docker) == 0 ? "" : "YES" REGISTER_DOCKER = join("\n", formatlist("register_docker \"%s\"", var.package_managers.docker)) + HAS_MAVEN = length(var.package_managers.maven) == 0 ? "" : "YES" + MAVEN_SETTINGS = local.maven_settings + REPOSITORY_MAVEN = try(element(var.package_managers.maven, 0), "") } )) run_on_start = true diff --git a/registry/coder/modules/jfrog-oauth/run.sh b/registry/coder/modules/jfrog-oauth/run.sh index 7d36e47c7..3a6a11551 100644 --- a/registry/coder/modules/jfrog-oauth/run.sh +++ b/registry/coder/modules/jfrog-oauth/run.sh @@ -81,6 +81,23 @@ else fi fi +# Configure Maven to use the Artifactory "maven" repository. +if [ -z "${HAS_MAVEN}" ]; then + not_configured maven +else + if command -v mvn > /dev/null 2>&1; then + echo "β˜• Configuring Maven..." 
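+    # `jf mvc` (maven-config) points Maven resolution at the first configured
+    # repository; the settings.xml written below covers all of them.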
+ jf mvc --global --repo-resolve "${REPOSITORY_MAVEN}" + mkdir -p ~/.m2 + cat << EOF > ~/.m2/settings.xml +${MAVEN_SETTINGS} +EOF + config_complete + else + echo "πŸ€” no maven is installed, skipping maven configuration." + fi +fi + # Install the JFrog vscode extension for code-server. if [ "${CONFIGURE_CODE_SERVER}" == "true" ]; then while ! [ -x /tmp/code-server/bin/code-server ]; do diff --git a/registry/coder/modules/jfrog-oauth/settings.xml.tftpl b/registry/coder/modules/jfrog-oauth/settings.xml.tftpl new file mode 100644 index 000000000..587cc1c0e --- /dev/null +++ b/registry/coder/modules/jfrog-oauth/settings.xml.tftpl @@ -0,0 +1,57 @@ + + + + +%{ for REPO in REPOS ~} + + ${REPO} + ${ARTIFACTORY_USERNAME} + ${ARTIFACTORY_ACCESS_TOKEN} + +%{ endfor ~} + + + +%{ for REPO in REPOS ~} + + ${REPO} + + + ${REPO} + ${REPO} + ${JFROG_URL}/artifactory/${REPO} + + true + + + true + + + + + + ${REPO} + ${REPO} + ${JFROG_URL}/artifactory/${REPO} + + true + + + true + + + + +%{ endfor ~} + + + +%{ for REPO in REPOS ~} + ${REPO} +%{ endfor ~} + + + \ No newline at end of file diff --git a/registry/coder/modules/jfrog-token/README.md b/registry/coder/modules/jfrog-token/README.md index 4eb4fe103..8f5d60b06 100644 --- a/registry/coder/modules/jfrog-token/README.md +++ b/registry/coder/modules/jfrog-token/README.md @@ -22,6 +22,7 @@ module "jfrog" { go = ["go", "another-go-repo"] pypi = ["pypi", "extra-index-pypi"] docker = ["example-docker-staging.jfrog.io", "example-docker-production.jfrog.io"] + maven = ["maven-local", "maven-remote", "maven-virtual"] } } ``` @@ -66,6 +67,33 @@ go get github.com/golang/example/hello pip install requests ``` +### Configure Maven + +Configure the Maven package manager to fetch dependencies from Artifactory. + +```tf +module "jfrog" { + source = "registry.coder.com/coder/jfrog-token/coder" + version = "1.0.31" + agent_id = coder_agent.example.id + jfrog_url = "https://YYYY.jfrog.io" + artifactory_access_token = var.artifactory_access_token + package_managers = { + maven = ["maven-local", "maven-remote"] + } +} +``` + +You should now be able to use Maven with Artifactory repositories: + +```shell +jf mvn clean install +``` + +```shell +mvn clean install +``` + ### Configure code-server with JFrog extension The [JFrog extension](https://open-vsx.org/extension/JFrog/jfrog-vscode-extension) for VS Code allows you to interact with Artifactory from within the IDE. 
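As a follow-up to the Maven configuration shown above, the generated settings can be verified inside the workspace. This is optional and not part of the module itself; it assumes `mvn` and the `jf` CLI are on the `PATH`:

```shell
# Show the Artifactory server registered by the module's run.sh
jf config show

# Inspect the settings.xml the module wrote for Maven
cat ~/.m2/settings.xml

# Have Maven print its effective settings, including the Artifactory repositories
mvn help:effective-settings
```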
diff --git a/registry/coder/modules/jfrog-token/main.test.ts b/registry/coder/modules/jfrog-token/main.test.ts index 4aeaba35d..aa3242f65 100644 --- a/registry/coder/modules/jfrog-token/main.test.ts +++ b/registry/coder/modules/jfrog-token/main.test.ts @@ -162,4 +162,24 @@ EOF`; 'if [ -z "YES" ]; then\n not_configured go', ); }); + + it("configures maven with multiple repos", async () => { + const state = await runTerraformApply(import.meta.dir, { + agent_id: "some-agent-id", + jfrog_url: fakeFrogUrl, + artifactory_access_token: "XXXX", + package_managers: JSON.stringify({ + maven: ["maven-local", "maven-remote", "maven-virtual"], + }), + }); + const coderScript = findResourceInstance(state, "coder_script"); + expect(coderScript.script).toContain( + 'jf mvc --global --repo-resolve "maven-local"', + ); + expect(coderScript.script).toContain( + 'if [ -z "YES" ]; then\n not_configured maven', + ); + expect(coderScript.script).toContain("mkdir -p ~/.m2"); + expect(coderScript.script).toContain("cat << EOF > ~/.m2/settings.xml"); + }); }); diff --git a/registry/coder/modules/jfrog-token/main.tf b/registry/coder/modules/jfrog-token/main.tf index 720e2d8c1..32c0852d9 100644 --- a/registry/coder/modules/jfrog-token/main.tf +++ b/registry/coder/modules/jfrog-token/main.tf @@ -91,6 +91,7 @@ variable "package_managers" { go = optional(list(string), []) pypi = optional(list(string), []) docker = optional(list(string), []) + maven = optional(list(string), []) }) description = <<-EOF A map of package manager names to their respective artifactory repositories. Unused package managers can be omitted. @@ -100,6 +101,7 @@ variable "package_managers" { go = ["YOUR_GO_REPO_KEY", "ANOTHER_GO_REPO_KEY"] pypi = ["YOUR_PYPI_REPO_KEY", "ANOTHER_PYPI_REPO_KEY"] docker = ["YOUR_DOCKER_REPO_KEY", "ANOTHER_DOCKER_REPO_KEY"] + maven = ["YOUR_MAVEN_REPO_KEY", "ANOTHER_MAVEN_REPO_KEY"] } EOF } @@ -131,6 +133,9 @@ locals { pip_conf = templatefile( "${path.module}/pip.conf.tftpl", merge(local.common_values, { REPOS = var.package_managers.pypi }) ) + maven_settings = templatefile( + "${path.module}/settings.xml.tftpl", merge(local.common_values, { REPOS = var.package_managers.maven }) + ) } # Configure the Artifactory provider @@ -171,6 +176,9 @@ resource "coder_script" "jfrog" { REPOSITORY_PYPI = try(element(var.package_managers.pypi, 0), "") HAS_DOCKER = length(var.package_managers.docker) == 0 ? "" : "YES" REGISTER_DOCKER = join("\n", formatlist("register_docker \"%s\"", var.package_managers.docker)) + HAS_MAVEN = length(var.package_managers.maven) == 0 ? "" : "YES" + MAVEN_SETTINGS = local.maven_settings + REPOSITORY_MAVEN = try(element(var.package_managers.maven, 0), "") } )) run_on_start = true diff --git a/registry/coder/modules/jfrog-token/run.sh b/registry/coder/modules/jfrog-token/run.sh index d3a1a74c3..27e020233 100644 --- a/registry/coder/modules/jfrog-token/run.sh +++ b/registry/coder/modules/jfrog-token/run.sh @@ -80,6 +80,23 @@ else fi fi +# Configure Maven to use the Artifactory "maven" repository. +if [ -z "${HAS_MAVEN}" ]; then + not_configured maven +else + if command -v mvn > /dev/null 2>&1; then + echo "β˜• Configuring Maven..." + jf mvc --global --repo-resolve "${REPOSITORY_MAVEN}" + mkdir -p ~/.m2 + cat << EOF > ~/.m2/settings.xml +${MAVEN_SETTINGS} +EOF + config_complete + else + echo "πŸ€” no maven is installed, skipping maven configuration." + fi +fi + # Install the JFrog vscode extension for code-server. if [ "${CONFIGURE_CODE_SERVER}" == "true" ]; then while ! 
[ -x /tmp/code-server/bin/code-server ]; do diff --git a/registry/coder/modules/jfrog-token/settings.xml.tftpl b/registry/coder/modules/jfrog-token/settings.xml.tftpl new file mode 100644 index 000000000..587cc1c0e --- /dev/null +++ b/registry/coder/modules/jfrog-token/settings.xml.tftpl @@ -0,0 +1,57 @@ + + + + +%{ for REPO in REPOS ~} + + ${REPO} + ${ARTIFACTORY_USERNAME} + ${ARTIFACTORY_ACCESS_TOKEN} + +%{ endfor ~} + + + +%{ for REPO in REPOS ~} + + ${REPO} + + + ${REPO} + ${REPO} + ${JFROG_URL}/artifactory/${REPO} + + true + + + true + + + + + + ${REPO} + ${REPO} + ${JFROG_URL}/artifactory/${REPO} + + true + + + true + + + + +%{ endfor ~} + + + +%{ for REPO in REPOS ~} + ${REPO} +%{ endfor ~} + + + \ No newline at end of file diff --git a/registry/coder/modules/jupyterlab/run.sh b/registry/coder/modules/jupyterlab/run.sh index e9a45b5ae..be686e55f 100644 --- a/registry/coder/modules/jupyterlab/run.sh +++ b/registry/coder/modules/jupyterlab/run.sh @@ -3,13 +3,13 @@ INSTALLER="" check_available_installer() { # check if pipx is installed echo "Checking for a supported installer" - if command -v pipx >/dev/null 2>&1; then + if command -v pipx > /dev/null 2>&1; then echo "pipx is installed" INSTALLER="pipx" return fi # check if uv is installed - if command -v uv >/dev/null 2>&1; then + if command -v uv > /dev/null 2>&1; then echo "uv is installed" INSTALLER="uv" return @@ -26,21 +26,21 @@ fi BOLD='\033[0;1m' # check if jupyterlab is installed -if ! command -v jupyter-lab >/dev/null 2>&1; then +if ! command -v jupyter-lab > /dev/null 2>&1; then # install jupyterlab check_available_installer printf "$${BOLD}Installing jupyterlab!\n" case $INSTALLER in - uv) - uv pip install -q jupyterlab && - printf "%s\n" "πŸ₯³ jupyterlab has been installed" - JUPYTER="$HOME/.venv/bin/jupyter-lab" - ;; - pipx) - pipx install jupyterlab && - printf "%s\n" "πŸ₯³ jupyterlab has been installed" - JUPYTER="$HOME/.local/bin/jupyter-lab" - ;; + uv) + uv pip install -q jupyterlab \ + && printf "%s\n" "πŸ₯³ jupyterlab has been installed" + JUPYTER="$HOME/.venv/bin/jupyter-lab" + ;; + pipx) + pipx install jupyterlab \ + && printf "%s\n" "πŸ₯³ jupyterlab has been installed" + JUPYTER="$HOME/.local/bin/jupyter-lab" + ;; esac else printf "%s\n\n" "πŸ₯³ jupyterlab is already installed" @@ -55,4 +55,4 @@ $JUPYTER --no-browser \ --ServerApp.port="${PORT}" \ --ServerApp.token='' \ --ServerApp.password='' \ - >"${LOG_PATH}" 2>&1 & + > "${LOG_PATH}" 2>&1 & diff --git a/registry/coder/modules/kasmvnc/run.sh b/registry/coder/modules/kasmvnc/run.sh index 67a8a310c..9ba7d2073 100644 --- a/registry/coder/modules/kasmvnc/run.sh +++ b/registry/coder/modules/kasmvnc/run.sh @@ -3,7 +3,10 @@ # Exit on error, undefined variables, and pipe failures set -euo pipefail -error() { printf "πŸ’€ ERROR: %s\n" "$@"; exit 1; } +error() { + printf "πŸ’€ ERROR: %s\n" "$@" + exit 1 +} # Function to check if vncserver is already installed check_installed() { @@ -248,30 +251,30 @@ get_http_dir() { echo $httpd_directory } -fix_server_index_file(){ - local fname=$${FUNCNAME[0]} # gets current function name - if [[ $# -ne 1 ]]; then - error "$fname requires exactly 1 parameter:\n\tpath to KasmVNC httpd_directory" - fi - local httpdir="$1" - if [[ ! 
-d "$httpdir" ]]; then - error "$fname: $httpdir is not a directory" - fi - pushd "$httpdir" > /dev/null +fix_server_index_file() { + local fname=$${FUNCNAME[0]} # gets current function name + if [[ $# -ne 1 ]]; then + error "$fname requires exactly 1 parameter:\n\tpath to KasmVNC httpd_directory" + fi + local httpdir="$1" + if [[ ! -d "$httpdir" ]]; then + error "$fname: $httpdir is not a directory" + fi + pushd "$httpdir" > /dev/null - cat <<'EOH' > /tmp/path_vnc.html + cat << 'EOH' > /tmp/path_vnc.html ${PATH_VNC_HTML} EOH - $SUDO mv /tmp/path_vnc.html . - # check for the switcheroo - if [[ -f "index.html" && -L "vnc.html" ]]; then - $SUDO mv $httpdir/index.html $httpdir/vnc.html - fi - $SUDO ln -s -f path_vnc.html index.html - popd > /dev/null + $SUDO mv /tmp/path_vnc.html . + # check for the switcheroo + if [[ -f "index.html" && -L "vnc.html" ]]; then + $SUDO mv $httpdir/index.html $httpdir/vnc.html + fi + $SUDO ln -s -f path_vnc.html index.html + popd > /dev/null } -patch_kasm_http_files(){ +patch_kasm_http_files() { homedir=$(get_http_dir) fix_server_index_file "$homedir" } @@ -292,7 +295,7 @@ set -e if [[ $RETVAL -ne 0 ]]; then echo "ERROR: Failed to start KasmVNC server. Return code: $RETVAL" - if [[ -f "$VNC_LOG" ]]; then + if [[ -f "$VNC_LOG" ]]; then echo "Full logs:" cat "$VNC_LOG" else diff --git a/registry/coder/modules/parsec/README.md b/registry/coder/modules/parsec/README.md new file mode 100644 index 000000000..7ada6ad73 --- /dev/null +++ b/registry/coder/modules/parsec/README.md @@ -0,0 +1,156 @@ +--- +display_name: Parsec +description: Enable low-latency remote desktop access using Parsec cloud gaming technology +icon: ../../../../.icons/parsec.svg +verified: true +tags: [remote-desktop, gaming, gpu, streaming] +--- + +# Parsec Module + +This module integrates [Parsec](https://parsec.app/) into your workspace for low-latency remote desktop access. Parsec provides high-performance streaming optimized for gaming and real-time interaction. 
+ +## Features + +- High-performance remote desktop streaming +- GPU acceleration support +- Configurable streaming quality +- Automatic startup options +- Cross-platform client support + +## Prerequisites + +- Windows or Linux-based workspace +- Parsec host key (obtain from [Parsec Settings](https://console.parsec.app/settings)) +- For GPU acceleration: + - Windows: NVIDIA or AMD GPU with latest drivers + - Linux: NVIDIA GPU with appropriate drivers installed + +## Usage + +Basic usage: + +```hcl +module "parsec" { + source = "registry.coder.com/coder/parsec/coder" + version = "1.0.0" + agent_id = coder_agent.example.id + parsec_host_key = var.parsec_host_key +} +``` + +Advanced configuration: + +```hcl +module "parsec" { + source = "registry.coder.com/coder/parsec/coder" + version = "1.0.0" + agent_id = coder_agent.example.id + parsec_host_key = var.parsec_host_key + + enable_gpu_acceleration = true + auto_start = true + + parsec_config = { + encoder_bitrate = 50 # Mbps + encoder_fps = 60 + bandwidth_limit = 100 # Mbps + encoder_h265 = true + client_keyboard_layout = "en-us" + } +} +``` + +## Variables + +| Name | Description | Type | Default | +|------|-------------|------|---------| +| parsec_host_key | Parsec host key for authentication | string | required | +| parsec_version | Version of Parsec to install | string | "latest" | +| enable_gpu_acceleration | Enable GPU acceleration | bool | true | +| auto_start | Start Parsec daemon automatically | bool | true | +| parsec_config | Parsec configuration options | object | see below | + +### parsec_config Options + +```hcl +parsec_config = { + encoder_bitrate = 50 # Streaming bitrate in Mbps (1-100) + encoder_fps = 60 # Target framerate + bandwidth_limit = 100 # Bandwidth limit in Mbps + encoder_h265 = true # Use H.265 encoding when available + client_keyboard_layout = "en-us" # Keyboard layout +} +``` + +## How it Works + +1. **Installation**: The module installs Parsec and required dependencies + - Windows: Uses PowerShell to download and install Parsec + - Linux: Uses shell script to install via package manager +2. **Configuration**: Sets up Parsec with the provided host key and settings + - Creates platform-specific configuration files + - Applies custom streaming settings +3. **GPU Support**: Automatically configures GPU acceleration if available + - Windows: Supports both NVIDIA and AMD GPUs + - Linux: Configures NVIDIA GPU drivers +4. **Autostart**: Optionally starts Parsec daemon on workspace startup + - Windows: Configures Windows service + - Linux: Sets up systemd service + +## Client Setup + +1. Download the [Parsec client](https://parsec.app/downloads) for your platform +2. Log in with your Parsec account +3. Your workspace will appear in the "Computers" list +4. 
Click to connect and start streaming + +## Troubleshooting + +### Stream Quality Issues +- If experiencing poor quality: + - Reduce encoder_bitrate or encoder_fps + - Check your network connection + - Verify GPU acceleration is working + +### Connection Problems +- If connection fails: + - Verify your host key is correct + - Check workspace firewall settings + - Ensure Parsec daemon is running + +### Platform-Specific Issues + +#### Windows +- GPU not detected: + - Update GPU drivers through Device Manager + - For NVIDIA: Install latest Game Ready drivers + - For AMD: Install latest Radeon Software +- Service not starting: + - Check Windows Services app + - Review Event Viewer for errors + +#### Linux +- GPU acceleration not working: + - Verify NVIDIA drivers are installed: `nvidia-smi` + - Check X server configuration +- Display server issues: + - Ensure X11 or Wayland is running + - Check display server logs + +## References + +### Documentation +- [Parsec Documentation](https://parsec.app/docs) +- [Host Computer Requirements](https://parsec.app/docs/hosting-specifications) +- [Windows Setup Guide](https://parsec.app/docs/windows) +- [Linux Setup Guide](https://parsec.app/docs/linux) + +### Support Resources +- [Parsec Support Center](https://support.parsec.app) +- [GPU Driver Downloads](https://parsec.app/docs/supported-graphics-cards) +- [Network Requirements](https://support.parsec.app/hc/en-us/articles/115002875791-Required-Network-Ports-And-Protocols) + +### Community +- [Parsec Discord](https://discord.gg/parsec) +- [Coder Discussion Forum](https://github.com/coder/coder/discussions) diff --git a/registry/coder/modules/parsec/main.tf b/registry/coder/modules/parsec/main.tf new file mode 100644 index 000000000..6ea45650e --- /dev/null +++ b/registry/coder/modules/parsec/main.tf @@ -0,0 +1,69 @@ +terraform { + required_version = ">= 1.0" + + required_providers { + coder = { + source = "coder/coder" + version = ">= 2.7" + } + } +} + +variable "agent_id" { + type = string + description = "The ID of a Coder agent." +} + +variable "parsec_host_key" { + type = string + description = "The Parsec host key for authentication. Can be obtained from https://console.parsec.app/settings" + sensitive = true +} + +variable "parsec_version" { + type = string + description = "The version of Parsec to install. Use 'latest' for the most recent version." + default = "latest" +} + +variable "enable_gpu_acceleration" { + type = bool + description = "Whether to enable GPU acceleration for Parsec streaming." + default = true +} + +variable "auto_start" { + type = bool + description = "Whether to automatically start Parsec daemon on workspace startup." + default = true +} + +variable "parsec_config" { + type = object({ + encoder_bitrate = optional(number, 50) # Mbps + encoder_fps = optional(number, 60) + bandwidth_limit = optional(number, 100) # Mbps + encoder_h265 = optional(bool, true) + client_keyboard_layout = optional(string, "en-us") + }) + description = "Parsec configuration options" + default = {} +} + +data "coder_workspace" "me" {} + +resource "coder_script" "install_parsec" { + agent_id = var.agent_id + display_name = "Install Parsec" + icon = "/icon/parsec.svg" + script = data.coder_workspace.me.os == "windows" ? 
file("${path.module}/scripts/install.ps1") : file("${path.module}/scripts/install.sh") + run_on_start = true + + environment = { + PARSEC_HOST_KEY = var.parsec_host_key + PARSEC_VERSION = var.parsec_version + ENABLE_GPU = tostring(var.enable_gpu_acceleration) + AUTO_START = tostring(var.auto_start) + PARSEC_CONFIG = jsonencode(var.parsec_config) + } +} diff --git a/registry/coder/modules/parsec/scripts/install.ps1 b/registry/coder/modules/parsec/scripts/install.ps1 new file mode 100644 index 000000000..a0386949b --- /dev/null +++ b/registry/coder/modules/parsec/scripts/install.ps1 @@ -0,0 +1,73 @@ +# Install and configure Parsec +$ErrorActionPreference = "Stop" + +Write-Host "Starting Parsec installation..." + +# Parse configuration from environment variables +$parsecConfig = [System.Text.Encoding]::UTF8.GetString([System.Convert]::FromBase64String($env:PARSEC_CONFIG)) | ConvertFrom-Json + +# Download Parsec installer +$tempDir = $env:TEMP +$installerPath = Join-Path $tempDir "parsec-windows.exe" + +Write-Host "Downloading Parsec installer..." +if ($env:PARSEC_VERSION -eq "latest") { + $downloadUrl = "https://builds.parsec.app/package/parsec-windows.exe" +} else { + $downloadUrl = "https://builds.parsec.app/package/parsec-windows-$env:PARSEC_VERSION.exe" +} + +Invoke-WebRequest -Uri $downloadUrl -OutFile $installerPath + +# Install Parsec silently +Write-Host "Installing Parsec..." +Start-Process -FilePath $installerPath -ArgumentList "/silent" -Wait + +# Create Parsec config directory +$parsecConfigDir = "$env:APPDATA\Parsec" +New-Item -ItemType Directory -Force -Path $parsecConfigDir | Out-Null + +# Configure Parsec +Write-Host "Configuring Parsec..." +$configContent = @" +# Parsec Configuration +app_host = 1 +app_run_level = 3 +encoder_bitrate = $($parsecConfig.encoder_bitrate ?? 50) +encoder_fps = $($parsecConfig.encoder_fps ?? 60) +encoder_min_bitrate = 10 +bandwidth_limit = $($parsecConfig.bandwidth_limit ?? 100) +encoder_h265 = $($parsecConfig.encoder_h265 ?? "true") +client_keyboard_layout = $($parsecConfig.client_keyboard_layout ?? "en-us") +host_virtual_monitors = 1 +"@ + +if ($env:PARSEC_HOST_KEY) { + $configContent += "`nhost_key = $env:PARSEC_HOST_KEY" +} + +# Configure GPU acceleration if enabled +if ($env:ENABLE_GPU -eq "true") { + Write-Host "Configuring GPU acceleration..." + try { + # Check for NVIDIA GPU + $gpuInfo = Get-WmiObject Win32_VideoController | Where-Object { $_.Name -like "*NVIDIA*" } + if ($gpuInfo) { + $configContent += "`nencoder_device = 0" + } else { + Write-Host "Warning: GPU acceleration enabled but no NVIDIA GPU found" + } + } catch { + Write-Host "Warning: Error checking GPU info: $_" + } +} + +$configContent | Out-File -FilePath "$parsecConfigDir\config.txt" -Encoding ASCII + +# Start Parsec if auto-start is enabled +if ($env:AUTO_START -eq "true") { + Write-Host "Starting Parsec..." + Start-Process "${env:ProgramFiles}\Parsec\parsecd.exe" +} + +Write-Host "Parsec setup complete!" 
diff --git a/registry/coder/modules/parsec/scripts/install.sh b/registry/coder/modules/parsec/scripts/install.sh new file mode 100644 index 000000000..f87189d8f --- /dev/null +++ b/registry/coder/modules/parsec/scripts/install.sh @@ -0,0 +1,118 @@ +#!/bin/bash +set -euo pipefail + +BOLD='\033[0;1m' +RED='\033[0;31m' +GREEN='\033[0;32m' +BLUE='\033[0;34m' +NC='\033[0m' # No Color + +# Parse configuration +PARSEC_CONFIG=$(echo "$PARSEC_CONFIG" | base64 -d) + +printf "${BLUE}Starting Parsec installation...${NC}\n" + +# Check if we're running on a supported system +if [ "$(uname)" != "Linux" ]; then + printf "${RED}Error: This module only supports Linux systems${NC}\n" + exit 1 +fi + +# Install dependencies +printf "${BLUE}Installing dependencies...${NC}\n" +if command -v apt-get &> /dev/null; then + # Ubuntu/Debian + sudo apt-get update + sudo apt-get install -y \ + libegl1-mesa \ + libgl1-mesa-glx \ + libvdpau1 \ + x11-xserver-utils \ + pulseaudio \ + curl \ + jq +elif command -v dnf &> /dev/null; then + # Fedora/RHEL + sudo dnf install -y \ + mesa-libEGL \ + mesa-libGL \ + libvdpau \ + xorg-x11-server-utils \ + pulseaudio \ + curl \ + jq +else + printf "${RED}Error: Unsupported Linux distribution${NC}\n" + exit 1 +fi + +# Download and install Parsec +printf "${BLUE}Downloading Parsec...${NC}\n" +if [ "$PARSEC_VERSION" = "latest" ]; then + DOWNLOAD_URL="https://builds.parsec.app/package/parsec-linux.deb" +else + DOWNLOAD_URL="https://builds.parsec.app/package/parsec-linux-${PARSEC_VERSION}.deb" +fi + +wget -O /tmp/parsec.deb "$DOWNLOAD_URL" +sudo dpkg -i /tmp/parsec.deb || sudo apt-get -f install -y +rm /tmp/parsec.deb + +# Create Parsec configuration directory +PARSEC_CONFIG_DIR="$HOME/.config/parsec" +mkdir -p "$PARSEC_CONFIG_DIR" + +# Configure Parsec +printf "${BLUE}Configuring Parsec...${NC}\n" +cat > "$PARSEC_CONFIG_DIR/config.txt" << EOL +# Parsec Configuration +app_host = 1 +app_run_level = 3 +encoder_bitrate = $(jq -r '.encoder_bitrate // 50' <<< "$PARSEC_CONFIG") +encoder_fps = $(jq -r '.encoder_fps // 60' <<< "$PARSEC_CONFIG") +encoder_min_bitrate = 10 +bandwidth_limit = $(jq -r '.bandwidth_limit // 100' <<< "$PARSEC_CONFIG") +encoder_h265 = $(jq -r '.encoder_h265 // true' <<< "$PARSEC_CONFIG") +client_keyboard_layout = $(jq -r '.client_keyboard_layout // "en-us"' <<< "$PARSEC_CONFIG") +host_virtual_monitors = 1 +EOL + +# Configure host key +if [ -n "$PARSEC_HOST_KEY" ]; then + echo "host_key = $PARSEC_HOST_KEY" >> "$PARSEC_CONFIG_DIR/config.txt" +fi + +# Configure GPU acceleration if enabled +if [ "$ENABLE_GPU" = "true" ]; then + printf "${BLUE}Configuring GPU acceleration...${NC}\n" + # Check for NVIDIA GPU + if command -v nvidia-smi &> /dev/null; then + echo "encoder_device = 0" >> "$PARSEC_CONFIG_DIR/config.txt" + else + printf "${RED}Warning: GPU acceleration enabled but no NVIDIA GPU found${NC}\n" + fi +fi + +# Set up autostart if enabled +if [ "$AUTO_START" = "true" ]; then + printf "${BLUE}Configuring autostart...${NC}\n" + mkdir -p "$HOME/.config/autostart" + cat > "$HOME/.config/autostart/parsec.desktop" << EOL +[Desktop Entry] +Type=Application +Name=Parsec +Exec=parsecd +Hidden=false +NoDisplay=false +X-GNOME-Autostart-enabled=true +EOL +fi + +# Start Parsec daemon +if [ "$AUTO_START" = "true" ]; then + printf "${BLUE}Starting Parsec daemon...${NC}\n" + parsecd & +fi + +printf "${GREEN}Parsec installation and configuration complete!${NC}\n" +printf "You can now connect to this workspace using the Parsec client.\n" diff --git a/registry/coder/modules/vault-jwt/run.sh 
b/registry/coder/modules/vault-jwt/run.sh index 6d4785482..d95b45a27 100644 --- a/registry/coder/modules/vault-jwt/run.sh +++ b/registry/coder/modules/vault-jwt/run.sh @@ -9,11 +9,11 @@ CODER_OIDC_ACCESS_TOKEN=${CODER_OIDC_ACCESS_TOKEN} fetch() { dest="$1" url="$2" - if command -v curl >/dev/null 2>&1; then + if command -v curl > /dev/null 2>&1; then curl -sSL --fail "$${url}" -o "$${dest}" - elif command -v wget >/dev/null 2>&1; then + elif command -v wget > /dev/null 2>&1; then wget -O "$${dest}" "$${url}" - elif command -v busybox >/dev/null 2>&1; then + elif command -v busybox > /dev/null 2>&1; then busybox wget -O "$${dest}" "$${url}" else printf "curl, wget, or busybox is not installed. Please install curl or wget in your image.\n" @@ -22,9 +22,9 @@ fetch() { } unzip_safe() { - if command -v unzip >/dev/null 2>&1; then + if command -v unzip > /dev/null 2>&1; then command unzip "$@" - elif command -v busybox >/dev/null 2>&1; then + elif command -v busybox > /dev/null 2>&1; then busybox unzip "$@" else printf "unzip or busybox is not installed. Please install unzip in your image.\n" @@ -56,7 +56,7 @@ install() { # Check if the vault CLI is installed and has the correct version installation_needed=1 - if command -v vault >/dev/null 2>&1; then + if command -v vault > /dev/null 2>&1; then CURRENT_VERSION=$(vault version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+') if [ "$${CURRENT_VERSION}" = "$${VAULT_CLI_VERSION}" ]; then printf "Vault version %s is already installed and up-to-date.\n\n" "$${CURRENT_VERSION}" @@ -81,7 +81,7 @@ install() { return 1 fi rm vault.zip - if sudo mv vault /usr/local/bin/vault 2>/dev/null; then + if sudo mv vault /usr/local/bin/vault 2> /dev/null; then printf "Vault installed successfully!\n\n" else mkdir -p ~/.local/bin diff --git a/registry/coder/modules/vscode-web/run.sh b/registry/coder/modules/vscode-web/run.sh index 9346b4bdb..b2554d064 100644 --- a/registry/coder/modules/vscode-web/run.sh +++ b/registry/coder/modules/vscode-web/run.sh @@ -68,7 +68,7 @@ esac # Detect the platform if [ -n "${PLATFORM}" ]; then DETECTED_PLATFORM="${PLATFORM}" -elif [ -f /etc/alpine-release ] || grep -qi 'ID=alpine' /etc/os-release 2>/dev/null || command -v apk > /dev/null 2>&1; then +elif [ -f /etc/alpine-release ] || grep -qi 'ID=alpine' /etc/os-release 2> /dev/null || command -v apk > /dev/null 2>&1; then DETECTED_PLATFORM="alpine" elif [ "$(uname -s)" = "Darwin" ]; then DETECTED_PLATFORM="darwin" diff --git a/registry/coder/templates/windows-parsec/main.tf b/registry/coder/templates/windows-parsec/main.tf new file mode 100644 index 000000000..684684ae4 --- /dev/null +++ b/registry/coder/templates/windows-parsec/main.tf @@ -0,0 +1,34 @@ +terraform { + required_providers { + coder = { + source = "coder/coder" + } + } +} + +provider "coder" {} + +data "coder_workspace" "me" {} + +resource "coder_agent" "main" { + arch = "amd64" + auth = "token" + os = "windows" +} + +module "parsec" { + source = "../../modules/parsec" + agent_id = coder_agent.main.id + parsec_host_key = var.parsec_host_key + parsec_config = { + encoder_bitrate = 50 + encoder_fps = 60 + encoder_h265 = true + } +} + +variable "parsec_host_key" { + type = string + description = "Your Parsec host key from https://console.parsec.app/settings" + sensitive = true +} diff --git a/scripts/update_readmes.go b/scripts/update_readmes.go new file mode 100644 index 000000000..8eab00950 --- /dev/null +++ b/scripts/update_readmes.go @@ -0,0 +1,246 @@ +package main + +import ( + "fmt" + "os" + "path/filepath" + "strings" + 
"text/template" +) + +const readmeTemplate = `--- +display_name: {{.DisplayName}} +description: {{.Description}} +icon: {{.Icon}} +verified: {{.Verified}} +tags: {{.Tags}} +--- + +# {{.Title}} + +{{.Description}} + +## Prerequisites + +### Required Tools +- AWS CLI v2.0 or later (installation: https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html) +- Terraform v1.0 or later (installation: https://developer.hashicorp.com/terraform/downloads) +{{if .ExtraTools}}{{range .ExtraTools}} +- {{.}}{{end}}{{end}} + +### Authentication +{{.AuthInstructions}} + +## Infrastructure + +This template provisions the following resources: + +{{range .Resources}}* {{.}} +{{end}} + +Architecture diagram: +` + "```" + `mermaid +graph TD +{{.ArchitectureDiagram}} +` + "```" + ` + +## Usage + +1. Configure credentials: +` + "```" + `bash +{{.SetupCommands}} +` + "```" + ` + +2. Create the template: +` + "```" + `bash +coder templates create {{.TemplateName}} +` + "```" + ` + +3. Create a workspace: +` + "```" + `bash +coder create --template {{.TemplateName}} myworkspace +` + "```" + ` + +Example Terraform configuration: +` + "```" + `hcl +{{.TerraformExample}} +` + "```" + ` + +## Cost and Permissions + +Estimated costs: +{{range .CostEstimates}}* {{.}} +{{end}} + +Total: ~{{.TotalCost}}/month + +Required permissions: +` + "```" + `json +{{.RequiredPermissions}} +` + "```" + ` + +Cost optimization tips: +{{range .CostTips}}* {{.}} +{{end}} + +## Variables + +| Name | Type | Description | Default | Required | +|------|------|-------------|---------|----------| +{{range .Variables}}| {{.Name}} | {{.Type}} | {{.Description}} | {{.Default}} | {{.Required}} | +{{end}} +` + +type TemplateData struct { + DisplayName string + Description string + Icon string + Verified bool + Tags string + Title string + + ExtraTools []string + AuthInstructions string + + Resources []string + ArchitectureDiagram string + + SetupCommands string + TemplateName string + TerraformExample string + + CostEstimates []string + TotalCost string + RequiredPermissions string + CostTips []string + + Variables []Variable +} + +type Variable struct { + Name string + Type string + Description string + Default string + Required string +} + +func main() { + // Example usage for aws-linux template + data := TemplateData{ + DisplayName: "AWS EC2 (Linux)", + Description: "Provision AWS EC2 VMs as Coder workspaces", + Icon: "../../../../.icons/aws.svg", + Verified: true, + Tags: "[vm, linux, aws, persistent-vm]", + Title: "Remote Development on AWS EC2 VMs (Linux)", + + ExtraTools: []string{ + "Docker (optional)", + "Git", + }, + AuthInstructions: "Use AWS credentials file or environment variables. 
See https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html", + + Resources: []string{ + "EC2 instance (t3.large: 2 vCPU, 8GB RAM)", + "100GB EBS volume (gp3)", + "Security group for workspace access", + "IAM instance profile", + }, + ArchitectureDiagram: ` + A[Coder Workspace] --> B[EC2 Instance] + B --> C[EBS Volume] + B --> D[Security Group] + B --> E[IAM Profile]`, + + SetupCommands: "aws configure", + TemplateName: "aws-linux", + TerraformExample: `terraform { + required_providers { + coder = { + source = "coder/coder" + } + aws = { + source = "hashicorp/aws" + } + } +} + +provider "aws" { + region = data.coder_parameter.region.value +} + +data "coder_parameter" "region" { + name = "region" + display_name = "Region" + description = "The region to deploy the workspace in" + default = "us-east-1" + type = "string" + mutable = false +}`, + + CostEstimates: []string{ + "EC2 t3.large: $0.0832/hour ($60/month)", + "EBS gp3 100GB: $10/month", + }, + TotalCost: "$70", + RequiredPermissions: `{ + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "ec2:RunInstances", + "ec2:TerminateInstances", + "iam:CreateRole", + "iam:PutRolePolicy" + ], + "Resource": "*" + } + ] +}`, + CostTips: []string{ + "Use spot instances for dev environments", + "Enable auto-shutdown during off-hours", + "Use EBS snapshots for faster startup", + }, + + Variables: []Variable{ + { + Name: "region", + Type: "string", + Description: "AWS region", + Default: "us-east-1", + Required: "Yes", + }, + { + Name: "instance_type", + Type: "string", + Description: "EC2 instance type", + Default: "t3.large", + Required: "No", + }, + }, + } + + tmpl, err := template.New("readme").Parse(readmeTemplate) + if err != nil { + fmt.Printf("Error parsing template: %v\n", err) + os.Exit(1) + } + + // Example: Write to aws-linux template + path := filepath.Join("registry", "coder", "templates", "aws-linux", "README.md") + f, err := os.Create(path) + if err != nil { + fmt.Printf("Error creating file: %v\n", err) + os.Exit(1) + } + defer f.Close() + + err = tmpl.Execute(f, data) + if err != nil { + fmt.Printf("Error executing template: %v\n", err) + os.Exit(1) + } +} diff --git a/tatus b/tatus new file mode 100644 index 000000000..7aa4146fc --- /dev/null +++ b/tatus @@ -0,0 +1,3 @@ +0990fb6 (HEAD -> feature/jfrog-maven-support) add and commit +14ba4ae (feat/parsec-integration) Add JFrog Maven support modules +83b2a40 feat: add Parsec module for low-latency remote desktop diff --git a/terraform.zip b/terraform.zip new file mode 100644 index 000000000..47756a04a Binary files /dev/null and b/terraform.zip differ diff --git a/terraform/terraform.exe b/terraform/terraform.exe new file mode 100644 index 000000000..57c318f92 Binary files /dev/null and b/terraform/terraform.exe differ diff --git a/try b/try new file mode 100644 index 000000000..00fea12b4 --- /dev/null +++ b/try @@ -0,0 +1,5 @@ +14ba4ae (HEAD -> feature/jfrog-maven-support, feat/parsec-integration) Add JFrog Maven support modules +83b2a40 feat: add Parsec module for low-latency remote desktop +20dc292 (origin/feat/claude-code-subagents, feat/claude-code-subagents) feat(claude-code): add subagents support +db9dbdb (origin/main, origin/feat/improve-gemini-module, origin/HEAD, main, feat/improve-gemini-module) fix(gemini): improve module configuration and documentation +3fd7b47 feat: dynamic username template (#261)
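For reference, the README generator added in `scripts/update_readmes.go` is a standalone `main` package. A minimal way to exercise it from the repository root (it currently writes only the hard-coded `registry/coder/templates/aws-linux/README.md` path) is:

```shell
go run scripts/update_readmes.go
git diff registry/coder/templates/aws-linux/README.md
```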