diff --git a/.changeset/green-mice-watch.md b/.changeset/green-mice-watch.md new file mode 100644 index 0000000000..e3b94d48f4 --- /dev/null +++ b/.changeset/green-mice-watch.md @@ -0,0 +1,6 @@ +--- +'@e2b/python-sdk': patch +'e2b': patch +--- + +add doublestar pattern when not specified to folder when calling .copy() diff --git a/packages/js-sdk/src/template/utils.ts b/packages/js-sdk/src/template/utils.ts index 4ee8b27e36..9530c16d52 100644 --- a/packages/js-sdk/src/template/utils.ts +++ b/packages/js-sdk/src/template/utils.ts @@ -3,6 +3,7 @@ import fs from 'node:fs' import path from 'node:path' import { dynamicGlob, dynamicTar } from '../utils' import { BASE_STEP_NAME, FINALIZE_STEP_NAME } from './consts' +import type { Path } from 'glob' /** * Read and parse a .dockerignore file. @@ -23,6 +24,47 @@ export function readDockerignore(contextPath: string): string[] { .filter((line) => line && !line.startsWith('#')) } +/** + * Get all files for a given path and ignore patterns. + * + * @param src Path to the source directory + * @param contextPath Base directory for resolving relative paths + * @param ignorePatterns Ignore patterns + * @returns Array of files + */ +export async function getAllFilesForFilesHash( + src: string, + contextPath: string, + ignorePatterns: string[] +) { + const { glob } = await dynamicGlob() + const files = new Set() + + const globFiles = await glob(src, { + ignore: ignorePatterns, + withFileTypes: true, + cwd: contextPath, + }) + + for (const file of globFiles) { + if (file.isDirectory()) { + // For directories, add the directory itself and all files inside it + files.add(file) + const dirFiles = await glob(path.join(file.fullpath(), '**/*'), { + ignore: ignorePatterns, + withFileTypes: true, + cwd: file.fullpath(), + }) + dirFiles.forEach((f) => files.add(f)) + } else { + // For files, just add the file + files.add(file) + } + } + + return Array.from(files).sort() +} + /** * Calculate a hash of files being copied to detect changes for cache invalidation. * The hash includes file content, metadata (mode, uid, gid, size, mtime), and relative paths. 
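// Illustrative sketch, not part of this diff: one way the new getAllFilesForFilesHash
// helper could be exercised on its own, outside of calculateFilesHash. The relative
// import path, the sample ignore patterns, and the assumption that the helper's
// entries are the glob package's Path objects (as the added `import type { Path }`
// suggests) are assumptions made only for this example.
import path from 'node:path'
import { getAllFilesForFilesHash } from './utils'

async function listCopiedPaths(contextPath: string, src: string) {
  // .dockerignore-style glob patterns, like those produced by readDockerignore above.
  const ignorePatterns = ['node_modules/**', '**/*.log']
  const entries = await getAllFilesForFilesHash(src, contextPath, ignorePatterns)
  // Directories are returned alongside the files found beneath them;
  // fullpath() yields an absolute path, made relative to the build context here.
  return entries.map((entry) => path.relative(contextPath, entry.fullpath()))
}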
@@ -44,20 +86,15 @@ export async function calculateFilesHash( resolveSymlinks: boolean, stackTrace: string | undefined ): Promise { - const { glob } = await dynamicGlob() - const srcPath = path.join(contextPath, src) const hash = crypto.createHash('sha256') const content = `COPY ${src} ${dest}` hash.update(content) - const files = await glob(srcPath, { - ignore: ignorePatterns, - withFileTypes: true, - }) + const files = await getAllFilesForFilesHash(src, contextPath, ignorePatterns) if (files.length === 0) { - const error = new Error(`No files found in ${srcPath}`) + const error = new Error(`No files found in ${src}`) if (stackTrace) { error.stack = stackTrace } @@ -73,6 +110,7 @@ export async function calculateFilesHash( hash.update(stats.mtimeMs.toString()) } + // Process files recursively for (const file of files) { // Add a relative path to hash calculation const relativePath = path.relative(contextPath, file.fullpath()) @@ -98,9 +136,9 @@ export async function calculateFilesHash( } const stats = fs.statSync(file.fullpath()) - hashStats(stats) + // Add file content to hash calculation if (stats.isFile()) { const content = fs.readFileSync(file.fullpath()) hash.update(new Uint8Array(content)) diff --git a/packages/js-sdk/tests/template/utils/getAllFilesForFilesHash.test.ts b/packages/js-sdk/tests/template/utils/getAllFilesForFilesHash.test.ts new file mode 100644 index 0000000000..b110043816 --- /dev/null +++ b/packages/js-sdk/tests/template/utils/getAllFilesForFilesHash.test.ts @@ -0,0 +1,246 @@ +import { expect, test, describe, beforeAll, afterAll, beforeEach } from 'vitest' +import { writeFile, mkdir, rm } from 'fs/promises' +import { join } from 'path' +import { getAllFilesForFilesHash } from '../../../src/template/utils' + +describe('getAllFilesForFilesHash', () => { + const testDir = join(__dirname, 'folder') + + beforeAll(async () => { + await rm(testDir, { recursive: true, force: true }) + await mkdir(testDir, { recursive: true }) + }) + + afterAll(async () => { + await rm(testDir, { recursive: true, force: true }) + }) + + beforeEach(async () => { + await rm(testDir, { recursive: true, force: true }) + await mkdir(testDir, { recursive: true }) + }) + + test('should return files matching a simple pattern', async () => { + // Create test files + await writeFile(join(testDir, 'file1.txt'), 'content1') + await writeFile(join(testDir, 'file2.txt'), 'content2') + await writeFile(join(testDir, 'file3.js'), 'content3') + + const files = await getAllFilesForFilesHash('*.txt', testDir, []) + + expect(files).toHaveLength(2) + expect(files.some((f) => f.fullpath().endsWith('file1.txt'))).toBe(true) + expect(files.some((f) => f.fullpath().endsWith('file2.txt'))).toBe(true) + expect(files.some((f) => f.fullpath().endsWith('file3.js'))).toBe(false) + }) + + test('should handle directory patterns recursively', async () => { + // Create nested directory structure + await mkdir(join(testDir, 'src'), { recursive: true }) + await mkdir(join(testDir, 'src', 'components'), { recursive: true }) + await mkdir(join(testDir, 'src', 'utils'), { recursive: true }) + + await writeFile(join(testDir, 'src', 'index.ts'), 'index content') + await writeFile( + join(testDir, 'src', 'components', 'Button.tsx'), + 'button content' + ) + await writeFile( + join(testDir, 'src', 'utils', 'helper.ts'), + 'helper content' + ) + await writeFile(join(testDir, 'README.md'), 'readme content') + + const files = await getAllFilesForFilesHash('src', testDir, []) + + expect(files).toHaveLength(6) // 3 files + 3 directories 
(src, components, utils) + expect(files.some((f) => f.fullpath().endsWith('index.ts'))).toBe(true) + expect(files.some((f) => f.fullpath().endsWith('Button.tsx'))).toBe(true) + expect(files.some((f) => f.fullpath().endsWith('helper.ts'))).toBe(true) + expect(files.some((f) => f.fullpath().endsWith('README.md'))).toBe(false) + }) + + test('should respect ignore patterns', async () => { + // Create test files + await writeFile(join(testDir, 'file1.txt'), 'content1') + await writeFile(join(testDir, 'file2.txt'), 'content2') + await writeFile(join(testDir, 'temp.txt'), 'temp content') + await writeFile(join(testDir, 'backup.txt'), 'backup content') + + const files = await getAllFilesForFilesHash('*.txt', testDir, [ + 'temp*', + 'backup*', + ]) + + expect(files).toHaveLength(2) + expect(files.some((f) => f.fullpath().endsWith('file1.txt'))).toBe(true) + expect(files.some((f) => f.fullpath().endsWith('file2.txt'))).toBe(true) + expect(files.some((f) => f.fullpath().endsWith('temp.txt'))).toBe(false) + expect(files.some((f) => f.fullpath().endsWith('backup.txt'))).toBe(false) + }) + + test('should handle complex ignore patterns', async () => { + // Create nested structure with various file types + await mkdir(join(testDir, 'src'), { recursive: true }) + await mkdir(join(testDir, 'src', 'components'), { recursive: true }) + await mkdir(join(testDir, 'src', 'utils'), { recursive: true }) + await mkdir(join(testDir, 'tests'), { recursive: true }) + + await writeFile(join(testDir, 'src', 'index.ts'), 'index content') + await writeFile( + join(testDir, 'src', 'components', 'Button.tsx'), + 'button content' + ) + await writeFile( + join(testDir, 'src', 'utils', 'helper.ts'), + 'helper content' + ) + await writeFile(join(testDir, 'tests', 'test.spec.ts'), 'test content') + await writeFile( + join(testDir, 'src', 'components', 'Button.test.tsx'), + 'test content' + ) + await writeFile( + join(testDir, 'src', 'utils', 'helper.spec.ts'), + 'spec content' + ) + + const files = await getAllFilesForFilesHash('src', testDir, [ + '**/*.test.*', + '**/*.spec.*', + ]) + + expect(files).toHaveLength(6) // 3 files + 3 directories (src, components, utils) + expect(files.some((f) => f.fullpath().endsWith('index.ts'))).toBe(true) + expect(files.some((f) => f.fullpath().endsWith('Button.tsx'))).toBe(true) + expect(files.some((f) => f.fullpath().endsWith('helper.ts'))).toBe(true) + expect(files.some((f) => f.fullpath().endsWith('Button.test.tsx'))).toBe( + false + ) + expect(files.some((f) => f.fullpath().endsWith('helper.spec.ts'))).toBe( + false + ) + }) + + test('should handle empty directories', async () => { + await mkdir(join(testDir, 'empty'), { recursive: true }) + await writeFile(join(testDir, 'file.txt'), 'content') + + const files = await getAllFilesForFilesHash('empty', testDir, []) + + expect(files).toHaveLength(1) // The empty directory itself + }) + + test('should handle mixed files and directories', async () => { + // Create a mix of files and directories + await writeFile(join(testDir, 'file1.txt'), 'content1') + await mkdir(join(testDir, 'dir1'), { recursive: true }) + await writeFile(join(testDir, 'dir1', 'file2.txt'), 'content2') + await writeFile(join(testDir, 'file3.txt'), 'content3') + + const files = await getAllFilesForFilesHash('*', testDir, []) + + expect(files).toHaveLength(4) // 3 files + 1 directory + expect(files.some((f) => f.fullpath().endsWith('file1.txt'))).toBe(true) + expect(files.some((f) => f.fullpath().endsWith('file2.txt'))).toBe(true) + expect(files.some((f) => 
f.fullpath().endsWith('file3.txt'))).toBe(true) + }) + + test('should handle glob patterns with subdirectories', async () => { + // Create nested structure + await mkdir(join(testDir, 'src'), { recursive: true }) + await mkdir(join(testDir, 'src', 'components'), { recursive: true }) + await mkdir(join(testDir, 'src', 'utils'), { recursive: true }) + + await writeFile(join(testDir, 'src', 'index.ts'), 'index content') + await writeFile( + join(testDir, 'src', 'components', 'Button.tsx'), + 'button content' + ) + await writeFile( + join(testDir, 'src', 'utils', 'helper.ts'), + 'helper content' + ) + await writeFile( + join(testDir, 'src', 'components', 'Button.css'), + 'css content' + ) + + const files = await getAllFilesForFilesHash('src/**/*', testDir, []) + + expect(files).toHaveLength(9) // 4 files + 5 directories (including nested ones) + expect(files.some((f) => f.fullpath().endsWith('index.ts'))).toBe(true) + expect(files.some((f) => f.fullpath().endsWith('Button.tsx'))).toBe(true) + expect(files.some((f) => f.fullpath().endsWith('helper.ts'))).toBe(true) + expect(files.some((f) => f.fullpath().endsWith('Button.css'))).toBe(true) + }) + + test('should handle specific file extensions', async () => { + await writeFile(join(testDir, 'file1.ts'), 'ts content') + await writeFile(join(testDir, 'file2.js'), 'js content') + await writeFile(join(testDir, 'file3.tsx'), 'tsx content') + await writeFile(join(testDir, 'file4.css'), 'css content') + + const files = await getAllFilesForFilesHash('*.ts', testDir, []) + + expect(files).toHaveLength(1) + expect(files.some((f) => f.fullpath().endsWith('file1.ts'))).toBe(true) + }) + + test('should return sorted files', async () => { + await writeFile(join(testDir, 'zebra.txt'), 'z content') + await writeFile(join(testDir, 'apple.txt'), 'a content') + await writeFile(join(testDir, 'banana.txt'), 'b content') + + const files = await getAllFilesForFilesHash('*.txt', testDir, []) + + expect(files).toHaveLength(3) + // Files are sorted by full path, not just filename + const fileNames = files.map((f) => f.fullpath().split('/').pop()).sort() + expect(fileNames).toEqual(['apple.txt', 'banana.txt', 'zebra.txt']) + }) + + test('should handle no matching files', async () => { + await writeFile(join(testDir, 'file.txt'), 'content') + + const files = await getAllFilesForFilesHash('*.js', testDir, []) + + expect(files).toHaveLength(0) + }) + + test('should handle complex ignore patterns with directories', async () => { + // Create a complex structure + await mkdir(join(testDir, 'src'), { recursive: true }) + await mkdir(join(testDir, 'src', 'components'), { recursive: true }) + await mkdir(join(testDir, 'src', 'utils'), { recursive: true }) + await mkdir(join(testDir, 'src', 'tests'), { recursive: true }) + await mkdir(join(testDir, 'dist'), { recursive: true }) + + await writeFile(join(testDir, 'src', 'index.ts'), 'index content') + await writeFile( + join(testDir, 'src', 'components', 'Button.tsx'), + 'button content' + ) + await writeFile( + join(testDir, 'src', 'utils', 'helper.ts'), + 'helper content' + ) + await writeFile( + join(testDir, 'src', 'tests', 'test.spec.ts'), + 'test content' + ) + await writeFile(join(testDir, 'dist', 'bundle.js'), 'bundle content') + await writeFile(join(testDir, 'README.md'), 'readme content') + + const files = await getAllFilesForFilesHash('src', testDir, [ + '**/tests/**', + '**/*.spec.*', + ]) + + expect(files).toHaveLength(6) // 3 files + 3 directories (src, components, utils) + expect(files.some((f) => 
f.fullpath().endsWith('index.ts'))).toBe(true) + expect(files.some((f) => f.fullpath().endsWith('Button.tsx'))).toBe(true) + expect(files.some((f) => f.fullpath().endsWith('helper.ts'))).toBe(true) + expect(files.some((f) => f.fullpath().endsWith('test.spec.ts'))).toBe(false) + }) +}) diff --git a/packages/python-sdk/e2b/template/utils.py b/packages/python-sdk/e2b/template/utils.py index 07b6727489..7cbf60aac0 100644 --- a/packages/python-sdk/e2b/template/utils.py +++ b/packages/python-sdk/e2b/template/utils.py @@ -3,11 +3,11 @@ import json import stat from glob import glob -import fnmatch import re import inspect from types import TracebackType, FrameType from typing import List, Optional, Union +from pathspec import PathSpec from e2b.template.consts import BASE_STEP_NAME, FINALIZE_STEP_NAME @@ -34,6 +34,45 @@ def read_dockerignore(context_path: str) -> List[str]: ] +def get_all_files_for_files_hash( + src: str, context_path: str, ignore_patterns: List[str] +) -> List[str]: + """ + Get all files for a given path and ignore patterns. + + :param src: Path to the source directory + :param context_path: Base directory for resolving relative paths + :param ignore_patterns: Ignore patterns + :return: Array of files + """ + files = set() + + spec = PathSpec.from_lines("gitwildmatch", ignore_patterns) + + def matches_ignore(path: str) -> bool: + return spec.match_file(os.path.relpath(path, context_path)) + + # Use glob to find all files/directories matching the pattern under context_path + files_glob = glob(os.path.join(context_path, src), recursive=True) + + for file in files_glob: + if ignore_patterns and matches_ignore(file): + continue + + if os.path.isdir(file): + # If it's a directory, add the directory and all entries recursively + files.add(file) + dir_files = glob(os.path.join(file, "**/*"), recursive=True) + for dir_file in dir_files: + if ignore_patterns and matches_ignore(dir_file): + continue + files.add(dir_file) + else: + files.add(file) + + return sorted(list(files)) + + def calculate_files_hash( src: str, dest: str, @@ -58,24 +97,15 @@ def calculate_files_hash( :raises ValueError: If no files match the source pattern """ - src_path = os.path.join(context_path, src) hash_obj = hashlib.sha256() content = f"COPY {src} {dest}" hash_obj.update(content.encode()) - files_glob = glob(src_path, recursive=True) - - files = [] - for file in files_glob: - if ignore_patterns and any( - fnmatch.fnmatch(file, pattern) for pattern in ignore_patterns - ): - continue - files.append(file) + files = get_all_files_for_files_hash(src, context_path, ignore_patterns) if len(files) == 0: - raise ValueError(f"No files found in {src_path}").with_traceback(stack_trace) + raise ValueError(f"No files found in {src}").with_traceback(stack_trace) def hash_stats(stat_info: os.stat_result) -> None: hash_obj.update(str(stat_info.st_mode).encode()) diff --git a/packages/python-sdk/poetry.lock b/packages/python-sdk/poetry.lock index 51a91e56ac..570f340c3d 100644 --- a/packages/python-sdk/poetry.lock +++ b/packages/python-sdk/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. 
[[package]] name = "annotated-types" @@ -6,6 +6,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -17,6 +18,7 @@ version = "4.11.0" description = "High-level concurrency and networking framework on top of asyncio or Trio" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc"}, {file = "anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4"}, @@ -37,6 +39,7 @@ version = "3.6.2" description = "Bash tab completion for argparse" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "argcomplete-3.6.2-py3-none-any.whl", hash = "sha256:65b3133a29ad53fb42c48cf5114752c7ab66c1c38544fdf6460f450c09b42591"}, {file = "argcomplete-3.6.2.tar.gz", hash = "sha256:d0519b1bc867f5f4f4713c41ad0aba73a4a5f007449716b16f385f2166dc6adf"}, @@ -51,6 +54,7 @@ version = "25.4.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373"}, {file = "attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11"}, @@ -62,6 +66,7 @@ version = "25.9.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "black-25.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ce41ed2614b706fd55fd0b4a6909d06b5bab344ffbfadc6ef34ae50adba3d4f7"}, {file = "black-25.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ab0ce111ef026790e9b13bd216fa7bc48edd934ffc4cbf78808b235793cbc92"}, @@ -109,6 +114,7 @@ version = "2025.10.5" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de"}, {file = "certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43"}, @@ -120,6 +126,7 @@ version = "3.4.4" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"}, {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"}, @@ -242,6 +249,7 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -256,6 +264,8 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] +markers = "sys_platform == \"win32\" or platform_system == \"Windows\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -267,6 +277,7 @@ version = "4.5.2" description = "Databind is a library inspired by jackson-databind to de-/serialize Python dataclasses. The `databind` package will install the full suite of databind packages. Compatible with Python 3.8 and newer." optional = false python-versions = "<4.0.0,>=3.8.0" +groups = ["dev"] files = [ {file = "databind-4.5.2-py3-none-any.whl", hash = "sha256:b9c3a03c0414aa4567f095d7218ac904bd2b267b58e3763dac28e83d64b69770"}, {file = "databind-4.5.2.tar.gz", hash = "sha256:0a8aa0ff130a0306581c559388f5ef65e0fae7ef4b86412eacb1f4a0420006c4"}, @@ -286,6 +297,7 @@ version = "4.5.2" description = "Databind is a library inspired by jackson-databind to de-/serialize Python dataclasses. Compatible with Python 3.8 and newer. Deprecated, use `databind` package." optional = false python-versions = "<4.0.0,>=3.8.0" +groups = ["dev"] files = [ {file = "databind.core-4.5.2-py3-none-any.whl", hash = "sha256:a1dd1c6bd8ca9907d1292d8df9ec763ce91543e27f7eda4268e4a1a84fcd1c42"}, {file = "databind.core-4.5.2.tar.gz", hash = "sha256:b8ac8127bc5d6b239a2a81aeddb268b0c4cadd53fbce7e8b2c7a9ef6413bccb3"}, @@ -300,6 +312,7 @@ version = "4.5.2" description = "De-/serialize Python dataclasses to or from JSON payloads. Compatible with Python 3.8 and newer. Deprecated, use `databind` module instead." 
optional = false python-versions = "<4.0.0,>=3.8.0" +groups = ["dev"] files = [ {file = "databind.json-4.5.2-py3-none-any.whl", hash = "sha256:a803bf440634685984361cb2a5a975887e487c854ed48d81ff7aaf3a1ed1e94c"}, {file = "databind.json-4.5.2.tar.gz", hash = "sha256:6cc9b5c6fddaebd49b2433932948eb3be8a41633b90aa37998d7922504b8f165"}, @@ -314,6 +327,7 @@ version = "0.34.0" description = "Datamodel Code Generator" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "datamodel_code_generator-0.34.0-py3-none-any.whl", hash = "sha256:74d1aaf2ab27e21b6d6e28b5236f27271b8404b7fd0e856be95c2f7562d694ff"}, {file = "datamodel_code_generator-0.34.0.tar.gz", hash = "sha256:4695bdd2c9e85049db4bdf5791f68647518d98fd589d30bd8525e941e628acf7"}, @@ -345,6 +359,7 @@ version = "1.2.18" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +groups = ["dev"] files = [ {file = "Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec"}, {file = "deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d"}, @@ -354,7 +369,7 @@ files = [ wrapt = ">=1.10,<2" [package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools ; python_version >= \"3.12\"", "tox"] [[package]] name = "dockerfile-parse" @@ -362,6 +377,7 @@ version = "2.0.1" description = "Python library for Dockerfile manipulation" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "dockerfile-parse-2.0.1.tar.gz", hash = "sha256:3184ccdc513221983e503ac00e1aa504a2aa8f84e5de673c46b0b6eee99ec7bc"}, {file = "dockerfile_parse-2.0.1-py2.py3-none-any.whl", hash = "sha256:bdffd126d2eb26acf1066acb54cb2e336682e1d72b974a40894fac76a4df17f6"}, @@ -373,6 +389,7 @@ version = "2.2.1" description = "Docspec is a JSON object specification for representing API documentation of programming languages." optional = false python-versions = ">=3.7,<4.0" +groups = ["dev"] files = [ {file = "docspec-2.2.1-py3-none-any.whl", hash = "sha256:7538f750095a9688c6980ff9a4e029a823a500f64bd00b6b4bdb27951feb31cb"}, {file = "docspec-2.2.1.tar.gz", hash = "sha256:4854e77edc0e2de40e785e57e95880f7095a05fe978f8b54cef7a269586e15ff"}, @@ -389,6 +406,7 @@ version = "2.2.2" description = "A parser based on lib2to3 producing docspec data from Python source code." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "docspec_python-2.2.2-py3-none-any.whl", hash = "sha256:caa32dc1e8c470af8a5ecad67cca614e68c1563ac01dab0c0486c4d7f709d6b1"}, {file = "docspec_python-2.2.2.tar.gz", hash = "sha256:429be834d09549461b95bf45eb53c16859f3dfb3e9220408b3bfb12812ccb3fb"}, @@ -405,6 +423,7 @@ version = "0.11" description = "\"Parse Python docstrings in reST, Google and Numpydoc format\"" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "docstring_parser-0.11.tar.gz", hash = "sha256:93b3f8f481c7d24e37c5d9f30293c89e2933fa209421c8abd731dd3ef0715ecb"}, ] @@ -418,6 +437,8 @@ version = "1.3.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, @@ -435,6 +456,7 @@ version = "2.1.1" description = "execnet: rapid multi-Python deployment" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, @@ -449,6 +471,7 @@ version = "1.3.0" description = "GenSON is a powerful, user-friendly JSON Schema generator." optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "genson-1.3.0-py3-none-any.whl", hash = "sha256:468feccd00274cc7e4c09e84b08704270ba8d95232aa280f65b986139cec67f7"}, {file = "genson-1.3.0.tar.gz", hash = "sha256:e02db9ac2e3fd29e65b5286f7135762e2cd8a986537c075b06fc5f1517308e37"}, @@ -460,6 +483,7 @@ version = "0.16.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, @@ -471,6 +495,7 @@ version = "1.0.9" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, @@ -492,6 +517,7 @@ version = "0.28.1" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -504,7 +530,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -516,6 +542,7 @@ version = "3.11" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, @@ -530,6 +557,8 @@ version = "8.7.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version < \"3.10\"" files = [ {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, @@ -539,12 +568,12 @@ files = [ zipp = ">=3.20" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] @@ -553,6 +582,7 @@ version = "7.5.0" description = "Correctly generate plurals, singular nouns, ordinals, indefinite articles" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "inflect-7.5.0-py3-none-any.whl", hash = "sha256:2aea70e5e70c35d8350b8097396ec155ffd68def678c7ff97f51aa69c1d92344"}, {file = "inflect-7.5.0.tar.gz", hash = "sha256:faf19801c3742ed5a05a8ce388e0d8fe1a07f8d095c82201eb904f5d27ad571f"}, @@ -563,7 +593,7 @@ more_itertools = ">=8.5.0" typeguard = ">=4.0.1" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] @@ -576,6 +606,7 @@ version = "2.1.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, {file = "iniconfig-2.1.0.tar.gz", hash = 
"sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, @@ -587,6 +618,7 @@ version = "6.1.0" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.9.0" +groups = ["dev"] files = [ {file = "isort-6.1.0-py3-none-any.whl", hash = "sha256:58d8927ecce74e5087aef019f778d4081a3b6c98f15a80ba35782ca8a2097784"}, {file = "isort-6.1.0.tar.gz", hash = "sha256:9b8f96a14cfee0677e78e941ff62f03769a06d412aabb9e2a90487b3b7e8d481"}, @@ -605,6 +637,7 @@ version = "3.1.6" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, @@ -622,6 +655,7 @@ version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, @@ -646,6 +680,7 @@ version = "3.0.3" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559"}, {file = "markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419"}, @@ -744,6 +779,7 @@ version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -755,6 +791,7 @@ version = "10.8.0" description = "More routines for operating on iterables, beyond itertools" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b"}, {file = "more_itertools-10.8.0.tar.gz", hash = "sha256:f638ddf8a1a0d134181275fb5d58b086ead7c6a72429ad725c67503f13ba30bd"}, @@ -766,6 +803,7 @@ version = "1.1.0" description = "Type system extensions for programs checked with the mypy type checker." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, @@ -777,6 +815,7 @@ version = "2.1.0" description = "" optional = false python-versions = ">=3.6,<4.0" +groups = ["dev"] files = [ {file = "nr_date-2.1.0-py3-none-any.whl", hash = "sha256:bd672a9dfbdcf7c4b9289fea6750c42490eaee08036a72059dcc78cb236ed568"}, {file = "nr_date-2.1.0.tar.gz", hash = "sha256:0643aea13bcdc2a8bc56af9d5e6a89ef244c9744a1ef00cdc735902ba7f7d2e6"}, @@ -788,6 +827,7 @@ version = "1.1.5" description = "" optional = false python-versions = ">=3.6,<4.0" +groups = ["dev"] files = [ {file = "nr_stream-1.1.5-py3-none-any.whl", hash = "sha256:47e12150b331ad2cb729cfd9d2abd281c9949809729ba461c6aa87dd9927b2d4"}, {file = "nr_stream-1.1.5.tar.gz", hash = "sha256:eb0216c6bfc61a46d4568dba3b588502c610ec8ddef4ac98f3932a2bd7264f65"}, @@ -799,6 +839,7 @@ version = "0.8.12" description = "General purpose Python utility library." optional = false python-versions = ">=3.7,<4.0" +groups = ["dev"] files = [ {file = "nr.util-0.8.12-py3-none-any.whl", hash = "sha256:91da02ac9795eb8e015372275c1efe54bac9051231ee9b0e7e6f96b0b4e7d2bb"}, {file = "nr.util-0.8.12.tar.gz", hash = "sha256:a4549c2033d99d2f0379b3f3d233fd2a8ade286bbf0b3ad0cc7cea16022214f4"}, @@ -814,6 +855,7 @@ version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, @@ -825,6 +867,7 @@ version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -836,6 +879,7 @@ version = "4.4.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85"}, {file = "platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf"}, @@ -852,6 +896,7 @@ version = "1.6.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, @@ -867,6 +912,7 @@ version = "6.33.0" description = "" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "protobuf-6.33.0-cp310-abi3-win32.whl", hash = "sha256:d6101ded078042a8f17959eccd9236fb7a9ca20d3b0098bbcb91533a5680d035"}, {file = "protobuf-6.33.0-cp310-abi3-win_amd64.whl", hash = "sha256:9a031d10f703f03768f2743a1c403af050b6ae1f3480e9c140f39c45f81b13ee"}, @@ -886,6 +932,7 @@ version = "2.12.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pydantic-2.12.2-py3-none-any.whl", hash = "sha256:25ff718ee909acd82f1ff9b1a4acfd781bb23ab3739adaa7144f19a6a4e231ae"}, {file = "pydantic-2.12.2.tar.gz", hash = "sha256:7b8fa15b831a4bbde9d5b84028641ac3080a4ca2cbd4a621a661687e741624fd"}, @@ -899,7 +946,7 @@ typing-inspection = ">=0.4.2" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" @@ -907,6 +954,7 @@ version = "2.41.4" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pydantic_core-2.41.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2442d9a4d38f3411f22eb9dd0912b7cbf4b7d5b6c92c4173b75d3e1ccd84e36e"}, {file = "pydantic_core-2.41.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:30a9876226dda131a741afeab2702e2d127209bde3c65a2b8133f428bc5d006b"}, @@ -1036,6 +1084,7 @@ version = "4.8.2" description = "Create Python API documentation in Markdown format." optional = false python-versions = ">=3.7,<4.0" +groups = ["dev"] files = [ {file = "pydoc_markdown-4.8.2-py3-none-any.whl", hash = "sha256:203f74119e6bb2f9deba43d452422de7c8ec31955b61e0620fa4dd8c2611715f"}, {file = "pydoc_markdown-4.8.2.tar.gz", hash = "sha256:fb6c927e31386de17472d42f9bd3d3be2905977d026f6216881c65145aa67f0b"}, @@ -1063,6 +1112,7 @@ version = "2.19.2" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, @@ -1077,6 +1127,7 @@ version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, @@ -1099,6 +1150,7 @@ version = "0.23.8" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, @@ -1117,6 +1169,7 @@ version = "0.5.2" description = "A py.test plugin that parses environment files before running tests" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "pytest-dotenv-0.5.2.tar.gz", hash = "sha256:2dc6c3ac6d8764c71c6d2804e902d0ff810fa19692e95fe138aefc9b1aa73732"}, {file = "pytest_dotenv-0.5.2-py3-none-any.whl", hash = "sha256:40a2cece120a213898afaa5407673f6bd924b1fa7eafce6bda0e8abffe2f710f"}, @@ -1132,6 +1185,7 @@ version = "3.8.0" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88"}, {file = "pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1"}, @@ -1152,6 +1206,7 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -1166,6 +1221,7 @@ version = "1.1.1" description = "Read key-value pairs from a .env file and set them as environment variables" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc"}, {file = "python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab"}, @@ -1180,6 +1236,7 @@ version = "0.2.0" description = "A Fast, spec compliant Python 3.13+ tokenizer that runs on older Pythons." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytokens-0.2.0-py3-none-any.whl", hash = "sha256:74d4b318c67f4295c13782ddd9abcb7e297ec5630ad060eb90abf7ebbefe59f8"}, {file = "pytokens-0.2.0.tar.gz", hash = "sha256:532d6421364e5869ea57a9523bf385f02586d4662acbcc0342afd69511b4dd43"}, @@ -1194,6 +1251,7 @@ version = "6.0.3" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, @@ -1276,6 +1334,7 @@ version = "2.32.5" description = "Python HTTP for Humans." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, @@ -1297,6 +1356,7 @@ version = "14.2.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd"}, {file = "rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4"}, @@ -1315,6 +1375,7 @@ version = "0.11.13" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "ruff-0.11.13-py3-none-linux_armv6l.whl", hash = "sha256:4bdfbf1240533f40042ec00c9e09a3aade6f8c10b6414cf11b519488d2635d46"}, {file = "ruff-0.11.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:aef9c9ed1b5ca28bb15c7eac83b8670cf3b20b478195bd49c8d756ba0a36cf48"}, @@ -1342,19 +1403,21 @@ version = "80.9.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version < \"3.10\"" files = [ {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] -core = ["importlib_metadata (>=6)", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "six" @@ -1362,6 +1425,7 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = 
"six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -1373,6 +1437,7 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -1384,6 +1449,7 @@ version = "2.3.0" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45"}, {file = "tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba"}, @@ -1435,6 +1501,7 @@ version = "1.2.0" description = "A lil' TOML writer" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "tomli_w-1.2.0-py3-none-any.whl", hash = "sha256:188306098d013b691fcadc011abd66727d3c414c571bb01b1a174ba8c983cf90"}, {file = "tomli_w-1.2.0.tar.gz", hash = "sha256:2dd14fac5a47c27be9cd4c976af5a12d87fb1f0b4512f81d69cce3b35ae25021"}, @@ -1446,6 +1513,7 @@ version = "2.2.4" description = "" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "typeapi-2.2.4-py3-none-any.whl", hash = "sha256:bd6d5e5907fa47e0303bf254e7cc8712d4be4eb26d7ffaedb67c9e7844c53bb8"}, {file = "typeapi-2.2.4.tar.gz", hash = "sha256:daa80767520c0957a320577e4f729c0ba6921c708def31f4c6fd8d611908fd7b"}, @@ -1460,6 +1528,7 @@ version = "4.4.4" description = "Run-time type checker for Python" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "typeguard-4.4.4-py3-none-any.whl", hash = "sha256:b5f562281b6bfa1f5492470464730ef001646128b180769880468bd84b68b09e"}, {file = "typeguard-4.4.4.tar.gz", hash = "sha256:3a7fd2dffb705d4d0efaed4306a704c89b9dee850b688f060a8b1615a79e5f74"}, @@ -1475,6 +1544,7 @@ version = "4.15.0" description = "Backported and Experimental Type Hints for Python 3.9+" optional = false python-versions = ">=3.9" +groups = ["main", "dev"] files = [ {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, @@ -1486,6 +1556,7 @@ version = "0.4.2" description = "Runtime typing introspection tools" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"}, {file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"}, @@ -1500,13 +1571,14 @@ version = "2.5.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -1517,6 +1589,7 @@ version = "6.0.0" description = "Filesystem events monitoring" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"}, {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"}, @@ -1559,6 +1632,7 @@ version = "1.17.3" description = "Module for decorators, wrappers and monkey patching." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "wrapt-1.17.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88bbae4d40d5a46142e70d58bf664a89b6b4befaea7b2ecc14e03cedb8e06c04"}, {file = "wrapt-1.17.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b13af258d6a9ad602d57d889f83b9d5543acd471eee12eb51f5b01f8eb1bc2"}, @@ -1649,6 +1723,7 @@ version = "0.43.0" description = "A formatter for Python code" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "yapf-0.43.0-py3-none-any.whl", hash = "sha256:224faffbc39c428cb095818cf6ef5511fdab6f7430a10783fdfb292ccf2852ca"}, {file = "yapf-0.43.0.tar.gz", hash = "sha256:00d3aa24bfedff9420b2e0d5d9f5ab6d9d4268e72afbf59bb3fa542781d5218e"}, @@ -1664,13 +1739,15 @@ version = "3.23.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version < \"3.10\"" files = [ {file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}, {file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] @@ -1678,6 +1755,6 @@ test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_it type = ["pytest-mypy"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.9" -content-hash = "04dab9a9328d4cd18faf57e661d272b78bee7ede93c10eb328fed49f0ddd3d0b" +content-hash = "dbe47a6eaf33a8da5d59b6df218822da78ebaee6261ad6e0105661db890f3bfb" diff --git a/packages/python-sdk/pyproject.toml b/packages/python-sdk/pyproject.toml index 606efbf2d0..b0c7bcabff 100644 --- a/packages/python-sdk/pyproject.toml +++ b/packages/python-sdk/pyproject.toml @@ -11,7 +11,7 @@ packages = [{ include = "e2b" }, { include = "e2b_connect" }] [tool.poetry.dependencies] python = "^3.9" - +pathspec = "^0.12.1" python-dateutil = ">=2.8.2" protobuf = ">=4.21.0" httpcore = 
"^1.0.5" diff --git a/packages/python-sdk/tests/async/template_async/utils/test_get_all_files_for_files_hash.py b/packages/python-sdk/tests/async/template_async/utils/test_get_all_files_for_files_hash.py new file mode 100644 index 0000000000..d0798a9d1e --- /dev/null +++ b/packages/python-sdk/tests/async/template_async/utils/test_get_all_files_for_files_hash.py @@ -0,0 +1,281 @@ +import os +import tempfile +import pytest +from e2b.template.utils import get_all_files_for_files_hash + + +class TestGetAllFilesForFilesHash: + @pytest.fixture + def test_dir(self): + """Create a temporary directory for testing.""" + with tempfile.TemporaryDirectory() as tmpdir: + yield tmpdir + + def test_should_return_files_matching_simple_pattern(self, test_dir): + """Test that function returns files matching a simple pattern.""" + # Create test files + with open(os.path.join(test_dir, "file1.txt"), "w") as f: + f.write("content1") + with open(os.path.join(test_dir, "file2.txt"), "w") as f: + f.write("content2") + with open(os.path.join(test_dir, "file3.js"), "w") as f: + f.write("content3") + + files = get_all_files_for_files_hash("*.txt", test_dir, []) + + assert len(files) == 2 + assert any("file1.txt" in f for f in files) + assert any("file2.txt" in f for f in files) + assert not any("file3.js" in f for f in files) + + def test_should_handle_directory_patterns_recursively(self, test_dir): + """Test that function handles directory patterns recursively.""" + # Create nested directory structure + os.makedirs(os.path.join(test_dir, "src", "components"), exist_ok=True) + os.makedirs(os.path.join(test_dir, "src", "utils"), exist_ok=True) + + with open(os.path.join(test_dir, "src", "index.ts"), "w") as f: + f.write("index content") + with open(os.path.join(test_dir, "src", "components", "Button.tsx"), "w") as f: + f.write("button content") + with open(os.path.join(test_dir, "src", "utils", "helper.ts"), "w") as f: + f.write("helper content") + with open(os.path.join(test_dir, "README.md"), "w") as f: + f.write("readme content") + + files = get_all_files_for_files_hash("src", test_dir, []) + + assert len(files) == 6 # 3 files + 3 directories (src, components, utils) + assert any("index.ts" in f for f in files) + assert any("Button.tsx" in f for f in files) + assert any("helper.ts" in f for f in files) + assert not any("README.md" in f for f in files) + + def test_should_respect_ignore_patterns(self, test_dir): + """Test that function respects ignore patterns.""" + # Create test files + with open(os.path.join(test_dir, "file1.txt"), "w") as f: + f.write("content1") + with open(os.path.join(test_dir, "file2.txt"), "w") as f: + f.write("content2") + with open(os.path.join(test_dir, "temp.txt"), "w") as f: + f.write("temp content") + with open(os.path.join(test_dir, "backup.txt"), "w") as f: + f.write("backup content") + + files = get_all_files_for_files_hash("*.txt", test_dir, ["temp*", "backup*"]) + + assert len(files) == 2 + assert any("file1.txt" in f for f in files) + assert any("file2.txt" in f for f in files) + assert not any("temp.txt" in f for f in files) + assert not any("backup.txt" in f for f in files) + + def test_should_handle_complex_ignore_patterns(self, test_dir): + """Test that function handles complex ignore patterns.""" + # Create nested structure with various file types + os.makedirs(os.path.join(test_dir, "src", "components"), exist_ok=True) + os.makedirs(os.path.join(test_dir, "src", "utils"), exist_ok=True) + os.makedirs(os.path.join(test_dir, "tests"), exist_ok=True) + + with 
open(os.path.join(test_dir, "src", "index.ts"), "w") as f: + f.write("index content") + with open(os.path.join(test_dir, "src", "components", "Button.tsx"), "w") as f: + f.write("button content") + with open(os.path.join(test_dir, "src", "utils", "helper.ts"), "w") as f: + f.write("helper content") + with open(os.path.join(test_dir, "tests", "test.spec.ts"), "w") as f: + f.write("test content") + with open( + os.path.join(test_dir, "src", "components", "Button.test.tsx"), "w" + ) as f: + f.write("test content") + with open(os.path.join(test_dir, "src", "utils", "helper.spec.ts"), "w") as f: + f.write("spec content") + + files = get_all_files_for_files_hash( + "src", test_dir, ["**/*.test.*", "**/*.spec.*"] + ) + + assert len(files) == 6 # 3 files + 3 directories (src, components, utils) + assert any("index.ts" in f for f in files) + assert any("Button.tsx" in f for f in files) + assert any("helper.ts" in f for f in files) + assert not any("Button.test.tsx" in f for f in files) + assert not any("helper.spec.ts" in f for f in files) + + def test_should_handle_empty_directories(self, test_dir): + """Test that function handles empty directories.""" + os.makedirs(os.path.join(test_dir, "empty"), exist_ok=True) + with open(os.path.join(test_dir, "file.txt"), "w") as f: + f.write("content") + + files = get_all_files_for_files_hash("empty", test_dir, []) + + assert len(files) == 1 + + def test_should_handle_mixed_files_and_directories(self, test_dir): + """Test that function handles mixed files and directories.""" + # Create a mix of files and directories + with open(os.path.join(test_dir, "file1.txt"), "w") as f: + f.write("content1") + os.makedirs(os.path.join(test_dir, "dir1"), exist_ok=True) + with open(os.path.join(test_dir, "dir1", "file2.txt"), "w") as f: + f.write("content2") + with open(os.path.join(test_dir, "file3.txt"), "w") as f: + f.write("content3") + + files = get_all_files_for_files_hash("*", test_dir, []) + + assert len(files) == 4 + assert any("file1.txt" in f for f in files) + assert any("file2.txt" in f for f in files) + assert any("file3.txt" in f for f in files) + + def test_should_handle_glob_patterns_with_subdirectories(self, test_dir): + """Test that function handles glob patterns with subdirectories.""" + # Create nested structure + os.makedirs(os.path.join(test_dir, "src", "components"), exist_ok=True) + os.makedirs(os.path.join(test_dir, "src", "utils"), exist_ok=True) + + with open(os.path.join(test_dir, "src", "index.ts"), "w") as f: + f.write("index content") + with open(os.path.join(test_dir, "src", "components", "Button.tsx"), "w") as f: + f.write("button content") + with open(os.path.join(test_dir, "src", "utils", "helper.ts"), "w") as f: + f.write("helper content") + with open(os.path.join(test_dir, "src", "components", "Button.css"), "w") as f: + f.write("css content") + + files = get_all_files_for_files_hash("src/**/*", test_dir, []) + + assert len(files) == 6 + assert any("index.ts" in f for f in files) + assert any("Button.tsx" in f for f in files) + assert any("helper.ts" in f for f in files) + assert any("Button.css" in f for f in files) + + def test_should_handle_specific_file_extensions(self, test_dir): + """Test that function handles specific file extensions.""" + with open(os.path.join(test_dir, "file1.ts"), "w") as f: + f.write("ts content") + with open(os.path.join(test_dir, "file2.js"), "w") as f: + f.write("js content") + with open(os.path.join(test_dir, "file3.tsx"), "w") as f: + f.write("tsx content") + with open(os.path.join(test_dir, 
"file4.css"), "w") as f: + f.write("css content") + + files = get_all_files_for_files_hash("*.ts", test_dir, []) + + assert len(files) == 1 + assert any("file1.ts" in f for f in files) + + def test_should_return_sorted_files(self, test_dir): + """Test that function returns sorted files.""" + with open(os.path.join(test_dir, "zebra.txt"), "w") as f: + f.write("z content") + with open(os.path.join(test_dir, "apple.txt"), "w") as f: + f.write("a content") + with open(os.path.join(test_dir, "banana.txt"), "w") as f: + f.write("b content") + + files = get_all_files_for_files_hash("*.txt", test_dir, []) + + assert len(files) == 3 + assert "apple.txt" in files[0] + assert "banana.txt" in files[1] + assert "zebra.txt" in files[2] + + def test_should_handle_no_matching_files(self, test_dir): + """Test that function handles no matching files.""" + with open(os.path.join(test_dir, "file.txt"), "w") as f: + f.write("content") + + files = get_all_files_for_files_hash("*.js", test_dir, []) + + assert len(files) == 0 + + def test_should_handle_complex_ignore_patterns_with_directories(self, test_dir): + """Test that function handles complex ignore patterns with directories.""" + # Create a complex structure + os.makedirs(os.path.join(test_dir, "src", "components"), exist_ok=True) + os.makedirs(os.path.join(test_dir, "src", "utils"), exist_ok=True) + os.makedirs(os.path.join(test_dir, "src", "tests"), exist_ok=True) + os.makedirs(os.path.join(test_dir, "dist"), exist_ok=True) + + with open(os.path.join(test_dir, "src", "index.ts"), "w") as f: + f.write("index content") + with open(os.path.join(test_dir, "src", "components", "Button.tsx"), "w") as f: + f.write("button content") + with open(os.path.join(test_dir, "src", "utils", "helper.ts"), "w") as f: + f.write("helper content") + with open(os.path.join(test_dir, "src", "tests", "test.spec.ts"), "w") as f: + f.write("test content") + with open(os.path.join(test_dir, "dist", "bundle.js"), "w") as f: + f.write("bundle content") + with open(os.path.join(test_dir, "README.md"), "w") as f: + f.write("readme content") + + files = get_all_files_for_files_hash( + "src", test_dir, ["**/tests/**", "**/*.spec.*"] + ) + + assert ( + len(files) == 7 + ) # 3 files + 4 directories (src, components, utils, tests excluded) + assert any("index.ts" in f for f in files) + assert any("Button.tsx" in f for f in files) + assert any("helper.ts" in f for f in files) + assert not any("test.spec.ts" in f for f in files) + + def test_should_handle_symlinks(self, test_dir): + """Test that function handles symbolic links.""" + # Create a file and a symlink to it + with open(os.path.join(test_dir, "original.txt"), "w") as f: + f.write("original content") + + # Create symlink (only on Unix-like systems) + if hasattr(os, "symlink"): + os.symlink("original.txt", os.path.join(test_dir, "link.txt")) + + files = get_all_files_for_files_hash("*.txt", test_dir, []) + + assert len(files) == 2 + assert any("original.txt" in f for f in files) + assert any("link.txt" in f for f in files) + + def test_should_handle_nested_ignore_patterns(self, test_dir): + """Test that function handles nested ignore patterns.""" + # Create nested structure + os.makedirs(os.path.join(test_dir, "src", "components", "ui"), exist_ok=True) + os.makedirs(os.path.join(test_dir, "src", "components", "forms"), exist_ok=True) + os.makedirs(os.path.join(test_dir, "src", "utils"), exist_ok=True) + + with open(os.path.join(test_dir, "src", "index.ts"), "w") as f: + f.write("index content") + with open( + os.path.join(test_dir, 
"src", "components", "ui", "Button.tsx"), "w" + ) as f: + f.write("button content") + with open( + os.path.join(test_dir, "src", "components", "forms", "Input.tsx"), "w" + ) as f: + f.write("input content") + with open(os.path.join(test_dir, "src", "utils", "helper.ts"), "w") as f: + f.write("helper content") + with open( + os.path.join(test_dir, "src", "components", "ui", "Button.test.tsx"), "w" + ) as f: + f.write("test content") + + files = get_all_files_for_files_hash("src", test_dir, ["**/ui/**"]) + + assert ( + len(files) == 8 + ) # 3 files + 5 directories (src, components, forms, utils) + assert any("index.ts" in f for f in files) + assert any("Input.tsx" in f for f in files) + assert any("helper.ts" in f for f in files) + assert not any("Button.tsx" in f for f in files) + assert not any("Button.test.tsx" in f for f in files) diff --git a/packages/python-sdk/tests/sync/template_sync/utils/test_get_all_files_for_files_hash.py b/packages/python-sdk/tests/sync/template_sync/utils/test_get_all_files_for_files_hash.py new file mode 100644 index 0000000000..d0798a9d1e --- /dev/null +++ b/packages/python-sdk/tests/sync/template_sync/utils/test_get_all_files_for_files_hash.py @@ -0,0 +1,281 @@ +import os +import tempfile +import pytest +from e2b.template.utils import get_all_files_for_files_hash + + +class TestGetAllFilesForFilesHash: + @pytest.fixture + def test_dir(self): + """Create a temporary directory for testing.""" + with tempfile.TemporaryDirectory() as tmpdir: + yield tmpdir + + def test_should_return_files_matching_simple_pattern(self, test_dir): + """Test that function returns files matching a simple pattern.""" + # Create test files + with open(os.path.join(test_dir, "file1.txt"), "w") as f: + f.write("content1") + with open(os.path.join(test_dir, "file2.txt"), "w") as f: + f.write("content2") + with open(os.path.join(test_dir, "file3.js"), "w") as f: + f.write("content3") + + files = get_all_files_for_files_hash("*.txt", test_dir, []) + + assert len(files) == 2 + assert any("file1.txt" in f for f in files) + assert any("file2.txt" in f for f in files) + assert not any("file3.js" in f for f in files) + + def test_should_handle_directory_patterns_recursively(self, test_dir): + """Test that function handles directory patterns recursively.""" + # Create nested directory structure + os.makedirs(os.path.join(test_dir, "src", "components"), exist_ok=True) + os.makedirs(os.path.join(test_dir, "src", "utils"), exist_ok=True) + + with open(os.path.join(test_dir, "src", "index.ts"), "w") as f: + f.write("index content") + with open(os.path.join(test_dir, "src", "components", "Button.tsx"), "w") as f: + f.write("button content") + with open(os.path.join(test_dir, "src", "utils", "helper.ts"), "w") as f: + f.write("helper content") + with open(os.path.join(test_dir, "README.md"), "w") as f: + f.write("readme content") + + files = get_all_files_for_files_hash("src", test_dir, []) + + assert len(files) == 6 # 3 files + 3 directories (src, components, utils) + assert any("index.ts" in f for f in files) + assert any("Button.tsx" in f for f in files) + assert any("helper.ts" in f for f in files) + assert not any("README.md" in f for f in files) + + def test_should_respect_ignore_patterns(self, test_dir): + """Test that function respects ignore patterns.""" + # Create test files + with open(os.path.join(test_dir, "file1.txt"), "w") as f: + f.write("content1") + with open(os.path.join(test_dir, "file2.txt"), "w") as f: + f.write("content2") + with open(os.path.join(test_dir, "temp.txt"), "w") as 
f: + f.write("temp content") + with open(os.path.join(test_dir, "backup.txt"), "w") as f: + f.write("backup content") + + files = get_all_files_for_files_hash("*.txt", test_dir, ["temp*", "backup*"]) + + assert len(files) == 2 + assert any("file1.txt" in f for f in files) + assert any("file2.txt" in f for f in files) + assert not any("temp.txt" in f for f in files) + assert not any("backup.txt" in f for f in files) + + def test_should_handle_complex_ignore_patterns(self, test_dir): + """Test that function handles complex ignore patterns.""" + # Create nested structure with various file types + os.makedirs(os.path.join(test_dir, "src", "components"), exist_ok=True) + os.makedirs(os.path.join(test_dir, "src", "utils"), exist_ok=True) + os.makedirs(os.path.join(test_dir, "tests"), exist_ok=True) + + with open(os.path.join(test_dir, "src", "index.ts"), "w") as f: + f.write("index content") + with open(os.path.join(test_dir, "src", "components", "Button.tsx"), "w") as f: + f.write("button content") + with open(os.path.join(test_dir, "src", "utils", "helper.ts"), "w") as f: + f.write("helper content") + with open(os.path.join(test_dir, "tests", "test.spec.ts"), "w") as f: + f.write("test content") + with open( + os.path.join(test_dir, "src", "components", "Button.test.tsx"), "w" + ) as f: + f.write("test content") + with open(os.path.join(test_dir, "src", "utils", "helper.spec.ts"), "w") as f: + f.write("spec content") + + files = get_all_files_for_files_hash( + "src", test_dir, ["**/*.test.*", "**/*.spec.*"] + ) + + assert len(files) == 6 # 3 files + 3 directories (src, components, utils) + assert any("index.ts" in f for f in files) + assert any("Button.tsx" in f for f in files) + assert any("helper.ts" in f for f in files) + assert not any("Button.test.tsx" in f for f in files) + assert not any("helper.spec.ts" in f for f in files) + + def test_should_handle_empty_directories(self, test_dir): + """Test that function handles empty directories.""" + os.makedirs(os.path.join(test_dir, "empty"), exist_ok=True) + with open(os.path.join(test_dir, "file.txt"), "w") as f: + f.write("content") + + files = get_all_files_for_files_hash("empty", test_dir, []) + + assert len(files) == 1 + + def test_should_handle_mixed_files_and_directories(self, test_dir): + """Test that function handles mixed files and directories.""" + # Create a mix of files and directories + with open(os.path.join(test_dir, "file1.txt"), "w") as f: + f.write("content1") + os.makedirs(os.path.join(test_dir, "dir1"), exist_ok=True) + with open(os.path.join(test_dir, "dir1", "file2.txt"), "w") as f: + f.write("content2") + with open(os.path.join(test_dir, "file3.txt"), "w") as f: + f.write("content3") + + files = get_all_files_for_files_hash("*", test_dir, []) + + assert len(files) == 4 + assert any("file1.txt" in f for f in files) + assert any("file2.txt" in f for f in files) + assert any("file3.txt" in f for f in files) + + def test_should_handle_glob_patterns_with_subdirectories(self, test_dir): + """Test that function handles glob patterns with subdirectories.""" + # Create nested structure + os.makedirs(os.path.join(test_dir, "src", "components"), exist_ok=True) + os.makedirs(os.path.join(test_dir, "src", "utils"), exist_ok=True) + + with open(os.path.join(test_dir, "src", "index.ts"), "w") as f: + f.write("index content") + with open(os.path.join(test_dir, "src", "components", "Button.tsx"), "w") as f: + f.write("button content") + with open(os.path.join(test_dir, "src", "utils", "helper.ts"), "w") as f: + f.write("helper content") 
+ with open(os.path.join(test_dir, "src", "components", "Button.css"), "w") as f: + f.write("css content") + + files = get_all_files_for_files_hash("src/**/*", test_dir, []) + + assert len(files) == 6 + assert any("index.ts" in f for f in files) + assert any("Button.tsx" in f for f in files) + assert any("helper.ts" in f for f in files) + assert any("Button.css" in f for f in files) + + def test_should_handle_specific_file_extensions(self, test_dir): + """Test that function handles specific file extensions.""" + with open(os.path.join(test_dir, "file1.ts"), "w") as f: + f.write("ts content") + with open(os.path.join(test_dir, "file2.js"), "w") as f: + f.write("js content") + with open(os.path.join(test_dir, "file3.tsx"), "w") as f: + f.write("tsx content") + with open(os.path.join(test_dir, "file4.css"), "w") as f: + f.write("css content") + + files = get_all_files_for_files_hash("*.ts", test_dir, []) + + assert len(files) == 1 + assert any("file1.ts" in f for f in files) + + def test_should_return_sorted_files(self, test_dir): + """Test that function returns sorted files.""" + with open(os.path.join(test_dir, "zebra.txt"), "w") as f: + f.write("z content") + with open(os.path.join(test_dir, "apple.txt"), "w") as f: + f.write("a content") + with open(os.path.join(test_dir, "banana.txt"), "w") as f: + f.write("b content") + + files = get_all_files_for_files_hash("*.txt", test_dir, []) + + assert len(files) == 3 + assert "apple.txt" in files[0] + assert "banana.txt" in files[1] + assert "zebra.txt" in files[2] + + def test_should_handle_no_matching_files(self, test_dir): + """Test that function handles no matching files.""" + with open(os.path.join(test_dir, "file.txt"), "w") as f: + f.write("content") + + files = get_all_files_for_files_hash("*.js", test_dir, []) + + assert len(files) == 0 + + def test_should_handle_complex_ignore_patterns_with_directories(self, test_dir): + """Test that function handles complex ignore patterns with directories.""" + # Create a complex structure + os.makedirs(os.path.join(test_dir, "src", "components"), exist_ok=True) + os.makedirs(os.path.join(test_dir, "src", "utils"), exist_ok=True) + os.makedirs(os.path.join(test_dir, "src", "tests"), exist_ok=True) + os.makedirs(os.path.join(test_dir, "dist"), exist_ok=True) + + with open(os.path.join(test_dir, "src", "index.ts"), "w") as f: + f.write("index content") + with open(os.path.join(test_dir, "src", "components", "Button.tsx"), "w") as f: + f.write("button content") + with open(os.path.join(test_dir, "src", "utils", "helper.ts"), "w") as f: + f.write("helper content") + with open(os.path.join(test_dir, "src", "tests", "test.spec.ts"), "w") as f: + f.write("test content") + with open(os.path.join(test_dir, "dist", "bundle.js"), "w") as f: + f.write("bundle content") + with open(os.path.join(test_dir, "README.md"), "w") as f: + f.write("readme content") + + files = get_all_files_for_files_hash( + "src", test_dir, ["**/tests/**", "**/*.spec.*"] + ) + + assert ( + len(files) == 7 + ) # 3 files + 4 directories (src, components, utils, tests excluded) + assert any("index.ts" in f for f in files) + assert any("Button.tsx" in f for f in files) + assert any("helper.ts" in f for f in files) + assert not any("test.spec.ts" in f for f in files) + + def test_should_handle_symlinks(self, test_dir): + """Test that function handles symbolic links.""" + # Create a file and a symlink to it + with open(os.path.join(test_dir, "original.txt"), "w") as f: + f.write("original content") + + # Create symlink (only on Unix-like 
systems) + if hasattr(os, "symlink"): + os.symlink("original.txt", os.path.join(test_dir, "link.txt")) + + files = get_all_files_for_files_hash("*.txt", test_dir, []) + + assert len(files) == 2 + assert any("original.txt" in f for f in files) + assert any("link.txt" in f for f in files) + + def test_should_handle_nested_ignore_patterns(self, test_dir): + """Test that function handles nested ignore patterns.""" + # Create nested structure + os.makedirs(os.path.join(test_dir, "src", "components", "ui"), exist_ok=True) + os.makedirs(os.path.join(test_dir, "src", "components", "forms"), exist_ok=True) + os.makedirs(os.path.join(test_dir, "src", "utils"), exist_ok=True) + + with open(os.path.join(test_dir, "src", "index.ts"), "w") as f: + f.write("index content") + with open( + os.path.join(test_dir, "src", "components", "ui", "Button.tsx"), "w" + ) as f: + f.write("button content") + with open( + os.path.join(test_dir, "src", "components", "forms", "Input.tsx"), "w" + ) as f: + f.write("input content") + with open(os.path.join(test_dir, "src", "utils", "helper.ts"), "w") as f: + f.write("helper content") + with open( + os.path.join(test_dir, "src", "components", "ui", "Button.test.tsx"), "w" + ) as f: + f.write("test content") + + files = get_all_files_for_files_hash("src", test_dir, ["**/ui/**"]) + + assert ( + len(files) == 8 + ) # 3 files + 5 directories (src, components, ui, forms, utils); ui contents excluded + assert any("index.ts" in f for f in files) + assert any("Input.tsx" in f for f in files) + assert any("helper.ts" in f for f in files) + assert not any("Button.tsx" in f for f in files) + assert not any("Button.test.tsx" in f for f in files)
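Note on the tested helper: this part of the diff adds only the Python tests; the implementation of `get_all_files_for_files_hash` in `e2b/template/utils.py` is not shown here. The sketch below is a hypothetical reading of the contract the tests pin down: a glob-style `src` pattern resolved against `context_path`, matched directories included together with their contents, gitignore-style `ignore_patterns` applied throughout, and a sorted list of path strings returned. The use of `glob` plus the `pathspec` dependency newly added to `pyproject.toml` is an assumption; the SDK's real code may differ.

```python
# Hypothetical sketch of get_all_files_for_files_hash -- NOT the SDK's actual code.
# Assumes glob-style `src` patterns and gitignore-style `ignore_patterns`
# (matched with pathspec, which this PR adds as a dependency).
import os
from glob import glob
from typing import List

from pathspec import PathSpec


def get_all_files_for_files_hash(
    src: str, context_path: str, ignore_patterns: List[str]
) -> List[str]:
    """Return a sorted list of paths matching `src`, resolved against `context_path`.

    A matched directory is included itself, together with everything inside it,
    minus anything that matches `ignore_patterns`.
    """
    spec = PathSpec.from_lines("gitwildmatch", ignore_patterns)
    results = set()

    # recursive=True lets patterns like "src/**/*" descend into subdirectories.
    for match in glob(os.path.join(context_path, src), recursive=True):
        if spec.match_file(os.path.relpath(match, context_path)):
            continue
        results.add(match)
        if os.path.isdir(match):
            # Add the directory's contents (subdirectories and files), skipping
            # anything the ignore patterns exclude.
            for root, dirs, files in os.walk(match):
                for name in dirs + files:
                    full = os.path.join(root, name)
                    if not spec.match_file(os.path.relpath(full, context_path)):
                        results.add(full)

    return sorted(results)
```

Whatever the actual implementation looks like, the assertions above (item counts of 6, 7, and 8, sorted ordering, substring membership checks on path strings) only hold if matched directories are counted alongside their files and the ignore patterns are applied both to the top-level glob matches and to the recursively collected contents.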