Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
136 changes: 131 additions & 5 deletions src/penguin/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -229,8 +229,10 @@ def _do_package(project_dir, output_path):
# This fully validates and resolves auto-patching files into final config state
config = _validate_project(project_dir_abs, config_path)

# Always compute base_name from the project directory
base_name = os.path.basename(project_dir_abs)

if output_path is None:
base_name = os.path.basename(project_dir_abs)
output_path = f"{base_name}.tar.gz"

# Route relative output paths into the mapped workspace
Expand Down Expand Up @@ -352,7 +354,8 @@ def _add_file(file_path):
version_file_path = os.path.join(temp_dir, ".penguin_packaged_version")
package_metadata = {
"format_version": 1,
"penguin_version": VERSION
"penguin_version": VERSION,
"base_name": base_name
}
with open(version_file_path, "w") as f:
yaml.dump(package_metadata, f, default_flow_style=False, sort_keys=False)
Expand Down Expand Up @@ -871,7 +874,7 @@ def guest_cmd(ctx, args):
@click.option("-o", "--out", type=str, default=None, help="Output tar.gz file path. Defaults to <project_dir_name>.tar.gz")
@verbose_option
@click.pass_context
def package(ctx, project_dir, out):
def pack(ctx, project_dir, out):
"""
Package a penguin project into a distributable archive.

Expand All @@ -881,14 +884,137 @@ def package(ctx, project_dir, out):
_do_package(project_dir, out)


@cli.command()
@click.argument("archive", type=click.Path(exists=True))
@click.option("-o", "--output", type=str, default="./projects", help="Output directory path. Defaults to ./projects in current directory.")
@click.option("--force", is_flag=True, default=False, help="Forcefully delete output directory if it exists")
@verbose_option
@click.pass_context
def unpack(ctx, archive, output, force):
    """
    Extract a packaged penguin project.

    ARCHIVE is the path to a .tar.gz file created by 'penguin pack'.
    """
    _startup_checks(ctx.obj['VERBOSE'])

    archive_path = os.path.abspath(archive)
    if not archive_path.endswith('.tar.gz'):
        raise ValueError(f"Archive must be a .tar.gz file: {archive}")

    # click.Path(exists=True) already validates direct CLI use; keep an
    # explicit check for programmatic callers (e.g. ctx.invoke from 'import').
    if not os.path.exists(archive_path):
        raise ValueError(f"Archive file not found: {archive}")

    # Route relative output paths into the mapped workspace, similar to package command
    if output is None:
        output = "./projects"

    if not os.path.isabs(output):
        if os.path.exists("/workspace"):
            output = os.path.join("/workspace", output)
        else:
            output = os.path.abspath(output)
    else:
        output = os.path.abspath(output)

    # Verify the archive contains the version file BEFORE creating anything on
    # disk, so an invalid archive leaves no empty output directories behind.
    try:
        subprocess.run(
            ["tar", "-tzf", archive_path, ".penguin_packaged_version"],
            capture_output=True,
            text=True,
            check=True
        )
    except subprocess.CalledProcessError:
        raise ValueError(
            "Archive is not a valid penguin package: missing .penguin_packaged_version file. "
            "This archive was not created with 'penguin pack'."
        )
    except FileNotFoundError:
        logger.error("tar command not found. Please ensure tar is installed.")
        exit(1)

    # Extract only the metadata file into a throwaway directory to learn the
    # project's base_name without touching the real output location.
    with tempfile.TemporaryDirectory() as temp_extract_dir:
        try:
            subprocess.run(
                ["tar", "-I", "pigz", "-xf", archive_path, "-C", temp_extract_dir, ".penguin_packaged_version"],
                check=True
            )
            metadata_path = os.path.join(temp_extract_dir, ".penguin_packaged_version")
            with open(metadata_path, "r") as f:
                metadata = yaml.safe_load(f) or {}

            base_name = metadata.get("base_name")
            if not base_name:
                # Older packages predate the base_name field: fall back to the
                # archive filename without its .tar.gz suffix.
                base_name = os.path.basename(archive_path)
                if base_name.endswith(".tar.gz"):
                    base_name = base_name[:-7]
                logger.warning(f"No base_name in metadata, using archive filename: {base_name}")
        except FileNotFoundError:
            # Mirror the handling of the verification step above instead of
            # letting a missing binary surface as an unhandled traceback.
            logger.error("tar command not found. Please ensure tar is installed.")
            exit(1)
        except (subprocess.CalledProcessError, yaml.YAMLError) as e:
            # NOTE(review): a CalledProcessError here can also mean pigz is
            # not installed — tar's stderr carries the detail.
            logger.error(f"Failed to extract metadata: {e}")
            exit(1)

    # Set target directory based on metadata
    target_dir = os.path.join(output, base_name)

    # Check if target directory exists
    if os.path.exists(target_dir):
        if force:
            logger.info(f"Deleting existing directory: {target_dir}")
            shutil.rmtree(target_dir, ignore_errors=True)
        else:
            raise ValueError(
                f"Output directory already exists: {target_dir}. Use --force to delete."
            )

    logger.info(f"Extracting {archive} to {target_dir}...")

    # makedirs creates 'output' and all intermediate directories too, so no
    # separate os.makedirs(output) is needed beforehand.
    os.makedirs(target_dir, exist_ok=True)

    try:
        subprocess.run(
            ["tar", "-I", "pigz", "-xf", archive_path, "-C", target_dir],
            check=True
        )
    except FileNotFoundError:
        logger.error("tar command not found. Please ensure tar is installed.")
        exit(1)
    except subprocess.CalledProcessError as e:
        logger.error(f"Failed to extract archive: {e}")
        exit(1)

    logger.info(f"Successfully extracted to {target_dir}")

    # Sanity-check that the extracted tree looks like a penguin project.
    config_path = os.path.join(target_dir, "config.yaml")
    if not os.path.exists(config_path):
        logger.warning("Extracted directory does not contain config.yaml")
    else:
        logger.info(f"Project ready at {target_dir}")


@cli.command(hidden=True)
@click.argument("project_dir", type=click.Path(exists=True))
@click.option("-o", "--out", type=str, default=None)
@verbose_option
@click.pass_context
def export(ctx, project_dir, out):
    """Alias for pack"""
    # Hidden backward-compat alias: delegate to the canonical 'pack' command
    # (renamed from 'package') so behavior never drifts between the two.
    ctx.invoke(pack, project_dir=project_dir, out=out)


@cli.command(name="import", hidden=True)
@click.argument("archive", type=click.Path(exists=True))
@click.option("-o", "--output", type=str, default="./projects")
@click.option("--force", is_flag=True, default=False)
@verbose_option
@click.pass_context
def import_cmd(ctx, archive, output, force):
    """Alias for unpack"""
    # Hidden backward-compat alias: forward every received option unchanged
    # to the canonical 'unpack' command.
    forwarded = {"archive": archive, "output": output, "force": force}
    ctx.invoke(unpack, **forwarded)


if __name__ == "__main__":
Expand Down
Loading