scancodeio/settings.py: 70 additions & 0 deletions
@@ -23,9 +23,14 @@
import sys
import tempfile
from pathlib import Path
import logging

import environ

from scanpipe.archiving import LocalFilesystemProvider
from scanpipe.archiving import S3LikeProvider
from scanpipe.archiving import SftpProvider

logger = logging.getLogger(__name__)

PROJECT_DIR = environ.Path(__file__) - 1
ROOT_DIR = PROJECT_DIR - 1

@@ -371,6 +376,71 @@

CRISPY_TEMPLATE_PACK = "bootstrap3"

# Package download archiving settings (local filesystem, S3-like, or SFTP storage)

ENABLE_DOWNLOAD_ARCHIVING = env.bool("ENABLE_DOWNLOAD_ARCHIVING", default=False)

# Supported values: "localstorage", "s3", "sftp"
DOWNLOAD_ARCHIVING_PROVIDER = env.str(
    "DOWNLOAD_ARCHIVING_PROVIDER", default="localstorage"
)

# Provider-specific configuration, parsed as a dict of key=value pairs.
# For local storage, this holds the root path of the archive directory.
DOWNLOAD_ARCHIVING_PROVIDER_CONFIGURATION = env.dict(
    "DOWNLOAD_ARCHIVING_PROVIDER_CONFIGURATION", default=None
)
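
# Illustrative example only (the values below are assumptions, not project
# defaults): using django-environ's "key=value,key=value" dict syntax, a
# local filesystem setup could be configured through the environment as:
#   ENABLE_DOWNLOAD_ARCHIVING=True
#   DOWNLOAD_ARCHIVING_PROVIDER=localstorage
#   DOWNLOAD_ARCHIVING_PROVIDER_CONFIGURATION=root_path=/var/scancodeio/downloads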

# Initialize the DownloadStore based on provider

download_store = None
if ENABLE_DOWNLOAD_ARCHIVING:
    if DOWNLOAD_ARCHIVING_PROVIDER == "localstorage":
        config = DOWNLOAD_ARCHIVING_PROVIDER_CONFIGURATION or {}
        root_path = Path(config.get("root_path", "/var/scancodeio/downloads"))
        try:
            download_store = LocalFilesystemProvider(root_path=root_path)
        except Exception as e:
            logger.error(f"Failed to initialize LocalFilesystemProvider: {e}")
elif DOWNLOAD_ARCHIVING_PROVIDER == "s3":
config = DOWNLOAD_ARCHIVING_PROVIDER_CONFIGURATION or {}
required_keys = ["bucket_name", "aws_userid", "aws_apikey"]
if not all(key in config for key in required_keys):
logger.error(
f"S3 provider requires {required_keys}"
"in DOWNLOAD_ARCHIVING_PROVIDER_CONFIGURATION"
)
else:
try:
download_store = S3LikeProvider(
bucket_name=config.get("bucket_name"),
aws_userid=config.get("aws_userid"),
aws_apikey=config.get("aws_apikey"),
other_aws_credentials=config.get("other_aws_credentials", {}),
)
except Exception as e:
logger.error(f"Failed to initialize S3LikeProvider: {e}")
elif DOWNLOAD_ARCHIVING_PROVIDER == "sftp":
config = DOWNLOAD_ARCHIVING_PROVIDER_CONFIGURATION or {}
required_keys = ["host", "root_path", "ssh_credentials"]
if not all(key in config for key in required_keys):
logger.error(
f"SFTP provider requires {required_keys}"
"in DOWNLOAD_ARCHIVING_PROVIDER_CONFIGURATION"
)
else:
try:
download_store = SftpProvider(
host=config.get("host"),
root_path=config.get("root_path"),
ssh_credentials=config.get("ssh_credentials", {}),
)
except Exception as e:
logger.error(f"Failed to initialize SftpProvider: {e}")
    else:
        logger.error(
            f"Unknown DOWNLOAD_ARCHIVING_PROVIDER: {DOWNLOAD_ARCHIVING_PROVIDER}"
        )
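
# Illustrative environment examples only; the bucket, host, and credential
# values are placeholders, and the required keys are simply those checked
# above, not a documented reference. An S3-like provider could be set up as:
#   DOWNLOAD_ARCHIVING_PROVIDER=s3
#   DOWNLOAD_ARCHIVING_PROVIDER_CONFIGURATION=bucket_name=my-archive-bucket,aws_userid=EXAMPLE_ID,aws_apikey=EXAMPLE_SECRET
# An SFTP provider needs host, root_path, and ssh_credentials. Note that
# ssh_credentials is passed as a dict above, which the flat key=value syntax
# of env.dict() does not express directly, so it may need richer handling.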

# Job Queue

RQ_QUEUES = {