Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .pylintrc
Original file line number Diff line number Diff line change
Expand Up @@ -370,6 +370,8 @@ max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=2

max-positional-arguments=12


[CLASSES]

Expand Down
2 changes: 1 addition & 1 deletion libcloudforensics/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,4 +16,4 @@

# Since moving to poetry, ensure the version number tracked in pyproject.toml is
# also updated
__version__ = '20250331'
__version__ = '20250721'
10 changes: 5 additions & 5 deletions libcloudforensics/providers/aws/forensics.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,9 +113,6 @@ def CreateVolumeCopy(zone: str,
account information could not be retrieved.
"""

if not instance_id and not volume_id:
raise ValueError(
'You must specify at least one of [instance_id, volume_id].')

source_account = account.AWSAccount(zone, aws_profile=src_profile)
destination_account = account.AWSAccount(zone, aws_profile=dst_profile)
Expand All @@ -127,6 +124,10 @@ def CreateVolumeCopy(zone: str,
elif instance_id:
instance = source_account.ec2.GetInstanceById(instance_id)
volume_to_copy = instance.GetBootVolume()
else:
raise ValueError(
'You must specify at least one of [instance_id, volume_id].')


if not volume_type:
volume_type = volume_to_copy.GetVolumeType()
Expand Down Expand Up @@ -200,8 +201,7 @@ def CreateVolumeCopy(zone: str,

return new_volume

# pylint: disable=too-many-arguments
def StartAnalysisVm(
def StartAnalysisVm( # pylint: disable=too-many-arguments,too-many-positional-arguments
vm_name: str,
default_availability_zone: str,
boot_volume_size: int,
Expand Down
3 changes: 1 addition & 2 deletions libcloudforensics/providers/aws/internal/ec2.py
Original file line number Diff line number Diff line change
Expand Up @@ -372,8 +372,7 @@ def ListImages(

return images['Images']

# pylint: disable=too-many-arguments
def GetOrCreateVm(
def GetOrCreateVm( # pylint: disable=too-many-arguments,too-many-positional-arguments
self,
vm_name: str,
boot_volume_size: int,
Expand Down
8 changes: 4 additions & 4 deletions libcloudforensics/providers/azure/forensics.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,10 +72,6 @@ def CreateDiskCopy(
ValueError: If both instance_name and disk_name are missing.
"""

if not instance_name and not disk_name:
raise ValueError(
'You must specify at least one of [instance_name, disk_name].')

src_account = account.AZAccount(
resource_group_name, default_region=region, profile_name=src_profile)
dst_account = account.AZAccount(resource_group_name,
Expand All @@ -88,6 +84,10 @@ def CreateDiskCopy(
elif instance_name:
instance = src_account.compute.GetInstance(instance_name)
disk_to_copy = instance.GetBootDisk()
else:
raise ValueError(
'You must specify at least one of [instance_name, disk_name].')

logger.info('Disk copy of {0:s} started...'.format(
disk_to_copy.name))

Expand Down
99 changes: 68 additions & 31 deletions libcloudforensics/providers/gcp/forensics.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,10 +68,6 @@ def CreateDiskCopy(
ValueError: If both instance_name and disk_name are missing.
"""

if not instance_name and not disk_name:
raise ValueError(
'You must specify at least one of [instance_name, disk_name].')

src_project = gcp_project.GoogleCloudProject(src_proj)
dst_project = gcp_project.GoogleCloudProject(dst_proj, default_zone=zone)

Expand All @@ -81,6 +77,9 @@ def CreateDiskCopy(
elif instance_name:
instance = src_project.compute.GetInstance(instance_name)
disk_to_copy = instance.GetBootDisk()
else:
raise ValueError(
'You must specify at least one of [instance_name, disk_name].')

if not disk_type:
disk_type = disk_to_copy.GetDiskType()
Expand Down Expand Up @@ -252,6 +251,50 @@ def CreateDiskFromGCSImage(
return result


def CopyDisksToGCS(source_project: str,
                   source_disk: str,
                   destination_bucket: str,
                   destination_directory: str,
                   image_format: str) -> str:
  """Copy a GCE disk into a GCS bucket as a disk image.

  The disk is first snapshotted into a compute image, which is then
  exported into the requested bucket/directory in the given format.

  Args:
    source_project: The project containing the disk to copy
    source_disk: The name of the disk to copy
    destination_bucket: The destination bucket to store the disk copy
    destination_directory: The directory in the bucket in which to store the
        disk image
    image_format: The image format to use. Supported formats documented at
        https://github.com/GoogleCloudPlatform/compute-image-import/blob/edee48bddbe159100da9ad961131a4beb0f12158/cli_tools/gce_vm_image_export/README.md?plain=1#L3

  Returns:
    str: The GCS path of the exported image, as returned by ExportImage.

  Raises:
    errors.CredentialsConfigurationError: If credentials are misconfigured or
        the caller lacks permissions on the project.
    errors.ResourceNotFoundError: If the project / disk cannot be found.
    RuntimeError: For any other HTTP error from the API.
  """
  # Destination folder; note the directory is appended verbatim after a '/'.
  gcs_folder = 'gs://{0:s}/{1:s}'.format(
      destination_bucket, destination_directory)
  try:
    project = gcp_project.GoogleCloudProject(source_project)
    disk = project.compute.GetDisk(source_disk)
    # Exporting requires an intermediate compute image of the disk.
    image = project.compute.CreateImageFromDisk(disk)
    return image.ExportImage(
        gcs_output_folder=gcs_folder,
        image_format=image_format,
        output_name=disk.name)
  except (RefreshError, DefaultCredentialsError) as exception:
    raise errors.CredentialsConfigurationError(
        'Something is wrong with your Application Default Credentials. Try '
        'running: $ gcloud auth application-default login: {0!s}'.format(
            exception),
        __name__) from exception
  except HttpError as exception:
    # Map well-known HTTP statuses onto the library's exception hierarchy.
    if exception.resp.status == 403:
      raise errors.CredentialsConfigurationError(
          'Make sure you have the appropriate permissions on the project: '
          '{0!s}'.format(exception),
          __name__) from exception
    if exception.resp.status == 404:
      raise errors.ResourceNotFoundError(
          'GCP resource not found. Maybe a typo in the project / instance / '
          'disk name?',
          __name__) from exception
    raise RuntimeError(exception) from exception


def AddDenyAllFirewallRules(
project_id: str,
network: str,
Expand Down Expand Up @@ -669,19 +712,13 @@ def TriageInstance(project_id: str, instance_name: str) -> Dict[str, Any]:

cpu_usage = project.monitoring.GetCpuUsage(
instance_ids=[instance_info['id']], aggregation_minutes=1)
if cpu_usage:
parsed_cpu = cpu_usage[0].get('cpu_usage', [])
parsed_cpu = cpu_usage[0].get('cpu_usage', []) if cpu_usage else None


gce_gpu_usage = project.monitoring.GetInstanceGPUUsage(
instance_ids=[instance_info['id']])
if gce_gpu_usage:
parsed_gce_gpu = gce_gpu_usage

instance_ids=[instance_info['id']])

gke_gpu_usage = project.monitoring.GetNodeAccelUsage()
if gke_gpu_usage:
parsed_gke_gpu = gke_gpu_usage

instance_triage = {
'instance_info': {
Expand All @@ -697,25 +734,25 @@ def TriageInstance(project_id: str, instance_name: str) -> Dict[str, Any]:
'data_type': 'service_accounts',
'values': instance_info['serviceAccounts']
},
{
'data_type': 'firewalls',
'values': instance.GetNormalisedFirewalls()
}, {
'data_type': 'cpu_usage', 'values': parsed_cpu
}, {
'data_type': 'gce_gpu_usage', 'values': parsed_gce_gpu
}, {
'data_type': 'gke_gpu_usage', 'values': parsed_gke_gpu
}, {
'data_type':
'ssh_auth',
'values':
CheckInstanceSSHAuth(
project_id, instance_info['name'])
}, {
'data_type': 'active_services',
'values': parsed_services
}]
{
'data_type': 'firewalls',
'values': instance.GetNormalisedFirewalls()
}, {
'data_type': 'cpu_usage', 'values': parsed_cpu
}, {
'data_type': 'gce_gpu_usage', 'values': gce_gpu_usage
}, {
'data_type': 'gke_gpu_usage', 'values': gke_gpu_usage
}, {
'data_type':
'ssh_auth',
'values':
CheckInstanceSSHAuth(
project_id, instance_info['name'])
}, {
'data_type': 'active_services',
'values': parsed_services
}]
}

return instance_triage
44 changes: 28 additions & 16 deletions libcloudforensics/providers/gcp/internal/compute.py
Original file line number Diff line number Diff line change
Expand Up @@ -682,7 +682,7 @@ def CreateInstanceFromRequest(
return GoogleComputeInstance(
project_id=self.project_id, zone=compute_zone, name=instance_name)

def CreateInstanceFromArguments( #pylint: disable=too-many-arguments
def CreateInstanceFromArguments( # pylint: disable=too-many-arguments,too-many-positional-arguments
self,
instance_name: str,
machine_type: str,
Expand Down Expand Up @@ -1288,6 +1288,7 @@ def ImportImageFromStorage(self,
'windows-8-x86-byol'
]

img_type = None
if not bootable:
img_type = '-data_disk'
elif not os_name:
Expand Down Expand Up @@ -2253,39 +2254,49 @@ def GetOperation(self) -> Dict[str, Any]:
return response

def ExportImage(
self, gcs_output_folder: str, output_name: Optional[str] = None) -> None:
"""Export compute image to Google Cloud storage.

self,
gcs_output_folder: str,
image_format: str,
output_name: Optional[str]) -> str:
"""Export compute image to Google Cloud Storage.

Exported image is compressed and stored in .tar.gz format.

Args:
gcs_output_folder (str): Folder path of the exported image.
image_format (str): The image format to use for the export.
output_name (str): Optional. Name of the output file. Name will be
appended with .tar.gz. Default is [image_name].tar.gz.

Returns:
str: The full path of the exported image.
Raises:
InvalidNameError: If exported image name is invalid.
"""

if output_name:
if not common.REGEX_DISK_NAME.match(output_name):
raise errors.InvalidNameError(
'Exported image name {0:s} does not comply with {1:s}'.format(
output_name, common.REGEX_DISK_NAME.pattern),
__name__)
full_path = '{0:s}.tar.gz'.format(
os.path.join(gcs_output_folder, output_name))
full_path = '{0:s}'.format(os.path.join(gcs_output_folder, output_name))
else:
full_path = '{0:s}.tar.gz'.format(
os.path.join(gcs_output_folder, self.name))
full_path = '{0:s}'.format(os.path.join(gcs_output_folder, self.name))
if not image_format:
full_path = '{0:s}.tar.gz'.format(full_path)
else:
full_path = '{0:s}.{1:s}'.format(full_path, image_format)

build_args = [
'-source_image={0:s}'.format(self.name),
'-destination_uri={0:s}'.format(full_path),
'-client_id=api',
]
if image_format:
build_args.append('-format={0:s}'.format(image_format))
build_body = {
'timeout': '86400s',
'steps': [{
'args': [
'-source_image={0:s}'.format(self.name),
'-destination_uri={0:s}'.format(full_path),
'-client_id=api',
],
'steps': [{
'args': build_args,
'name': 'gcr.io/compute-image-tools/gce_vm_image_export:release',
'env': []
}],
Expand All @@ -2295,6 +2306,7 @@ def ExportImage(
response = cloud_build.CreateBuild(build_body)
cloud_build.BlockOperation(response)
logger.info('Image {0:s} exported to {1:s}.'.format(self.name, full_path))
return full_path

def Delete(self) -> None:
"""Delete Compute Disk Image from a project."""
Expand Down
1 change: 1 addition & 0 deletions libcloudforensics/providers/gcp/internal/monitoring.py
Original file line number Diff line number Diff line change
Expand Up @@ -333,6 +333,7 @@ def GetInstanceGPUUsage(
for response in responses:
time_series = response.get('timeSeries', [])
for ts in time_series:
gpu_name = 'None'
if ts['metric'].get('labels', None):
gpu_name = "{0:s} ({1:s})".format(
ts['metric']['labels']['model'],
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "libcloudforensics"
version = "20250331"
version = "20250721"
description = "libcloudforensics is a set of tools to help acquire forensic evidence from Cloud platforms."
authors = ["cloud-forensics-utils development team <[email protected]>"]
license = "Apache-2.0"
Expand Down
25 changes: 25 additions & 0 deletions tests/providers/gcp/test_forensics.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,3 +153,28 @@ def testCheckInstanceSSHAuth(self, mock_subprocess, mock_project):
'fake_project' , 'fake_instance')
self.assertListEqual(
ssh_auth, ['publickey', 'password', 'keyboard-interactive'])

@mock.patch('libcloudforensics.providers.gcp.internal.project.GoogleCloudProject')
def testCopyDisksToGCS(self, mock_project: mock.MagicMock) -> None:
  """Tests copying a disk to GCS storage."""

  dest_bucket_name = gcp_mocks.MOCK_GCS_BUCKETS['items'][0].get('name')  # type: ignore
  dest_directory = '/path/to/directory/'
  disk_format = 'qcow2'

  forensics.CopyDisksToGCS(
      gcp_mocks.FAKE_SOURCE_PROJECT.project_id,
      gcp_mocks.FAKE_DISK.name,
      dest_bucket_name,
      dest_directory,
      disk_format)

  mock_compute = mock_project.return_value.compute

  # The project and disk passed in should be looked up exactly once.
  mock_project.assert_called_once_with(
      gcp_mocks.FAKE_SOURCE_PROJECT.project_id)
  mock_compute.GetDisk.assert_called_once_with(gcp_mocks.FAKE_DISK.name)

  # An intermediate image must be created from the resolved disk...
  resolved_disk = mock_compute.GetDisk.return_value
  mock_compute.CreateImageFromDisk.assert_called_once_with(resolved_disk)

  # ...and exported to the expected GCS folder in the requested format.
  created_image = mock_compute.CreateImageFromDisk.return_value
  created_image.ExportImage.assert_called_once_with(
      gcs_output_folder=f'gs://{dest_bucket_name}/{dest_directory}',
      image_format=disk_format,
      output_name=resolved_disk.name)

11 changes: 11 additions & 0 deletions tools/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@
'bucketacls': gcp_cli.GetBucketACLs,
'bucketsize': gcp_cli.GetBucketSize,
'copydisk': gcp_cli.CreateDiskCopy,
'copydisktogcs': gcp_cli.CopyDiskToGCS,
'creatediskgcs': gcp_cli.CreateDiskFromGCSImage,
'deleteinstance': gcp_cli.DeleteInstance,
'deleteobject': gcp_cli.DeleteObject,
Expand Down Expand Up @@ -387,6 +388,16 @@ def Main() -> None:
'The default behavior is to use the same disk '
'type as the source disk.', None)
])
AddParser('gcp', gcp_subparsers, 'copydisktogcs',
'Copy a disk content into GCS.',
args=[
('project', 'Source GCP project containing the disk to copy',
''),
('disk_name', 'Name of the disk to copy.', ''),
('bucket', 'Name of the destination bucket.', ''),
('directory', 'Destination directory path in the GCS bucket.',
''),
('image_format', 'Image format.', '')])
AddParser('gcp', gcp_subparsers, 'startvm', 'Start a forensic analysis VM.',
args=[
('instance_name', 'Name of the GCE instance to create.',
Expand Down
Loading