Commit d2ef6f0

Show error message for access deny
1 parent f6a4b4c commit d2ef6f0

3 files changed: +148 −27 lines changed

3 files changed

+148
-27
lines changed

smoosense-gui/src/lib/features/folderTree/folderTreeSlice.ts

Lines changed: 11 additions & 1 deletion

@@ -57,7 +57,17 @@ export const loadFolderContents = createAsyncThunk(
 
     const response = await fetch(`${API_PREFIX}/ls?${params}`)
     if (!response.ok) {
-      throw new Error(`Failed to load folder contents: ${response.statusText}`)
+      // Try to parse error message from API response
+      try {
+        const errorData = await response.json()
+        throw new Error(errorData.error || `Failed to load folder contents: ${response.statusText}`)
+      } catch (parseError) {
+        // If parsing fails, throw generic error
+        if (parseError instanceof Error && parseError.message !== `Failed to load folder contents: ${response.statusText}`) {
+          throw parseError
+        }
+        throw new Error(`Failed to load folder contents: ${response.statusText}`)
+      }
     }
 
     const items: FSItem[] = await response.json()
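The thunk now prefers the error field from the JSON body of a failed response and only falls back to the generic statusText message when that field is missing or the body is not JSON. A minimal sketch of the same round trip written in Python, assuming a local dev server on port 5000 and the requests library (both are illustrative assumptions, not part of this commit):

import requests  # illustrative only; the real client is the TypeScript thunk above

API_PREFIX = "http://localhost:5000"  # assumption: local dev server

resp = requests.get(f"{API_PREFIX}/ls", params={"path": "s3://some-denied-bucket/"})
if not resp.ok:
    fallback = f"Failed to load folder contents: {resp.reason}"
    try:
        # Prefer the server-provided message, mirroring errorData.error above
        message = resp.json().get("error") or fallback
    except ValueError:
        # Body was not JSON; fall back to the generic message
        message = fallback
    raise RuntimeError(message)

items = resp.json()  # on success: list of {name, size, lastModified, isDir}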

smoosense-py/smoosense/utils/s3_fs.py

Lines changed: 37 additions & 26 deletions

@@ -6,6 +6,7 @@
 import boto3
 from pydantic import validate_call
 
+from smoosense.exceptions import AccessDeniedException
 from smoosense.utils.models import FSItem
 
 logger = logging.getLogger(__name__)
@@ -17,6 +18,8 @@ def __init__(self, s3_client: boto3.client):
 
     @validate_call()
     def list_one_level(self, key: str, limit: int = 100) -> list[FSItem]:
+        from botocore.exceptions import ClientError
+
         # Parse the S3 URL to get bucket and prefix
         parsed = urlparse(key)
         bucket = parsed.netloc
@@ -28,35 +31,43 @@ def list_one_level(self, key: str, limit: int = 100) -> list[FSItem]:
         paginator = self.s3_client.get_paginator("list_objects_v2")
         items = []
 
-        for page in paginator.paginate(Bucket=bucket, Prefix=prefix, Delimiter="/"):
-            # Add common prefixes (directories)
-            for prefix_entry in page.get("CommonPrefixes", []):
-                items.append(
-                    FSItem(
-                        name=os.path.basename(prefix_entry["Prefix"].rstrip("/")),
-                        size=0,
-                        lastModified=0,
-                        isDir=True,
+        try:
+            for page in paginator.paginate(Bucket=bucket, Prefix=prefix, Delimiter="/"):
+                # Add common prefixes (directories)
+                for prefix_entry in page.get("CommonPrefixes", []):
+                    items.append(
+                        FSItem(
+                            name=os.path.basename(prefix_entry["Prefix"].rstrip("/")),
+                            size=0,
+                            lastModified=0,
+                            isDir=True,
+                        )
                     )
-                )
-
-            # Add objects (files)
-            for obj in page.get("Contents", []):
-                # Skip the directory marker itself
-                if obj["Key"] == prefix:
-                    continue
-                items.append(
-                    FSItem(
-                        name=os.path.basename(obj["Key"]),
-                        size=obj["Size"],
-                        lastModified=int(obj["LastModified"].timestamp() * 1000),
-                        isDir=False,
+
+                # Add objects (files)
+                for obj in page.get("Contents", []):
+                    # Skip the directory marker itself
+                    if obj["Key"] == prefix:
+                        continue
+                    items.append(
+                        FSItem(
+                            name=os.path.basename(obj["Key"]),
+                            size=obj["Size"],
+                            lastModified=int(obj["LastModified"].timestamp() * 1000),
+                            isDir=False,
+                        )
                     )
-                )
 
-            if len(items) >= limit:
-                items = items[:limit]
-                break
+                if len(items) >= limit:
+                    items = items[:limit]
+                    break
+        except ClientError as e:
+            error_code = e.response.get("Error", {}).get("Code", "")
+            if error_code == "AccessDenied":
+                raise AccessDeniedException(str(e)) from e
+            if error_code == "NoSuchBucket":
+                raise FileNotFoundError(str(e)) from e
+            raise
 
         return items
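Neither smoosense/exceptions.py nor the /ls handler in smoosense/handlers/fs.py appears in this diff, so the following is only a sketch of how the new AccessDeniedException might be defined and mapped to the HTTP status codes the tests below expect (403 with an "error" key for access denial, 404 for a missing bucket, 400 for a missing path parameter). The names, messages, and the listing stub are assumptions:

# Hypothetical sketch only -- the real exception class and /ls handler are not
# part of this commit. Shapes are kept consistent with the tests added below.
from flask import Blueprint, jsonify, request


class AccessDeniedException(Exception):
    """Raised when S3 reports an AccessDenied error code."""


def list_one_level(path: str, limit: int) -> list[dict]:
    # Stand-in for S3FileSystem.list_one_level; the real implementation uses boto3.
    raise AccessDeniedException(f"Access denied while listing {path}")


fs_bp = Blueprint("fs", __name__)


@fs_bp.route("/ls")
def ls():
    path = request.args.get("path")
    if not path:
        return jsonify({"error": "Missing required parameter: path"}), 400
    try:
        return jsonify(list_one_level(path, int(request.args.get("limit", 100))))
    except AccessDeniedException as e:
        # This message is what the frontend surfaces via errorData.error
        return jsonify({"error": f"AccessDenied: {e}"}), 403
    except FileNotFoundError as e:
        return jsonify({"error": f"NoSuchBucket: {e}"}), 404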

smoosense-py/tests/test_s3_ls.py

Lines changed: 100 additions & 0 deletions

@@ -0,0 +1,100 @@ (new file)

import json
import unittest

import boto3
from flask import Flask

from smoosense.handlers.fs import fs_bp
from smoosense.my_logging import getLogger

logger = getLogger(__name__)


class TestS3LSEndpoint(unittest.TestCase):
    """Test cases for the /ls endpoint with S3 paths."""

    def setUp(self):
        """Set up test fixtures before each test method."""
        self.app = Flask(__name__)
        self.app.register_blueprint(fs_bp)
        self.app.config["TESTING"] = True
        self.app.config["S3_CLIENT"] = boto3.client("s3")
        self.client = self.app.test_client()

        # Set up application context for all tests
        self.app_context = self.app.app_context()
        self.app_context.push()

    def tearDown(self):
        """Clean up after each test method."""
        self.app_context.pop()

    def test_ls_s3_bucket_root(self):
        """Test listing S3 bucket root."""
        response = self.client.get("/ls?path=s3://smoosense-demo/")
        self.assertEqual(response.status_code, 200)

        data = json.loads(response.get_data(as_text=True))
        self.assertIsInstance(data, list)
        self.assertGreater(len(data), 0)

        # Each item should have required fields
        for item in data:
            self.assertIn("name", item)
            self.assertIn("size", item)
            self.assertIn("lastModified", item)
            self.assertIn("isDir", item)

    def test_ls_s3_nested_path(self):
        """Test listing S3 nested path."""
        response = self.client.get("/ls?path=s3://smoosense-demo/datasets/")
        self.assertEqual(response.status_code, 200)

        data = json.loads(response.get_data(as_text=True))
        self.assertIsInstance(data, list)

    def test_ls_s3_with_limit(self):
        """Test listing S3 with limit parameter."""
        response = self.client.get("/ls?path=s3://smoosense-demo/&limit=2")
        self.assertEqual(response.status_code, 200)

        data = json.loads(response.get_data(as_text=True))
        self.assertIsInstance(data, list)
        self.assertLessEqual(len(data), 2)

    def test_ls_s3_nonexistent_bucket(self):
        """Test listing non-existent S3 bucket returns 404."""
        response = self.client.get("/ls?path=s3://this-bucket-definitely-does-not-exist-12345/")
        self.assertEqual(response.status_code, 404)

        data = json.loads(response.get_data(as_text=True))
        self.assertIn("error", data)
        self.assertIn("NoSuchBucket", data["error"])

    def test_ls_s3_access_denied(self):
        """Test listing S3 bucket without access returns 403."""
        # Use a known bucket that exists but we don't have access to
        response = self.client.get("/ls?path=s3://amazon-reviews-pds/")

        # This should return 403 if access is denied
        # Note: This test depends on not having access to this public bucket
        # If you do have access, this test may pass with 200
        if response.status_code == 403:
            data = json.loads(response.get_data(as_text=True))
            self.assertIn("error", data)
            self.assertIn("AccessDenied", data["error"])
        else:
            # If we happen to have access, just verify it returns valid data
            self.assertEqual(response.status_code, 200)

    def test_ls_s3_missing_path(self):
        """Test listing without path parameter returns 400."""
        response = self.client.get("/ls")
        self.assertEqual(response.status_code, 400)

        data = json.loads(response.get_data(as_text=True))
        self.assertIn("error", data)


if __name__ == "__main__":
    unittest.main()
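These tests call live S3 and therefore depend on ambient AWS credentials and on the state of external buckets (the access-denied case explicitly tolerates either outcome). A deterministic complement is possible with botocore's built-in Stubber, which fakes the list_objects_v2 error without any network access. The sketch below assumes the class in smoosense/utils/s3_fs.py is importable as S3FileSystem, which is not shown in this diff:

# Hypothetical, deterministic complement to the live-S3 tests above: botocore's
# Stubber injects an AccessDenied ClientError locally, so no real bucket or
# credentials are needed. "S3FileSystem" is an assumed class name; this diff
# only shows the class's methods, not its name.
import unittest

import boto3
from botocore.stub import Stubber

from smoosense.exceptions import AccessDeniedException
from smoosense.utils.s3_fs import S3FileSystem


class TestAccessDeniedMapping(unittest.TestCase):
    def test_access_denied_is_translated(self):
        client = boto3.client(
            "s3",
            region_name="us-east-1",
            aws_access_key_id="testing",        # fake credentials so request signing succeeds
            aws_secret_access_key="testing",
        )
        with Stubber(client) as stubber:
            # Make the first list_objects_v2 call fail with an AccessDenied error code
            stubber.add_client_error(
                "list_objects_v2",
                service_error_code="AccessDenied",
                service_message="Access Denied",
                http_status_code=403,
            )
            fs = S3FileSystem(client)
            with self.assertRaises(AccessDeniedException):
                fs.list_one_level("s3://some-private-bucket/")


if __name__ == "__main__":
    unittest.main()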
