
Commit b479c50

fix 653 (#655)
1 parent 5ecc2ca commit b479c50

File tree

5 files changed (+26 / -6 lines changed)


.github/copilot-instructions.md

Lines changed: 1 addition & 1 deletion
@@ -17,7 +17,7 @@ Bootstrap, build, and test the repository:
 - `hatch run functional:all` – end-to-end functional (SQLite + PostgreSQL live scripts) (≈10–15s) **NEVER CANCEL.**
 - `hatch run functional:sqlite --all` – only SQLite functional cycle
 - `hatch run functional:postgres --all` – only PostgreSQL functional cycle
-- `hatch run lint:check` – lint (ruff) (≈5s)
+- `hatch fmt --check` – lint (ruff) (≈5s)
 - `hatch run docs:build` – build documentation (≈2s, strict)
 - `hatch run docs:serve` – local docs server (http://localhost:8000)
 - `hatch run docs:linkcheck` – validate internal/external links & spelling

CHANGELOG.md

Lines changed: 3 additions & 1 deletion
@@ -14,7 +14,9 @@ Don't forget to remove deprecated code on each major release!
 
 ## [Unreleased]
 
-- Nothing (yet)!
+### Fixed
+
+- Ensure `dbbackup` metadata file is always written as bytes to support storage backends that enforce bytes content (e.g. Google Cloud Storage).
 
 ## [5.1.0] - 2025-12-17
 

dbbackup/management/commands/dbbackup.py

Lines changed: 1 addition & 1 deletion
@@ -108,7 +108,7 @@ def _save_metadata(self, filename, local=False):
             with open(metadata_filename, "w") as fd:
                 fd.write(metadata_content)
         else:
-            metadata_file = ContentFile(metadata_content)
+            metadata_file = ContentFile(metadata_content.encode("utf-8"))
             self.write_to_storage(metadata_file, metadata_filename)
 
     def _save_new_backup(self, database):
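
The one-line change matters because Django's `ContentFile` preserves the type of the data it wraps: built from a `str` it reads back text, built from `bytes` it reads back bytes, and backends that enforce bytes content (the changelog names Google Cloud Storage) only accept the latter. A minimal sketch of that distinction, separate from the django-dbbackup code and using a hypothetical metadata payload:

# Sketch only: illustrates why metadata_content.encode("utf-8") is needed.
from django.core.files.base import ContentFile

metadata_content = '{"database": "default"}'  # hypothetical payload, not the real metadata format

text_file = ContentFile(metadata_content)                   # wraps a str; read() returns str
bytes_file = ContentFile(metadata_content.encode("utf-8"))  # wraps bytes; read() returns bytes

assert isinstance(text_file.read(), str)
assert isinstance(bytes_file.read(), bytes)  # the form a bytes-only storage backend can accept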

tests/commands/test_dbbackup.py

Lines changed: 16 additions & 0 deletions
@@ -80,6 +80,22 @@ def test_schema(self):
 
         assert result is None
 
+    def test_metadata_is_bytes(self):
+        """Test that metadata content is passed as bytes to storage."""
+        self.command._save_new_backup(TEST_DATABASE)
+
+        # Find the metadata file in HANDLED_FILES
+        # HANDLED_FILES["written_files"] contains tuples (name, file_object)
+        metadata_file_entry = next((f for f in HANDLED_FILES["written_files"] if f[0].endswith(".metadata")), None)
+        assert metadata_file_entry is not None
+
+        metadata_file = metadata_file_entry[1]
+        metadata_file.open()
+        content = metadata_file.read()
+
+        # Check if content is bytes
+        assert isinstance(content, bytes), f"Metadata content should be bytes, but got {type(content)}"
+
     @patch("dbbackup.management.commands._base.BaseDbBackupCommand.write_to_storage")
     def test_path_s3_uri(self, mock_write_to_storage):
         """Test that S3 URIs in output path are handled by write_to_storage instead of write_local_file."""

tests/commands/test_dbrestore_metadata.py

Lines changed: 5 additions & 3 deletions
@@ -189,9 +189,11 @@ def test_connector_fallback_interactive_no(self, mock_input, mock_get_connector)
         metadata = {"engine": settings.DATABASES["default"]["ENGINE"], "connector": "my.broken.Connector"}
         self.command.storage.read_file.return_value = Mock(read=lambda: json.dumps(metadata))
 
-        with patch("dbbackup.management.commands.dbrestore.import_module", side_effect=ImportError):
-            with pytest.raises(SystemExit):
-                self.command._restore_backup()
+        with (
+            patch("dbbackup.management.commands.dbrestore.import_module", side_effect=ImportError),
+            pytest.raises(SystemExit),
+        ):
+            self.command._restore_backup()
 
         # Verify input was called
         mock_input.assert_called()
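
The refactor in this last hunk is purely stylistic: since Python 3.10, multiple context managers can be grouped in a single parenthesized `with` statement instead of being nested. A small illustration of the equivalence, using only standard-library context managers rather than the test's mocks:

# Both forms behave identically; the parenthesized variant needs Python 3.10+.
from contextlib import suppress

# Nested form
with suppress(KeyError):
    with suppress(ValueError):
        {}["missing"]  # KeyError is swallowed by the outer manager

# Parenthesized, single-statement form
with (
    suppress(KeyError),
    suppress(ValueError),
):
    {}["missing"]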
