|
18 | 18 | from .models import Source, Media |
19 | 19 | from .tasks import cleanup_old_media |
20 | 20 | from .filtering import filter_media |
| 21 | +from .utils import filter_response |
21 | 22 |
|
22 | 23 |
|
23 | 24 | class FrontEndTestCase(TestCase): |
@@ -1709,6 +1710,84 @@ def test_is_regex_match(self): |
1709 | 1710 | f'expected {expected_match_result}') |
1710 | 1711 |
|
1711 | 1712 |
|
class ResponseFilteringTestCase(TestCase):
    """Tests for ``filter_response``, which strips bulky or transient keys
    (per-format URLs, HTTP headers, downloader options, caption URLs) from
    yt-dlp metadata before it is persisted, without losing any formats or
    top-level fields such as ``original_url``.
    """

    def setUp(self):
        # Disable general logging for the duration of this test case
        logging.disable(logging.CRITICAL)
        # Add a test source
        self.source = Source.objects.create(
            source_type=Source.SOURCE_TYPE_YOUTUBE_CHANNEL,
            key='testkey',
            name='testname',
            directory='testdirectory',
            index_schedule=3600,
            delete_old_media=False,
            days_to_keep=14,
            source_resolution=Source.SOURCE_RESOLUTION_1080P,
            source_vcodec=Source.SOURCE_VCODEC_VP9,
            source_acodec=Source.SOURCE_ACODEC_OPUS,
            prefer_60fps=False,
            prefer_hdr=False,
            fallback=Source.FALLBACK_FAIL
        )
        # Add some media attached to the test source
        self.media = Media.objects.create(
            key='mediakey',
            source=self.source,
            metadata='{}'
        )

    def tearDown(self):
        # logging.disable() is process-global; restore it so later test
        # cases in the same run are not silently muted.
        logging.disable(logging.NOTSET)

    @staticmethod
    def _url_keys_in_formats(formats):
        """Return (format_id, key, value) for every format key containing
        the substring 'url'. Used to count how many URL-ish keys survive
        filtering."""
        found = []
        # 'fmt' rather than 'format' to avoid shadowing the builtin
        for fmt in formats:
            for key in fmt.keys():
                if 'url' in key:
                    found.append((fmt['format_id'], key, fmt[key],))
        return found

    def test_metadata_20230629(self):
        self.media.metadata = all_test_metadata['20230629']
        self.media.save()

        unfiltered = self.media.loaded_metadata
        filtered = filter_response(self.media.loaded_metadata)
        self.assertIn('formats', unfiltered.keys())
        self.assertIn('formats', filtered.keys())
        # filtered 'downloader_options'
        self.assertIn('downloader_options', unfiltered['formats'][10].keys())
        self.assertNotIn('downloader_options', filtered['formats'][10].keys())
        # filtered 'http_headers'
        self.assertIn('http_headers', unfiltered['formats'][0].keys())
        self.assertNotIn('http_headers', filtered['formats'][0].keys())
        # did not lose any formats
        self.assertEqual(48, len(unfiltered['formats']))
        self.assertEqual(48, len(filtered['formats']))
        self.assertEqual(len(unfiltered['formats']), len(filtered['formats']))
        # did not remove everything with url
        self.assertIn('original_url', unfiltered.keys())
        self.assertIn('original_url', filtered.keys())
        self.assertEqual(unfiltered['original_url'], filtered['original_url'])
        # did reduce the size of the metadata
        self.assertTrue(len(str(filtered)) < len(str(unfiltered)))

        # most URL-bearing keys are removed from formats by filtering
        unfiltered_url_keys = self._url_keys_in_formats(unfiltered['formats'])
        self.assertEqual(63, len(unfiltered_url_keys), msg=str(unfiltered_url_keys))

        filtered_url_keys = self._url_keys_in_formats(filtered['formats'])
        self.assertEqual(3, len(filtered_url_keys), msg=str(filtered_url_keys))

        # automatic captions lose all of their URL keys
        url_keys = []
        for lang_code, captions in filtered['automatic_captions'].items():
            for caption in captions:
                for key in caption.keys():
                    if 'url' in key:
                        url_keys.append((lang_code, caption['ext'], caption[key],))
        self.assertEqual(0, len(url_keys), msg=str(url_keys))
| 1790 | + |
1712 | 1791 | class TasksTestCase(TestCase): |
1713 | 1792 |
|
1714 | 1793 | def setUp(self): |
|
0 commit comments