@@ -1470,272 +1470,6 @@ def test_exception_captured_by_sentry(self) -> None:
         assert len(events) == 1
         assert events[0]["exception"]["values"][0]["type"] == "ZeroDivisionError"
 
-    def test_split_query(self, request: pytest.FixtureRequest) -> None:
-        state.set_config("use_split", 1)
-        request.addfinalizer(lambda: state.set_config("use_split", 0))
-        state.set_config("split_step", 3600)  # first batch will be 1 hour
-
-        # Test getting the last 150 events, should happen in 2 batches
-        result = json.loads(
-            self.post(
-                json.dumps(
-                    {
-                        "project": 1,
-                        "tenant_ids": {"referrer": "r", "organization_id": 1234},
-                        "from_date": self.base_time.isoformat(),
-                        "to_date": (self.base_time + timedelta(minutes=self.minutes)).isoformat(),
-                        "orderby": "-timestamp",
-                        "selected_columns": ["tags[sentry:release]", "timestamp"],
-                        "limit": 150,
-                    }
-                ),
-            ).data
-        )
-        assert [d["tags[sentry:release]"] for d in result["data"]] == list(
-            map(str, reversed(range(30, 180)))
-        )
-
-        # Test getting the last 150 events, offset by 10
-        result = json.loads(
-            self.post(
-                json.dumps(
-                    {
-                        "project": 1,
-                        "tenant_ids": {"referrer": "r", "organization_id": 1234},
-                        "from_date": self.base_time.isoformat(),
-                        "to_date": (self.base_time + timedelta(minutes=self.minutes)).isoformat(),
-                        "orderby": "-timestamp",
-                        "selected_columns": ["tags[sentry:release]", "timestamp"],
-                        "limit": 150,
-                        "offset": 10,
-                    }
-                ),
-            ).data
-        )
-        assert [d["tags[sentry:release]"] for d in result["data"]] == list(
-            map(str, reversed(range(20, 170)))
-        )
-
-        # Test asking for more events than there are
-        result = json.loads(
-            self.post(
-                json.dumps(
-                    {
-                        "project": 1,
-                        "tenant_ids": {"referrer": "r", "organization_id": 1234},
-                        "from_date": self.base_time.isoformat(),
-                        "to_date": (self.base_time + timedelta(minutes=self.minutes)).isoformat(),
-                        "orderby": "-timestamp",
-                        "selected_columns": ["tags[sentry:release]", "timestamp"],
-                        "limit": 200,
-                    }
-                ),
-            ).data
-        )
-        assert [d["tags[sentry:release]"] for d in result["data"]] == list(
-            map(str, reversed(range(0, 180)))
-        )
-
-        # Test offset by more events than there are
-        result = json.loads(
-            self.post(
-                json.dumps(
-                    {
-                        "project": 1,
-                        "tenant_ids": {"referrer": "r", "organization_id": 1234},
-                        "from_date": self.base_time.isoformat(),
-                        "to_date": (self.base_time + timedelta(minutes=self.minutes)).isoformat(),
-                        "orderby": "-timestamp",
-                        "selected_columns": ["tags[sentry:release]", "timestamp"],
-                        "limit": 10,
-                        "offset": 180,
-                    }
-                ),
-            ).data
-        )
-        assert result["data"] == []
-
-        # Test offset that spans batches
-        result = json.loads(
-            self.post(
-                json.dumps(
-                    {
-                        "project": 1,
-                        "tenant_ids": {"referrer": "r", "organization_id": 1234},
-                        "from_date": self.base_time.isoformat(),
-                        "to_date": (self.base_time + timedelta(minutes=self.minutes)).isoformat(),
-                        "orderby": "-timestamp",
-                        "selected_columns": ["tags[sentry:release]", "timestamp"],
-                        "limit": 10,
-                        "offset": 55,
-                    }
-                ),
-            ).data
-        )
-        assert [d["tags[sentry:release]"] for d in result["data"]] == list(
-            map(str, reversed(range(115, 125)))
-        )
-
-        # Test offset by the size of the first batch retrieved. (the first batch will be discarded/trimmed)
-        result = json.loads(
-            self.post(
-                json.dumps(
-                    {
-                        "project": 1,
-                        "tenant_ids": {"referrer": "r", "organization_id": 1234},
-                        "from_date": self.base_time.isoformat(),
-                        "to_date": (self.base_time + timedelta(minutes=self.minutes)).isoformat(),
-                        "orderby": "-timestamp",
-                        "selected_columns": ["tags[sentry:release]", "timestamp"],
-                        "limit": 10,
-                        "offset": 60,
-                    }
-                ),
-            ).data
-        )
-        assert [d["tags[sentry:release]"] for d in result["data"]] == list(
-            map(str, reversed(range(110, 120)))
-        )
-
-        # Test condition that means 0 events will be returned
-        result = json.loads(
-            self.post(
-                json.dumps(
-                    {
-                        "project": 1,
-                        "tenant_ids": {"referrer": "r", "organization_id": 1234},
-                        "from_date": self.base_time.isoformat(),
-                        "to_date": (self.base_time + timedelta(minutes=self.minutes)).isoformat(),
-                        "orderby": "-timestamp",
-                        "selected_columns": ["tags[sentry:release]", "timestamp"],
-                        "conditions": [["message", "=", "doesnt exist"]],
-                        "limit": 10,
-                        "offset": 55,
-                    }
-                ),
-            ).data
-        )
-        assert result["data"] == []
-
-        # Test splitting query by columns - non timestamp sort
-        result = json.loads(
-            self.post(
-                json.dumps(
-                    {
-                        "project": 1,
-                        "tenant_ids": {"referrer": "r", "organization_id": 1234},
-                        "from_date": self.base_time.isoformat(),
-                        "to_date": (self.base_time + timedelta(minutes=59)).isoformat(),
-                        "orderby": "tags[sentry:release]",
-                        "selected_columns": [
-                            "event_id",
-                            "timestamp",
-                            "tags[sentry:release]",
-                            "tags[one]",
-                            "tags[two]",
-                            "tags[three]",
-                            "tags[four]",
-                            "tags[five]",
-                        ],
-                        "limit": 5,
-                    }
-                ),
-            ).data
-        )
-        # Alphabetical sort
-        assert [d["tags[sentry:release]"] for d in result["data"]] == [
-            "0",
-            "1",
-            "10",
-            "11",
-            "12",
-        ]
-
-        # Test splitting query by columns - timestamp sort
-        result = json.loads(
-            self.post(
-                json.dumps(
-                    {
-                        "project": 1,
-                        "tenant_ids": {"referrer": "r", "organization_id": 1234},
-                        "from_date": self.base_time.isoformat(),
-                        "to_date": (self.base_time + timedelta(minutes=59)).isoformat(),
-                        "orderby": "timestamp",
-                        "selected_columns": [
-                            "event_id",
-                            "timestamp",
-                            "tags[sentry:release]",
-                            "tags[one]",
-                            "tags[two]",
-                            "tags[three]",
-                            "tags[four]",
-                            "tags[five]",
-                        ],
-                        "limit": 5,
-                    }
-                ),
-            ).data
-        )
-        assert [d["tags[sentry:release]"] for d in result["data"]] == list(map(str, range(0, 5)))
-
-        result = json.loads(
-            self.post(
-                json.dumps(
-                    {
-                        "project": 1,
-                        "tenant_ids": {"referrer": "r", "organization_id": 1234},
-                        "from_date": (self.base_time - timedelta(days=100)).isoformat(),
-                        "to_date": (self.base_time - timedelta(days=99)).isoformat(),
-                        "orderby": "timestamp",
-                        "selected_columns": [
-                            "event_id",
-                            "timestamp",
-                            "tags[sentry:release]",
-                            "tags[one]",
-                            "tags[two]",
-                            "tags[three]",
-                            "tags[four]",
-                            "tags[five]",
-                        ],
-                        "limit": 5,
-                    }
-                ),
-            ).data
-        )
-        assert len(result["data"]) == 0
-
-        # Test offset
-        result = json.loads(
-            self.post(
-                json.dumps(
-                    {
-                        "project": 1,
-                        "tenant_ids": {"referrer": "r", "organization_id": 1234},
-                        "from_date": self.base_time.isoformat(),
-                        "to_date": (self.base_time + timedelta(minutes=self.minutes)).isoformat(),
-                        "orderby": "-timestamp",
-                        "selected_columns": [
-                            "event_id",
-                            "timestamp",
-                            "tags[sentry:release]",
-                            "tags[one]",
-                            "tags[two]",
-                            "tags[three]",
-                            "tags[four]",
-                            "tags[five]",
-                        ],
-                        "offset": 170,
-                        "limit": 170,
-                    }
-                ),
-            ).data
-        )
-
-        assert len(result["data"]) == 10
-        assert [e["tags[sentry:release]"] for e in result["data"]] == list(
-            map(str, reversed(range(0, 10)))
-        )
-
     def test_consistent(self) -> None:
         state.set_config("consistent_override", "test_override=0;another=0.5")
         state.set_config("read_through_cache.short_circuit", 1)