Skip to content

Commit 75e4794

Browse files
authored
feat: use sqlalchemy 1.4 instead of 2 (#8)
* feat: use sqlalchemy 1.4 instead of 2
* define default targets
1 parent 246a719 commit 75e4794

File tree

9 files changed

+77
-80
lines changed

9 files changed

+77
-80
lines changed

cloud2sql/__main__.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,8 @@
22

33
from resotolib.args import Namespace, ArgumentParser
44
from resotolib.logger import setup_logger
5-
from sqlalchemy import create_engine, Engine
5+
from sqlalchemy import create_engine
6+
from sqlalchemy.engine import Engine
67

78
from cloud2sql.collect_plugins import collect_from_plugins
89

cloud2sql/collect_plugins.py

Lines changed: 35 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@
2222
from resotolib.types import Json
2323
from rich import print as rich_print
2424
from rich.live import Live
25-
from sqlalchemy import Engine
25+
from sqlalchemy.engine import Engine
2626

2727
from cloud2sql.show_progress import CollectInfo
2828
from cloud2sql.sql import SqlModel, SqlUpdater
@@ -64,40 +64,41 @@ def collect(collector: BaseCollectorPlugin, engine: Engine, feedback: CoreFeedba
6464
kinds = [from_json(m, Kind) for m in collector.graph.export_model(walk_subclasses=False)]
6565
model = SqlModel(Model({k.fqn: k for k in kinds}))
6666
node_edge_count = len(collector.graph.nodes) + len(collector.graph.edges)
67-
ne_current = 0
68-
progress_update = 5000
69-
feedback.progress_done("sync_db", 0, node_edge_count, context=[collector.cloud])
67+
ne_count = iter(range(0, node_edge_count))
68+
progress_update = max(node_edge_count // 100, 50)
69+
schema = f"create temp tables {engine.dialect.name}"
70+
syncdb = f"synchronize {engine.dialect.name}"
71+
feedback.progress_done(schema, 0, 1, context=[collector.cloud])
72+
feedback.progress_done(syncdb, 0, node_edge_count, context=[collector.cloud])
7073
with engine.connect() as conn:
71-
# create the ddl metadata from the kinds
72-
model.create_schema(conn, args)
73-
# ingest the data
74-
updater = SqlUpdater(model)
75-
node: BaseResource
76-
for node in collector.graph.nodes:
77-
node._graph = collector.graph
78-
exported = node_to_dict(node)
79-
exported["type"] = "node"
80-
exported["ancestors"] = {
81-
"cloud": {"reported": {"id": node.cloud().name}},
82-
"account": {"reported": {"id": node.account().name}},
83-
"region": {"reported": {"id": node.region().name}},
84-
"zone": {"reported": {"id": node.zone().name}},
85-
}
86-
stmt = updater.insert_node(exported)
87-
if stmt is not None:
88-
conn.execute(stmt)
89-
ne_current += 1
90-
if ne_current % progress_update == 0:
91-
feedback.progress_done("sync_db", ne_current, node_edge_count, context=[collector.cloud])
92-
for from_node, to_node, _ in collector.graph.edges:
93-
stmt = updater.insert_node({"from": from_node.chksum, "to": to_node.chksum, "type": "edge"})
94-
if stmt is not None:
95-
conn.execute(stmt)
96-
ne_current += 1
97-
if ne_current % progress_update == 0:
98-
feedback.progress_done("sync_db", ne_current, node_edge_count, context=[collector.cloud])
99-
# commit all the changes to the tmp tables
100-
conn.commit()
74+
with conn.begin():
75+
# create the ddl metadata from the kinds
76+
model.create_schema(conn, args)
77+
feedback.progress_done(schema, 1, 1, context=[collector.cloud])
78+
# ingest the data
79+
updater = SqlUpdater(model)
80+
node: BaseResource
81+
for node in collector.graph.nodes:
82+
node._graph = collector.graph
83+
exported = node_to_dict(node)
84+
exported["type"] = "node"
85+
exported["ancestors"] = {
86+
"cloud": {"reported": {"id": node.cloud().name}},
87+
"account": {"reported": {"id": node.account().name}},
88+
"region": {"reported": {"id": node.region().name}},
89+
"zone": {"reported": {"id": node.zone().name}},
90+
}
91+
stmt = updater.insert_node(exported)
92+
if stmt is not None:
93+
conn.execute(stmt)
94+
if (nx := next(ne_count)) % progress_update == 0:
95+
feedback.progress_done(syncdb, nx, node_edge_count, context=[collector.cloud])
96+
for from_node, to_node, _ in collector.graph.edges:
97+
stmt = updater.insert_node({"from": from_node.chksum, "to": to_node.chksum, "type": "edge"})
98+
if stmt is not None:
99+
conn.execute(stmt)
100+
if (nx := next(ne_count)) % progress_update == 0:
101+
feedback.progress_done(syncdb, nx, node_edge_count, context=[collector.cloud])
101102
feedback.progress_done(collector.cloud, 1, 1)
102103

103104

cloud2sql/collect_resoto.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
from resotoclient import ResotoClient
22
from resotolib.args import Namespace
3-
from sqlalchemy import Engine
3+
from sqlalchemy.engine import Engine
44

55
from cloud2sql.sql import SqlModel, SqlUpdater
66

@@ -12,9 +12,9 @@ def collect_from_resoto(engine: Engine, args: Namespace) -> None:
1212
updater = SqlUpdater(model)
1313

1414
with engine.connect() as conn:
15-
model.create_schema(conn, args)
16-
for nd in client.search_graph("id(root) -[0:]->"):
17-
stmt = updater.insert_node(nd)
18-
if stmt is not None:
19-
conn.execute(stmt)
20-
conn.commit()
15+
with conn.begin():
16+
model.create_schema(conn, args)
17+
for nd in client.search_graph("id(root) -[0:]->"):
18+
stmt = updater.insert_node(nd)
19+
if stmt is not None:
20+
conn.execute(stmt)

cloud2sql/sql.py

Lines changed: 21 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -7,23 +7,11 @@
77
from resotolib.args import Namespace
88
from resotolib.baseresources import BaseResource
99
from resotolib.types import Json
10-
from sqlalchemy import (
11-
Boolean,
12-
Column,
13-
Connection,
14-
Double,
15-
Float,
16-
Insert,
17-
Integer,
18-
JSON,
19-
MetaData,
20-
String,
21-
Table,
22-
ValuesBase,
23-
DDL,
24-
Engine,
25-
)
10+
from sqlalchemy import Boolean, Column, Float, Integer, JSON, MetaData, String, Table, DDL
11+
from sqlalchemy.engine import Engine, Connection
12+
from sqlalchemy.sql import Insert
2613
from sqlalchemy.sql.ddl import DropTable, DropConstraint
14+
from sqlalchemy.sql.dml import ValuesBase
2715
from sqlalchemy.sql.type_api import TypeEngine
2816

2917
from cloud2sql.util import value_in_path
@@ -66,7 +54,7 @@ def column_type_from(kind: str) -> Type[TypeEngine[Any]]:
6654
elif kind in "float":
6755
return Float
6856
elif kind in "double":
69-
return Double
57+
return Float # use Double with sqlalchemy 2
7058
elif kind in ("string", "date", "datetime", "duration"):
7159
return String
7260
elif kind == "boolean":
@@ -159,22 +147,22 @@ def link_table_schema_from_successors(kind: Kind) -> None:
159147
@staticmethod
160148
def swap_temp_tables(engine: Engine) -> None:
161149
with engine.connect() as connection:
162-
metadata = MetaData()
163-
metadata.reflect(connection, resolve_fks=False)
164-
165-
def drop_table(tl: Table) -> None:
166-
for cs in tl.foreign_key_constraints:
167-
connection.execute(DropConstraint(cs)) # type: ignore
168-
connection.execute(DropTable(tl))
169-
170-
for table in metadata.tables.values():
171-
if table.name.startswith(temp_prefix):
172-
prod_table = table.name[len(temp_prefix) :] # noqa: E203
173-
if prod_table in metadata.tables:
174-
drop_table(metadata.tables[prod_table])
175-
connection.execute(DDL(f"ALTER TABLE {table.name} RENAME TO {prod_table}")) # type: ignore
176-
# todo: create foreign key constraints on the final tables
177-
connection.commit()
150+
with connection.begin():
151+
metadata = MetaData()
152+
metadata.reflect(connection, resolve_fks=False)
153+
154+
def drop_table(tl: Table) -> None:
155+
for cs in tl.foreign_key_constraints:
156+
connection.execute(DropConstraint(cs)) # type: ignore
157+
connection.execute(DropTable(tl))
158+
159+
for table in metadata.tables.values():
160+
if table.name.startswith(temp_prefix):
161+
prod_table = table.name[len(temp_prefix) :] # noqa: E203
162+
if prod_table in metadata.tables:
163+
drop_table(metadata.tables[prod_table])
164+
connection.execute(DDL(f"ALTER TABLE {table.name} RENAME TO {prod_table}")) # type: ignore
165+
# todo: create foreign key constraints on the final tables
178166

179167

180168
class SqlUpdater:

requirements.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
# Only list production dependencies. Everything else can be added to requirements-test.txt and requirements-dev.txt.
22

33
# required for cloud2sql to run
4-
SQLAlchemy==2.0.0b4
4+
SQLAlchemy==1.4.45
55
PyYAML>=6.0
66
rich>=12.6.0
77
resotoclient>=1.2.1

setup.cfg

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,7 @@
11
[flake8]
22
max-line-length = 120
33
exclude = .git,.tox,__pycache__,.idea,.pytest_cache,docs
4-
application-import-names = resotocore tests
5-
ignore = E1131, N818, W503
4+
application-import-names = cloud2sql tests
65

76
[tool:pytest]
87
log_cli=true

tests/conftest.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
from resotolib.args import Namespace
77
from resotolib.core.actions import CoreFeedback
88
from resotolib.types import Json
9-
from sqlalchemy import create_engine, Engine
9+
from sqlalchemy.engine import create_engine, Engine
1010

1111
from cloud2sql.sql import SqlModel, SqlUpdater
1212

tests/sql_test.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
from resotoclient.models import Model
22
from resotolib.args import Namespace
3-
from sqlalchemy import Engine, MetaData
3+
from sqlalchemy import MetaData
4+
from sqlalchemy.engine import Engine
45
from sqlalchemy.orm import Session
56

67
from cloud2sql.sql import SqlModel, SqlUpdater

tox.ini

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,10 @@
1+
[tox]
2+
env_list =
3+
tests
4+
syntax
5+
black
6+
mypy
7+
18
[pytest]
29
testpaths= tests
310

0 commit comments

Comments (0)