63
63
pytest .mark .single_cpu ,
64
64
]
65
65
66
+
66
67
def table_uuid_gen(prefix: str) -> str:
    """Return a unique table name: *prefix*, an underscore, and a random UUID4 hex."""
    unique_suffix = uuid.uuid4().hex
    return prefix + "_" + unique_suffix
@@ -1370,9 +1371,7 @@ def insert_on_conflict(table, conn, keys, data_iter):
1370
1371
conn .execute (create_sql )
1371
1372
1372
1373
expected = DataFrame ([[1 , 2.1 , "a" ]], columns = list ("abc" ))
1373
- expected .to_sql (
1374
- name = table_uuid , con = conn , if_exists = "append" , index = False
1375
- )
1374
+ expected .to_sql (name = table_uuid , con = conn , if_exists = "append" , index = False )
1376
1375
1377
1376
df_insert = DataFrame ([[1 , 3.2 , "b" ]], columns = list ("abc" ))
1378
1377
inserted = df_insert .to_sql (
@@ -2029,7 +2028,7 @@ def test_api_to_sql_index_label_multiindex(conn, request):
2029
2028
# no index name, defaults to 'level_0' and 'level_1'
2030
2029
result = sql .to_sql (temp_frame , table_uuid , conn )
2031
2030
assert result == expected_row_count
2032
- frame = sql .read_sql_query (f "SELECT * FROM table_uuid" , conn )
2031
+ frame = sql .read_sql_query ("SELECT * FROM table_uuid" , conn )
2033
2032
assert frame .columns [0 ] == "level_0"
2034
2033
assert frame .columns [1 ] == "level_1"
2035
2034
@@ -2061,7 +2060,7 @@ def test_api_to_sql_index_label_multiindex(conn, request):
2061
2060
index_label = ["C" , "D" ],
2062
2061
)
2063
2062
assert result == expected_row_count
2064
- frame = sql .read_sql_query (f "SELECT * FROM table_uuid" , conn )
2063
+ frame = sql .read_sql_query ("SELECT * FROM table_uuid" , conn )
2065
2064
assert frame .columns [:2 ].tolist () == ["C" , "D" ]
2066
2065
2067
2066
msg = "Length of 'index_label' should match number of levels, which is 2"
@@ -2562,7 +2561,9 @@ def test_database_uri_string(conn, request, test_frame1):
2562
2561
with tm .ensure_clean () as name :
2563
2562
db_uri = "sqlite:///" + name
2564
2563
table_uuid = table_uuid_gen ("iris" )
2565
- test_frame1 .to_sql (name = table_uuid , con = db_uri , if_exists = "replace" , index = False )
2564
+ test_frame1 .to_sql (
2565
+ name = table_uuid , con = db_uri , if_exists = "replace" , index = False
2566
+ )
2566
2567
test_frame2 = sql .read_sql (table_uuid , db_uri )
2567
2568
test_frame3 = sql .read_sql_table (table_uuid , db_uri )
2568
2569
query = f"SELECT * FROM { table_uuid } "
@@ -3318,7 +3319,7 @@ def test_dtype(conn, request):
3318
3319
df = DataFrame (data , columns = cols )
3319
3320
3320
3321
table_uuid1 = table_uuid_gen ("dtype_test" )
3321
- table_uuid2 = table_uuid_gen ("dtype_test2" )
3322
+ table_uuid2 = table_uuid_gen ("dtype_test2" )
3322
3323
table_uuid3 = table_uuid_gen ("dtype_test3" )
3323
3324
table_uuid_single = table_uuid_gen ("single_dtype_test" )
3324
3325
error_table = table_uuid_gen ("error" )
@@ -3470,8 +3471,7 @@ def main(connectable):
3470
3471
test_connectable (connectable )
3471
3472
3472
3473
assert (
3473
- DataFrame ({"test_foo_data" : [0 , 1 , 2 ]}).to_sql (name = table_uuid , con = conn )
3474
- == 3
3474
+ DataFrame ({"test_foo_data" : [0 , 1 , 2 ]}).to_sql (name = table_uuid , con = conn ) == 3
3475
3475
)
3476
3476
main (conn )
3477
3477
@@ -3900,8 +3900,7 @@ class Test(BaseModel):
3900
3900
with Session () as session :
3901
3901
df = DataFrame ({"id" : [0 , 1 ], "string_column" : ["hello" , "world" ]})
3902
3902
assert (
3903
- df .to_sql (name = table_uuid , con = conn , index = False , if_exists = "replace" )
3904
- == 2
3903
+ df .to_sql (name = table_uuid , con = conn , index = False , if_exists = "replace" ) == 2
3905
3904
)
3906
3905
session .commit ()
3907
3906
test_query = session .query (Test .id , Test .string_column )
@@ -3986,9 +3985,7 @@ def test_psycopg2_schema_support(postgresql_psycopg2_engine):
3986
3985
)
3987
3986
== 2
3988
3987
)
3989
- assert (
3990
- df .to_sql (name = schema_other_uuid , con = conn , index = False , schema = "other" ) == 2
3991
- )
3988
+ assert df .to_sql (name = schema_other_uuid , con = conn , index = False , schema = "other" ) == 2
3992
3989
3993
3990
# read dataframes back in
3994
3991
res1 = sql .read_sql_table (schema_public_uuid , conn )
@@ -4012,9 +4009,7 @@ def test_psycopg2_schema_support(postgresql_psycopg2_engine):
4012
4009
con .exec_driver_sql ("CREATE SCHEMA other;" )
4013
4010
4014
4011
# write dataframe with different if_exists options
4015
- assert (
4016
- df .to_sql (name = schema_other_uuid , con = conn , schema = "other" , index = False ) == 2
4017
- )
4012
+ assert df .to_sql (name = schema_other_uuid , con = conn , schema = "other" , index = False ) == 2
4018
4013
df .to_sql (
4019
4014
name = schema_other_uuid ,
4020
4015
con = conn ,
@@ -4042,27 +4037,25 @@ def test_self_join_date_columns(postgresql_psycopg2_engine):
4042
4037
conn = postgresql_psycopg2_engine
4043
4038
from sqlalchemy .sql import text
4044
4039
4045
- table_uuid = table_uuid_gen ("person" )
4040
+ tb = table_uuid_gen ("person" )
4046
4041
4047
4042
create_table = text (
4048
4043
f"""
4049
- CREATE TABLE { table_uuid }
4044
+ CREATE TABLE { tb }
4050
4045
(
4051
- id serial constraint { table_uuid } _pkey primary key,
4046
+ id serial constraint { tb } _pkey primary key,
4052
4047
created_dt timestamp with time zone
4053
4048
);
4054
4049
4055
- INSERT INTO { table_uuid }
4050
+ INSERT INTO { tb }
4056
4051
VALUES (1, '2021-01-01T00:00:00Z');
4057
4052
"""
4058
4053
)
4059
4054
with conn .connect () as con :
4060
4055
with con .begin ():
4061
4056
con .execute (create_table )
4062
4057
4063
- sql_query = (
4064
- f'SELECT * FROM "{ table_uuid } " AS p1 INNER JOIN "{ table_uuid } " AS p2 ON p1.id = p2.id;'
4065
- )
4058
+ sql_query = f'SELECT * FROM "{ tb } " AS p1 INNER JOIN "{ tb } " AS p2 ON p1.id = p2.id;'
4066
4059
result = pd .read_sql (sql_query , conn )
4067
4060
expected = DataFrame (
4068
4061
[[1 , Timestamp ("2021" , tz = "UTC" )] * 2 ], columns = ["id" , "created_dt" ] * 2
@@ -4072,7 +4065,7 @@ def test_self_join_date_columns(postgresql_psycopg2_engine):
4072
4065
4073
4066
# Cleanup
4074
4067
with sql .SQLDatabase (conn , need_transaction = True ) as pandasSQL :
4075
- pandasSQL .drop_table (table_uuid )
4068
+ pandasSQL .drop_table (tb )
4076
4069
4077
4070
4078
4071
def test_create_and_drop_table (sqlite_engine ):
@@ -4258,7 +4251,9 @@ def test_xsqlite_basic(sqlite_buildin):
4258
4251
new_idx = Index (np .arange (len (frame2 )), dtype = np .int64 ) + 10
4259
4252
frame2 ["Idx" ] = new_idx .copy ()
4260
4253
assert sql .to_sql (frame2 , name = table_uuid2 , con = sqlite_buildin , index = False ) == 10
4261
- result = sql .read_sql (f"select * from { table_uuid2 } " , sqlite_buildin , index_col = "Idx" )
4254
+ result = sql .read_sql (
4255
+ f"select * from { table_uuid2 } " , sqlite_buildin , index_col = "Idx"
4256
+ )
4262
4257
expected = frame .copy ()
4263
4258
expected .index = new_idx
4264
4259
expected .index .name = "Idx"
@@ -4271,19 +4266,20 @@ def test_xsqlite_write_row_by_row(sqlite_buildin):
4271
4266
columns = Index (list ("ABCD" )),
4272
4267
index = date_range ("2000-01-01" , periods = 10 , freq = "B" ),
4273
4268
)
4269
+ table_uuid = table_uuid_gen ("test" )
4274
4270
frame .iloc [0 , 0 ] = np .nan
4275
- create_sql = sql .get_schema (frame , "test" )
4271
+ create_sql = sql .get_schema (frame , table_uuid )
4276
4272
cur = sqlite_buildin .cursor ()
4277
4273
cur .execute (create_sql )
4278
4274
4279
- ins = "INSERT INTO test VALUES (%s, %s, %s, %s)"
4275
+ ins = f "INSERT INTO { table_uuid } VALUES (%s, %s, %s, %s)"
4280
4276
for _ , row in frame .iterrows ():
4281
4277
fmt_sql = format_query (ins , * row )
4282
4278
tquery (fmt_sql , con = sqlite_buildin )
4283
4279
4284
4280
sqlite_buildin .commit ()
4285
4281
4286
- result = sql .read_sql ("select * from test " , con = sqlite_buildin )
4282
+ result = sql .read_sql (f "select * from { table_uuid } " , con = sqlite_buildin )
4287
4283
result .index = frame .index
4288
4284
tm .assert_frame_equal (result , frame , rtol = 1e-3 )
4289
4285
@@ -4294,17 +4290,18 @@ def test_xsqlite_execute(sqlite_buildin):
4294
4290
columns = Index (list ("ABCD" )),
4295
4291
index = date_range ("2000-01-01" , periods = 10 , freq = "B" ),
4296
4292
)
4297
- create_sql = sql .get_schema (frame , "test" )
4293
+ table_uuid = table_uuid_gen ("test" )
4294
+ create_sql = sql .get_schema (frame , table_uuid )
4298
4295
cur = sqlite_buildin .cursor ()
4299
4296
cur .execute (create_sql )
4300
- ins = "INSERT INTO test VALUES (?, ?, ?, ?)"
4297
+ ins = f "INSERT INTO { table_uuid } VALUES (?, ?, ?, ?)"
4301
4298
4302
4299
row = frame .iloc [0 ]
4303
4300
with sql .pandasSQL_builder (sqlite_buildin ) as pandas_sql :
4304
4301
pandas_sql .execute (ins , tuple (row ))
4305
4302
sqlite_buildin .commit ()
4306
4303
4307
- result = sql .read_sql ("select * from test " , sqlite_buildin )
4304
+ result = sql .read_sql (f "select * from { table_uuid } " , sqlite_buildin )
4308
4305
result .index = frame .index [:1 ]
4309
4306
tm .assert_frame_equal (result , frame [:1 ])
4310
4307
@@ -4315,23 +4312,25 @@ def test_xsqlite_schema(sqlite_buildin):
4315
4312
columns = Index (list ("ABCD" )),
4316
4313
index = date_range ("2000-01-01" , periods = 10 , freq = "B" ),
4317
4314
)
4318
- create_sql = sql .get_schema (frame , "test" )
4315
+ table_uuid = table_uuid_gen ("test" )
4316
+ create_sql = sql .get_schema (frame , table_uuid )
4319
4317
lines = create_sql .splitlines ()
4320
4318
for line in lines :
4321
4319
tokens = line .split (" " )
4322
4320
if len (tokens ) == 2 and tokens [0 ] == "A" :
4323
4321
assert tokens [1 ] == "DATETIME"
4324
4322
4325
- create_sql = sql .get_schema (frame , "test" , keys = ["A" , "B" ])
4323
+ create_sql = sql .get_schema (frame , table_uuid , keys = ["A" , "B" ])
4326
4324
lines = create_sql .splitlines ()
4327
4325
assert 'PRIMARY KEY ("A", "B")' in create_sql
4328
4326
cur = sqlite_buildin .cursor ()
4329
4327
cur .execute (create_sql )
4330
4328
4331
4329
4332
4330
def test_xsqlite_execute_fail (sqlite_buildin ):
4333
- create_sql = """
4334
- CREATE TABLE test
4331
+ table_uuid = table_uuid_gen ("test" )
4332
+ create_sql = f"""
4333
+ CREATE TABLE { table_uuid }
4335
4334
(
4336
4335
a TEXT,
4337
4336
b TEXT,
@@ -4343,16 +4342,17 @@ def test_xsqlite_execute_fail(sqlite_buildin):
4343
4342
cur .execute (create_sql )
4344
4343
4345
4344
with sql .pandasSQL_builder (sqlite_buildin ) as pandas_sql :
4346
- pandas_sql .execute ('INSERT INTO test VALUES("foo", "bar", 1.234)' )
4347
- pandas_sql .execute ('INSERT INTO test VALUES("foo", "baz", 2.567)' )
4345
+ pandas_sql .execute (f 'INSERT INTO { table_uuid } VALUES("foo", "bar", 1.234)' )
4346
+ pandas_sql .execute (f 'INSERT INTO { table_uuid } VALUES("foo", "baz", 2.567)' )
4348
4347
4349
4348
with pytest .raises (sql .DatabaseError , match = "Execution failed on sql" ):
4350
- pandas_sql .execute ('INSERT INTO test VALUES("foo", "bar", 7)' )
4349
+ pandas_sql .execute (f 'INSERT INTO { table_uuid } VALUES("foo", "bar", 7)' )
4351
4350
4352
4351
4353
4352
def test_xsqlite_execute_closed_connection ():
4354
- create_sql = """
4355
- CREATE TABLE test
4353
+ table_uuid = table_uuid_gen ("test" )
4354
+ create_sql = f"""
4355
+ CREATE TABLE { table_uuid }
4356
4356
(
4357
4357
a TEXT,
4358
4358
b TEXT,
@@ -4365,38 +4365,39 @@ def test_xsqlite_execute_closed_connection():
4365
4365
cur .execute (create_sql )
4366
4366
4367
4367
with sql .pandasSQL_builder (conn ) as pandas_sql :
4368
- pandas_sql .execute ('INSERT INTO test VALUES("foo", "bar", 1.234)' )
4368
+ pandas_sql .execute (f 'INSERT INTO { table_uuid } VALUES("foo", "bar", 1.234)' )
4369
4369
4370
4370
msg = "Cannot operate on a closed database."
4371
4371
with pytest .raises (sqlite3 .ProgrammingError , match = msg ):
4372
- tquery ("select * from test " , con = conn )
4372
+ tquery (f "select * from { table_uuid } " , con = conn )
4373
4373
4374
4374
4375
4375
def test_xsqlite_keyword_as_column_names (sqlite_buildin ):
4376
+ table_uuid = table_uuid_gen ("testkeywords" )
4376
4377
df = DataFrame ({"From" : np .ones (5 )})
4377
- assert sql .to_sql (df , con = sqlite_buildin , name = "testkeywords" , index = False ) == 5
4378
+ assert sql .to_sql (df , con = sqlite_buildin , name = table_uuid , index = False ) == 5
4378
4379
4379
4380
4380
4381
def test_xsqlite_onecolumn_of_integer (sqlite_buildin ):
4381
4382
# GH 3628
4382
4383
# a column_of_integers dataframe should transfer well to sql
4383
-
4384
+ table_uuid = table_uuid_gen ( "mono_df" )
4384
4385
mono_df = DataFrame ([1 , 2 ], columns = ["c0" ])
4385
- assert sql .to_sql (mono_df , con = sqlite_buildin , name = "mono_df" , index = False ) == 2
4386
+ assert sql .to_sql (mono_df , con = sqlite_buildin , name = table_uuid , index = False ) == 2
4386
4387
# computing the sum via sql
4387
4388
con_x = sqlite_buildin
4388
- the_sum = sum (my_c0 [0 ] for my_c0 in con_x .execute ("select * from mono_df " ))
4389
+ the_sum = sum (my_c0 [0 ] for my_c0 in con_x .execute (f "select * from { table_uuid } " ))
4389
4390
# it should not fail, and gives 3 ( Issue #3628 )
4390
4391
assert the_sum == 3
4391
4392
4392
- result = sql .read_sql ("select * from mono_df " , con_x )
4393
+ result = sql .read_sql (f "select * from { table_uuid } " , con_x )
4393
4394
tm .assert_frame_equal (result , mono_df )
4394
4395
4395
4396
4396
4397
def test_xsqlite_if_exists (sqlite_buildin ):
4397
4398
df_if_exists_1 = DataFrame ({"col1" : [1 , 2 ], "col2" : ["A" , "B" ]})
4398
4399
df_if_exists_2 = DataFrame ({"col1" : [3 , 4 , 5 ], "col2" : ["C" , "D" , "E" ]})
4399
- table_name = "table_if_exists"
4400
+ table_name = table_uuid_gen ( "table_if_exists" )
4400
4401
sql_select = f"SELECT * FROM { table_name } "
4401
4402
4402
4403
msg = "'notvalidvalue' is not valid for if_exists"
0 commit comments