@@ -109,19 +109,36 @@ async def copy(iter: T, table: tables, conn: asyncpg.Connection) -> None:
109109 """Directly use copy to load data."""
110110 bytes_iter = aiter (iter )
111111 async with conn .transaction ():
112- await conn .copy_to_table (
113- table ,
114- source = bytes_iter ,
115- columns = ["content" ],
116- format = "csv" ,
117- quote = chr (27 ),
118- delimiter = chr (31 ),
119- )
120- await conn .execute (
112+ if table == "collections" :
113+ await conn .execute (
114+ """
115+ CREATE TEMP TABLE pgstactemp (content jsonb)
116+ ON COMMIT DROP;
121117 """
122- SELECT backfill_partitions();
123- """
124- )
118+ )
119+ await conn .copy_to_table (
120+ "pgstactemp" ,
121+ source = bytes_iter ,
122+ columns = ["content" ],
123+ format = "csv" ,
124+ quote = chr (27 ),
125+ delimiter = chr (31 ),
126+ )
127+ await conn .execute (
128+ """
129+ INSERT INTO collections (content)
130+ SELECT content FROM pgstactemp;
131+ """
132+ )
133+ if table == "items" :
134+ await conn .copy_to_table (
135+ "items_staging" ,
136+ source = bytes_iter ,
137+ columns = ["content" ],
138+ format = "csv" ,
139+ quote = chr (27 ),
140+ delimiter = chr (31 ),
141+ )
125142
126143
127144async def copy_ignore_duplicates (
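Note that every COPY call in the new code keeps the same framing as the code it replaces: format="csv" with chr(31) (the ASCII unit separator) as the delimiter and chr(27) (ESC) as the quote character, so each JSON document travels as a single column that never needs CSV escaping. A minimal sketch of a compatible producer for the bytes_iter side (the dicts_to_bytes name is illustrative, not part of this commit):

import json
from typing import Any, AsyncIterator, Dict, Iterable


async def dicts_to_bytes(records: Iterable[Dict[str, Any]]) -> AsyncIterator[bytes]:
    """Yield one JSON document per line in the single-column CSV framing above."""
    for record in records:
        # json.dumps escapes all control characters, so chr(27)/chr(31)
        # can never appear in the payload and no CSV quoting is required.
        yield json.dumps(record).encode("utf-8") + b"\n"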
@@ -130,56 +147,58 @@ async def copy_ignore_duplicates(
130147 """Load data first into a temp table to ignore duplicates."""
131148 bytes_iter = aiter (iter )
132149 async with conn .transaction ():
133- await conn .execute (
150+ if table == "collections" :
151+ await conn .execute (
152+ """
153+ CREATE TEMP TABLE pgstactemp (content jsonb)
154+ ON COMMIT DROP;
134155 """
135- CREATE TEMP TABLE pgstactemp (content jsonb)
136- ON COMMIT DROP;
137- """
138- )
139- await conn .copy_to_table (
140- "pgstactemp" ,
141- source = bytes_iter ,
142- columns = ["content" ],
143- format = "csv" ,
144- quote = chr (27 ),
145- delimiter = chr (31 ),
146- )
147- await conn .execute (
156+ )
157+ await conn .copy_to_table (
158+ "pgstactemp" ,
159+ source = bytes_iter ,
160+ columns = ["content" ],
161+ format = "csv" ,
162+ quote = chr (27 ),
163+ delimiter = chr (31 ),
164+ )
165+ await conn .execute (
166+ """
167+ INSERT INTO collections (content)
168+ SELECT content FROM pgstactemp
169+ ON CONFLICT DO NOTHING;
148170 """
149- SELECT make_partitions(
150- min((content->>'datetime')::timestamptz),
151- max((content->>'datetime')::timestamptz)
152- ) FROM pgstactemp;
153- """
154- )
155- await conn .execute (
156- f"""
157- INSERT INTO { table } (content)
158- SELECT content FROM pgstactemp
159- ON CONFLICT DO NOTHING;
160- """
161- )
171+ )
172+ if table == "items" :
173+ await conn .copy_to_table (
174+ "items_staging_ignore" ,
175+ source = bytes_iter ,
176+ columns = ["content" ],
177+ format = "csv" ,
178+ quote = chr (27 ),
179+ delimiter = chr (31 ),
180+ )
162181
163182
 async def copy_upsert(iter: T, table: tables, conn: asyncpg.Connection) -> None:
     """Insert data into a temp table to be able to merge data."""
     bytes_iter = aiter(iter)
     async with conn.transaction():
-        await conn.execute(
-            """
-            CREATE TEMP TABLE pgstactemp (content jsonb)
-            ON COMMIT DROP;
-            """
-        )
-        await conn.copy_to_table(
-            "pgstactemp",
-            source=bytes_iter,
-            columns=["content"],
-            format="csv",
-            quote=chr(27),
-            delimiter=chr(31),
-        )
         if table == "collections":
+            await conn.execute(
+                """
+                CREATE TEMP TABLE pgstactemp (content jsonb)
+                ON COMMIT DROP;
+                """
+            )
+            await conn.copy_to_table(
+                "pgstactemp",
+                source=bytes_iter,
+                columns=["content"],
+                format="csv",
+                quote=chr(27),
+                delimiter=chr(31),
+            )
             await conn.execute(
                 """
                 INSERT INTO collections (content)
@@ -190,11 +209,13 @@ async def copy_upsert(iter: T, table: tables, conn: asyncpg.Connection) -> None:
190209 """
191210 )
192211 if table == "items" :
193- await conn .execute (
194- """
195- SELECT upsert_item(content)
196- FROM pgstactemp;
197- """
212+ await conn .copy_to_table (
213+ "items_staging_upsert" ,
214+ source = bytes_iter ,
215+ columns = ["content" ],
216+ format = "csv" ,
217+ quote = chr (27 ),
218+ delimiter = chr (31 ),
198219 )
199220
200221
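For reference, a hedged end-to-end sketch of driving the new items path. The connection string and the sample item are placeholders, dicts_to_bytes is the illustrative helper sketched earlier, and copy_upsert is the function shown in this diff:

import asyncio

import asyncpg


async def main() -> None:
    conn = await asyncpg.connect("postgresql://user:password@localhost:5432/pgstac")
    try:
        items = [{"id": "item-1", "collection": "example-collection"}]
        # copy_upsert loads into items_staging_upsert, which the target
        # pgstac database is expected to process into the items table.
        await copy_upsert(dicts_to_bytes(items), "items", conn)
    finally:
        await conn.close()


if __name__ == "__main__":
    asyncio.run(main())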