122 | 122 |
123 | 123 |
124 | 124 | def log_code_repair_to_db(
125 | | -     code_repair_log_db: Path,
126 | | -     optimization_id: str,
127 | | -     trace_id: str | None = None,
128 | | -     passed: str | None = None,
129 | | -     faster: str | None = None,
| 125 | +     code_repair_log_db: Path, optimization_id: str, trace_id: str, passed: str, faster: str
130 | 126 | ) -> None:
131 | | -     """Log code repair data to SQLite database.
132 | | -
133 | | -     Uses upsert pattern to allow incremental logging with different columns at different places.
134 | | -     Only non-None values will be updated; existing values are preserved.
135 | | -     """
| 127 | +     """Log code repair data to SQLite database."""
136 | 128 |     try:
137 | | -         conn = sqlite3.connect(code_repair_log_db)
138 | | -         cursor = conn.cursor()
139 | | -
140 | | -         # Build dynamic upsert query based on provided columns
141 | | -         columns = ["optimization_id"]
142 | | -         values = [optimization_id]
143 | | -         update_parts = ["updated_at = CURRENT_TIMESTAMP"]
144 | | -
145 | | -         if trace_id is not None:
146 | | -             columns.append("trace_id")
147 | | -             values.append(trace_id)
148 | | -             update_parts.append("trace_id = excluded.trace_id")
149 | | -
150 | | -         if passed is not None:
151 | | -             columns.append("passed")
152 | | -             values.append(passed)
153 | | -             update_parts.append("passed = excluded.passed")
154 | | -
155 | | -         if faster is not None:
156 | | -             columns.append("faster")
157 | | -             values.append(faster)
158 | | -             update_parts.append("faster = excluded.faster")
159 | | -
160 | | -         placeholders = ", ".join(["?"] * len(values))
161 | | -         columns_str = ", ".join(columns)
162 | | -         update_str = ", ".join(update_parts)
163 | | -
164 | | -         cursor.execute(
165 | | -             f"""
166 | | -             INSERT INTO code_repair_logs_cf ({columns_str})
167 | | -             VALUES ({placeholders})
168 | | -             ON CONFLICT(optimization_id) DO UPDATE SET {update_str}
169 | | -             """,  # noqa: S608
170 | | -             values,
171 | | -         )
172 | | -         conn.commit()
173 | | -         conn.close()
| 129 | +         with sqlite3.connect(code_repair_log_db) as conn:
| 130 | +             cursor = conn.cursor()
| 131 | +             cursor.execute("""
| 132 | +                 CREATE TABLE IF NOT EXISTS code_repair_logs_cf (
| 133 | +                     optimization_id TEXT PRIMARY KEY,
| 134 | +                     trace_id TEXT,
| 135 | +                     passed TEXT,
| 136 | +                     faster TEXT,
| 137 | +                     created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
| 138 | +                 )
| 139 | +             """)
| 140 | +             cursor.execute(
| 141 | +                 """
| 142 | +                 INSERT INTO code_repair_logs_cf (optimization_id, trace_id, passed, faster)
| 143 | +                 VALUES (?, ?, ?, ?)
| 144 | +                 """,
| 145 | +                 (optimization_id, trace_id, passed, faster),
| 146 | +             )
| 147 | +             conn.commit()
174 | 148 |     except Exception as e:
175 | 149 |         sentry_sdk.capture_exception(e)
176 | | -         logger.exception(e)
| 150 | +         logger.exception("Error logging code repair to db")
177 | 151 |
178 | 152 |
179 | 153 | class CandidateProcessor:
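The rewrite drops the optional parameters and the incremental upsert: callers now supply all four log fields in a single call. Below is a minimal sketch of a call site under the new signature, assuming `log_code_repair_to_db` from the hunk above is in scope; the database path and id values are hypothetical, not taken from the repository.

```python
from pathlib import Path

# Hypothetical values for illustration; real callers pass their own ids and
# the string-encoded test/benchmark outcomes.
log_code_repair_to_db(
    code_repair_log_db=Path("/tmp/code_repair_log.db"),
    optimization_id="opt-123",
    trace_id="trace-456",
    passed="true",
    faster="false",
)
```

One consequence of replacing the `ON CONFLICT ... DO UPDATE` upsert with a plain `INSERT`: logging the same `optimization_id` twice now violates the primary key, and the surrounding `try`/`except` reports that to Sentry instead of updating the existing row.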
@@ -448,20 +422,6 @@ def optimize_function(self) -> Result[BestOptimization, str]: |
448 | 422 |         initialization_result = self.can_be_optimized()
449 | 423 |         if not is_successful(initialization_result):
450 | 424 |             return Failure(initialization_result.failure())
451 | | -         conn = sqlite3.connect(self.code_repair_log_db)
452 | | -         cursor = conn.cursor()
453 | | -         cursor.execute("""
454 | | -             CREATE TABLE IF NOT EXISTS code_repair_logs_cf (
455 | | -                 optimization_id TEXT PRIMARY KEY,
456 | | -                 trace_id TEXT,
457 | | -                 passed TEXT,
458 | | -                 faster TEXT,
459 | | -                 created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
460 | | -                 updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
461 | | -             )
462 | | -         """)
463 | | -         conn.commit()
464 | | -         conn.close()
465 | 425 |         should_run_experiment, code_context, original_helper_code = initialization_result.unwrap()
466 | 426 |
467 | 427 |         code_print(
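The table-creation DDL removed here now lives inside `log_code_repair_to_db`, which issues `CREATE TABLE IF NOT EXISTS` on every call. That statement is idempotent, so the one-time setup in `optimize_function` becomes redundant. A small sketch of that assumption, using the new schema from this commit against a throwaway in-memory database:

```python
import sqlite3

# Schema as defined in the new log_code_repair_to_db (updated_at column dropped).
DDL = """
CREATE TABLE IF NOT EXISTS code_repair_logs_cf (
    optimization_id TEXT PRIMARY KEY,
    trace_id TEXT,
    passed TEXT,
    faster TEXT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
"""

conn = sqlite3.connect(":memory:")  # throwaway database for the demo
conn.execute(DDL)
conn.execute(DDL)  # running the DDL again is a no-op, not an error
conn.close()
```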