|
121 | 121 |
|
122 | 122 |
|
def log_code_repair_to_db(
    code_repair_log_db: Path, optimization_id: str, trace_id: str, passed: str, faster: str
) -> None:
    """Log code repair data to SQLite database.

    Creates the ``code_repair_logs_cf`` table on first use, then upserts a
    single row keyed by ``optimization_id`` so that logging the same
    optimization again updates the existing row instead of violating the
    PRIMARY KEY constraint.

    Args:
        code_repair_log_db: Path to the SQLite database file.
        optimization_id: Unique id of the optimization run (primary key).
        trace_id: Trace identifier associated with this run.
        passed: Whether the repaired code passed verification.
        faster: Whether the repaired code was faster.

    Any exception is reported to Sentry and logged rather than raised, so a
    logging failure never interrupts the optimization flow.
    """
    try:
        conn = sqlite3.connect(code_repair_log_db)
        try:
            cursor = conn.cursor()
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS code_repair_logs_cf (
                    optimization_id TEXT PRIMARY KEY,
                    trace_id TEXT,
                    passed TEXT,
                    faster TEXT,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                )
            """)
            # Upsert: a plain INSERT would raise IntegrityError the second
            # time the same optimization_id is logged; update the row instead.
            cursor.execute(
                """
                INSERT INTO code_repair_logs_cf (optimization_id, trace_id, passed, faster)
                VALUES (?, ?, ?, ?)
                ON CONFLICT(optimization_id) DO UPDATE SET
                    trace_id = excluded.trace_id,
                    passed = excluded.passed,
                    faster = excluded.faster
                """,
                (optimization_id, trace_id, passed, faster),
            )
            conn.commit()
        finally:
            # sqlite3's "with conn" only manages the transaction; the
            # connection must be closed explicitly to avoid a handle leak.
            conn.close()
    except Exception as e:
        sentry_sdk.capture_exception(e)
        logger.exception("Error logging code repair to db")
176 | 150 |
|
177 | 151 |
|
178 | 152 | class CandidateProcessor: |
@@ -447,20 +421,6 @@ def optimize_function(self) -> Result[BestOptimization, str]: |
447 | 421 | initialization_result = self.can_be_optimized() |
448 | 422 | if not is_successful(initialization_result): |
449 | 423 | return Failure(initialization_result.failure()) |
450 | | - conn = sqlite3.connect(self.code_repair_log_db) |
451 | | - cursor = conn.cursor() |
452 | | - cursor.execute(""" |
453 | | - CREATE TABLE IF NOT EXISTS code_repair_logs_cf ( |
454 | | - optimization_id TEXT PRIMARY KEY, |
455 | | - trace_id TEXT, |
456 | | - passed TEXT, |
457 | | - faster TEXT, |
458 | | - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, |
459 | | - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP |
460 | | - ) |
461 | | - """) |
462 | | - conn.commit() |
463 | | - conn.close() |
464 | 424 | should_run_experiment, code_context, original_helper_code = initialization_result.unwrap() |
465 | 425 |
|
466 | 426 | code_print( |
|
0 commit comments