@@ -11,32 +11,32 @@
 </tr>
 <tr>
 <td class="talk-table">09:45 </td>
-<td class="talk-table talk-organization"></td>
+<td class="talk-table talk-organization"><div class="talk-title">Introduction and welcome</div><div class="talk-speaker">Oscar Dowson</div></td>
 <td class="talk-table talk-organization"></td>
 </tr>
 <tr>
 <td class="talk-table">10:00 </td>
-<td class="talk-table talk-organization"><div class="talk-title">Introduction and welcome</div></td>
-<td class="talk-table"><div class="talk-title">Bridging the Gap Between Models and Solvers through Constraint Programming</div><div class="talk-speaker">Jean-François Baffier</div></td>
+<td class="talk-table"><div class="talk-title">The state of JuMP</div><div class="talk-speaker">Miles Lubin</div></td>
+<td class="talk-table"><div class="talk-title">Optimizing over trained neural networks with MathOptAI.jl</div><div class="talk-speaker">Robby Parker</div></td>
 </tr>
 <tr>
 <td class="talk-table">10:15 </td>
-<td class="talk-table"><div class="talk-title">The state of JuMP</div><div class="talk-speaker">Miles Lubin</div></td>
+<td class="talk-table"></td>
 <td class="talk-table"></td>
 </tr>
 <tr>
 <td class="talk-table">10:30 </td>
-<td class="talk-table"></td>
-<td class="talk-table"><div class="talk-title">Unlocking the Power of Google OR-Tools with MathOptInterface.jl</div><div class="talk-speaker">Thibaut Cuvelier</div></td>
+<td class="talk-table"><div class="talk-title">JuMP on demand: Creating your own compute cluster for solving optimisation problems</div><div class="talk-speaker">James Foster</div></td>
+<td class="talk-table"><div class="talk-title">ApplicationDrivenLearning.jl a framework to train forecast models with application-driven cost functions</div><div class="talk-speaker">Joaquim Dias Garcia</div></td>
 </tr>
 <tr>
 <td class="talk-table">10:45 </td>
-<td class="talk-table"><div class="talk-title">JuMP on demand: Creating your own compute cluster for solving optimisation problems</div><div class="talk-speaker">James Foster</div></td>
-<td class="talk-table"></td>
+<td class="talk-table"><div class="talk-title">AdaptiveProjection.jl: Automating the Speed-Accuracy Trade-off in Random Projection Methods</div><div class="talk-speaker">Jean-François Baffier</div></td>
+<td class="talk-table"><div class="talk-title">MadIPM.jl</div><div class="talk-speaker">Alexis Montoison</div></td>
 </tr>
 <tr>
 <td class="talk-table">11:00 </td>
-<td class="talk-table"><div class="talk-title">Optimizing over trained neural networks with MathOptAI.jl</div><div class="talk-speaker">TBC</div></td>
+<td class="talk-table"><div class="talk-title">What's new in HiGHS, and thanks to JuMP for its support!</div><div class="talk-speaker">Julian Hall</div></td>
 <td class="talk-table"><div class="talk-title">GPU Implementation of Algorithm NCL</div><div class="talk-speaker">Michael Saunders</div></td>
 </tr>
 <tr>
@@ -46,12 +46,12 @@
 </tr>
 <tr>
 <td class="talk-table">11:30 </td>
-<td class="talk-table"><div class="talk-title">ApplicationDrivenLearning.jl a framework to train forecast models with application-driven cost functions</div><div class="talk-speaker">Joaquim Dias Garcia</div></td>
+<td class="talk-table"><div class="talk-title">Revisiting sparse matrix coloring and bicoloring</div><div class="talk-speaker">Alexis Montoison</div></td>
 <td class="talk-table"><div class="talk-title">CuClarabel: GPU Acceleration for a Conic Optimization Solver</div><div class="talk-speaker">Yuwen Chen</div></td>
 </tr>
 <tr>
 <td class="talk-table">11:45 </td>
-<td class="talk-table"><div class="talk-title">AdaptiveProjection.jl: Automating the Speed-Accuracy Trade-off in Random Projection Methods</div><div class="talk-speaker">Jean-François Baffier</div></td>
+<td class="talk-table"></td>
 <td class="talk-table"></td>
 </tr>
 <tr>
@@ -97,7 +97,7 @@
 <tr>
 <td class="talk-table">14:00 </td>
 <td class="talk-table"><div class="talk-title">Automatic Generation of JuMP.jl Constraints from ModelingToolkit.jl Models</div><div class="talk-speaker">Dimitri Alston</div></td>
-<td class="talk-table"><div class="talk-title">What's new in HiGHS, and thanks to JuMP for its support!</div><div class="talk-speaker">Julian Hall</div></td>
+<td class="talk-table"><div class="talk-title">Bridging the Gap Between Models and Solvers through Constraint Programming</div><div class="talk-speaker">Jean-François Baffier</div></td>
 </tr>
 <tr>
 <td class="talk-table">14:15 </td>
@@ -107,7 +107,7 @@
 <tr>
 <td class="talk-table">14:30 </td>
 <td class="talk-table"><div class="talk-title">Large Scale JuMP Models with Constraint Generators</div><div class="talk-speaker">Benoît Legat</div></td>
-<td class="talk-table"><div class="talk-title">Revisiting sparse matrix coloring and bicoloring</div><div class="talk-speaker">Alexis Montoison</div></td>
+<td class="talk-table"><div class="talk-title">Unlocking the Power of Google OR-Tools with MathOptInterface.jl</div><div class="talk-speaker">Thibaut Cuvelier</div></td>
 </tr>
 <tr>
 <td class="talk-table">14:45 </td>