Commit 1d1c6a3

Merge branch 'master' into enh/remove-numpy-pin
2 parents: c7170da + f4b48f5

35 files changed: +3101 / -1410 lines

.github/workflows/codeql.yml

Lines changed: 92 additions & 0 deletions
@@ -0,0 +1,92 @@
+# For most projects, this workflow file will not need changing; you simply need
+# to commit it to your repository.
+#
+# You may wish to alter this file to override the set of languages analyzed,
+# or to provide custom queries or build logic.
+#
+# ******** NOTE ********
+# We have attempted to detect the languages in your repository. Please check
+# the `language` matrix defined below to confirm you have the correct set of
+# supported CodeQL languages.
+#
+name: "CodeQL"
+
+on:
+  push:
+    branches: [ "master" ]
+  pull_request:
+    branches: [ "master" ]
+  schedule:
+    - cron: '19 21 * * 6'
+
+jobs:
+  analyze:
+    name: Analyze (${{ matrix.language }})
+    # Runner size impacts CodeQL analysis time. To learn more, please see:
+    #   - https://gh.io/recommended-hardware-resources-for-running-codeql
+    #   - https://gh.io/supported-runners-and-hardware-resources
+    #   - https://gh.io/using-larger-runners (GitHub.com only)
+    # Consider using larger runners or machines with greater resources for possible analysis time improvements.
+    runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
+    permissions:
+      # required for all workflows
+      security-events: write
+
+      # required to fetch internal or private CodeQL packs
+      packages: read
+
+      # only required for workflows in private repositories
+      actions: read
+      contents: read
+
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - language: python
+            build-mode: none
+          # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
+          # Use `c-cpp` to analyze code written in C, C++ or both
+          # Use 'java-kotlin' to analyze code written in Java, Kotlin or both
+          # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
+          # To learn more about changing the languages that are analyzed or customizing the build mode for your analysis,
+          # see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning.
+          # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
+          # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      # Initializes the CodeQL tools for scanning.
+      - name: Initialize CodeQL
+        uses: github/codeql-action/init@v3
+        with:
+          languages: ${{ matrix.language }}
+          build-mode: ${{ matrix.build-mode }}
+          # If you wish to specify custom queries, you can do so here or in a config file.
+          # By default, queries listed here will override any specified in a config file.
+          # Prefix the list here with "+" to use these queries and those in the config file.
+
+          # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
+          # queries: security-extended,security-and-quality
+
+      # If the analyze step fails for one of the languages you are analyzing with
+      # "We were unable to automatically build your code", modify the matrix above
+      # to set the build mode to "manual" for that language. Then modify this step
+      # to build your code.
+      # ℹ️ Command-line programs to run using the OS shell.
+      # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
+      - if: matrix.build-mode == 'manual'
+        shell: bash
+        run: |
+          echo 'If you are using a "manual" build mode for one or more of the' \
+            'languages you are analyzing, replace this with the commands to build' \
+            'your code, for example:'
+          echo '  make bootstrap'
+          echo '  make release'
+          exit 1
+
+      - name: Perform CodeQL Analysis
+        uses: github/codeql-action/analyze@v3
+        with:
+          category: "/language:${{matrix.language}}"

.github/workflows/test.yml

Lines changed: 32 additions & 0 deletions
@@ -93,3 +93,35 @@ jobs:
       - name: Upload code coverage report
         if: matrix.os == 'ubuntu-latest'
         uses: codecov/codecov-action@v4
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+
+  check-types:
+    name: Check types
+    needs: [build]
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0 # Needed for setuptools_scm
+
+      - name: Set up Python 3.12
+        uses: actions/setup-python@v5
+        with:
+          python-version: 3.12
+
+      - name: Download package
+        uses: actions/download-artifact@v4
+        with:
+          name: Packages
+          path: dist
+
+      - name: Install package and dependencies
+        run: |
+          python -m pip install uv
+          uv pip install --system "$(ls dist/*.whl)[dev]"
+
+      - name: Run type checker (mypy)
+        run: |
+          mypy .

.pre-commit-config.yaml

Lines changed: 5 additions & 8 deletions
@@ -1,12 +1,15 @@
+ci:
+  autoupdate_schedule: monthly
+
 repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v4.6.0
+  rev: v5.0.0
   hooks:
   - id: end-of-file-fixer
   - id: trailing-whitespace
   - id: check-merge-conflict
 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.5.7
+  rev: v0.7.2
   hooks:
   - id: ruff
     args: [--fix]
@@ -32,9 +35,3 @@ repos:
   hooks:
   - id: jupyter-notebook-cleanup
     exclude: examples/solve-on-remote.ipynb
-- repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v1.11.1
-  hooks:
-  - id: mypy
-    files: ^(linopy|test)/
-    additional_dependencies: [numpy, pandas, xarray, types-paramiko]

README.md

Lines changed: 1 addition & 1 deletion
@@ -12,7 +12,7 @@
         **I**nteger\
         **N**on-linear\
         **O**ptimization in\
-        **P Y**thon
+        **PY**thon

 **linopy** is an open-source python package that facilitates **optimization** with **real world data**. It builds a bridge between data analysis packages like [xarray](https://github.com/pydata/xarray) & [pandas](https://pandas.pydata.org/) and problem solvers like [cbc](https://projects.coin-or.org/Cbc), [gurobi](https://www.gurobi.com/) (see the full list below). **Linopy** supports **Linear, Integer, Mixed-Integer and Quadratic Programming** while aiming to make linear programming in Python easy, highly-flexible and performant.

benchmark/notebooks/plot-benchmarks.py.ipynb

Lines changed: 41 additions & 23 deletions
@@ -12,8 +12,8 @@
 "import seaborn as sns\n",
 "\n",
 "sns.set_theme(\"paper\", \"white\")\n",
-"#plt.rc('text', usetex=True)\n",
-"#plt.rc('font', family='sans-serif')"
+"# plt.rc('text', usetex=True)\n",
+"# plt.rc('font', family='sans-serif')"
 ]
 },
 {
@@ -24,8 +24,8 @@
 "outputs": [],
 "source": [
 "data = pd.read_csv(snakemake.input[0], index_col=0)\n",
-"cols = ['Time', 'Memory']\n",
-"df = data.melt(id_vars=data.columns.drop(cols), value_vars=cols, var_name='kind')"
+"cols = [\"Time\", \"Memory\"]\n",
+"df = data.melt(id_vars=data.columns.drop(cols), value_vars=cols, var_name=\"kind\")"
 ]
 },
 {
@@ -35,21 +35,31 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"if snakemake.wildcards[\"kind\"] == 'overhead':\n",
-" labels = ['Overhead time (s)', 'Overhead memory (MB)']\n",
+"if snakemake.wildcards[\"kind\"] == \"overhead\":\n",
+" labels = [\"Overhead time (s)\", \"Overhead memory (MB)\"]\n",
 "else:\n",
-" labels = ['Time (s)', 'Memory (MB)']\n",
+" labels = [\"Time (s)\", \"Memory (MB)\"]\n",
 "\n",
-"g = sns.FacetGrid(data=df, row=\"kind\", sharey=False, height=2., aspect=2)\n",
-"g.map_dataframe(sns.lineplot, x=\"Number of Variables\", y='value', hue='API', style='API',\n",
-" marker='.', legend='full', zorder=8)\n",
+"g = sns.FacetGrid(data=df, row=\"kind\", sharey=False, height=2.0, aspect=2)\n",
+"g.map_dataframe(\n",
+" sns.lineplot,\n",
+" x=\"Number of Variables\",\n",
+" y=\"value\",\n",
+" hue=\"API\",\n",
+" style=\"API\",\n",
+" marker=\".\",\n",
+" legend=\"full\",\n",
+" zorder=8,\n",
+")\n",
 "for ax, label in zip(g.axes.ravel(), labels):\n",
 " ax.set_ylabel(label)\n",
 " ax.set_title(\"\")\n",
-" ax.grid(axis='y', lw=0.2, color='grey', zorder=3, alpha=0.4)\n",
+" ax.grid(axis=\"y\", lw=0.2, color=\"grey\", zorder=3, alpha=0.4)\n",
 "g.fig.tight_layout()\n",
 "g.add_legend()\n",
-"g.fig.savefig(snakemake.output.time_memory, bbox_inches='tight', pad_inches=0.1, dpi=300)"
+"g.fig.savefig(\n",
+" snakemake.output.time_memory, bbox_inches=\"tight\", pad_inches=0.1, dpi=300\n",
+")"
 ]
 },
 {
@@ -59,25 +69,33 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"if snakemake.wildcards[\"kind\"] == 'overhead':\n",
-" label = 'Computational overhead [MBs]'\n",
+"if snakemake.wildcards[\"kind\"] == \"overhead\":\n",
+" label = \"Computational overhead [MBs]\"\n",
 "else:\n",
-" label = 'Computational resource [MBs]'\n",
+" label = \"Computational resource [MBs]\"\n",
 "\n",
-"df = data.assign(Resource = data[\"Time\"] * data[\"Memory\"])\n",
-"cols = ['Resource']\n",
-"df = df.melt(id_vars=df.columns.drop(cols), value_vars=cols, var_name='kind')\n",
+"df = data.assign(Resource=data[\"Time\"] * data[\"Memory\"])\n",
+"cols = [\"Resource\"]\n",
+"df = df.melt(id_vars=df.columns.drop(cols), value_vars=cols, var_name=\"kind\")\n",
 "\n",
 "fig, ax = plt.subplots(figsize=(6, 3))\n",
-"sns.lineplot(data=df, x=\"Number of Variables\", y='value', hue='API', style='API',\n",
-" marker='.', legend='full', zorder=8)\n",
+"sns.lineplot(\n",
+" data=df,\n",
+" x=\"Number of Variables\",\n",
+" y=\"value\",\n",
+" hue=\"API\",\n",
+" style=\"API\",\n",
+" marker=\".\",\n",
+" legend=\"full\",\n",
+" zorder=8,\n",
+")\n",
 "sns.despine()\n",
 "ax.set_ylabel(label)\n",
 "ax.set_title(\"\")\n",
-"plt.ticklabel_format(axis='both', style='sci', scilimits=(3,3))\n",
-"ax.grid(axis='y', lw=0.2, color='grey', zorder=3, alpha=0.4)\n",
+"plt.ticklabel_format(axis=\"both\", style=\"sci\", scilimits=(3, 3))\n",
+"ax.grid(axis=\"y\", lw=0.2, color=\"grey\", zorder=3, alpha=0.4)\n",
 "fig.tight_layout()\n",
-"fig.savefig(snakemake.output.resource, bbox_inches='tight', pad_inches=0.1, dpi=300)"
+"fig.savefig(snakemake.output.resource, bbox_inches=\"tight\", pad_inches=0.1, dpi=300)"
 ]
 }
 ],

codecov.yml

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+comment: false

doc/release_notes.rst

Lines changed: 29 additions & 0 deletions
@@ -4,6 +4,35 @@ Release Notes
 Upcoming Version
 ----------------

+* The method :meth:`model.to_file <linopy.model.Model.to_file>` now includes a `progress` argument to enable or disable the progress bar while writing.
+
+
+Version 0.4.2
+--------------
+
+* Fix the file handler to properly close the file when reading the sense from a problem file.
+
+Version 0.4.1
+--------------
+
+* Fix the `slice_size` argument in the `solve` function. The argument was not properly passed to the `to_file` function.
+* Fix the slicing of constraints in case the term dimension is larger than the leading constraint coordinate dimension.
+
+Version 0.4.0
+--------------
+
+* When writing out an LP file, large variables and constraints are now chunked to avoid memory issues. This is especially useful for large models whose constraints have many terms. The chunk size can be set with the `slice_size` argument in the `solve` function.
+* Constraints of the form `<= infinity` and `>= -infinity` are now automatically filtered out when solving. The `solve` function has a new argument `sanitize_infinities` to control this feature; the default is `True`.
+* The representation of linopy objects with multiindexed coordinates was improved to be more readable.
+* Grouping expressions is now supported on dimensions called "group" and on dimensions that have the same name as the grouping object.
+* Grouping dimensions which have multiindexed coordinates is now supported.
+* See the full list of changes `here <https://github.com/PyPSA/linopy/releases/tag/v0.4.0>`__.
+
+Version 0.3.15
+--------------
+
+* The group dimension when grouping by a pandas dataframe is now always `group`. This fixes the case where the dataframe contains a column named `name`.
+
 Version 0.3.14
 --------------

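To make the options listed in these release notes concrete, here is a minimal usage sketch. Only the argument names (`progress`, `slice_size`, `sanitize_infinities`) are taken from the notes above; the toy model, the file name, and the numeric `slice_size` value are illustrative, and an installed LP solver (e.g. HiGHS or Gurobi) is assumed for the `solve` call.

```python
import linopy

# Toy model, just to have something to write out and solve.
m = linopy.Model()
x = m.add_variables(lower=0, name="x")
m.add_constraints(x >= 10, name="con")
m.add_objective(1 * x)

# New `progress` argument: write the LP file without a progress bar.
m.to_file("problem.lp", progress=False)

# `slice_size` controls how variables/constraints are chunked when the
# problem file is written; `sanitize_infinities=True` (the stated default)
# drops constraints of the form `<= inf` / `>= -inf` before solving.
m.solve(slice_size=2_000_000, sanitize_infinities=True)
```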
examples/create-a-model-with-coordinates.ipynb

Lines changed: 12 additions & 8 deletions
@@ -71,10 +71,14 @@
 "source": [
 "import pandas as pd\n",
 "\n",
-"time = pd.Index(range(10), name='time')\n",
+"time = pd.Index(range(10), name=\"time\")\n",
 "\n",
-"x = m.add_variables(lower=0, coords=[time], name='x', )\n",
-"y = m.add_variables(lower=0, coords=[time], name='y')"
+"x = m.add_variables(\n",
+" lower=0,\n",
+" coords=[time],\n",
+" name=\"x\",\n",
+")\n",
+"y = m.add_variables(lower=0, coords=[time], name=\"y\")"
 ]
 },
 {
@@ -97,7 +101,7 @@
 "source": [
 "factor = pd.Series(time, index=time)\n",
 "\n",
-"3*x + 7*y >= 10*factor"
+"3 * x + 7 * y >= 10 * factor"
 ]
 },
 {
@@ -115,8 +119,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"con1 = m.add_constraints(3*x + 7*y >= 10*factor, name='con1')\n",
-"con2 = m.add_constraints(5*x + 2*y >= 3*factor, name='con2')\n",
+"con1 = m.add_constraints(3 * x + 7 * y >= 10 * factor, name=\"con1\")\n",
+"con2 = m.add_constraints(5 * x + 2 * y >= 3 * factor, name=\"con2\")\n",
 "m"
 ]
 },
@@ -135,7 +139,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"obj = (x + 2*y).sum()\n",
+"obj = (x + 2 * y).sum()\n",
 "m.add_objective(obj)"
 ]
 },
@@ -164,7 +168,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"m.solution.to_dataframe().plot(grid=True, ylabel='Optimal Value');"
+"m.solution.to_dataframe().plot(grid=True, ylabel=\"Optimal Value\");"
 ]
 },
 {

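For readers skimming the notebook diff above, the reformatted cells assemble into roughly the following self-contained script. This is only a sketch built from the fragments visible in the diff; it assumes a plain `linopy.Model()` and an installed LP solver for the final `solve` call.

```python
import pandas as pd

import linopy

m = linopy.Model()

# Variables defined over a shared "time" coordinate.
time = pd.Index(range(10), name="time")
x = m.add_variables(lower=0, coords=[time], name="x")
y = m.add_variables(lower=0, coords=[time], name="y")

# Right-hand sides varying along the same coordinate.
factor = pd.Series(time, index=time)
con1 = m.add_constraints(3 * x + 7 * y >= 10 * factor, name="con1")
con2 = m.add_constraints(5 * x + 2 * y >= 3 * factor, name="con2")

# Objective and solve.
m.add_objective((x + 2 * y).sum())
m.solve()

# Plot the optimal values over time.
m.solution.to_dataframe().plot(grid=True, ylabel="Optimal Value")
```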