Skip to content

Commit 3d0c10a

Browse files
chore: update pre-commit hooks (#4752)
* chore: update pre-commit hooks updates: - [github.com/astral-sh/ruff-pre-commit: v0.8.6 → v0.9.1](astral-sh/ruff-pre-commit@v0.8.6...v0.9.1) * style: pre-commit fixes --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent b4990c4 commit 3d0c10a

File tree

40 files changed

+109
-124
lines changed

40 files changed

+109
-124
lines changed

.pre-commit-config.yaml

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -4,7 +4,7 @@ ci:
44

55
repos:
66
- repo: https://github.com/astral-sh/ruff-pre-commit
7-
rev: "v0.8.6"
7+
rev: "v0.9.1"
88
hooks:
99
- id: ruff
1010
args: [--fix, --show-fixes]

docs/source/examples/notebooks/initialize-model-with-solution.ipynb

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -83,7 +83,7 @@
8383
"# import drive cycle from file\n",
8484
"data_loader = pybamm.DataLoader()\n",
8585
"drive_cycle = pd.read_csv(\n",
86-
" f\"{data_loader.get_data(\"US06.csv\")}\", comment=\"#\", header=None\n",
86+
" f\"{data_loader.get_data('US06.csv')}\", comment=\"#\", header=None\n",
8787
").to_numpy()\n",
8888
"# create interpolant\n",
8989
"param = model.default_parameter_values\n",

docs/source/examples/notebooks/models/compare-ecker-data.ipynb

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -59,10 +59,10 @@
5959
"data_loader = pybamm.DataLoader()\n",
6060
"\n",
6161
"voltage_data_1C = pd.read_csv(\n",
62-
" f\"{data_loader.get_data(\"Ecker_1C.csv\")}\", header=None\n",
62+
" f\"{data_loader.get_data('Ecker_1C.csv')}\", header=None\n",
6363
").to_numpy()\n",
6464
"voltage_data_5C = pd.read_csv(\n",
65-
" f\"{data_loader.get_data(\"Ecker_5C.csv\")}\", header=None\n",
65+
" f\"{data_loader.get_data('Ecker_5C.csv')}\", header=None\n",
6666
").to_numpy()"
6767
]
6868
},

docs/source/examples/notebooks/models/pouch-cell-model.ipynb

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -193,7 +193,7 @@
193193
"source": [
194194
"data_loader = pybamm.DataLoader()\n",
195195
"comsol_results_path = pybamm.get_parameters_filepath(\n",
196-
" f\"{data_loader.get_data(\"comsol_1plus1D_3C.json\")}\"\n",
196+
" f\"{data_loader.get_data('comsol_1plus1D_3C.json')}\"\n",
197197
")\n",
198198
"comsol_variables = json.load(open(comsol_results_path))"
199199
]

docs/source/examples/notebooks/parameterization/change-input-current.ipynb

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -194,7 +194,7 @@
194194
"# import drive cycle from file\n",
195195
"data_loader = pybamm.DataLoader()\n",
196196
"drive_cycle = pd.read_csv(\n",
197-
" f\"{data_loader.get_data(\"US06.csv\")}\", comment=\"#\", header=None\n",
197+
" f\"{data_loader.get_data('US06.csv')}\", comment=\"#\", header=None\n",
198198
").to_numpy()\n",
199199
"\n",
200200
"# load parameter values\n",

docs/source/examples/notebooks/solvers/idaklu-jax-interface.ipynb

Lines changed: 4 additions & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -307,13 +307,13 @@
307307
"# Calculate the Jacobian matrix (via forward autodiff)\n",
308308
"t_start = time.time()\n",
309309
"out = jax.jacfwd(f, argnums=1)(t_eval, inputs)\n",
310-
"print(f\"Jacobian forward method ran in {time.time()-t_start:0.3} secs\")\n",
310+
"print(f\"Jacobian forward method ran in {time.time() - t_start:0.3} secs\")\n",
311311
"print(out)\n",
312312
"\n",
313313
"# Calculate Jacobian matrix (via backward autodiff)\n",
314314
"t_start = time.time()\n",
315315
"out = jax.jacrev(f, argnums=1)(t_eval, inputs)\n",
316-
"print(f\"\\nJacobian reverse method ran in {time.time()-t_start:0.3} secs\")\n",
316+
"print(f\"\\nJacobian reverse method ran in {time.time() - t_start:0.3} secs\")\n",
317317
"print(out)"
318318
]
319319
},
@@ -382,7 +382,7 @@
382382
" ),\n",
383383
" in_axes=(0, None), # map time over the 0th dimension and do not map inputs\n",
384384
")(t_eval, inputs)\n",
385-
"print(f\"Gradient method ran in {time.time()-t_start:0.3} secs\")\n",
385+
"print(f\"Gradient method ran in {time.time() - t_start:0.3} secs\")\n",
386386
"print(data)"
387387
]
388388
},
@@ -436,7 +436,7 @@
436436
"# Get the value and gradient of the SSE function\n",
437437
"t_start = time.time()\n",
438438
"value, gradient = jax.value_and_grad(sse, argnums=1)(t_eval, inputs_pred)\n",
439-
"print(f\"Value and gradient computed in {time.time()-t_start:0.3} secs\")\n",
439+
"print(f\"Value and gradient computed in {time.time() - t_start:0.3} secs\")\n",
440440
"print(\"SSE value: \", value)\n",
441441
"print(\"SSE gradient (wrt each input): \", gradient)"
442442
]

docs/source/examples/notebooks/solvers/speed-up-solver.ipynb

Lines changed: 10 additions & 10 deletions
Original file line number · Diff line number · Diff line change
@@ -806,23 +806,23 @@
806806
"y = pybamm.Variable(\"y\")\n",
807807
"\n",
808808
"# Normal maximum\n",
809-
"print(f\"Exact maximum: {pybamm.maximum(x,y)}\")\n",
809+
"print(f\"Exact maximum: {pybamm.maximum(x, y)}\")\n",
810810
"\n",
811811
"# Softplus\n",
812812
"print(\"Softplus (k=10): \", pybamm.softplus(x, y, 10))\n",
813813
"\n",
814814
"# Changing the setting to call softplus automatically\n",
815815
"pybamm.settings.min_max_mode = \"soft\"\n",
816816
"pybamm.settings.min_max_smoothing = 20\n",
817-
"print(f\"Softplus (k=20): {pybamm.maximum(x,y)}\")\n",
817+
"print(f\"Softplus (k=20): {pybamm.maximum(x, y)}\")\n",
818818
"\n",
819819
"# All smoothing parameters can be changed at once\n",
820820
"pybamm.settings.set_smoothing_parameters(30)\n",
821-
"print(f\"Softplus (k=30): {pybamm.maximum(x,y)}\")\n",
821+
"print(f\"Softplus (k=30): {pybamm.maximum(x, y)}\")\n",
822822
"\n",
823823
"# Change back\n",
824824
"pybamm.settings.set_smoothing_parameters(\"exact\")\n",
825-
"print(f\"Exact maximum: {pybamm.maximum(x,y)}\")"
825+
"print(f\"Exact maximum: {pybamm.maximum(x, y)}\")"
826826
]
827827
},
828828
{
@@ -1025,23 +1025,23 @@
10251025
"y = pybamm.Variable(\"y\")\n",
10261026
"\n",
10271027
"# Normal maximum\n",
1028-
"print(f\"Exact maximum: {pybamm.maximum(x,y)}\")\n",
1028+
"print(f\"Exact maximum: {pybamm.maximum(x, y)}\")\n",
10291029
"\n",
10301030
"# Smooth plus can be called explicitly\n",
10311031
"print(\"Smooth plus (k=100): \", pybamm.smooth_max(x, y, 100))\n",
10321032
"\n",
10331033
"# Smooth plus and smooth minus will be used when the mode is set to \"smooth\"\n",
10341034
"pybamm.settings.min_max_mode = \"smooth\"\n",
10351035
"pybamm.settings.min_max_smoothing = 200\n",
1036-
"print(f\"Smooth plus (k=200): {pybamm.maximum(x,y)}\")\n",
1036+
"print(f\"Smooth plus (k=200): {pybamm.maximum(x, y)}\")\n",
10371037
"\n",
10381038
"# Setting the smoothing parameters with set_smoothing_parameters() defaults to softplus\n",
10391039
"pybamm.settings.set_smoothing_parameters(10)\n",
1040-
"print(f\"Softplus (k=10): {pybamm.maximum(x,y)}\")\n",
1040+
"print(f\"Softplus (k=10): {pybamm.maximum(x, y)}\")\n",
10411041
"\n",
10421042
"# Change back\n",
10431043
"pybamm.settings.set_smoothing_parameters(\"exact\")\n",
1044-
"print(f\"Exact maximum: {pybamm.maximum(x,y)}\")"
1044+
"print(f\"Exact maximum: {pybamm.maximum(x, y)}\")"
10451045
]
10461046
},
10471047
{
@@ -1206,11 +1206,11 @@
12061206
],
12071207
"source": [
12081208
"pybamm.settings.set_smoothing_parameters(10)\n",
1209-
"print(f\"Soft minimum (softminus):\\t {pybamm.minimum(x,y)!s}\")\n",
1209+
"print(f\"Soft minimum (softminus):\\t {pybamm.minimum(x, y)!s}\")\n",
12101210
"print(f\"Smooth heaviside (sigmoid):\\t {x < y!s}\")\n",
12111211
"print(f\"Smooth absolute value: \\t\\t {abs(x)!s}\")\n",
12121212
"pybamm.settings.min_max_mode = \"smooth\"\n",
1213-
"print(f\"Smooth minimum:\\t\\t\\t {pybamm.minimum(x,y)!s}\")\n",
1213+
"print(f\"Smooth minimum:\\t\\t\\t {pybamm.minimum(x, y)!s}\")\n",
12141214
"pybamm.settings.set_smoothing_parameters(\"exact\")"
12151215
]
12161216
},

docs/source/examples/notebooks/spatial_methods/finite-volumes.ipynb

Lines changed: 6 additions & 6 deletions
Original file line number · Diff line number · Diff line change
@@ -492,7 +492,7 @@
492492
"macro_mesh = mesh.combine_submeshes(*macroscale)\n",
493493
"print(\"gradient matrix is:\\n\")\n",
494494
"print(\n",
495-
" f\"1/dx *\\n{macro_mesh.d_nodes[:,np.newaxis] * grad_u_disc.children[0].entries.toarray()}\"\n",
495+
" f\"1/dx *\\n{macro_mesh.d_nodes[:, np.newaxis] * grad_u_disc.children[0].entries.toarray()}\"\n",
496496
")"
497497
]
498498
},
@@ -610,7 +610,7 @@
610610
"micro_mesh = mesh[\"negative particle\"]\n",
611611
"print(\"\\n gradient matrix is:\\n\")\n",
612612
"print(\n",
613-
" f\"1/dr *\\n{micro_mesh.d_nodes[:,np.newaxis] * grad_v_disc.children[0].entries.toarray()}\"\n",
613+
" f\"1/dr *\\n{micro_mesh.d_nodes[:, np.newaxis] * grad_v_disc.children[0].entries.toarray()}\"\n",
614614
")\n",
615615
"\n",
616616
"r_edge = micro_mesh.edges[1:-1] # note that grad_u_disc is evaluated on the node edges\n",
@@ -677,8 +677,8 @@
677677
"(grad_u_disc.render())\n",
678678
"u_eval = grad_u_disc.evaluate(y=y)\n",
679679
"dx = np.diff(macro_mesh.nodes)[-1]\n",
680-
"print(f\"The value of u on the left-hand boundary is {y[0] - dx*u_eval[0]/2}\")\n",
681-
"print(f\"The value of u on the right-hand boundary is {y[1] + dx*u_eval[-1]/2}\")"
680+
"print(f\"The value of u on the left-hand boundary is {y[0] - dx * u_eval[0] / 2}\")\n",
681+
"print(f\"The value of u on the right-hand boundary is {y[1] + dx * u_eval[-1] / 2}\")"
682682
]
683683
},
684684
{
@@ -765,7 +765,7 @@
765765
"(grad_u_disc.render())\n",
766766
"grad_u_eval = grad_u_disc.evaluate(y=y)\n",
767767
"u_eval = grad_u_disc.children[1].evaluate(y=y)\n",
768-
"print(f\"The value of u on the left-hand boundary is {(u_eval[0] + u_eval[1])/2}\")\n",
768+
"print(f\"The value of u on the left-hand boundary is {(u_eval[0] + u_eval[1]) / 2}\")\n",
769769
"print(f\"The gradient on the right-hand boundary is {grad_u_eval[-1]}\")"
770770
]
771771
},
@@ -914,7 +914,7 @@
914914
"source": [
915915
"int_u = pybamm.Integral(u, x_var)\n",
916916
"int_u_disc = disc.process_symbol(int_u)\n",
917-
"print(f\"int(u) = {int_u_disc.evaluate(y=y)} is approximately equal to 1/12, {1/12}\")\n",
917+
"print(f\"int(u) = {int_u_disc.evaluate(y=y)} is approximately equal to 1/12, {1 / 12}\")\n",
918918
"\n",
919919
"# We divide v by r to evaluate the integral more easily\n",
920920
"int_v_over_r2 = pybamm.Integral(v / r_var**2, r_var)\n",

examples/scripts/thermal_lithium_ion.py

Lines changed: 2 additions & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -25,10 +25,8 @@
2525
full_params = parameter_values.copy()
2626
full_params.update(
2727
{
28-
"Negative current collector surface heat transfer coefficient [W.m-2.K-1]"
29-
"": 5,
30-
"Positive current collector surface heat transfer coefficient [W.m-2.K-1]"
31-
"": 5,
28+
"Negative current collector surface heat transfer coefficient [W.m-2.K-1]": 5,
29+
"Positive current collector surface heat transfer coefficient [W.m-2.K-1]": 5,
3230
"Negative tab heat transfer coefficient [W.m-2.K-1]": 0,
3331
"Positive tab heat transfer coefficient [W.m-2.K-1]": 0,
3432
"Edge heat transfer coefficient [W.m-2.K-1]": 0,

src/pybamm/expression_tree/operations/evaluate_python.py

Lines changed: 2 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -266,8 +266,7 @@ def find_symbols(
266266
and scipy.sparse.issparse(dummy_eval_right)
267267
):
268268
raise NotImplementedError(
269-
"sparse mat-mat multiplication not supported "
270-
"for output_jax == True"
269+
"sparse mat-mat multiplication not supported for output_jax == True"
271270
)
272271
else:
273272
symbol_str = (
@@ -451,7 +450,7 @@ def __init__(self, symbol: pybamm.Symbol):
451450

452451
# add function def to first line
453452
python_str = (
454-
"def evaluate(constants, t=None, y=None, " "inputs=None):\n" + python_str
453+
"def evaluate(constants, t=None, y=None, inputs=None):\n" + python_str
455454
)
456455

457456
# calculate the final variable that will output the result of calling `evaluate`

0 commit comments

Comments (0)