Commit 208c282

trivial updates

1 parent ad30ef5 commit 208c282

2 files changed: 33 additions & 37 deletions

paper/paper.bib

Lines changed: 31 additions & 35 deletions
@@ -39,15 +39,18 @@ @article{Batzner-22-05
 }

 @inproceedings{Batatia-22-10,
-  title = {{{MACE}}: {{Higher Order Equivariant Message Passing Neural Networks}} for {{Fast}} and {{Accurate Force Fields}}},
-  booktitle = {Advances in {{Neural Information Processing Systems}}},
-  author = {Batatia, Ilyes and Kovacs, David Peter and Simm, Gregor N. C. and Ortner, Christoph and Csanyi, Gabor},
-  year = {2022},
-  month = oct,
-  urldate = {2025-06-22}
+  author = {Batatia, Ilyes and Kovacs, David P and Simm, Gregor and Ortner, Christoph and Csanyi, Gabor},
+  booktitle = {Advances in Neural Information Processing Systems},
+  editor = {S. Koyejo and S. Mohamed and A. Agarwal and D. Belgrave and K. Cho and A. Oh},
+  pages = {11423--11436},
+  title = {MACE: Higher Order Equivariant Message Passing Neural Networks for Fast and Accurate Force Fields},
+  url = {https://proceedings.neurips.cc/paper_files/paper/2022/file/4a36c3c51af11ed9f34615b81edb5bbc-Paper-Conference.pdf},
+  volume = {35},
+  year = {2022}
 }


+
 @article{HjorthLarsen-17-06,
   title = {The Atomic Simulation Environment---a {{Python}} Library for Working with Atoms},
   author = {Hjorth Larsen, Ask and J{\o}rgen Mortensen, Jens and Blomqvist, Jakob and Castelli, Ivano E and Christensen, Rune and Du{\l}ak, Marcin and Friis, Jesper and Groves, Michael N and Hammer, Bj{\o}rk and Hargus, Cory and Hermes, Eric D and Jennings, Paul C and Bjerre Jensen, Peter and Kermode, James and Kitchin, John R and Leonhard Kolsbjerg, Esben and Kubal, Joseph and Kaasbjerg, Kristen and Lysgaard, Steen and Bergmann Maronsson, J{\'o}n and Maxson, Tristan and Olsen, Thomas and Pastewka, Lars and Peterson, Andrew and Rostgaard, Carsten and Schi{\o}tz, Jakob and Sch{\"u}tt, Ole and Strange, Mikkel and Thygesen, Kristian S and Vegge, Tejs and Vilhelmsen, Lasse and Walter, Michael and Zeng, Zhenhua and Jacobsen, Karsten W},
@@ -70,26 +73,21 @@ @article{Thompson-22-02
   urldate = {2022-11-08}
 }

-
-@misc{Simeon-23-06,
-  title = {{{TensorNet}}: {{Cartesian Tensor Representations}} for {{Efficient Learning}} of {{Molecular Potentials}}},
-  author = {Simeon, Guillem and {de Fabritiis}, Gianni},
-  year = {2023},
-  number = {arXiv:2306.06482},
-  eprint = {2306.06482},
-  primaryclass = {physics},
-  archiveprefix = {arXiv}
+@inproceedings{Simeon-23-06,
+  title = {TensorNet: Cartesian Tensor Representations for Efficient Learning of Molecular Potentials},
+  author = {Simeon, Guillem and {de Fabritiis}, Gianni},
+  booktitle = {Thirty-seventh Conference on Neural Information Processing Systems},
+  year = {2023},
+  url = {https://openreview.net/forum?id=BEHlPdBZ2e}
 }

-@misc{Schutt-21-06,
-  title = {Equivariant Message Passing for the Prediction of Tensorial Properties and Molecular Spectra},
-  author = {Sch{\"u}tt, Kristof T. and Unke, Oliver T. and Gastegger, Michael},
-  year = {2021},
-  number = {arXiv:2102.03150},
-  eprint = {2102.03150},
-  primaryclass = {physics},
-  doi = {10.48550/arXiv.2102.03150},
-  archiveprefix = {arXiv}
+@inproceedings{Schutt-21-06,
+  title = {Equivariant Message Passing for the Prediction of Tensorial Properties and Molecular Spectra},
+  author = {Sch{\"u}tt, Kristof T. and Unke, Oliver T. and Gastegger, Michael},
+  booktitle = {Proceedings of Machine Learning Research},
+  year = {2021},
+  note = {Pre-print at: \url{https://arxiv.org/abs/2102.03150}},
+  doi = {10.48550/arXiv.2102.03150}
 }

 @article{Wang-18-07,
@@ -352,17 +350,15 @@ @software{fairchem
   year = {2025}
 }

-@misc{Kong-25-04,
-  title = {{{MatterTune}}: {{An Integrated}}, {{User-Friendly Platform}} for {{Fine-Tuning Atomistic Foundation Models}} to {{Accelerate Materials Simulation}} and {{Discovery}}},
-  author = {Kong, Lingyu and Shoghi, Nima and Hu, Guoxiang and Li, Pan and Fung, Victor},
-  year = {2025},
-  month = apr,
-  number = {arXiv:2504.10655},
-  eprint = {2504.10655},
-  primaryclass = {cond-mat},
-  doi = {10.48550/arXiv.2504.10655},
-  urldate = {2025-04-16},
-  archiveprefix = {arXiv}
+@article{Kong-25-08,
+  title = {{{MatterTune}}: An Integrated, User-Friendly Platform for Fine-Tuning Atomistic Foundation Models to Accelerate Materials Simulation and Discovery},
+  author = {Kong, Lingyu and Shoghi, Nima and Hu, Guoxiang and Li, Pan and Fung, Victor},
+  year = 2025,
+  journal = {Digital Discovery},
+  volume = {4},
+  number = {8},
+  pages = {2253--2262},
+  doi = {10.1039/D5DD00154D}
 }

 @misc{Radova-25-02,

paper/paper.md

Lines changed: 2 additions & 2 deletions
@@ -51,7 +51,7 @@ The `graph-pes` package provides a **unified interface and framework** for defin

 A number of existing packages offer training and validation pipelines for particular ML-PES architectures, including `schnetpack` [@schutt2019schnetpack; @schutt2023schnetpack], `deepmd-kit` [@Wang-18-07; @Zeng-23-08], `nequip` [@Batzner-22-05], `mace-torch` [@Batatia-22-10], `torchmd-net` [@TorchMDNet], and `fairchem` [@fairchem].
 These frameworks focus on their associated model families and do not share a common interface for training.
-While `MatterTune` [@Kong-25-04] offers a unified interface for foundation model fine-tuning, it does not easily support training arbitrary models from scratch.
+While `MatterTune` [@Kong-25-08] offers a unified interface for foundation model fine-tuning, it does not easily support training arbitrary models from scratch.
 In contrast to these, `graph-pes` is a general, model-agnostic framework, designed to enable exact side-by-side comparisons, easy implementation of arbitrary new architectures, and standardized training and evaluation workflows.

 # Features and implementation
@@ -86,7 +86,7 @@ As well as training from scratch, we also support the fine-tuning of existing mo
 Under the hood, `graph-pes-train` builds upon the `PyTorch Lightning` [@Lightning] training loop, allowing the user to configure a variety of common training features and callbacks.
 We also support the use of arbitrary, user-defined components, including custom loss functions, model architectures, optimisers, and datasets.

-Because all models conform to the same interface, all training features can be used with any model architecture. Similarly, all downstream model uses can be written in an architecture-agnostic manner, allowing for MD, relaxations, and other scripts to be written once, and then used with any MLIP architecture, _e.g._ for extended validation beyond simple error metrics [@Morrow-23-03].
+Because all models conform to the same interface, all training features can be used with any model architecture. Similarly, all downstream model uses can be written in an architecture-agnostic manner, allowing for MD, relaxations, and other scripts to be written once, and then used with any MLIP architecture, _e.g._, for extended validation beyond simple error metrics [@Morrow-23-03].

 ## Easy access to foundation models
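The final `paper.md` hunk above describes the payoff of a single model interface: MD, relaxation, and validation scripts are written once and reused with any MLIP architecture. Below is a minimal sketch of that pattern, driving ASE molecular dynamics with a trained model. The `load_model` helper and `GraphPESCalculator` wrapper are assumed names used for illustration, not confirmed `graph-pes` API; consult the package documentation for the exact imports.

```python
# Sketch of an architecture-agnostic MD script, per the paper.md claim above.
# NOTE: `load_model` and `GraphPESCalculator` are assumed names, not
# confirmed graph-pes API - check the package docs for the real imports.
from ase import units
from ase.build import bulk
from ase.md.langevin import Langevin

from graph_pes.models import load_model                    # assumed helper
from graph_pes.utils.calculator import GraphPESCalculator  # assumed wrapper

# Any trained model (NequIP-, MACE-, or TensorNet-style, ...) can be swapped
# in here; nothing else in the script changes.
model = load_model("path/to/model.pt")

# 108-atom copper supercell (4-atom cubic fcc cell, repeated 3x3x3)
atoms = bulk("Cu", "fcc", a=3.6, cubic=True).repeat((3, 3, 3))
atoms.calc = GraphPESCalculator(model)

# 1 ps of NVT Langevin dynamics at 300 K
dyn = Langevin(
    atoms,
    timestep=1.0 * units.fs,
    temperature_K=300,
    friction=0.01 / units.fs,
)
dyn.run(1000)
```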