Skip to content
Draft
Show file tree
Hide file tree
Changes from 21 commits
Commits
Show all changes
23 commits
Select commit. Hold Shift + click to select a range.
85fe716
feat: Implement XAS (X-ray Absorption Spectroscopy) model, fitting, l…
anyangml Mar 24, 2026
9e9c6a3
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 24, 2026
8fd99ad
feat: Reimplement XAS loss with per-atom property fitting, removing p…
anyangml Mar 24, 2026
9352c4f
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 24, 2026
9bc38d7
feat: Add X-ray Absorption Spectroscopy (XAS) training examples
anyangml Mar 24, 2026
c8a4005
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 24, 2026
e157ed7
feat: Implement XAS energy normalization in the XAS loss function and…
anyangml Mar 25, 2026
8c21612
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 25, 2026
250168b
fix: device
anyangml Mar 25, 2026
8ab20b2
fix: filter loss-related keys from state dict in inference and ignore…
anyangml Mar 30, 2026
38c3a04
fix: update XAS reference extraction path and ignore tests directory …
anyangml Mar 30, 2026
17ffd5b
feat: add weighted loss and smoothness regularization to XAS training…
anyangml Mar 30, 2026
829048e
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 30, 2026
f81f2a7
feat: add normalize_fparam option to fitting net and ignore tests dir…
anyangml Mar 30, 2026
3161398
chore: ignore tests directory in git tracking
anyangml Mar 30, 2026
ed8a87c
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 30, 2026
73398f6
feat: add intensity_norm option to XAS loss for scale-invariant train…
anyangml Mar 31, 2026
eaae746
Merge branch 'feat/support-xas-spectrum' of github.com:anyangml/deepm…
anyangml Mar 31, 2026
a663c33
feat: add per-type/edge energy standard deviation normalization to XA…
anyangml Apr 1, 2026
f2d37ed
refactor: normalize energy predictions using global standard deviatio…
anyangml Apr 1, 2026
da895d0
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Apr 1, 2026
94d2a5a
fix: change XAS loss reduction from mean to sum for atomic contributions
anyangml Apr 1, 2026
5f15806
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Apr 1, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -71,3 +71,4 @@ frozen_model.*

# Test system directories
system/
# NOTE(review): ignoring the entire tests/ tree will also hide real unit
# tests from version control — confirm this is intended (the commit history
# suggests it was added for local scratch data, not for the test suite).
tests/
3 changes: 3 additions & 0 deletions deepmd/__about__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# SPDX-License-Identifier: LGPL-3.0-or-later
# Auto-generated stub for development use
# NOTE(review): "dev" is a placeholder version string — presumably overwritten
# by the real build/packaging metadata at release time; confirm this file is
# not shipped as-is in distributions.
__version__ = "dev"
15 changes: 8 additions & 7 deletions deepmd/dpmodel/model/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,13 +104,14 @@ def get_standard_model(data: dict) -> EnergyModel:
else:
raise RuntimeError(f"Unknown fitting type: {fitting_net_type}")

model = modelcls(
descriptor=descriptor,
fitting=fitting,
type_map=data["type_map"],
atom_exclude_types=atom_exclude_types,
pair_exclude_types=pair_exclude_types,
)
model_kwargs: dict = {
"descriptor": descriptor,
"fitting": fitting,
"type_map": data["type_map"],
"atom_exclude_types": atom_exclude_types,
"pair_exclude_types": pair_exclude_types,
}
model = modelcls(**model_kwargs)
return model


Expand Down
50 changes: 47 additions & 3 deletions deepmd/entrypoints/test.py
Original file line number Diff line number Diff line change
Expand Up @@ -887,13 +887,19 @@ def test_property(
high_prec=True,
)

is_xas = var_name == "xas"

if dp.get_dim_fparam() > 0:
data.add(
"fparam", dp.get_dim_fparam(), atomic=False, must=True, high_prec=False
)
if dp.get_dim_aparam() > 0:
data.add("aparam", dp.get_dim_aparam(), atomic=True, must=True, high_prec=False)

# XAS requires sel_type.npy (per-frame absorbing element type index)
if is_xas:
data.add("sel_type", 1, atomic=False, must=True, high_prec=False)

test_data = data.get_test()
mixed_type = data.mixed_type
natoms = len(test_data["type"][0])
Expand All @@ -918,21 +924,59 @@ def test_property(
else:
aparam = None

# XAS: per-atom outputs are needed to average over absorbing-element atoms
eval_atomic = has_atom_property or is_xas
ret = dp.eval(
coord,
box,
atype,
fparam=fparam,
aparam=aparam,
atomic=has_atom_property,
atomic=eval_atomic,
mixed_type=mixed_type,
)

property = ret[0]
if is_xas:
# ret[1]: per-atom property [numb_test, natoms, task_dim]
atom_prop = ret[1].reshape([numb_test, natoms, dp.task_dim])
if mixed_type:
atype_frames = atype # [numb_test, natoms]
else:
atype_frames = np.tile(atype, (numb_test, 1)) # [numb_test, natoms]
sel_type_int = test_data["sel_type"][:numb_test, 0].astype(int)
property = np.zeros([numb_test, dp.task_dim], dtype=atom_prop.dtype)
for i in range(numb_test):
t = sel_type_int[i]
mask = atype_frames[i] == t # [natoms]
count = max(mask.sum(), 1)
property[i] = atom_prop[i][mask].sum(axis=0) / count

# Add back the per-(type, edge) energy reference so output is in
# absolute eV (matching label format). xas_e_ref is saved in the
# model checkpoint by XASLoss.compute_output_stats.
try:
# dp is DeepProperty (wrapper); the PT backend is dp.deep_eval,
# and its ModelWrapper is dp.deep_eval.dp.
xas_e_ref = dp.deep_eval.dp.model["Default"].atomic_model.xas_e_ref
except AttributeError:
xas_e_ref = None
if xas_e_ref is not None and fparam is not None:
import torch as _torch

edge_idx_all = (
_torch.tensor(fparam.reshape(numb_test, -1)).argmax(dim=-1).numpy()
)
e_ref_np = xas_e_ref.cpu().numpy() # [ntypes, nfparam, 2]
for i in range(numb_test):
t = sel_type_int[i]
e = int(edge_idx_all[i])
property[i, :2] += e_ref_np[t, e]
else:
property = ret[0]

property = property.reshape([numb_test, dp.task_dim])

if has_atom_property:
if has_atom_property and not is_xas:
aproperty = ret[1]
aproperty = aproperty.reshape([numb_test, natoms * dp.task_dim])

Expand Down
3 changes: 3 additions & 0 deletions deepmd/pt/infer/inference.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,4 +73,7 @@ def __init__(
self.wrapper = ModelWrapper(self.model) # inference only
if JIT:
self.wrapper = torch.jit.script(self.wrapper)
# Drop loss-related keys (e.g. loss buffers like XASLoss.e_ref) that
# are not part of the inference-only wrapper.
state_dict = {k: v for k, v in state_dict.items() if not k.startswith("loss.")}
self.wrapper.load_state_dict(state_dict)
4 changes: 4 additions & 0 deletions deepmd/pt/loss/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,9 @@
from .tensor import (
TensorLoss,
)
from .xas import (
XASLoss,
)

__all__ = [
"DOSLoss",
Expand All @@ -31,4 +34,5 @@
"PropertyLoss",
"TaskLoss",
"TensorLoss",
"XASLoss",
]
Loading
Loading