
Commit c32fabd: Save local changes before rebase
1 parent: 0226096

File tree: 73 files changed (+211, -9296 lines)

applications/application_configs/md_ase.yaml (2 additions, 2 deletions)

@@ -7,8 +7,8 @@ defaults:
   - _self_
   - run: default
   - model: load_model
-  - task: md
-  - interface: ase
+  - task: md # opt, md
+  - calculator: ase
   - system: load_system # load_system, ase_create
   - logger: default
   - override hydra/job_logging: disabled

applications/application_configs/optimizer_ase.yaml (2 additions, 2 deletions)

@@ -7,8 +7,8 @@ defaults:
   - _self_
   - run: default
   - model: load_model
-  - task: opt
-  - interface: ase
+  - task: opt # opt, md
+  - calculator: ase
   - system: ase_create # load_system, ase_create
   - logger: default
   - override hydra/job_logging: disabled
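Both application configs make the same change: the Hydra defaults list now selects the ASE bridge through a "calculator" config group rather than the former "interface" group, and the "task" entries gain a comment noting the available choices (opt, md). As a quick way to see what this composes to, here is a minimal sketch, not the repository's actual entry point, that simply loads one of these configs and prints the merged result; the config path and name mirror the files above.

import hydra
from omegaconf import DictConfig, OmegaConf


@hydra.main(config_path="application_configs", config_name="md_ase", version_base=None)
def show(cfg: DictConfig) -> None:
    # After this commit the composed config should carry a "calculator" node
    # (resolved from the calculator config group) instead of an "interface" node.
    print(OmegaConf.to_yaml(cfg))


if __name__ == "__main__":
    show()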

applications/main.py (3 additions, 3 deletions)

@@ -7,8 +7,8 @@
 from hydra.utils import instantiate
 from omegaconf import DictConfig, OmegaConf
 
-from ppmat.predict import PPMatPredictor
-from ppmat.predict.structures import prepare_structures
+from ppmat.predictor import PPMatPredictor
+from ppmat.predictor.structures import build_init_structures
 from ppmat.utils import logger
 
 
@@ -51,7 +51,7 @@ def main(cfg: DictConfig):
     predictor.load_inference_model( interface_type=None )
 
     # Load structures
-    files, structures = prepare_structures(cfg, predictor)
+    files, structures = build_init_structures(cfg, predictor)
 
     if cfg.get("task") is not None:
         # Initialize the task
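The applications/main.py hunks above only touch the import paths (ppmat.predict becomes ppmat.predictor) and the structure-preparation helper (prepare_structures becomes build_init_structures). Below is a minimal sketch of the updated call sequence outside the Hydra entry point, using only the calls visible in this diff; the PPMatPredictor constructor is not shown in this commit, so passing the composed config to it is an assumption.

from hydra import compose, initialize

from ppmat.predictor import PPMatPredictor
from ppmat.predictor.structures import build_init_structures

# Compose the same application config that applications/main.py would receive.
with initialize(config_path="application_configs", version_base=None):
    cfg = compose(config_name="md_ase")

predictor = PPMatPredictor(cfg)  # assumed constructor; not part of this diff
predictor.load_inference_model(interface_type=None)  # as called in applications/main.py
files, structures = build_init_structures(cfg, predictor)  # renamed from prepare_structures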
File renamed without changes.
File renamed without changes.

ppmat/predict/base.py renamed to ppmat/predictor/base.py (2 additions, 2 deletions)

@@ -28,8 +28,8 @@
 from ppmat.utils import save_load
 from ppmat.utils import logger
 
-class PPMatPredictor:
-    """PPMaterial predictor.
+class Predictor:
+    """
 
     This class provides an interface for predicting properties of crystalline
     structures using pre-trained deep learning models.
File renamed without changes.

tests/dataset_jarvis.py (new file, 59 additions)

import os
import os.path as osp
import sys

__dir__ = os.path.dirname(os.path.abspath(__file__))  # ruff: noqa
sys.path.insert(0, os.path.abspath(os.path.join(__dir__, "..")))  # ruff: noqa

from ppmat.utils import logger
from ppmat.datasets import build_dataloader
from ppmat.datasets import set_signal_handlers


if __name__ == "__main__":

    # init logger
    logger_path = osp.join(sys.path[0], "test/create_dataset.log")
    logger.init_logger(log_file=logger_path)
    logger.info(f"Logger saved to {logger_path}")
    logger.info("Test creating a new dataset.")

    set_signal_handlers()
    data_cfg = {
        "dataset": {
            "__class_name__": "JarvisDataset",
            "__init_params__": {
                "path": osp.join(sys.path[0], "data/jarvis"),
                "jarvis_data_name": "dft_2d",
                "property_names": "formation_energy_peratom",  # change this to the label name you actually need
                "build_structure_cfg": {
                    "format": "jarvis",
                    "num_cpus": 10,
                },
                "build_graph_cfg": {
                    "__class_name__": "FindPointsInSpheres",
                    "__init_params__": {
                        "cutoff": 4.0,
                        "num_cpus": 10,
                    },
                },
                "cache_path": osp.join(sys.path[0], "data/jarvis"),
                # "overwrite": True,
            },
            "num_workers": 4,
            "use_shared_memory": False,
        },
        "sampler": {
            "__class_name__": "BatchSampler",
            "__init_params__": {
                "shuffle": True,
                "drop_last": True,
                "batch_size": 10,
            },
        },
    }
    logger.info(
        "Train data config:\n" + "\n".join(f"{k}: {v}" for k, v in data_cfg.items())
    )

    train_loader = build_dataloader(data_cfg)
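The script stops right after building the dataloader. For a quick smoke test, a few lines like the following could be appended inside the __main__ block (indented to match); this assumes the object returned by build_dataloader is a plain iterable and makes no assumption about the batch structure itself.

    # Optional smoke test: pull a few batches and log their types.
    for i, batch in enumerate(train_loader):
        logger.info(f"batch {i}: {type(batch)}")
        if i >= 2:
            break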

tests/dataset_mp.py (new file, 60 additions)

import os
import os.path as osp
import sys

__dir__ = os.path.dirname(os.path.abspath(__file__))  # ruff: noqa
sys.path.insert(0, os.path.abspath(os.path.join(__dir__, "..")))  # ruff: noqa

from ppmat.utils import logger
from ppmat.datasets import build_dataloader
from ppmat.datasets import set_signal_handlers


if __name__ == "__main__":

    # init logger
    logger_path = osp.join(sys.path[0], "test/create_dataset.log")
    logger.init_logger(log_file=logger_path)
    logger.info(f"Logger saved to {logger_path}")
    logger.info("Test creating a new dataset.")

    set_signal_handlers()
    train_data_cfg = {
        "dataset": {
            "__class_name__": "MP2024Dataset",
            # "__class_name__": "MP2018Dataset",
            "__init_params__": {
                "path": osp.join(sys.path[0], "data/mp2024_train_130k/mp2024_train.txt"),
                # "path": osp.join(sys.path[0], "data/mp2018_train_60k/mp.2018.6.1_val.json"),
                "property_names": ["formation_energy_per_atom"],  # change this to the label name you actually need
                "build_structure_cfg": {
                    "format": "dict",
                    "num_cpus": 10,
                },
                "build_graph_cfg": {
                    "__class_name__": "FindPointsInSpheres",
                    "__init_params__": {
                        "cutoff": 4.0,
                        "num_cpus": 10,
                    },
                },
                "cache_path": osp.join(sys.path[0], "data/mp2024_train_130k_cache_find_points_in_spheres_cutoff_4/mp2024_train"),
                # "cache_path": osp.join(sys.path[0], "data/mp2018_train_60k_cache_find_points_in_spheres_cutoff_4/mp.2018.6.1_val"),
            },
            "num_workers": 4,
            "use_shared_memory": False,
        },
        "sampler": {
            "__class_name__": "BatchSampler",
            "__init_params__": {
                "shuffle": True,
                "drop_last": True,
                "batch_size": 10,
            },
        },
    }
    logger.info(
        "Train data config:\n" + "\n".join(f"{k}: {v}" for k, v in train_data_cfg.items())
    )

    train_loader = build_dataloader(train_data_cfg)
