Skip to content
12 changes: 7 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,7 @@ The instructions regarding global configuration, [dflow](https://github.com/deep
| pool_size | Int | 1 | For multi tasks per parallel group, the pool size of multiprocessing pool to handle each task (1 for serial, -1 for infinity) |
| upload_python_package | Optional[List] | None | Additional python packages required in the container |
| debug_pool_workers | Int | 1 | Pool size of parallel tasks running in the debug mode |
| submit_only | Bool | False | Submit the workflow only, without automatically retrieving results |

* **Dflow config**
| Key words | Data structure | Default | Description |
Expand Down Expand Up @@ -251,10 +252,11 @@ Below are three examples (for detailed explanations of each parameter, please re
| vol_abs | Bool | False | Whether to treat vol_start and vol_end as absolute volume, default = False |

##### 3.1.2.2. Elastic
| Key words | Data structure | Example | Description |
| :------------ | ----- |-----------------------------------------------------| ------------------- |
| norm_deform | Float | 0.01 | The deformation in xx, yy, zz, defaul = 1e-2 |
| shear_deform | Float | 0.01 | The deformation in other directions, default = 1e-2 |
| Key words | Data structure | Example | Description |
|:-------------|----------------|---------|----------------------------------------------------|
| norm_deform  | Float          | 0.01    | The deformation in xx, yy, zz, default = 1e-2      |
| shear_deform | Float | 0.01 | The deformation in other directions, default = 1e-2 |
| conventional | Bool           | False   | Whether to adopt the conventional cell for deformation |

##### 3.1.2.3. Surface
| Key words | Data structure | Example | Description |
Expand Down Expand Up @@ -390,7 +392,7 @@ On the other hand, the advantage of **Finite Displacement Method** lies in its v

### 3.2. Submission
#### 3.2.1. Workflow Submission
APEX will execute a specific dflow workflow upon each invocation of the command in the format: `apex submit [-h] [-c [CONFIG]] [-w WORK [WORK ...]] [-d] [-f {relax,props,joint}] parameter [parameter ...]`. The type of workflow and calculation method will be automatically determined by APEX based on the parameter file provided by users. Additionally, users can specify the **workflow type**, **configuration JSON file**, and **work directory** through an optional argument (Run `apex submit -h` for help). Here is an example to submit a `joint` workflow:
APEX will execute a specific dflow workflow upon each invocation of the command in the format: `apex submit [-h] [-c [CONFIG]] [-w WORK [WORK ...]] [-d] [-s] [-f {relax,props,joint}] parameter [parameter ...]`. The type of workflow and calculation method will be automatically determined by APEX based on the parameter file provided by users. Additionally, users can specify the **workflow type**, **configuration JSON file**, and **work directory** through an optional argument (Run `apex submit -h` for further help). Here is an example to submit a `joint` workflow:
```shell
apex submit param_relax.json param_props.json -c ./global_bohrium.json -w 'dp_demo_0?' 'eam_demo'
```
Expand Down
16 changes: 12 additions & 4 deletions apex/archive.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,8 @@
json2dict,
update_dict,
return_prop_list,
load_config_file
load_config_file,
generate_random_string
)
from apex.database.DatabaseFactory import DatabaseFactory
from apex.config import Config
Expand Down Expand Up @@ -162,18 +163,25 @@ def archive_workdir(relax_param, props_param, config, work_dir, flow_type):
if props_param and flow_type != 'relax':
store.sync_props(props_param, config.archive_tasks)

# define archive key
data_id = config.archive_key if config.archive_key else str(store.work_dir_path)

dump_file = os.path.join(store.work_dir_path, 'all_result.json')
default_id = generate_random_string(10)
if os.path.isfile(dump_file):
logging.info(msg='all_result.json exists, and will be updated.')
orig_data = loadfn(dump_file)
try:
default_id = orig_data['archive_key']
except KeyError:
store.result_data['archive_key'] = default_id
update_dict(orig_data, store.result_data, depth=2)
dumpfn(orig_data, dump_file, indent=4)
else:
store.result_data['archive_key'] = default_id
dumpfn(store.result_data, dump_file, indent=4)

# try to get documented key id from all_result.json
# define archive key
data_id = config.archive_key if config.archive_key else default_id

if config.database_type != 'local':
data_json_str = json.dumps(store.result_data, cls=MontyEncoder, indent=4)
data_dict = json.loads(data_json_str)
Expand Down
1 change: 1 addition & 0 deletions apex/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ class Config:
abacus_image_name: str = None
abacus_run_command: str = None
is_bohrium_dflow: bool = False
submit_only: bool = False

database_type: str = 'local'
archive_method: str = 'sync'
Expand Down
9 changes: 8 additions & 1 deletion apex/core/calculator/ABACUS.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,15 @@ def __init__(self, inter_parameter, path_to_poscar):

def make_potential_files(self, output_dir):
stru = os.path.abspath(os.path.join(output_dir, "STRU"))
poscar = os.path.abspath(os.path.join(output_dir, "POSCAR"))
if not os.path.isfile(stru):
raise FileNotFoundError("No file %s" % stru)
logging.warning(msg='No STRU found...')
if os.path.isfile(poscar):
logging.info(msg=f'will convert {poscar} into STRU...')
sys = dpdata.System(poscar, fmt="vasp/poscar")
sys.to("abacus/stru", stru)
else:
raise FileNotFoundError("No file %s" % stru)
stru_data = abacus_scf.get_abacus_STRU(stru)
atom_names = stru_data["atom_names"]
orb_files = stru_data["orb_files"]
Expand Down
9 changes: 7 additions & 2 deletions apex/core/calculator/VASP.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,10 +29,15 @@ def make_potential_files(self, output_dir):

poscar = os.path.abspath(os.path.join(output_dir, "POSCAR"))
pos_str = Structure.from_file(poscar)
ele_pos_list = sorted({ii.as_dict()["element"] for ii in pos_str.species})
ele_pos_list_tmp = [ii.as_dict()["element"] for ii in pos_str.species]

ele_pos_list = [ele_pos_list_tmp[0]]
for ii in range(1, len(ele_pos_list_tmp)):
if not ele_pos_list_tmp[ii] == ele_pos_list_tmp[ii - 1]:
ele_pos_list.append(ele_pos_list_tmp[ii])

def write_potcar(ele_list, potcar_path):
with open(potcar_path,"w") as fp:
with open(potcar_path, "w") as fp:
for element in ele_list:
potcar_file = os.path.join(self.potcar_prefix, self.potcars[element])
with open(potcar_file,"r") as fc:
Expand Down
14 changes: 12 additions & 2 deletions apex/core/calculator/lib/abacus_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -469,7 +469,8 @@ def check_stru_fixed(struf, fixed):
return True


def modify_stru_path(strucf, tpath):
def modify_stru_path(strucf, tpath, inter):
pp_dict = inter["potcars"]
if tpath[-1] != "/":
tpath += "/"
with open(strucf) as f1:
Expand All @@ -491,7 +492,16 @@ def modify_stru_path(strucf, tpath):
break
elif lines[j].strip() == "":
continue
ppfile = tpath + os.path.split(lines[j].split()[file_numb])[1]
try:
_ = lines[j].split()[file_numb]
except IndexError:
line_split = lines[j].split()
ele_name = line_split[0]
pp_name = pp_dict[ele_name]
line_split.append(pp_name)
lines[j] = " ".join(line_split) + "\n"
finally:
ppfile = tpath + os.path.split(lines[j].split()[file_numb])[1]
tmp_line = ""
for k in range(file_numb):
tmp_line += lines[j].split()[k] + " "
Expand Down
18 changes: 15 additions & 3 deletions apex/core/common_equi.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
import os
import shutil
import logging
import dpdata
from monty.serialization import dumpfn
from pymatgen.core.structure import Structure
from apex.core.calculator.lib import abacus_utils
Expand Down Expand Up @@ -74,9 +75,20 @@ def make_equi(confs, inter_param, relax_param):
poscar = os.path.abspath(os.path.join(ii, "POSCAR"))
POSCAR = "POSCAR"
if inter_param["type"] == "abacus":
shutil.copyfile(os.path.join(ii, "STRU"), os.path.join(ii, "STRU.bk"))
abacus_utils.modify_stru_path(os.path.join(ii, "STRU"), "pp_orb/")
poscar = os.path.abspath(os.path.join(ii, "STRU"))
stru = os.path.join(ii, "STRU")
# if no STRU found, try to convert POSCAR to STRU
if not os.path.isfile(stru):
logging.warning(msg='No STRU found...')
if os.path.isfile(poscar):
logging.info(msg=f'will convert {poscar} into STRU...')
sys = dpdata.System(poscar, fmt="vasp/poscar")
sys.to("abacus/stru", stru)
else:
raise FileNotFoundError("No file %s" % stru)

shutil.copyfile(stru, os.path.join(ii, "STRU.bk"))
abacus_utils.modify_stru_path(stru, "pp_orb/", inter_param)
poscar = os.path.abspath(stru)
POSCAR = "STRU"
if not os.path.exists(poscar):
raise FileNotFoundError("no configuration for APEX")
Expand Down
10 changes: 10 additions & 0 deletions apex/core/property/Elastic.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import re
from shutil import copyfile

import numpy as np
from monty.serialization import dumpfn, loadfn
from pymatgen.analysis.elasticity.elastic import ElasticTensor
from pymatgen.analysis.elasticity.strain import DeformedStructureSet, Strain
Expand All @@ -15,6 +16,7 @@
from apex.core.calculator.lib import vasp_utils
from apex.core.calculator.lib import abacus_scf
from apex.core.property.Property import Property
from apex.core.structure import StructureInfo
from apex.core.refine import make_refine
from apex.core.calculator.lib.vasp_utils import incar_upper
from dflow.python import upload_packages
Expand All @@ -28,6 +30,8 @@ def __init__(self, parameter, inter_param=None):
self.norm_deform = parameter["norm_deform"]
parameter.setdefault("shear_deform", 1e-2)
self.shear_deform = parameter["shear_deform"]
parameter.setdefault("conventional", False)
self.conventional = parameter["conventional"]
parameter.setdefault("cal_type", "relaxation")
self.cal_type = parameter["cal_type"]
default_cal_setting = {
Expand Down Expand Up @@ -131,6 +135,12 @@ def make_confs(self, path_to_work, path_to_equi, refine=False):
ss = abacus_utils.stru2Structure(equi_contcar)
else:
ss = Structure.from_file(equi_contcar)
# find conventional cell
if self.conventional:
st = StructureInfo(ss)
ss = st.conventional_structure
ss.to(os.path.join(path_to_work, "POSCAR.conv"), "POSCAR")

dfm_ss = DeformedStructureSet(
ss,
symmetry=False,
Expand Down
23 changes: 15 additions & 8 deletions apex/flow.py
Original file line number Diff line number Diff line change
Expand Up @@ -245,6 +245,7 @@ def submit_relax(
upload_path: Union[os.PathLike, str],
download_path: Union[os.PathLike, str],
relax_parameter: dict,
submit_only: bool = False,
labels: Optional[dict] = None
) -> str:
self.upload_path = upload_path
Expand All @@ -258,8 +259,10 @@ def submit_relax(
self.workflow.add(relaxation)
self.workflow.submit()
self.dump_flow_id()
# Wait for and retrieve relaxation
self._monitor_relax()
if not submit_only:
# Wait for and retrieve relaxation
self._monitor_relax()

return self.workflow.id

@json2dict
Expand All @@ -268,6 +271,7 @@ def submit_props(
upload_path: Union[os.PathLike, str],
download_path: Union[os.PathLike, str],
props_parameter: dict,
submit_only: bool = False,
labels: Optional[dict] = None
) -> str:
self.upload_path = upload_path
Expand All @@ -281,8 +285,9 @@ def submit_props(
self.workflow.add(subprops_list)
self.workflow.submit()
self.dump_flow_id()
# wait for and retrieve sub-property flows
self._monitor_props(subprops_key_list)
if not submit_only:
# wait for and retrieve sub-property flows
self._monitor_props(subprops_key_list)

return self.workflow.id

Expand All @@ -293,6 +298,7 @@ def submit_joint(
download_path: Union[os.PathLike, str],
relax_parameter: dict,
props_parameter: dict,
submit_only: bool = False,
labels: Optional[dict] = None
) -> str:
self.upload_path = upload_path
Expand All @@ -312,9 +318,10 @@ def submit_joint(
self.workflow.add(subprops_list)
self.workflow.submit()
self.dump_flow_id()
# Wait for and retrieve relaxation
self._monitor_relax()
# Wait for and retrieve sub-property flows
self._monitor_props(subprops_key_list)
if not submit_only:
# Wait for and retrieve relaxation
self._monitor_relax()
# Wait for and retrieve sub-property flows
self._monitor_props(subprops_key_list)

return self.workflow.id
18 changes: 13 additions & 5 deletions apex/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,11 @@ def parse_args():
action="store_true",
help="(Optional) Run APEX workflow via local debug mode"
)
parser_submit.add_argument(
"-s", "--submit_only",
action="store_true",
help="(Optional) Submit workflow only without automatic result retrieving"
)
parser_submit.add_argument(
'-f', "--flow",
choices=['relax', 'props', 'joint'],
Expand Down Expand Up @@ -460,9 +465,9 @@ def config_dflow(config_file: os.PathLike) -> None:
# config dflow_config and s3_config
config_dict = load_config_file(config_file)
wf_config = Config(**config_dict)
wf_config.config_dflow(wf_config.dflow_config_dict)
wf_config.config_bohrium(wf_config.bohrium_config_dict)
wf_config.config_s3(wf_config.dflow_s3_config_dict)
Config.config_dflow(wf_config.dflow_config_dict)
Config.config_bohrium(wf_config.bohrium_config_dict)
Config.config_s3(wf_config.dflow_s3_config_dict)


def format_print_table(t: List[List[str]]):
Expand All @@ -488,7 +493,7 @@ def format_time_delta(td: datetime.timedelta) -> str:


def get_id_from_record(work_dir: os.PathLike, operation_name: str = None) -> str:
logging.info(msg='No workflow_id is provided, will retrieve the latest workflow')
logging.info(msg='No workflow_id is provided, will employ the latest workflow')
workflow_log = os.path.join(work_dir, '.workflow.log')
assert os.path.isfile(workflow_log), \
'No workflow_id is provided and no .workflow.log file found in work_dir'
Expand All @@ -514,13 +519,14 @@ def main():
logging.basicConfig(level=logging.INFO)
# parse args
parser, args = parse_args()
header()
if args.cmd == 'submit':
header()
submit_from_args(
parameters=args.parameter,
config_file=args.config,
work_dirs=args.work,
indicated_flow_type=args.flow,
submit_only=args.submit_only,
is_debug=args.debug
)
elif args.cmd == "list":
Expand Down Expand Up @@ -715,6 +721,7 @@ def main():
else:
logging.warning(f"Step {key} with status: {step['phase']} will be skipping...({task_left} more left)")
elif args.cmd == 'do':
header()
do_step_from_args(
parameter=args.parameter,
machine_file=args.config,
Expand All @@ -732,6 +739,7 @@ def main():
is_result=args.result
)
elif args.cmd == 'report':
header()
report_from_args(
config_file=args.config,
path_list=args.work,
Expand Down
Loading