Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Define conformer single point calculation after optimization #766

Merged
merged 8 commits into from
Nov 4, 2024
4 changes: 3 additions & 1 deletion arc/checks/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,8 +36,10 @@
Optional[int]: The corresponding conformer or tsg index.
"""
i = None
if 'conformer' in job_name:
if 'conf_opt' in job_name:
i = int(job_name[9:])
elif 'conf_sp' in job_name:
i = int(job_name[8:])

Check warning on line 42 in arc/checks/common.py

View check run for this annotation

Codecov / codecov/patch

arc/checks/common.py#L42

Added line #L42 was not covered by tests
elif 'tsg' in job_name:
i = int(job_name[3:])
return i
6 changes: 3 additions & 3 deletions arc/checks/common_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,9 +36,9 @@ def test_get_i_from_job_name(self):
"""Test the get_i_from_job_name() function"""
self.assertIsNone(common.get_i_from_job_name(''))
self.assertIsNone(common.get_i_from_job_name('some_job_name'))
self.assertEqual(common.get_i_from_job_name('conformer3'), 3)
self.assertEqual(common.get_i_from_job_name('conformer33'), 33)
self.assertEqual(common.get_i_from_job_name('conformer3355'), 3355)
self.assertEqual(common.get_i_from_job_name('conf_opt_3'), 3)
self.assertEqual(common.get_i_from_job_name('conf_opt_33'), 33)
self.assertEqual(common.get_i_from_job_name('conf_opt_3355'), 3355)
self.assertEqual(common.get_i_from_job_name('tsg2'), 2)


Expand Down
4 changes: 2 additions & 2 deletions arc/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,8 +92,8 @@ def initialize_job_types(job_types: Optional[dict] = None,
if 'fine_grid' in job_types:
del job_types['fine_grid']

defaults_to_true = ['conformers', 'fine', 'freq', 'irc', 'opt', 'rotors', 'sp']
defaults_to_false = ['bde', 'onedmin', 'orbitals']
defaults_to_true = ['conf_opt', 'fine', 'freq', 'irc', 'opt', 'rotors', 'sp']
defaults_to_false = ['conf_sp', 'bde', 'onedmin', 'orbitals']
if job_types is None:
job_types = default_job_types
logger.info("Job types were not specified, using ARC's defaults")
Expand Down
5 changes: 3 additions & 2 deletions arc/common_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,13 +43,14 @@ def setUpClass(cls):
cls.maxDiff = None
cls.rmgdb = make_rmg_database_object()
load_families_only(cls.rmgdb)
cls.default_job_types = {'conformers': True,
cls.default_job_types = {'conf_opt': True,
'opt': True,
'fine': True,
'freq': True,
'sp': True,
'rotors': True,
'irc': True,
'conf_sp': False,
'orbitals': False,
'onedmin': False,
'bde': False,
Expand Down Expand Up @@ -388,7 +389,7 @@ def test_almost_equal_lists(self):

def test_initialize_job_with_given_job_type(self):
"""Test the initialize_job_types() function"""
job_types = {'conformers': False, 'opt': True, 'fine': True, 'freq': True, 'sp': False, 'rotors': False, 'irc': True}
job_types = {'conf_opt': False, 'opt': True, 'fine': True, 'freq': True, 'sp': False, 'rotors': False, 'irc': True}
job_types_expected = copy.deepcopy(self.default_job_types)
job_types_expected.update(job_types)
job_types_initialized = common.initialize_job_types(job_types)
Expand Down
17 changes: 8 additions & 9 deletions arc/job/adapter.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,8 @@ class JobTypeEnum(str, Enum):
The available job types are a finite set.
"""
composite = 'composite'
conformers = 'conformers' # conformer optimization (not generation)
conf_opt = 'conf_opt' # conformer optimization (not generation)
conf_sp = 'conf_sp' # conformer single point
freq = 'freq'
gen_confs = 'gen_confs' # conformer generation
irc = 'irc'
Expand Down Expand Up @@ -385,9 +386,9 @@ def determine_job_array_parameters(self):
if self.species is not None:
if len(self.species) > 1:
self.iterate_by.append('species')
if job_type == 'conformers':
if job_type == 'conf_opt':
if self.species is not None and sum(len(species.conformers) for species in self.species) > 10:
self.iterate_by.append('conformers')
self.iterate_by.append('conf_opt')
self.number_of_processes += sum([len(species.conformers) for species in self.species])
for species in self.species:
if job_type in ['sp', 'opt', 'freq', 'optfreq', 'composite', 'ornitals', 'onedmin', 'irc']:
Expand Down Expand Up @@ -456,7 +457,7 @@ def write_hdf5(self):
else:
for species in self.species:
data[species.label] = list()
if 'conformers' in self.iterate_by:
if 'conf_opt' in self.iterate_by:
for conformer in species.conformers:
data[species.label].append(DataPoint(charge=species.charge,
job_types=['opt'],
Expand Down Expand Up @@ -693,10 +694,8 @@ def _set_job_number(self):
self.job_num = job_num
# 2. Set other related attributes job_name and job_server_name.
self.job_server_name = self.job_server_name or 'a' + str(self.job_num)
if self.conformer is not None and (self.job_name is None or 'conformer_a' in self.job_name):
if self.job_name is not None:
logger.warning(f'Replacing job name {self.job_name} with conformer{self.conformer}')
self.job_name = f'conformer{self.conformer}'
if self.conformer is not None and self.job_name is None:
self.job_name = f'{self.job_type}_{self.conformer}_{self.job_server_name}'
elif self.tsg is not None and (self.job_name is None or 'tsg_a' in self.job_name):
if self.job_name is not None:
logger.warning(f'Replacing job name {self.job_name} with tsg{self.conformer}')
Expand Down Expand Up @@ -1097,7 +1096,7 @@ def _log_job_execution(self):
local = 'local '
else:
server = f' on {self.server}'
if 'conformer' in self.job_name or 'tsg' in self.job_name:
if 'conf_opt' in self.job_name or 'tsg' in self.job_name:
job_server_name = f' ({self.job_server_name})'
execution_type = {'incore': 'incore job', 'queue': 'queue job', 'pipe': 'job array (pipe)'}[self.execution_type]
pivots = f' for pivots {[[tor[1] + 1, tor[2] + 1] for tor in self.torsions]}' if self.torsions is not None else ''
Expand Down
7 changes: 4 additions & 3 deletions arc/job/adapter_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,8 @@ def test_job_enum(self):
def test_job_type_enum(self):
"""Test the JobTypeEnum class"""
self.assertEqual(JobTypeEnum('composite').value, 'composite')
self.assertEqual(JobTypeEnum('conformers').value, 'conformers')
self.assertEqual(JobTypeEnum('conf_opt').value, 'conf_opt')
self.assertEqual(JobTypeEnum('conf_sp').value, 'conf_sp')
self.assertEqual(JobTypeEnum('freq').value, 'freq')
self.assertEqual(JobTypeEnum('gen_confs').value, 'gen_confs')
self.assertEqual(JobTypeEnum('irc').value, 'irc')
Expand Down Expand Up @@ -123,7 +124,7 @@ def setUpClass(cls):
"""
cls.maxDiff = None
cls.job_1 = GaussianAdapter(execution_type='queue',
job_type='conformers',
job_type='conf_opt',
level=Level(method='cbs-qb3'),
project='test',
project_directory=os.path.join(ARC_PATH, 'arc', 'testing', 'test_JobAdapter'),
Expand Down Expand Up @@ -224,7 +225,7 @@ def setUpClass(cls):

def test_determine_job_array_parameters(self):
"""Test determining job array parameters"""
self.assertEqual(self.job_1.iterate_by, ['species', 'conformers'])
self.assertEqual(self.job_1.iterate_by, ['species', 'conf_opt'])
self.assertEqual(self.job_1.number_of_processes, 3 * 6)
self.assertEqual(self.job_1.workers, 4)

Expand Down
4 changes: 2 additions & 2 deletions arc/job/adapters/cfour.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,12 +212,12 @@ def write_input_file(self) -> None:
input_dict['keywords'] = ''

keywords = list()
if self.job_type in ['opt', 'conformers']:
if self.job_type in ['opt', 'conf_opt']:
keywords.append('METHOD=TS' if self.is_ts else 'METHOD=MANR')
elif self.job_type in ['freq', 'optfreq', 'scan']:
raise NotImplementedError(f'CFour cannot execute frequency computations or scans, '
f'got job type {self.job_type}')
elif self.job_type == 'sp':
elif self.job_type in ['sp', 'conf_sp']:
pass

input_dict['keywords'] = ','.join(key for key in keywords)
Expand Down
4 changes: 2 additions & 2 deletions arc/job/adapters/gaussian.py
Original file line number Diff line number Diff line change
Expand Up @@ -275,7 +275,7 @@ def write_input_file(self) -> None:
max_c = int(match.group(1))
break

if self.job_type in ['opt', 'conformers', 'optfreq', 'composite']:
if self.job_type in ['opt', 'conf_opt', 'optfreq', 'composite']:
keywords = ['ts', 'calcfc', 'noeigentest', f'maxcycle={max_c}'] if self.is_ts else ['calcfc']
if self.level.method in ['rocbs-qb3']:
# There are no analytical 2nd derivatives (FC) for this method.
Expand Down Expand Up @@ -312,7 +312,7 @@ def write_input_file(self) -> None:
elif self.job_type == 'optfreq':
input_dict['job_type_2'] = 'freq IOp(7/33=1)'

elif self.job_type == 'sp':
elif self.job_type in ['sp', 'conf_sp']:
input_dict['job_type_1'] = f'integral=(grid=ultrafine, {integral_algorithm})'
if input_dict['trsh']:
input_dict['trsh'] += ' '
Expand Down
4 changes: 2 additions & 2 deletions arc/job/adapters/molpro.py
Original file line number Diff line number Diff line change
Expand Up @@ -233,15 +233,15 @@ def write_input_file(self) -> None:
input_dict['restricted'] = 'u'

# Job type specific options
if self.job_type in ['opt', 'optfreq', 'conformers']:
if self.job_type in ['opt', 'optfreq', 'conf_opt']:
keywords = ['optg', 'root=2', 'method=qsd', 'readhess', "savexyz='geometry.xyz'"] if self.is_ts \
else ['optg', "savexyz='geometry.xyz'"]
input_dict['job_type_1'] = ', '.join(key for key in keywords)

elif self.job_type in ['freq', 'optfreq']:
input_dict['job_type_2'] = '{frequencies;\nthermo;\nprint,HESSIAN,thermo;}'

elif self.job_type == 'sp':
elif self.job_type in ['sp', 'conf_sp']:
pass

elif self.job_type == 'scan':
Expand Down
4 changes: 2 additions & 2 deletions arc/job/adapters/orca.py
Original file line number Diff line number Diff line change
Expand Up @@ -259,7 +259,7 @@ def write_input_file(self) -> None:
input_dict['restricted'] = 'r' if is_restricted(self) else 'u'

# Job type specific options
if self.job_type in ['opt', 'conformers', 'optfreq']:
if self.job_type in ['opt', 'conf_opt', 'optfreq']:
opt_convergence_key = 'fine_opt_convergence' if self.fine else 'opt_convergence'
opt_convergence = self.args['keyword'].get(opt_convergence_key, '').lower() or \
orca_default_options_dict['opt']['keyword'].get(opt_convergence_key, '').lower()
Expand Down Expand Up @@ -293,7 +293,7 @@ def write_input_file(self) -> None:
logger.info('Using numerical frequencies calculation in Orca. Note: This job might therefore be '
'time-consuming.')

elif self.job_type == 'sp':
elif self.job_type in ['sp', 'conf_sp']:
input_dict['job_type_1'] = 'sp'

elif self.job_type == 'scan':
Expand Down
4 changes: 2 additions & 2 deletions arc/job/adapters/qchem.py
Original file line number Diff line number Diff line change
Expand Up @@ -224,7 +224,7 @@ def write_input_file(self) -> None:
input_dict['unrestricted'] = 'True' if not is_restricted(self) else 'False'

# Job type specific options
if self.job_type in ['opt', 'conformers', 'optfreq', 'orbitals', 'scan']:
if self.job_type in ['opt', 'conf_opt', 'optfreq', 'orbitals', 'scan']:
input_dict['job_type_1'] = 'ts' if self.is_ts else 'opt'
if self.fine:
input_dict['fine'] = '\n GEOM_OPT_TOL_GRADIENT 15' \
Expand All @@ -238,7 +238,7 @@ def write_input_file(self) -> None:
elif self.job_type == 'freq':
input_dict['job_type_1'] = 'freq'

elif self.job_type == 'sp':
elif self.job_type in ['sp', 'conf_sp']:
input_dict['job_type_1'] = 'sp'

elif self.job_type == 'orbitals':
Expand Down
2 changes: 1 addition & 1 deletion arc/job/adapters/scripts/ob_script.py
Original file line number Diff line number Diff line change
Expand Up @@ -176,7 +176,7 @@ def main():
sp = run_sp(mol=mol, ff_method=ff_method)
save_output_file(path = str(args.yml_path), key="sp", val=sp)

if job_type in ['opt', 'conformers', 'directed_scan']:
if job_type in ['opt', 'conf_opt', 'directed_scan']:
constraints = input_dict["constraints"] if "constraints" in input_dict.keys() else None
opt_xyz, sp = constraint_opt(mol=mol, constraints_dict=constraints, ff_method=ff_method)
save_output_file(path = str(args.yml_path), content_dict = {"opt_xyz" : opt_xyz, "sp" : sp})
Expand Down
2 changes: 1 addition & 1 deletion arc/job/adapters/scripts/tani_script.py
Original file line number Diff line number Diff line change
Expand Up @@ -345,7 +345,7 @@ def main():
forces = run_force(xyz=xyz, device=device, model=model)
save_output_file(path = str(args.yml_path), key="force", val=forces)

elif job_type in ['opt', 'conformers', 'directed_scan', 'optfreq']:
elif job_type in ['opt', 'conf_opt', 'directed_scan', 'optfreq']:
constraints = input_dict["constraints"] if "constraints" in input_dict.keys() else None
opt_xyz = run_opt(xyz=xyz, constraints=constraints, fmax=input_dict["fmax"], model=model,
steps=input_dict["steps"] if "steps" in input_dict.keys() else None, engine=input_dict["engine"])
Expand Down
4 changes: 2 additions & 2 deletions arc/job/adapters/terachem.py
Original file line number Diff line number Diff line change
Expand Up @@ -239,7 +239,7 @@ def write_input_file(self) -> None:
input_dict['dispersion'] = 'no'

# Job type specific options
if self.job_type in ['conformer', 'opt', 'scan']:
if self.job_type in ['conf_opt', 'opt', 'scan']:
input_dict['job_type_1'] = 'minimize\n' \
'new_minimizer yes'
if self.fine:
Expand All @@ -250,7 +250,7 @@ def write_input_file(self) -> None:
elif self.job_type == 'freq':
input_dict['job_type_1'] = 'frequencies'

elif self.job_type == 'sp':
elif self.job_type in ['sp', 'conf_sp']:
input_dict['job_type_1'] = 'energy'

if self.job_type == 'scan' \
Expand Down
6 changes: 3 additions & 3 deletions arc/job/adapters/xtb_adapter.py
Original file line number Diff line number Diff line change
Expand Up @@ -216,7 +216,7 @@ def write_input_file(self) -> None:
directives, block = '', ''
uhf = self.species[0].number_of_radicals or self.multiplicity - 1

if self.job_type in ['opt', 'conformers', 'scan']:
if self.job_type in ['opt', 'conf_opt', 'scan']:
directives += ' --opt'
directives += self.add_accuracy()
if self.constraints and self.job_type != 'scan':
Expand All @@ -234,7 +234,7 @@ def write_input_file(self) -> None:
elif self.job_type in ['fukui']:
directives += ' --vfukui'

elif self.job_type == 'sp':
elif self.job_type in ['sp', 'conf_sp']:
pass

directives += f' --{self.level.method}' if self.level is not None and self.level.method != 'xtb' else ' --gfn2'
Expand Down Expand Up @@ -291,7 +291,7 @@ def is_opt_ts_job(self) -> bool:
bool: Whether this is a transition state geometry optimization job.
"""
if self.species is not None and len(self.species) and self.species[0].is_ts \
and self.job_type in ['opt', 'conformers']:
and self.job_type in ['opt', 'conf_opt']:
return True
return False

Expand Down
4 changes: 2 additions & 2 deletions arc/job/trsh.py
Original file line number Diff line number Diff line change
Expand Up @@ -229,7 +229,7 @@ def determine_ess_status(output_path: str,
done = True
# If this is an opt job, we must also check that the max num of cycles hasn't been reached,
# so don't break yet.
if 'opt' not in job_type and 'conformer' not in job_type and 'ts' not in job_type:
if 'opt' not in job_type and 'conf_opt' not in job_type and 'ts' not in job_type:
break
elif 'SCF failed' in line:
keywords = ['SCF']
Expand All @@ -245,7 +245,7 @@ def determine_ess_status(output_path: str,
elif 'Invalid charge/multiplicity combination' in line:
raise SpeciesError(f'The multiplicity and charge combination for species '
f'{species_label} are wrong.')
if 'opt' in job_type or 'conformer' in job_type or 'ts' in job_type:
if 'opt' in job_type or 'conf_opt' in job_type or 'ts' in job_type:
if 'MAXIMUM OPTIMIZATION CYCLES REACHED' in line:
keywords = ['MaxOptCycles']
error = 'Maximum optimization cycles reached.'
Expand Down
Loading
Loading