
1"""Defines class/functions to write input and parse output for FHI-aims.""" 

2import os 

3import re 

4import time 

5import warnings 

6from pathlib import Path 

7from typing import Union 

8 

9import numpy as np 

10from ase import Atom, Atoms 

11from ase.calculators.calculator import kpts2mp 

12from ase.calculators.singlepoint import SinglePointDFTCalculator 

13from ase.constraints import FixAtoms, FixCartesian 

14from ase.data import atomic_numbers 

15from ase.io import ParseError 

16from ase.units import Ang, fs 

17from ase.utils import lazymethod, lazyproperty, reader, writer 

18 

19v_unit = Ang / (1000.0 * fs) 
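# Velocities in geometry.in are given in Angstrom/ps (the FHI-aims
# convention), so multiplying a parsed value by v_unit converts it to ASE's
# internal velocity unit; dividing by v_unit converts back when writing.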

20 

21LINE_NOT_FOUND = object() 

22 

23 

24class AimsParseError(Exception): 

25 """Exception raised if an error occurs when parsing an Aims output file""" 

26 

27 def __init__(self, message): 

28 self.message = message 

29 super().__init__(self.message) 

30 

31 

32# Read aims geometry files 

33@reader 

34def read_aims(fd, apply_constraints=True): 

35 """Import FHI-aims geometry type files. 

36 

37 Reads unitcell, atom positions and constraints from 

38 a geometry.in file. 

39 

40 If geometric constraints (symmetry parameters) are present in the file,

41 they are attached to the returned Atoms object as parametric constraints.

42 """ 

43 

44 lines = fd.readlines() 

45 return parse_geometry_lines(lines, apply_constraints=apply_constraints) 

46 

47 

48def parse_geometry_lines(lines, apply_constraints=True): 

49 

50 from ase import Atoms 

51 from ase.constraints import ( 

52 FixAtoms, 

53 FixCartesian, 

54 FixScaledParametricRelations, 

55 FixCartesianParametricRelations, 

56 ) 

57 

58 atoms = Atoms() 

59 

60 positions = [] 

61 cell = [] 

62 symbols = [] 

63 velocities = [] 

64 magmoms = [] 

65 symmetry_block = [] 

66 charges = [] 

67 fix = [] 

68 fix_cart = [] 

69 xyz = np.array([0, 0, 0]) 

70 i = -1 

71 n_periodic = -1 

72 periodic = np.array([False, False, False]) 

73 cart_positions, scaled_positions = False, False 

74 for line in lines: 

75 inp = line.split() 

76 if inp == []: 

77 continue 

78 if inp[0] in ["atom", "atom_frac"]: 

79 

80 if inp[0] == "atom": 

81 cart_positions = True 

82 else: 

83 scaled_positions = True 

84 

85 if xyz.all(): 

86 fix.append(i) 

87 elif xyz.any(): 

88 fix_cart.append(FixCartesian(i, xyz)) 

89 floatvect = float(inp[1]), float(inp[2]), float(inp[3]) 

90 positions.append(floatvect) 

91 symbols.append(inp[4]) 

92 magmoms.append(0.0) 

93 charges.append(0.0) 

94 xyz = np.array([0, 0, 0]) 

95 i += 1 

96 

97 elif inp[0] == "lattice_vector": 

98 floatvect = float(inp[1]), float(inp[2]), float(inp[3]) 

99 cell.append(floatvect) 

100 n_periodic = n_periodic + 1 

101 periodic[n_periodic] = True 

102 

103 elif inp[0] == "initial_moment": 

104 magmoms[-1] = float(inp[1]) 

105 

106 elif inp[0] == "initial_charge": 

107 charges[-1] = float(inp[1]) 

108 

109 elif inp[0] == "constrain_relaxation": 

110 if inp[1] == ".true.": 

111 fix.append(i) 

112 elif inp[1] == "x": 

113 xyz[0] = 1 

114 elif inp[1] == "y": 

115 xyz[1] = 1 

116 elif inp[1] == "z": 

117 xyz[2] = 1 

118 

119 elif inp[0] == "velocity": 

120 floatvect = [v_unit * float(line) for line in inp[1:4]] 

121 velocities.append(floatvect) 

122 

123 elif inp[0] in [ 

124 "symmetry_n_params", 

125 "symmetry_params", 

126 "symmetry_lv", 

127 "symmetry_frac", 

128 ]: 

129 symmetry_block.append(" ".join(inp)) 

130 

131 if xyz.all(): 

132 fix.append(i) 

133 elif xyz.any(): 

134 fix_cart.append(FixCartesian(i, xyz)) 

135 

136 if cart_positions and scaled_positions: 

137 raise Exception( 

138 "Can't specify atom positions with mixture of " 

139 "Cartesian and fractional coordinates" 

140 ) 

141 elif scaled_positions and periodic.any(): 

142 atoms = Atoms( 

143 symbols, 

144 scaled_positions=positions, 

145 cell=cell, 

146 pbc=periodic) 

147 else: 

148 atoms = Atoms(symbols, positions) 

149 

150 if len(velocities) > 0: 

151 if len(velocities) != len(positions): 

152 raise Exception( 

153 "Number of positions and velocities have to coincide.") 

154 atoms.set_velocities(velocities) 

155 

156 fix_params = [] 

157 

158 if len(symmetry_block) > 5: 

159 params = symmetry_block[1].split()[1:] 

160 

161 lattice_expressions = [] 

162 lattice_params = [] 

163 

164 atomic_expressions = [] 

165 atomic_params = [] 

166 

167 n_lat_param = int(symmetry_block[0].split(" ")[2]) 

168 

169 lattice_params = params[:n_lat_param] 

170 atomic_params = params[n_lat_param:] 

171 

172 for ll, line in enumerate(symmetry_block[2:]): 

173 expression = " ".join(line.split(" ")[1:]) 

174 if ll < 3: 

175 lattice_expressions += expression.split(",") 

176 else: 

177 atomic_expressions += expression.split(",") 

178 

179 fix_params.append( 

180 FixCartesianParametricRelations.from_expressions( 

181 list(range(3)), 

182 lattice_params, 

183 lattice_expressions, 

184 use_cell=True, 

185 ) 

186 ) 

187 

188 fix_params.append( 

189 FixScaledParametricRelations.from_expressions( 

190 list(range(len(atoms))), atomic_params, atomic_expressions 

191 ) 

192 ) 

193 

194 if any(magmoms): 

195 atoms.set_initial_magnetic_moments(magmoms) 

196 if any(charges): 

197 atoms.set_initial_charges(charges) 

198 

199 if periodic.any(): 

200 atoms.set_cell(cell) 

201 atoms.set_pbc(periodic) 

202 if len(fix): 

203 atoms.set_constraint([FixAtoms(indices=fix)] + fix_cart + fix_params) 

204 else: 

205 atoms.set_constraint(fix_cart + fix_params) 

206 

207 if fix_params and apply_constraints: 

208 atoms.set_positions(atoms.get_positions()) 

209 return atoms 

210 
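# Illustrative sketch (hypothetical two-line geometry.in content) of how the
# parser above is used; only parse_geometry_lines from this module is assumed.
#
#     >>> demo_lines = [
#     ...     "atom 0.0 0.0 0.0  H\n",
#     ...     "atom 0.0 0.0 0.74 H\n",
#     ... ]
#     >>> atoms = parse_geometry_lines(demo_lines)
#     >>> len(atoms), atoms.get_chemical_symbols()
#     (2, ['H', 'H'])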

211 

212def get_aims_header(): 

213 """Returns the header for aims input files""" 

214 lines = ["#" + "=" * 79] 

215 for line in [ 

216 "Created using the Atomic Simulation Environment (ASE)", 

217 time.asctime(), 

218 ]: 

219 lines.append("# " + line + "\n") 

220 return lines 

221 

222 

223# Write aims geometry files 

224@writer 

225def write_aims( 

226 fd, 

227 atoms, 

228 scaled=False, 

229 geo_constrain=False, 

230 write_velocities=False, 

231 velocities=False, 

232 ghosts=None, 

233 info_str=None, 

234 wrap=False, 

235): 

236 """Method to write FHI-aims geometry files. 

237 

238 Writes the atom positions and constraints (FixAtoms and FixCartesian

239 are supported, plus parametric constraints when geo_constrain is True).

240 

241 Args: 

242 fd: file object 

243 File to output structure to 

244 atoms: ase.atoms.Atoms 

245 structure to output to the file 

246 scaled: bool 

247 If True use fractional coordinates instead of Cartesian coordinates 

248 geo_constrain: bool

249 If True, write the parametric (symmetry) constraints defined in

250 :arxiv:`1908.01610`

251 write_velocities: bool 

252 If True add the atomic velocity vectors to the file 

253 velocities: bool 

254 Deprecated alias for `write_velocities`; despite the name this is

255 a bool, not an array of velocities

256 ghosts: list of Atoms 

257 A list of ghost atoms for the system 

258 info_str: str 

259 A string to be added to the header of the file 

260 wrap: bool 

261 Wrap atom positions to cell before writing 

262 """ 

263 

264 from ase.constraints import FixAtoms, FixCartesian 

265 

266 if scaled and not np.all(atoms.pbc): 

267 raise ValueError( 

268 "Fractional coordinates (scaled=True) were requested, but "

269 "the system is not fully periodic")

270 

271 if geo_constrain: 

272 if not scaled and np.all(atoms.pbc): 

273 warnings.warn( 

274 "Setting scaled to True because a symmetry_block is detected." 

275 ) 

276 scaled = True 

277 elif not np.all(atoms.pbc): 

278 warnings.warn( 

279 "Parametric constraints can only be used in periodic systems."

280 ) 

281 geo_constrain = False 

282 

283 for line in get_aims_header(): 

284 fd.write(line + "\n") 

285 

286 # If writing additional information is requested via info_str: 

287 if info_str is not None: 

288 fd.write("\n# Additional information:\n") 

289 if isinstance(info_str, list): 

290 fd.write("\n".join(["# {}".format(s) for s in info_str])) 

291 else: 

292 fd.write("# {}".format(info_str)) 

293 fd.write("\n") 

294 

295 fd.write("#=======================================================\n") 

296 

297 i = 0 

298 if atoms.get_pbc().any(): 

299 for n, vector in enumerate(atoms.get_cell()): 

300 fd.write("lattice_vector ") 

301 for i in range(3): 

302 fd.write("%16.16f " % vector[i]) 

303 fd.write("\n") 

304 fix_cart = np.zeros([len(atoms), 3]) 

305 

306 if atoms.constraints: 

307 for constr in atoms.constraints: 

308 if isinstance(constr, FixAtoms): 

309 fix_cart[constr.index] = [1, 1, 1] 

310 elif isinstance(constr, FixCartesian): 

311 fix_cart[constr.index] = -constr.mask.astype(int) + 1 

312 

313 if ghosts is None: 

314 ghosts = np.zeros(len(atoms)) 

315 else: 

316 assert len(ghosts) == len(atoms) 

317 

318 wrap = wrap and not geo_constrain 

319 scaled_positions = atoms.get_scaled_positions(wrap=wrap) 

320 

321 for i, atom in enumerate(atoms): 

322 if ghosts[i] == 1: 

323 atomstring = "empty " 

324 elif scaled: 

325 atomstring = "atom_frac " 

326 else: 

327 atomstring = "atom " 

328 fd.write(atomstring) 

329 if scaled: 

330 for pos in scaled_positions[i]: 

331 fd.write("%16.16f " % pos) 

332 else: 

333 for pos in atom.position: 

334 fd.write("%16.16f " % pos) 

335 fd.write(atom.symbol) 

336 fd.write("\n") 

337 # (1) all coords are constrained: 

338 if fix_cart[i].all(): 

339 fd.write(" constrain_relaxation .true.\n") 

340 # (2) some coords are constrained: 

341 elif fix_cart[i].any(): 

342 xyz = fix_cart[i] 

343 for n in range(3): 

344 if xyz[n]: 

345 fd.write(" constrain_relaxation %s\n" % "xyz"[n]) 

346 if atom.charge: 

347 fd.write(" initial_charge %16.6f\n" % atom.charge) 

348 if atom.magmom: 

349 fd.write(" initial_moment %16.6f\n" % atom.magmom) 

350 

351 # Write the velocities if this is wanted 

352 if velocities: 

353 warnings.warn( 

354 '`velocities` is deprecated, please use `write_velocities`', 

355 np.VisibleDeprecationWarning 

356 ) 

357 write_velocities = True 

358 

359 if write_velocities and atoms.get_velocities() is not None: 

360 fd.write( 

361 " velocity {:.16f} {:.16f} {:.16f}\n".format( 

362 *atoms.get_velocities()[i] / v_unit 

363 ) 

364 ) 

365 

366 if geo_constrain: 

367 for line in get_sym_block(atoms): 

368 fd.write(line) 

369 

370 
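# Illustrative sketch: writing a molecule to a geometry.in-formatted buffer.
# StringIO is used only to keep the example self-contained; a filename or an
# open file handle works the same way through the @writer decorator.
#
#     >>> from io import StringIO
#     >>> from ase.build import molecule
#     >>> buf = StringIO()
#     >>> write_aims(buf, molecule("H2O"))
#     >>> "atom " in buf.getvalue()
#     True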

371def get_sym_block(atoms): 

372 """Get symmetry block for Parametric constraints in atoms.constraints""" 

373 from ase.constraints import ( 

374 FixScaledParametricRelations, 

375 FixCartesianParametricRelations, 

376 ) 

377 

378 # Initialize param/expressions lists 

379 atomic_sym_params = [] 

380 lv_sym_params = [] 

381 atomic_param_constr = np.zeros((len(atoms),), dtype="<U100") 

382 lv_param_constr = np.zeros((3,), dtype="<U100") 

383 

384 # Populate param/expressions list 

385 for constr in atoms.constraints: 

386 if isinstance(constr, FixScaledParametricRelations): 

387 atomic_sym_params += constr.params 

388 

389 if np.any(atomic_param_constr[constr.indices] != ""): 

390 warnings.warn( 

391 "multiple parametric constraints defined for the same " 

392 "atom, using the last one defined" 

393 ) 

394 

395 atomic_param_constr[constr.indices] = [ 

396 ", ".join(expression) for expression in constr.expressions 

397 ] 

398 elif isinstance(constr, FixCartesianParametricRelations): 

399 lv_sym_params += constr.params 

400 

401 if np.any(lv_param_constr[constr.indices] != ""): 

402 warnings.warn( 

403 "multiple parametric constraints defined for the same " 

404 "lattice vector, using the last one defined" 

405 ) 

406 

407 lv_param_constr[constr.indices] = [ 

408 ", ".join(expression) for expression in constr.expressions 

409 ] 

410 

411 if np.all(atomic_param_constr == "") and np.all(lv_param_constr == ""): 

412 return [] 

413 

414 # Check Constraint Parameters 

415 if len(atomic_sym_params) != len(np.unique(atomic_sym_params)): 

416 warnings.warn( 

417 "Some parameters were used across constraints, they will be " 

418 "combined in the aims calculations" 

419 ) 

420 atomic_sym_params = np.unique(atomic_sym_params) 

421 

422 if len(lv_sym_params) != len(np.unique(lv_sym_params)): 

423 warnings.warn( 

424 "Some parameters were used across constraints, they will be " 

425 "combined in the aims calculations" 

426 ) 

427 lv_sym_params = np.unique(lv_sym_params) 

428 

429 if np.any(atomic_param_constr == ""): 

430 raise IOError( 

431 "FHI-aims input files require that all atoms have defined parametric "

432 "constraints" 

433 ) 

434 

435 cell_inds = np.where(lv_param_constr == "")[0] 

436 for ind in cell_inds: 

437 lv_param_constr[ind] = "{:.16f}, {:.16f}, {:.16f}".format( 

438 *atoms.cell[ind]) 

439 

440 n_atomic_params = len(atomic_sym_params) 

441 n_lv_params = len(lv_sym_params) 

442 n_total_params = n_atomic_params + n_lv_params 

443 

444 sym_block = [] 

445 if n_total_params > 0: 

446 sym_block.append("#" + "=" * 55 + "\n") 

447 sym_block.append("# Parametric constraints\n") 

448 sym_block.append("#" + "=" * 55 + "\n") 

449 sym_block.append( 

450 "symmetry_n_params {:d} {:d} {:d}\n".format( 

451 n_total_params, n_lv_params, n_atomic_params 

452 ) 

453 ) 

454 sym_block.append( 

455 "symmetry_params %s\n" % " ".join(lv_sym_params + atomic_sym_params) 

456 ) 

457 

458 for constr in lv_param_constr: 

459 sym_block.append("symmetry_lv {:s}\n".format(constr)) 

460 

461 for constr in atomic_param_constr: 

462 sym_block.append("symmetry_frac {:s}\n".format(constr)) 

463 return sym_block 

464 
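# For reference, the block returned above renders schematically as:
#
#     symmetry_n_params <n_total> <n_lv> <n_atomic>
#     symmetry_params <lattice and atomic parameter names>
#     symmetry_lv <one expression line per lattice vector>
#     symmetry_frac <one expression line per atom>
#
# i.e. the same keywords that parse_geometry_lines reads back in.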

465 

466def format_aims_control_parameter(key, value, format="%s"): 

467 """Format a line for the aims control.in 

468 

469 Parameters

470 ----------

471 key: str

472 Name of the parameter to format

473 value: Object

474 The value to pass to the parameter

475 format: str

476 Format string used to render the value

477 

478 Returns 

479 ------- 

480 str 

481 The properly formatted line for the aims control.in 

482 """ 

483 return f"{key :35s}" + (format % value) + "\n" 

484 
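# Illustrative sketch of the fixed-width formatting above: the key is padded
# to a 35-character column before the formatted value is appended.
#
#     >>> line = format_aims_control_parameter("xc", "pbe")
#     >>> line.startswith("xc"), line.endswith("pbe\n"), len(line)
#     (True, True, 39)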

485 

486# Write aims control.in files 

487@writer 

488def write_control(fd, atoms, parameters, verbose_header=False): 

489 """Write the control.in file for FHI-aims 

490 Parameters 

491 ---------- 

492 fd: str or file object

493 The file object to write to 

494 atoms: atoms.Atoms 

495 The Atoms object for the requested calculation 

496 parameters: dict 

497 The dictionary of all parameters for the calculation

498 verbose_header: bool 

499 If True then explicitly list the parameters used to generate the

500 control.in file inside the header 

501 """ 

502 

503 parameters = dict(parameters) 

504 lim = "#" + "=" * 79 

505 

506 if parameters["xc"] == "LDA": 

507 parameters["xc"] = "pw-lda" 

508 

509 cubes = parameters.pop("cubes", None) 

510 

511 for line in get_aims_header(): 

512 fd.write(line + "\n") 

513 

514 if verbose_header: 

515 fd.write("# \n# List of parameters used to initialize the calculator:\n")

516 for p, v in parameters.items(): 

517 s = "# {}:{}\n".format(p, v) 

518 fd.write(s) 

519 fd.write(lim + "\n") 

520 

521 assert not ("kpts" in parameters and "k_grid" in parameters) 

522 assert not ("smearing" in parameters and "occupation_type" in parameters) 

523 

524 for key, value in parameters.items(): 

525 if key == "kpts": 

526 mp = kpts2mp(atoms, parameters["kpts"]) 

527 dk = 0.5 - 0.5 / np.array(mp) 

528 fd.write( 

529 format_aims_control_parameter( 

530 "k_grid", 

531 tuple(mp), 

532 "%d %d %d")) 

533 fd.write( 

534 format_aims_control_parameter( 

535 "k_offset", 

536 tuple(dk), 

537 "%f %f %f")) 

538 elif key == "species_dir": 

539 continue 

540 elif key == "plus_u": 

541 continue 

542 elif key == "smearing": 

543 name = parameters["smearing"][0].lower() 

544 if name == "fermi-dirac": 

545 name = "fermi" 

546 width = parameters["smearing"][1] 

547 if name == "methfessel-paxton": 

548 order = parameters["smearing"][2] 

549 order = " %d" % order 

550 else: 

551 order = "" 

552 

553 fd.write( 

554 format_aims_control_parameter( 

555 "occupation_type", (name, width, order), "%s %f%s" 

556 ) 

557 ) 

558 elif key == "output": 

559 for output_type in value: 

560 fd.write(format_aims_control_parameter(key, output_type, "%s")) 

561 elif key == "vdw_correction_hirshfeld" and value: 

562 fd.write(format_aims_control_parameter(key, "", "%s")) 

563 elif isinstance(value, bool): 

564 fd.write( 

565 format_aims_control_parameter( 

566 key, str(value).lower(), ".%s.")) 

567 elif isinstance(value, (tuple, list)): 

568 fd.write( 

569 format_aims_control_parameter( 

570 key, " ".join([str(x) for x in value]), "%s" 

571 ) 

572 ) 

573 elif isinstance(value, str): 

574 fd.write(format_aims_control_parameter(key, value, "%s")) 

575 else: 

576 fd.write(format_aims_control_parameter(key, value, "%r")) 

577 

578 if cubes: 

579 cubes.write(fd) 

580 

581 fd.write(lim + "\n\n") 

582 


585 species_array = np.array(list(set(atoms.symbols))) 

586 # Grab the tier specification from the parameters. This may either

587 # be None, meaning the default should be used for all species, or a 

588 # list of integers/None values giving a specific basis set size 

589 # for each species in the calculation. 

590 tier = parameters.pop("tier", None) 

591 tier_array = np.full(len(species_array), tier) 

592 # Path to species files for FHI-aims. These files contain the basis

593 # set specifications for each basis set tier.

594 species_dir = get_species_directory(parameters.get("species_dir")) 

595 # Parse the species files for each species present in the calculation 

596 # according to the tier of each species. 

597 species_basis_dict = parse_species_path( 

598 species_array=species_array, tier_array=tier_array, 

599 species_dir=species_dir) 

600 # Write the basis functions to be included for each species in the 

601 # calculation into the control.in file (fd). 

602 write_species(fd, species_basis_dict, parameters) 

603 
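# Sketch of a typical parameters dict handled by write_control above (all
# values illustrative):
#
#     parameters = {
#         "xc": "pbe",
#         "kpts": (4, 4, 4),              # expanded into k_grid / k_offset
#         "smearing": ("gaussian", 0.1),  # written as occupation_type
#         "species_dir": "/path/to/species_defaults/defaults_2020/light",
#     }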

604 

605def get_species_directory(species_dir=None): 

606 """Get the directory where the basis set information is stored 

607 

608 If the requested directory does not exist then raise an Error 

609 

610 Parameters 

611 ---------- 

612 species_dir: str 

613 Requested directory to find the basis set info from. E.g. 

614 `~/aims2022/FHIaims/species_defaults/defaults_2020/light`. 

615 

616 Returns 

617 ------- 

618 Path 

619 The Path to the requested or default species directory. 

620 

621 Raises 

622 ------ 

623 RuntimeError 

624 If both the requested directory and the default one is not defined 

625 or does not exit. 

626 """ 

627 if species_dir is None: 

628 species_dir = os.environ.get("AIMS_SPECIES_DIR") 

629 

630 if species_dir is None: 

631 raise RuntimeError( 

632 "Missing species directory! Use species_dir " 

633 + "parameter or set $AIMS_SPECIES_DIR environment variable." 

634 ) 

635 

636 species_path = Path(species_dir) 

637 if not species_path.exists(): 

638 raise RuntimeError( 

639 f"The requested species_dir {species_dir} does not exist") 

640 

641 return species_path 

642 
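# Sketch: the species directory can also come from the environment (the path
# below is illustrative).
#
#     >>> import os
#     >>> os.environ["AIMS_SPECIES_DIR"] = "/opt/species_defaults/light"  # doctest: +SKIP
#     >>> get_species_directory()  # doctest: +SKIP
#     PosixPath('/opt/species_defaults/light')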

643 

644def write_species(control_file_descriptor, species_basis_dict, parameters): 

645 """Write species for the calculation depending on basis set size. 

646 

647 The calculation should include certain basis functions depending

648 on the numerical settings (light, tight, really tight) and the basis set 

649 size (minimal, tier1, tier2, tier3, tier4). If the basis set size is not 

650 given then a 'standard' basis set size is used for each numerical setting. 

651 The species files are defined according to these standard basis set sizes 

652 for the numerical settings in the FHI-aims repository. 

653 

654 Note, for FHI-aims in ASE, we don't explicitly give the numerical setting. 

655 Instead we include the numerical setting in the species path: e.g. 

656 `~/aims2022/FHIaims/species_defaults/defaults_2020/light` this path has 

657 `light`, the numerical setting, as the last folder in the path. 

658 

659 For example, a basis function such as "# hydro 4 f 7.4" may be commented

660 out in the standard basis set size and needs to be uncommented for a

661 larger basis set size such as tier4.

662 

663 Args: 

664 control_file_descriptor: File descriptor for the control.in file into 

665 which we need to write relevant basis functions to be included for 

666 the calculation. 

667 species_basis_dict: Dictionary where keys are the species symbols and

668 each value is a single string containing all the basis functions

669 to be included in the calculation.

670 parameters: Calculation parameters as a dict. 

671 """ 

672 # Now for every species (key) in the species_basis_dict, save the 

673 # relevant basis functions (values) from the species_basis_dict, by 

674 # writing to the file handle (control_file_descriptor) given to this

675 # function. 

676 for species_symbol, basis_set_text in species_basis_dict.items(): 

677 control_file_descriptor.write(basis_set_text) 

678 if parameters.get("plus_u") is not None: 

679 if species_symbol in parameters.plus_u: 

680 control_file_descriptor.write( 

681 f"plus_u {parameters.plus_u[species_symbol]} \n") 

682 

683 

684def parse_species_path(species_array, tier_array, species_dir): 

685 """Parse the species files for each species according to the tier given. 

686 

687 Args: 

688 species_array: An array of species/element symbols present in the unit 

689 cell (e.g. ['C', 'H'].) 

690 tier_array: An array of None/integer values which define which basis 

691 set size to use for each species/element in the calculation.

692 species_dir: Directory containing FHI-aims species files. 

693 

694 Returns: 

695 Dictionary containing species as keys and the basis set specification 

696 for each species as text as the value for the key. 

697 """ 

698 if len(species_array) != len(tier_array): 

699 raise ValueError( 

700 f"The species array length: {len(species_array)}, " 

701 f"is not the same as the tier_array length: {len(tier_array)}") 

702 

703 species_basis_dict = {} 

704 

705 for symbol, tier in zip(species_array, tier_array): 

706 path = species_dir / f"{atomic_numbers[symbol]:02}_{symbol}_default" 

707 # Open the species file: 

708 with open(path, "r", encoding="utf8") as species_file_handle: 

709 # Read the species file into a string. 

710 species_file_str = species_file_handle.read() 

711 species_basis_dict[symbol] = manipulate_tiers( 

712 species_file_str, tier) 

713 return species_basis_dict 

714 

715 

716def manipulate_tiers(species_string: str, tier: Union[None, int] = 1): 

717 """Adds basis set functions based on the tier value. 

718 

719 This function takes in the species file as a string, it then searches 

720 for relevant basis functions based on the tier value to include in a new 

721 string that is returned. 

722 

723 Args: 

724 species_string: species file (default) for a given numerical setting 

725 (light, tight, really tight) given as a string. 

726 tier: The basis set size. This will dictate which basis set functions 

727 are included in the returned string. 

728 

729 Returns: 

730 Basis set functions defined by the tier as a string. 

731 """ 

732 if tier is None: # Then we use the default species file untouched. 

733 return species_string 

734 tier_pattern = r"(# \".* tier\" .*|# +Further.*)" 

735 top, *tiers = re.split(tier_pattern, species_string) 

736 tier_comments = tiers[::2] 

737 tier_basis = tiers[1::2] 

738 assert len( 

739 tier_comments) == len(tier_basis), "Something wrong with splitting" 

740 n_tiers = len(tier_comments) 

741 assert tier <= n_tiers, f"Only {n_tiers} tiers available, you chose {tier}"

742 string_new = top 

743 for i, (c, b) in enumerate(zip(tier_comments, tier_basis)): 

744 b = re.sub(r"\n( *for_aux| *hydro| *ionic| *confined)", r"\n#\g<1>", b) 

745 if i < tier: 

746 b = re.sub( 

747 r"\n#( *for_aux| *hydro| *ionic| *confined)", r"\n\g<1>", b) 

748 string_new += c + b 

749 return string_new 

750 
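# Illustrative sketch (hypothetical species text, not an actual FHI-aims
# species file): with tier=1 the two "First tier" hydro functions below are
# uncommented while the "Second tier" function stays commented out.
#
#     _demo_species = (
#         "species H\n"
#         "    nucleus 1\n"
#         '# "First tier" - improvements\n'
#         "#     hydro 2 s 2.1\n"
#         "#     hydro 2 p 3.5\n"
#         '# "Second tier" - improvements\n'
#         "#     hydro 1 s 0.85\n"
#     )
#     manipulate_tiers(_demo_species, tier=1)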

751 

752# Read aims.out files 

753scalar_property_to_line_key = { 

754 "free_energy": ["| Electronic free energy"], 

755 "number_of_iterations": ["| Number of self-consistency cycles"], 

756 "magnetic_moment": ["N_up - N_down"], 

757 "n_atoms": ["| Number of atoms"], 

758 "n_bands": [ 

759 "Number of Kohn-Sham states", 

760 "Reducing total number of Kohn-Sham states",

762 ], 

763 "n_electrons": ["The structure contains"], 

764 "n_kpts": ["| Number of k-points"], 

765 "n_spins": ["| Number of spin channels"], 

766 "electronic_temp": ["Occupation type:"], 

767 "fermi_energy": ["| Chemical potential (Fermi level)"], 

768} 

769 

770 

771class AimsOutChunk: 

772 """Base class for AimsOutChunks""" 

773 

774 def __init__(self, lines): 

775 """Constructor 

776 

777 Parameters 

778 ---------- 

779 lines: list of str 

780 The set of lines from the output file the encompasses either 

781 a single structure within a trajectory or 

782 general information about the calculation (header) 

783 """ 

784 self.lines = lines 

785 

786 def reverse_search_for(self, keys, line_start=0): 

787 """Find the last time one of the keys appears in self.lines 

788 

789 Parameters 

790 ---------- 

791 keys: list of str 

792 The key strings to search for in self.lines 

793 line_start: int 

794 The lowest index to search for in self.lines 

795 

796 Returns 

797 ------- 

798 int 

799 The last time one of the keys appears in self.lines 

800 """ 

801 for ll, line in enumerate(self.lines[line_start:][::-1]): 

802 if any([key in line for key in keys]): 

803 return len(self.lines) - ll - 1 

804 

805 return LINE_NOT_FOUND 

806 

807 def search_for_all(self, key, line_start=0, line_end=-1): 

808 """Find all the times the key appears in self.lines

809 

810 Parameters 

811 ---------- 

812 key: str 

813 The key string to search for in self.lines 

814 line_start: int 

815 The first line to start the search from 

816 line_end: int 

817 The last line to end the search at 

818 

819 Returns 

820 ------- 

821 list of ints 

822 All times the key appears in the lines 

823 """ 

824 line_index = [] 

825 for ll, line in enumerate(self.lines[line_start:line_end]): 

826 if key in line: 

827 line_index.append(ll + line_start) 

828 return line_index 

829 

830 def parse_scalar(self, property): 

831 """Parse a scalar property from the chunk 

832 

833 Parameters 

834 ---------- 

835 property: str 

836 The property key to parse 

837 

838 Returns 

839 ------- 

840 float 

841 The scalar value of the property 

842 """ 

843 line_start = self.reverse_search_for( 

844 scalar_property_to_line_key[property]) 

845 

846 if line_start == LINE_NOT_FOUND: 

847 return None 

848 

849 line = self.lines[line_start] 

850 return float(line.split(":")[-1].strip().split()[0]) 

851 
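# Illustrative sketch of the search helpers above on a tiny, hand-made chunk:
# reverse_search_for returns the index of the *last* matching line (or
# LINE_NOT_FOUND), and parse_scalar reads the value after the colon.
#
#     >>> chunk = AimsOutChunk(["foo", "| Number of atoms : 2", "bar"])
#     >>> chunk.reverse_search_for(["| Number of atoms"])
#     1
#     >>> chunk.parse_scalar("n_atoms")
#     2.0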

852 

853class AimsOutHeaderChunk(AimsOutChunk): 

854 """The header of the aims.out file containing general information"""

855 

856 def __init__(self, lines): 

857 """Constructor 

858 

859 Parameters 

860 ---------- 

861 lines: list of str 

862 The lines inside the aims.out header 

863 """ 

864 super().__init__(lines) 

865 self._k_points = None 

866 self._k_point_weights = None 

867 

868 @lazyproperty 

869 def constraints(self): 

870 """Parse the constraints from the aims.out file 

871 

872 Constraints for the lattice vectors are not supported. 

873 """ 

874 

875 line_inds = self.search_for_all("Found relaxation constraint for atom") 

876 if len(line_inds) == 0: 

877 return [] 

878 

879 fix = [] 

880 fix_cart = [] 

881 for ll in line_inds: 

882 line = self.lines[ll] 

883 xyz = [0, 0, 0] 

884 ind = int(line.split()[5][:-1]) - 1 

885 if "All coordinates fixed" in line: 

886 if ind not in fix: 

887 fix.append(ind) 

888 if "coordinate fixed" in line: 

889 coord = line.split()[6] 

890 if coord == "x": 

891 xyz[0] = 1 

892 elif coord == "y": 

893 xyz[1] = 1 

894 elif coord == "z": 

895 xyz[2] = 1 

896 keep = True 

897 for n, c in enumerate(fix_cart): 

898 if ind == c.index: 

899 keep = False 

900 break 

901 if keep: 

902 fix_cart.append(FixCartesian(ind, xyz)) 

903 else: 

904 fix_cart[n].mask[xyz.index(1)] = 0 

905 if len(fix) > 0: 

906 fix_cart.append(FixAtoms(indices=fix)) 

907 

908 return fix_cart 

909 

910 @lazyproperty 

911 def initial_cell(self): 

912 """Parse the initial cell from the aims.out file""" 

913 line_start = self.reverse_search_for(["| Unit cell:"]) 

914 if line_start == LINE_NOT_FOUND: 

915 return None 

916 

917 return [ 

918 [float(inp) for inp in line.split()[-3:]] 

919 for line in self.lines[line_start + 1:line_start + 4] 

920 ] 

921 

922 @lazyproperty 

923 def initial_atoms(self): 

924 """Create an atoms object for the initial geometry.in structure 

925 from the aims.out file""" 

926 line_start = self.reverse_search_for(["Atomic structure:"]) 

927 if line_start == LINE_NOT_FOUND: 

928 raise AimsParseError( 

929 "No information about the structure in the chunk.") 

930 

931 line_start += 2 

932 

933 cell = self.initial_cell 

934 positions = np.zeros((self.n_atoms, 3)) 

935 symbols = [""] * self.n_atoms 

936 for ll, line in enumerate( 

937 self.lines[line_start:line_start + self.n_atoms]): 

938 inp = line.split() 

939 positions[ll, :] = [float(pos) for pos in inp[4:7]] 

940 symbols[ll] = inp[3] 

941 

942 atoms = Atoms(symbols=symbols, positions=positions) 

943 

944 if cell: 

945 atoms.set_cell(cell) 

946 atoms.set_pbc([True, True, True]) 

947 atoms.set_constraint(self.constraints) 

948 

949 return atoms 

950 

951 @lazyproperty 

952 def is_md(self): 

953 """Determine if calculation is a molecular dynamics calculation""" 

954 return LINE_NOT_FOUND != self.reverse_search_for( 

955 ["Complete information for previous time-step:"] 

956 ) 

957 

958 @lazyproperty 

959 def is_relaxation(self): 

960 """Determine if the calculation is a geometry optimization or not""" 

961 return LINE_NOT_FOUND != self.reverse_search_for( 

962 ["Geometry relaxation:"]) 

963 

964 @lazymethod 

965 def _parse_k_points(self): 

966 """Get the list of k-points used in the calculation""" 

967 n_kpts = self.parse_scalar("n_kpts") 

968 if n_kpts is None: 

969 return { 

970 "k_points": None, 

971 "k_point_weights": None, 

972 } 

973 n_kpts = int(n_kpts) 

974 

975 line_start = self.reverse_search_for(["| K-points in task"]) 

976 line_end = self.reverse_search_for(["| k-point:"]) 

977 if ( 

978 (line_start == LINE_NOT_FOUND) 

979 or (line_end == LINE_NOT_FOUND) 

980 or (line_end - line_start != n_kpts) 

981 ): 

982 return { 

983 "k_points": None, 

984 "k_point_weights": None, 

985 } 

986 

987 k_points = np.zeros((n_kpts, 3)) 

988 k_point_weights = np.zeros((n_kpts)) 

989 for kk, line in enumerate(self.lines[line_start + 1:line_end + 1]): 

990 k_points[kk] = [float(inp) for inp in line.split()[4:7]] 

991 k_point_weights[kk] = float(line.split()[-1]) 

992 

993 return { 

994 "k_points": k_points, 

995 "k_point_weights": k_point_weights, 

996 } 

997 

998 @lazyproperty 

999 def n_atoms(self): 

1000 """The number of atoms for the material""" 

1001 n_atoms = self.parse_scalar("n_atoms") 

1002 if n_atoms is None: 

1003 raise AimsParseError( 

1004 "No information about the number of atoms in the header." 

1005 ) 

1006 return int(n_atoms) 

1007 

1008 @lazyproperty 

1009 def n_bands(self): 

1010 """The number of Kohn-Sham states for the chunk""" 

1011 line_start = self.reverse_search_for( 

1012 scalar_property_to_line_key["n_bands"]) 

1013 

1014 if line_start == LINE_NOT_FOUND: 

1015 raise AimsParseError( 

1016 "No information about the number of Kohn-Sham states " 

1017 "in the header.") 

1018 

1019 line = self.lines[line_start] 

1020 if "| Number of Kohn-Sham states" in line: 

1021 return int(line.split(":")[-1].strip().split()[0]) 

1022 

1023 return int(line.split()[-1].strip()[:-1]) 

1024 

1025 @lazyproperty 

1026 def n_electrons(self): 

1027 """The number of electrons for the chunk""" 

1028 line_start = self.reverse_search_for( 

1029 scalar_property_to_line_key["n_electrons"]) 

1030 

1031 if line_start == LINE_NOT_FOUND: 

1032 raise AimsParseError( 

1033 "No information about the number of electrons in the header." 

1034 ) 

1035 

1036 line = self.lines[line_start] 

1037 return int(float(line.split()[-2])) 

1038 

1039 @lazyproperty 

1040 def n_k_points(self): 

1041 """The number of k-points for the calculation"""

1042 n_kpts = self.parse_scalar("n_kpts") 

1043 if n_kpts is None: 

1044 return None 

1045 

1046 return int(n_kpts) 

1047 

1048 @lazyproperty 

1049 def n_spins(self): 

1050 """The number of spin channels for the chunk""" 

1051 n_spins = self.parse_scalar("n_spins") 

1052 if n_spins is None: 

1053 raise AimsParseError( 

1054 "No information about the number of spin " 

1055 "channels in the header.") 

1056 return int(n_spins) 

1057 

1058 @lazyproperty 

1059 def electronic_temperature(self): 

1060 """The electronic temperature for the chunk""" 

1061 line_start = self.reverse_search_for( 

1062 scalar_property_to_line_key["electronic_temp"] 

1063 ) 

1064 if line_start == LINE_NOT_FOUND: 

1065 return 0.10 

1066 

1067 line = self.lines[line_start] 

1068 return float(line.split("=")[-1].strip().split()[0]) 

1069 

1070 @lazyproperty 

1071 def k_points(self): 

1072 """All k-points listed in the calculation""" 

1073 return self._parse_k_points()["k_points"] 

1074 

1075 @lazyproperty 

1076 def k_point_weights(self): 

1077 """The k-point weights for the calculation""" 

1078 return self._parse_k_points()["k_point_weights"] 

1079 

1080 @lazyproperty 

1081 def header_summary(self): 

1082 """Dictionary summarizing the information inside the header""" 

1083 return { 

1084 "initial_atoms": self.initial_atoms, 

1085 "initial_cell": self.initial_cell, 

1086 "constraints": self.constraints, 

1087 "is_relaxation": self.is_relaxation, 

1088 "is_md": self.is_md, 

1089 "n_atoms": self.n_atoms, 

1090 "n_bands": self.n_bands, 

1091 "n_electrons": self.n_electrons, 

1092 "n_spins": self.n_spins, 

1093 "electronic_temperature": self.electronic_temperature, 

1094 "n_k_points": self.n_k_points, 

1095 "k_points": self.k_points, 

1096 "k_point_weights": self.k_point_weights, 

1097 } 

1098 

1099 

1100class AimsOutCalcChunk(AimsOutChunk): 

1101 """A part of the aims.out file corresponding to a single structure"""

1102 

1103 def __init__(self, lines, header): 

1104 """Constructor 

1105 

1106 Parameters 

1107 ---------- 

1108 lines: list of str 

1109 The lines used for the structure 

1110 header: dict 

1111 A summary of the relevant information from the aims.out header 

1112 """ 

1113 super().__init__(lines) 

1114 self._header = header.header_summary 

1115 

1116 @lazymethod 

1117 def _parse_atoms(self): 

1118 """Create an atoms object for the subsequent structures 

1119 calculated in the aims.out file""" 

1120 start_keys = [ 

1121 "Atomic structure (and velocities) as used in the preceding " 

1122 "time step", 

1123 "Updated atomic structure", 

1124 "Atomic structure that was used in the preceding time step of " 

1125 "the wrapper", 

1126 ] 

1127 line_start = self.reverse_search_for(start_keys) 

1128 if line_start == LINE_NOT_FOUND: 

1129 return self.initial_atoms 

1130 

1131 line_start += 1 

1132 

1133 line_end = self.reverse_search_for( 

1134 ['Writing the current geometry to file "geometry.in.next_step"'], 

1135 line_start) 

1136 if line_end == LINE_NOT_FOUND: 

1137 line_end = len(self.lines) 

1138 

1139 cell = [] 

1140 velocities = [] 

1141 atoms = Atoms() 

1142 for line in self.lines[line_start:line_end]: 

1143 if "lattice_vector " in line: 

1144 cell.append([float(inp) for inp in line.split()[1:]]) 

1145 elif "atom " in line: 

1146 line_split = line.split() 

1147 atoms.append(Atom(line_split[4], tuple( 

1148 [float(inp) for inp in line_split[1:4]]))) 

1149 elif "velocity " in line: 

1150 velocities.append([float(inp) for inp in line.split()[1:]]) 

1151 

1152 assert len(atoms) == self.n_atoms 

1153 assert (len(velocities) == self.n_atoms) or (len(velocities) == 0) 

1154 if len(cell) == 3: 

1155 atoms.set_cell(np.array(cell)) 

1156 atoms.set_pbc([True, True, True]) 

1157 elif len(cell) != 0: 

1158 raise AimsParseError( 

1159 "Parsed geometry has incorrect number of lattice vectors." 

1160 ) 

1161 

1162 if len(velocities) > 0: 

1163 atoms.set_velocities(np.array(velocities)) 

1164 atoms.set_constraint(self.constraints) 

1165 

1166 return atoms 

1167 

1168 @lazyproperty 

1169 def forces(self): 

1170 """Parse the forces from the aims.out file""" 

1171 line_start = self.reverse_search_for(["Total atomic forces"]) 

1172 if line_start == LINE_NOT_FOUND: 

1173 return 

1174 

1175 line_start += 1 

1176 

1177 return np.array( 

1178 [ 

1179 [float(inp) for inp in line.split()[-3:]] 

1180 for line in self.lines[line_start:line_start + self.n_atoms] 

1181 ] 

1182 ) 

1183 

1184 @lazyproperty 

1185 def stresses(self): 

1186 """Parse the stresses from the aims.out file""" 

1187 line_start = self.reverse_search_for( 

1188 ["Per atom stress (eV) used for heat flux calculation"] 

1189 ) 

1190 if line_start == LINE_NOT_FOUND: 

1191 return None 

1192 line_start += 3 

1193 stresses = [] 

1194 for line in self.lines[line_start:line_start + self.n_atoms]: 

1195 xx, yy, zz, xy, xz, yz = [float(d) for d in line.split()[2:8]] 

1196 stresses.append([xx, yy, zz, yz, xz, xy]) 

1197 

1198 return np.array(stresses) 

1199 

1200 @lazyproperty 

1201 def stress(self): 

1202 """Parse the stress from the aims.out file""" 

1203 from ase.stress import full_3x3_to_voigt_6_stress 

1204 

1205 line_start = self.reverse_search_for( 

1206 [ 

1207 "Analytical stress tensor - Symmetrized", 

1208 "Numerical stress tensor", 

1209 ] 

1210 

1211 ) # Offset to relevant lines

1212 if line_start == LINE_NOT_FOUND: 

1213 return 

1214 

1215 stress = [ 

1216 [float(inp) for inp in line.split()[2:5]] 

1217 for line in self.lines[line_start + 5:line_start + 8] 

1218 ] 

1219 return full_3x3_to_voigt_6_stress(stress) 

1220 

1221 @lazyproperty 

1222 def is_metallic(self): 

1223 """Checks the output file to see if the chunk corresponds

1224 to a metallic system""" 

1225 line_start = self.reverse_search_for( 

1226 ["material is metallic within the approximate finite " 

1227 "broadening function (occupation_type)"]) 

1228 return line_start != LINE_NOT_FOUND 

1229 

1230 @lazyproperty 

1231 def energy(self): 

1232 """Parse the energy from the aims.out file""" 

1233 atoms = self._parse_atoms() 

1234 

1235 if np.all(atoms.pbc) and self.is_metallic: 

1236 line_ind = self.reverse_search_for(["Total energy corrected"]) 

1237 else: 

1238 line_ind = self.reverse_search_for(["Total energy uncorrected"]) 

1239 if line_ind == LINE_NOT_FOUND: 

1240 raise AimsParseError("No energy is associated with the structure.") 

1241 

1242 return float(self.lines[line_ind].split()[5]) 

1243 

1244 @lazyproperty 

1245 def dipole(self): 

1246 """Parse the electric dipole moment from the aims.out file.""" 

1247 line_start = self.reverse_search_for(["Total dipole moment [eAng]"]) 

1248 if line_start == LINE_NOT_FOUND: 

1249 return 

1250 

1251 line = self.lines[line_start] 

1252 return np.array([float(inp) for inp in line.split()[6:9]]) 

1253 

1254 @lazyproperty 

1255 def dielectric_tensor(self): 

1256 """Parse the dielectric tensor from the aims.out file""" 

1257 line_start = self.reverse_search_for(["PARSE DFPT_dielectric_tensor"]) 

1258 if line_start == LINE_NOT_FOUND: 

1259 return 

1260 

1261 # we should find the tensor in the next three lines: 

1262 lines = self.lines[line_start + 1:line_start + 4] 

1263 

1264 # make ndarray and return 

1265 return np.array([np.fromstring(line, sep=' ') for line in lines]) 

1266 

1267 @lazyproperty 

1268 def polarization(self): 

1269 """ Parse the polarization vector from the aims.out file""" 

1270 line_start = self.reverse_search_for(["| Cartesian Polarization"]) 

1271 if line_start == LINE_NOT_FOUND: 

1272 return 

1273 line = self.lines[line_start] 

1274 return np.array([float(s) for s in line.split()[-3:]]) 

1275 

1276 @lazymethod 

1277 def _parse_hirshfeld(self): 

1278 """Parse the Hirshfeld charges, volumes, and dipole moments from the

1279 output"""

1280 atoms = self._parse_atoms() 

1281 

1282 line_start = self.reverse_search_for( 

1283 ["Performing Hirshfeld analysis of fragment charges and moments."] 

1284 ) 

1285 if line_start == LINE_NOT_FOUND: 

1286 return { 

1287 "charges": None, 

1288 "volumes": None, 

1289 "atomic_dipoles": None, 

1290 "dipole": None, 

1291 } 

1292 

1293 line_inds = self.search_for_all("Hirshfeld charge", line_start, -1) 

1294 hirshfeld_charges = np.array( 

1295 [float(self.lines[ind].split(":")[1]) for ind in line_inds] 

1296 ) 

1297 

1298 line_inds = self.search_for_all("Hirshfeld volume", line_start, -1) 

1299 hirshfeld_volumes = np.array( 

1300 [float(self.lines[ind].split(":")[1]) for ind in line_inds] 

1301 ) 

1302 

1303 line_inds = self.search_for_all( 

1304 "Hirshfeld dipole vector", line_start, -1) 

1305 hirshfeld_atomic_dipoles = np.array( 

1306 [ 

1307 [float(inp) for inp in self.lines[ind].split(":")[1].split()] 

1308 for ind in line_inds 

1309 ] 

1310 ) 

1311 

1312 if not np.any(atoms.pbc): 

1313 hirshfeld_dipole = np.sum( 

1314 hirshfeld_charges.reshape((-1, 1)) * atoms.get_positions(), 

1315 axis=0,

1316 ) 

1317 else: 

1318 hirshfeld_dipole = None 

1319 return { 

1320 "charges": hirshfeld_charges, 

1321 "volumes": hirshfeld_volumes, 

1322 "atomic_dipoles": hirshfeld_atomic_dipoles, 

1323 "dipole": hirshfeld_dipole, 

1324 } 

1325 

1326 @lazymethod 

1327 def _parse_eigenvalues(self): 

1328 """Parse the eigenvalues and occupancies of the system. If eigenvalue 

1329 for a particular k-point is not present in the output file 

1330 then set it to np.nan 

1331 """ 

1332 

1333 atoms = self._parse_atoms() 

1334 

1335 line_start = self.reverse_search_for(["Writing Kohn-Sham eigenvalues."]) 

1336 if line_start == LINE_NOT_FOUND: 

1337 return {"eigenvalues": None, "occupancies": None} 

1338 

1339 line_end_1 = self.reverse_search_for( 

1340 ["Self-consistency cycle converged."], line_start 

1341 ) 

1342 line_end_2 = self.reverse_search_for( 

1343 [ 

1344 "What follows are estimated values for band gap, " 

1345 "HOMO, LUMO, etc.", 

1346 "Current spin moment of the entire structure :", 

1347 "Highest occupied state (VBM)" 

1348 ], 

1349 line_start, 

1350 ) 

1351 if line_end_1 == LINE_NOT_FOUND: 

1352 line_end = line_end_2 

1353 elif line_end_2 == LINE_NOT_FOUND: 

1354 line_end = line_end_1 

1355 else: 

1356 line_end = min(line_end_1, line_end_2) 

1357 

1358 n_kpts = self.n_k_points if np.all(atoms.pbc) else 1 

1359 if n_kpts is None: 

1360 return {"eigenvalues": None, "occupancies": None} 

1361 

1362 eigenvalues = np.full((n_kpts, self.n_bands, self.n_spins), np.nan) 

1363 occupancies = np.full((n_kpts, self.n_bands, self.n_spins), np.nan) 

1364 

1365 occupation_block_start = self.search_for_all( 

1366 "State Occupation Eigenvalue [Ha] Eigenvalue [eV]", 

1367 line_start, 

1368 line_end, 

1369 ) 

1370 kpt_def = self.search_for_all("K-point: ", line_start, line_end) 

1371 

1372 if len(kpt_def) > 0: 

1373 kpt_inds = [int(self.lines[ll].split()[1]) - 1 for ll in kpt_def] 

1374 elif (self.n_k_points is None) or (self.n_k_points == 1): 

1375 kpt_inds = [0] 

1376 else: 

1377 raise ParseError("Cannot find k-point definitions") 

1378 

1379 assert len(kpt_inds) == len(occupation_block_start) 

1380 spins = [0] * len(occupation_block_start) 

1381 

1382 if self.n_spins == 2: 

1383 spin_def = self.search_for_all("Spin-", line_start, line_end) 

1384 assert len(spin_def) == len(occupation_block_start) 

1385 

1386 spins = [int("Spin-down eigenvalues:" in self.lines[ll]) 

1387 for ll in spin_def] 

1388 

1389 for occ_start, kpt_ind, spin in zip( 

1390 occupation_block_start, kpt_inds, spins): 

1391 for ll, line in enumerate( 

1392 self.lines[occ_start + 1:occ_start + self.n_bands + 1] 

1393 ): 

1394 if "***" in line: 

1395 warn_msg = f"The {ll+1}th eigenvalue for the "

1396 warn_msg += f"{kpt_ind+1}th k-point and {spin}th channels could "

1397 warn_msg += "not be read (likely too large to be printed "

1398 warn_msg += "in the output file)"

1399 warnings.warn(warn_msg) 

1400 continue 

1401 split_line = line.split() 

1402 eigenvalues[kpt_ind, ll, spin] = float(split_line[3]) 

1403 occupancies[kpt_ind, ll, spin] = float(split_line[1]) 

1404 return {"eigenvalues": eigenvalues, "occupancies": occupancies} 

1405 

1406 @lazyproperty 

1407 def atoms(self): 

1408 """Convert AimsOutChunk to Atoms object and add all non-standard 

1409outputs to atoms.info""" 

1410 atoms = self._parse_atoms() 

1411 

1412 atoms.calc = SinglePointDFTCalculator( 

1413 atoms, 

1414 energy=self.energy, 

1415 free_energy=self.free_energy, 

1416 forces=self.forces, 

1417 stress=self.stress, 

1418 stresses=self.stresses, 

1419 magmom=self.magmom, 

1420 dipole=self.dipole, 

1421 dielectric_tensor=self.dielectric_tensor, 

1422 polarization=self.polarization, 

1423 ) 

1424 return atoms 

1425 

1426 @property 

1427 def results(self): 

1428 """Convert an AimsOutChunk to a Results Dictionary""" 

1429 results = { 

1430 "energy": self.energy, 

1431 "free_energy": self.free_energy, 

1432 "forces": self.forces, 

1433 "stress": self.stress, 

1434 "stresses": self.stresses, 

1435 "magmom": self.magmom, 

1436 "dipole": self.dipole, 

1437 "fermi_energy": self.E_f, 

1438 "n_iter": self.n_iter, 

1439 "hirshfeld_charges": self.hirshfeld_charges, 

1440 "hirshfeld_dipole": self.hirshfeld_dipole, 

1441 "hirshfeld_volumes": self.hirshfeld_volumes, 

1442 "hirshfeld_atomic_dipoles": self.hirshfeld_atomic_dipoles, 

1443 "eigenvalues": self.eigenvalues, 

1444 "occupancies": self.occupancies, 

1445 "dielectric_tensor": self.dielectric_tensor, 

1446 "polarization": self.polarization, 

1447 } 

1448 

1449 return { 

1450 key: value for key, 

1451 value in results.items() if value is not None} 

1452 

1453 # Properties from the aims.out header 

1454 @lazyproperty 

1455 def initial_atoms(self): 

1456 """The initial structure defined in the geometry.in file"""

1457 return self._header["initial_atoms"] 

1458 

1459 @lazyproperty 

1460 def initial_cell(self): 

1461 """The initial lattice vectors defined in the geometry.in file"""

1462 return self._header["initial_cell"] 

1463 

1464 @lazyproperty 

1465 def constraints(self): 

1466 """The relaxation constraints for the calculation""" 

1467 return self._header["constraints"] 

1468 

1469 @lazyproperty 

1470 def n_atoms(self): 

1471 """The number of atoms for the material""" 

1472 return self._header["n_atoms"] 

1473 

1474 @lazyproperty 

1475 def n_bands(self): 

1476 """The number of Kohn-Sham states for the chunk""" 

1477 return self._header["n_bands"] 

1478 

1479 @lazyproperty 

1480 def n_electrons(self): 

1481 """The number of electrons for the chunk""" 

1482 return self._header["n_electrons"] 

1483 

1484 @lazyproperty 

1485 def n_spins(self): 

1486 """The number of spin channels for the chunk""" 

1487 return self._header["n_spins"] 

1488 

1489 @lazyproperty 

1490 def electronic_temperature(self): 

1491 """The electronic temperature for the chunk""" 

1492 return self._header["electronic_temperature"] 

1493 

1494 @lazyproperty 

1495 def n_k_points(self): 

1496 """The number of k-points for the chunk"""

1497 return self._header["n_k_points"] 

1498 

1499 @lazyproperty 

1500 def k_points(self): 

1501 """The k-points used in the calculation"""

1502 return self._header["k_points"] 

1503 

1504 @lazyproperty 

1505 def k_point_weights(self): 

1506 """The k-point weights for the chunk"""

1507 return self._header["k_point_weights"] 

1508 

1509 @lazyproperty 

1510 def free_energy(self): 

1511 """The free energy for the chunk""" 

1512 return self.parse_scalar("free_energy") 

1513 

1514 @lazyproperty 

1515 def n_iter(self): 

1516 """The number of SCF iterations needed to converge the SCF cycle for 

1517the chunk""" 

1518 return self.parse_scalar("number_of_iterations") 

1519 

1520 @lazyproperty 

1521 def magmom(self): 

1522 """The magnetic moment for the chunk""" 

1523 return self.parse_scalar("magnetic_moment") 

1524 

1525 @lazyproperty 

1526 def E_f(self): 

1527 """The Fermi energy for the chunk""" 

1528 return self.parse_scalar("fermi_energy") 

1529 

1530 @lazyproperty 

1531 def converged(self): 

1532 """True if the chunk is a fully converged final structure""" 

1533 return (len(self.lines) > 0) and ("Have a nice day." in self.lines[-5:]) 

1534 

1535 @lazyproperty 

1536 def hirshfeld_charges(self): 

1537 """The Hirshfeld charges for the chunk""" 

1538 return self._parse_hirshfeld()["charges"] 

1539 

1540 @lazyproperty 

1541 def hirshfeld_atomic_dipoles(self): 

1542 """The Hirshfeld atomic dipole moments for the chunk""" 

1543 return self._parse_hirshfeld()["atomic_dipoles"] 

1544 

1545 @lazyproperty 

1546 def hirshfeld_volumes(self): 

1547 """The Hirshfeld volume for the chunk""" 

1548 return self._parse_hirshfeld()["volumes"] 

1549 

1550 @lazyproperty 

1551 def hirshfeld_dipole(self): 

1552 """The Hirshfeld systematic dipole moment for the chunk""" 

1553 atoms = self._parse_atoms() 

1554 

1555 if not np.any(atoms.pbc): 

1556 return self._parse_hirshfeld()["dipole"] 

1557 

1558 return None 

1559 

1560 @lazyproperty 

1561 def eigenvalues(self): 

1562 """All outputted eigenvalues for the system""" 

1563 return self._parse_eigenvalues()["eigenvalues"] 

1564 

1565 @lazyproperty 

1566 def occupancies(self): 

1567 """All outputted occupancies for the system""" 

1568 return self._parse_eigenvalues()["occupancies"] 

1569 

1570 

1571def get_header_chunk(fd): 

1572 """Returns the header information from the aims.out file""" 

1573 header = [] 

1574 line = "" 

1575 

1576 # Stop the header once the first SCF cycle begins 

1577 while ( 

1578 "Convergence: q app. | density | eigen (eV) | Etot (eV)" 

1579 not in line 

1580 and "Begin self-consistency iteration #" not in line 

1581 ): 

1582 try: 

1583 line = next(fd).strip() # Raises StopIteration on empty file 

1584 except StopIteration: 

1585 raise ParseError( 

1586 "No SCF steps present, calculation failed at setup." 

1587 ) 

1588 

1589 header.append(line) 

1590 return AimsOutHeaderChunk(header) 

1591 

1592 

1593def get_aims_out_chunks(fd, header_chunk): 

1594 """Yield an AimsOutCalcChunk for each image in the aims.out file."""

1595 try: 

1596 line = next(fd).strip() # Raises StopIteration on empty file 

1597 except StopIteration: 

1598 return 

1599 

1600 # If the calculation is relaxation the updated structural information 

1601 # occurs before the re-initialization 

1602 if header_chunk.is_relaxation: 

1603 chunk_end_line = ( 

1604 "Geometry optimization: Attempting to predict improved coordinates." 

1605 ) 

1606 else: 

1607 chunk_end_line = "Begin self-consistency loop: Re-initialization" 

1608 

1609 # If SCF is not converged then do not treat the next chunk_end_line as a 

1610 # new chunk until after the SCF is re-initialized 

1611 ignore_chunk_end_line = False 

1612 while True: 

1613 try: 

1614 line = next(fd).strip() # Raises StopIteration on empty file 

1615 except StopIteration: 

1616 break 

1617 

1618 lines = [] 

1619 while chunk_end_line not in line or ignore_chunk_end_line: 

1620 lines.append(line) 

1621 # If SCF cycle not converged or numerical stresses are requested, 

1622 # don't end chunk on next Re-initialization 

1623 patterns = [ 

1624 ( 

1625 "Self-consistency cycle not yet converged -" 

1626 " restarting mixer to attempt better convergence." 

1627 ), 

1628 ( 

1629 "Components of the stress tensor (for mathematical " 

1630 "background see comments in numerical_stress.f90)." 

1631 ), 

1632 "Calculation of numerical stress completed", 

1633 ] 

1634 if any([pattern in line for pattern in patterns]): 

1635 ignore_chunk_end_line = True 

1636 elif "Begin self-consistency loop: Re-initialization" in line: 

1637 ignore_chunk_end_line = False 

1638 

1639 try: 

1640 line = next(fd).strip() 

1641 except StopIteration: 

1642 break 

1643 

1644 yield AimsOutCalcChunk(lines, header_chunk) 

1645 

1646 

1647def check_convergence(chunks, non_convergence_ok=False): 

1648 """Check if the aims output file is for a converged calculation 

1649 

1650 Parameters 

1651 ---------- 

1652 chunks: list of AimsOutChunks 

1653 The list of chunks for the aims calculations 

1654 non_convergence_ok: bool 

1655 True if it is okay for the calculation to not be converged 

1656 

1657 Returns 

1658 ------- 

1659 bool 

1660 True if the calculation is converged 

1661 """ 

1662 if not non_convergence_ok and not chunks[-1].converged: 

1663 raise ParseError("The calculation did not complete successfully") 

1664 return True 

1665 

1666 

1667@reader 

1668def read_aims_output(fd, index=-1, non_convergence_ok=False): 

1669 """Import FHI-aims output files with all data available, i.e. 

1670 relaxations, MD information, force information, etc."""

1671 header_chunk = get_header_chunk(fd) 

1672 chunks = list(get_aims_out_chunks(fd, header_chunk)) 

1673 check_convergence(chunks, non_convergence_ok) 

1674 

1675 # Relaxations have an additional footer chunk due to how it is split 

1676 if header_chunk.is_relaxation: 

1677 images = [chunk.atoms for chunk in chunks[:-1]] 

1678 else: 

1679 images = [chunk.atoms for chunk in chunks] 

1680 return images[index] 

1681 
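# Typical usage (sketch), assuming an "aims.out" file in the working
# directory; index follows the usual ASE convention (-1 is the last image,
# slice(None) returns the whole trajectory).
#
#     >>> final = read_aims_output("aims.out")                    # doctest: +SKIP
#     >>> traj = read_aims_output("aims.out", index=slice(None))  # doctest: +SKIP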

1682 

1683@reader 

1684def read_aims_results(fd, index=-1, non_convergence_ok=False): 

1685 """Import FHI-aims output files and summarize all relevant information 

1686 into a dictionary""" 

1687 header_chunk = get_header_chunk(fd) 

1688 chunks = list(get_aims_out_chunks(fd, header_chunk)) 

1689 check_convergence(chunks, non_convergence_ok) 

1690 

1691 # Relaxations have an additional footer chunk due to how it is split 

1692 if header_chunk.is_relaxation and (index == -1): 

1693 return chunks[-2].results 

1694 

1695 return chunks[index].results