# config.toml
# The task described by this configuration file; this field is optional. "GroundState" means computing the ground state energy of the molecule.
task = 'GroundState'

# This section provides information about the molecule.
[molecule]
# Symbols of atoms inside the molecule.
symbols = ['H', 'H']
# The Cartesian coordinates of each atom in the molecule.
coords = [ [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.7 ] ]
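# For example, a three-atom molecule could be specified as follows (an illustrative,
# approximate water geometry in Angstrom, not part of this configuration):
# symbols = ['O', 'H', 'H']
# coords = [ [ 0.0, 0.0, 0.0 ], [ 0.0, 0.757, 0.586 ], [ 0.0, -0.757, 0.586 ] ]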
# Quantum chemistry basis set used in the computation; see https://baike.baidu.com/item/%E5%9F%BA%E7%BB%84/6445527?fr=aladdin for more information on basis sets. Default is "sto-3g".
basis = 'sto-3g'
# The unit system used for the `coords` provided above.
# If set to `true`, Angstrom is used.
# If set to `false`, Bohr is used.
use_angstrom = true
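# For example, with `use_angstrom = false` the same H2 geometry would be written in Bohr
# (1 Angstrom ≈ 1.8897 Bohr, so 0.7 Angstrom ≈ 1.3228 Bohr):
# coords = [ [ 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 1.3228 ] ]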

# This section configures the classical quantum chemistry driver used to calculate the molecular integrals.
# NOTE: the corresponding classical quantum chemistry package needs to be preinstalled.
[driver]
# If set to `pyscf`, PySCF is used (currently only the `pyscf` driver is supported; more classical drivers will be added in the future).
name = 'pyscf'

# This section configures the quantum circuit (ansatz) used in VQE.
# NOTE: currently only the HardwareEfficient ansatz is supported; more ansatzes will come later!
[ansatz.HardwareEfficient]
# The depth of the HardwareEfficient ansatz. NOTE: on a personal laptop, we suggest a circuit depth of no more than 10.
depth = 2

# This section stores the configuration of the variational quantum eigensolver (VQE) method.
[VQE]
# Number of optimization cycles, default is 100.
num_iterations = 100
# The convergence criterion for the VQE optimization; default is 1e-5.
tol = 1e-5
# The loss value is recorded every `save_every` optimization steps.
save_every = 10
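# For example, with num_iterations = 100 and save_every = 10, the loss is expected to be
# recorded roughly every 10th iteration, i.e. about 10 recorded values in total.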

# This section specifies the optimizer used in the VQE method; the default is `Adam`. See https://www.paddlepaddle.org.cn/documentation/docs/zh/api/paddle/optimizer/Overview_cn.html for available optimizers.
[optimizer.Adam]
# The learning rate of the optimizer; default is 0.4. See https://www.paddlepaddle.org.cn/documentation/docs/zh/api/paddle/optimizer/Adam_cn.html for more details.
learning_rate = 0.4
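# A hypothetical alternative, assuming this interface accepts other Paddle optimizers
# (e.g. SGD; see the Paddle optimizer overview linked above). Commented out, not verified:
# [optimizer.SGD]
# learning_rate = 0.1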