@article{2770,
  title         = {Meta {Hamiltonian} Learning},
  author        = {Bienias, Przemyslaw and Seif, Alireza and Hafezi, Mohammad},
  year          = {2021},
  month         = apr,
  eprint        = {2104.04453},
  archiveprefix = {arXiv},
  url           = {https://arxiv.org/abs/2104.04453},
  abstract      = {Efficient characterization of quantum devices is a significant challenge critical for the development of large scale quantum computers. We consider an experimentally motivated situation, in which we have a decent estimate of the Hamiltonian, and its parameters need to be characterized and fine-tuned frequently to combat drifting experimental variables. We use a machine learning technique known as meta-learning to learn a more efficient optimizer for this task. We consider training with the nearest-neighbor Ising model and study the trained model's generalizability to other Hamiltonian models and larger system sizes. We observe that the meta-optimizer outperforms other optimization methods in average loss over test samples. This advantage follows from the meta-optimizer being less likely to get stuck in local minima, which highly skews the distribution of the final loss of the other optimizers. In general, meta-learning decreases the number of calls to the experiment and reduces the needed classical computational resources.},
}