| 1 | +""" |
| 2 | +VQE on 2D square lattice Heisenberg model with size n*m |
| 3 | +""" |

import tensorflow as tf
import tensorcircuit as tc

# import cotengra as ctg

# optr = ctg.ReusableHyperOptimizer(
#     methods=["greedy", "kahypar"],
#     parallel=True,
#     minimize="flops",
#     max_time=120,
#     max_repeats=4096,
#     progbar=True,
# )
# tc.set_contractor("custom", optimizer=optr, preprocessing=True)
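# Uncommenting the block above switches contraction-path finding to a
# cotengra hyper-optimizer, which can speed up tensor-network contraction
# for larger lattices; it is optional for this small example.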

# single-precision complex numbers and TensorFlow as the simulation/autodiff backend
tc.set_dtype("complex64")
tc.set_backend("tensorflow")


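# Problem setup: a 3 x 2 square lattice (6 qubits) with nlayers entangling
# layers in the ansatz. The target Hamiltonian is the nearest-neighbor
# Heisenberg model H = sum_<i,j> (X_i X_j + Y_i Y_j + Z_i Z_j);
# heisenberg_measurements is used below with its default couplings, which
# correspond to this isotropic unit-coupling model.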
n, m, nlayers = 3, 2, 2
# Grid2DCoord maps the 2D lattice sites to a 1D qubit register and provides
# the nearest-neighbor bond graph via coord.lattice_graph()
coord = tc.templates.graphs.Grid2DCoord(n, m)


def singlet_init(circuit):
    # initial state: a product of singlets (|01> - |10>) / sqrt(2) on qubit
    # pairs (0, 1), (2, 3), ..., a reasonable starting point for the
    # antiferromagnetic Heisenberg ground state; assumes an even total
    # number of qubits (n * m)
    nq = circuit._nqubits
    for i in range(0, nq - 1, 2):
        j = (i + 1) % nq
        circuit.X(i)
        circuit.H(i)
        circuit.cnot(i, j)
        circuit.X(j)
    return circuit


def vqe_forward(param):
    # build the ansatz: the singlet initial state followed by nlayers of
    # parameterized two-qubit exponential gates generated by the SWAP matrix
    # on the lattice bonds, then return the Heisenberg energy expectation
    paramc = tc.backend.cast(param, dtype="complex64")
    c = tc.Circuit(n * m)
    c = singlet_init(c)
    for i in range(nlayers):
        c = tc.templates.blocks.Grid2D_entangling(
            c, coord, tc.gates._swap_matrix, paramc[i]
        )
    loss = tc.templates.measurements.heisenberg_measurements(c, coord.lattice_graph())
    return loss


# jit-compiled function returning both the energy and its gradient
vgf = tc.backend.jit(
    tc.backend.value_and_grad(vqe_forward),
)
# random initial parameters, shape [nlayers, 2 * n * m], feeding the entangling layers
param = tc.backend.implicit_randn(stddev=0.1, shape=[nlayers, 2 * n * m])


if __name__ == "__main__":
    # Adam with an exponentially decaying learning rate, wrapped into the
    # backend-agnostic optimizer interface of tensorcircuit
    lr = tf.keras.optimizers.schedules.ExponentialDecay(0.01, 100, 0.9)
    opt = tc.backend.optimizer(tf.keras.optimizers.Adam(lr))
    for j in range(1000):
        loss, gr = vgf(param)
        param = opt.update(gr, param)
        if j % 50 == 0:
            print("loss", loss.numpy())
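    # Optional cross-check (a minimal sketch added for illustration, not part
    # of the original example): for a lattice this small (2**(n * m) = 64
    # dimensional Hilbert space) the exact ground-state energy can be obtained
    # by dense diagonalization and compared with the converged VQE loss. It
    # assumes the default unit couplings of heisenberg_measurements and that
    # coord.lattice_graph() returns a networkx-style graph with integer qubit
    # labels and optional "weight" edge attributes.
    import numpy as np

    def heisenberg_dense(graph, nq):
        # sum over graph edges of w_ij * (X_i X_j + Y_i Y_j + Z_i Z_j)
        paulis = {
            "X": np.array([[0, 1], [1, 0]], dtype=np.complex64),
            "Y": np.array([[0, -1j], [1j, 0]], dtype=np.complex64),
            "Z": np.array([[1, 0], [0, -1]], dtype=np.complex64),
        }
        eye = np.eye(2, dtype=np.complex64)
        h = np.zeros((2**nq, 2**nq), dtype=np.complex64)
        for a, b in graph.edges:
            w = graph[a][b].get("weight", 1.0)
            for p in "XYZ":
                term = np.ones((1, 1), dtype=np.complex64)
                for q in range(nq):
                    term = np.kron(term, paulis[p] if q in (a, b) else eye)
                h = h + w * term
        return h

    e0 = np.linalg.eigvalsh(heisenberg_dense(coord.lattice_graph(), n * m))[0]
    print("exact ground-state energy", e0)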