diff --git a/presentations/tf-qas.typ b/presentations/tf-qas.typ
index f7d74f9..73b5201 100644
--- a/presentations/tf-qas.typ
+++ b/presentations/tf-qas.typ
@@ -4,6 +4,8 @@
 #import "@preview/cetz:0.3.4"
 #import "@preview/typsium:0.2.0": ce
 #import "@preview/numbly:0.1.0": numbly
+#import "@preview/quill:0.7.2": *
+#import "@preview/quill:0.7.2": tequila as tq
 #import "./theme.typ": *
 
 #set heading(numbering: numbly("{1}.", default: "1.1"))
@@ -41,12 +43,6 @@
 )
 
-#slide[
-- #text(fill: purple)[Purple text is a question I have]
-- #text(fill: red)[Red text is something I think they did not do well]
-- #text(fill: orange)[Orange text is something I would have preferred a reference for]
-]
-
 #title-slide()
 
 #outline(depth: 1, title: text(fill: rgb("#00a6d6"))[Content])
@@ -54,21 +50,27 @@
 = Introduction
 
 == Variational Quantum Algorithms
+
+- NISQ era
 - Classical optimisation
 - Parametrized Quantum Circuit
-- Very structure dependent
+#pause
+- Performance is circuit dependent
 
 == Quantum Architecture Search
 
-- Automated Design
+- Automated Parametrized Quantum Circuit finding
+  - Solution to circuit dependency
+#pause
 - New Problems
   - Exponential search space
   - Ranking circuits during search
+#pause
 - Parallels with Neural Architecture Search
   - Differentiable QAS
   - Reinforcement-learning QAS
@@ -84,17 +86,21 @@
 - Possibility for easier transfer
 - Need to prove correlation with ground-truth
-  #text(fill: red)[- not done in paper]
+  #text(fill: red)[- not done in paper@training-free]
 
 = Method
 
 == Overview
 
 #align(horizon)[
+The steps of the protocol:
+#pause
 1. Sample circuits from search space
+#pause
 2. Filter using Path proxy
+#pause
 3. Rank on Expressibility
 ]
@@ -102,11 +108,21 @@ Following Neural Predictor based QAS@npqas
 - Native gate set ($cal(A) = {R_x, R_y, R_z, X X, Y Y, Z Z}$)
-- Layer based sampling
-  - Layers of $n/2$ gates
-- Gate based sampling
-  - placing 1 gate at a time
-
+#grid(
+  columns: (auto, auto),
+  rows: (auto, auto),
+  gutter: 1em,
+  [- Layer based sampling
+    - Layers of $n/2$ gates
+  ], grid.cell(rowspan:2)[
+    #quantum-circuit(equal-row-heights: true, row-spacing: 0.8em, wires: 6, 1, $R_l$, 1,[\ ],[\ ],1,$R_l$, 1,[\ ],[\ ],1,$R_l$)
+    #quantum-circuit(equal-row-heights: true, row-spacing: 0.8em, wires: 6, 3, [\ ], 1, $R_l$, 1,[\ ],[\ ],1,$R_l$, 1,[\ ],[\ ],1,$R_l$)
+    #quantum-circuit(equal-row-heights: true, row-spacing: 1.35em, wires: 6, 1, ctrl(1), 1, [\ ], 1, targ(), 1,[\ ], 1, ctrl(1),[\ ],1,targ(), 1,[\ ], 1, ctrl(1),[\ ],1,targ())
+  ],
+  [- Gate based sampling
+    - placing 1 gate at a time
+  ], []
+)
 - Why not fully random circuits?
   - Mitigating barren plateaus
   - Mitigating high circuit depth
@@ -115,25 +131,28 @@ Following Neural Predictor based QAS@npqas
 == Path Proxy
 
 #slide(composer: (auto, auto))[
-- 'zero-cost'
-  #text(fill:orange)[- best case: $O("Operations" times "Qubits"^2)$]
-  // I think it'd scale like this, but am uncertain since they didn't explain it anywhere
+- *'zero-cost'* - below $7.8 times 10^(-4)$s
+  #text(fill:orange)[- $O("Operations" times "Qubits"^2)$]
+#pause
 1. Represent as Directed acyclic graph
-
+#pause
 2. Count distinct paths from input-to-output
-
+#pause
 3. Top-R highest path count circuits
 ][
+  #meanwhile
 #image("tf-qas/circuit.png", height: 40%)
+  #pause
 #image("tf-qas/dag.png")
 #text(size: 0.6em)[#align(right)[from Training-Free QAS@training-free]]
 ]
 
 == Expressibility Proxy
 
-#text(fill: red)[- Performance hinges on Expressibility]
+Assumption: Expressibility $|->$ Performance
+#pause
 - Particularly valueable without prior knowledge
 
 #block(fill: blue.lighten(85%), inset: 12pt, radius: 6pt, stroke: 2pt + blue)[
@@ -155,25 +174,28 @@ Following Neural Predictor based QAS@npqas
   - Heisenberg model
   - $"Be"space.hair"H"_2$ molecule
+#pause
 - Compared to
   - Network-Predictive QAS@npqas
-  #text(fill: red)[- Hardware-efficient ansatz@hea-kandala] // but like, which one
-  - Random sampling
+  - Hardware-efficient ansatz@hea-kandala // but like, which one
+  - Their random sampling
+#pause
 - Implementation details
   - TensorCircuit python package@tensorcircuit
-  #text(fill: red)[- No code included anywhere]
+  #pause
+  #text(fill: red)[- No code provided, so not reproducible]
 
 == Proxy combinations
 
 #slide(composer: (auto, auto))[
 - Only Path
   - Fast proxy (each $~ 2 times 10^(-4) "s"$)
-  - Many queries (each $~ 10 "s"$)
+  - Many Adam queries
 - Only Expressibility
   - Slower proxy (each $~ 0.21 "s"$)
-  - Fewer queries
+  - Fewer queries (each $~ 10 "s"$)
 - Combined
   - Fast proxy filtering
@@ -186,13 +208,15 @@ Following Neural Predictor based QAS@npqas
 == Comparison with State of the Art
 
 #slide(composer: (1fr, auto))[
-#text(fill: purple)[- Where do these come from?]
+- Where do these come from?
 - A lot fewer queries
 - Shorter search times
-#text(fill: red)[- No ways to reproduce given]
+#pause
+However:
+- No way to reproduce and check
 ][
 #image("tf-qas/outcomes.png", height: 85%)
 #text(size: 0.6em)[#align(right)[from Training-Free QAS@training-free]]
 ]
@@ -200,14 +224,28 @@ Following Neural Predictor based QAS@npqas
 = Conclusion
 
-==
+== Takeaways
 
-- Combining proxies // can work better than seperately
+- Combining proxies
 
-- Training-Free methods are promising
+- Training-Free methods can work better than HEA
 
-#text(fill:red)[- Not reproducible]
+== What I will do (differently)
+Sampling:
+- Evolutionary Algorithm instead of random sampling
+  - With hardware constraints
+  - Can build on parts of "Genetic optimization of ansatz
+    expressibility for enhanced variational quantum algorithm
+    performance."@genetic-expressibility
+
+#pause
+Filtering:
+- Noise proxy
+- Better entanglement proxy
+
+#pause
+And of course: Share my code
 
 #slide[
 == References
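For concreteness, the path proxy on the `== Path Proxy` slide boils down to: view the circuit as a DAG, count distinct input-to-output paths, keep the top-R circuits. Below is a minimal Python sketch of that idea; the DAG encoding (one input and one output node per qubit, one node per gate) and the helper `count_paths` are illustrative assumptions, since the hunks shown do not spell out the exact graph construction.

```python
# Minimal sketch of a path-count proxy (illustrative; not the paper's code).
# A circuit is given as a list of (gate_name, qubits) in application order.
from collections import defaultdict

def count_paths(num_qubits, gates):
    """Count distinct input-to-output paths in a DAG view of the circuit,
    assuming one input/output node per qubit and one node per gate."""
    paths = defaultdict(int)   # paths[node] = number of paths from any input to node
    frontier = {}              # qubit -> last node seen on that wire
    for q in range(num_qubits):
        node = ("in", q)
        paths[node] = 1
        frontier[q] = node
    for idx, (name, qubits) in enumerate(gates):
        node = ("gate", idx, name)
        # every path reaching any wire this gate acts on now reaches the gate
        paths[node] = sum(paths[frontier[q]] for q in qubits)
        for q in qubits:
            frontier[q] = node
    # each wire finally terminates in its own output node
    return sum(paths[frontier[q]] for q in range(num_qubits))

# Example: 2-qubit circuit  Rx(q0), ZZ(q0, q1), Ry(q1)  ->  4 paths
print(count_paths(2, [("Rx", [0]), ("ZZ", [0, 1]), ("Ry", [1])]))
```

Under this encoding the count comes from a single pass over the gate list, which fits the sub-millisecond 'zero-cost' timing quoted on the slide; the complexity flagged in orange would depend on how the DAG is actually constructed.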
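The expressibility used for the final ranking is defined in the blue block on the `== Expressibility Proxy` slide, which falls outside the hunks shown here. Assuming it is the standard descriptor of Sim et al., i.e. the KL divergence between the fidelity distribution of randomly drawn parameter pairs and the Haar distribution P_Haar(F) = (N - 1)(1 - F)^(N - 2), a self-contained sketch for a toy single-qubit ansatz looks like this; the ansatz, sample size, and binning are illustrative choices, not the paper's settings.

```python
# Illustrative expressibility estimate (assumed definition: KL divergence to Haar).
# For a single qubit (N = 2) the Haar fidelity distribution is uniform on [0, 1].
import numpy as np

rng = np.random.default_rng(0)

def state(theta):
    """|psi(theta)> = Rz(theta[1]) Ry(theta[0]) |0>, a toy 1-qubit ansatz."""
    a, b = theta
    psi = np.array([np.cos(a / 2), np.sin(a / 2)], dtype=complex)
    return np.array([np.exp(-1j * b / 2), np.exp(1j * b / 2)]) * psi

def expressibility(n_pairs=5000, n_bins=75):
    thetas = rng.uniform(0, 2 * np.pi, size=(n_pairs, 2, 2))
    fids = np.array([abs(np.vdot(state(t1), state(t2))) ** 2 for t1, t2 in thetas])
    hist, _ = np.histogram(fids, bins=n_bins, range=(0, 1))
    p = hist / hist.sum()              # empirical fidelity distribution
    q = np.full(n_bins, 1.0 / n_bins)  # Haar fidelity distribution for N = 2
    mask = p > 0
    return np.sum(p[mask] * np.log(p[mask] / q[mask]))  # D_KL(P || Haar)

print(expressibility())  # closer to 0 = more expressive
```

Lower values mean the sampled states cover state space more uniformly, which is the quantity used in step 3 of the protocol to rank the circuits that survive the path filter.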