diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index 52f8c99..0000000 --- a/Dockerfile +++ /dev/null @@ -1,16 +0,0 @@ -FROM nvcr.io/nvidia/quantum/cuda-quantum:cu12-0.11.0 -WORKDIR /app -RUN pip install --upgrade pip setuptools wheel -RUN pip install cudaq==0.12.0 -COPY dynamics101/ /app/dynamics101/ -COPY images/ /app/images/ -COPY qaoa-for-max-cut/ /app/qaoa-for-max-cut/ -COPY qec101/ /app/qec101/ -COPY qis-examples/ /app/qis-examples/ -COPY quick-start-to-quantum/ /app/quick-start-to-quantum/ -COPY quantum-applications-to-finance/ /app/quantum-applicaitons-to-finance/ -ADD *.ipynb *.py /app/ -ENV JUPYTER_LAB_PORT=8888 -EXPOSE ${JUPYTER_LAB_PORT} -ENTRYPOINT [] -CMD /usr/local/bin/jupyter-lab --port=${JUPYTER_LAB_PORT} --ip=0.0.0.0 --NotebookApp.token='' diff --git a/Guide-to-cuda-q-backends.ipynb b/Guide-to-cuda-q-backends.ipynb index b27d434..dd3a5ce 100644 --- a/Guide-to-cuda-q-backends.ipynb +++ b/Guide-to-cuda-q-backends.ipynb @@ -1,847 +1,718 @@ { - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# SPDX-License-Identifier: Apache-2.0 AND CC-BY-NC-4.0\n", - "#\n", - "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", - "# you may not use this file except in compliance with the License.\n", - "# You may obtain a copy of the License at\n", - "#\n", - "# http://www.apache.org/licenses/LICENSE-2.0\n", - "#\n", - "# Unless required by applicable law or agreed to in writing, software\n", - "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", - "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", - "# See the License for the specific language governing permissions and\n", - "# limitations under the License." 
- ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Accelerating Quantum Computing: A Step-by-Step Guide to Expanding Simulation Capabilities and Enabling Interoperability of Quantum Hardware\n", - " \n", - "## Overview of methods of accelerating quantum simulation with GPUs\n", - "\n", - "This notebook includes the following: \n", - "* Introduction to CUDA-Q through two Hello World examples using `sample` and `observe` calls. \n", - "\n", - "* Guide to different backends for executing quantum circuits, emphasizing a variety of patterns of parallelization: \n", - " * Statevector memory over multiple processors for simulation\n", - " * Circuit sampling over multiple processors\n", - " * Hamiltonian batching\n", - " * Circuit cutting\n", - " * Quantum hardware\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Hello World Examples " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Uncomment and execute this cell if necessary\n", - "#!pip install cudaq" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "\n", - "import cudaq\n", - "from cudaq import spin\n", - "from typing import List\n", - "import numpy as np\n", - "import sys" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In the first example below, we demonstrate how to define and sample a quantum kernel that encodes a quantum circuit. " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Example 1 - defining, drawing, and sampling a quantum kernel\n", - "\n", - "##############################################################\n", - "# 1. 
Select a backend for kernel execution\n", - "cudaq.set_target(\"qpp-cpu\")\n", - "##############################################################\n", - "\n", - "##############################################################\n", - "# 2. Define a kernel function \n", - "@cudaq.kernel\n", - "def kernel(qubit_count: int):\n", - " # Allocate our `qubit_count` to the kernel.\n", - " qvector = cudaq.qvector(qubit_count)\n", - "\n", - " # Apply gates to the qubit indexed by 0.\n", - " # CUDA-Q has several built in gates beyond the few examples below\n", - " # For a full list see https://nvidia.github.io/cuda-quantum/latest/api/default_ops.html\n", - " z(qvector[0])\n", - " z(qvector[0])\n", - " s(qvector[0])\n", - " t(qvector[0])\n", - " s(qvector[0])\n", - " h(qvector[0])\n", - " \n", - " # Apply gates to all qubits\n", - " x(qvector)\n", - " \n", - " # Apply a Controlled-X gate between qubit 0 (acting as the control)\n", - " # and each of the remaining qubits. \n", - " for i in range(1, qubit_count):\n", - " x.ctrl(qvector[0], qvector[i])\n", - "\n", - " # Measure the qubits\n", - " # If we don't specify measurements, all qubits are measured in\n", - " # the Z-basis by default.\n", - " mz(qvector)\n", - "\n", - "##############################################################\n", - "# 3. Call the kernel function with the variable qubit_count set to 2 and sample the outcomes\n", - "qubit_count = 2\n", - "result = cudaq.sample(kernel, qubit_count, shots_count=1000)\n", - "print(result)\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now it's your turn to try it out. 
\n", - "\n", - "**Exercise 1:** Edit the code below to create a kernel that produces a circuit for the GHZ state with 4 qubits" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Exercise 1 - Edit the code below to create a kernel that produces a\n", - "# circuit for the GHZ state with 4 qubits\n", - "\n", - "##############################################################\n", - "# 1. Select a backend for kernel execution\n", - "cudaq.set_target(\"qpp-cpu\")\n", - "##############################################################\n", - "\n", - "##############################################################\n", - "# 2. Define a kernel function \n", - "@cudaq.kernel\n", - "def kernel(qubit_count: int):\n", - " # Allocate our `qubit_count` to the kernel.\n", - " qvector = cudaq.qvector(qubit_count)\n", - "\n", - " ##############################################################\n", - " # Edit code below\n", - " \n", - " # Apply a Hadamard gate to the qubit indexed by 0.\n", - " \n", - " \n", - " # Apply a Controlled-X gate between qubit 0 (acting as the control)\n", - " # and each of the remaining qubits. \n", - "\n", - " # Measure the qubits\n", - "\n", - " # Edit code above\n", - " ##############################################################\n", - " \n", - "##############################################################\n", - "# 3. Call the kernel function with the variable qubit_count set to 2 and sample the outcomes\n", - "qubit_count = 4\n", - "result = cudaq.sample(kernel, qubit_count, shots_count=1000)\n", - "\n", - "print(result)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The next example illustrates a few things:\n", - "* Kernels can be used to define subcircuits. \n", - "* `cudaq.draw` can produce ascii or LaTeX circuit diagrams\n", - "* We can define Hamiltonians with `spin` operators and compute expecation values with `observe`." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Example 2 - Expectation value calculations\n", - "\n", - "# Define a quantum kernel function to apply a CNOT gate between a control qubit and a each qubit in a list of target qubits\n", - "\n", - "@cudaq.kernel\n", - "def cnot_kernel(control: cudaq.qubit, targets: cudaq.qview):\n", - " # Apply a Controlled-X gate between qubit 0 (acting as the control)\n", - " # and each of the remaining qubits. \n", - " for i in range(len(targets)):\n", - " x.ctrl(control, targets[i])\n", - "\n", - "# Define a quantum kernel function to generate a GHZ state on multpile qubits\n", - "@cudaq.kernel\n", - "def kernel(qubit_count: int):\n", - " # Allocate our `qubit_count` to the kernel.\n", - " control_qubit = cudaq.qubit()\n", - " target_qubits = cudaq.qvector(qubit_count-1)\n", - "\n", - " # Apply a Hadamard gate to the qubit indexed by 0.\n", - " h(control_qubit)\n", - " # Apply a Controlled-X gate between qubit 0 (acting as the control)\n", - " # and each of the remaining qubits. 
\n", - " cnot_kernel(control_qubit, target_qubits)\n", - "\n", - "# Define a Hamiltonian in terms of Pauli Spin operators.\n", - "hamiltonian = spin.z(0) + 2*spin.y(1) - spin.x(0) * spin.z(1) - spin.i(2)\n", - "\n", - "# Compute the expectation value given the state prepared by the kernel.\n", - "qubit_count = 3\n", - "result = cudaq.observe(kernel, hamiltonian, qubit_count, shots_count = 1000).expectation()\n", - "\n", - "print(cudaq.draw(kernel, qubit_count)) \n", - "#print(cudaq.draw('latex', kernel, qubit_count)) # Use the 'latex' option to generate LaTeX code for\n", - "print(hamiltonian)\n", - "print(' =', result)\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Guide to Different Simulation Targets\n", - "\n", - "\n", - "The figure below illustrates a few options for accelerating statevector simulations of single quantum processor kernel executions on one CPU, one GPU, or a multi-node, multi-GPU system. \n", - "\n", - "![](https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/refs/heads/main/images/accelerating/single-processor-backends.jpg)\n", - "\n", - "In the Hello World examples in the previous section, we saw statevector simulations of a QPU on a CPU. When GPU resources are available, we can use a single-GPU or multi-node, multi-GPU systems for fast statevector simulations. The `nvidia` target accelerates statevector simulations through `cuStateVec` library. 
This target offers a variety of configuration options:\n", - "\n", - "* **Single-precision GPU simulation** (default): The default of setting the target to `nvidia` through the command `cudaq.set_target('nvidia')` provides single (`fp32`) precision statevector simulation on one GPU.\n", - "\n", - "* **Double fp64 precision on a single-GPU**: The option `cudaq.set_target('nvidia', option='fp64')` increases the precision of the statevector simulation on one GPU.\n", - "\n", - "* **Multi-node, multi-GPU simulation**: To run the `cuStateVec` simulator on multiple GPUs, set the target to `nvidia` with the `mgpu` option (`cudaq.set_target('nvidia', option='mgpu,fp64')`) and then run the python file containing your quantum kernels within a `MPI` context: `mpiexec -np 2 python3 program.py`. Adjust the `-np` tag according to the number of GPUs you have available.\n", - "\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Next, we'll cover a few of the ways you can organize the distribution of quantum simulations over multiple GPU processors, whether you are simulating a single quantum processing unit (QPU) or multiple QPUs." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Single-QPU Statevector Simulations\n", - "\n", - " \n", - "\n", - "\n", - "In some cases, the memory required to hold the entire statevector for a simulation exceeds the memory of a single GPU. In these cases, we can distribute the statevector across multiple GPUs as the diagram in the image below suggests. \n", - "\n", - "\n", - "![](https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/refs/heads/main/images/accelerating/statevector-distribution.png)\n", - "\n", - "This is handled automatically within the `mgpu` option when the number of qubits in the statevector exceeds 25. 
By changing the environmental variable `CUDAQ_MGPU_NQUBITS_THRESH` prior to setting the target, you can change the threshold at which the statevector distribution is invoked.\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Simulating Parallel QPU computaiton\n", - "\n", - "Future quantum computers will accelerate performance by linking multiple QPUs for parallel processing. Today, you can simulate and test programs for these systems using GPUs, and with minimal changes to the target platform, the same code can be executed on multi-QPU setups once they are developed.\n", - "\n", - "![](https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/refs/heads/main/images/accelerating/multi-qpus.png)\n", - "\n", - "We'll examine a few multi-QPU parallelization patterns here:\n", - "\n", - "* Circuit sampling distributed over multiple processors\n", - "* Hamiltonian batching\n", - "* Circuit cutting\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "##### Circuit Sampling\n", - "\n", - "One method of parallelization is to sample a circuit over several processors as illustrated in the diagram below.\n", - "\n", - "![](https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/refs/heads/main/images/accelerating/circuit-sampling.png)\n", - "\n", - "Check out the [documentation](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/mqpusims.html) for code that demonstrates how to launch asynchronous sampling tasks using `sample_async` on multiple virtual QPUs, each simulated by a tensornet simulator backend using the `remote-mqpu` target." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "##### Hamiltonian Batching\n", - "Another method for distributing the computational load in a simulation is Hamiltonian batching. 
In this approach, the expectation values of the Hamiltonian's terms are calculated in parallel across several virtual QPUs, as illustrated in the image below.\n", - "\n", - "![](https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/refs/heads/main/images/accelerating/Hamiltonian-batching.png)\n", - "\n", - "The `nvidia-mqpu`option of the `nvidia` target along with the `execution=cudaq.parallel.thread` option in the `observe` call handles the distribution of the expectation value computations of a multi-term Hamiltonian across multiple virtual QPUs for you. Refer to the example below to see how this is carried out: \n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "cudaq.set_target(\"nvidia\", option=\"mqpu\")\n", - "target = cudaq.get_target()\n", - "num_qpus = target.num_qpus()\n", - "print(\"Number of QPUs:\", num_qpus)\n", - "\n", - "\n", - "# Define spin ansatz.\n", - "@cudaq.kernel\n", - "def kernel(angle: float):\n", - " qvector = cudaq.qvector(2)\n", - " x(qvector[0])\n", - " ry(angle, qvector[1])\n", - " x.ctrl(qvector[1], qvector[0])\n", - "\n", - "\n", - "# Define spin Hamiltonian.\n", - "hamiltonian = 5.907 - 2.1433 * spin.x(0) * spin.x(1) - 2.1433 * spin.y(\n", - " 0) * spin.y(1) + .21829 * spin.z(0) - 6.125 * spin.z(1)\n", - "\n", - "exp_val = cudaq.observe(kernel,\n", - " hamiltonian,\n", - " 0.59,\n", - " execution=cudaq.parallel.thread).expectation()\n", - "print(\"Expectation value: \", exp_val)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In the above code snippet, since the Hamiltonian contains four non-identity terms, there are four quantum circuits that need to be executed. When the `nvidia-mqpu` platform is selected, these circuits will be distributed across all available QPUs. The final expectation value result is computed from all QPU execution results. 
\n", - "\n", - "An alternative method for orchestrating Hamiltonian batching is to use the MPI context and multiple GPUs. You can read more about this [here](https://nvidia.github.io/cuda-quantum/latest/using/backends/platform.html#nvidia-mqpu-platform)." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "##### Circuit cutting\n", - "\n", - "Circuit cutting is a widely used technique for parallelization. One way to conceptualize circuit cutting is through the Max Cut problem. In this scenario, we aim to approximate the Max Cut of a graph using a divide-and-conquer strategy, also known as QAOA-in-QAOA or QAOA². This approach breaks the graph into smaller subgraphs and solves the Max Cut for each subgraph in parallel using QAOA (see references such as [arXiv:2205.11762v1](https://arxiv.org/abs/2205.11762), [arxiv.2101.07813v1](https://arxiv.org/abs/2101.07813), [arxiv:2304.03037v1](https://arxiv.org/abs/2304.03037), [arxiv:2009.06726](https://arxiv.org/abs/2009.06726), and [arxiv:2406:17383](https://arxiv.org/abs/2406.17383)). By doing so, we effectively decompose the QAOA circuit for the larger graph into smaller QAOA circuits for the subgraphs.\n", - "\n", - "![](https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/refs/heads/main/images/accelerating/qaoa-cut.png)\n", - "\n", - "To complete the circuit cutting, we'll need to merge the results of QAOA on the subgraphs into a result for the entire graph. This requires solving another smaller optimization problem, which can also be tackled with QAOA. You can read about that in more detail in a series of [interactive labs](https://github.com/NVIDIA/cuda-q-academic/tree/main/qaoa-for-max-cut).\n", - "\n", - "![](https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/refs/heads/main/images/accelerating/circuit-cutting.png)\n", - "\n", - "This example illustrates how to use the `MPI` context to orchestrate running `@cudaq.kernel` decorated functions in parallel. 
Additionally, a few exercises are built into this longer example to provide some practice with the CUDA-Q commands introduced earlier in this notebook. Solutions to these exercises appear in the [solutions-sc24.ipynb](https://github.com/NERSC/sc24-quantum-tutorial/blob/main/cudaq-overview/solutions-sc24.ipynb) file, but we encourage you to first attempt the exercises out yourself." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "First we need to define a graph and subgraphs. Let's start with the graph drawn below.\n", - "\n", - "\n", - "\n", - "For this demonstration, we'll divide our example graph into the five subgraphs depicted below:\n", - "\n", - "\n", - "\n", - "Execute the cell below to generate subgraphs for the divide-and-conquer QAOA. " - ] - }, - { - "cell_type": "code", - "execution_count": 59, - "metadata": {}, - "outputs": [], - "source": [ - "# Identify subgraphs, separating out the edges as source and target nodes\n", - "num_subgraphs = 5 # Number of subgraphs\n", - "nodeCountList = [8,7,6,5,4] # Number of nodes in each subgraph\n", - "nodeList : List[int] = [] # List of nodes in each of the subgraphs \n", - "edgeListSources : List[int] = [] # List of edge sources in each subgraph\n", - "edgeListTargets : List[int] = [] # List of edge targets in each subgraph\n", - "\n", - "# subgraph0 data\n", - "nodeList.append([3, 6, 9, 10, 13, 14, 21, 22])\n", - "edgeListSources.append([3,3,3,3,6,6,9,14])\n", - "edgeListTargets.append([14,9,10,13,22,13,21,22])\n", - "\n", - "# subgraph1 data\n", - "nodeList.append([8, 11, 12, 15, 16, 25, 26])\n", - "edgeListSources.append([8, 8, 11, 11, 11, 11, 12, 15, 16, 16, 25])\n", - "edgeListTargets.append([25, 12, 26, 25, 15, 12, 15, 16, 25, 26, 26])\n", - "\n", - "# subgraph2 data\n", - "nodeList.append([4, 5, 7, 18, 20, 24])\n", - "edgeListSources.append([4, 4, 5, 7, 18, 20])\n", - "edgeListTargets.append([5, 24, 7, 24, 20, 24])\n", - "\n", - "# subgraph3 data\n", - "nodeList.append([0, 19, 27, 
28, 29])\n", - "edgeListSources.append([0, 0, 19, 19, 27, 27])\n", - "edgeListTargets.append([19, 28, 27, 29, 29, 28])\n", - "\n", - "# subgraph4 data\n", - "nodeList.append([1, 2, 17, 23])\n", - "edgeListSources.append([1, 1, 2, 17])\n", - "edgeListTargets.append([23, 2, 17, 23])\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Next, we need a helper function that will be used to map graph nodes to qubits." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# We'll create this function to rename the nodes to be sequential integers\n", - "# beginning with 0 as a way to map the graph nodes to qubits. We take this\n", - "# approach because we can't use the `.index` option for lists\n", - "# within a cudaq.kernel.\n", - "def rename_nodes(edge_src, edge_tgt, nodes):\n", - " \"\"\" \n", - " Parameters\n", - " ----------\n", - " edges_src: List[int]\n", - " List of the first (source) node listed in each edge of the graph, when the edges of the graph are listed as pairs of nodes\n", - " edges_tgt: List[int]\n", - " List of the second (target) node listed in each edge of the graph, when the edges of the graph are listed as pairs of nodes\n", - " nodes: List[int]\n", - " List of nodes of the graph\n", - " \n", - " Returns\n", - " -------\n", - " new_edge_src : List[int]\n", - " List of the first (source) node listed in each edge of the graph after renaming nodes to be sequential integers beginning with 0, \n", - " when the edges of the graph are listed as pairs of nodes\n", - " new_edge_tgt : List[int]\n", - " List of the second (target) node listed in each edge of the graph after renaming nodes to be sequential integers beginning with 0, \n", - " when the edges of the graph are listed as pairs of nodes\n", - " \"\"\"\n", - " new_edge_src = []\n", - " new_edge_tgt = []\n", - " for i in range(len(edge_src)):\n", - " new_edge_src.append(nodes.index(edge_src[i]))\n", - " 
new_edge_tgt.append(nodes.index(edge_tgt[i]))\n", - " return new_edge_src, new_edge_tgt " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Next let's create kernels to combine into the QAOA circuit:\n", - "\n", - "* `qaoaProblem` kernel adds the gate sequence depicted below for each edge in the graph\n", - "\n", - "\n", - "\n", - "* `qaoaMixer` applies a parameterized `rx` gate to all the qubits, highlighted in green in the diagram below\n", - "\n", - "* `kernel_qaoa` builds the QAOA circuit drawn below using the `qaoaProblem` and `qaoaMixer`\n", - "\n", - "\n", - "\n", - "**Exercise 2:** The `kernel_qaoa` kernel has been defined for you. Your task is edit the two `###FIX_ME###`s in the code below to complete the `qaoaProblem` and `qaoaMixer` kernels. \n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Exercise 2 Edit the two ###FIX_ME###s in the code below. One is in `qaoaProblem` and the other is in `qaoaMixer`\n", - "\n", - "# Problem Kernel\n", - "@cudaq.kernel\n", - "def qaoaProblem(qubit_0 : cudaq.qubit, qubit_1 : cudaq.qubit, alpha : float):\n", - " \"\"\"Build the QAOA gate sequence between two qubits that represent an edge of the graph\n", - " Parameters\n", - " ----------\n", - " qubit_0: cudaq.qubit\n", - " Qubit representing the first vertex of an edge\n", - " qubit_1: cudaq.qubit\n", - " Qubit representing the second vertex of an edge\n", - " alpha: float\n", - " Free variable\n", - "\n", - " \"\"\"\n", - " x.ctrl(qubit_0, qubit_1)\n", - " ###FIX_ME###\n", - " x.ctrl(qubit_0, qubit_1)\n", - "\n", - "# Mixer Kernel\n", - "@cudaq.kernel\n", - "def qaoaMixer(###FIX_ME###):\n", - " \"\"\"Build the QAOA gate sequence that is applied to each qubit in the mixer portion of the circuit\n", - " Parameters\n", - " ----------\n", - " qubit_0: cudaq.qubit\n", - " Qubit\n", - " beta: float\n", - " Free variable\n", - "\n", - " \"\"\"\n", - " rx(2.0*beta, qubits)\n", - 
"\n", - "\n", - "# We now define the kernel_qaoa function which will build the QAOA circuit for our graph\n", - "@cudaq.kernel\n", - "def kernel_qaoa(qubit_count :int, layer_count: int, qubits_src: List[int], qubits_tgt: List[int], thetas : List[float]):\n", - " \"\"\"Build the QAOA circuit for max cut of the graph with given edges and nodes\n", - " Parameters\n", - " ----------\n", - " qubit_count: int\n", - " Number of qubits in the circuit, which is the same as the number of nodes in our graph\n", - " layer_count : int\n", - " Number of layers in the QAOA kernel\n", - " edges_src: List[int]\n", - " List of the first (source) node listed in each edge of the graph, when the edges of the graph are listed as pairs of nodes\n", - " edges_tgt: List[int]\n", - " List of the second (target) node listed in each edge of the graph, when the edges of the graph are listed as pairs of nodes\n", - " thetas: List[float]\n", - " Free variables to be optimized\n", - " \"\"\"\n", - " # Let's allocate the qubits\n", - " qreg = cudaq.qvector(qubit_count)\n", - "\n", - " # And then place the qubits in superposition\n", - " h(qreg)\n", - " \n", - " # Each layer has two components: the problem kernel and the mixer\n", - " for i in range(layer_count):\n", - " # Add the problem kernel to each layer\n", - " for edge in range(len(qubits_src)):\n", - " qubitu = qubits_src[edge]\n", - " qubitv = qubits_tgt[edge]\n", - " qaoaProblem(qreg[qubitu], qreg[qubitv], thetas[i])\n", - " # Add the mixer kernel to each layer\n", - " qaoaMixer(qreg,thetas[i+layer_count])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We'll need a Hamiltonian to encode the cost function: $$H= \\frac{1}{2}\\sum_{(u,v)\\in E} (Z_uZ_v-II),$$ where $E$ is the set of edges of the graph." 
- ] - }, - { - "cell_type": "code", - "execution_count": 72, - "metadata": {}, - "outputs": [], - "source": [ - "# Define a function to generate the Hamiltonian for a max cut problem using the graph G\n", - "\n", - "def hamiltonian_max_cut(sources : List[int], targets : List[int]):\n", - " \"\"\"Hamiltonian for finding the max cut for the graph with edges defined by the pairs generated by sources and targets\n", - "\n", - " Parameters\n", - " ----------\n", - " sources: List[int]\n", - " list of the source vertices for edges in the graph\n", - " targets: List[int]\n", - " list of the target vertices for the edges in the graph\n", - "\n", - " Returns\n", - " -------\n", - " cudaq.SpinOperator\n", - " Hamiltonian for finding the max cut of the graph defined by the given edges\n", - " \"\"\"\n", - " hamiltonian = 0\n", - " \n", - " # Since our vertices may not be a list from 0 to n, or may not even be integers,\n", - " # we need to map the vertices to the list of integers 0 to qubit_count -1\n", - " \n", - " for i in range(len(sources)):\n", - " # Add a term to the Hamiltonian for the edge (u,v)\n", - " qubitu = sources[i]\n", - " qubitv = targets[i]\n", - " hamiltonian += 0.5*(spin.z(qubitu)*spin.z(qubitv)-spin.i(qubitu)*spin.i(qubitv))\n", - "\n", - " return hamiltonian" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now let's put this all together in a function that finds the the optimal parameters for QAOA of a given subgraph." 
- ] - }, - { - "cell_type": "code", - "execution_count": 69, - "metadata": {}, - "outputs": [], - "source": [ - "def find_optimal_parameters(qubit_src : List[int], qubit_tgt : List[int], qubit_count : int, layer_count: int, seed :int):\n", - " \"\"\"Function for finding the optimal parameters of QAOA for the max cut of a graph\n", - " Parameters\n", - " ----------\n", - " qubit_src: List[int]\n", - " qubit_tgt: List[int]\n", - " Sources and targets defining the edges of the graph\n", - " nodes: List[int]\n", - " Integer labels of the nodes of the graph\n", - " qubit_count: int\n", - " qubit_count is the number of nodes in the graph\n", - " layer_count : int\n", - " Number of layers in the QAOA circuit\n", - " seed : int\n", - " Random seed for reproducibility of results\n", - "\n", - " Returns\n", - " -------\n", - " list[float]\n", - " Optimal parameters for the QAOA applied to the given graph\n", - " \"\"\"\n", - " \n", - " # Each layer of the QAOA kernel contains 2 parameters\n", - " parameter_count : int = 2*layer_count\n", - "\n", - " # Specify the optimizer and its initial parameters.\n", - " optimizer = cudaq.optimizers.COBYLA()\n", - " np.random.seed(seed)\n", - " optimizer.initial_parameters = np.random.uniform(-np.pi, np.pi,\n", - " parameter_count)\n", - " \n", - " # Pass the kernel, spin operator, and optimizer to `cudaq.vqe`.\n", - " optimal_expectation, optimal_parameters = cudaq.vqe(\n", - " kernel=kernel_qaoa,\n", - " spin_operator=hamiltonian_max_cut(qubit_src, qubit_tgt),\n", - " argument_mapper=lambda parameter_vector: (qubit_count, layer_count, qubit_src, qubit_tgt, parameter_vector),\n", - " optimizer=optimizer,\n", - " parameter_count=parameter_count)\n", - "\n", - " return optimal_parameters" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Before running this function in parallel, let's execute it sequentially. 
" - ] - }, - { - "cell_type": "code", - "execution_count": 70, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[2.760265752934724, -1.1781480121902836], [2.863390666464872, -1.2262588372574177], [2.7568258048030376, -1.1779328954753276], [2.8169010481390853, -1.2171055596613192], [2.7488320354701976, -1.178054032241366]]\n" - ] + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# SPDX-License-Identifier: Apache-2.0 AND CC-BY-NC-4.0\n", + "#\n", + "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# http://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Accelerating Quantum Computing: A Step-by-Step Guide to Expanding Simulation Capabilities and Enabling Interoperability of Quantum Hardware\n", + " \n", + "## Overview of methods of accelerating quantum simulation with GPUs\n", + "\n", + "This notebook covers the following: \n", + "* Introduction to CUDA-Q through three Hello World examples using [`sample`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.sample) and [`observe`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.observe) calls. 
\n", + "\n", + "* Guide to different backends for executing quantum circuits, emphasizing a variety of patterns of parallelization: \n", + " * Statevector memory over multiple processors for simulation\n", + " * Circuit sampling over multiple processors\n", + " * Hamiltonian batching\n", + " * Circuit cutting\n", + " * Quantum hardware\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Hello World Examples " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "## To install cudaq-solvers (if not already installed), uncomment and run:\n", + "## !pip install cudaq-solvers -q\n", + "## Note: cudaq-solvers requires libgfortran. If you see an ImportError, run:\n", + "## !apt-get install -y libgfortran5\n", + "import cudaq\n", + "from cudaq import spin\n", + "\n", + "import cudaq_solvers as solvers\n", + "from typing import List\n", + "import numpy as np\n", + "import networkx as nx\n", + "import sys" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In the first example below, we demonstrate how to define and sample a quantum kernel that encodes a quantum circuit. When using [`cudaq.sample`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.sample), all qubits are measured in the Z-basis by default — explicit measurement operations in the kernel are not needed. For kernels that require explicit measurements (e.g., measuring specific qubits or in different bases), use the [`cudaq.run`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.run) command instead. You can explore more Hello World examples and the `run` command in this [interactive widget](https://nvidia.github.io/cuda-q-academic/quick-start-to-quantum/interactive_widget/cudaq-hello-world.html)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Example 1 - defining, drawing, and sampling a quantum kernel\n", + "\n", + "##############################################################\n", + "# 1. Select a backend for kernel execution\n", + "cudaq.set_target(\"qpp-cpu\")\n", + "##############################################################\n", + "\n", + "##############################################################\n", + "# 2. Define a kernel function \n", + "@cudaq.kernel\n", + "def kernel(qubit_count: int):\n", + " # Allocate our `qubit_count` to the kernel.\n", + " qvector = cudaq.qvector(qubit_count)\n", + "\n", + " # Apply gates to the qubit indexed by 0.\n", + " # CUDA-Q has several built in gates beyond the few examples below\n", + " # For a full list see https://nvidia.github.io/cuda-quantum/latest/api/default_ops.html\n", + " z(qvector[0])\n", + " z(qvector[0])\n", + " s(qvector[0])\n", + " t(qvector[0])\n", + " s(qvector[0])\n", + " h(qvector[0])\n", + " \n", + " # Apply gates to all qubits\n", + " x(qvector)\n", + " \n", + " # Apply a Controlled-X gate between qubit 0 (acting as the control)\n", + " # and each of the remaining qubits. \n", + " for i in range(1, qubit_count):\n", + " x.ctrl(qvector[0], qvector[i])\n", + "\n", + "##############################################################\n", + "# 3. Call the kernel function with the variable qubit_count set to 2 and sample the outcomes\n", + "# Note: `sample` measures all qubits in the Z-basis by default.\n", + "# For explicit measurement control, use `cudaq.run` instead.\n", + "qubit_count = 2\n", + "result = cudaq.sample(kernel, qubit_count, shots_count=1000)\n", + "print(result)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The example below shows how to create a kernel that produces the GHZ state with 4 qubits — a maximally entangled state frequently used as a benchmark in quantum computing. 
The structure is straightforward: a Hadamard gate on the first qubit creates superposition, and a chain of CNOT gates spreads the entanglement across all remaining qubits." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Example 2 - GHZ state with 4 qubits\n", + "\n", + "##############################################################\n", + "# 1. Select a backend for kernel execution\n", + "cudaq.set_target(\"qpp-cpu\")\n", + "##############################################################\n", + "\n", + "##############################################################\n", + "# 2. Define a kernel function \n", + "@cudaq.kernel\n", + "def kernel(qubit_count: int):\n", + " # Allocate our `qubit_count` to the kernel.\n", + " qvector = cudaq.qvector(qubit_count)\n", + "\n", + " # Apply a Hadamard gate to the qubit indexed by 0.\n", + " h(qvector[0])\n", + " \n", + " # Apply a Controlled-X gate between qubit 0 (acting as the control)\n", + " # and each of the remaining qubits. \n", + " for i in range(1, qubit_count):\n", + " x.ctrl(qvector[0], qvector[i])\n", + "\n", + "##############################################################\n", + "# 3. Call the kernel function with the variable qubit_count set to 4 and sample the outcomes\n", + "qubit_count = 4\n", + "result = cudaq.sample(kernel, qubit_count, shots_count=1000)\n", + "\n", + "print(result)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The next example illustrates a few additional features:\n", + "* Kernels can be used to define subcircuits. 
\n", + "* [`cudaq.draw`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.draw) can produce ASCII or LaTeX circuit diagrams.\n", + "* Hamiltonians can be defined with [`spin`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.SpinOperator) operators, and expectation values can be computed with [`observe`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.observe)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Example 3 - Expectation value calculations\n", + "\n", + "# Define a quantum kernel function to apply a CNOT gate between a control qubit and each qubit in a list of target qubits\n", + "\n", + "@cudaq.kernel\n", + "def cnot_kernel(control: cudaq.qubit, targets: cudaq.qview):\n", + " # Apply a Controlled-X gate between qubit 0 (acting as the control)\n", + " # and each of the remaining qubits. \n", + " for i in range(len(targets)):\n", + " x.ctrl(control, targets[i])\n", + "\n", + "# Define a quantum kernel function to generate a GHZ state on multiple qubits\n", + "@cudaq.kernel\n", + "def kernel(qubit_count: int):\n", + " # Allocate our `qubit_count` to the kernel.\n", + " control_qubit = cudaq.qubit()\n", + " target_qubits = cudaq.qvector(qubit_count-1)\n", + "\n", + " # Apply a Hadamard gate to the qubit indexed by 0.\n", + " h(control_qubit)\n", + " # Apply a Controlled-X gate between qubit 0 (acting as the control)\n", + " # and each of the remaining qubits. 
\n", + " cnot_kernel(control_qubit, target_qubits)\n", + "\n", + "# Define a Hamiltonian in terms of Pauli Spin operators.\n", + "hamiltonian = spin.z(0) + 2*spin.y(1) - spin.x(0) * spin.z(1) - spin.i(2)\n", + "\n", + "# Compute the expectation value given the state prepared by the kernel.\n", + "qubit_count = 3\n", + "result = cudaq.observe(kernel, hamiltonian, qubit_count, shots_count = 1000).expectation()\n", + "\n", + "print(cudaq.draw(kernel, qubit_count)) \n", + "#print(cudaq.draw('latex', kernel, qubit_count)) # Use the 'latex' option to generate LaTeX code for\n", + "print(hamiltonian)\n", + "print(' =', result)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Guide to Different Simulation Targets\n", + "\n", + "\n", + "The figure below illustrates a few options for accelerating statevector simulations of single quantum processor kernel executions on one CPU, one GPU, or a multi-node, multi-GPU system. \n", + "\n", + "![](https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/refs/heads/main/images/accelerating/single-processor-backends.jpg)\n", + "\n", + "In the Hello World examples in the previous section, we saw statevector simulations of a QPU on a CPU using the [`qpp-cpu`](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/svsims.html#cpu) target. When GPU resources are available, we can use a single-GPU or multi-node, multi-GPU systems for fast statevector simulations. The [`nvidia`](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/svsims.html#single-gpu) target accelerates statevector simulations through the [`cuStateVec`](https://developer.nvidia.com/cuquantum-sdk) library. For a comprehensive reference on all statevector simulator options, see the [State Vector Simulators documentation](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/svsims.html). 
This target offers a variety of configuration options:\n", + "\n", + "* **[Single-precision GPU simulation](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/svsims.html#single-gpu)** (default): The default of setting the target to [`nvidia`](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/svsims.html#single-gpu) through the command `cudaq.set_target('nvidia')` provides single (`fp32`) precision statevector simulation on one GPU.\n", + "\n", + "* **Double fp64 precision on a single-GPU**: The option `cudaq.set_target('nvidia', option='fp64')` increases the precision of the statevector simulation on one GPU.\n", + "\n", + "* **[Multi-node, multi-GPU simulation](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/svsims.html#multi-gpu-multi-node)**: To run the `cuStateVec` simulator on multiple GPUs, set the target to `nvidia` with the [`mgpu`](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/svsims.html#multi-gpu-multi-node) option (`cudaq.set_target('nvidia', option='mgpu,fp64')`) and then run the python file containing your quantum kernels within a `MPI` context: `mpiexec -np 2 python3 program.py`. Adjust the `-np` tag according to the number of GPUs you have available.\n", + "\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, we'll cover a few of the ways you can organize the distribution of quantum simulations over multiple GPU processors, whether you are simulating a single quantum processing unit (QPU) or multiple QPUs." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Single-QPU Statevector Simulations\n", + "\n", + " \n", + "\n", + "\n", + "In some cases, the memory required to hold the entire statevector for a simulation exceeds the memory of a single GPU. In these cases, we can distribute the statevector across multiple GPUs as the diagram in the image below suggests. 
\n", + "\n", + "\n", + "![](https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/refs/heads/main/images/accelerating/statevector-distribution.png)\n", + "\n", + "This is handled automatically within the [`mgpu`](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/svsims.html#multi-gpu-multi-node) option when the number of qubits in the statevector exceeds 25. By changing the environment variable `CUDAQ_MGPU_NQUBITS_THRESH` prior to setting the target, you can change the threshold at which the statevector distribution is invoked.\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Simulating Parallel QPU computation\n", + "\n", + "Future quantum computers will accelerate performance by linking multiple QPUs for parallel processing. Today, you can simulate and test programs for these systems using GPUs, and with minimal changes to the target platform, the same code can be executed on multi-QPU setups once they are developed.\n", + "\n", + "![](https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/refs/heads/main/images/accelerating/multi-qpus.png)\n", + "\n", + "We'll examine a few multi-QPU parallelization patterns here:\n", + "\n", + "* Circuit sampling distributed over multiple processors\n", + "* Hamiltonian batching\n", + "* Circuit cutting\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "##### Circuit Sampling\n", + "\n", + "One method of parallelization is to sample a circuit over several processors as illustrated in the diagram below.\n", + "\n", + "![](https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/refs/heads/main/images/accelerating/circuit-sampling.png)\n", + "\n", + "Check out the [documentation](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/mqpusims.html) for code that demonstrates how to launch asynchronous sampling tasks using 
[`sample_async`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.sample_async) on multiple virtual QPUs, each simulated by a [`tensornet`](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/tnsims.html) simulator backend using the [`remote-mqpu`](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/mqpusims.html#multi-qpu-other-backends) target." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "##### Hamiltonian Batching\n", + "Another method for distributing the computational load in a simulation is Hamiltonian batching. In this approach, the expectation values of the Hamiltonian's terms are calculated in parallel across several virtual QPUs, as illustrated in the image below.\n", + "\n", + "![](https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/refs/heads/main/images/accelerating/Hamiltonian-batching.png)\n", + "\n", + "The [`mqpu`](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/mqpusims.html) option of the [`nvidia`](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/svsims.html#single-gpu) target along with the `execution=cudaq.parallel.thread` option in the [`observe`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.observe) call handles the distribution of the expectation value computations of a multi-term Hamiltonian across multiple virtual QPUs for you. 
Refer to the example below to see how this is carried out: \n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cudaq.set_target(\"nvidia\", option=\"mqpu\")\n", + "target = cudaq.get_target()\n", + "num_qpus = target.num_qpus()\n", + "print(\"Number of QPUs:\", num_qpus)\n", + "\n", + "\n", + "# Define spin ansatz.\n", + "@cudaq.kernel\n", + "def kernel(angle: float):\n", + " qvector = cudaq.qvector(2)\n", + " x(qvector[0])\n", + " ry(angle, qvector[1])\n", + " x.ctrl(qvector[1], qvector[0])\n", + "\n", + "\n", + "# Define spin Hamiltonian.\n", + "hamiltonian = 5.907 - 2.1433 * spin.x(0) * spin.x(1) - 2.1433 * spin.y(\n", + " 0) * spin.y(1) + .21829 * spin.z(0) - 6.125 * spin.z(1)\n", + "\n", + "exp_val = cudaq.observe(kernel,\n", + " hamiltonian,\n", + " 0.59,\n", + " execution=cudaq.parallel.thread).expectation()\n", + "print(\"Expectation value: \", exp_val)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In the above code snippet, since the Hamiltonian contains four non-identity terms, there are four quantum circuits that need to be executed. When the [`nvidia`](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/svsims.html#single-gpu) target with the [`mqpu`](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/mqpusims.html) option is selected, these circuits will be distributed across all available QPUs. The final expectation value result is computed from all QPU execution results. \n", + "\n", + "An alternative method for orchestrating Hamiltonian batching is to use the MPI context and multiple GPUs. You can read more about this [here](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/mqpusims.html)." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "##### Circuit cutting\n", + "\n", + "Circuit cutting is a widely used technique for parallelization. 
One way to conceptualize circuit cutting is through the Max Cut problem. In this scenario, we aim to approximate the Max Cut of a graph using a divide-and-conquer strategy, also known as QAOA-in-QAOA or QAOA². This approach breaks the graph into smaller subgraphs and solves the Max Cut for each subgraph in parallel using QAOA (see references such as [arXiv:2205.11762v1](https://arxiv.org/abs/2205.11762), [arxiv.2101.07813v1](https://arxiv.org/abs/2101.07813), [arxiv:2304.03037v1](https://arxiv.org/abs/2304.03037), [arxiv:2009.06726](https://arxiv.org/abs/2009.06726), and [arxiv:2406:17383](https://arxiv.org/abs/2406.17383)). By doing so, we effectively decompose the QAOA circuit for the larger graph into smaller QAOA circuits for the subgraphs.\n", + "\n", + "![](https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/refs/heads/main/images/accelerating/qaoa-cut.png)\n", + "\n", + "To complete the circuit cutting, we'll need to merge the results of QAOA on the subgraphs into a result for the entire graph. This requires solving another smaller optimization problem, which can also be tackled with QAOA. You can read about that in more detail in a series of [interactive labs](https://github.com/NVIDIA/cuda-q-academic/tree/main/qaoa-for-max-cut).\n", + "\n", + "![](https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/refs/heads/main/images/accelerating/circuit-cutting.png)\n", + "\n", + "This example illustrates how to use the `MPI` context to orchestrate running quantum algorithms in parallel. We'll use [`solvers.qaoa()`](https://nvidia.github.io/cuda-quantum/latest/applications/python/qaoa.html) from the [`cudaq-solvers`](https://nvidia.github.io/cudaqx/components/solvers/introduction.html) library, which encapsulates the entire QAOA workflow—kernel construction, optimization, and sampling—so we can focus on the backends and parallelization patterns rather than the QAOA circuit structure itself." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "First we need to define a graph and subgraphs. Let's start with the graph drawn below.\n", + "\n", + "\n", + "\n", + "For this demonstration, we'll divide our example graph into the five subgraphs depicted below:\n", + "\n", + "\n", + "\n", + "Execute the cell below to generate subgraphs for the divide-and-conquer QAOA. " + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "metadata": {}, + "outputs": [], + "source": [ + "# Identify subgraphs, separating out the edges as source and target nodes\n", + "num_subgraphs = 5 # Number of subgraphs\n", + "nodeCountList = [8,7,6,5,4] # Number of nodes in each subgraph\n", + "nodeList : List[int] = [] # List of nodes in each of the subgraphs \n", + "edgeListSources : List[int] = [] # List of edge sources in each subgraph\n", + "edgeListTargets : List[int] = [] # List of edge targets in each subgraph\n", + "\n", + "# subgraph0 data\n", + "nodeList.append([3, 6, 9, 10, 13, 14, 21, 22])\n", + "edgeListSources.append([3,3,3,3,6,6,9,14])\n", + "edgeListTargets.append([14,9,10,13,22,13,21,22])\n", + "\n", + "# subgraph1 data\n", + "nodeList.append([8, 11, 12, 15, 16, 25, 26])\n", + "edgeListSources.append([8, 8, 11, 11, 11, 11, 12, 15, 16, 16, 25])\n", + "edgeListTargets.append([25, 12, 26, 25, 15, 12, 15, 16, 25, 26, 26])\n", + "\n", + "# subgraph2 data\n", + "nodeList.append([4, 5, 7, 18, 20, 24])\n", + "edgeListSources.append([4, 4, 5, 7, 18, 20])\n", + "edgeListTargets.append([5, 24, 7, 24, 20, 24])\n", + "\n", + "# subgraph3 data\n", + "nodeList.append([0, 19, 27, 28, 29])\n", + "edgeListSources.append([0, 0, 19, 19, 27, 27])\n", + "edgeListTargets.append([19, 28, 27, 29, 29, 28])\n", + "\n", + "# subgraph4 data\n", + "nodeList.append([1, 2, 17, 23])\n", + "edgeListSources.append([1, 1, 2, 17])\n", + "edgeListTargets.append([23, 2, 17, 23])\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, we need a helper 
function that will be used to map graph nodes to qubits." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Rename nodes to sequential integers beginning with 0 so they can serve\n", + "# as qubit indices. The mapping is: qubit j <-> nodes[j], which lets us\n", + "# translate QAOA bitstrings back to original graph node labels.\n", + "def rename_nodes(edge_src, edge_tgt, nodes):\n", + " \"\"\" \n", + " Parameters\n", + " ----------\n", + " edges_src: List[int]\n", + " List of the first (source) node listed in each edge of the graph, when the edges of the graph are listed as pairs of nodes\n", + " edges_tgt: List[int]\n", + " List of the second (target) node listed in each edge of the graph, when the edges of the graph are listed as pairs of nodes\n", + " nodes: List[int]\n", + " List of nodes of the graph\n", + " \n", + " Returns\n", + " -------\n", + " new_edge_src : List[int]\n", + " List of the first (source) node listed in each edge of the graph after renaming nodes to be sequential integers beginning with 0, \n", + " when the edges of the graph are listed as pairs of nodes\n", + " new_edge_tgt : List[int]\n", + " List of the second (target) node listed in each edge of the graph after renaming nodes to be sequential integers beginning with 0, \n", + " when the edges of the graph are listed as pairs of nodes\n", + " \"\"\"\n", + " new_edge_src = []\n", + " new_edge_tgt = []\n", + " for i in range(len(edge_src)):\n", + " new_edge_src.append(nodes.index(edge_src[i]))\n", + " new_edge_tgt.append(nodes.index(edge_tgt[i]))\n", + " return new_edge_src, new_edge_tgt " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Instead of manually building the QAOA kernel (problem unitary, mixer unitary, and layer structure), we can use [`solvers.qaoa()`](https://nvidia.github.io/cuda-quantum/latest/applications/python/qaoa.html) from the 
[`cudaq-solvers`](https://nvidia.github.io/cudaqx/components/solvers/introduction.html) library. This function encapsulates the full QAOA algorithm—constructing the parameterized circuit, running the classical optimizer, and sampling the optimized circuit—in a single call.\n", + "\n", + "For a detailed walkthrough of how QAOA circuits are built from scratch, see the [interactive QAOA tutorials](https://github.com/NVIDIA/cuda-q-academic/tree/main/qaoa-for-max-cut).\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# solvers.qaoa() handles the full QAOA workflow in a single call.\n", + "# Here is the function signature for reference:\n", + "#\n", + "# optimal_value, optimal_parameters, sample_result = solvers.qaoa(\n", + "# hamiltonian, # the cost Hamiltonian\n", + "# layer_count, # number of QAOA layers (p)\n", + "# initial_parameters # initial gamma and beta angles\n", + "# )\n", + "#\n", + "# - optimal_value : the minimized expectation value found by the optimizer\n", + "# - optimal_parameters : the gamma/beta angles that achieved the optimum\n", + "# - sample_result : measurement outcomes from sampling the optimized circuit\n", + "#\n", + "# See the run_qaoa() helper below for a full working example.\n", + "help(solvers.qaoa)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We'll need a Hamiltonian to encode the cost function: $$H= \\frac{1}{2}\\sum_{(u,v)\\in E} (Z_uZ_v-II),$$ where $E$ is the set of edges of the graph. The `solvers.get_maxcut_hamiltonian()` function generates this directly from a NetworkX graph." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 72, + "metadata": {}, + "outputs": [], + "source": [ + "# Example: build a small graph and inspect its Max-Cut Hamiltonian\n", + "G_example = nx.Graph()\n", + "G_example.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0)]) # a 4-node cycle\n", + "\n", + "H_example = solvers.get_maxcut_hamiltonian(G_example)\n", + "print(\"Max-Cut Hamiltonian for a 4-node cycle:\")\n", + "print(H_example)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now let's put this all together in a function that runs QAOA on a given subgraph. `solvers.qaoa()` takes the cost Hamiltonian, number of layers, and initial parameters, and returns the optimal expectation value, optimal parameters, and sampled measurement results." + ] + }, + { + "cell_type": "code", + "execution_count": 69, + "metadata": {}, + "outputs": [], + "source": [ + "def run_qaoa(qubit_src : List[int], qubit_tgt : List[int], qubit_count : int, layer_count: int, seed : int):\n", + " \"\"\"Run QAOA for the max cut of a graph defined by its edges.\n", + "\n", + " Parameters\n", + " ----------\n", + " qubit_src: List[int]\n", + " qubit_tgt: List[int]\n", + " Sources and targets defining the edges of the graph (0-indexed)\n", + " qubit_count : int\n", + " Number of nodes in the graph\n", + " layer_count : int\n", + " Number of layers in the QAOA circuit\n", + " seed : int\n", + " Random seed for reproducibility of results\n", + "\n", + " Returns\n", + " -------\n", + " tuple\n", + " (optimal_value, optimal_parameters, sample_result)\n", + " \"\"\"\n", + " G = nx.Graph()\n", + " G.add_nodes_from(range(qubit_count))\n", + " G.add_edges_from(zip(qubit_src, qubit_tgt))\n", + " hamiltonian = solvers.get_maxcut_hamiltonian(G)\n", + "\n", + " parameter_count = solvers.get_num_qaoa_parameters(hamiltonian, layer_count)\n", + "\n", + " np.random.seed(seed)\n", + " initial_parameters = np.random.uniform(-np.pi, np.pi,\n", + " parameter_count).tolist()\n", + 
"\n", + " optimal_value, optimal_parameters, sample_result = solvers.qaoa(\n", + " hamiltonian, layer_count, initial_parameters)\n", + "\n", + " return optimal_value, optimal_parameters, sample_result" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Before running this function in parallel, let's execute it sequentially. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "layer_count = 1\n", + "seed = 123\n", + "\n", + "results = []\n", + "\n", + "for i in range(num_subgraphs):\n", + " src, tgt = rename_nodes(edgeListSources[i], edgeListTargets[i], nodeList[i])\n", + " opt_value, opt_params, sample_result = run_qaoa(src, tgt, nodeCountList[i], layer_count, seed)\n", + " results.append((opt_value, opt_params, sample_result))\n", + "\n", + " bitstring = sample_result.most_probable()\n", + " set_0 = [nodeList[i][j] for j, b in enumerate(bitstring) if b == '0']\n", + " set_1 = [nodeList[i][j] for j, b in enumerate(bitstring) if b == '1']\n", + " print(f'subgraph {i}: optimal_value = {opt_value:.4f}')\n", + " print(f' qubit-to-node map: {dict(enumerate(nodeList[i]))}')\n", + " print(f' most_probable = {bitstring} -> partition {set_0} | {set_1}')\n", + " print()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Since `solvers.qaoa()` returns the sampled measurement results along with the optimal parameters, we already have approximate max cut solutions for each subgraph. The cell below inspects the full results, printing the five most frequently sampled bitstrings for each subgraph along with the corresponding node partitions." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "for i in range(num_subgraphs):\n", + " opt_value, opt_params, sample_result = results[i]\n", + " print(f'subgraph {i}:')\n", + " # Sort all measured bitstrings by frequency (most frequent first)\n", + " all_bits = sorted(sample_result, key=lambda b: sample_result[b], reverse=True)\n", + " for bs in all_bits[:5]:\n", + " set_0 = [nodeList[i][j] for j, b in enumerate(bs) if b == '0']\n", + " set_1 = [nodeList[i][j] for j, b in enumerate(bs) if b == '1']\n", + " print(f' {bs} (count={sample_result[bs]}): partition {set_0} | {set_1}')\n", + " print()\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "That completes the \"conquer\" stage of the divide-and-conquer algorithm. To learn more about how the results of the subgraph solutions are merged together to get a max cut approximation of the original graph, check out the 2nd notebook of this [series of interactive tutorials](https://github.com/NVIDIA/cuda-q-academic/tree/main/qaoa-for-max-cut). For the remainder of this guide, we'll set that step aside and examine how to parallelize the divide-and-conquer QAOA algorithm using CUDA-Q and [`MPI`](https://nvidia.github.io/cuda-quantum/latest/using/install/local_installation.html#distributed-computing-with-mpi). " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The diagram below illustrates a strategy for implementing the divide-and-conquer QAOA across 4 processors (which could be distinct GPUs or separate processes on a single GPU). The approach involves storing subgraph data in a dictionary, where the keys represent subgraph names. 
These dictionary keys are distributed among the 4 processors, with each processor responsible for solving the QAOA problem for the subgraphs corresponding to its assigned keys.\n", + "\n", + "![](https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/refs/heads/main/images/accelerating/parallel-workflow.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This strategy is implemented in the [qaoa-divide-and-conquer.py](qaoa-for-max-cut/qaoa-divide-and-conquer.py) script. To run it, open a terminal and execute the following commands:\n", + "\n", + "```bash\n", + "pip install mpi4py -q\n", + "mpiexec -np 4 --oversubscribe --allow-run-as-root python3 qaoa-for-max-cut/qaoa-divide-and-conquer.py\n", + "```\n", + "\n", + "You should see output from each of the 4 processors as they solve their assigned subgraph QAOA problems in parallel, followed by the merged result on processor 0." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The animation below captures a small instance of a recursive divide-and-conquer QAOA running on a CPU versus a GPU in parallel. The lineplots on the top depict the error between the calculated max cut solution and the true max cut of the graph over time. The graphs on the bottom represent the max cut solutions as various subgraph problems are solved and merged together. 
The green graphs on the right show the parallelization of solving subgraph problems simultaneously.\n", + "\n", + "![](https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/refs/heads/main/images/accelerating/maxcut_ani.gif)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Beyond Statevector Simulations\n", + "\n", + "#### Other simulators\n", + "\n", + "When using CUDA-Q on an NVIDIA GPU with available CUDA runtime libraries, the default target is set to [`nvidia`](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/svsims.html#single-gpu), which utilizes the cuQuantum single-GPU statevector simulator. On CPU-only systems, the default target is set to [`qpp-cpu`](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/svsims.html#cpu), an OpenMP CPU-only statevector simulator.\n", + "\n", + "For many applications, it's not necessary to simulate and access the entire statevector. CUDA-Q provides several additional categories of simulators beyond statevector:\n", + "\n", + "* **[Tensor network simulators](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/tnsims.html)** — suited for shallow-depth or low-entanglement circuits with many qubits, and matrix product state (MPS) simulators for moderate-depth circuits.\n", + "* **[Noisy simulators](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/noisy.html)** — including trajectory-based noisy simulation (compatible with GPU-accelerated backends), density matrix simulation, and stabilizer simulation for quantum error correction research.\n", + "* **[Photonics simulators](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/photonics.html)** — for photonic quantum computing applications.\n", + "\n", + "Each category includes one or more backend targets with different trade-offs in precision, qubit capacity, and hardware requirements. 
For a complete and up-to-date list of all simulator backends, see the [Circuit Simulation Backends](https://nvidia.github.io/cuda-quantum/latest/using/backends/simulators.html) documentation.\n", + "\n", + "The `nvidia` target also supports a number of [environment variables](https://nvidia.github.io/cuda-quantum/latest/using/backends/sims/svsims.html#single-gpu) for advanced performance tuning, such as gate fusion levels and memory management options.\n", + "\n", + "#### Quantum processing units\n", + "In addition to executing simulations, CUDA-Q supports running quantum kernels on physical quantum processing units from a growing set of hardware providers spanning ion trap, superconducting, neutral atom, and photonic architectures, as well as cloud platforms. For the current list of supported QPUs and instructions on how to connect to them, see the [Quantum Hardware](https://nvidia.github.io/cuda-quantum/latest/using/backends/hardware.html) documentation." + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" } - ], - "source": [ - "# Testing the find_optimal_parameters function\n", - "layer_count = 1\n", - "seed = 123\n", - "\n", - "new_src = []\n", - "new_tgt = []\n", - "optimal_parameters = []\n", - "\n", - "for i in range(num_subgraphs):\n", - " src, tgt= rename_nodes(edgeListSources[i], edgeListTargets[i], nodeList[i])\n", - " new_src.append(src)\n", - " new_tgt.append(tgt)\n", - " optimal_parameters.append(find_optimal_parameters(src, tgt, nodeCountList[i], layer_count, seed))\n", - "\n", - "print(optimal_parameters)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Finally, we'll need to sample the `kernel_qaoa` 
circuit with the optimal parameters to find approximate max cut solutions to each of the subgraphs.\n", - "\n", - "**Exercise 3:** Edit the `FIX_ME` in the code block below to sample the QAOA circuits for each of the subgraphs using the optimal parameter found above." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Exercise 3\n", - "# Sampling the QAOA circuits with the optimal parameters to identify an appoximate max cut of the subgraphs\n", - "\n", - "shots = 10000\n", - "\n", - "for i in range(num_subgraphs):\n", - " counts = FIX_ME(kernel_qaoa, nodeCountList[i], layer_count, new_src[i], new_tgt[i], optimal_parameters[i], shots_count=shots)\n", - " print('subgraph ',i,' has most_probable outcome = ',counts.most_probable())\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "That completes the \"conquer\" stage of the divide-and-conquer algorithm. To learn more about how the results of the subgraph solutions are merged together to get a max cut approximation of the original graph, check out the 2nd notebook of this [series of interactive tutorials](https://github.com/NVIDIA/cuda-q-academic/tree/main/qaoa-for-max-cut). For the purposes of today's tutorial we'll set that step aside and examine how we could parallelize the divide-and-conquer QAOA algorithm using CUDA-Q and `MPI`. " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The diagram below illustrates a strategy for implementing the divide-and-conquer QAOA across 4 processors (which could be distinct GPUs or separate processes on a single GPU). The approach involves storing subgraph data in a dictionary, where the keys represent subgraph names. 
These dictionary keys are distributed among the 4 processors, with each processor responsible for solving the QAOA problem for the subgraphs corresponding to its assigned keys.\n", - "\n", - "![](https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/refs/heads/main/images/accelerating/parallel-workflow.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We've coded this strategy up in the one-step-qaoa.py file for you. The command line below executes the [qaoa-divide-and-conquer.py](https://github.com/NERSC/sc24-quantum-tutorial/blob/main/cudaq-overview/qaoa-divide-and-conquer.py) file on 4 processors in parallel. " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Uncomment and execute this cell to install mpi4py if necessary\n", - "#%pip install mpi4py" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "\n", - "# MPI call\n", - "# Uncomment if you have OpenMPI installed with a GPU\n", - "#print(sys.executable)\n", - "#python_path = sys.executable\n", - "#!mpiexec -np 4 --oversubscribe --allow-run-as-root {python_path} divide-and-conquer-qaoa.py\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The animation below captures a small instance of a recursive divide-and-conquer QAOA running on a CPU versus a GPU in parallel. The lineplots on the top depict the error between the calculated max cut solution and the true max cut of the graph over time. The graphs on the bottom represent the max cut solutions as various subgraph problems are solved and merged together. 
The green graphs on the right show the parallelization of solving subgraph problems simultaneously.\n", - "\n", - "![](https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/refs/heads/main/images/accelerating/maxcut_ani.gif)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Beyond Statevector Simulations\n", - "\n", - "#### Other simulators\n", - "\n", - "When using CUDA-Q on NVIDIA GPU with available CUDA runtime libraries, the default target is set to `nvidia`. This will utilize the cuQuantum single-GPU statevector simulator. On CPU-only systems, the default target is set to `qpp-cpu` which uses the OpenMP CPU-only statevector simulator.\n", - "\n", - "For many applications, it's not necessary to simluate and access the entire statevector. The default simulator can be overridden by the environment variable CUDAQ_DEFAULT_SIMULATOR where tensor network, matrix product state simulators can be selected. Please refer to the table below for a list of backend simulator names along with its multi-GPU capability.\n", - "\n", - "![](https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/refs/heads/main/images/accelerating/backends.png)\n", - "\n", - "For more information about all the simulator backends available on [this documentation page](https://nvidia.github.io/cuda-quantum/latest/using/backends/simulators.html).\n", - "\n", - "#### Quantum processing units\n", - "In addition to executing simulations, CUDA-Q is equipped to run quantum kernels on quantum processing units. For more information on how to execute CUDA-Q code on quantum processing units, check out the [documentation](https://nvidia.github.io/cuda-quantum/latest/using/backends/hardware.html)." 
- ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.12" - } - }, - "nbformat": 4, - "nbformat_minor": 2 + "nbformat": 4, + "nbformat_minor": 2 } diff --git a/ai-for-quantum/.ipynb_checkpoints/README-checkpoint.md b/ai-for-quantum/.ipynb_checkpoints/README-checkpoint.md index f1cb8eb..a108882 100644 --- a/ai-for-quantum/.ipynb_checkpoints/README-checkpoint.md +++ b/ai-for-quantum/.ipynb_checkpoints/README-checkpoint.md @@ -16,11 +16,6 @@ For example, the first notebook guides learners through using a pretrained diffu * ***Hands-on experience inserting AI models within quantum workflows:*** Learn how to prepare quantum data for input. ## Notebooks -The Jupyter notebooks in this folder are designed to run on GPUs in an environment with CUDA-Q and Python. For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). A Dockerfile and requirements.txt are also included in the main directory of the repository to help get you set up. +The Jupyter notebooks in this folder are designed to run on GPUs in an environment with CUDA-Q and Python. For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). -Otherwise, if you have set up an account in Google CoLab, -simply log in to the account, then click on the icons below to run the notebooks on the listed platform. 
- -| Notebook | Google Colab | -| ----------- | ----------- | -|Lab 1 - Compiling Unitaries with Diffusion Models | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/ai-for-quantum/01_compiling_unitaries_using_diffusion_models.ipynb)| +Otherwise, explore our [Learning Pathways page](https://nvidia.github.io/cuda-q-academic/learningpath.html) for additional cloud-based options to run these notebooks. diff --git a/ai-for-quantum/01_compiling_unitaries_diffusion.ipynb b/ai-for-quantum/01_compiling_unitaries_diffusion.ipynb new file mode 100644 index 0000000..22dfb92 --- /dev/null +++ b/ai-for-quantum/01_compiling_unitaries_diffusion.ipynb @@ -0,0 +1,3628 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "d60f1e23-2541-4b01-84c8-0aff758061b4", + "metadata": { + "id": "d60f1e23-2541-4b01-84c8-0aff758061b4" + }, + "source": [ + "# AI for Quantum: Compiling Unitaries Using Diffusion Models\n", + "$\\renewcommand{\\ket}[1]{|#1\\rangle}$\n", + "\n", + "AI is a powerful tool for enabling some of the hardest aspects of a hybrid quantum classical workflow including QEC, compilation, calibration, etc. (See the review paper [here](https://arxiv.org/abs/2411.09131) for more AI for Quantum use cases). Compiling quantum algorithms is an incredible challenge which involves identifying a target unitary, finding an appropriate circuit representation, and then efficiently running the circuit on highly contraining hardware.\n", + "\n", + "This lab is part of a two part series exploring unitary synthesis and circuit compilation and how AI supercomputing can help solve these challenges. 
In part 1, you will consider unitary synthesis and how it can be addressed with AI tools like diffusion models.\n", + "\n", + "In recent papers [Quantum circuit synthesis with diffusion models](https://doi.org/10.1038/s42256-024-00831-9) and [Synthesis of discrete-continuous quantum circuits with multimodal diffusion models](https://arxiv.org/abs/2506.01666), it was demonstrated how diffusion models can be used for unitary synthesis. This lab will explore the problem of unitary synthesis, introduce a diffusion model used in the work, and allow you to compile circuits of your own using AI.\n", + "\n", + "**Pre-requisites:** No experience with diffusion models is necessary. However, this notebook will not provide a detailed discussion on diffusion models or their construction. For curious readers, we suggest NVIDIA's Deep Learning Institute [course](https://learn.nvidia.com/courses/course-detail?course_id=course-v1:DLI+S-FX-14+V1) on diffusion models. As far as quantum prerequisites, familiarity with the basics of quantum computing like gates, state vectors, etc. is required. If you are not familiar with these concepts, please complete the [Quick Start to Quantum Computing](https://github.com/NVIDIA/cuda-q-academic/tree/main/quick-start-to-quantum) course first.\n", + "\n", + "\n", + "**What you'll do:**\n", + "* Learn the basics of unitary synthesis and try to compile a unitary by hand\n", + "* Encode quantum circuits as inputs for the diffusion model\n", + "* Synthesize quantum circuits corresponding to a given unitary matrix with a diffusion model\n", + "* Evaluate if the obtained circuit is accurate\n", + "* Filter better quantum circuits\n", + "* Sample a circuit using a noise model\n", + "\n", + "\n", + "Let's begin with installing the relevant packages." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ZFf2-X07xgBq", + "metadata": { + "id": "ZFf2-X07xgBq", + "scrolled": true + }, + "outputs": [], + "source": [ + "# Uncomment and execute the following lines if you are working in an environment without these packages already installed\n", + "!pip install --no-compile genQC==0.2.5 -q\n", + "#!pip install torch\n", + "#!pip install numpy" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "07d177c6-60fd-497d-ae2b-41e225b56a2d", + "metadata": { + "id": "07d177c6-60fd-497d-ae2b-41e225b56a2d" + }, + "outputs": [], + "source": [ + "import functools\n", + "import itertools\n", + "import numpy as np\n", + "import torch\n", + "import cudaq\n", + "import matplotlib.pyplot as plt\n", + "from dataclasses import dataclass\n", + "from typing import Sequence, List, Optional\n", + "from tqdm.auto import tqdm\n", + "\n", + "import genQC\n", + "import os\n", + "\n", + "# Fixed seed for reproducibility\n", + "torch.manual_seed(0)\n", + "np.random.seed(0)\n", + "\n", + "\n", + "os.environ['HF_HUB_DISABLE_PROGRESS_BARS'] = '1'\n", + "\n", + "# Users may hit a rate limit for HuggingFace requests if the HF_TOKEN is not set.\n", + "import huggingface_hub\n", + "hf_token = os.environ.get('HF_TOKEN')\n", + "if hf_token:\n", + " huggingface_hub.login(token=hf_token, add_to_git_credential=False)\n", + "\n", + "import genQC.utils.misc_utils as util\n", + "from genQC.pipeline.diffusion_pipeline import DiffusionPipeline\n", + "from genQC.pipeline.multimodal_diffusion_pipeline \\\n", + " import MultimodalDiffusionPipeline_ParametrizedCompilation\n", + "\n", + "from genQC.platform.tokenizer.circuits_tokenizer import CircuitTokenizer\n", + "from genQC.platform.simulation import Simulator, CircuitBackendType\n", + "from genQC.scheduler.scheduler_dpm import DPMScheduler\n", + "\n", + "from genQC.inference.sampling \\\n", + " import decode_tensors_to_backend, generate_compilation_tensors\n", + "from 
genQC.inference.evaluation_helper import get_unitaries\n", + "from genQC.inference.eval_metrics import UnitaryInfidelityNorm" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4b6de952", + "metadata": {}, + "outputs": [], + "source": [ + "# ── Ported from genQC v0.1.0 (genQC.inference.export_cudaq) ──\n", + "# These utilities were removed in v0.2.x. They convert a decoded\n", + "# integer‑token tensor into a cudaq.kernel via an intermediate\n", + "# CircuitInstructions representation. These functions are included\n", + "# in this file to enable a few pedagogical examples.\n", + "\n", + "@dataclass\n", + "class CircuitInstruction():\n", + " name: str\n", + " control_nodes: Sequence[int]\n", + " target_nodes: Sequence[int]\n", + " params: Sequence[float]\n", + "\n", + "\n", + "class CircuitInstructions():\n", + " def __init__(self, tensor_shape: torch.Size) -> None:\n", + " assert len(tensor_shape) == 2 # [qubits, time]\n", + " self.tensor_shape = tensor_shape\n", + " self._instructions = []\n", + "\n", + " def add_instruction(self, name, control_nodes, target_nodes, params):\n", + " self._instructions.append(\n", + " CircuitInstruction(name, control_nodes, target_nodes, params))\n", + "\n", + " @property\n", + " def data(self): return self._instructions\n", + " @property\n", + " def length(self): return len(self._instructions)\n", + " @property\n", + " def num_qubits(self): return self.tensor_shape[0]\n", + " @property\n", + " def max_gates(self): return self.tensor_shape[1]\n", + "\n", + "\n", + "class CircuitsCudaqBackend():\n", + " KERNEL_VOCABULARY = {\n", + " \"h\": 1, \"cx\": 2, \"z\": 3, \"x\": 4, \"y\": 5, \"ccx\": 6, \"swap\": 7\n", + " }\n", + "\n", + " def _construct_kernel(self, gate_list, target_1_nodes_list,\n", + " target_2_nodes_list, control_1_nodes_list,\n", + " control_2_nodes_list):\n", + " num_gates = len(gate_list)\n", + " gate_list = [self.KERNEL_VOCABULARY[g] for g in gate_list]\n", + "\n", + " @cudaq.kernel\n", + " def 
place_gate_kernel(gate: int, qvector: cudaq.qview,\n", + " target_1: int, target_2: int,\n", + " control_1: int, control_2: int):\n", + " if gate == 1: h(qvector[target_1])\n", + " elif gate == 2: cx(qvector[control_1], qvector[target_1])\n", + " elif gate == 3: z(qvector[target_1])\n", + " elif gate == 4: x(qvector[target_1])\n", + " elif gate == 5: y(qvector[target_1])\n", + " elif gate == 6: x.ctrl(qvector[control_1], qvector[control_2],\n", + " qvector[target_1])\n", + " elif gate == 7: swap(qvector[target_1], qvector[target_2])\n", + "\n", + " @cudaq.kernel\n", + " def kernel(input_state: List[complex]):\n", + " qvector = cudaq.qvector(input_state)\n", + " for i in range(num_gates):\n", + " place_gate_kernel(gate_list[i], qvector,\n", + " target_1_nodes_list[i],\n", + " target_2_nodes_list[i],\n", + " control_1_nodes_list[i],\n", + " control_2_nodes_list[i])\n", + " return kernel\n", + "\n", + " def check_error_circuit(self, gate, num_target_nodes, num_control_nodes):\n", + " if gate not in self.KERNEL_VOCABULARY:\n", + " raise NotImplementedError(f\"Unknown gate {gate}\")\n", + " if gate in [\"h\", \"z\", \"x\", \"y\"]:\n", + " if num_target_nodes != 1 or num_control_nodes != 0: return False\n", + " elif gate in [\"cx\"]:\n", + " if num_target_nodes != 1 or num_control_nodes != 1: return False\n", + " elif gate in [\"ccx\"]:\n", + " if num_target_nodes != 1 or num_control_nodes != 2: return False\n", + " elif gate in [\"swap\"]:\n", + " if num_target_nodes != 2 or num_control_nodes != 0: return False\n", + " return True\n", + "\n", + " def export_cudaq(self, instructions: CircuitInstructions):\n", + " num_gates = instructions.length\n", + " gate_list = []\n", + " target_1_nodes_list = [9999] * num_gates\n", + " target_2_nodes_list = [9999] * num_gates\n", + " control_1_nodes_list = [9999] * num_gates\n", + " control_2_nodes_list = [9999] * num_gates\n", + "\n", + " for i, instruction in enumerate(instructions.data):\n", + " gate = instruction.name.lower()\n", 
+ " control_nodes = instruction.control_nodes\n", + " target_nodes = instruction.target_nodes\n", + "\n", + " if len(instruction.params) > 0:\n", + " raise NotImplementedError(\"Only non-parametrized gates supported\")\n", + "\n", + " if not self.check_error_circuit(gate, len(target_nodes),\n", + " len(control_nodes)):\n", + " return None\n", + "\n", + " gate_list.append(gate)\n", + " if len(target_nodes) > 0:\n", + " target_1_nodes_list[i] = target_nodes[0]\n", + " if len(target_nodes) > 1:\n", + " target_2_nodes_list[i] = target_nodes[1]\n", + " if len(control_nodes) > 0:\n", + " control_1_nodes_list[i] = control_nodes[0]\n", + " if len(control_nodes) > 1:\n", + " control_2_nodes_list[i] = control_nodes[1]\n", + "\n", + " return self._construct_kernel(gate_list, target_1_nodes_list,\n", + " target_2_nodes_list,\n", + " control_1_nodes_list,\n", + " control_2_nodes_list)\n", + "\n", + "\n", + "def tensor_to_instructions(tensor, vocabulary_inverse,\n", + " params_tensor=None,\n", + " params_4pi_normalization=True,\n", + " sign_labels=None):\n", + " if sign_labels is None:\n", + " sign_labels = {\"control_nodes\": -1, \"target_nodes\": +1}\n", + "\n", + " assert tensor.dim() == 2\n", + " num_of_qubits, time = tensor.shape\n", + " instructions = CircuitInstructions(tensor_shape=tensor.shape)\n", + "\n", + " for t in range(time):\n", + " enc_time_slice = tensor[:, t]\n", + " for gate_index, gate in vocabulary_inverse.items():\n", + " target_nodes = (enc_time_slice == (sign_labels[\"target_nodes\"] * gate_index)).nonzero(as_tuple=True)[0]\n", + " control_nodes = (enc_time_slice == (sign_labels[\"control_nodes\"] * gate_index)).nonzero(as_tuple=True)[0]\n", + "\n", + " if target_nodes.nelement() > 0:\n", + " params = []\n", + " if params_tensor is not None:\n", + " params = params_tensor[:, t]\n", + " if params_4pi_normalization:\n", + " params = (params + 1.0) * 2.0 * np.pi\n", + " params = params.tolist()\n", + " instructions.add_instruction(gate, 
control_nodes.tolist(),\n", + " target_nodes.tolist(), params)\n", + " break\n", + " elif control_nodes.nelement() > 0:\n", + " raise RuntimeError(\"control_nodes found but no target_nodes\")\n", + "\n", + " return instructions\n", + "\n", + "\n", + "_backend = CircuitsCudaqBackend()\n", + "\n", + "def genqc_to_cudaq(tensor, vocabulary_inverse):\n", + " \"\"\"Convert a decoded integer-token tensor to a cudaq.kernel.\"\"\"\n", + " instructions = tensor_to_instructions(tensor, vocabulary_inverse)\n", + " return _backend.export_cudaq(instructions)" + ] + }, + { + "cell_type": "markdown", + "id": "41705f30-fb6a-46e8-855c-3357d3e73704", + "metadata": { + "id": "41705f30-fb6a-46e8-855c-3357d3e73704" + }, + "source": [ + "## The Challenge of Unitary Synthesis and Compilation\n", + "\n", + "In a sense, quantum computing is extremely simple, corresponding to the multiplication of a unitary matrix with a state vector to produce the desired quantum state that solves a problem. In the example below we use the ordering $\\ket{q_0q_1q_2}$. 
The initial state vector is $\ket{000}$ and the state produced after multiplying by the unitary matrix $U$ is $\ket{111}$.\n", + "\n", + "$$\n", + "\\underbrace{\n", + "\\begin{pmatrix}\n", + "0 & 0 & 0 & 1 & 0 & 0 & 0 & 0 \\\\\n", + "0 & 0 & 1 & 0 & 0 & 0 & 0 & 0 \\\\\n", + "0 & 1 & 0 & 0 & 0 & 0 & 0 & 0 \\\\\n", + "0 & 0 & 0 & 0 & 1 & 0 & 0 & 0 \\\\\n", + "0 & 0 & 0 & 0 & 0 & 0 & 0 & 1 \\\\\n", + "0 & 0 & 0 & 0 & 0 & 0 & 1 & 0 \\\\\n", + "0 & 0 & 0 & 0 & 0 & 1 & 0 & 0 \\\\\n", + "1 & 0 & 0 & 0 & 0 & 0 & 0 & 0\n", + "\\end{pmatrix}\n", + "}_{\\text{Unitary } U}\n", + "\\underbrace{\n", + "\\begin{pmatrix}\n", + "1 \\\\ 0 \\\\ 0 \\\\ 0 \\\\ 0 \\\\ 0 \\\\ 0 \\\\ 0\n", + "\\end{pmatrix}\n", + "}_{|000\\rangle}\n", + "\\quad = \\quad\n", + "\\underbrace{\n", + "\\begin{pmatrix}\n", + "0 \\\\ 0 \\\\ 0 \\\\ 0 \\\\ 0 \\\\ 0 \\\\ 0 \\\\ 1\n", + "\\end{pmatrix}\n", + "}_{|111\\rangle}\n", + "$$\n", + "\n", + "The quantum circuit drawn below represents a **synthesis** of this unitary matrix. That is, it produces the same result as multiplying by $U$ above, regardless of the initial state.\n", + "\n", + "![](https://github.com/NVIDIA/cuda-q-academic/blob/main/images/toffoli-example-circuit.jpg?raw=true)\n", + "\n", + "\n", + "Wrapped up in this simple picture of unitary matrices and quantum circuits is incredible complexity which makes quantum computing so difficult.\n", + "\n", + "**Scaling:** First, the unitary matrix corresponding to a quantum circuit is huge, with $2^n \\times 2^n$ entries where $n$ is the number of qubits in the circuit. The matrix cannot be stored naively on any classical computer in its entirety for more than about 25 qubits.\n", + "\n", + "**Identifying the unitary:** Second, it is far from obvious in many cases what particular unitary matrix will solve a problem. 
Consider methods like VQE where the entire goal is to identify what sort of parameterized circuit (unitary matrix) solves the given problem.\n", + "\n", + "**Executing on a quantum device:** Finally, even if the unitary required is known, implementing it on a physical QPU requires it to be **synthesized (or compiled)** into a set of discrete gate operations compatible with the device. Furthermore, decisions need to be made concerning how these gates are performed and in which order to ensure that performance is achieved and bottlenecks are avoided.\n", + "\n", + "This is extremely challenging and gets even worse when considering the fact that different QPUs have different gate sets and hardware constraints, quantum error correction protocols add additional overhead, and time constraints require not only that an accurate circuit be synthesized, but that it is as simple as possible.\n", + "\n", + "It is no wonder why circuit synthesis is considered a leading AI for quantum use case as AI's aptitude for complex pattern recognition could provide a powerful means for compiling the unitaries necessary to run quantum algorithms at scale.\n", + "\n", + "In this lab, you will explore unitary synthesis and learn how to generate valid circuits given a target unitary.\n", + "\n", + "> **Exercise 1**:\n", + "> \n", + "> To get a sense for how difficult compilation is, try to compile the state of a single qubit by hand with this interactive game.\n", + ">\n", + ">Instructions for the game: You are given a random unitary and presented with two Bloch spheres depicting its action on the $\ket{0}$ and $\ket{1}$ states. Your job is to apply gate operations to get as close as possible to the target unitary. You will notice that even when you can see exactly what each gate does, it is not obvious how to match the initial state exactly. 
Even if action on a single state is correct, the unitary may still be incorrect as it must properly operate on all basis states.\n" + ] + }, + { + "cell_type": "markdown", + "id": "e82bb399-5bdf-4858-9667-b38a283ab2f3", + "metadata": { + "id": "e82bb399-5bdf-4858-9667-b38a283ab2f3" + }, + "source": [ + "## An Overview of the Diffusion Model\n", + "\n", + "Though many AI techniques have been explored for circuit synthesis, the rest of this lab will look at recent work [(Fürrutter, et al., 2024)](https://doi.org/10.1038/s42256-024-00831-9) that used diffusion models for the task. We'll begin with a general overview of diffusion models and then discuss the specific advantages they offer for circuit synthesis.\n", + "\n", + "This section is not a comprehensive or particularly deep lesson on diffusion models for which we point the reader to [this course](https://learn.nvidia.com/courses/course-detail?course_id=course-v1:DLI+S-FX-14+V1). To build intuition, we will first use the common example of image generation before applying these concepts to our main topic of unitary synthesis.\n", + "\n", + "\n", + "### The Core Idea: Denoising Images\n", + "\n", + "The primary objective of a diffusion model is to generate high-quality samples by learning to reverse a noise-adding process, rather than learning the data distribution directly. The training begins with a clean dataset — in this case, images — to which Gaussian noise is incrementally added in a \"forward process.\"\n", + "\n", + "\n", + "To reverse this, the model employs a U-net architecture to learn the \"reverse process\" of denoising. The U-net is trained to take a noisy image as input and predict the specific noise pattern that was added to it. 
The model's parameters are optimized by minimizing a loss function that measures the difference between the predicted noise and the actual noise.\n", + "\n", + "It's important to note that the U-net is named for its U-shaped layer structure; this is purely an architectural descriptor and has no relation to the mathematical symbol $U$ for a unitary matrix. In the final stage, called inference, the model acts like an artist who starts with a block of static and \"chisels away\" the noise to reveal a clear image.\n", + "\n", + "The primary objective of a diffusion model is to generate high-quality samples by learning to reverse a carefully defined noise-adding process, rather than learning the data distribution directly. The training begins with a clean dataset, to which Gaussian noise is incrementally added in a \"forward process.\"\n", + "\n", + "To reverse this, the model employs a U-net architecture to learn the \"reverse process\" of denoising. The U-net is trained to take a noisy data sample as input and predict the specific noise pattern that was added to it. The model's parameters are then optimized by minimizing a loss function that measures the difference between the predicted noise and the actual noise.\n", + "\n", + "It's important to note that the U-net is named for its U-shaped layer structure; this is purely an architectural descriptor and has no relation to the mathematical symbol $U$ for a unitary matrix.\n", + "\n", + "In the final stage, called inference, the model acts like an artist who starts with a block of static and \"chisels away\" the noise to reveal a clear image.\n", + "\n", + "> **Exercise 2:**\n", + "> Try this widget to get some hands on experience for the diffusion model process. The widget is grossly oversimplified, but gives a visual representation of what is happening in the training and inference stages of a diffusion model. You'll first see how an image is deliberately corrupted with noise for training. 
Then, you'll watch the trained model take a fresh patch of random noise and reverse the process, generating a clean new image from scratch.\n", + "\n", + "\n", + "## Applying Diffusion to Unitary Synthesis\n", + "\n", + "### The Core Idea: Denoising Circuits\n", + "Now, let's apply the same concepts of noising and denoising to our primary goal: unitary compilation. The process follows the diagram below. First, training circuits are embedded into a data structure amenable to the neural network. Then, just like with the images, noise is added to the training data and it is input into the U-net model. The model is also given the target unitary matrix and any specific constraints (e.g., which gates to use). The output of the U-Net model is the predicted noise, and it is trained until its prediction is as accurate as possible.\n", + "\n", + "\n", + "\n", + "The inference step (shown below) then uses this trained model. It takes a target unitary, compilation instructions, and random noise as input. The model then \"denoises\" this input to produce candidate circuits that implement the target unitary.\n", + "\n", + "\n", + "\n", + "In a sense, the process is simple and can be treated as a black box. But there are also many challenges, such as ensuring sufficient quality and quantity of training data, choosing the right model architecture, and deciding how data is encoded.\n", + "\n", + "The primary advantage of this approach for quantum circuit compilation is that the diffusion model learns how to denoise corrupted samples, not the distribution of the circuits themselves. Most other approaches require generating sample circuits and then comparing their behavior to the target. 
Such a requirement is extremely expensive, as it would require running many quantum circuit simulations, which limits scalability.\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "id": "c98032b9-0191-4d06-8472-65e71f4f7a34", + "metadata": { + "id": "c98032b9-0191-4d06-8472-65e71f4f7a34" + }, + "source": [ + "## Preparing Quantum Circuit Data for the Model\n", + "\n", + "An important consideration for all AI applications is how the data is preprocessed before being input to the model. In this section we will explore a piece of this process related to **encoding** the quantum circuit. That is, representing the quantum circuit in such a way that is amenable to AI. Note that the target unitary and text prompt inputs are themselves prepared with distinct neural networks which will not be discussed here.\n", + "\n", + "\n", + "The figure below explains how we translate a quantum circuit diagram into a numerical, or **tokenized matrix**. Think of the matrix as a timeline of the circuit. Each row is a dedicated qubit, and each column is a step in time, moving from left to right and top to bottom. We fill the matrix using a codebook, or vocabulary, where each gate has a unique number (e.g., $H = 1$, $CNOT = 2$). For gates involving multiple qubits, we use a negative sign to mark the \"control\" qubit. For instance, the Hadamard gate on qubit $q_0$ is encoded as the column vector $( 1,0,0,0)^T$. A CNOT gate with a control on $q_0$ and a target on $q_3$ is represented by the column $(-2,0,0,2)^T$. 
The example circuit shown results in a $4\times 8$ matrix, which includes $6$ columns for gate operations and $2$ columns of zeroes for padding to signify the end of the circuit.\n", + "\n", + "\n", + "\n", + "For improved numerical stability during model training, the discrete tokenized matrix is **embedded** into a continuous **tensor**.\n", + "The idea is to replace every integer in our matrix, including $0$, with a vector chosen from a specially prepared set of orthonormal basis vectors of dimension $d$. This conversion is vital for our diffusion model to perform well.\n", + "\n", + "To illustrate, consider an embedding space of dimension $d=7$ with a fixed orthonormal basis $\\mathbf{v_0}, \\cdots, \\mathbf{v_6}$. Suppose $\\mathbf{v_0} = (0,0,0,0,0,0,1)^T$ and $\\mathbf{v_1} = (\\frac{1}{\\sqrt{7}}, \\cdots,\\frac{1}{\\sqrt{7}} )^T$. Then the tokenized column $(1,0,0,0)^T$, which represents a Hadamard gate on $q_0$, is transformed into the tensor: $$[\\mathbf{v}_1, \\mathbf{v}_0, \\mathbf{v}_0, \\mathbf{v}_0]^T = \\begin{pmatrix}\n", + "(\\frac{1}{\\sqrt{7}}, \\dots, \\frac{1}{\\sqrt{7}})^T \\\\\n", + "(0,0,0,0,0,0,1)^T\\\\\n", + "(0,0,0,0,0,0,1)^T\\\\\n", + "(0,0,0,0,0,0,1)^T\n", + "\\end{pmatrix}.$$\n", + "\n", + "\n", + "\n", + "\n", + "> **Exercise 3:**\n", + "> Write a function to encode the following circuit as a tensor using the vocabulary: \n", + "$H = 1$, $CNOT = 2$, $X = 3$, $CCX = 4$. 
Signal the end of the circuit with two columns of $5s$.\n", + "\n", + "\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9119e9aa-165b-40b1-b16f-46a212062138", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "9119e9aa-165b-40b1-b16f-46a212062138", + "outputId": "bc8359fa-d4da-4b47-cb52-c3b90f3172d5" + }, + "outputs": [], + "source": [ + "#TODO Fill in tokenized matrix\n", + "tokenized = np.array([[1, 0,-4,-4, 0, 0,-2, 3, 0, 0, 0, 0, 0],\n", + " [0, 0,-4, 0, 1, 0, 0, 0,-2, 3, 0, 0, 0],\n", + " [0, 3, 4,-4, 0, 0, 0, 0, 0, 0, 2, 0, 0],\n", + " [0, 0, 0, 4, 0, 1, 2, 0, 2, 0,-2, 0, 0]])\n", + "\n", + "\n", + "def generate_orthonormal_vectors_qr(n, d):\n", + " \"\"\"\n", + " Generate n orthonormal vectors of dimension d using QR decomposition.\n", + "\n", + " Parameters:\n", + " n (int): Number of orthonormal vectors to generate\n", + " d (int): Dimension of each vector\n", + "\n", + " Returns:\n", + " numpy.ndarray: Matrix of shape (n, d) where each row is an orthonormal vector\n", + " \"\"\"\n", + " if n > d:\n", + " raise ValueError(f\"Cannot generate {n} orthonormal vectors in {d}-dimensional space\")\n", + "\n", + " # Generate random matrix and compute QR decomposition\n", + " random_matrix = np.random.randn(d, n)\n", + " Q, _ = np.linalg.qr(random_matrix)\n", + "\n", + " # Return first n columns as rows\n", + " return Q[:, :n].T\n", + "\n", + "d = 6\n", + "vocab_length = 5\n", + "embedding_vectors = generate_orthonormal_vectors_qr(vocab_length, d)\n", + "print(\"The randomized orthonormal basis vectors, v_0,...v_d are\\n\")\n", + "print(embedding_vectors)\n", + "\n", + "tensor = np.zeros((tokenized.shape[0], tokenized.shape[1], d))\n", + "\n", + "\n", + "for i in range(tokenized.shape[0]):\n", + " for j in range(tokenized.shape[1]):\n", + " token = tokenized[i][j]\n", + " if token < 0:\n", + " tensor[i, j, :] = -1*embedding_vectors[abs(token)]\n", + " else:\n", + " tensor[i, j, :] = 
embedding_vectors[abs(token)]\n", + "\n", + "print(\"\\n For example, the token matrix element in the 2nd column and 3rd row is replaced with the basis vector:\")\n", + "print(tensor[2][1][:])\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "id": "6af10d6c-fb87-4bf3-a468-41bf8d5cb890", + "metadata": { + "id": "6af10d6c-fb87-4bf3-a468-41bf8d5cb890" + }, + "source": [ + "## Decoding the Generated Tensors\n", + "\n", + "The diffusion model is trained to generate new tensors. For example suppose the diffusion model generated a tensor whose first element was $(0.02, 0.95, -0.01, 0.04, 0.08, -0.03, 0.10)^T$. This must be **decoded** back into an integer like those in a tokenized matrix to be interpretable as a quantum circuit.\n", + "\n", + "This decoding is performed on each vector of the output tensor in a two-step process to determine the corresponding integer token. First, we identify the best-matching basis vector from the vocabulary by finding which one maximizes the absolute value of the cosine similarity with the generated vector. The index of this basis vector, $k$, gives us the magnitude of our token.\n", + "\n", + "Second, we determine the token's sign by computing the standard cosine similarity between the generated vector and the winning basis vector, $\\mathbf{v_k}$. The sign of this result becomes the sign of the token.\n", + "\n", + "Therefore, if a generated vector is found to be closest to basis vector $\\mathbf{v_k}$, and their cosine similarity is negative, the decoded entry in the tokenized matrix becomes $−k$.\n", + "\n", + "By repeating this for every vector in the generated tensor, we reconstruct the entire tokenized matrix, which gives us the blueprint for a new quantum circuit, as depicted below.\n", + "\n", + "\n", + "\n", + " > **Exercise 4:**\n", + "Write a function below to decode your tensor from Exercise 3 and recover the original tokenized matrix." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cac7ef9b-79ea-434d-ade2-186c83fef9f1", + "metadata": { + "id": "cac7ef9b-79ea-434d-ade2-186c83fef9f1", + "outputId": "73f90270-91a2-4ee4-a87c-7c8a9fa7b528" + }, + "outputs": [], + "source": [ + "#TODO\n", + "def find_closest_vector_signed(input_vector, matrix):\n", + " \"\"\"\n", + " Find the index of the most similar vector and return it with the sign of similarity.\n", + "\n", + " Parameters:\n", + " input_vector: shape (d,) - input vector\n", + " matrix: shape (6, d) - matrix of 6 vectors\n", + "\n", + " Returns:\n", + " sign * k where k is the index of most similar vector and sign is the sign of similarity\n", + " \"\"\"\n", + " # Normalize vectors\n", + " input_norm = input_vector / np.linalg.norm(input_vector)\n", + " matrix_norm = matrix / np.linalg.norm(matrix, axis=1, keepdims=True)\n", + "\n", + " # Compute cosine similarities\n", + " similarities = np.dot(matrix_norm, input_norm)\n", + "\n", + " # Find index of maximum absolute similarity\n", + " k = np.argmax(np.abs(similarities))\n", + "\n", + " # Get sign of similarity\n", + " sign = np.sign(similarities[k])\n", + "\n", + " return int(sign * k)\n", + "\n", + "\n", + "decoded = torch.zeros((tokenized.shape[0], tokenized.shape[1]))\n", + "\n", + "for i in range(decoded.shape[0]):\n", + " for j in range(decoded.shape[1]):\n", + "\n", + " decoded[i][j] = int(find_closest_vector_signed(tensor[i][j][:], embedding_vectors))\n", + "\n", + "decoded = decoded.to(torch.int64)\n", + "print(decoded)\n" + ] + }, + { + "cell_type": "markdown", + "id": "MDNVuoj1vQpK", + "metadata": { + "id": "MDNVuoj1vQpK" + }, + "source": [ + "The `genqc` function then translates this decoded matrix into a quantum kernel using the specified mapping between gates and integers stored as the `vocab` vector." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6kGk2jNVvYdB", + "metadata": { + "id": "6kGk2jNVvYdB" + }, + "outputs": [], + "source": [ + "\n", + "vocab_list = ['h', 'cx', 'x', 'ccx'] # H is 1, CNOT is 2, etc.\n", + "vocab_dict = {i + 1: gate for i, gate in enumerate(vocab_list)}\n", + "\n", + "# Utility function to translate the decoded matrix into a quantum circuit\n", + "kernel = genqc_to_cudaq(decoded, vocab_dict)\n", + "\n", + "input_state = [0] * (2**4) # the genqc generated kernel requires an input state\n", + "print(cudaq.draw(kernel, input_state))" + ] + }, + { + "cell_type": "markdown", + "id": "f5245732-5338-4eb1-aa23-344b8868ca40", + "metadata": { + "id": "f5245732-5338-4eb1-aa23-344b8868ca40" + }, + "source": [ + "Similar pre and postprocessing steps are present in all AI applications. When developing AI for quantum applications it is key to find clever ways to encode information such that it can be effectively processed by the AI model. " + ] + }, + { + "cell_type": "markdown", + "id": "541d0d7a", + "metadata": { + "id": "541d0d7a" + }, + "source": [ + "## Generating Circuits with the Diffusion Model\n", + "\n", + "Now that we've covered the problem setup and data processing, let's put the theory into practice using a pretrained model. While the training process itself is a fascinating topic, we'll focus on using the model here. You can explore training in more detail in these courses on [the basics of AI](https://learn.nvidia.com/courses/course-detail?course_id=course-v1:DLI+S-FX-01+V1) and [diffusion models](https://learn.nvidia.com/courses/course-detail?course_id=course-v1:DLI+S-FX-14+V1).\n", + "\n", + "\n", + "The first step is to select a unitary to compile. This model has been trained to compile unitaries arising from circuits composed of the gates `['h', 'cx', 'z', 'x', 'ccx', 'swap']`. 
Although this is a universal gate set, meaning it contains enough operations needed to construct any possible quantum circuit, performing arbitrary computations requires an incredible number of gates. For this tutorial, we will use a model trained to generate kernels with at most 12 gates. Therefore, we can only expect the model to work for unitaries under this constraint. Let's consider here the compilation of one such unitary.\n", + "\n", + "We start by defining our unitary as a `numpy.array`:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4edf7018", + "metadata": { + "id": "4edf7018" + }, + "outputs": [], + "source": [ + "U = np.array(\n", + " [\n", + " [ 0.70710678, 0. , 0. , 0. , 0.70710678, 0. , 0. , 0. ],\n", + " [ 0. , -0.70710678, 0. , 0. , 0. , -0.70710678, 0. , 0. ],\n", + " [-0.70710678, 0. , 0. , 0. , 0.70710678, 0. , 0. , 0. ],\n", + " [ 0. , 0.70710678, 0. , 0. , 0. , -0.70710678, 0. , 0. ],\n", + " [ 0. , 0. , 0.70710678, 0. , 0. , 0. , 0. , 0.70710678],\n", + " [ 0. , 0. , 0. , 0.70710678, 0. , 0. , 0.70710678, 0. ],\n", + " [ 0. , 0. , -0.70710678, 0. , 0. , 0. , 0. , 0.70710678],\n", + " [ 0. , 0. , 0. ,-0.70710678, 0. , 0. , 0.70710678, 0. ]\n", + " ],\n", + " dtype=np.complex128\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "e1453ad1-e4d4-4e38-9684-fd967bc1a8de", + "metadata": { + "id": "e1453ad1-e4d4-4e38-9684-fd967bc1a8de" + }, + "source": [ + "Next, run the cell below to prepare a torch device with CUDA if available." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ccb97524-0f13-49ff-893d-983c572e66c6", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "ccb97524-0f13-49ff-893d-983c572e66c6", + "outputId": "d0225f95-c4f8-4a68-fda3-f8ad4a2797fa", + "scrolled": true + }, + "outputs": [], + "source": [ + "device = util.infer_torch_device() # Use CUDA if we can\n", + "util.MemoryCleaner.purge_mem() # Clean existing memory allocation\n", + "print(device)" + ] + }, + { + "cell_type": "markdown", + "id": "0047cbe9-a21c-4c72-a144-7a2929752329", + "metadata": { + "id": "0047cbe9-a21c-4c72-a144-7a2929752329" + }, + "source": [ + "Then, load the pretrained model from Hugging Face." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a6b18d03-6976-4eec-9ab0-7b763d73ad17", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 322, + "referenced_widgets": [ + "817fecbf36494cb59d5ff42caa1fe9e3", + "86ea7c338b5a407ab76aad8a45318149", + "c3641b0991174bfe9e3a0eb75229297e", + "db46b68da8fd4e46aca2b66606870ce4", + "a83de155c9434f128729e7347db7a36d", + "081b41ba50f045b49eb0632462453955", + "a5477470671e4aadb6819d358042bcb9", + "b7485d048d7f4a7a993bc01b5a278da2", + "487f574ae85e4be4b762eb361e434260", + "80f02a5129f149358ae4f2a230c7c169", + "edcd40c228f94111bb9247c77dbac612", + "c662e4d797bc47a08c32540e25deb9a3", + "44496a784f1a4d9ebfabcea69113ffc3", + "f0893c5606d24a7685e455bce32fbc3a", + "6ebf1e1dc9fd41f1b1b969267a1ed4d2", + "3e1a67f27ddc47da9654640415abcbc1", + "deebca617f664d3d80d58a4ce749147c", + "f88617e7ffbf4574aad45263697b1b2a", + "d61eb3bf737f4128b955fecb3dc9d9ab", + "f1f1d933d24f4ad3b8e4e9c98748470c", + "d4e31a5cc2e943f2a98e35eca2959f1d", + "daebf2fefc4e4ab7a4bbbe212155b679", + "d96d6d7e91434bb682d6e9f6122191ae", + "fd15130ef5ec49c1b6aed02762af346f", + "220dcd9c430447a2b43c7afd3fb77c87", + "694924a9c5e84629bd547865a00afa86", + "105804d846024856a86ff4ef0dd211e3", + 
"998c97c5a6ea4fff9940a37ae3e4d495", + "71dd0a545c8a4de5b74bffb636cea068", + "526512b558c24e18ae5638dd2cf26145", + "e0aad28ff3fb4cecbb6d9de4e8faafc0", + "29bccb934f7940309e73019628047d80", + "e10d01edf5a34456a8c650e10083bd88", + "77cb6f0eb0214050bbf91e1cd319df94", + "910f7c3b17d84062ae9cdfddd4892e1c", + "d931385cc1cb4f26a1099c688266953a", + "2174f58086db4250bb1d4a47bdbc2697", + "ee5c8b2902a0463f897837ff8be3907d", + "bf6c2ed3f2ac49b28ebec34e588629cb", + "13e57821f933459b9424da3e82dc6612", + "2c26e1222cc54ed2bbac10d2d9e78d38", + "23a348c1207343dda1aedd863fde57f2", + "db26355b91ff43e78e26331a8b3f10db", + "4bedbd4810c04b829e01b12ca9ff4392" + ] + }, + "id": "a6b18d03-6976-4eec-9ab0-7b763d73ad17", + "outputId": "8b0dc2e7-5b71-4604-ceba-9f081ab83fea" + }, + "outputs": [], + "source": [ + "pipeline = DiffusionPipeline.from_pretrained(\"Floki00/qc_unitary_3qubit\", device) # Download from Hugging Face\n", + "pipeline.scheduler.set_timesteps(40)" + ] + }, + { + "cell_type": "markdown", + "id": "bc112d57-71a8-403c-a0a9-d2f7d0eb7492", + "metadata": { + "id": "bc112d57-71a8-403c-a0a9-d2f7d0eb7492" + }, + "source": [ + "Next, we set the parameters the model was trained on. Note that these are fixed and depend on the pre-trained model. The gate types are pulled from `pipeline.gate_pool` and are used to build a vocabulary." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fe0b7730", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "fe0b7730", + "outputId": "0fd81b65-0afc-436c-ab4a-ffa691d85b4e" + }, + "outputs": [], + "source": [ + "print(pipeline.gate_pool)\n", + "vocab = {i + 1: gate for i, gate in enumerate(pipeline.gate_pool)}\n", + "num_of_qubits = 3 # Number of qubits\n", + "max_gates = 12" + ] + }, + { + "cell_type": "markdown", + "id": "c0ba07c5-075a-46ca-9a22-cc758ce33cb9", + "metadata": { + "id": "c0ba07c5-075a-46ca-9a22-cc758ce33cb9" + }, + "source": [ + "The model can compile circuits composed with any subset of these gates as long as the proper \"Compile using: [,,,]\" format is used." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0f0d3e85-a22e-4a1b-ad17-b02417e56734", + "metadata": { + "id": "0f0d3e85-a22e-4a1b-ad17-b02417e56734" + }, + "outputs": [], + "source": [ + "# Notice how the x gate is missing from the prompt indicating that we want to generate circuits without the x gate\n", + "prompt = \"Compile using: ['h', 'cx', 'z', 'ccx', 'swap']\"" + ] + }, + { + "cell_type": "markdown", + "id": "cab3a1e8-a404-4ebb-9a58-251d47eebb4a", + "metadata": { + "id": "cab3a1e8-a404-4ebb-9a58-251d47eebb4a" + }, + "source": [ + "The code below will now use this prompt and the unitary (`U`) defined above to sample (or generate) 128 circuits. Because the neural network can only process real numbers, we first split the unitary matrix U into its real and imaginary components and then combine them into a single input tensor. The `infer_comp.generate_comp_tensors` command calls the inference procedure and produces a set of output matrices (`out_matrices`) representing the circuit samples." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "56edb114-3ef7-455b-b4b8-0e262e7b6316", + "metadata": { + "id": "56edb114-3ef7-455b-b4b8-0e262e7b6316" + }, + "outputs": [], + "source": [ + "# Number of circuits to sample from the trained diffusion model\n", + "samples = 128\n", + "\n", + "# Convert U to a torch complex tensor (the v0.2.5 API handles real/imag splitting internally)\n", + "U_torch = torch.tensor(U, dtype=torch.complex64)\n", + "\n", + "out_matrices = generate_compilation_tensors(\n", + " pipeline,\n", + " prompt=prompt,\n", + " U=U_torch,\n", + " samples=samples,\n", + " system_size=num_of_qubits,\n", + " num_of_qubits=num_of_qubits,\n", + " max_gates=max_gates,\n", + " g=10.0,\n", + " no_bar=True,\n", + " auto_batch_size=256,\n", + " tensor_prod_pad=False,\n", + " enable_params=False,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "81f09d75-a960-4456-bdb9-57ef7668864e", + "metadata": { + "id": "81f09d75-a960-4456-bdb9-57ef7668864e" + }, + "source": [ + "The matrix for the first circuit generated by the model is printed below." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "13c1af73-11bc-46ca-99dd-bccea29f95c1", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "13c1af73-11bc-46ca-99dd-bccea29f95c1", + "outputId": "d1644bc4-39d8-41d2-f17b-52fde3f68c69", + "scrolled": true + }, + "outputs": [], + "source": [ + "out_matrices[0]" + ] + }, + { + "cell_type": "markdown", + "id": "5605d0a1-626d-4aa5-8721-0416470215dc", + "metadata": { + "id": "5605d0a1-626d-4aa5-8721-0416470215dc" + }, + "source": [ + "## Converting matrices to CUDA-Q kernels" + ] + }, + { + "cell_type": "markdown", + "id": "8b43d006-1e6a-4911-9283-b5a96eb5cf06", + "metadata": { + "id": "8b43d006-1e6a-4911-9283-b5a96eb5cf06" + }, + "source": [ + "Next, we convert each generated matrix into a `cudaq.kernel`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "01efc6f7-8f0f-425f-be58-7fcaf880d0fd", + "metadata": { + "id": "01efc6f7-8f0f-425f-be58-7fcaf880d0fd" + }, + "outputs": [], + "source": [ + "cudaq.set_target(\"qpp-cpu\") # Note that cpu is faster for 3-qubit kernels\n", + "\n", + "# cudaq.set_target('nvidia') # Set to GPU for larger circuits" + ] + }, + { + "cell_type": "markdown", + "id": "229ab19e", + "metadata": { + "id": "229ab19e" + }, + "source": [ + "It is possible that some of the generated matrices might not correspond to a valid kernel. For example, a generated matrix might have encoded a CNOT gate with two controls and no target (i.e., a column of the matrix might be $[-2,-2,0]$), and another generated matrix may have encoded an $H$ and $Z$ gate, applied separately and simultaneously to two qubits at step 1 (i.e., the first column of the matrix might be $[1,0,3]$). Neither of these are meaningful quantum kernels. Therefore, in the next code block, we filter out only the valid matrices." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9d5b9ae5-c11e-413b-af00-4412dc86b48d", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 87, + "referenced_widgets": [ + "b310c942e2cc4d7085c922bed5358160", + "aa900cd018d1403f84aa535058d507f9", + "4dc93ecde85c442d97266161bb5b0c02", + "3062232446fe4d8ea0a94ad3e2e7f961", + "4a6f46a6bf5b450ea634e86066557209", + "39e45c22c533420f830bfb050710ce18", + "38f77d3adea14406991d812dd550309b", + "e900a980155044799e6b0afdbfed061e", + "737ea14ba4c74ddf8cc270df7304ad25", + "ef3accf5708f4148a4925e25eb8d5cbd", + "f4f61fc340b74441b39358934a75a517" + ] + }, + "id": "9d5b9ae5-c11e-413b-af00-4412dc86b48d", + "outputId": "95fddc59-2f7a-46f9-b06f-1f9bc4258ee4" + }, + "outputs": [], + "source": [ + "kernel_list = []\n", + "valid_matrices = []\n", + "\n", + "invalid_matrices = 0\n", + "for out_matrices_i in tqdm(out_matrices):\n", + "\n", + " # Use a try-except to catch invalid matrices(if any)\n", + " try:\n", + " kernel = genqc_to_cudaq(out_matrices_i, vocab) # Convert out_matrices to CUDA-Q kernels\n", + " except:\n", + " kernel = None\n", + "\n", + " if kernel:\n", + " kernel_list.append(kernel)\n", + " valid_matrices.append(out_matrices_i)\n", + " else:\n", + " invalid_matrices += 1\n", + "\n", + "print(f\"The model generated {invalid_matrices} invalid matrices that do not correspond to a circuit.\")" + ] + }, + { + "cell_type": "markdown", + "id": "0a5823e6-9189-46b7-974d-43a2a0cad531", + "metadata": { + "id": "0a5823e6-9189-46b7-974d-43a2a0cad531" + }, + "source": [ + "For example, the following generated matrix" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dc7d35c7-a7cb-4663-8220-b1030e534deb", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "dc7d35c7-a7cb-4663-8220-b1030e534deb", + "outputId": "97f8cacf-0eba-4249-c898-566eaf449d58" + }, + "outputs": [], + "source": [ + "valid_matrices[0]" + ] + }, + { + "cell_type": 
"markdown", + "id": "47116105-b1e1-454e-80fc-6a03061045f5", + "metadata": { + "id": "47116105-b1e1-454e-80fc-6a03061045f5" + }, + "source": [ + "corresponds to the following `cudaq.kernel`" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4835c288-b018-49d7-b307-3ff05c2c8c56", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "4835c288-b018-49d7-b307-3ff05c2c8c56", + "outputId": "54038bab-34ad-4a21-f8d2-e046bc32f3e5" + }, + "outputs": [], + "source": [ + "# Arbitrary input state to the circuit for plotting\n", + "\n", + "input_state = [0] * (2**num_of_qubits)\n", + "\n", + "print(cudaq.draw(kernel_list[0], input_state))" + ] + }, + { + "cell_type": "markdown", + "id": "7W5DDsXP39Zr", + "metadata": { + "id": "7W5DDsXP39Zr" + }, + "source": [ + "Our first filter removed circuits that were structurally invalid, but this doesn't guarantee the remaining ones are correct. Think of it as checking for spelling errors before checking for meaning. Now, in the next section, we'll perform that second check: filtering for the circuits that actually approximate the target unitary." + ] + }, + { + "cell_type": "markdown", + "id": "59809685-5d70-4612-8642-c4e924a1ab6c", + "metadata": { + "id": "59809685-5d70-4612-8642-c4e924a1ab6c" + }, + "source": [ + "## Evaluating Sampled Unitaries\n", + "\n", + "As mentioned earlier, one of the key advantages of using diffusion models (DMs) as a unitary compiler is the ability to rapidly sample many circuits. However, as is common in machine learning, the model has a certain accuracy, meaning not all generated circuits are expected to exactly compile the specified unitary. In this section, you will evaluate how many of the generated circuits are indeed correct and then perform post-selection to identify (at least) one circuit that successfully performs the desired unitary operation." 
+ ] + }, + { + "cell_type": "markdown", + "id": "f5d1e8ca-3abf-47b4-bbca-932bdff86f94", + "metadata": { + "id": "f5d1e8ca-3abf-47b4-bbca-932bdff86f94" + }, + "source": [ + "First, calculate the $2^n\\times2^n$ unitary matrix $U$ implemented by each of the kernels. The elements of this matrix are defined by the transition amplitudes between the basis states, which can be expressed as:\n", + "$$\n", + "\\begin{equation}\n", + " \\langle i|kernel|j\\rangle = U_{ij},\n", + "\\end{equation}\n", + "$$\n", + "where $|i\\rangle$ and $|j\\rangle$ are computational basis states (typically in the $Z$-basis), with $|i\\rangle$ representing the standard basis vector of dimension $2^n$ that has a $1$ in the $i^{th}$ position and $0$ elsewhere.\n", + "\n", + "> **Exercise 5:**\n", + "> Write a function to compute the expression above from the CUDA-Q kernel. Compute the unitaries for each of the 128 sampled circuits." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c689aacd-caf6-45e6-a289-608945c2d2b5", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 70, + "referenced_widgets": [ + "476d90bf41d6406c8d76e3a711be7f74", + "5eeb57c04b964b178f32ae64fd1a4b37", + "db8c05704fd74268a13695e68427b817", + "12f353ac2bee47b7a9fcacbd95f0bd5b", + "fb7bef7423e0452c9538386408373d12", + "3728f02d8a45406a9d9f25fcde7533b6", + "ecfa17414df440008d57b784000b3c17", + "d27061d606424719bcd1ab57aeb6a22f", + "36f6b0f4543643b388dfd6b5a2996f3a", + "f03ac25204bf429b8b356bde6a52c521", + "e1ca83fab88b46e290b9f70d612193a9" + ] + }, + "id": "c689aacd-caf6-45e6-a289-608945c2d2b5", + "outputId": "16c24654-8b58-4391-bca8-06f7d190c633" + }, + "outputs": [], + "source": [ + "#TODO START\n", + "def get_unitary(kernel: cudaq.PyKernel) -> np.ndarray:\n", + " N = 2**num_of_qubits\n", + " unitary = np.zeros((N, N), dtype=np.complex128)\n", + "\n", + " for j in range(N):\n", + " basis_state_j = np.zeros(N, dtype=np.complex128)\n", + " basis_state_j[j] = 1\n", + " 
unitary[:, j] = np.array(cudaq.get_state(kernel, basis_state_j), copy=False)\n", + "\n", + " return unitary\n", + "\n", + "N = 2**num_of_qubits\n", + "got_unitaries = np.zeros((len(kernel_list), N, N), dtype=np.complex128)\n", + "\n", + "for i, kernel in tqdm(enumerate(kernel_list), total=got_unitaries.shape[0]):\n", + " got_unitaries[i, :, :] = get_unitary(kernel)\n", + "#TODO END\n" + ] + }, + { + "cell_type": "markdown", + "id": "c5eb62ba-40a6-486c-b307-47b11b9b3226", + "metadata": { + "id": "c5eb62ba-40a6-486c-b307-47b11b9b3226" + }, + "source": [ + "For example, the circuit printed above corresponds to the following unitary:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4a471612-a3e4-4bea-9f61-dc823283af95", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "4a471612-a3e4-4bea-9f61-dc823283af95", + "outputId": "b8c122e1-0378-4ba8-8b28-c3216e00ad12" + }, + "outputs": [], + "source": [ + "np.set_printoptions(linewidth=1000)\n", + "print(np.round(got_unitaries[0], 4))" + ] + }, + { + "cell_type": "markdown", + "id": "e5241775-2692-4b10-b9ef-a75a3ddb0eb8", + "metadata": { + "id": "e5241775-2692-4b10-b9ef-a75a3ddb0eb8" + }, + "source": [ + "Now that we have the unitaries for each of the kernels, we compare them to the user provided unitary matrix, `U`.\n", + "To do so, we compute the infidelity between the exact unitary and the generated ones.\n", + "The infidelity is defined as follows:\n", + "\n", + "\\begin{equation}\n", + "\\text{Infidelity}(U, V) = 1 - \\left|\\frac{1}{2^n} \\text{Tr} (U^\\dagger V) \\right|^2.\n", + "\\end{equation}\n", + "\n", + "The infidelity is a value between 0 and 1, where 0 indicates that the unitaries are identical (up to a global phase).\n", + "\n", + "> **Exercise 6:**\n", + "Compute the infidelities for each sampled unitary and plot a histogram based on infidelity. 
How many circuits had a near zero infidelity?\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bb4915e8-5a92-4038-ab49-fa67a0a5b668", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 436 + }, + "id": "bb4915e8-5a92-4038-ab49-fa67a0a5b668", + "outputId": "e52b7def-c130-4b4f-ecd9-2e3f96e6c8be" + }, + "outputs": [], + "source": [ + "#TODO Start\n", + "def infidelity(want_unitary, got_unitary):\n", + " return 1 - np.abs(np.trace(np.conj(want_unitary).T @ got_unitary) / 2**num_of_qubits) ** 2\n", + "\n", + "\n", + "infidelities = np.array([infidelity(U, got_unitary) for got_unitary in got_unitaries])\n", + "\n", + "plt.figure(figsize=(7, 4))\n", + "plt.title(\n", + " f\"Distribution of infidelities for {len(got_unitaries)} generated circuits\",\n", + " fontsize=12,\n", + ")\n", + "plt.ylabel(\"Number of circuits\", fontsize=14)\n", + "plt.xlabel(\"Unitary infidelity\", fontsize=14)\n", + "plt.hist(infidelities, bins=30)\n", + "plt.show()\n", + "\n", + "#TODO End" + ] + }, + { + "cell_type": "markdown", + "id": "f1912f6c-e685-4994-bf3a-fe9530dbd513", + "metadata": { + "id": "f1912f6c-e685-4994-bf3a-fe9530dbd513" + }, + "source": [ + "The circuit with the lowest infidelity is printed below." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "656baf92-5a1e-4227-ac1f-67b5f9394310", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "656baf92-5a1e-4227-ac1f-67b5f9394310", + "outputId": "1d8a80d0-733c-422f-a598-1b4301f93d31" + }, + "outputs": [], + "source": [ + "min_index = np.argmin(infidelities)\n", + "\n", + "print(f\"The best kernel has an infidelity of {infidelities[min_index]:0.2},\")\n", + "\n", + "input_state = [0] * (2**num_of_qubits)\n", + "input_state[0] = 1\n", + "print(cudaq.draw(kernel_list[min_index], input_state))\n", + "\n", + "print(f\"with the unitary:\")\n", + "print(np.round(got_unitaries[min_index], 4))" + ] + }, + { + "cell_type": "markdown", + "id": "6e724b39-cc63-40c8-8ad0-9d90d94e23bd", + "metadata": { + "id": "6e724b39-cc63-40c8-8ad0-9d90d94e23bd" + }, + "source": [ + "which, as we can see, exactly compiled our targeted unitary:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "46d190d6-7bed-4490-a343-bf856190a101", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "46d190d6-7bed-4490-a343-bf856190a101", + "outputId": "0af2edd7-126c-4b4d-ee97-689ed145d00f" + }, + "outputs": [], + "source": [ + "print(np.round(U, 4))" + ] + }, + { + "cell_type": "markdown", + "id": "f8bf2fe6-07ca-4bfa-a46b-96a91daeb185", + "metadata": { + "id": "f8bf2fe6-07ca-4bfa-a46b-96a91daeb185" + }, + "source": [ + "## Select a circuit that meets specific criteria" + ] + }, + { + "cell_type": "markdown", + "id": "983dc8c1-f3ae-4d06-ad7a-e57addfc6e74", + "metadata": { + "id": "983dc8c1-f3ae-4d06-ad7a-e57addfc6e74" + }, + "source": [ + "As you have seen above, you now have almost 30 kernels that compile the desired unitary! This is particularly valuable when dealing with hardware constraints, where, for instance, you might want to avoid using certain qubits or specific gates. Here are a few scenarios where these sorts of choices matter. 
The rest of the notebook will work through the first case, but you can come back and work through any of these preferences.\n", + "\n", + "1. A common practice for reducing circuit overhead is to minimize the number of Toffoli gates, as they are particularly costly and error-prone due to the large number of non-Clifford T gates required for their implementation.\n", + " \n", + "2. Certain QPUs like neutral atom and superconducting processors can trivially implement $Z$ gates using software control, while $X$ gates require a more error-prone pulse. Thus, for these modalities, it is favorable to produce circuits with a bias towards $Z$ gates over $X$ gates, holding the number of two qubit gates constant.\n", + "\n", + "3. When considering quantum error correction (QEC), the type of QEC code can dictate which types of gates are transversal, meaning they can be trivially applied to all data qubits to produce the logical gate. This can change from code to code, so selecting circuits which maximize the number of transversal gates is ideal. This can even mean favoring transversal CNOT gates over non-transversal single qubit gates.\n", + "\n", + "Going back to example 1, minimizing the number of Toffoli gates (`ccx`), let's sort our valid circuits for those with few `ccx` gates.
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "acb0f4d8-2e8a-4b32-9e37-006571ca2aa5", + "metadata": { + "id": "acb0f4d8-2e8a-4b32-9e37-006571ca2aa5" + }, + "outputs": [], + "source": [ + "# First, remove possible duplicates and only pick distinct circuits\n", + "_, idx_unique = np.unique(torch.stack(valid_matrices).cpu().numpy(), axis=0, return_index=True)\n", + "unique_tensors = torch.stack(valid_matrices).cpu()[idx_unique]\n", + "unique_infidelities = infidelities[idx_unique]\n", + "unique_kernels = [kernel_list[idx] for idx in idx_unique]\n", + "\n", + "# Then, find the correct circuits\n", + "idx_correct = torch.argwhere(torch.tensor(unique_infidelities) < 0.01).flatten()\n", + "correct_tensors = unique_tensors[idx_correct]\n", + "print(f\"The model generated {correct_tensors.shape[0]} distinct correct circuits.\")\n", + "\n", + "# Now flatten the last two dimensions (related to the actual circuit)\n", + "# and find out how many 5's (i.e., ccx) gates each circuit has:\n", + "num_ccx = (correct_tensors.flatten(1, 2) == 5).sum(1)\n", + "print(\"These circuits have this number of ccx gates:\", num_ccx)" + ] + }, + { + "cell_type": "markdown", + "id": "7625aac1-78d8-4ce3-9765-88ae48c92dd3", + "metadata": { + "id": "7625aac1-78d8-4ce3-9765-88ae48c92dd3" + }, + "source": [ + "It appears that the diffusion model requires at least one Toffoli gate to compile the unitary. You can now print a few of these circuits to select the one that best suits the situation or to identify any noteworthy patterns the model employs for this specific unitary." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "535501a6-cafb-48e6-b53c-636d7ac5a815", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "535501a6-cafb-48e6-b53c-636d7ac5a815", + "outputId": "f493183e-23f8-4707-d4fc-e01f8634d959" + }, + "outputs": [], + "source": [ + "# Get the correct kernels\n", + "correct_kernels = [unique_kernels[idx] for idx in idx_correct]\n", + "\n", + "# Get the ones with only one ccx\n", + "correct_kernels_ccx1 = [correct_kernels[idx] for idx in torch.argwhere(num_ccx == 1).flatten()]\n", + "\n", + "# Draw a few of these circuits\n", + "for kernel in correct_kernels_ccx1[:2]:\n", + " print(cudaq.draw(kernel, input_state))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "80e55b79-2529-4e8a-b8c3-d034afa07588", + "metadata": { + "id": "80e55b79-2529-4e8a-b8c3-d034afa07588", + "outputId": "9eead3d9-5310-41df-df05-f4c742d9b778" + }, + "outputs": [], + "source": [ + "# Get the tensors with more than one ccx\n", + "correct_kernels_ccx3 = [correct_kernels[idx] for idx in torch.argwhere(num_ccx == 3).flatten()]\n", + "\n", + "# Draw a few of these circuits\n", + "for kernel in correct_kernels_ccx3[:2]:\n", + " print(cudaq.draw(kernel, input_state))" + ] + }, + { + "cell_type": "markdown", + "id": "1fd9a7f3", + "metadata": { + "id": "1fd9a7f3" + }, + "source": [ + "\n", + "## Compiling Noisy Circuits" + ] + }, + { + "cell_type": "markdown", + "id": "03b7431a", + "metadata": { + "id": "03b7431a" + }, + "source": [ + "In this section, we'll define a `noise_model` and verify that a lower number of `ccx` gates yields better results under this noise model.\n", + "For more details, see the [Noisy Simulation example](https://nvidia.github.io/cuda-quantum/latest/examples/python/noisy_simulations.html) in CUDA-Q documentation. \n", + "\n", + "The cell below defines a depolarizing noise channel and applies it to all `ccx` and `cx` gates. 
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f556cf2b-ce21-47d2-824e-fd56737f9212", + "metadata": { + "id": "f556cf2b-ce21-47d2-824e-fd56737f9212" + }, + "outputs": [], + "source": [ + "# Define a noise model\n", + "\n", + "def tensor(matrices):\n", + " return functools.reduce(np.kron, matrices)\n", + "\n", + "def depolarizing_kraus(p: float, n: int = 2):\n", + " I = np.array([[1, 0], [0, 1]], dtype=np.complex128)\n", + " X = np.array([[0, 1], [1, 0]], dtype=np.complex128)\n", + " Y = np.array([[0, -1j], [1j, 0]], dtype=np.complex128)\n", + " Z = np.array([[1, 0], [0, -1]], dtype=np.complex128)\n", + "\n", + " paulis = [I, X, Y, Z]\n", + "\n", + " # Kraus operators\n", + " kraus_operators = [np.sqrt(1 - p) * tensor([I] * n)]\n", + " coeff = np.sqrt(p / (4**n - 1))\n", + "\n", + " for paulis in itertools.product(paulis, repeat=n):\n", + " if not all(np.array_equal(p, I) for p in paulis):\n", + " kraus_operators.append(coeff * tensor(paulis))\n", + "\n", + " return kraus_operators\n", + "\n", + "\n", + "noise_model = cudaq.NoiseModel()\n", + "noise_model.add_all_qubit_channel(\"ccx\", cudaq.KrausChannel(depolarizing_kraus(0.1, n=3)))\n", + "noise_model.add_all_qubit_channel(\"cx\", cudaq.KrausChannel(depolarizing_kraus(0.01)))" + ] + }, + { + "cell_type": "markdown", + "id": "d40c0242", + "metadata": {}, + "source": [ + "To simulate a noisy circuit, using a density matrix simulator is convenient. To call up the density matrix simulator, simply change the target with `cudaq.set_target(\"density-matrix-cpu\")`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b889d8fa-521c-4db8-86e3-1dd709096fd2", + "metadata": { + "id": "b889d8fa-521c-4db8-86e3-1dd709096fd2" + }, + "outputs": [], + "source": [ + "# Example of execution\n", + "cudaq.set_target(\"density-matrix-cpu\")\n", + "shots_count = 5000\n", + "\n", + "# Sample with noiseless simulation\n", + "result = dict(cudaq.sample(correct_kernels[0], input_state, shots_count=shots_count).items())" + ] + }, + { + "cell_type": "markdown", + "id": "5c5589e2", + "metadata": {}, + "source": [ + "\n", + "The `cudaq.sample` function can take a noise model as an argument to perform a simulation with noise: `cudaq.sample(kernel, noise_model=noise_model)`." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "af9b6cac-9089-4733-84da-a24c2af68e2a", + "metadata": { + "id": "af9b6cac-9089-4733-84da-a24c2af68e2a" + }, + "outputs": [], + "source": [ + "# Sample using noisy simulation for a kernel with only 1 ccx gates\n", + "result_ccx = dict(\n", + " cudaq.sample(\n", + " correct_kernels_ccx1[0],\n", + " input_state,\n", + " noise_model=noise_model,\n", + " shots_count=shots_count,\n", + " ).items()\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c004f110-3c99-481f-b17b-36227456ac55", + "metadata": { + "id": "c004f110-3c99-481f-b17b-36227456ac55" + }, + "outputs": [], + "source": [ + "# Sample using noisy simulation for a kernel with 3 ccx gates\n", + "result_ccx3 = dict(\n", + " cudaq.sample(\n", + " correct_kernels_ccx3[0],\n", + " input_state,\n", + " noise_model=noise_model,\n", + " shots_count=shots_count,\n", + " ).items()\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d6525651-8e38-4093-ae16-4d1dc9aa7cd1", + "metadata": { + "id": "d6525651-8e38-4093-ae16-4d1dc9aa7cd1", + "outputId": "f55067d1-c1d7-4ad7-80eb-e1f110612d30" + }, + "outputs": [], + "source": [ + "# Merge all bitstrings to ensure consistency across results\n", + 
"bitstrings = sorted(set(result_ccx.keys()) | set(result.keys()) | set(result_ccx3.keys()))\n", + "\n", + "# Function to extract probabilities\n", + "def get_probabilities(result, keys):\n", + " total_shots = sum(result.values())\n", + " return [result.get(k, 0) / total_shots for k in keys]\n", + "\n", + "# Extracting probabilities\n", + "prob = get_probabilities(result, bitstrings)\n", + "prob_ccx = get_probabilities(result_ccx, bitstrings)\n", + "prob_ccx3 = get_probabilities(result_ccx3, bitstrings)\n", + "\n", + "# Bar width\n", + "bar_width = 0.3\n", + "x = np.arange(len(bitstrings))\n", + "\n", + "# Plot bars\n", + "plt.figure(figsize=(10, 6))\n", + "plt.bar(x - bar_width, prob, bar_width, label=\"Noiseless simulation\", color=\"#808080\")\n", + "plt.bar(x, prob_ccx, bar_width, label=\"Noisy simulation w/ 1 ccx\", color=\"#76B900\")\n", + "plt.bar(\n", + " x + bar_width,\n", + " prob_ccx3,\n", + " bar_width,\n", + " label=\"Noisy simulation w/ 3 ccx\",\n", + " color=\"#c4e884\",\n", + ")\n", + "\n", + "# Labels\n", + "plt.xticks(x, bitstrings)\n", + "plt.xlabel(\"Bitstring Outcomes\")\n", + "plt.ylabel(\"Probability\")\n", + "plt.title(\"Comparison of kernels with different number of ccx gates\")\n", + "plt.legend(fontsize=14)\n", + "\n", + "# Show plot\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "2a6ba367-9389-4eee-b679-18cc1a147959", + "metadata": { + "id": "2a6ba367-9389-4eee-b679-18cc1a147959" + }, + "source": [ + "This histogram represents why unitary compilation is so important. With a small three qubit example, running three circuits that produce the exact same unitary, the sampled circuits with more multi-qubit gates produce inferior results. Consider that for a fully scaled up application, good compilation might be the difference between success and a meaningless output or infeasible runtime. \n", + "\n", + "## Summary\n", + "\n", + "AI has the potential to be a powerful tool for compilation especially at scale. 
Researchers may be able to use such a tool to better understand the impacts of device noise or identify patterns which make for more favorable circuits. The AI workflow you explored today is also highly flexible. It can consider different gate sets, circuit lengths, and many other refinements to improve results. Keep an eye out for future research in this space as different AI techniques are applied to more complex quantum circuit compilation tasks." + ] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "gpuType": "T4", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "081b41ba50f045b49eb0632462453955": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + 
"min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "105804d846024856a86ff4ef0dd211e3": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "12f353ac2bee47b7a9fcacbd95f0bd5b": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": 
"IPY_MODEL_f03ac25204bf429b8b356bde6a52c521", + "placeholder": "​", + "style": "IPY_MODEL_e1ca83fab88b46e290b9f70d612193a9", + "value": " 127/127 [00:14<00:00,  9.68it/s]" + } + }, + "13e57821f933459b9424da3e82dc6612": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "2174f58086db4250bb1d4a47bdbc2697": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_db26355b91ff43e78e26331a8b3f10db", + "placeholder": "​", + "style": "IPY_MODEL_4bedbd4810c04b829e01b12ca9ff4392", + "value": " 605M/605M [00:07<00:00, 111MB/s]" + } + }, + "220dcd9c430447a2b43c7afd3fb77c87": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "FloatProgressModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "FloatProgressModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "ProgressView", + "bar_style": "success", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_526512b558c24e18ae5638dd2cf26145", + "max": 28839173, + "min": 0, + "orientation": "horizontal", + "style": 
"IPY_MODEL_e0aad28ff3fb4cecbb6d9de4e8faafc0", + "value": 28839173 + } + }, + "23a348c1207343dda1aedd863fde57f2": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "ProgressStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "ProgressStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "bar_color": null, + "description_width": "" + } + }, + "29bccb934f7940309e73019628047d80": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "2c26e1222cc54ed2bbac10d2d9e78d38": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": 
"@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "3062232446fe4d8ea0a94ad3e2e7f961": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_ef3accf5708f4148a4925e25eb8d5cbd", + "placeholder": "​", + "style": "IPY_MODEL_f4f61fc340b74441b39358934a75a517", + "value": " 128/128 [00:07<00:00, 18.81it/s]" + } + }, + "36f6b0f4543643b388dfd6b5a2996f3a": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "ProgressStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + 
"_model_name": "ProgressStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "bar_color": null, + "description_width": "" + } + }, + "3728f02d8a45406a9d9f25fcde7533b6": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "38f77d3adea14406991d812dd550309b": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "39e45c22c533420f830bfb050710ce18": { + "model_module": 
"@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "3e1a67f27ddc47da9654640415abcbc1": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + 
"grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "44496a784f1a4d9ebfabcea69113ffc3": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_deebca617f664d3d80d58a4ce749147c", + "placeholder": "​", + "style": "IPY_MODEL_f88617e7ffbf4574aad45263697b1b2a", + "value": "config.yaml: " + } + }, + "476d90bf41d6406c8d76e3a711be7f74": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HBoxModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HBoxModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HBoxView", + "box_style": "", + "children": [ + "IPY_MODEL_5eeb57c04b964b178f32ae64fd1a4b37", + "IPY_MODEL_db8c05704fd74268a13695e68427b817", + "IPY_MODEL_12f353ac2bee47b7a9fcacbd95f0bd5b" + ], + "layout": "IPY_MODEL_fb7bef7423e0452c9538386408373d12" + } + }, + "487f574ae85e4be4b762eb361e434260": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "ProgressStyleModel", + "state": { + "_model_module": 
"@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "ProgressStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "bar_color": null, + "description_width": "" + } + }, + "4a6f46a6bf5b450ea634e86066557209": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "4bedbd4810c04b829e01b12ca9ff4392": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + 
}, + "4dc93ecde85c442d97266161bb5b0c02": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "FloatProgressModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "FloatProgressModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "ProgressView", + "bar_style": "success", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_e900a980155044799e6b0afdbfed061e", + "max": 128, + "min": 0, + "orientation": "horizontal", + "style": "IPY_MODEL_737ea14ba4c74ddf8cc270df7304ad25", + "value": 128 + } + }, + "526512b558c24e18ae5638dd2cf26145": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + 
"5eeb57c04b964b178f32ae64fd1a4b37": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_3728f02d8a45406a9d9f25fcde7533b6", + "placeholder": "​", + "style": "IPY_MODEL_ecfa17414df440008d57b784000b3c17", + "value": "100%" + } + }, + "694924a9c5e84629bd547865a00afa86": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_29bccb934f7940309e73019628047d80", + "placeholder": "​", + "style": "IPY_MODEL_e10d01edf5a34456a8c650e10083bd88", + "value": " 28.8M/28.8M [00:01<00:00, 23.5MB/s]" + } + }, + "6ebf1e1dc9fd41f1b1b969267a1ed4d2": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_d4e31a5cc2e943f2a98e35eca2959f1d", + "placeholder": "​", + "style": "IPY_MODEL_daebf2fefc4e4ab7a4bbbe212155b679", + "value": " 1.55k/? 
[00:00<00:00, 29.1kB/s]" + } + }, + "71dd0a545c8a4de5b74bffb636cea068": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "737ea14ba4c74ddf8cc270df7304ad25": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "ProgressStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "ProgressStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "bar_color": null, + "description_width": "" + } + }, + "77cb6f0eb0214050bbf91e1cd319df94": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HBoxModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HBoxModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HBoxView", + "box_style": "", + "children": [ + "IPY_MODEL_910f7c3b17d84062ae9cdfddd4892e1c", + "IPY_MODEL_d931385cc1cb4f26a1099c688266953a", + "IPY_MODEL_2174f58086db4250bb1d4a47bdbc2697" + ], + "layout": "IPY_MODEL_ee5c8b2902a0463f897837ff8be3907d" + } + }, + "80f02a5129f149358ae4f2a230c7c169": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": 
"@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "817fecbf36494cb59d5ff42caa1fe9e3": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HBoxModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HBoxModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HBoxView", + "box_style": "", + "children": [ + "IPY_MODEL_86ea7c338b5a407ab76aad8a45318149", + "IPY_MODEL_c3641b0991174bfe9e3a0eb75229297e", + "IPY_MODEL_db46b68da8fd4e46aca2b66606870ce4" + ], + "layout": "IPY_MODEL_a83de155c9434f128729e7347db7a36d" + } + }, + "86ea7c338b5a407ab76aad8a45318149": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + 
"_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_081b41ba50f045b49eb0632462453955", + "placeholder": "​", + "style": "IPY_MODEL_a5477470671e4aadb6819d358042bcb9", + "value": "Fetching 2 files: 100%" + } + }, + "910f7c3b17d84062ae9cdfddd4892e1c": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_bf6c2ed3f2ac49b28ebec34e588629cb", + "placeholder": "​", + "style": "IPY_MODEL_13e57821f933459b9424da3e82dc6612", + "value": "open_clip_model.safetensors: 100%" + } + }, + "998c97c5a6ea4fff9940a37ae3e4d495": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + 
"object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "a5477470671e4aadb6819d358042bcb9": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "a83de155c9434f128729e7347db7a36d": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "aa900cd018d1403f84aa535058d507f9": { + "model_module": 
"@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_39e45c22c533420f830bfb050710ce18", + "placeholder": "​", + "style": "IPY_MODEL_38f77d3adea14406991d812dd550309b", + "value": "100%" + } + }, + "b310c942e2cc4d7085c922bed5358160": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HBoxModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HBoxModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HBoxView", + "box_style": "", + "children": [ + "IPY_MODEL_aa900cd018d1403f84aa535058d507f9", + "IPY_MODEL_4dc93ecde85c442d97266161bb5b0c02", + "IPY_MODEL_3062232446fe4d8ea0a94ad3e2e7f961" + ], + "layout": "IPY_MODEL_4a6f46a6bf5b450ea634e86066557209" + } + }, + "b7485d048d7f4a7a993bc01b5a278da2": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": 
null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "bf6c2ed3f2ac49b28ebec34e588629cb": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "c3641b0991174bfe9e3a0eb75229297e": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "FloatProgressModel", + "state": { + "_dom_classes": [], + "_model_module": 
"@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "FloatProgressModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "ProgressView", + "bar_style": "success", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_b7485d048d7f4a7a993bc01b5a278da2", + "max": 2, + "min": 0, + "orientation": "horizontal", + "style": "IPY_MODEL_487f574ae85e4be4b762eb361e434260", + "value": 2 + } + }, + "c662e4d797bc47a08c32540e25deb9a3": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HBoxModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HBoxModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HBoxView", + "box_style": "", + "children": [ + "IPY_MODEL_44496a784f1a4d9ebfabcea69113ffc3", + "IPY_MODEL_f0893c5606d24a7685e455bce32fbc3a", + "IPY_MODEL_6ebf1e1dc9fd41f1b1b969267a1ed4d2" + ], + "layout": "IPY_MODEL_3e1a67f27ddc47da9654640415abcbc1" + } + }, + "d27061d606424719bcd1ab57aeb6a22f": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + 
"grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "d4e31a5cc2e943f2a98e35eca2959f1d": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "d61eb3bf737f4128b955fecb3dc9d9ab": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + 
"_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": "20px" + } + }, + "d931385cc1cb4f26a1099c688266953a": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "FloatProgressModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "FloatProgressModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "ProgressView", + "bar_style": "success", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_2c26e1222cc54ed2bbac10d2d9e78d38", + "max": 605143316, + "min": 0, + "orientation": "horizontal", + "style": "IPY_MODEL_23a348c1207343dda1aedd863fde57f2", + "value": 605143316 + } + }, + "d96d6d7e91434bb682d6e9f6122191ae": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HBoxModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HBoxModel", + 
"_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HBoxView", + "box_style": "", + "children": [ + "IPY_MODEL_fd15130ef5ec49c1b6aed02762af346f", + "IPY_MODEL_220dcd9c430447a2b43c7afd3fb77c87", + "IPY_MODEL_694924a9c5e84629bd547865a00afa86" + ], + "layout": "IPY_MODEL_105804d846024856a86ff4ef0dd211e3" + } + }, + "daebf2fefc4e4ab7a4bbbe212155b679": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "db26355b91ff43e78e26331a8b3f10db": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + 
"padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "db46b68da8fd4e46aca2b66606870ce4": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_80f02a5129f149358ae4f2a230c7c169", + "placeholder": "​", + "style": "IPY_MODEL_edcd40c228f94111bb9247c77dbac612", + "value": " 2/2 [00:01<00:00,  1.16it/s]" + } + }, + "db8c05704fd74268a13695e68427b817": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "FloatProgressModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "FloatProgressModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "ProgressView", + "bar_style": "success", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_d27061d606424719bcd1ab57aeb6a22f", + "max": 127, + "min": 0, + "orientation": "horizontal", + "style": "IPY_MODEL_36f6b0f4543643b388dfd6b5a2996f3a", + "value": 127 + } + }, + "deebca617f664d3d80d58a4ce749147c": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + 
"bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "e0aad28ff3fb4cecbb6d9de4e8faafc0": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "ProgressStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "ProgressStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "bar_color": null, + "description_width": "" + } + }, + "e10d01edf5a34456a8c650e10083bd88": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "e1ca83fab88b46e290b9f70d612193a9": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", 
+ "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "e900a980155044799e6b0afdbfed061e": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "ecfa17414df440008d57b784000b3c17": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "edcd40c228f94111bb9247c77dbac612": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + 
"model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "ee5c8b2902a0463f897837ff8be3907d": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "ef3accf5708f4148a4925e25eb8d5cbd": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + 
"align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "f03ac25204bf429b8b356bde6a52c521": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": 
null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "f0893c5606d24a7685e455bce32fbc3a": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "FloatProgressModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "FloatProgressModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "ProgressView", + "bar_style": "success", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_d61eb3bf737f4128b955fecb3dc9d9ab", + "max": 1, + "min": 0, + "orientation": "horizontal", + "style": "IPY_MODEL_f1f1d933d24f4ad3b8e4e9c98748470c", + "value": 1 + } + }, + "f1f1d933d24f4ad3b8e4e9c98748470c": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "ProgressStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "ProgressStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "bar_color": null, + "description_width": "" + } + }, + "f4f61fc340b74441b39358934a75a517": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "f88617e7ffbf4574aad45263697b1b2a": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + 
"_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "fb7bef7423e0452c9538386408373d12": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "1.2.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "fd15130ef5ec49c1b6aed02762af346f": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "1.5.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": 
"IPY_MODEL_998c97c5a6ea4fff9940a37ae3e4d495", + "placeholder": "​", + "style": "IPY_MODEL_71dd0a545c8a4de5b74bffb636cea068", + "value": "model.pt: 100%" + } + } + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/ai-for-quantum/01_compiling_unitaries_using_diffusion_models.ipynb b/ai-for-quantum/01_compiling_unitaries_using_diffusion_models.ipynb deleted file mode 100644 index 51c768e..0000000 --- a/ai-for-quantum/01_compiling_unitaries_using_diffusion_models.ipynb +++ /dev/null @@ -1,1404 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "b2b87838-c10b-4ab4-af2a-ad4f9806b158", - "metadata": {}, - "outputs": [], - "source": [ - "# SPDX-License-Identifier: Apache-2.0 AND CC-BY-NC-4.0\n", - "#\n", - "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", - "# you may not use this file except in compliance with the License.\n", - "# You may obtain a copy of the License at\n", - "#\n", - "# http://www.apache.org/licenses/LICENSE-2.0\n", - "#\n", - "# Unless required by applicable law or agreed to in writing, software\n", - "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", - "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", - "# See the License for the specific language governing permissions and\n", - "# limitations under the License." - ] - }, - { - "cell_type": "markdown", - "id": "0", - "metadata": { - "id": "d60f1e23-2541-4b01-84c8-0aff758061b4" - }, - "source": [ - "# AI for Quantum: Compiling Unitaries Using Diffusion Models\n", - "$\\renewcommand{\\ket}[1]{|#1\\rangle}$\n", - "\n", - "AI is a powerful tool for enabling some of the hardest aspects of a hybrid quantum classical workflow including QEC, compilation, calibration, etc. (See the review paper [here](https://arxiv.org/abs/2411.09131) for more AI for Quantum use cases). 
Compiling quantum algorithms is an incredible challenge which involves identifying a target unitary, finding an appropriate circuit representation, and then efficiently running the circuit on highly contraining hardware.\n", - "\n", - "In recent papers [Quantum circuit synthesis with diffusion models](https://doi.org/10.1038/s42256-024-00831-9) and [Synthesis of discrete-continuous quantum circuits with multimodal diffusion models](https://arxiv.org/abs/2506.01666), it was demonstrated how diffusion models can be used for unitary synthesis. This lab will explore the problem of unitary synthesis, introduce a diffusion model used in the work, and allow you to compile circuits of your own using AI.\n", - "\n", - "**Pre-requisites:** No experience with diffusion models is necessary. However, this notebook will not provide a detailed discussion on diffusion models or their construction. For curious readers, we suggest NVIDIA's Deep Learning Institute [course](https://learn.nvidia.com/courses/course-detail?course_id=course-v1:DLI+S-FX-14+V1) on diffusion models. As far as quantum prerequisites, familiarity with the basics of quantum computing like gates, state vectors, etc. is required. 
If you are not familiar with these concepts, please complete the [Quick Start to Quantum Computing](https://github.com/NVIDIA/cuda-q-academic/tree/main/quick-start-to-quantum) course first.\n", - "\n", - "\n", - "**What you'll do:**\n", - "* Learn the basics of unitary synthesis and try to compile a unitary by hand\n", - "* Encode quantum circuits as inputs for the diffusion model\n", - "* Synthesize quantum circuits corresponding to a given unitary matrix with a diffusion model\n", - "* Evaluate if the obtained circuit is accurate\n", - "* Filter better quantum circuits\n", - "* Sample a circuit using a noise model\n", - "\n", - "🎥 You can [watch a recording of the presentation](https://www.nvidia.com/en-us/on-demand/session/gtcdc25-dct51159/?playlistId=gtcdc25-quantum-computing-and-hpc&start=3889) of this notebook from a GTC DC tutorial in October 2025.\n", - "\n", - "Let's begin with installing the relevant packages." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1", - "metadata": { - "id": "ZFf2-X07xgBq" - }, - "outputs": [], - "source": [ - "# Uncomment and execute the following lines if you are working in an environment without these packages already installed\n", - "#!pip install cudaq\n", - "#!pip install genQC==0.1.0\n", - "#!pip install torch==2.8.0\n", - "#!pip install numpy==2.2.6" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2", - "metadata": { - "id": "07d177c6-60fd-497d-ae2b-41e225b56a2d" - }, - "outputs": [], - "source": [ - "import functools\n", - "import itertools\n", - "import numpy as np\n", - "import torch\n", - "import cudaq\n", - "import matplotlib.pyplot as plt\n", - "\n", - "import genQC\n", - "from genQC.imports import *\n", - "from genQC.pipeline.diffusion_pipeline import DiffusionPipeline\n", - "from genQC.inference.export_cudaq import genqc_to_cudaq\n", - "import genQC.inference.infer_compilation as infer_comp\n", - "import genQC.util as util\n", - "\n", - "# Fixed seed for 
reproducibility\n", - "torch.manual_seed(0)\n", - "np.random.seed(0)" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": { - "id": "41705f30-fb6a-46e8-855c-3357d3e73704" - }, - "source": [ - "## The Challenge of Unitary Synthesis and Compilation\n", - "\n", - "In a sense, quantum computing is extremely simple, corresponding to the multiplication of a unitary matrix with a state vector to produce the desired quantum state that solves a problem. In the example below we use the ordering $\\ket{q_0q_1q_2}$. The initial state vector is $\\ket{000}$ and the state produced after multiplying by the unitary matrix $U$ is $\\ket{111}$.\n", - "\n", - "$$\n", - "\\underbrace{\n", - "\\begin{pmatrix}\n", - "0 & 0 & 0 & 1 & 0 & 0 & 0 & 0 \\\\\n", - "0 & 0 & 1 & 0 & 0 & 0 & 0 & 0 \\\\\n", - "0 & 1 & 0 & 0 & 0 & 0 & 0 & 0 \\\\\n", - "0 & 0 & 0 & 0 & 1 & 0 & 0 & 0 \\\\\n", - "0 & 0 & 0 & 0 & 0 & 0 & 0 & 1 \\\\\n", - "0 & 0 & 0 & 0 & 0 & 0 & 1 & 0 \\\\\n", - "0 & 0 & 0 & 0 & 0 & 1 & 0 & 0 \\\\\n", - "1 & 0 & 0 & 0 & 0 & 0 & 0 & 0\n", - "\\end{pmatrix}\n", - "}_{\\text{Unitary } U}\n", - "\\underbrace{\n", - "\\begin{pmatrix}\n", - "1 \\\\ 0 \\\\ 0 \\\\ 0 \\\\ 0 \\\\ 0 \\\\ 0 \\\\ 0\n", - "\\end{pmatrix}\n", - "}_{|000\\rangle}\n", - "\\quad = \\quad\n", - "\\underbrace{\n", - "\\begin{pmatrix}\n", - "0 \\\\ 0 \\\\ 0 \\\\ 0 \\\\ 0 \\\\ 0 \\\\ 0 \\\\ 1\n", - "\\end{pmatrix}\n", - "}_{|111\\rangle}\n", - "$$\n", - "\n", - "The quantum circuit drawn below represents a **synthesis** of this unitary matrix. 
That is, it produces the same result as multiplying by $U$ above, regardless of the initial state.\n", - "\n", - "![](https://github.com/NVIDIA/cuda-q-academic/blob/main/images/toffoli-example-circuit.jpg?raw=true)\n", - "\n", - "\n", - "Wrapped up in this simple picture of unitary matrices and quantum circuits is incredible complexity which makes quantum computing so difficult.\n", - "\n", - "**Scaling:** First, the unitary matrix corresponding to a quantum circuit is huge, with $2^n \\times 2^n$ entries where $n$ is the number of qubits in the circuit. The matrix cannot be stored naively on any classical computer in its entirety for more than about 25 qubits.\n", - "\n", - "**Identifying the unitary:** Second, it is far from obvious in many cases what particular unitary matrix will solve a problem. Consider methods like VQE where the entire goal is to identify what sort of parameterized circuit (unitary matrix) solves the given problem.\n", - "\n", - "**Executing on a quantum device:** Finally, even if the unitary required is known, implementing it on a physical QPU requires it to be **synthesized (or compiled)** into a set of discrete gate operations compatible with the device. 
Furthermore, decisions need to be made concerning how these gates are performed and in which order to ensure that performance is achieved and bottlenecks are avoided.\n", - "\n", - "This is extremely challenging and gets even worse when considering the fact that different QPUs have different gate sets and hardware constraints, quantum error correction protocols add additional overhead, and time constraints require not only that an accurate circuit be synthesized, but that it is as simple as possible.\n", - "\n", - "It is no wonder why circuit synthesis is considered a leading AI for quantum use case as AI's aptitude for complex pattern recognition could provide a powerful means for compiling the unitaries necessary to run quantum algorithms at scale.\n", - "\n", - "In this lab, you will explore unitary synthesis and learn how to generate valid circuits given a target unitary.\n", - "\n", - "
\n", - "

Exercise 1

\n", - "

\n", - "To get a sense for how difficult compilation is, try to compile the state of a single qubit by hand with this interactive game.\n", - "Instructions for the game: You are given a random unitary and presented with two Bloch spheres depicting its action on the $\\ket{0}$ and $\\ket{1}$ states. Your job is to apply gate operations to get as close as possible to the target unitary. You will notice, that even when you can see exactly what each gate does, it is not obvious how to match the initial state exactly. Even if action on a single state is correct, the unitary may still be incorrect as it must properly operate on all basis states.\n", - "

\n", - "
\n" - ] - }, - { - "cell_type": "markdown", - "id": "4", - "metadata": { - "id": "e82bb399-5bdf-4858-9667-b38a283ab2f3" - }, - "source": [ - "## An Overview of the Diffusion Model\n", - "\n", - "Though many AI techniques have been explored for circuit synthesis, the rest of this lab will look at recent work [(Fürrutter, et al., 2024)](https://doi.org/10.1038/s42256-024-00831-9) that used diffusion models for the task. We'll begin with a general overview of diffusion models and then discuss the specific advantages they offer for circuit synthesis.\n", - "\n", - "This section is not a comprehensive or particularly deep lesson on diffusion models for which we point the reader to [this course](https://learn.nvidia.com/courses/course-detail?course_id=course-v1:DLI+S-FX-14+V1). To build intuition, we will first use the common example of image generation before applying these concepts to our main topic of unitary synthesis.\n", - "\n", - "\n", - "### The Core Idea: Denoising Images\n", - "\n", - "The primary objective of a diffusion model is to generate high-quality samples by learning to reverse a noise-adding process, rather than learning the data distribution directly. The training begins with a clean dataset — in this case, images — to which Gaussian noise is incrementally added in a \"forward process.\"\n", - "\n", - "\n", - "To reverse this, the model employs a U-net architecture to learn the \"reverse process\" of denoising. The U-net is trained to take a noisy image as input and predict the specific noise pattern that was added to it. The model's parameters are optimized by minimizing a loss function that measures the difference between the predicted noise and the actual noise.\n", - "\n", - "It's important to note that the U-net is named for its U-shaped layer structure; this is purely an architectural descriptor and has no relation to the mathematical symbol $U$ for a unitary matrix. 
In the final stage, called inference, the model acts like an artist who starts with a block of static and \"chisels away\" the noise to reveal a clear image.\n", - "\n", - "\n", - "
\n", - "

Exercise 2

\n", - "

\n", - "Try this widget to get some hands on experience for the diffusion model process. The widget is grossly oversimplified, but gives a visual representation of what is happening in the training and inference stages of a diffusion model. You'll first see how an image is deliberately corrupted with noise for training. Then, you'll watch the trained model take a fresh patch of random noise and reverse the process, generating a clean new image from scratch.\n", - "

\n", - "
\n", - "\n", - "## Applying Diffusion to Unitary Synthesis\n", - "\n", - "### The Core Idea: Denoising Circuits\n", - "Now, let's apply the same concepts of noising and denoising to our primary goal: unitary compilation. The process follows the diagram below. First, training circuits are embedded into a data structure amenable to the neural network. Then, just like with the images, noise is added to the training data and it is input into the U-net model. The model is also given the target unitary matrix and any specific constraints (e.g., which gates to use). The output of the U-Net model is the predicted noise, and it is trained until its prediction is as accurate as possible.\n", - "\n", - "\n", - "\n", - "The inference step (shown below) then uses this trained model. It takes a target unitary, compilation instructions, and random noise as input. The model then \"denoises\" this input to produce candidate circuits that implement the target unitary.\n", - "\n", - "\n", - "\n", - "In a sense, the process is simple and can be treated as a black box. But there are also many challenges, such as ensuring sufficient quality and quantity of training data, choosing the right model architecture, and deciding how data is encoded.\n", - "\n", - "The primary advantage of this approach for quantum circuit compilation is that the diffusion model learns how to denoise corrupted samples, not the distribution of the circuits themselves. Most other approaches require generating sample circuits and then comparing their behavior to the target. Such a requirement is extremely expensive, as it would require running many quantum circuit simulations, which limits scalability." - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": { - "id": "c98032b9-0191-4d06-8472-65e71f4f7a34" - }, - "source": [ - "## Preparing Quantum Circuit Data for the Model\n", - "\n", - "An important consideration for all AI applications is how the data is preprocessed before being input to the model. 
In this section we will explore a piece of this process related to **encoding** the quantum circuit. That is, representing the quantum circuit in such a way that it is amenable to AI. Note that the target unitary and text prompt inputs are themselves prepared with distinct neural networks which will not be discussed here.\n", - "\n", - "\n", - "The figure below explains how we translate a quantum circuit diagram into a numerical, or **tokenized matrix**. Think of the matrix as a timeline of the circuit. Each row is a dedicated qubit, and each column is a step in time, moving from left to right and top to bottom. We fill the matrix using a codebook, or vocabulary, where each gate has a unique number (e.g., $H = 1$, $CNOT = 2$). For gates involving multiple qubits, we use a negative sign to mark the \"control\" qubit. For instance, the Hadamard gate on qubit $q_0$ is encoded as the column vector $( 1,0,0,0)^T$. A CNOT gate with a control on $q_0$ and a target on $q_3$ is represented by the column $(-2,0,0,2)^T$. The example circuit shown results in a $4\\times 8$ matrix, which includes $6$ columns for gate operations and $2$ columns of zeroes for padding to signify the end of the circuit.\n", - "\n", - "\n", - "\n", - "For improved numerical stability during model training, the discrete tokenized matrix is **embedded** into a continuous **tensor**.\n", - "The idea is to replace every integer in our matrix, including $0$, with a vector chosen from a specially prepared set of orthonormal basis vectors of dimension $d$. This conversion is vital for our diffusion model to perform well.\n", - "\n", - "To illustrate, consider an embedding space of dimension $d=7$ with a fixed orthonormal basis $\\mathbf{v_0}, \\cdots, \\mathbf{v_6}$. Suppose $\\mathbf{v_0} = (0,0,0,0,0,0,1)^T$ and $\\mathbf{v_1} = (\\frac{1}{\\sqrt{7}}, \\cdots,\\frac{1}{\\sqrt{7}} )^T$. 
Then the tokenized column $(1,0,0,0)^T$, which represents a Hadamard gate on $q_0$, is transformed into the tensor: $$[\\mathbf{v}_1, \\mathbf{v}_0, \\mathbf{v}_0, \\mathbf{v}_0]^T = \\begin{pmatrix}\n", - "(\\frac{1}{\\sqrt{7}}, \\dots, \\frac{1}{\\sqrt{7}})^T \\\\\n", - "(0,0,0,0,0,0,1)^T\\\\\n", - "(0,0,0,0,0,0,1)^T\\\\\n", - "(0,0,0,0,0,0,1)^T\n", - "\\end{pmatrix}.$$\n", - "\n", - "
\n", - "

Exercise 3

\n", - "

\n", - "Write a function to encode the following circuit as a tensor using the vocabulary: \n", - "$H = 1$, $CNOT = 2$, $X = 3$, $CCX = 4$. Signal the end of the circuit with two columns of $5s$.\n", - "

\n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "9119e9aa-165b-40b1-b16f-46a212062138", - "outputId": "bc8359fa-d4da-4b47-cb52-c3b90f3172d5" - }, - "outputs": [], - "source": [ - "#TODO Fill in tokenized matrix\n", - "tokenized = np.array([[],\n", - " [],\n", - " [],\n", - " ]])\n", - "\n", - "\n", - "def generate_orthonormal_vectors_qr(n, d):\n", - " \"\"\"\n", - " Generate n orthonormal vectors of dimension d using QR decomposition.\n", - "\n", - " Parameters:\n", - " n (int): Number of orthonormal vectors to generate\n", - " d (int): Dimension of each vector\n", - "\n", - " Returns:\n", - " numpy.ndarray: Matrix of shape (n, d) where each row is an orthonormal vector\n", - " \"\"\"\n", - " if n > d:\n", - " raise ValueError(f\"Cannot generate {n} orthonormal vectors in {d}-dimensional space\")\n", - "\n", - " # Generate random matrix and compute QR decomposition\n", - " random_matrix = np.random.randn(d, n)\n", - " Q, _ = np.linalg.qr(random_matrix)\n", - "\n", - " # Return first n columns as rows\n", - " return Q[:, :n].T\n", - "\n", - "d = 6\n", - "vocab_length = 5\n", - "embedding_vectors = generate_orthonormal_vectors_qr(vocab_length, d)\n", - "print(\"The randomized orthonormal basis vectors, v_0,...v_d are\\n\")\n", - "print(embedding_vectors)\n", - "\n", - "tensor = np.zeros((tokenized.shape[0], tokenized.shape[1], d))\n", - "\n", - "\n", - "#TODO Fill in the tensor based on the tokenized matrix and the embedding vectors\n", - "\n", - "print(\"\\n For example, the token matrix element in the 2nd column and 3rd row is replaced with the basis vector:\")\n", - "print(tensor[2][1][:])\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "id": "7", - "metadata": { - "id": "6af10d6c-fb87-4bf3-a468-41bf8d5cb890" - }, - "source": [ - "## Decoding the Generated Tensors\n", - "\n", - "The diffusion model is 
trained to generate new tensors. For example suppose the diffusion model generated a tensor whose first element was $(0.02, 0.95, -0.01, 0.04, 0.08, -0.03, 0.10)^T$. This must be **decoded** back into an integer like those in a tokenized matrix to be interpretable as a quantum circuit.\n", - "\n", - "This decoding is performed on each vector of the output tensor in a two-step process to determine the corresponding integer token. First, we identify the best-matching basis vector from the vocabulary by finding which one maximizes the absolute value of the cosine similarity with the generated vector. The index of this basis vector, $k$, gives us the magnitude of our token.\n", - "\n", - "Second, we determine the token's sign by computing the standard cosine similarity between the generated vector and the winning basis vector, $\\mathbf{v_k}$. The sign of this result becomes the sign of the token.\n", - "\n", - "Therefore, if a generated vector is found to be closest to basis vector $\\mathbf{v_k}$, and their cosine similarity is negative, the decoded entry in the tokenized matrix becomes $−k$.\n", - "\n", - "By repeating this for every vector in the generated tensor, we reconstruct the entire tokenized matrix, which gives us the blueprint for a new quantum circuit, as depicted below.\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "

Exercise 4

\n", - "

\n", - "Write a function below to decode your tensor from Exercise 3 and recover the original tokenized matrix.\n", - "

\n", - "
\n", - "\n", - "\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8", - "metadata": { - "id": "cac7ef9b-79ea-434d-ade2-186c83fef9f1", - "outputId": "73f90270-91a2-4ee4-a87c-7c8a9fa7b528" - }, - "outputs": [], - "source": [ - "def find_closest_vector_signed(input_vector, matrix):\n", - " \"\"\"\n", - " Find the index of the most similar vector and return it with the sign of similarity.\n", - "\n", - " Parameters:\n", - " input_vector: shape (d,) - input vector\n", - " matrix: shape (6, d) - matrix of 6 vectors\n", - "\n", - " Returns:\n", - " sign * k where k is the index of most similar vector and sign is the sign of similarity\n", - " \"\"\"\n", - " # TODO Normalize vectors\n", - "\n", - " # TODO Compute cosine similarities\n", - "\n", - " # TODO Find index of maximum absolute similarity\n", - "\n", - " # TODO Get sign of similarity\n", - "\n", - " return int(sign * k)\n", - "\n", - "\n", - "# TODO use your function to recover the original tokenized matrix" - ] - }, - { - "cell_type": "markdown", - "id": "9", - "metadata": { - "id": "MDNVuoj1vQpK" - }, - "source": [ - "The `genqc` function then translates this decoded matrix into a quantum kernel using the specified mapping between gates and integers stored as the `vocab` vector." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": { - "id": "6kGk2jNVvYdB" - }, - "outputs": [], - "source": [ - "\n", - "vocab_list = ['h', 'cx', 'x', 'ccx'] # H is 1, CNOT is 2, etc.\n", - "vocab_dict = {i + 1: gate for i, gate in enumerate(vocab_list)}\n", - "\n", - "# Utility function to translate the decoded matrix into a quantum circuit\n", - "kernel = genqc_to_cudaq(decoded, vocab_dict)\n", - "\n", - "input_state = [0] * (2**4) # the genqc generated kernel requires an input state\n", - "print(cudaq.draw(kernel, input_state))" - ] - }, - { - "cell_type": "markdown", - "id": "11", - "metadata": { - "id": "f5245732-5338-4eb1-aa23-344b8868ca40" - }, - "source": [ - "Similar pre and postprocessing steps are present in all AI applications. When developing AI for quantum applications it is key to find clever ways to encode information such that it can be effectively processed by the AI model. " - ] - }, - { - "cell_type": "markdown", - "id": "12", - "metadata": { - "id": "541d0d7a" - }, - "source": [ - "## Generating Circuits with the Diffusion Model\n", - "\n", - "Now that we've covered the problem setup and data processing, let's put the theory into practice using a pretrained model. While the training process itself is a fascinating topic, we'll focus on using the model here. You can explore training in more detail in these courses on [the basics of AI](https://learn.nvidia.com/courses/course-detail?course_id=course-v1:DLI+S-FX-01+V1) and [diffusion models](https://learn.nvidia.com/courses/course-detail?course_id=course-v1:DLI+S-FX-14+V1).\n", - "\n", - "\n", - "The first step is to select a unitary to compile. This model has been trained to compile unitaries arising from circuits composed of the gates `['h', 'cx', 'z', 'x', 'ccx', 'swap']`. 
Although this is a universal gate set, meaning it contains enough operations needed to construct any possible quantum circuit, performing arbitrary computations requires an incredible number of gates. For this tutorial, we will use a model trained to generate kernels with at most 12 gates. Therefore, we can only expect the model to work for unitaries under this constraint. Let's consider here the compilation of one such unitary.\n", - "\n", - "We start by defining our unitary as a `numpy.array`:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": { - "id": "4edf7018" - }, - "outputs": [], - "source": [ - "U = np.array(\n", - " [\n", - " [ 0.70710678, 0. , 0. , 0. , 0.70710678, 0. , 0. , 0. ],\n", - " [ 0. , -0.70710678, 0. , 0. , 0. , -0.70710678, 0. , 0. ],\n", - " [-0.70710678, 0. , 0. , 0. , 0.70710678, 0. , 0. , 0. ],\n", - " [ 0. , 0.70710678, 0. , 0. , 0. , -0.70710678, 0. , 0. ],\n", - " [ 0. , 0. , 0.70710678, 0. , 0. , 0. , 0. , 0.70710678],\n", - " [ 0. , 0. , 0. , 0.70710678, 0. , 0. , 0.70710678, 0. ],\n", - " [ 0. , 0. , -0.70710678, 0. , 0. , 0. , 0. , 0.70710678],\n", - " [ 0. , 0. , 0. ,-0.70710678, 0. , 0. , 0.70710678, 0. ]\n", - " ],\n", - " dtype=np.complex128\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "14", - "metadata": { - "id": "e1453ad1-e4d4-4e38-9684-fd967bc1a8de" - }, - "source": [ - "Next, run the cell below to prepare a torch device with CUDA if available." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "ccb97524-0f13-49ff-893d-983c572e66c6", - "outputId": "d0225f95-c4f8-4a68-fda3-f8ad4a2797fa" - }, - "outputs": [], - "source": [ - "device = util.infer_torch_device() # Use CUDA if we can\n", - "util.MemoryCleaner.purge_mem() # Clean existing memory allocation\n", - "print(device)" - ] - }, - { - "cell_type": "markdown", - "id": "16", - "metadata": { - "id": "0047cbe9-a21c-4c72-a144-7a2929752329" - }, - "source": [ - "Then, load the pretrained model from Hugging Face." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/", - "height": 322, - "referenced_widgets": [ - "817fecbf36494cb59d5ff42caa1fe9e3", - "86ea7c338b5a407ab76aad8a45318149", - "c3641b0991174bfe9e3a0eb75229297e", - "db46b68da8fd4e46aca2b66606870ce4", - "a83de155c9434f128729e7347db7a36d", - "081b41ba50f045b49eb0632462453955", - "a5477470671e4aadb6819d358042bcb9", - "b7485d048d7f4a7a993bc01b5a278da2", - "487f574ae85e4be4b762eb361e434260", - "80f02a5129f149358ae4f2a230c7c169", - "edcd40c228f94111bb9247c77dbac612", - "c662e4d797bc47a08c32540e25deb9a3", - "44496a784f1a4d9ebfabcea69113ffc3", - "f0893c5606d24a7685e455bce32fbc3a", - "6ebf1e1dc9fd41f1b1b969267a1ed4d2", - "3e1a67f27ddc47da9654640415abcbc1", - "deebca617f664d3d80d58a4ce749147c", - "f88617e7ffbf4574aad45263697b1b2a", - "d61eb3bf737f4128b955fecb3dc9d9ab", - "f1f1d933d24f4ad3b8e4e9c98748470c", - "d4e31a5cc2e943f2a98e35eca2959f1d", - "daebf2fefc4e4ab7a4bbbe212155b679", - "d96d6d7e91434bb682d6e9f6122191ae", - "fd15130ef5ec49c1b6aed02762af346f", - "220dcd9c430447a2b43c7afd3fb77c87", - "694924a9c5e84629bd547865a00afa86", - "105804d846024856a86ff4ef0dd211e3", - "998c97c5a6ea4fff9940a37ae3e4d495", - "71dd0a545c8a4de5b74bffb636cea068", - "526512b558c24e18ae5638dd2cf26145", - 
"e0aad28ff3fb4cecbb6d9de4e8faafc0", - "29bccb934f7940309e73019628047d80", - "e10d01edf5a34456a8c650e10083bd88", - "77cb6f0eb0214050bbf91e1cd319df94", - "910f7c3b17d84062ae9cdfddd4892e1c", - "d931385cc1cb4f26a1099c688266953a", - "2174f58086db4250bb1d4a47bdbc2697", - "ee5c8b2902a0463f897837ff8be3907d", - "bf6c2ed3f2ac49b28ebec34e588629cb", - "13e57821f933459b9424da3e82dc6612", - "2c26e1222cc54ed2bbac10d2d9e78d38", - "23a348c1207343dda1aedd863fde57f2", - "db26355b91ff43e78e26331a8b3f10db", - "4bedbd4810c04b829e01b12ca9ff4392" - ] - }, - "id": "a6b18d03-6976-4eec-9ab0-7b763d73ad17", - "outputId": "8b0dc2e7-5b71-4604-ceba-9f081ab83fea" - }, - "outputs": [], - "source": [ - "pipeline = DiffusionPipeline.from_pretrained(\"Floki00/qc_unitary_3qubit\", device) # Download from Hugging Face\n", - "pipeline.scheduler.set_timesteps(40)" - ] - }, - { - "cell_type": "markdown", - "id": "18", - "metadata": { - "id": "bc112d57-71a8-403c-a0a9-d2f7d0eb7492" - }, - "source": [ - "Next, we set the parameters the model was trained on. Note that these are fixed and depend on the pre-trained model. The gate types are pulled from `pipeline.gate_pool` and are used to build a vocabulary." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "19", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "fe0b7730", - "outputId": "0fd81b65-0afc-436c-ab4a-ffa691d85b4e" - }, - "outputs": [], - "source": [ - "print(pipeline.gate_pool)\n", - "vocab = {i + 1: gate for i, gate in enumerate(pipeline.gate_pool)}\n", - "num_of_qubits = 3 # Number of qubits\n", - "max_gates = 12" - ] - }, - { - "cell_type": "markdown", - "id": "20", - "metadata": { - "id": "c0ba07c5-075a-46ca-9a22-cc758ce33cb9" - }, - "source": [ - "The model can compile circuits composed with any subset of these gates as long as the proper \"Compile using: [,,,]\" format is used." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", - "metadata": { - "id": "0f0d3e85-a22e-4a1b-ad17-b02417e56734" - }, - "outputs": [], - "source": [ - "# Notice how the x gate is missing from the prompt indicating that we want to generate circuits without the x gate\n", - "prompt = \"Compile using: ['h', 'cx', 'z', 'ccx', 'swap']\"" - ] - }, - { - "cell_type": "markdown", - "id": "22", - "metadata": { - "id": "cab3a1e8-a404-4ebb-9a58-251d47eebb4a" - }, - "source": [ - "The code below will now use this prompt and the unitary (`U`) defined above to sample (or generate) 128 circuits. Because the neural network can only process real numbers, we first split the unitary matrix U into its real and imaginary components and then combine them into a single input tensor. The `infer_comp.generate_comp_tensors` command calls the inference procedure and produces a set of output matrices (`out_matrices`) representing the circuit samples." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": { - "id": "56edb114-3ef7-455b-b4b8-0e262e7b6316" - }, - "outputs": [], - "source": [ - "# Number of circuits to sample from the trained diffusion model\n", - "samples = 128\n", - "\n", - "# As the neural network works only with real numbers, we first separate\n", - "# the two components and create a 2-dimensional tensor for the magnitude\n", - "# of each component:\n", - "U_r, U_i = torch.Tensor(np.real(U)), torch.Tensor(np.imag(U))\n", - "U_tensor = torch.stack([U_r, U_i], dim=0)\n", - "\n", - "out_matrices = infer_comp.generate_comp_tensors(\n", - " pipeline=pipeline,\n", - " prompt=prompt,\n", - " U=U_tensor,\n", - " samples=samples,\n", - " system_size=num_of_qubits, # Max qubit number allowed by the model (this model is only trained with 3 qubits)\n", - " num_of_qubits=num_of_qubits,\n", - " max_gates=max_gates,\n", - " g=10, # classifier-free-guidance (CFG) scale\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": 
"24", - "metadata": { - "id": "81f09d75-a960-4456-bdb9-57ef7668864e" - }, - "source": [ - "The matrix for the first circuit generated by the model is printed below." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "25", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "13c1af73-11bc-46ca-99dd-bccea29f95c1", - "outputId": "d1644bc4-39d8-41d2-f17b-52fde3f68c69" - }, - "outputs": [], - "source": [ - "out_matrices[0]" - ] - }, - { - "cell_type": "markdown", - "id": "26", - "metadata": { - "id": "5605d0a1-626d-4aa5-8721-0416470215dc" - }, - "source": [ - "## Converting matrices to CUDA-Q kernels" - ] - }, - { - "cell_type": "markdown", - "id": "27", - "metadata": { - "id": "8b43d006-1e6a-4911-9283-b5a96eb5cf06" - }, - "source": [ - "Next, we convert each generated matrix into a `cudaq.kernel`." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "28", - "metadata": { - "id": "01efc6f7-8f0f-425f-be58-7fcaf880d0fd" - }, - "outputs": [], - "source": [ - "cudaq.set_target(\"qpp-cpu\") # Note that cpu is faster for 3-qubit kernels\n", - "\n", - "# cudaq.set_target('nvidia') # Set to GPU for larger circuits" - ] - }, - { - "cell_type": "markdown", - "id": "29", - "metadata": { - "id": "229ab19e" - }, - "source": [ - "It is possible that some of the generated matrices might not correspond to a valid kernel. For example, a generated matrix might have encoded a CNOT gate with two controls and no target (i.e., a column of the matrix might be $[-2,-2,0]$), and another generated matrix may have encoded an $H$ and $Z$ gate, applied separately and simultaneously to two qubits at step 1 (i.e., the first column of the matrix might be $[1,0,3]$). Neither of these are meaningful quantum kernels. Therefore, in the next code block, we filter out only the valid matrices." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "30", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/", - "height": 87, - "referenced_widgets": [ - "b310c942e2cc4d7085c922bed5358160", - "aa900cd018d1403f84aa535058d507f9", - "4dc93ecde85c442d97266161bb5b0c02", - "3062232446fe4d8ea0a94ad3e2e7f961", - "4a6f46a6bf5b450ea634e86066557209", - "39e45c22c533420f830bfb050710ce18", - "38f77d3adea14406991d812dd550309b", - "e900a980155044799e6b0afdbfed061e", - "737ea14ba4c74ddf8cc270df7304ad25", - "ef3accf5708f4148a4925e25eb8d5cbd", - "f4f61fc340b74441b39358934a75a517" - ] - }, - "id": "9d5b9ae5-c11e-413b-af00-4412dc86b48d", - "outputId": "95fddc59-2f7a-46f9-b06f-1f9bc4258ee4" - }, - "outputs": [], - "source": [ - "kernel_list = []\n", - "valid_matrices = []\n", - "\n", - "invalid_matrices = 0\n", - "for out_matrices_i in tqdm(out_matrices):\n", - "\n", - " # Use a try-except to catch invalid matrices(if any)\n", - " try:\n", - " kernel = genqc_to_cudaq(out_matrices_i, vocab) # Convert out_matrices to CUDA-Q kernels\n", - " except:\n", - " kernel = None\n", - "\n", - " if kernel:\n", - " kernel_list.append(kernel)\n", - " valid_matrices.append(out_matrices_i)\n", - " else:\n", - " invalid_matrices += 1\n", - "\n", - "print(f\"The model generated {invalid_matrices} invalid matrices that do not correspond to a circuit.\")" - ] - }, - { - "cell_type": "markdown", - "id": "31", - "metadata": { - "id": "0a5823e6-9189-46b7-974d-43a2a0cad531" - }, - "source": [ - "For example, the following generated matrix" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "32", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "dc7d35c7-a7cb-4663-8220-b1030e534deb", - "outputId": "97f8cacf-0eba-4249-c898-566eaf449d58" - }, - "outputs": [], - "source": [ - "valid_matrices[0]" - ] - }, - { - "cell_type": "markdown", - "id": "33", - "metadata": { - "id": "47116105-b1e1-454e-80fc-6a03061045f5" - }, - 
"source": [ - "corresponds to the following `cudaq.kernel`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "34", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "4835c288-b018-49d7-b307-3ff05c2c8c56", - "outputId": "54038bab-34ad-4a21-f8d2-e046bc32f3e5" - }, - "outputs": [], - "source": [ - "# Arbitrary input state to the circuit for plotting\n", - "\n", - "input_state = [0] * (2**num_of_qubits)\n", - "\n", - "print(cudaq.draw(kernel_list[0], input_state))" - ] - }, - { - "cell_type": "markdown", - "id": "35", - "metadata": { - "id": "7W5DDsXP39Zr" - }, - "source": [ - "Our first filter removed circuits that were structurally invalid, but this doesn't guarantee the remaining ones are correct. Think of it as checking for spelling errors before checking for meaning. Now, in the next section, we'll perform that second check: filtering for the circuits that actually approximate the target unitary." - ] - }, - { - "cell_type": "markdown", - "id": "36", - "metadata": { - "id": "59809685-5d70-4612-8642-c4e924a1ab6c" - }, - "source": [ - "## Evaluating Sampled Unitaries\n", - "\n", - "As mentioned earlier, one of the key advantages of using diffusion models (DMs) as a unitary compiler is the ability to rapidly sample many circuits. However, as is common in machine learning, the model has a certain accuracy, meaning not all generated circuits are expected to exactly compile the specified unitary. In this section, you will evaluate how many of the generated circuits are indeed correct and then perform post-selection to identify (at least) one circuit that successfully performs the desired unitary operation." - ] - }, - { - "cell_type": "markdown", - "id": "37", - "metadata": { - "id": "f5d1e8ca-3abf-47b4-bbca-932bdff86f94" - }, - "source": [ - "First, calculate the $2^n\\times2^n$ unitary matrix $U$ implemented by each of the kernels. 
The elements of this matrix are defined by the transition amplitudes between the basis states, which can be expressed as:\n", - "$$\n", - "\\begin{equation}\n", - " \\langle i|kernel|j\\rangle = U_{ij},\n", - "\\end{equation}\n", - "$$\n", - "where $|i\\rangle$ and $|j\\rangle$ are computational basis states (typically in the $Z$-basis), with $|i\\rangle$ representing the standard basis vector of dimension $2^n$ that has a $1$ in the $i^{th}$ position and $0$ elsewhere.\n", - "\n", - "
\n", - "

Exercise 5

\n", - "

\n", - "Write a function to compute the expression above from the CUDA-Q kernel. Compute the unitaries for each of the 128 sampled circuits.\n", - "

\n", - "
" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "38", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/", - "height": 70, - "referenced_widgets": [ - "476d90bf41d6406c8d76e3a711be7f74", - "5eeb57c04b964b178f32ae64fd1a4b37", - "db8c05704fd74268a13695e68427b817", - "12f353ac2bee47b7a9fcacbd95f0bd5b", - "fb7bef7423e0452c9538386408373d12", - "3728f02d8a45406a9d9f25fcde7533b6", - "ecfa17414df440008d57b784000b3c17", - "d27061d606424719bcd1ab57aeb6a22f", - "36f6b0f4543643b388dfd6b5a2996f3a", - "f03ac25204bf429b8b356bde6a52c521", - "e1ca83fab88b46e290b9f70d612193a9" - ] - }, - "id": "c689aacd-caf6-45e6-a289-608945c2d2b5", - "outputId": "16c24654-8b58-4391-bca8-06f7d190c633" - }, - "outputs": [], - "source": [ - "def get_unitary(kernel: cudaq.PyKernel) -> np.ndarray:\n", - " \"\"\"\n", - " Computes the unitary from a given CUDA-Q kernel\n", - "\n", - " Parameters:\n", - " kernel: cudaq.PyKernel\n", - "\n", - " Returns:\n", - " np.array of the unitary corresponding to U_ij\n", - " \"\"\"\n", - "\n", - " # TODO write the function\n", - "\n", - "# TODO Compute all 128 unitaries\n" - ] - }, - { - "cell_type": "markdown", - "id": "39", - "metadata": { - "id": "c5eb62ba-40a6-486c-b307-47b11b9b3226" - }, - "source": [ - "For example, the circuit printed above corresponds to the following unitary:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "40", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "4a471612-a3e4-4bea-9f61-dc823283af95", - "outputId": "b8c122e1-0378-4ba8-8b28-c3216e00ad12" - }, - "outputs": [], - "source": [ - "np.set_printoptions(linewidth=1000)\n", - "print(np.round(got_unitaries[0], 4))" - ] - }, - { - "cell_type": "markdown", - "id": "41", - "metadata": { - "id": "e5241775-2692-4b10-b9ef-a75a3ddb0eb8" - }, - "source": [ - "Now that we have the unitaries for each of the kernels, we compare them to the user provided unitary matrix, `U`.\n", - "To do so, we compute 
the infidelity between the exact unitary and the generated ones.\n", - "The infidelity is defined as follows:\n", - "\n", - "\\begin{equation}\n", - "\\text{Infidelity}(U, V) = 1 - \\left|\\frac{1}{2^n} \\text{Tr} (U^\\dagger V) \\right|^2.\n", - "\\end{equation}\n", - "\n", - "The infidelity is a value between 0 and 1, where 0 indicates that the unitaries are identical (up to a global phase).\n", - "\n", - "
\n", - "<div class=\"alert alert-block alert-success\">\n", - "<b>Exercise 5</b>\n", - "\n", - "Compute the infidelities for each sampled unitary and plot a histogram based on infidelity. How many circuits had a near zero infidelity?\n", - "</div>\n", - "\n"
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "44", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "656baf92-5a1e-4227-ac1f-67b5f9394310", - "outputId": "1d8a80d0-733c-422f-a598-1b4301f93d31" - }, - "outputs": [], - "source": [ - "min_index = np.argmin(infidelities)\n", - "\n", - "print(f\"The best kernel has an infidelity of {infidelities[min_index]:0.2},\")\n", - "\n", - "input_state = [0] * (2**num_of_qubits)\n", - "input_state[0] = 1\n", - "print(cudaq.draw(kernel_list[min_index], input_state))\n", - "\n", - "print(f\"with the unitary:\")\n", - "print(np.round(got_unitaries[min_index], 4))" - ] - }, - { - "cell_type": "markdown", - "id": "45", - "metadata": { - "id": "6e724b39-cc63-40c8-8ad0-9d90d94e23bd" - }, - "source": [ - "which, as we can see, exactly compiled our targeted unitary:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "46", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "46d190d6-7bed-4490-a343-bf856190a101", - "outputId": "0af2edd7-126c-4b4d-ee97-689ed145d00f" - }, - "outputs": [], - "source": [ - "print(np.round(U, 4))" - ] - }, - { - "cell_type": "markdown", - "id": "47", - "metadata": { - "id": "f8bf2fe6-07ca-4bfa-a46b-96a91daeb185" - }, - "source": [ - "## Select a circuit that meets specific criteria" - ] - }, - { - "cell_type": "markdown", - "id": "48", - "metadata": { - "id": "983dc8c1-f3ae-4d06-ad7a-e57addfc6e74" - }, - "source": [ - "As you have seen above, you now have almost 30 kernels that compile the desired unitary! This is particularly valuable when dealing with hardware constraints, where, for instance, you might want to avoid using certain qubits or specific gates. Here are a few scenarios where these sorts of choices matter. The rest of the notebook will work through the first case, but you can come back and work through any of these preferences.\n", - "\n", - "1. 
A common practice for reducing circuit overhead is to minimize the number of Toffoli gates, as they are particularly costly and error-prone due to the large number of non-Clifford T gates required for their implementation.\n", - " \n", - "2. Certain QPUs like neutral atom and superconducting processors can trivially implement $Z$ gates using software control, while $X$ gates require a more error-prone pulse. Thus, for these modalities, it is favorable to produce circuits with a bias towards $Z$ gates over $X$ gates, holding the number of two-qubit gates constant.\n", - "\n", - "3. When considering quantum error correction (QEC), the type of QEC code can dictate which types of gates are transversal, meaning they can be trivially applied to all data qubits to produce the logical gate. This can change from code to code, so selecting circuits which maximize the number of transversal gates is ideal. This can even mean favoring transversal CNOT gates over non-transversal single-qubit gates.\n", - "\n", - "Going back to example 1, minimizing the number of Toffoli gates (`ccx`), let's sort our valid circuits for those with few `ccx` gates."
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "49", - "metadata": { - "id": "acb0f4d8-2e8a-4b32-9e37-006571ca2aa5" - }, - "outputs": [], - "source": [ - "# First, remove possible duplicates and only pick distinct circuits\n", - "_, idx_unique = np.unique(np.array(valid_matrices), axis=0, return_index=True)\n", - "unique_tensors = torch.stack(valid_matrices)[idx_unique]\n", - "unique_infidelities = infidelities[idx_unique]\n", - "unique_kernels = [kernel_list[idx] for idx in idx_unique]\n", - "\n", - "# Then, find the correct circuits\n", - "idx_correct = torch.argwhere(torch.tensor(unique_infidelities) < 0.01).flatten()\n", - "correct_tensors = unique_tensors[idx_correct]\n", - "print(f\"The model generated {correct_tensors.shape[0]} distinct correct circuits.\")\n", - "\n", - "# Now flatten the last two dimensions (related to the actual circuit)\n", - "# and find out how many 5's (i.e., ccx) gates each circuit has:\n", - "num_ccx = (correct_tensors.flatten(1, 2) == 5).sum(1)\n", - "print(\"These circuits have this number of ccx gates:\", num_ccx)" - ] - }, - { - "cell_type": "markdown", - "id": "50", - "metadata": { - "id": "7625aac1-78d8-4ce3-9765-88ae48c92dd3" - }, - "source": [ - "It appears that the diffusion model requires at least one Toffoli gate to compile the unitary. You can now print a few of these circuits to select the one that best suits the situation or to identify any noteworthy patterns the model employs for this specific unitary." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "51", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "535501a6-cafb-48e6-b53c-636d7ac5a815", - "outputId": "f493183e-23f8-4707-d4fc-e01f8634d959" - }, - "outputs": [], - "source": [ - "# Get the correct kernels\n", - "correct_kernels = [unique_kernels[idx] for idx in idx_correct]\n", - "\n", - "# Get the ones with only one ccx\n", - "correct_kernels_ccx1 = [correct_kernels[idx] for idx in torch.argwhere(num_ccx == 1).flatten()]\n", - "\n", - "# Draw a few of these circuits\n", - "for kernel in correct_kernels_ccx1[:2]:\n", - " print(cudaq.draw(kernel, input_state))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "52", - "metadata": { - "id": "80e55b79-2529-4e8a-b8c3-d034afa07588", - "outputId": "9eead3d9-5310-41df-df05-f4c742d9b778" - }, - "outputs": [], - "source": [ - "# Get the tensors with more than one ccx\n", - "correct_kernels_ccx3 = [correct_kernels[idx] for idx in torch.argwhere(num_ccx == 3).flatten()]\n", - "\n", - "# Draw a few of these circuits\n", - "for kernel in correct_kernels_ccx3[:2]:\n", - " print(cudaq.draw(kernel, input_state))" - ] - }, - { - "cell_type": "markdown", - "id": "53", - "metadata": { - "id": "1fd9a7f3" - }, - "source": [ - "\n", - "## Compiling Noisy Circuits" - ] - }, - { - "cell_type": "markdown", - "id": "54", - "metadata": { - "id": "03b7431a" - }, - "source": [ - "In this section, we'll define a `noise_model` and verify that a lower number of `ccx` gates yields better results under this noise model.\n", - "For more details, see the [Noisy Simulation example](https://nvidia.github.io/cuda-quantum/latest/examples/python/noisy_simulations.html) in CUDA-Q documentation. \n", - "\n", - "The cell below defines a depolarizing noise channel and applies it to all `ccx` and `cx` gates. 
" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "55", - "metadata": { - "id": "f556cf2b-ce21-47d2-824e-fd56737f9212" - }, - "outputs": [], - "source": [ - "# Define a noise model\n", - "\n", - "def tensor(matrices):\n", - " return functools.reduce(np.kron, matrices)\n", - "\n", - "def depolarizing_kraus(p: float, n: int = 2):\n", - " I = np.array([[1, 0], [0, 1]], dtype=np.complex128)\n", - " X = np.array([[0, 1], [1, 0]], dtype=np.complex128)\n", - " Y = np.array([[0, -1j], [1j, 0]], dtype=np.complex128)\n", - " Z = np.array([[1, 0], [0, -1]], dtype=np.complex128)\n", - "\n", - " paulis = [I, X, Y, Z]\n", - "\n", - " # Kraus operators\n", - " kraus_operators = [np.sqrt(1 - p) * tensor([I] * n)]\n", - " coeff = np.sqrt(p / (4**n - 1))\n", - "\n", - " for paulis in itertools.product(paulis, repeat=n):\n", - " if not all(np.array_equal(p, I) for p in paulis):\n", - " kraus_operators.append(coeff * tensor(paulis))\n", - "\n", - " return kraus_operators\n", - "\n", - "\n", - "noise_model = cudaq.NoiseModel()\n", - "noise_model.add_all_qubit_channel(\"ccx\", cudaq.KrausChannel(depolarizing_kraus(0.1, n=3)))\n", - "noise_model.add_all_qubit_channel(\"cx\", cudaq.KrausChannel(depolarizing_kraus(0.01)))" - ] - }, - { - "cell_type": "markdown", - "id": "56", - "metadata": {}, - "source": [ - "To simulate a noisy circuit, using a density matrix simulator is convenient. To call up the density matrix simulator, simply change the target with `cudaq.set_target(\"density-matrix-cpu\")`." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "57", - "metadata": { - "id": "b889d8fa-521c-4db8-86e3-1dd709096fd2" - }, - "outputs": [], - "source": [ - "# Example of execution\n", - "cudaq.set_target(\"density-matrix-cpu\")\n", - "shots_count = 5000\n", - "\n", - "# Sample with noiseless simulation\n", - "result = dict(cudaq.sample(correct_kernels[0], input_state, shots_count=shots_count).items())" - ] - }, - { - "cell_type": "markdown", - "id": "58", - "metadata": {}, - "source": [ - "\n", - "The `cudaq.sample` function can take a noise model as an argument to perform a simulation with noise: `cudaq.sample(kernel, noise_model=noise_model)`." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "59", - "metadata": { - "id": "af9b6cac-9089-4733-84da-a24c2af68e2a" - }, - "outputs": [], - "source": [ - "# Sample using noisy simulation for a kernel with only 1 ccx gates\n", - "result_ccx = dict(\n", - " cudaq.sample(\n", - " correct_kernels_ccx1[0],\n", - " input_state,\n", - " noise_model=noise_model,\n", - " shots_count=shots_count,\n", - " ).items()\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "60", - "metadata": { - "id": "c004f110-3c99-481f-b17b-36227456ac55" - }, - "outputs": [], - "source": [ - "# Sample using noisy simulation for a kernel with 3 ccx gates\n", - "result_ccx3 = dict(\n", - " cudaq.sample(\n", - " correct_kernels_ccx3[0],\n", - " input_state,\n", - " noise_model=noise_model,\n", - " shots_count=shots_count,\n", - " ).items()\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "61", - "metadata": { - "id": "d6525651-8e38-4093-ae16-4d1dc9aa7cd1", - "outputId": "f55067d1-c1d7-4ad7-80eb-e1f110612d30" - }, - "outputs": [], - "source": [ - "# Merge all bitstrings to ensure consistency across results\n", - "bitstrings = sorted(set(result_ccx.keys()) | set(result.keys()) | set(result_ccx3.keys()))\n", - "\n", - "# Function to extract 
probabilities\n", - "def get_probabilities(result, keys):\n", - " total_shots = sum(result.values())\n", - " return [result.get(k, 0) / total_shots for k in keys]\n", - "\n", - "# Extracting probabilities\n", - "prob = get_probabilities(result, bitstrings)\n", - "prob_ccx = get_probabilities(result_ccx, bitstrings)\n", - "prob_ccx3 = get_probabilities(result_ccx3, bitstrings)\n", - "\n", - "# Bar width\n", - "bar_width = 0.3\n", - "x = np.arange(len(bitstrings))\n", - "\n", - "# Plot bars\n", - "plt.figure(figsize=(10, 6))\n", - "plt.bar(x - bar_width, prob, bar_width, label=\"Noiseless simulation\", color=\"#808080\")\n", - "plt.bar(x, prob_ccx, bar_width, label=\"Noisy simulation w/ 1 ccx\", color=\"#76B900\")\n", - "plt.bar(\n", - " x + bar_width,\n", - " prob_ccx3,\n", - " bar_width,\n", - " label=\"Noisy simulation w/ 3 ccx\",\n", - " color=\"#c4e884\",\n", - ")\n", - "\n", - "# Labels\n", - "plt.xticks(x, bitstrings)\n", - "plt.xlabel(\"Bitstring Outcomes\")\n", - "plt.ylabel(\"Probability\")\n", - "plt.title(\"Comparison of kernels with different number of ccx gates\")\n", - "plt.legend(fontsize=14)\n", - "\n", - "# Show plot\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "id": "62", - "metadata": { - "id": "2a6ba367-9389-4eee-b679-18cc1a147959" - }, - "source": [ - "This histogram represents why unitary compilation is so important. With a small three qubit example, running three circuits that produce the exact same unitary, the sampled circuits with more multi-qubit gates produce inferior results. Consider that for a fully scaled up application, good compilation might be the difference between success and a meaningless output or infeasible runtime. \n", - "\n", - "## Summary\n", - "\n", - "AI has the potential to be a powerful tool for compilation especially at scale. Researchers might be able to use such a tool to better understand the impacts of device noise or identify patterns which make for more favorable circuits. 
The AI workflow you explored today is also highly flexible. It can consider different gate sets, circuit lengths, and many other refinements to improve results. Keep an eye out for future research in this space as different AI techniques are applied to more complex quantum circuit compilation tasks." - ] - } - ], - "metadata": { - "accelerator": "GPU", - "colab": { - "gpuType": "T4", - "provenance": [] - }, - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/ai-for-quantum/README.md b/ai-for-quantum/README.md index 902b968..ae4d732 100644 --- a/ai-for-quantum/README.md +++ b/ai-for-quantum/README.md @@ -16,11 +16,6 @@ For example, the first notebook guides learners through using a pretrained diffu * ***Hands-on experience inserting AI models within quantum workflows:*** Learn how to prepare quantum data for input. ## Notebooks -The Jupyter notebooks in this folder are designed to run on GPUs in an environment with CUDA-Q and Python. For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). A Dockerfile and requirements.txt are also included in the main directory of the repository to help get you set up. +The Jupyter notebooks in this folder are designed to run on GPUs in an environment with CUDA-Q and Python. For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). 
-Otherwise, if you have set up an account in Google CoLab, -simply log in to the account, then click on the icons below to run the notebooks on the listed platform. - -| Notebook | Google Colab | -| ----------- | ----------- | -|Lab 1 - Compiling Unitaries with Diffusion Models | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/ai-for-quantum/01_compiling_unitaries_using_diffusion_models.ipynb)| +Otherwise, explore our [Learning Pathways page](https://nvidia.github.io/cuda-q-academic/learningpath.html) for additional cloud-based options to run these notebooks. diff --git a/chemistry-simulations/README.md b/chemistry-simulations/README.md index 06e68ab..5a32702 100644 --- a/chemistry-simulations/README.md +++ b/chemistry-simulations/README.md @@ -4,17 +4,6 @@ This collection of notebooks explores techniques for calculating molecular groun *Pre-requisites:* Learners should have familiarity with Jupyter notebooks and programming in Python and CUDA-Q. Since these notebooks cover chemistry and materials science simulations, domain knowledge is helpful. It is assumed the reader has some familiarity already with quantum computation and is comfortable with braket notation and the concepts of qubits, quantum circuits, measurement, and circuit sampling. The CUDA-Q Academic course entitled "Quick Start to Quantum Computing with CUDA-Q" provide a walkthrough of this prerequisite CUDA-Q knowledge if the reader is new to quantum computing and CUDA-Q or needs refreshing. ## Notebooks -The Jupyter notebooks in this folder are designed to run on GPUs in an environment with CUDA-Q and Python. For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). A Dockerfile and requirements.txt are also included in the main directory of the repository to help get you set up. 
- -Otherwise, if you have set up an account in Google CoLab, -simply log in to the account, then click on the icons below to run the notebooks on the listed platform. - -| Notebook | Google Colab | -| ----------- | ----------- | -|Lab 1 - Solving the Ground State Problem with VQE and AI (Generative Quantum Eigensolver) | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/chemistry-simulations/vqe_and_gqe.ipynb)| -|Lab 2 - ADAPT VQE | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/chemistry-simulations/adapt_vqe.ipynb)| -|Lab 3 - Krylov Quantum Subspace Diagonalization | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/chemistry-simulations/krylov_subspace_diagonalization.ipynb)| -|Lab 4 - QM/MM: Combining VQE with a Polarizeable Embedding Framework | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/chemistry-simulations/qmmm.ipynb)| -|Lab 5 - Canonical, Iterative, and Bayesian Quantum Phase Estimation | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/chemistry-simulations/qpe.ipynb)| - +The Jupyter notebooks in this folder are designed to run on GPUs in an environment with CUDA-Q and Python. For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). +Otherwise, explore our [Learning Pathways page](https://nvidia.github.io/cuda-q-academic/learningpath.html) for additional cloud-based options to run these notebooks. 
diff --git a/dynamics101/README.md b/dynamics101/README.md index 097083f..1a65f27 100644 --- a/dynamics101/README.md +++ b/dynamics101/README.md @@ -18,13 +18,6 @@ Designed for advanced users with a solid background in quantum mechanics and fam * ***Model Time-Dependent Interactions:*** Learn to implement time-dependent Hamiltonian terms and custom operators to simulate dynamic quantum interactions. ## Notebooks -The Jupyter notebooks in this folder are designed to run on GPUs in an environment with CUDA-Q and Python. For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). A Dockerfile and requirements.txt are also included in the main directory of the repository to help get you set up. - -Otherwise, if you have set up an account in Google CoLab, -simply log in to the account, then click on the icons below to run the notebooks on the listed platform. - -| Notebook | Google Colab | -| ----------- | ----------- | -|Lab 1 - Jaynes-Cummings Hamiltonian | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/dynamics101/01_Jaynes_Cummings.ipynb)| -| Lab 2 - Time Dependent Hamiltonians |[![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/dynamics101/02_Time_Dependent.ipynb) | ||| +The Jupyter notebooks in this folder are designed to run on GPUs in an environment with CUDA-Q and Python. For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). +Otherwise, explore our [Learning Pathways page](https://nvidia.github.io/cuda-q-academic/learningpath.html) for additional cloud-based options to run these notebooks. 
diff --git a/hybrid-workflows/README.md b/hybrid-workflows/README.md index 4c7be8b..2f7ed84 100644 --- a/hybrid-workflows/README.md +++ b/hybrid-workflows/README.md @@ -3,11 +3,6 @@ Welcome to the hybrid workflows learning path. In this path you will learn about workflows that leverage classical AI supercomputing alongside QPUs. Note that most of the other CUDA-Q academic learning pathways focus on hybrid workflows too. This path will capture lessons that are more general or do not fit within the other more targeted pathways like "chemistry simulations". ## Notebooks -The Jupyter notebooks in this folder are designed to run on GPUs in an environment with CUDA-Q and Python. For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). A Dockerfile and requirements.txt are also included in the main directory of the repository to help get you set up. +The Jupyter notebooks in this folder are designed to run on GPUs in an environment with CUDA-Q and Python. For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). -Otherwise, if you have set up an account in Google CoLab, -simply log in to the account, then click on the icons below to run the notebooks on the listed platform. - -| Notebook | Google Colab | -| ----------- | ----------- | -|Lab 1 - Quantum Enhanced Memetic-Tabu Search Applied to the LABS Problem | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/hybrid-workflows/01_quantum_enhanced_optimization_LABS.ipynb) \ No newline at end of file +Otherwise, explore our [Learning Pathways page](https://nvidia.github.io/cuda-q-academic/learningpath.html) for additional cloud-based options to run these notebooks. 
diff --git a/images/Adapt-QAOA-flowchart.png b/images/Adapt-QAOA-flowchart.png new file mode 100644 index 0000000..d836c91 Binary files /dev/null and b/images/Adapt-QAOA-flowchart.png differ diff --git a/images/QAOA-GPT-flowchart.png b/images/QAOA-GPT-flowchart.png new file mode 100644 index 0000000..86041ed Binary files /dev/null and b/images/QAOA-GPT-flowchart.png differ diff --git a/instructions.md b/instructions.md deleted file mode 100644 index 56008fa..0000000 --- a/instructions.md +++ /dev/null @@ -1,52 +0,0 @@ -This directory contains Jupyter notebooks that can be run on a local installation of CUDA-Q. The `requirements.txt` and `Dockerfile` are -included here. Please refer to the [Quick Start Guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#validate-installation) -for instructions on how to install CUDA-Q on your system. - -Most of the material in these notebooks can be run without a GPU. However, the portions of the notebook that use MPI will require -a GPU to execute. - -If you don't have CUDA-Q installed on your system, you can run the notebooks in Google Colab. - -## Building the container for local execution - -The following command will build the CUDA Quantum Academic container. To -customize this container, make edits to the included `Dockerfile`. - -```sh -# Login to NVIDIA GPU Cloud for access to CUDA-Q base container -docker login nvcr.io -# Follow the login instructions at ngc.nvidia.com -# Next, build the container locally -docker build -t cuda-quantum-academic:latest . -``` - -To run the container, use the following command. - -```sh -docker run -p 8888:8888 cuda-quantum-academic:latest -``` - -You can now open a web browser to http://localhost:8888/lab to access the labs. - -### Changing the port -If you cannot use port 8888 on your local machine then you can specify a differnt -port when running the the container. 
For example, if you want to connect to your -Jupyter Lab on port 8000 using http://localhost:8000/lab, then you'd do the following: - -```sh -docker run -p 8000:8888 cuda-quantum-academic:latest -``` - -Here `8888` is the port used within the container. Docker is routing your local -traffic on `8000` to the container port `8888`. If you need to change the port used within -the container, then you can also specify that port when running your container. For example, -if you wish to direct your browser to port 8888 but run the Jupyter lab within the container -on port 8000, then you'd run the following: - -```sh -docker run -e JUPYTER_LAB_PORT=8000 -p 8888:8000 cuda-quantum-academic:latest -``` - - -## Running the notebooks in Google Colab -Simply click on the icon at the top of each notebook in github to open it up in Google Colab. In each notebook there instructions for running CoLab. diff --git a/qaoa-for-max-cut/00_StartHere.ipynb b/qaoa-for-max-cut/00_StartHere.ipynb index db2ac75..3cd8e35 100644 --- a/qaoa-for-max-cut/00_StartHere.ipynb +++ b/qaoa-for-max-cut/00_StartHere.ipynb @@ -82,7 +82,7 @@ "id": "48754d6e" }, "source": [ - "The Jupyter notebooks in this folder are designed to run in an environment with CUDA-Q with Python. For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). A Dockerfile and requirements.txt are also including in this folder to help get you set up.\n", + "The Jupyter notebooks in this folder are designed to run in an environment with CUDA-Q with Python. For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q).\n", "\n", "For links to run the notebooks in qBraid, CoCalc, or Google CoLab, please see the [READ_ME.md](https://github.com/NVIDIA/cuda-q-academic/blob/main/qaoa-for-max-cut/READ_ME.md) file in this directory." 
] diff --git a/qaoa-for-max-cut/01_Max-Cut-with-QAOA.ipynb b/qaoa-for-max-cut/01_Max-Cut-with-QAOA.ipynb index e14eabe..049d5db 100644 --- a/qaoa-for-max-cut/01_Max-Cut-with-QAOA.ipynb +++ b/qaoa-for-max-cut/01_Max-Cut-with-QAOA.ipynb @@ -50,7 +50,7 @@ "* **1.6** Preview how to scale QAOA for larger graphs using divide-and-conquer QAOA\n", "\n", "Learning Objectives:\n", - "* Apply CUDA-Q primitives such as `observe`, `sample`, and `vqe` to kernels\n", + "* Apply CUDA-Q primitives such as `observe` and `sample`, and the CUDA-QX Solvers `vqe` function, to kernels\n", "* Construct CUDA-Q kernels with and without parameters using function decoration\n", "* Visualize the divide, conquer, and merge stage of the divide-and-conquer QAOA as it is applied to a small graph\n", "\n" @@ -131,6 +131,11 @@ "from networkx.algorithms import community\n", "import cudaq\n", "from cudaq import spin\n", + "## To install cudaq-solvers (if not already installed), uncomment and run:\n", + "## !pip install cudaq-solvers -q\n", + "## Note: cudaq-solvers requires libgfortran. If you see an ImportError, run:\n", + "## !apt-get install -y libgfortran5\n", + "import cudaq_solvers as solvers\n", "from cudaq.qis import *\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", @@ -573,7 +578,7 @@ "\n", "![](https://github.com/NVIDIA/cuda-q-academic/blob/main/qaoa-for-max-cut/images/QAOA-flowchart.png?raw=true)\n", "\n", - "The two green process blocks in the image above represent the quantum subroutines that we'll execute with CUDA-Q primitives. The green process block on the left will use the `observe` primitive to estimate the expectation value (this primitive is subsumed in a `vqe` call) and the green process on the right will use the `sample` primitive to identify the most probable outcome of the circuit. 
We'll describe how to program this entire flowchart using CUDA-Q in the following sections.\n" + "The three green boxes in the flowchart above represent the quantum subroutines that we'll execute with CUDA-Q. The first two — Execute QAOA Circuit and Evaluate Cost Function — work together inside the optimization loop: the circuit is run and the cost is estimated using the `observe` primitive (subsumed in a `solvers.vqe` call from the [CUDA-QX Solvers](https://nvidia.github.io/cudaqx/components/solvers/introduction.html) library). The third — Sample Final Circuit — uses the `sample` primitive to read out the most probable bitstring from the converged circuit. We'll describe how to program this entire flowchart using CUDA-Q in the following sections.\n" ] }, { @@ -1136,7 +1141,7 @@ "" ] }, - "execution_count": 9, + "execution_count": null, "metadata": {}, "output_type": "execute_result" } @@ -1242,7 +1247,7 @@ "\n", "CUDA-Q has several built-in optimizers. You can find more information about the optimizers [here](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#optimizers). \n", "\n", - "The code block below defines our optimizer and sets the initial parameter values. We'll use the COBYLA optimizer. We'll set a seed and choose initial parameter values randomly. Recall that the number of parameters needed depends on the number of layers in our QAOA circuit. Each layer calls two parameters, one for the problem kernel and one for the mixer." + "The code block below sets the initial parameter values for the optimizer. We'll use the COBYLA optimizer. We'll set a seed and choose initial parameter values randomly. Recall that the number of parameters needed depends on the number of layers in our QAOA circuit. Each layer uses two parameters, one for the problem kernel and one for the mixer." 
] }, { @@ -1254,16 +1259,15 @@ }, "outputs": [], "source": [ - "# Define the optimizer\n", + "# Set up optimization parameters\n", "seed = 110\n", "layer_count = 1 # set arbitrarily\n", "parameter_count: int = 2 * layer_count\n", "\n", - "# Specify the optimizer and its initial parameters.\n", - "optimizer = cudaq.optimizers.COBYLA()\n", + "# Specify the initial parameters.\n", "np.random.seed(seed)\n", - "optimizer.initial_parameters = np.random.uniform(-np.pi, np.pi,\n", - " parameter_count)\n" + "initial_parameters = np.random.uniform(-np.pi, np.pi,\n", + " parameter_count).tolist()\n" ] }, { @@ -1273,7 +1277,7 @@ "id": "3286736c" }, "source": [ - "The `vqe` function is built into CUDA-Q and carries out the optimization loop once the optimizer, initial parameters, kernel, and Hamiltonian cost function have been defined. Let's add the `vqe` call to the code. The code block below implements the full optimizer loop. When you execute the code, you'll see the optimal parameter values identified for the max cut approximate solution to our `sampleGraph`. In the next two sections, we'll walk through how to read out an optimal cut from this." + "The `vqe` function from the [CUDA-QX Solvers](https://nvidia.github.io/cudaqx/components/solvers/introduction.html) library carries out the optimization loop once the initial parameters, kernel, and Hamiltonian cost function have been defined. Let's add the `vqe` call to the code. The code block below implements the full optimizer loop. When you execute the code, you'll see the optimal parameter values identified for the max cut approximate solution to our `sampleGraph`. In the next two sections, we'll walk through how to read out an optimal cut from this." 
] }, { @@ -1303,19 +1307,16 @@ " edge_qubit_tgt.append(nodes.index(v))\n", "\n", "\n", - "# Specify the optimizer and its initial parameters.\n", - "optimizer = cudaq.optimizers.COBYLA()\n", "np.random.seed(seed)\n", - "optimizer.initial_parameters = np.random.uniform(-np.pi, np.pi,\n", - " parameter_count)\n", + "initial_parameters = np.random.uniform(-np.pi, np.pi,\n", + " parameter_count).tolist()\n", "\n", - "# Pass the kernel, spin operator, and optimizer to `cudaq.vqe`.\n", - "optimal_expectation, optimal_parameters = cudaq.vqe(\n", - " kernel=kernel_qaoa,\n", - " spin_operator=hamiltonian_max_cut(edge_qubit_src,edge_qubit_tgt),\n", - " argument_mapper=lambda parameter_vector: (qubit_count, layer_count, edge_qubit_src, edge_qubit_tgt, parameter_vector),\n", - " optimizer=optimizer,\n", - " parameter_count=parameter_count)\n", + "# Pass the kernel, spin operator, and initial parameters to `cudaq_solvers.vqe`.\n", + "optimal_expectation, optimal_parameters, _ = solvers.vqe(\n", + " lambda thetas: kernel_qaoa(qubit_count, layer_count, edge_qubit_src, edge_qubit_tgt, thetas),\n", + " hamiltonian_max_cut(edge_qubit_src,edge_qubit_tgt),\n", + " initial_parameters,\n", + " optimizer='cobyla')\n", "\n", "# Print the optimized value and its parameters\n", "print(\"Optimal value = \", optimal_expectation)\n", @@ -1332,6 +1333,14 @@ "We can read out an approximation for the max cut value from this result. The optimal value reported is the average value ($\\langle \\psi|H|\\psi\\rangle$) of our problem Hamiltonian $H$ when applied to the state generated by the quantum circuit with the optimal parameter values. What this means is that the max cut value for the `sampleGraph` is at least 4.466. We'll use the sampling in the next section to identify the most probable measurements of the state generated by the QAOA kernel with these optimal parameter values."
] }, + { + "cell_type": "markdown", + "id": "4fbd0e1f", + "metadata": {}, + "source": [ + "**Note:** In this lab we built the QAOA circuit from scratch using `solvers.vqe()` in order to understand how QAOA works under the hood. In practice, the CUDA-QX Solvers library also provides a dedicated [`solvers.qaoa()`](https://nvidia.github.io/cudaqx/examples_rst/solvers/qaoa.html) function that handles circuit construction, parameterization, and sampling automatically. It additionally supports advanced features such as full parameterization (a separate parameter for every term in the Hamiltonian and mixer) and counterdiabatic driving." + ] + }, { "cell_type": "markdown", "id": "08f2e1ac", @@ -1383,19 +1392,16 @@ " # Each layer of the QAOA kernel contains 2 parameters\n", " parameter_count : int = 2*layer_count\n", "\n", - " # Specify the optimizer and its initial parameters.\n", - " optimizer = cudaq.optimizers.COBYLA()\n", " np.random.seed(seed)\n", - " optimizer.initial_parameters = np.random.uniform(-np.pi, np.pi,\n", - " parameter_count)\n", - "\n", - " # Pass the kernel, spin operator, and optimizer to `cudaq.vqe`.\n", - " optimal_expectation, optimal_parameters = cudaq.vqe(\n", - " kernel=kernel_qaoa,\n", - " spin_operator=hamiltonian_max_cut(qubit_src, qubit_tgt),\n", - " argument_mapper=lambda parameter_vector: (qubit_count, layer_count, qubit_src, qubit_tgt, parameter_vector),\n", - " optimizer=optimizer,\n", - " parameter_count=parameter_count)\n", + " initial_parameters = np.random.uniform(-np.pi, np.pi,\n", + " parameter_count).tolist()\n", + "\n", + " # Pass the kernel, spin operator, and initial parameters to `cudaq_solvers.vqe`.\n", + " optimal_expectation, optimal_parameters, _ = solvers.vqe(\n", + " lambda thetas: kernel_qaoa(qubit_count, layer_count, qubit_src, qubit_tgt, thetas),\n", + " hamiltonian_max_cut(qubit_src, qubit_tgt),\n", + " initial_parameters,\n", + " optimizer='cobyla')\n", "\n", " return optimal_parameters" ] diff --git 
a/qaoa-for-max-cut/02_One-level-divide-and-conquer-QAOA.ipynb b/qaoa-for-max-cut/02_One-level-divide-and-conquer-QAOA.ipynb index af669be..b212870 100644 --- a/qaoa-for-max-cut/02_One-level-divide-and-conquer-QAOA.ipynb +++ b/qaoa-for-max-cut/02_One-level-divide-and-conquer-QAOA.ipynb @@ -122,7 +122,7 @@ " * Estimate the max cut value of a moderately sized example graph (called `sampleGraph2` in this notebook) using classical heuristics\n", " * Partition `sampleGraph2` into smaller subgraphs\n", "* **2.3 Conquer**\n", - " * Estimate the max cut value of each of the subgraphs in the partition using a routine QAOA implementation with CUDA-Q's `vqe` function\n", + " * Estimate the max cut value of each of the subgraphs in the partition using a routine QAOA implementation with the CUDA-QX Solvers `vqe` function\n", "* **2.4 Merge**\n", " * Apply a brute-force computation to stitch together the subgraph solutions into an approximate max cut of `sampleGraph2`\n", " * Define a new graph, `mergerGraph`, whose vertices represent each of the subgraphs in the partition and define a new cost function that codes the optimization problem for optimally stitching together subgraph solutions into a cut of `sampleGraph2`\n", @@ -184,6 +184,11 @@ "import cudaq\n", "from cudaq import spin\n", "from cudaq.qis import *\n", + "## To install cudaq-solvers (if not already installed), uncomment and run:\n", + "## !pip install cudaq-solvers -q\n", + "## Note: cudaq-solvers requires libgfortran. 
If you see an ImportError, run:\n", + "## !apt-get install -y libgfortran5\n", + "import cudaq_solvers as solvers\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", "from typing import List\n" @@ -341,19 +346,14 @@ " # Each layer of the QAOA kernel contains 2 parameters\n", " parameter_count : int = 2*layer_count\n", "\n", - " # Specify the optimizer and its initial parameters.\n", - " optimizer = cudaq.optimizers.COBYLA()\n", " np.random.seed(seed)\n", - " optimizer.initial_parameters = np.random.uniform(-np.pi, np.pi,\n", - " parameter_count)\n", + " initial_parameters = np.random.uniform(-np.pi, np.pi, parameter_count).tolist()\n", "\n", - " # Pass the kernel, spin operator, and optimizer to `cudaq.vqe`.\n", - " optimal_expectation, optimal_parameters = cudaq.vqe(\n", - " kernel=kernel_qaoa,\n", - " spin_operator=hamiltonian_max_cut(qubit_src, qubit_tgt),\n", - " argument_mapper=lambda parameter_vector: (qubit_count, layer_count, qubit_src, qubit_tgt, parameter_vector),\n", - " optimizer=optimizer,\n", - " parameter_count=parameter_count)\n", + " optimal_expectation, optimal_parameters, _ = solvers.vqe(\n", + " lambda thetas: kernel_qaoa(qubit_count, layer_count, qubit_src, qubit_tgt, thetas),\n", + " hamiltonian_max_cut(qubit_src, qubit_tgt),\n", + " initial_parameters,\n", + " optimizer='cobyla')\n", "\n", " return optimal_parameters" ] @@ -779,7 +779,7 @@ "source": [ "## 2.4 Conquer stage of the algorithm sequentially executed\n", "\n", - "In this section, we solve the max cut problem for each of the five subgraphs using QAOA. We first create a loop calling the `vqe`function to compute the optimal parameters for the QAOA algorithm and the `sample` function to find the optimal cut. Here, the `vqe` and `sample` functions are applied to each subgraph sequentially. In the section 2.6, we adapt the code to execute some of the computation in parallel." + "In this section, we solve the max cut problem for each of the five subgraphs using QAOA. 
We first create a loop calling the `solvers.vqe` function to compute the optimal parameters for the QAOA algorithm and the `cudaq.sample` function to find the optimal cut. Here, these functions are applied to each subgraph sequentially. In section 2.6, we adapt the code to execute some of the computation in parallel." ] }, { @@ -834,7 +834,7 @@ "id": "1221c3b9" }, "source": [ - "Recall that in Lab 1, we learned how the `vqe` function and the `sample` primitive are used to implement QAOA and approximate the max cut of a graph. Here, we iterate this on each of the subgraphs in the `subgraph_dictionary` to find the approximate max cut of each of the subgraphs.\n", + "Recall that in Lab 1, we learned how the `solvers.vqe` function and the `cudaq.sample` primitive are used to implement QAOA and approximate the max cut of a graph. Here, we iterate this on each of the subgraphs in the `subgraph_dictionary` to find the approximate max cut of each of the subgraphs.\n", "\n", "Before we run this computation, let's first examine some of our options for circuit simulation. The code block below shows the backends available to us. We'll use the `qpp-cpu` which runs on a CPU. If you have access to a GPU, you can switch to the `nvidia` target, which provides a GPU-accelerated statevector simulator. By default the simulator uses `FP32` floating point types. To switch to `FP64`, you can reset the target to `nvidia-fp64`. Later in the lab, we'll experiment with some of the other targets which can leverage multi-node, multi-GPU simulators." ] }, @@ -876,7 +876,7 @@ "id": "c3f2fb59" }, "source": [ - "** Exercise:** Edit the code block below replacing `FIX_ME` with the function from Lab 1 that solves the max cut problem using `vqe` and the `sample` commands. For your reference, the functions from Lab 1 have been copied to the top of this notebook and reside in section 2.1.2. 
The names of these functions are:\n", + "** Exercise:** Edit the code block below replacing `FIX_ME` with the function from Lab 1 that solves the max cut problem using `solvers.vqe` and the `cudaq.sample` commands. For your reference, the functions from Lab 1 have been copied to the top of this notebook and reside in section 2.1.2. The names of these functions are:\n", "\n", "* `hamiltonian_max_cut`\n", "* `kernel_qaoa`\n", @@ -1760,7 +1760,7 @@ "**Exercise:** Edit the code block below, changing the `FIX_ME` functions to a selection from the following. Two of these options will not be used.\n", "* `cudaq.sample` \n", "* `cudaq.observe`\n", - "* `cudaq.vqe`\n", + "* `solvers.vqe`\n", "* `.expectation()`\n", "* `.most_probable()`\n", "\n" @@ -1782,13 +1782,10 @@ "layer_count_merger = 1 # set arbitrarily\n", "parameter_count_merger: int = 2 * layer_count_merger\n", "\n", - "# Specify the optimizer and its initial parameters. Make it repeatable.\n", + "# Specify the initial parameters. Make it repeatable.\n", "cudaq.set_random_seed(101)\n", - "optimizer_merger = cudaq.optimizers.COBYLA()\n", "np.random.seed(101)\n", - "optimizer_merger.initial_parameters = np.random.uniform(-np.pi, np.pi,\n", - " parameter_count_merger)\n", - "optimizer_merger.max_iterations=150\n", + "initial_parameters_merger = np.random.uniform(-np.pi, np.pi, parameter_count_merger).tolist()\n", "\n", "merger_nodes = list(mergerGraph.nodes())\n", "qubit_count = len(merger_nodes)\n", @@ -1799,14 +1796,12 @@ " merger_edge_src.append(merger_nodes.index(u))\n", " merger_edge_tgt.append(merger_nodes.index(v))\n", "\n", - "# Pass the kernel, spin operator, and optimizer to `cudaq.vqe`.\n", - "optimal_expectation, optimal_parameters = FIX_ME(\n", - " kernel=kernel_qaoa,\n", - " spin_operator=mHamiltonian(mergerGraph),\n", - " argument_mapper=lambda parameter_vector: (qubit_count, layer_count, merger_edge_src, merger_edge_tgt, parameter_vector),\n", - " optimizer=optimizer_merger,\n", - " 
parameter_count=parameter_count_merger,\n", - " shots = 10000)\n", + "# Pass the kernel and spin operator to `solvers.vqe`.\n", + "optimal_expectation, optimal_parameters, _ = FIX_ME(\n", + " lambda thetas: kernel_qaoa(qubit_count, layer_count, merger_edge_src, merger_edge_tgt, thetas),\n", + " mHamiltonian(mergerGraph),\n", + " initial_parameters_merger,\n", + " optimizer='cobyla', max_iterations=150, shots=10000)\n", "\n", "# Print the optimized value and its parameters\n", "print(\"Optimal value = \", optimal_expectation)\n", @@ -1853,13 +1848,10 @@ "layer_count_merger = 1 # set arbitrarily\n", "parameter_count_merger: int = 2 * layer_count_merger\n", "\n", - "# Specify the optimizer and its initial parameters. Make it repeatable.\n", + "# Specify the initial parameters. Make it repeatable.\n", "cudaq.set_random_seed(101)\n", - "optimizer_merger = cudaq.optimizers.COBYLA()\n", "np.random.seed(101)\n", - "optimizer_merger.initial_parameters = np.random.uniform(-np.pi, np.pi,\n", - " parameter_count_merger)\n", - "optimizer_merger.max_iterations=150\n", + "initial_parameters_merger = np.random.uniform(-np.pi, np.pi, parameter_count_merger).tolist()\n", "\n", "merger_nodes = list(mergerGraph.nodes())\n", "qubit_count = len(merger_nodes)\n", @@ -1870,14 +1862,12 @@ " merger_edge_src.append(merger_nodes.index(u))\n", " merger_edge_tgt.append(merger_nodes.index(v))\n", "\n", - "# Pass the kernel, spin operator, and optimizer to `cudaq.vqe`.\n", - "optimal_expectation, optimal_parameters = cudaq.vqe(\n", - " kernel=kernel_qaoa,\n", - " spin_operator=mHamiltonian(mergerGraph),\n", - " argument_mapper=lambda parameter_vector: (qubit_count, layer_count, merger_edge_src, merger_edge_tgt, parameter_vector),\n", - " optimizer=optimizer_merger,\n", - " parameter_count=parameter_count_merger,\n", - " shots = 10000)\n", + "# Pass the kernel and spin operator to `solvers.vqe`.\n", + "optimal_expectation, optimal_parameters, _ = solvers.vqe(\n", + " lambda thetas: 
kernel_qaoa(qubit_count, layer_count, merger_edge_src, merger_edge_tgt, thetas),\n", + " mHamiltonian(mergerGraph),\n", + " initial_parameters_merger,\n", + " optimizer='cobyla', max_iterations=150, shots=10000)\n", "\n", "# Print the optimized value and its parameters\n", "print(\"Optimal value = \", optimal_expectation)\n", @@ -2077,6 +2067,11 @@ "import cudaq\n", "from cudaq import spin\n", "from cudaq.qis import *\n", + "## To install cudaq-solvers (if not already installed), uncomment and run:\n", + "## !pip install cudaq-solvers -q\n", + "## Note: cudaq-solvers requires libgfortran. If you see an ImportError, run:\n", + "## !apt-get install -y libgfortran5\n", + "import cudaq_solvers as solvers\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", "from typing import List\n", diff --git a/qaoa-for-max-cut/03_Recursive-divide-and-conquer.ipynb b/qaoa-for-max-cut/03_Recursive-divide-and-conquer.ipynb index 8c027c4..f6ef291 100644 --- a/qaoa-for-max-cut/03_Recursive-divide-and-conquer.ipynb +++ b/qaoa-for-max-cut/03_Recursive-divide-and-conquer.ipynb @@ -169,6 +169,11 @@ "import cudaq\n", "from cudaq import spin\n", "from cudaq.qis import *\n", + "## To install cudaq-solvers (if not already installed), uncomment and run:\n", + "## !pip install cudaq-solvers -q\n", + "## Note: cudaq-solvers requires libgfortran. 
If you see an ImportError, run:\n", + "## !apt-get install -y libgfortran5\n", + "import cudaq_solvers as solvers\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", "from typing import List" @@ -346,19 +351,14 @@ " # Each layer of the QAOA kernel contains 2 parameters\n", " parameter_count : int = 2*layer_count\n", "\n", - " # Specify the optimizer and its initial parameters.\n", - " optimizer = cudaq.optimizers.COBYLA()\n", " np.random.seed(seed)\n", - " optimizer.initial_parameters = np.random.uniform(-np.pi, np.pi,\n", - " parameter_count)\n", + " initial_parameters = np.random.uniform(-np.pi, np.pi, parameter_count).tolist()\n", "\n", - " # Pass the kernel, spin operator, and optimizer to `cudaq.vqe`.\n", - " optimal_expectation, optimal_parameters = cudaq.vqe(\n", - " kernel=kernel_qaoa,\n", - " spin_operator=hamiltonian_max_cut(qubit_src, qubit_tgt),\n", - " argument_mapper=lambda parameter_vector: (qubit_count, layer_count, qubit_src, qubit_tgt, parameter_vector),\n", - " optimizer=optimizer,\n", - " parameter_count=parameter_count)\n", + " optimal_expectation, optimal_parameters, _ = solvers.vqe(\n", + " lambda thetas: kernel_qaoa(qubit_count, layer_count, qubit_src, qubit_tgt, thetas),\n", + " hamiltonian_max_cut(qubit_src, qubit_tgt),\n", + " initial_parameters,\n", + " optimizer='cobyla')\n", "\n", " return optimal_parameters" ] @@ -1292,21 +1292,16 @@ " # The number of qubits we'll need is the same as the number of vertices in our graph\n", " qubit_count_merger : int = len(nodes_merger)\n", "\n", - " # Specify the optimizer and its initial parameters. Make it repeatable.\n", + " # Specify the initial parameters. 
Make it repeatable.\n", " cudaq.set_random_seed(12345)\n", - " optimizer_merger = cudaq.optimizers.COBYLA()\n", " np.random.seed(4321)\n", - " optimizer_merger.initial_parameters = np.random.uniform(-np.pi, np.pi,\n", - " parameter_count_merger)\n", - " optimizer_merger.max_iterations=150\n", - " # Pass the kernel, spin operator, and optimizer to `cudaq.vqe`.\n", - " optimal_expectation, optimal_parameters = cudaq.vqe(\n", - " kernel=kernel_qaoa,\n", - " spin_operator=merger_Hamiltonian,\n", - " argument_mapper=lambda parameter_vector: (qubit_count_merger, layer_count_merger, merger_edge_src, merger_edge_tgt, parameter_vector),\n", - " optimizer=optimizer_merger,\n", - " parameter_count=parameter_count_merger,\n", - " shots = 10000)\n", + " initial_parameters_merger = np.random.uniform(-np.pi, np.pi, parameter_count_merger).tolist()\n", + "\n", + " optimal_expectation, optimal_parameters, _ = solvers.vqe(\n", + " lambda thetas: kernel_qaoa(qubit_count_merger, layer_count_merger, merger_edge_src, merger_edge_tgt, thetas),\n", + " merger_Hamiltonian,\n", + " initial_parameters_merger,\n", + " optimizer='cobyla', max_iterations=150, shots=10000)\n", "\n", " # Sample the circuit using the optimized parameters\n", " # Sample enough times to distinguish the most_probable outcome for\n", @@ -1672,7 +1667,7 @@ "'1110010001110011111101101101000110010111100110011001111001011000011000110011010110011011100110011010'" ] }, - "execution_count": 13, + "execution_count": null, "metadata": {}, "output_type": "execute_result" } @@ -1797,6 +1792,11 @@ "import cudaq\n", "from cudaq import spin\n", "from cudaq.qis import *\n", + "## To install cudaq-solvers (if not already installed), uncomment and run:\n", + "## !pip install cudaq-solvers -q\n", + "## Note: cudaq-solvers requires libgfortran. 
If you see an ImportError, run:\n", + "## !apt-get install -y libgfortran5\n", + "import cudaq_solvers as solvers\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", "from typing import List\n", @@ -1972,7 +1972,7 @@ "\n", "* **Initial parameters:** As we saw in this lab, by varying the seed for generating the random initial parameters, we obtained quite different results. Recent work points to several protocols for selecting better performing initial parameters (see for instance the work of [Sureshbabu et al.](https://arxiv.org/abs/2305.15201)).\n", "\n", - "* **Optimizer:** Throughout this series of labs we defaulted to the COBYLA optimizer for the optimizer loop in the algorithm. There are several other non-gradient and gradient-based optimizers available in CUDA-Q for you to try, and you can use your favorite sci.py optimizer as well. See the full list of built-in CUDA-Q optimizers [here](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.optimizers.optimizer).\n", + "* **Optimizer:** Throughout this series of labs we defaulted to the COBYLA optimizer for the optimizer loop in the algorithm. The `solvers.vqe` function accepts an optimizer name as a string (e.g. `optimizer='cobyla'`). Other options include gradient-based optimizers such as `'l-bfgs-b'` — see the [CUDA-QX Solvers documentation](https://nvidia.github.io/cudaqx/components/solvers/introduction.html) for the full list of supported optimizers.\n", "\n", "* **QAOA kernel:** We used a standard QAOA kernel throughout this tutorial. There are several adaptations that could be made to the QAOA kernel. For example, one could replace the QAOA kernel with a similarly structured [multi-angle QAOA kernel](https://www.nature.com/articles/s41598-022-10555-8). 
The difference between the QAOA kernel and the multi-angle QAOA kernel is that instead of each layer having distinct parameters for the problem and the mixer kernels, each parameterized rotational gate would have its own distinct parameter. Another variation of the QAOA kernel is the ADAPT-QAOA algorithm which iteratively constructs the QAOA circuit, tailoring it for each problem [(Zhu et al.)](https://journals.aps.org/prresearch/pdf/10.1103/PhysRevResearch.4.033029). You can learn more about ADAPT-QAOA and how to implement it in CUDA-Q [here](https://nvidia.github.io/cuda-quantum/latest/applications/python/adapt_qaoa.html). Taking this one step further, researchers have used ADAPT-QAOA circuits to train a QAOA-GPT model that generates compact quantum circuits for the max cut problem [(Tyagin et al.)](https://arxiv.org/pdf/2504.16350).\n", "\n", @@ -2010,7 +2010,7 @@ "{'status': 'ok', 'restart': True}" ] }, - "execution_count": 1, + "execution_count": null, "metadata": {}, "output_type": "execute_result" } diff --git a/qaoa-for-max-cut/04_Assessment-Solution.ipynb b/qaoa-for-max-cut/04_Assessment-Solution.ipynb index bd277e2..602f753 100644 --- a/qaoa-for-max-cut/04_Assessment-Solution.ipynb +++ b/qaoa-for-max-cut/04_Assessment-Solution.ipynb @@ -1,757 +1,762 @@ { - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "1e406cfc", - "metadata": {}, - "outputs": [], - "source": [ - "# SPDX-License-Identifier: Apache-2.0 AND CC-BY-NC-4.0\n", - "#\n", - "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", - "# you may not use this file except in compliance with the License.\n", - "# You may obtain a copy of the License at\n", - "#\n", - "# http://www.apache.org/licenses/LICENSE-2.0\n", - "#\n", - "# Unless required by applicable law or agreed to in writing, software\n", - "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", - "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", - 
"# See the License for the specific language governing permissions and\n", - "# limitations under the License." - ] - }, - { - "cell_type": "markdown", - "id": "135f6e3e-e55f-4b5c-80cb-f8979bba94e0", - "metadata": {}, - "source": [ - "# Divide-and-Conquer Implementation of QAOA\n", - "## Lab 4 Assessment\n", - "$\n", - "\\renewcommand{\\ket}[1]{|{#1}\\rangle}\n", - "\\renewcommand{\\bra}[1]{\\langle{#1}|}\n", - "$" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8cb5cd25", - "metadata": {}, - "outputs": [], - "source": [ - "# Instructions for Google Colab. You can ignore this cell if you have cuda-q set up.\n", - "# Run this portion of the notebook in a CPU runtime\n", - "# Uncomment the line below and execute the cell to install cuda-q\n", - "# !pip install cudaq\n", - "\n", - "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", - "#!unzip -q main.zip\n", - "#!mv cuda-q-academic-main/qaoa-for-max-cut/images ./images" - ] - }, - { - "cell_type": "markdown", - "id": "fd6bc9e4", - "metadata": {}, - "source": [ - "## 4.1 Lab Description\n", - "\n", - "Congratulations on making it this far! We hope you enjoyed conquering a large max cut problem while picking up a few skills along the way.\n", - "\n", - "For this assessment, the challenge is to adapt the code that we have created for the max cut problem and apply it to the weighted max cut problem. As we described at the end of [Lab 3](3_Recursive-divide-and-conquer.ipynb), there are many options for coding QAOA that can improve performance and accuracy. We encourage you to experiment with at least one of these to achieve a max cut approximation of a weighted version of the example graph from [Lab 2](2_One-level-divide-and-conquer-QAOA.ipynb). 
We chose this moderately sized graph for the sake of time, but we do give you the option to experiment with other graphs.\n", - "\n", - "The learning objectives of this tutorial are:\n", - "* Execute the QAOA algorithm to find approximate max cuts of a given graph using CUDA Quantum\n", - "* Understand the limitations of the QAOA algorithm for solving max cut in the NISQ era \n", - "* Make adjustments to the divide-and-conquer QAOA algorithm through selection of initial parameter values, increased layers, choice of optimizer, or other methods\n", - "* Simulate quantum circuits in parallel on multiple GPUs to speed up overall run time using CUDA Quantum\n", - "\n", - "Let's get started! " - ] - }, - { - "cell_type": "markdown", - "id": "d3721df5", - "metadata": {}, - "source": [ - "## 4.2 Weighted Max Cut Problem" - ] - }, - { - "cell_type": "markdown", - "id": "71b602e0", - "metadata": {}, - "source": [ - "\n", - "The weighted max cut problem is a variation of the max cut problem. The weighted version of the problem aims to identify a partition of a graph's nodes into two sets which maximizes the sum of the weights of the edges between the two sets. We continue with the notation established in the previous labs. The only difference between this problem and the max cut problem from before is that we now want to maximize: \n", - "$$\\sum_{\\substack{(u,v)\\in E\\\\ u\\in V_0, v\\in V_1}}w_{u,v},$$\n", - "\n", - "where $w_{u,v}$ is the weight of the edge connecting vertex $u$ to $v$. As before $E$ is the set of the edges of the graph, and $V_0$ and $V_1$ define a partition of the vertices of the graph.\n" - ] - }, - { - "cell_type": "markdown", - "id": "458502bb", - "metadata": {}, - "source": [ - "## 4.3 Adapting our code from the previous labs" - ] - }, - { - "cell_type": "markdown", - "id": "2a462183", - "metadata": {}, - "source": [ - "We can use most of the code that we've already developed. 
There are a few changes that need to be made at the divide, conquer, and merge stages of the QAOA divide-and-conquer algorithm." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7a63c4f0", - "metadata": {}, - "outputs": [], - "source": [ - "# Instructions for Google Colab. You can ignore this cell if you have cuda-q set up.\n", - "# Run this portion of the notebook in a GPU runtime \n", - "# Uncomment the line below and execute the cell to install cuda-q\n", - "# !pip install cudaq" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "6c4006e6", - "metadata": {}, - "outputs": [], - "source": [ - "# Necessary packages\n", - "import networkx as nx\n", - "from networkx import algorithms\n", - "from networkx.algorithms import community\n", - "import cudaq\n", - "from cudaq import spin\n", - "from cudaq.qis import *\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "from typing import List" - ] - }, - { - "cell_type": "markdown", - "id": "d173a9ba", - "metadata": {}, - "source": [ - "### 4.3.1 Divide\n", - "\n", - "Since we now have a weighted graph, we will want to take these weights into account when identifying the subgraph partition. We've made the adjustment to the `subgraph_partition` function below. This may produce a different partitioning of our weighted graph than if we had ignored the weights." 
- ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "fe4009ed", - "metadata": {}, - "outputs": [], - "source": [ - "def subgraphpartition(G,n, name, globalGraph):\n", - " \"\"\"Divide the graph up into at most n subgraphs\n", - " \n", - " Parameters\n", - " ----------\n", - " G: networkX.Graph \n", - " Graph that we want to subdivivde which lives inside of or is equatl to globalGraph\n", - " n : int\n", - " n is the maximum number of subgraphs in the partition\n", - " name : str\n", - " prefix for the graphs (in our case we'll use 'Global')\n", - " globalGraph: networkX.Graph\n", - " original problem graph\n", - " \n", - " Returns\n", - " -------\n", - " dict of str : networkX.Graph\n", - " Dictionary of networkX graphs with a string as the key\n", - " \"\"\"\n", - " greedy_partition = community.greedy_modularity_communities(G, weight='weight', resolution=1.1, cutoff=1, best_n=n)\n", - " number_of_subgraphs = len(greedy_partition)\n", - "\n", - " graph_dictionary = {}\n", - " graph_names=[]\n", - " for i in range(number_of_subgraphs):\n", - " subgraphname=name+':'+str(i)\n", - " graph_names.append(subgraphname)\n", - "\n", - " for i in range(number_of_subgraphs):\n", - " nodelist = sorted(list(greedy_partition[i]))\n", - " graph_dictionary[graph_names[i]] = nx.subgraph(globalGraph, nodelist)\n", - " \n", - " return(graph_dictionary) \n", - "\n" - ] - }, - { - "cell_type": "markdown", - "id": "e9d7744a", - "metadata": {}, - "source": [ - "### 4.3.2 Conquer\n", - "\n", - "To adapt the dividie-and-conquer QAOA algorithm to handle a weighted graph, we will need to change the Hamiltonian function. We refer you to section 1.4.1 of [Lab 1](01_Max-Cut-with-QAOA.ipynb) to derive the Hamiltonian for the weighted max cut problem. Below we've copied and adapted the code from the `hamiltonian_max_cut` function from the previous labs by adding a new function argument for the weights. 
You'll need to fix the indicated line of code to take into account the weights of the edges. \n", - "\n", - "HINT: You'll need to consider the weight of each edge, which we have computed for you in the `edge_weight` variable.\n", - "\n", - "**Exercise:** Edit the line commented with `###FIX_ME###`" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "4ff41e0c", - "metadata": {}, - "outputs": [], - "source": [ - "# Exercise\n", - "\n", - "def hamiltonian_max_cut(sources : List[int], targets : List[int], weights : List[float]): \n", - " \"\"\"Hamiltonian for finding the max cut for the graph with edges defined by the pairs generated by source and target edges\n", - " \n", - " Parameters\n", - " ----------\n", - " sources: List[int] \n", - " list of the source vertices for edges in the graph\n", - " targets: List[int]\n", - " list of the target vertices for the edges in the graph\n", - " weights : List[float]\n", - " list of the weight of the edge determined by the source and target with the same index\n", - " Returns\n", - " -------\n", - " cudaq.SpinOperator\n", - " Hamiltonian for finding the max cut of the graph defined by the given edges\n", - " \"\"\"\n", - " hamiltonian = 0\n", - " # Since our vertices may not be a list from 0 to n, or may not even be integers,\n", - " \n", - " for i in range(len(sources)):\n", - " # Add a term to the Hamiltonian for the edge (u,v)\n", - " qubitu = sources[i]\n", - " qubitv = targets[i]\n", - " edge_weight = weights[i]\n", - " hamiltonian += ##FIX_ME## \n", - " \n", - " return hamiltonian" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "9d031b65", - "metadata": { - "jupyter": { - "source_hidden": true - } - }, - "outputs": [], - "source": [ - "# SOLUTION\n", - "\n", - "def hamiltonian_max_cut(sources : List[int], targets : List[int], weights : List[float]): \n", - " \"\"\"Hamiltonian for finding the max cut for the graph with edges defined by the pairs generated by source and target 
edges\n", - " \n", - " Parameters\n", - " ----------\n", - " sources: List[int] \n", - " list of the source vertices for edges in the graph\n", - " targets: List[int]\n", - " list of the target vertices for the edges in the graph\n", - " weights : List[float]\n", - " list of the weight of the edge determined by the source and target with the same index\n", - " Returns\n", - " -------\n", - " cudaq.SpinOperator\n", - " Hamiltonian for finding the max cut of the graph defined by the given edges\n", - " \"\"\"\n", - " hamiltonian = 0\n", - " # Since our vertices may not be a list from 0 to n, or may not even be integers,\n", - " \n", - " for i in range(len(sources)):\n", - " # Add a term to the Hamiltonian for the edge (u,v)\n", - " qubitu = sources[i]\n", - " qubitv = targets[i]\n", - " edge_weight = weights[i]\n", - " hamiltonian += 0.5*edge_weight*(spin.z(qubitu)*spin.z(qubitv)-spin.i(qubitu)*spin.i(qubitv))\n", - " \n", - " return hamiltonian\n", - "\n", - "\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "id": "8817b585", - "metadata": {}, - "source": [ - "Since we've changed the function arguments for the `hamiltonian_max_cut` function, we've edited the code from the previous labs that calls this function." 
- ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "36167f37", - "metadata": {}, - "outputs": [], - "source": [ - "def find_optimal_parameters(G, layer_count, seed):\n", - " \"\"\"Function for finding the optimal parameters of QAOA for the max cut of a graph\n", - " Parameters\n", - " ----------\n", - " G: networkX graph \n", - " Problem graph whose max cut we aim to find\n", - " layer_count : int \n", - " Number of layers in the QAOA circuit\n", - " seed : int\n", - " Random seed for reproducibility of results\n", - " \n", - " Returns\n", - " -------\n", - " list[float]\n", - " Optimal parameters for the QAOA applied to the given graph G\n", - " \"\"\"\n", - " parameter_count: int = 2 * layer_count\n", - "\n", - " # Problem parameters\n", - " nodes = sorted(list(nx.nodes(G)))\n", - " qubit_src = []\n", - " qubit_tgt = []\n", - " weights = []\n", - " for u, v in nx.edges(G):\n", - " # We can use the index() command to read out the qubits associated with the vertex u and v.\n", - " qubit_src.append(nodes.index(u))\n", - " qubit_tgt.append(nodes.index(v))\n", - " weights.append(G.edges[u,v]['weight']) \n", - " # The number of qubits we'll need is the same as the number of vertices in our graph\n", - " qubit_count : int = len(nodes)\n", - " # Each layer of the QAOA kernel contains 2 parameters\n", - " parameter_count : int = 2*layer_count\n", - " \n", - " # Specify the optimizer and its initial parameters. 
\n", - " optimizer = cudaq.optimizers.COBYLA()\n", - " np.random.seed(seed)\n", - " cudaq.set_random_seed(seed)\n", - " optimizer.initial_parameters = np.random.uniform(-np.pi, np.pi,\n", - " parameter_count) \n", - "\n", - " # Pass the kernel, spin operator, and optimizer to `cudaq.vqe`.\n", - " optimal_expectation, optimal_parameters = cudaq.vqe(\n", - " kernel=kernel_qaoa,\n", - " spin_operator=hamiltonian_max_cut(qubit_src, qubit_tgt, weights),\n", - " argument_mapper=lambda parameter_vector: (qubit_count, layer_count, qubit_src, qubit_tgt, parameter_vector),\n", - " optimizer=optimizer,\n", - " parameter_count=parameter_count)\n", - "\n", - " return optimal_parameters" - ] - }, - { - "cell_type": "markdown", - "id": "37ba4faf", - "metadata": {}, - "source": [ - "### 4.3.3 Merge\n", - "\n", - "The weights of the edges between subgraphs will impact the merger stage of the algorithm as well. \n", - "\n", - "**Exercise:** Edit the code block by replacing `FIX_ME` with the appropriate values to compute the penalties associated with each edge of the merger graph." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "29c3055f", - "metadata": {}, - "outputs": [], - "source": [ - "\n", - "# Exercise\n", - "# Compute the penalties for edges in the supplied mergerGraph\n", - "# for the subgraph partitioning of graph G\n", - "def merger_graph_penalties(mergerGraph, subgraph_dictionary, G):\n", - " \"\"\"Compute penalties for the edges in the mergerGraph and add them\n", - " as edge attributes.\n", - " \n", - " Parameters\n", - " ----------\n", - " mergerGraph : networkX.Graph \n", - " Graph of connections between vertices in distinct subgraphs of G\n", - " subgraph_dictionary : dict of networkX graph with str as keys \n", - " subgraphs of G that are represented as nodes in the mergerGraph\n", - " G : networkX.Graph\n", - " graph whose vertices has an attribute 'color'\n", - " \n", - " Returns\n", - " -------\n", - " networkX.Graph\n", - " Merger graph containing penalties\n", - " \"\"\" \n", - " nx.set_edge_attributes(mergerGraph, int(0), 'penalty')\n", - " for i, j in mergerGraph.edges():\n", - " penalty_ij = 0\n", - " for u in nx.nodes(subgraph_dictionary[i]):\n", - " for neighbor_u in nx.all_neighbors(G, u):\n", - " if neighbor_u in nx.nodes(subgraph_dictionary[j]):\n", - " if G.nodes[u]['color'] != G.nodes[neighbor_u]['color']:\n", - " penalty_ij += ### FIX_ME\n", - " else:\n", - " penalty_ij += ### FIX_ME\n", - " mergerGraph[i][j]['penalty'] = penalty_ij\n", - " return mergerGraph" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "32898a6b-28f5-4518-b47e-eed28431de06", - "metadata": { - "jupyter": { - "source_hidden": true - } - }, - "outputs": [], - "source": [ - "# Solution\n", - "# Compute the penalties for edges in the supplied mergerGraph\n", - "# for the subgraph partitioning of graph G\n", - "def merger_graph_penalties(mergerGraph, subgraph_dictionary, G):\n", - " \"\"\"Compute penalties for the edges in the mergerGraph and add them\n", - " as edge attributes.\n", - " \n", - " 
Parameters\n", - " ----------\n", - " mergerGraph : networkX.Graph \n", - " Graph of connections between vertices in distinct subgraphs of G\n", - " subgraph_dictionary : dict of networkX graph with str as keys \n", - " subgraphs of G that are represented as nodes in the mergerGraph\n", - " G : networkX.Graph\n", - " graph whose vertices has an attribute 'color'\n", - " \n", - " Returns\n", - " -------\n", - " networkX.Graph\n", - " Merger graph containing penalties\n", - " \"\"\" \n", - " nx.set_edge_attributes(mergerGraph, int(0), 'penalty')\n", - " for i, j in mergerGraph.edges():\n", - " penalty_ij = 0\n", - " for u in nx.nodes(subgraph_dictionary[i]):\n", - " for neighbor_u in nx.all_neighbors(G, u):\n", - " if neighbor_u in nx.nodes(subgraph_dictionary[j]):\n", - " if G.nodes[u]['color'] != G.nodes[neighbor_u]['color']:\n", - " penalty_ij += G.edges[u,neighbor_u]['weight']\n", - " else:\n", - " penalty_ij += -G.edges[u,neighbor_u]['weight']\n", - " mergerGraph[i][j]['penalty'] = penalty_ij\n", - " return mergerGraph" - ] - }, - { - "cell_type": "markdown", - "id": "85f53c2e", - "metadata": {}, - "source": [ - "Finally, since our cut value now depends on the weight of the edges, we will need to edit the `cutvalue` function that comptues the cut of the graph based on the coloring of the nodes.\n", - "\n", - "**Exercise:** Edit the `FIX_ME` line of the code block below." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "033b9f97", - "metadata": {}, - "outputs": [], - "source": [ - "def cutvalue(G):\n", - " \"\"\"Returns the cut value of G based on the coloring of the nodes of G\n", - " \n", - " Parameters\n", - " ----------\n", - " G: networkX.Graph \n", - " Graph with weighted edges and with binary value colors assigned to the vertices \n", - " \n", - " Returns\n", - " -------\n", - " int\n", - " cut value of the graph determined by the vertex colors and edge weights\n", - " \"\"\" \n", - " cut = 0\n", - " for u, v in G.edges():\n", - " if G.nodes[u]['color'] != G.nodes[v]['color']: \n", - " cut+=##FIX_ME\n", - " return cut" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "dde63ea7", - "metadata": { - "jupyter": { - "source_hidden": true + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "1e406cfc", + "metadata": {}, + "outputs": [], + "source": [ + "# SPDX-License-Identifier: Apache-2.0 AND CC-BY-NC-4.0\n", + "#\n", + "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# http://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." 
+ ] + }, + { + "cell_type": "markdown", + "id": "135f6e3e-e55f-4b5c-80cb-f8979bba94e0", + "metadata": {}, + "source": [ + "# Divide-and-Conquer Implementation of QAOA\n", + "## Lab 4 Assessment\n", + "$\n", + "\\renewcommand{\\ket}[1]{|{#1}\\rangle}\n", + "\\renewcommand{\\bra}[1]{\\langle{#1}|}\n", + "$" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8cb5cd25", + "metadata": {}, + "outputs": [], + "source": [ + "# Instructions for Google Colab. You can ignore this cell if you have cuda-q set up.\n", + "# Run this portion of the notebook in a CPU runtime\n", + "# Uncomment the line below and execute the cell to install cuda-q\n", + "# !pip install cudaq\n", + "\n", + "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", + "#!unzip -q main.zip\n", + "#!mv cuda-q-academic-main/qaoa-for-max-cut/images ./images" + ] + }, + { + "cell_type": "markdown", + "id": "fd6bc9e4", + "metadata": {}, + "source": [ + "## 4.1 Lab Description\n", + "\n", + "Congratulations on making it this far! We hope you enjoyed conquering a large max cut problem while picking up a few skills along the way.\n", + "\n", + "For this assessment, the challenge is to adapt the code that we have created for the max cut problem and apply it to the weighted max cut problem. As we described at the end of [Lab 3](3_Recursive-divide-and-conquer.ipynb), there are many options for coding QAOA that can improve performance and accuracy. We encourage you to experiment with at least one of these to achieve a max cut approximation of a weighted version of the example graph from [Lab 2](2_One-level-divide-and-conquer-QAOA.ipynb). 
We chose this moderately sized graph for the sake of time, but we do give you the option to experiment with other graphs.\n", + "\n", + "The learning objectives of this tutorial are:\n", + "* Execute the QAOA algorithm to find approximate max cuts of a given graph using CUDA Quantum\n", + "* Understand the limitations of the QAOA algorithm for solving max cut in the NISQ era \n", + "* Make adjustments to the divide-and-conquer QAOA algorithm through selection of initial parameter values, increased layers, choice of optimizer, or other methods\n", + "* Simulate quantum circuits in parallel on multiple GPUs to speed up overall run time using CUDA Quantum\n", + "\n", + "Let's get started! " + ] + }, + { + "cell_type": "markdown", + "id": "d3721df5", + "metadata": {}, + "source": [ + "## 4.2 Weighted Max Cut Problem" + ] + }, + { + "cell_type": "markdown", + "id": "71b602e0", + "metadata": {}, + "source": [ + "\n", + "The weighted max cut problem is a variation of the max cut problem. The weighted version of the problem aims to identify a partition of a graph's nodes into two sets which maximizes the sum of the weights of the edges between the two sets. We continue with the notation established in the previous labs. The only difference between this problem and the max cut problem from before is that we now want to maximize: \n", + "$$\\sum_{\\substack{(u,v)\\in E\\\\ u\\in V_0, v\\in V_1}}w_{u,v},$$\n", + "\n", + "where $w_{u,v}$ is the weight of the edge connecting vertex $u$ to $v$. As before $E$ is the set of the edges of the graph, and $V_0$ and $V_1$ define a partition of the vertices of the graph.\n" + ] + }, + { + "cell_type": "markdown", + "id": "458502bb", + "metadata": {}, + "source": [ + "## 4.3 Adapting our code from the previous labs" + ] + }, + { + "cell_type": "markdown", + "id": "2a462183", + "metadata": {}, + "source": [ + "We can use most of the code that we've already developed. 
There are a few changes that need to be made at the divide, conquer, and merge stages of the QAOA divide-and-conquer algorithm." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7a63c4f0", + "metadata": {}, + "outputs": [], + "source": [ + "# Instructions for Google Colab. You can ignore this cell if you have cuda-q set up.\n", + "# Run this portion of the notebook in a GPU runtime \n", + "# Uncomment the line below and execute the cell to install cuda-q\n", + "# !pip install cudaq" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "6c4006e6", + "metadata": {}, + "outputs": [], + "source": [ + "# Necessary packages\n", + "import networkx as nx\n", + "from networkx import algorithms\n", + "from networkx.algorithms import community\n", + "import cudaq\n", + "from cudaq import spin\n", + "from cudaq.qis import *\n", + "## To install cudaq-solvers (if not already installed), uncomment and run:\n", + "## !pip install cudaq-solvers -q\n", + "## Note: cudaq-solvers requires libgfortran. If you see an ImportError, run:\n", + "## !apt-get install -y libgfortran5\n", + "import cudaq_solvers as solvers\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "from typing import List" + ] + }, + { + "cell_type": "markdown", + "id": "d173a9ba", + "metadata": {}, + "source": [ + "### 4.3.1 Divide\n", + "\n", + "Since we now have a weighted graph, we will want to take these weights into account when identifying the subgraph partition. We've made the adjustment to the `subgraph_partition` function below. This may produce a different partitioning of our weighted graph than if we had ignored the weights." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "fe4009ed", + "metadata": {}, + "outputs": [], + "source": [ + "def subgraphpartition(G,n, name, globalGraph):\n", + " \"\"\"Divide the graph up into at most n subgraphs\n", + " \n", + " Parameters\n", + " ----------\n", + " G: networkX.Graph \n", + " Graph that we want to subdivide which lives inside of or is equal to globalGraph\n", + " n : int\n", + " n is the maximum number of subgraphs in the partition\n", + " name : str\n", + " prefix for the graphs (in our case we'll use 'Global')\n", + " globalGraph: networkX.Graph\n", + " original problem graph\n", + " \n", + " Returns\n", + " -------\n", + " dict of str : networkX.Graph\n", + " Dictionary of networkX graphs with a string as the key\n", + " \"\"\"\n", + " greedy_partition = community.greedy_modularity_communities(G, weight='weight', resolution=1.1, cutoff=1, best_n=n)\n", + " number_of_subgraphs = len(greedy_partition)\n", + "\n", + " graph_dictionary = {}\n", + " graph_names=[]\n", + " for i in range(number_of_subgraphs):\n", + " subgraphname=name+':'+str(i)\n", + " graph_names.append(subgraphname)\n", + "\n", + " for i in range(number_of_subgraphs):\n", + " nodelist = sorted(list(greedy_partition[i]))\n", + " graph_dictionary[graph_names[i]] = nx.subgraph(globalGraph, nodelist)\n", + " \n", + " return(graph_dictionary) \n", + "\n" + ] + }, + { + "cell_type": "markdown", + "id": "e9d7744a", + "metadata": {}, + "source": [ + "### 4.3.2 Conquer\n", + "\n", + "To adapt the divide-and-conquer QAOA algorithm to handle a weighted graph, we will need to change the Hamiltonian function. We refer you to section 1.4.1 of [Lab 1](01_Max-Cut-with-QAOA.ipynb) to derive the Hamiltonian for the weighted max cut problem. Below we've copied and adapted the code from the `hamiltonian_max_cut` function from the previous labs by adding a new function argument for the weights. 
You'll need to fix the indicated line of code to take into account the weights of the edges. \n", + "\n", + "HINT: You'll need to consider the weight of each edge, which we have computed for you in the `edge_weight` variable.\n", + "\n", + "**Exercise:** Edit the line commented with `###FIX_ME###`" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "4ff41e0c", + "metadata": {}, + "outputs": [], + "source": [ + "# Exercise\n", + "\n", + "def hamiltonian_max_cut(sources : List[int], targets : List[int], weights : List[float]): \n", + " \"\"\"Hamiltonian for finding the max cut for the graph with edges defined by the pairs generated by source and target edges\n", + " \n", + " Parameters\n", + " ----------\n", + " sources: List[int] \n", + " list of the source vertices for edges in the graph\n", + " targets: List[int]\n", + " list of the target vertices for the edges in the graph\n", + " weights : List[float]\n", + " list of the weight of the edge determined by the source and target with the same index\n", + " Returns\n", + " -------\n", + " cudaq.SpinOperator\n", + " Hamiltonian for finding the max cut of the graph defined by the given edges\n", + " \"\"\"\n", + " hamiltonian = 0\n", + " # Since our vertices may not be a list from 0 to n, or may not even be integers,\n", + " \n", + " for i in range(len(sources)):\n", + " # Add a term to the Hamiltonian for the edge (u,v)\n", + " qubitu = sources[i]\n", + " qubitv = targets[i]\n", + " edge_weight = weights[i]\n", + " hamiltonian += ##FIX_ME## \n", + " \n", + " return hamiltonian" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "9d031b65", + "metadata": { + "jupyter": { + "source_hidden": true + } + }, + "outputs": [], + "source": [ + "# SOLUTION\n", + "\n", + "def hamiltonian_max_cut(sources : List[int], targets : List[int], weights : List[float]): \n", + " \"\"\"Hamiltonian for finding the max cut for the graph with edges defined by the pairs generated by source and target 
edges\n", + " \n", + " Parameters\n", + " ----------\n", + " sources: List[int] \n", + " list of the source vertices for edges in the graph\n", + " targets: List[int]\n", + " list of the target vertices for the edges in the graph\n", + " weights : List[float]\n", + " list of the weight of the edge determined by the source and target with the same index\n", + " Returns\n", + " -------\n", + " cudaq.SpinOperator\n", + " Hamiltonian for finding the max cut of the graph defined by the given edges\n", + " \"\"\"\n", + " hamiltonian = 0\n", + " # Since our vertices may not be a list from 0 to n, or may not even be integers,\n", + " \n", + " for i in range(len(sources)):\n", + " # Add a term to the Hamiltonian for the edge (u,v)\n", + " qubitu = sources[i]\n", + " qubitv = targets[i]\n", + " edge_weight = weights[i]\n", + " hamiltonian += 0.5*edge_weight*(spin.z(qubitu)*spin.z(qubitv)-spin.i(qubitu)*spin.i(qubitv))\n", + " \n", + " return hamiltonian\n", + "\n", + "\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "id": "8817b585", + "metadata": {}, + "source": [ + "Since we've changed the function arguments for the `hamiltonian_max_cut` function, we've edited the code from the previous labs that calls this function." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "36167f37", + "metadata": {}, + "outputs": [], + "source": [ + "def find_optimal_parameters(G, layer_count, seed):\n", + " \"\"\"Function for finding the optimal parameters of QAOA for the max cut of a graph\n", + " Parameters\n", + " ----------\n", + " G: networkX graph \n", + " Problem graph whose max cut we aim to find\n", + " layer_count : int \n", + " Number of layers in the QAOA circuit\n", + " seed : int\n", + " Random seed for reproducibility of results\n", + " \n", + " Returns\n", + " -------\n", + " list[float]\n", + " Optimal parameters for the QAOA applied to the given graph G\n", + " \"\"\"\n", + " parameter_count: int = 2 * layer_count\n", + "\n", + " # Problem parameters\n", + " nodes = sorted(list(nx.nodes(G)))\n", + " qubit_src = []\n", + " qubit_tgt = []\n", + " weights = []\n", + " for u, v in nx.edges(G):\n", + " # We can use the index() command to read out the qubits associated with the vertex u and v.\n", + " qubit_src.append(nodes.index(u))\n", + " qubit_tgt.append(nodes.index(v))\n", + " weights.append(G.edges[u,v]['weight']) \n", + " # The number of qubits we'll need is the same as the number of vertices in our graph\n", + " qubit_count : int = len(nodes)\n", + " # Each layer of the QAOA kernel contains 2 parameters\n", + " parameter_count : int = 2*layer_count\n", + " \n", + " np.random.seed(seed)\n", + " cudaq.set_random_seed(seed)\n", + " initial_parameters = np.random.uniform(-np.pi, np.pi, parameter_count).tolist()\n", + "\n", + " optimal_expectation, optimal_parameters, _ = solvers.vqe(\n", + " lambda thetas: kernel_qaoa(qubit_count, layer_count, qubit_src, qubit_tgt, thetas),\n", + " hamiltonian_max_cut(qubit_src, qubit_tgt, weights),\n", + " initial_parameters,\n", + " optimizer='cobyla')\n", + "\n", + " return optimal_parameters" + ] + }, + { + "cell_type": "markdown", + "id": "37ba4faf", + "metadata": {}, + "source": [ + "### 4.3.3 Merge\n", + "\n", + "The weights 
of the edges between subgraphs will impact the merger stage of the algorithm as well. \n", + "\n", + "**Exercise:** Edit the code block by replacing `FIX_ME` with the appropriate values to compute the penalties associated with each edge of the merger graph." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "29c3055f", + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "# Exercise\n", + "# Compute the penalties for edges in the supplied mergerGraph\n", + "# for the subgraph partitioning of graph G\n", + "def merger_graph_penalties(mergerGraph, subgraph_dictionary, G):\n", + " \"\"\"Compute penalties for the edges in the mergerGraph and add them\n", + " as edge attributes.\n", + " \n", + " Parameters\n", + " ----------\n", + " mergerGraph : networkX.Graph \n", + " Graph of connections between vertices in distinct subgraphs of G\n", + " subgraph_dictionary : dict of networkX graph with str as keys \n", + " subgraphs of G that are represented as nodes in the mergerGraph\n", + " G : networkX.Graph\n", + " graph whose vertices has an attribute 'color'\n", + " \n", + " Returns\n", + " -------\n", + " networkX.Graph\n", + " Merger graph containing penalties\n", + " \"\"\" \n", + " nx.set_edge_attributes(mergerGraph, int(0), 'penalty')\n", + " for i, j in mergerGraph.edges():\n", + " penalty_ij = 0\n", + " for u in nx.nodes(subgraph_dictionary[i]):\n", + " for neighbor_u in nx.all_neighbors(G, u):\n", + " if neighbor_u in nx.nodes(subgraph_dictionary[j]):\n", + " if G.nodes[u]['color'] != G.nodes[neighbor_u]['color']:\n", + " penalty_ij += ### FIX_ME\n", + " else:\n", + " penalty_ij += ### FIX_ME\n", + " mergerGraph[i][j]['penalty'] = penalty_ij\n", + " return mergerGraph" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "32898a6b-28f5-4518-b47e-eed28431de06", + "metadata": { + "jupyter": { + "source_hidden": true + } + }, + "outputs": [], + "source": [ + "# Solution\n", + "# Compute the penalties for edges in the supplied 
mergerGraph\n", + "# for the subgraph partitioning of graph G\n", + "def merger_graph_penalties(mergerGraph, subgraph_dictionary, G):\n", + " \"\"\"Compute penalties for the edges in the mergerGraph and add them\n", + " as edge attributes.\n", + " \n", + " Parameters\n", + " ----------\n", + " mergerGraph : networkX.Graph \n", + " Graph of connections between vertices in distinct subgraphs of G\n", + " subgraph_dictionary : dict of networkX graph with str as keys \n", + " subgraphs of G that are represented as nodes in the mergerGraph\n", + " G : networkX.Graph\n", + " graph whose vertices have an attribute 'color'\n", + " \n", + " Returns\n", + " -------\n", + " networkX.Graph\n", + " Merger graph containing penalties\n", + " \"\"\" \n", + " nx.set_edge_attributes(mergerGraph, int(0), 'penalty')\n", + " for i, j in mergerGraph.edges():\n", + " penalty_ij = 0\n", + " for u in nx.nodes(subgraph_dictionary[i]):\n", + " for neighbor_u in nx.all_neighbors(G, u):\n", + " if neighbor_u in nx.nodes(subgraph_dictionary[j]):\n", + " if G.nodes[u]['color'] != G.nodes[neighbor_u]['color']:\n", + " penalty_ij += G.edges[u,neighbor_u]['weight']\n", + " else:\n", + " penalty_ij += -G.edges[u,neighbor_u]['weight']\n", + " mergerGraph[i][j]['penalty'] = penalty_ij\n", + " return mergerGraph" + ] + }, + { + "cell_type": "markdown", + "id": "85f53c2e", + "metadata": {}, + "source": [ + "Finally, since our cut value now depends on the weight of the edges, we will need to edit the `cutvalue` function that computes the cut of the graph based on the coloring of the nodes.\n", + "\n", + "**Exercise:** Edit the `FIX_ME` line of the code block below." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "033b9f97", + "metadata": {}, + "outputs": [], + "source": [ + "def cutvalue(G):\n", + " \"\"\"Returns the cut value of G based on the coloring of the nodes of G\n", + " \n", + " Parameters\n", + " ----------\n", + " G: networkX.Graph \n", + " Graph with weighted edges and with binary value colors assigned to the vertices \n", + " \n", + " Returns\n", + " -------\n", + " int\n", + " cut value of the graph determined by the vertex colors and edge weights\n", + " \"\"\" \n", + " cut = 0\n", + " for u, v in G.edges():\n", + " if G.nodes[u]['color'] != G.nodes[v]['color']: \n", + " cut+=##FIX_ME\n", + " return cut" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "dde63ea7", + "metadata": { + "jupyter": { + "source_hidden": true + } + }, + "outputs": [], + "source": [ + "# Solution\n", + "def cutvalue(G):\n", + " \"\"\"Returns the cut value of G based on the coloring of the nodes of G\n", + " \n", + " Parameters\n", + " ----------\n", + " G: networkX.Graph \n", + " Graph with weighted edges and with binary value colors assigned to the vertices\n", + " \n", + " Returns\n", + " -------\n", + " int \n", + " cut value of the graph determined by the vertex colors and edge weights\n", + " \"\"\" \n", + "\n", + " cut = 0\n", + " for u, v in G.edges():\n", + " if G.nodes[u]['color'] != G.nodes[v]['color']: \n", + " cut+=G.edges[u,v]['weight']\n", + " return cut" + ] + }, + { + "cell_type": "markdown", + "id": "699dd281", + "metadata": {}, + "source": [ + "## 4.4 Weighted Max Cut using a modified Divide-and-Conquer QAOA\n", + "\n", + "For the sake of time, we have added the modifications that were made in the exercises above to the [Example-04.py](Example-04.py) which calls up the example graph from [Lab 2](2_One-level-divide-and-conquer-QAOA.ipynb) with random weights assigned to the vertices. Run the MPI call below to see how the algorithm performs. 
You may notice the results are not competitive with the classical methods, as is. \n", + "\n", + "For the assessment, make modifications to the Example-04.py to improve performance by making some adjustments as discussed at the end of [Lab 3](3_Recursive-divide-and-conquer.ipynb). Here are a few recommendations:\n", + "\n", + "* Modify the layer count for the QAOA max cut (line 822) and the QAOA merger calls (line 505).\n", + "* Try different seeds to generate different initial parameters for the optimizer for the QAOA for max cut (line 823) and for the merger stage (line 507). Better yet, replace the random initial parameters of the optimizer with the optimal parameters found in earlier runs of the algorithm. We've added a print command to [Example-04.py](Example-04.py) to view the optimal parameters of the max cut QAOA calls at each stage. For instance try initializing the optimizer with ( `[-1.8964004059756836, 1.0646218219788401]*layer_count`).\n", + "* Swap out the COBYLA optimizer with another optimizer supported by [CUDA-QX Solvers](https://nvidia.github.io/cudaqx/components/solvers/introduction.html) (e.g. `'l-bfgs-b'`) in the `solvers.vqe` calls. Depending on your choice of optimizer you may need to add a `gradient` parameter (e.g. `gradient='parameter_shift'`). \n", + "* Replace the QAOA kernel with a multi-angle kernel. In addition to editing the `kernel_qaoa` function (line 113), you will need to adjust the parameter_count variables (lines 181 and 340) accordingly.\n", + "\n", + "Feel free to experiment with one or all of these suggestions, or try out your own ideas! You can also play around with different graph instances by editing the lines 709 to 750. " + ] + }, + { + "cell_type": "markdown", + "id": "5d1d82ef", + "metadata": {}, + "source": [ + "**Important** Before proceeding, you will need to switch to a runtime with access to a GPU. If you do restart your kernel, make sure to reload the packages below. 
If you are running on Google Colab and switch to a GPU runtime, you'll need to reinstall CUDA-Q by commenting out the indicated code. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "38b43bed", + "metadata": {}, + "outputs": [], + "source": [ + "# Instructions for Google Colab. You can ignore this cell if you already have cuda-q set up and are working in a GPU runtime\n", + "# with all the necessary files\n", + "# Run this cell in a GPU runtime\n", + "\n", + "#!pip install cudaq\n", + "\n", + "#!wget -q -O Example-04.py https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/main/qaoa-for-max-cut/Example-04.py\n", + "#!wget -q -O Example-04-Solution.py https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/main/qaoa-for-max-cut/Example-04-Solution.py" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b5584581", + "metadata": {}, + "outputs": [], + "source": [ + "#@title Execute this cell to reload the necessary packages\n", + "import networkx as nx\n", + "from networkx import algorithms\n", + "from networkx.algorithms import community\n", + "import cudaq\n", + "from cudaq import spin\n", + "from cudaq.qis import *\n", + "## To install cudaq-solvers (if not already installed), uncomment and run:\n", + "## !pip install cudaq-solvers -q\n", + "## Note: cudaq-solvers requires libgfortran. 
If you see an ImportError, run:\n", + "## !apt-get install -y libgfortran5\n", + "import cudaq_solvers as solvers\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "from typing import List\n", + "import sys" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "da5a0774", + "metadata": {}, + "outputs": [], + "source": [ + "#@title Execute this cell to install mpi4py if necessary\n", + "%pip install mpi4py" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "949fb85c", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Subgraph problems to be computed on each processor have been assigned\n", + "Processor 1 received {'Global:1': , 'Global:5': } from processor 0\n", + "Processor 2 received {'Global:2': , 'Global:6': } from processor 0\n", + "Processor 3 received {'Global:3': } from processor 0\n", + "Working on finding max cut approximations for Global:3\n", + "Working on finding max cut approximations for Global:0:0\n", + "Working on finding max cut approximations for Global:1:0\n", + "Working on finding max cut approximations for Global:2:0\n", + "Optimal parameters = [1.9644984202286893, -0.39256359744504166]\n", + "most_probable outcome = 0011\n", + "Working on finding max cut approximations for Global:2:1\n", + "Optimal parameters = [-0.3831171294701392, 0.3930202244057288]\n", + "Optimal parameters = [-0.324086159500907, 0.3542926188419133]\n", + "most_probable outcome = 01110\n", + "Working on finding max cut approximations for Global:0:1\n", + "most_probable outcome = 01010\n", + "Working on finding max cut approximations for Global:1:1\n", + "Optimal parameters = [0.29862984672434045, -0.3484455261112559]\n", + "most_probable outcome = 10010110\n", + "{'Global:3': '10010110'} sent by processor 3\n", + "Optimal parameters = [1.9631271614779813, -0.3930417284990051]\n", + "most_probable outcome = 0011\n", + "Working on finding max cut 
approximations for Global:2:2\n", + "Optimal parameters = [1.9638723725453435, -0.3928639361315754]\n", + "most_probable outcome = 1001\n", + "Working on finding max cut approximations for Global:1:2\n", + "Optimal parameters = [1.9636066566612467, -0.39283580297617526]\n", + "most_probable outcome = 01001\n", + "Working on finding max cut approximations for Global:0:2\n", + "Optimal parameters = [0.7856194864736818, -1.9635088789904906]\n", + "most_probable outcome = 01\n", + "Working on finding max cut approximations for Global:2:3\n", + "Optimal parameters = [1.964490963939847, -0.39255038961423616]\n", + "most_probable outcome = 1001\n", + "Found max cut approximations for ['Global:1:0', 'Global:1:1', 'Global:1:2']\n", + "prior to merging, the max cut value of Global:1 is 16\n", + "Merging these solutions together for a solution to Global:1\n", + "Optimal parameters = [1.8786973859651952, -0.3076420518819305]\n", + "most_probable outcome = 101\n", + "Working on finding max cut approximations for Global:0:3\n", + "Optimal parameters = [0.7856194864736818, -1.9635088789904906]\n", + "most_probable outcome = 01\n", + "Found max cut approximations for ['Global:2:0', 'Global:2:1', 'Global:2:2', 'Global:2:3']\n", + "prior to merging, the max cut value of Global:2 is 17\n", + "Merging these solutions together for a solution to Global:2\n", + "Optimal parameters = [0.7856194864736818, -1.9635088789904906]\n", + "most_probable outcome = 01\n", + "Found max cut approximations for ['Global:0:0', 'Global:0:1', 'Global:0:2', 'Global:0:3']\n", + "prior to merging, the max cut value of Global:0 is 17\n", + "Merging these solutions together for a solution to Global:0\n", + "the merger algorithm produced a new coloring of Global:1 with cut value, 17\n", + "Working on finding max cut approximations for Global:5\n", + "Optimal parameters = [1.2882962455367613, -1.924510280150267]\n", + "most_probable outcome = 1001110\n", + "{'Global:1': '0110100011010', 'Global:5': '1001110'} 
sent by processor 1\n", + "the merger algorithm produced a new coloring of Global:0 with cut value, 19\n", + "Working on finding max cut approximations for Global:4\n", + "Optimal parameters = [0.3105695141678175, -1.9442869395474525]\n", + "most_probable outcome = 00001111\n", + "Received {'Global:1': '0110100011010', 'Global:5': '1001110'} from processor 1\n", + "the merger algorithm produced a new coloring of Global:2 with cut value, 17\n", + "Working on finding max cut approximations for Global:6\n", + "Optimal parameters = [2.0530338597775857, -0.3930276771684167]\n", + "most_probable outcome = 1000110\n", + "Received {'Global:2': '110111100000', 'Global:6': '1000110'} from processor 2\n", + "Received {'Global:3': '10010110'} from processor 3\n", + "The results dictionary on GPU 0 = {'Global:0': '010111001010100', 'Global:4': '00001111', 'Global:1': '0110100011010', 'Global:5': '1001110', 'Global:2': '110111100000', 'Global:6': '1000110', 'Global:3': '10010110'}\n", + "{'Global:2': '110111100000', 'Global:6': '1000110'} sent by processor 2\n", + "The divide-and-conquer QAOA unaltered cut approximation of the graph, prior to the final merge, is 136\n", + "The divide-and-conquer QAOA max cut approximation of the graph is 136\n", + "This compares to a few runs of the greedy modularity maximization algorithm gives an average approximate Max Cut value of 152.6\n", + "with approximations ranging from 148 to 156\n" + ] + } + ], + "source": [ + "# MPI call\n", + "print(sys.executable)\n", + "python_path = sys.executable\n", + "!mpiexec -np 4 --oversubscribe --allow-run-as-root {python_path} Example-04-Solution.py" + ] + }, + { + "cell_type": "markdown", + "id": "925a5b68", + "metadata": {}, + "source": [ + "## 4.5 Next" + ] + }, + { + "cell_type": "markdown", + "id": "45b67f41", + "metadata": {}, + "source": [ + "To learn more about CUDA Quantum, check out our online [tutorials](https://nvidia.github.io/cuda-quantum/latest/using/tutorials.html)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1541691b", + "metadata": {}, + "outputs": [], + "source": [ + "import IPython\n", + "app = IPython.Application.instance()\n", + "app.kernel.do_shutdown(True)" + ] + }, + { + "cell_type": "markdown", + "id": "3231f912", + "metadata": {}, + "source": [ + "![](images/nvidia-logo.png)" + ] } - }, - "outputs": [], - "source": [ - "# Solution\n", - "def cutvalue(G):\n", - " \"\"\"Returns the cut value of G based on the coloring of the nodes of G\n", - " \n", - " Parameters\n", - " ----------\n", - " G: networkX.Graph \n", - " Graph with weighted edges and with binary value colors assigned to the vertices\n", - " \n", - " Returns\n", - " -------\n", - " int \n", - " cut value of the graph determined by the vertex colors and edge weights\n", - " \"\"\" \n", - "\n", - " cut = 0\n", - " for u, v in G.edges():\n", - " if G.nodes[u]['color'] != G.nodes[v]['color']: \n", - " cut+=G.edges[u,v]['weight']\n", - " return cut" - ] - }, - { - "cell_type": "markdown", - "id": "699dd281", - "metadata": {}, - "source": [ - "## 4.4 Weighted Max Cut using a modified Divide-and-Conquer QAOA\n", - "\n", - "For the sake of time, we have added the modifications that were made in the exercises above to the [Example-04.py](Example-04.py) which calls up the example graph from [Lab 2](2_One-level-divide-and-conquer-QAOA.ipynb) with random weights assigned to the vertices. Run the MPI call below to see how the algorithm performs. You may notice the results are not competitive with the classical methods, as is. \n", - "\n", - "For the assessment, make modifications to the Example-04.py to improve performance by making some adjustments as discussed at the end of [Lab 3](3_Recursive-divide-and-conquer.ipynb). 
Here are a few recommendations:\n", - "\n", - "* Modify the layer count for the QAOA max cut (line 822) and the QAOA merger calls (line 505).\n", - "* Try different seeds to generate different initial parameters for the optimizer for the QAOA for max cut (line 823) and for the merger stage (line 507). Better yet, replace the random intitial parameters of the optimizer with the optimal parameters found in earlier runs of the algorithm. We've added a print command to [Example-04.py](Example-04.py) to view the optimal parameters of the max cut QAOA calls at each stage. For instance try initializing the optimzer with ( `[-1.8964004059756836, 1.0646218219788401]*layer_count`).\n", - "* Swap out the COYBLA optimizer with another [optimizer supported by CUDA Quantum](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.optimizers.optimizer) on line 184 and line 520. Depending on your choice of optimizer you may need to add in a variable for the gradient and make adjustments to the `vqe` calls (lines 191 and 526). \n", - "* Replace the QAOA kernel with a multi-angle kernel. In addition to editing the `kernel_qaoa` function (line 113), you will need to adjust the parameter_count variables (lines 181 and 340) accordingly.\n", - "\n", - "Feel free to experiment with one or all of these suggestions, or try out your own ideas! You can also play around with different graph instances by editing the lines 709 to 750. " - ] - }, - { - "cell_type": "markdown", - "id": "5d1d82ef", - "metadata": {}, - "source": [ - "**Important** Before proceeding, you will need to switch to a runtime with access to a GPU. If you do restart your kernel, make sure to reload the packages below. If you are running on Google Colab and switch to a GPU runtime, you'll need to reinstall CUDA-Q by commenting out the indicated code. 
" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "38b43bed", - "metadata": {}, - "outputs": [], - "source": [ - "# Instructions for Google Colab. You can ignore this cell if you already have cuda-q set up and are working in a GPU runtime\n", - "# with all the necessary files\n", - "# Run this cell in a GPU runtime\n", - "\n", - "#!pip install cudaq\n", - "\n", - "#!wget -q -O Example-04.py https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/main/qaoa-for-max-cut/Example-04.py\n", - "#!wget -q -O Example-04-Solution.py https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/main/qaoa-for-max-cut/Example-04-Solution.py" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b5584581", - "metadata": {}, - "outputs": [], - "source": [ - "#@title Execute this cell to reload the necessary packages\n", - "import networkx as nx\n", - "from networkx import algorithms\n", - "from networkx.algorithms import community\n", - "import cudaq\n", - "from cudaq import spin\n", - "from cudaq.qis import *\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "from typing import List\n", - "import sys" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "da5a0774", - "metadata": {}, - "outputs": [], - "source": [ - "#@title Execute this cell to install mpi4py if necessary\n", - "%pip install mpi4py" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "949fb85c", - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Subgraph problems to be computed on each processor have been assigned\n", - "Processor 1 received {'Global:1': , 'Global:5': } from processor 0\n", - "Processor 2 received {'Global:2': , 'Global:6': } from processor 0\n", - "Processor 3 received {'Global:3': } from processor 0\n", - "Working on finding max cut approximations for Global:3\n", - "Working on finding max cut approximations for Global:0:0\n", - 
"Working on finding max cut approximations for Global:1:0\n", - "Working on finding max cut approximations for Global:2:0\n", - "Optimal parameters = [1.9644984202286893, -0.39256359744504166]\n", - "most_probable outcome = 0011\n", - "Working on finding max cut approximations for Global:2:1\n", - "Optimal parameters = [-0.3831171294701392, 0.3930202244057288]\n", - "Optimal parameters = [-0.324086159500907, 0.3542926188419133]\n", - "most_probable outcome = 01110\n", - "Working on finding max cut approximations for Global:0:1\n", - "most_probable outcome = 01010\n", - "Working on finding max cut approximations for Global:1:1\n", - "Optimal parameters = [0.29862984672434045, -0.3484455261112559]\n", - "most_probable outcome = 10010110\n", - "{'Global:3': '10010110'} sent by processor 3\n", - "Optimal parameters = [1.9631271614779813, -0.3930417284990051]\n", - "most_probable outcome = 0011\n", - "Working on finding max cut approximations for Global:2:2\n", - "Optimal parameters = [1.9638723725453435, -0.3928639361315754]\n", - "most_probable outcome = 1001\n", - "Working on finding max cut approximations for Global:1:2\n", - "Optimal parameters = [1.9636066566612467, -0.39283580297617526]\n", - "most_probable outcome = 01001\n", - "Working on finding max cut approximations for Global:0:2\n", - "Optimal parameters = [0.7856194864736818, -1.9635088789904906]\n", - "most_probable outcome = 01\n", - "Working on finding max cut approximations for Global:2:3\n", - "Optimal parameters = [1.964490963939847, -0.39255038961423616]\n", - "most_probable outcome = 1001\n", - "Found max cut approximations for ['Global:1:0', 'Global:1:1', 'Global:1:2']\n", - "prior to merging, the max cut value of Global:1 is 16\n", - "Merging these solutions together for a solution to Global:1\n", - "Optimal parameters = [1.8786973859651952, -0.3076420518819305]\n", - "most_probable outcome = 101\n", - "Working on finding max cut approximations for Global:0:3\n", - "Optimal parameters = 
[0.7856194864736818, -1.9635088789904906]\n", - "most_probable outcome = 01\n", - "Found max cut approximations for ['Global:2:0', 'Global:2:1', 'Global:2:2', 'Global:2:3']\n", - "prior to merging, the max cut value of Global:2 is 17\n", - "Merging these solutions together for a solution to Global:2\n", - "Optimal parameters = [0.7856194864736818, -1.9635088789904906]\n", - "most_probable outcome = 01\n", - "Found max cut approximations for ['Global:0:0', 'Global:0:1', 'Global:0:2', 'Global:0:3']\n", - "prior to merging, the max cut value of Global:0 is 17\n", - "Merging these solutions together for a solution to Global:0\n", - "the merger algorithm produced a new coloring of Global:1 with cut value, 17\n", - "Working on finding max cut approximations for Global:5\n", - "Optimal parameters = [1.2882962455367613, -1.924510280150267]\n", - "most_probable outcome = 1001110\n", - "{'Global:1': '0110100011010', 'Global:5': '1001110'} sent by processor 1\n", - "the merger algorithm produced a new coloring of Global:0 with cut value, 19\n", - "Working on finding max cut approximations for Global:4\n", - "Optimal parameters = [0.3105695141678175, -1.9442869395474525]\n", - "most_probable outcome = 00001111\n", - "Received {'Global:1': '0110100011010', 'Global:5': '1001110'} from processor 1\n", - "the merger algorithm produced a new coloring of Global:2 with cut value, 17\n", - "Working on finding max cut approximations for Global:6\n", - "Optimal parameters = [2.0530338597775857, -0.3930276771684167]\n", - "most_probable outcome = 1000110\n", - "Received {'Global:2': '110111100000', 'Global:6': '1000110'} from processor 2\n", - "Received {'Global:3': '10010110'} from processor 3\n", - "The results dictionary on GPU 0 = {'Global:0': '010111001010100', 'Global:4': '00001111', 'Global:1': '0110100011010', 'Global:5': '1001110', 'Global:2': '110111100000', 'Global:6': '1000110', 'Global:3': '10010110'}\n", - "{'Global:2': '110111100000', 'Global:6': '1000110'} sent by 
processor 2\n", - "The divide-and-conquer QAOA unaltered cut approximation of the graph, prior to the final merge, is 136\n", - "The divide-and-conquer QAOA max cut approximation of the graph is 136\n", - "This compares to a few runs of the greedy modularity maximization algorithm gives an average approximate Max Cut value of 152.6\n", - "with approximations ranging from 148 to 156\n" - ] + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" } - ], - "source": [ - "# MPI call\n", - "print(sys.executable)\n", - "python_path = sys.executable\n", - "!mpiexec -np 4 --oversubscribe --allow-run-as-root {python_path} Example-04-Solution.py" - ] - }, - { - "cell_type": "markdown", - "id": "925a5b68", - "metadata": {}, - "source": [ - "## 4.5 Next" - ] - }, - { - "cell_type": "markdown", - "id": "45b67f41", - "metadata": {}, - "source": [ - "To learn more about CUDA Quantum, check out our online [tutorials](https://nvidia.github.io/cuda-quantum/latest/using/tutorials.html)." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1541691b", - "metadata": {}, - "outputs": [], - "source": [ - "import IPython\n", - "app = IPython.Application.instance()\n", - "app.kernel.do_shutdown(True)" - ] - }, - { - "cell_type": "markdown", - "id": "3231f912", - "metadata": {}, - "source": [ - "![](images/nvidia-logo.png)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.12" - } - }, - "nbformat": 4, - "nbformat_minor": 5 + "nbformat": 4, + "nbformat_minor": 5 } diff --git a/qaoa-for-max-cut/04_Assessment.ipynb b/qaoa-for-max-cut/04_Assessment.ipynb index 1ea7a33..af60751 100644 --- a/qaoa-for-max-cut/04_Assessment.ipynb +++ b/qaoa-for-max-cut/04_Assessment.ipynb @@ -1,554 +1,559 @@ { - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "1e406cfc", - "metadata": {}, - "outputs": [], - "source": [ - "# SPDX-License-Identifier: Apache-2.0 AND CC-BY-NC-4.0\n", - "#\n", - "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", - "# you may not use this file except in compliance with the License.\n", - "# You may obtain a copy of the License at\n", - "#\n", - "# http://www.apache.org/licenses/LICENSE-2.0\n", - "#\n", - "# Unless required by applicable law or agreed to in writing, software\n", - "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", - "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", - "# See the License for the specific language governing permissions and\n", - "# limitations under the License." 
- ] + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "1e406cfc", + "metadata": {}, + "outputs": [], + "source": [ + "# SPDX-License-Identifier: Apache-2.0 AND CC-BY-NC-4.0\n", + "#\n", + "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# http://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8b6dbe43", + "metadata": {}, + "outputs": [], + "source": [ + "# Instructions for Google Colab. You can ignore this cell if you have cuda-q set up.\n", + "# Run this portion of the notebook in a CPU runtime\n", + "# Uncomment the line below and execute the cell to install cuda-q\n", + "# !pip install cudaq\n", + "\n", + "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", + "#!unzip -q main.zip\n", + "#!mv cuda-q-academic-main/qaoa-for-max-cut/images ./images" + ] + }, + { + "cell_type": "markdown", + "id": "135f6e3e-e55f-4b5c-80cb-f8979bba94e0", + "metadata": {}, + "source": [ + "# Divide-and-Conquer Implementation of QAOA\n", + "## Lab 4 Assessment\n", + "$\n", + "\\renewcommand{\\ket}[1]{|{#1}\\rangle}\n", + "\\renewcommand{\\bra}[1]{\\langle{#1}|}\n", + "$" + ] + }, + { + "cell_type": "markdown", + "id": "fd6bc9e4", + "metadata": {}, + "source": [ + "## 4.1 Lab Description\n", + "\n", + "Congratulations on making it this far! 
We hope you enjoyed conquering a large max cut problem while picking up a few skills along the way.\n", + "\n", + "For this assessment, the challenge is to adapt the code that we have created for the max cut problem and apply it to the weighted max cut problem. As we described at the end of [Lab 3](3_Recursive-divide-and-conquer.ipynb), there are many options for coding QAOA that can improve performance and accuracy. We encourage you to experiment with at least one of these to achieve a max cut approximation of a weighted version of the example graph from [Lab 2](2_One-level-divide-and-conquer-QAOA.ipynb). We chose this moderately sized graph for the sake of time, but we do give you the option to experiment with other graphs.\n", + "\n", + "The learning objectives of this tutorial are:\n", + "* Execute the QAOA algorithm to find approximate max cuts of a given graph using CUDA Quantum\n", + "* Understand the limitations of the QAOA algorithm for solving max cut in the NISQ era \n", + "* Make adjustments to the divide-and-conquer QAOA algorithm through selection of initial parameter values, increased layers, choice of optimizer, or other methods\n", + "* Simulate quantum circuits in parallel on multiple GPUs to speed up overall run time using CUDA Quantum\n", + "\n", + "Let's get started! " + ] + }, + { + "cell_type": "markdown", + "id": "d3721df5", + "metadata": {}, + "source": [ + "## 4.2 Weighted Max Cut Problem" + ] + }, + { + "cell_type": "markdown", + "id": "71b602e0", + "metadata": {}, + "source": [ + "\n", + "The weighted max cut problem is a variation of the max cut problem. The weighted version of the problem aims to identify a partition of a graph's nodes into two sets which maximizes the sum of the weights of the edges between the two sets. We continue with the notation established in the previous labs. 
The only difference between this problem and the max cut problem from before is that we now want to maximize: \n", + "$$\\sum_{\\substack{(u,v)\\in E\\\\ u\\in V_0, v\\in V_1}}w_{u,v},$$\n", + "\n", + "where $w_{u,v}$ is the weight of the edge connecting vertex $u$ to $v$. As before $E$ is the set of the edges of the graph, and $V_0$ and $V_1$ define a partition of the vertices of the graph.\n" + ] + }, + { + "cell_type": "markdown", + "id": "458502bb", + "metadata": {}, + "source": [ + "## 4.3 Adapting our code from the previous labs" + ] + }, + { + "cell_type": "markdown", + "id": "2a462183", + "metadata": {}, + "source": [ + "We can use most of the code that we've already developed. There are a few changes that need to be made at the divide, conquer, and merge stages of the QAOA divide-and-conquer algorithm." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0016c6c5", + "metadata": {}, + "outputs": [], + "source": [ + "# Instructions for Google Colab. You can ignore this cell if you have cuda-q set up.\n", + "# Run this portion of the notebook in a GPU runtime \n", + "# Uncomment the line below and execute the cell to install cuda-q\n", + "# !pip install cudaq" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "6c4006e6", + "metadata": {}, + "outputs": [], + "source": [ + "# Necessary packages\n", + "import networkx as nx\n", + "from networkx import algorithms\n", + "from networkx.algorithms import community\n", + "import cudaq\n", + "from cudaq import spin\n", + "from cudaq.qis import *\n", + "## To install cudaq-solvers (if not already installed), uncomment and run:\n", + "## !pip install cudaq-solvers -q\n", + "## Note: cudaq-solvers requires libgfortran. 
If you see an ImportError, run:\n", + "## !apt-get install -y libgfortran5\n", + "import cudaq_solvers as solvers\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "from typing import List" + ] + }, + { + "cell_type": "markdown", + "id": "d173a9ba", + "metadata": {}, + "source": [ + "### 4.3.1 Divide\n", + "\n", + "Since we now have a weighted graph, we will want to take these weights into account when identifying the subgraph partition. We've made the adjustment to the `subgraph_partition` function below. This may produce a different partitioning of our weighted graph than if we had ignored the weights." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "fe4009ed", + "metadata": {}, + "outputs": [], + "source": [ + "def subgraphpartition(G,n, name, globalGraph):\n", + " \"\"\"Divide the graph up into at most n subgraphs\n", + " \n", + " Parameters\n", + " ----------\n", + " G: networkX.Graph \n", + " Graph that we want to subdivivde which lives inside of or is equatl to globalGraph\n", + " n : int\n", + " n is the maximum number of subgraphs in the partition\n", + " name : str\n", + " prefix for the graphs (in our case we'll use 'Global')\n", + " globalGraph: networkX.Graph\n", + " original problem graph\n", + " \n", + " Returns\n", + " -------\n", + " dict of str : networkX.Graph\n", + " Dictionary of networkX graphs with a string as the key\n", + " \"\"\"\n", + " greedy_partition = community.greedy_modularity_communities(G, weight='weight', resolution=1.1, cutoff=1, best_n=n)\n", + " number_of_subgraphs = len(greedy_partition)\n", + "\n", + " graph_dictionary = {}\n", + " graph_names=[]\n", + " for i in range(number_of_subgraphs):\n", + " subgraphname=name+':'+str(i)\n", + " graph_names.append(subgraphname)\n", + "\n", + " for i in range(number_of_subgraphs):\n", + " nodelist = sorted(list(greedy_partition[i]))\n", + " graph_dictionary[graph_names[i]] = nx.subgraph(globalGraph, nodelist)\n", + " \n", + " 
return(graph_dictionary) \n", + "\n" + ] + }, + { + "cell_type": "markdown", + "id": "e9d7744a", + "metadata": {}, + "source": [ + "### 4.3.2 Conquer\n", + "\n", + "To adapt the dividie-and-conquer QAOA algorithm to handle a weighted graph, we will need to change the Hamiltonian function. We refer you to section 1.4.1 of [Lab 1](01_Max-Cut-with-QAOA.ipynb) to derive the Hamiltonian for the weighted max cut problem. Below we've copied and adapted the code from the `hamiltonian_max_cut` function from the previous labs by adding a new function argument for the weights. You'll need to fix the indicated line of code to take into account the weights of the edges. \n", + "\n", + "HINT: You'll need to consider the weight of each edge, which we have computed for you in the `edge_weight` variable.\n", + "\n", + "**Exercise:** Edit the line commented with `###FIX_ME###`" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "4ff41e0c", + "metadata": {}, + "outputs": [], + "source": [ + "# Exercise\n", + "\n", + "def hamiltonian_max_cut(sources : List[int], targets : List[int], weights : List[float]): \n", + " \"\"\"Hamiltonian for finding the max cut for the graph with edges defined by the pairs generated by source and target edges\n", + " \n", + " Parameters\n", + " ----------\n", + " sources: List[int] \n", + " list of the source vertices for edges in the graph\n", + " targets: List[int]\n", + " list of the target vertices for the edges in the graph\n", + " weights : List[float]\n", + " list of the weight of the edge determined by the source and target with the same index\n", + " Returns\n", + " -------\n", + " cudaq.SpinOperator\n", + " Hamiltonian for finding the max cut of the graph defined by the given edges\n", + " \"\"\"\n", + " hamiltonian = 0\n", + " # Since our vertices may not be a list from 0 to n, or may not even be integers,\n", + " \n", + " for i in range(len(sources)):\n", + " # Add a term to the Hamiltonian for the edge (u,v)\n", + " qubitu = 
sources[i]\n", + " qubitv = targets[i]\n", + " edge_weight = weights[i]\n", + " hamiltonian += ##FIX_ME## \n", + " \n", + " return hamiltonian" + ] + }, + { + "cell_type": "markdown", + "id": "4420624a", + "metadata": {}, + "source": [ + "Since we've changed the function arguments for the `hamiltonian_max_cut` function, we've edited the code from the previous labs that calls this function." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "36167f37", + "metadata": {}, + "outputs": [], + "source": [ + "def find_optimal_parameters(G, layer_count, seed):\n", + " \"\"\"Function for finding the optimal parameters of QAOA for the max cut of a graph\n", + " Parameters\n", + " ----------\n", + " G: networkX graph \n", + " Problem graph whose max cut we aim to find\n", + " layer_count : int \n", + " Number of layers in the QAOA circuit\n", + " seed : int\n", + " Random seed for reproducibility of results\n", + " \n", + " Returns\n", + " -------\n", + " list[float]\n", + " Optimal parameters for the QAOA applied to the given graph G\n", + " \"\"\"\n", + " parameter_count: int = 2 * layer_count\n", + "\n", + " # Problem parameters\n", + " nodes = sorted(list(nx.nodes(G)))\n", + " qubit_src = []\n", + " qubit_tgt = []\n", + " weights = []\n", + " for u, v in nx.edges(G):\n", + " # We can use the index() command to read out the qubits associated with the vertex u and v.\n", + " qubit_src.append(nodes.index(u))\n", + " qubit_tgt.append(nodes.index(v))\n", + " weights.append(G.edges[u,v]['weight']) \n", + " # The number of qubits we'll need is the same as the number of vertices in our graph\n", + " qubit_count : int = len(nodes)\n", + " # Each layer of the QAOA kernel contains 2 parameters\n", + " parameter_count : int = 2*layer_count\n", + " \n", + " np.random.seed(seed)\n", + " cudaq.set_random_seed(seed)\n", + " initial_parameters = np.random.uniform(-np.pi, np.pi, parameter_count).tolist()\n", + "\n", + " optimal_expectation, optimal_parameters, _ = 
solvers.vqe(\n", + " lambda thetas: kernel_qaoa(qubit_count, layer_count, qubit_src, qubit_tgt, thetas),\n", + " hamiltonian_max_cut(qubit_src, qubit_tgt, weights),\n", + " initial_parameters,\n", + " optimizer='cobyla')\n", + "\n", + " return optimal_parameters" + ] + }, + { + "cell_type": "markdown", + "id": "37ba4faf", + "metadata": {}, + "source": [ + "### 4.3.3 Merge\n", + "\n", + "The weights of the edges between subgraphs will impact the merger stage of the algorithm as well. \n", + "\n", + "**Exercise:** Edit the code block by replacing `FIX_ME` with the appropriate values to compute the penalties associated with each edge of the merger graph." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "29c3055f", + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "# Exercise\n", + "# Compute the penalties for edges in the supplied mergerGraph\n", + "# for the subgraph partitioning of graph G\n", + "def merger_graph_penalties(mergerGraph, subgraph_dictionary, G):\n", + " \"\"\"Compute penalties for the edges in the mergerGraph and add them\n", + " as edge attributes.\n", + " \n", + " Parameters\n", + " ----------\n", + " mergerGraph : networkX.Graph \n", + " Graph of connections between vertices in distinct subgraphs of G\n", + " subgraph_dictionary : dict of networkX graph with str as keys \n", + " subgraphs of G that are represented as nodes in the mergerGraph\n", + " G : networkX.Graph\n", + " graph whose vertices has an attribute 'color'\n", + " \n", + " Returns\n", + " -------\n", + " networkX.Graph\n", + " Merger graph containing penalties\n", + " \"\"\" \n", + " nx.set_edge_attributes(mergerGraph, int(0), 'penalty')\n", + " for i, j in mergerGraph.edges():\n", + " penalty_ij = 0\n", + " for u in nx.nodes(subgraph_dictionary[i]):\n", + " for neighbor_u in nx.all_neighbors(G, u):\n", + " if neighbor_u in nx.nodes(subgraph_dictionary[j]):\n", + " if G.nodes[u]['color'] != G.nodes[neighbor_u]['color']:\n", + " penalty_ij += ### 
FIX_ME\n", + " else:\n", + " penalty_ij += ### FIX_ME\n", + " mergerGraph[i][j]['penalty'] = penalty_ij\n", + " return mergerGraph" + ] + }, + { + "cell_type": "markdown", + "id": "85f53c2e", + "metadata": {}, + "source": [ + "Finally, since our cut value now depends on the weight of the edges, we will need to edit the `cutvalue` function that comptues the cut of the graph based on the coloring of the nodes.\n", + "\n", + "**Exercise:** Edit the `FIX_ME` line of the code block below." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "033b9f97", + "metadata": {}, + "outputs": [], + "source": [ + "# Exercise\n", + "def cutvalue(G):\n", + " \"\"\"Returns the cut value of G based on the coloring of the nodes of G\n", + " \n", + " Parameters\n", + " ----------\n", + " G: networkX.Graph \n", + " Graph with weighted edges and with binary value colors assigned to the vertices \n", + " \n", + " Returns\n", + " -------\n", + " int\n", + " cut value of the graph determined by the vertex colors and edge weights\n", + " \"\"\" \n", + " cut = 0\n", + " for u, v in G.edges():\n", + " if G.nodes[u]['color'] != G.nodes[v]['color']: \n", + " cut+=##FIX_ME\n", + " return cut" + ] + }, + { + "cell_type": "markdown", + "id": "699dd281", + "metadata": {}, + "source": [ + "## 4.4 Weighted Max Cut using a modified Divide-and-Conquer QAOA\n", + "\n", + "If you have not already done so, download the Example-04.py from the repository and save it to your working directory. Add the modifications that were made in the exercises above to the [Example-04.py](https://github.com/NVIDIA/cuda-q-academic/blob/main/qaoa-for-max-cut/Example-04.py) which calls up the example graph from [Lab 2](2_One-level-divide-and-conquer-QAOA.ipynb) with random weights assigned to the vertices. In particular fill in your code between the lines `# Edit the code above` and `# Edit the code below` for the functions: `hamiltonian_max_cut`, `merger_graph_penalties`, and `cutvalue`. 
Make sure to save the file. Run the MPI call below to see how the algorithm performs. You may notice the results are not competitive with the classical methods, as is. \n", + "\n", + "For the assessment, make modifications to the Example-04.py to improve performance by making some adjustments as discussed at the end of [Lab 3](3_Recursive-divide-and-conquer.ipynb). Here are a few recommendations:\n", + "\n", + "* Modify the layer count for the QAOA max cut (line 822) and the QAOA merger calls (line 505).\n", + "* Try different seeds to generate different initial parameters for the optimizer for the QAOA for max cut (line 823) and for the merger stage (line 507). Better yet, replace the random intitial parameters of the optimizer with the optimal parameters found in earlier runs of the algorithm. We've added a print command to [Example-04.py](Example-04.py) to view the optimal parameters of the max cut QAOA calls at each stage. For instance try initializing the optimzer with ( `[-1.8964004059756836, 1.0646218219788401]*layer_count`).\n", + "* Swap out the COBYLA optimizer with another optimizer supported by [CUDA-QX Solvers](https://nvidia.github.io/cudaqx/components/solvers/introduction.html) (e.g. `'l-bfgs-b'`) in the `solvers.vqe` calls. Depending on your choice of optimizer you may need to add a `gradient` parameter (e.g. `gradient='parameter_shift'`). \n", + "* Replace the QAOA kernel with a multi-angle kernel. In addition to editing the `kernel_qaoa` function (line 113), you will need to adjust the parameter_count variables (lines 181 and 340) accordingly.\n", + "\n", + "Feel free to experiment with one or all of these suggestions, or try out your own ideas! You can also play around with different graph instances by editing the lines 709 to 750. " + ] + }, + { + "cell_type": "markdown", + "id": "cf5e3d75", + "metadata": {}, + "source": [ + "**Important** Before proceeding, you will need to switch to a runtime with access to a GPU. 
If you do restart your kernel, make sure to reload the packages below. If you are running on Google Colab and switch to a GPU runtime, you'll need to reinstall CUDA-Q by uncommenting and running the indicated code. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "497318c8", + "metadata": {}, + "outputs": [], + "source": [ + "# Instructions for Google Colab. You can ignore this cell if you already have cuda-q set up and are working in a GPU runtime\n", + "# with all the necessary files\n", + "# Run this cell in a GPU runtime\n", + "\n", + "#!pip install cudaq\n", + "\n", + "#!wget -q -O Example-04.py https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/main/qaoa-for-max-cut/Example-04.py" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d47a62bd", + "metadata": {}, + "outputs": [], + "source": [ + "#@title Execute this cell to reload the necessary packages\n", + "import networkx as nx\n", + "from networkx import algorithms\n", + "from networkx.algorithms import community\n", + "import cudaq\n", + "from cudaq import spin\n", + "from cudaq.qis import *\n", + "## To install cudaq-solvers (if not already installed), uncomment and run:\n", + "## !pip install cudaq-solvers -q\n", + "## Note: cudaq-solvers requires libgfortran. 
If you see an ImportError, run:\n", + "## !apt-get install -y libgfortran5\n", + "import cudaq_solvers as solvers\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "from typing import List\n", + "import sys" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b61327d4", + "metadata": {}, + "outputs": [], + "source": [ + "#@title Execute this cell to install mpi4py if necessary\n", + "%pip install mpi4py" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "949fb85c", + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "# MPI call\n", + "print(sys.executable)\n", + "python_path = sys.executable\n", + "!mpiexec -np 4 --oversubscribe --allow-run-as-root {python_path} Example-04.py" + ] + }, + { + "cell_type": "markdown", + "id": "925a5b68", + "metadata": {}, + "source": [ + "## 4.5 Next" + ] + }, + { + "cell_type": "markdown", + "id": "45b67f41", + "metadata": {}, + "source": [ + "To learn more about CUDA Quantum, check out our online [tutorials](https://nvidia.github.io/cuda-quantum/latest/using/tutorials.html)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1541691b", + "metadata": {}, + "outputs": [], + "source": [ + "import IPython\n", + "app = IPython.Application.instance()\n", + "app.kernel.do_shutdown(True)" + ] + }, + { + "cell_type": "markdown", + "id": "3231f912", + "metadata": {}, + "source": [ + "![](images/nvidia-logo.png)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } }, - { - "cell_type": "code", - "execution_count": null, - "id": "8b6dbe43", - "metadata": {}, - "outputs": [], - "source": [ - "# Instructions for Google Colab. You can ignore this cell if you have cuda-q set up.\n", - "# Run this portion of the notebook in a CPU runtime\n", - "# Uncomment the line below and execute the cell to install cuda-q\n", - "# !pip install cudaq\n", - "\n", - "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", - "#!unzip -q main.zip\n", - "#!mv cuda-q-academic-main/qaoa-for-max-cut/images ./images" - ] - }, - { - "cell_type": "markdown", - "id": "135f6e3e-e55f-4b5c-80cb-f8979bba94e0", - "metadata": {}, - "source": [ - "# Divide-and-Conquer Implementation of QAOA\n", - "## Lab 4 Assessment\n", - "$\n", - "\\renewcommand{\\ket}[1]{|{#1}\\rangle}\n", - "\\renewcommand{\\bra}[1]{\\langle{#1}|}\n", - "$" - ] - }, - { - "cell_type": "markdown", - "id": "fd6bc9e4", - "metadata": {}, - "source": [ - "## 4.1 Lab Description\n", - "\n", - "Congratulations on making it this far! 
We hope you enjoyed conquering a large max cut problem while picking up a few skills along the way.\n", - "\n", - "For this assessment, the challenge is to adapt the code that we have created for the max cut problem and apply it to the weighted max cut problem. As we described at the end of [Lab 3](3_Recursive-divide-and-conquer.ipynb), there are many options for coding QAOA that can improve performance and accuracy. We encourage you to experiment with at least one of these to achieve a max cut approximation of a weighted version of the example graph from [Lab 2](2_One-level-divide-and-conquer-QAOA.ipynb). We chose this moderately sized graph for the sake of time, but we do give you the option to experiment with other graphs.\n", - "\n", - "The learning objectives of this tutorial are:\n", - "* Execute the QAOA algorithm to find approximate max cuts of a given graph using CUDA Quantum\n", - "* Understand the limitations of the QAOA algorithm for solving max cut in the NISQ era \n", - "* Make adjustments to the divide-and-conquer QAOA algorithm through selection of initial parameter values, increased layers, choice of optimizer, or other methods\n", - "* Simulate quantum circuits in parallel on multiple GPUs to speed up overall run time using CUDA Quantum\n", - "\n", - "Let's get started! " - ] - }, - { - "cell_type": "markdown", - "id": "d3721df5", - "metadata": {}, - "source": [ - "## 4.2 Weighted Max Cut Problem" - ] - }, - { - "cell_type": "markdown", - "id": "71b602e0", - "metadata": {}, - "source": [ - "\n", - "The weighted max cut problem is a variation of the max cut problem. The weighted version of the problem aims to identify a partition of a graph's nodes into two sets which maximizes the sum of the weights of the edges between the two sets. We continue with the notation established in the previous labs. 
The only difference between this problem and the max cut problem from before is that we now want to maximize: \n", - "$$\\sum_{\\substack{(u,v)\\in E\\\\ u\\in V_0, v\\in V_1}}w_{u,v},$$\n", - "\n", - "where $w_{u,v}$ is the weight of the edge connecting vertex $u$ to $v$. As before $E$ is the set of the edges of the graph, and $V_0$ and $V_1$ define a partition of the vertices of the graph.\n" - ] - }, - { - "cell_type": "markdown", - "id": "458502bb", - "metadata": {}, - "source": [ - "## 4.3 Adapting our code from the previous labs" - ] - }, - { - "cell_type": "markdown", - "id": "2a462183", - "metadata": {}, - "source": [ - "We can use most of the code that we've already developed. There are a few changes that need to be made at the divide, conquer, and merge stages of the QAOA divide-and-conquer algorithm." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0016c6c5", - "metadata": {}, - "outputs": [], - "source": [ - "# Instructions for Google Colab. You can ignore this cell if you have cuda-q set up.\n", - "# Run this portion of the notebook in a GPU runtime \n", - "# Uncomment the line below and execute the cell to install cuda-q\n", - "# !pip install cudaq" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "6c4006e6", - "metadata": {}, - "outputs": [], - "source": [ - "# Necessary packages\n", - "import networkx as nx\n", - "from networkx import algorithms\n", - "from networkx.algorithms import community\n", - "import cudaq\n", - "from cudaq import spin\n", - "from cudaq.qis import *\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "from typing import List" - ] - }, - { - "cell_type": "markdown", - "id": "d173a9ba", - "metadata": {}, - "source": [ - "### 4.3.1 Divide\n", - "\n", - "Since we now have a weighted graph, we will want to take these weights into account when identifying the subgraph partition. We've made the adjustment to the `subgraph_partition` function below. 
This may produce a different partitioning of our weighted graph than if we had ignored the weights." - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "fe4009ed", - "metadata": {}, - "outputs": [], - "source": [ - "def subgraphpartition(G,n, name, globalGraph):\n", - " \"\"\"Divide the graph up into at most n subgraphs\n", - " \n", - " Parameters\n", - " ----------\n", - " G: networkX.Graph \n", - " Graph that we want to subdivivde which lives inside of or is equatl to globalGraph\n", - " n : int\n", - " n is the maximum number of subgraphs in the partition\n", - " name : str\n", - " prefix for the graphs (in our case we'll use 'Global')\n", - " globalGraph: networkX.Graph\n", - " original problem graph\n", - " \n", - " Returns\n", - " -------\n", - " dict of str : networkX.Graph\n", - " Dictionary of networkX graphs with a string as the key\n", - " \"\"\"\n", - " greedy_partition = community.greedy_modularity_communities(G, weight='weight', resolution=1.1, cutoff=1, best_n=n)\n", - " number_of_subgraphs = len(greedy_partition)\n", - "\n", - " graph_dictionary = {}\n", - " graph_names=[]\n", - " for i in range(number_of_subgraphs):\n", - " subgraphname=name+':'+str(i)\n", - " graph_names.append(subgraphname)\n", - "\n", - " for i in range(number_of_subgraphs):\n", - " nodelist = sorted(list(greedy_partition[i]))\n", - " graph_dictionary[graph_names[i]] = nx.subgraph(globalGraph, nodelist)\n", - " \n", - " return(graph_dictionary) \n", - "\n" - ] - }, - { - "cell_type": "markdown", - "id": "e9d7744a", - "metadata": {}, - "source": [ - "### 4.3.2 Conquer\n", - "\n", - "To adapt the dividie-and-conquer QAOA algorithm to handle a weighted graph, we will need to change the Hamiltonian function. We refer you to section 1.4.1 of [Lab 1](01_Max-Cut-with-QAOA.ipynb) to derive the Hamiltonian for the weighted max cut problem. 
Below we've copied and adapted the code from the `hamiltonian_max_cut` function from the previous labs by adding a new function argument for the weights. You'll need to fix the indicated line of code to take into account the weights of the edges. \n", - "\n", - "HINT: You'll need to consider the weight of each edge, which we have computed for you in the `edge_weight` variable.\n", - "\n", - "**Exercise:** Edit the line commented with `###FIX_ME###`" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "4ff41e0c", - "metadata": {}, - "outputs": [], - "source": [ - "# Exercise\n", - "\n", - "def hamiltonian_max_cut(sources : List[int], targets : List[int], weights : List[float]): \n", - " \"\"\"Hamiltonian for finding the max cut for the graph with edges defined by the pairs generated by source and target edges\n", - " \n", - " Parameters\n", - " ----------\n", - " sources: List[int] \n", - " list of the source vertices for edges in the graph\n", - " targets: List[int]\n", - " list of the target vertices for the edges in the graph\n", - " weights : List[float]\n", - " list of the weight of the edge determined by the source and target with the same index\n", - " Returns\n", - " -------\n", - " cudaq.SpinOperator\n", - " Hamiltonian for finding the max cut of the graph defined by the given edges\n", - " \"\"\"\n", - " hamiltonian = 0\n", - " # Since our vertices may not be a list from 0 to n, or may not even be integers,\n", - " \n", - " for i in range(len(sources)):\n", - " # Add a term to the Hamiltonian for the edge (u,v)\n", - " qubitu = sources[i]\n", - " qubitv = targets[i]\n", - " edge_weight = weights[i]\n", - " hamiltonian += ##FIX_ME## \n", - " \n", - " return hamiltonian" - ] - }, - { - "cell_type": "markdown", - "id": "4420624a", - "metadata": {}, - "source": [ - "Since we've changed the function arguments for the `hamiltonian_max_cut` function, we've edited the code from the previous labs that calls this function." 
- ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "36167f37", - "metadata": {}, - "outputs": [], - "source": [ - "def find_optimal_parameters(G, layer_count, seed):\n", - " \"\"\"Function for finding the optimal parameters of QAOA for the max cut of a graph\n", - " Parameters\n", - " ----------\n", - " G: networkX graph \n", - " Problem graph whose max cut we aim to find\n", - " layer_count : int \n", - " Number of layers in the QAOA circuit\n", - " seed : int\n", - " Random seed for reproducibility of results\n", - " \n", - " Returns\n", - " -------\n", - " list[float]\n", - " Optimal parameters for the QAOA applied to the given graph G\n", - " \"\"\"\n", - " parameter_count: int = 2 * layer_count\n", - "\n", - " # Problem parameters\n", - " nodes = sorted(list(nx.nodes(G)))\n", - " qubit_src = []\n", - " qubit_tgt = []\n", - " weights = []\n", - " for u, v in nx.edges(G):\n", - " # We can use the index() command to read out the qubits associated with the vertex u and v.\n", - " qubit_src.append(nodes.index(u))\n", - " qubit_tgt.append(nodes.index(v))\n", - " weights.append(G.edges[u,v]['weight']) \n", - " # The number of qubits we'll need is the same as the number of vertices in our graph\n", - " qubit_count : int = len(nodes)\n", - " # Each layer of the QAOA kernel contains 2 parameters\n", - " parameter_count : int = 2*layer_count\n", - " \n", - " # Specify the optimizer and its initial parameters. 
\n", - " optimizer = cudaq.optimizers.COBYLA()\n", - " np.random.seed(seed)\n", - " cudaq.set_random_seed(seed)\n", - " optimizer.initial_parameters = np.random.uniform(-np.pi, np.pi,\n", - " parameter_count) \n", - "\n", - " # Pass the kernel, spin operator, and optimizer to `cudaq.vqe`.\n", - " optimal_expectation, optimal_parameters = cudaq.vqe(\n", - " kernel=kernel_qaoa,\n", - " spin_operator=hamiltonian_max_cut(qubit_src, qubit_tgt, weights),\n", - " argument_mapper=lambda parameter_vector: (qubit_count, layer_count, qubit_src, qubit_tgt, parameter_vector),\n", - " optimizer=optimizer,\n", - " parameter_count=parameter_count)\n", - "\n", - " return optimal_parameters" - ] - }, - { - "cell_type": "markdown", - "id": "37ba4faf", - "metadata": {}, - "source": [ - "### 4.3.3 Merge\n", - "\n", - "The weights of the edges between subgraphs will impact the merger stage of the algorithm as well. \n", - "\n", - "**Exercise:** Edit the code block by replacing `FIX_ME` with the appropriate values to compute the penalties associated with each edge of the merger graph." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "29c3055f", - "metadata": {}, - "outputs": [], - "source": [ - "\n", - "# Exercise\n", - "# Compute the penalties for edges in the supplied mergerGraph\n", - "# for the subgraph partitioning of graph G\n", - "def merger_graph_penalties(mergerGraph, subgraph_dictionary, G):\n", - " \"\"\"Compute penalties for the edges in the mergerGraph and add them\n", - " as edge attributes.\n", - " \n", - " Parameters\n", - " ----------\n", - " mergerGraph : networkX.Graph \n", - " Graph of connections between vertices in distinct subgraphs of G\n", - " subgraph_dictionary : dict of networkX graph with str as keys \n", - " subgraphs of G that are represented as nodes in the mergerGraph\n", - " G : networkX.Graph\n", - " graph whose vertices has an attribute 'color'\n", - " \n", - " Returns\n", - " -------\n", - " networkX.Graph\n", - " Merger graph containing penalties\n", - " \"\"\" \n", - " nx.set_edge_attributes(mergerGraph, int(0), 'penalty')\n", - " for i, j in mergerGraph.edges():\n", - " penalty_ij = 0\n", - " for u in nx.nodes(subgraph_dictionary[i]):\n", - " for neighbor_u in nx.all_neighbors(G, u):\n", - " if neighbor_u in nx.nodes(subgraph_dictionary[j]):\n", - " if G.nodes[u]['color'] != G.nodes[neighbor_u]['color']:\n", - " penalty_ij += ### FIX_ME\n", - " else:\n", - " penalty_ij += ### FIX_ME\n", - " mergerGraph[i][j]['penalty'] = penalty_ij\n", - " return mergerGraph" - ] - }, - { - "cell_type": "markdown", - "id": "85f53c2e", - "metadata": {}, - "source": [ - "Finally, since our cut value now depends on the weight of the edges, we will need to edit the `cutvalue` function that comptues the cut of the graph based on the coloring of the nodes.\n", - "\n", - "**Exercise:** Edit the `FIX_ME` line of the code block below." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "033b9f97", - "metadata": {}, - "outputs": [], - "source": [ - "# Exercise\n", - "def cutvalue(G):\n", - " \"\"\"Returns the cut value of G based on the coloring of the nodes of G\n", - " \n", - " Parameters\n", - " ----------\n", - " G: networkX.Graph \n", - " Graph with weighted edges and with binary value colors assigned to the vertices \n", - " \n", - " Returns\n", - " -------\n", - " int\n", - " cut value of the graph determined by the vertex colors and edge weights\n", - " \"\"\" \n", - " cut = 0\n", - " for u, v in G.edges():\n", - " if G.nodes[u]['color'] != G.nodes[v]['color']: \n", - " cut+=##FIX_ME\n", - " return cut" - ] - }, - { - "cell_type": "markdown", - "id": "699dd281", - "metadata": {}, - "source": [ - "## 4.4 Weighted Max Cut using a modified Divide-and-Conquer QAOA\n", - "\n", - "If you have not already done so, download the Example-04.py from the repository and save it to your working directory. Add the modifications that were made in the exercises above to the [Example-04.py](https://github.com/NVIDIA/cuda-q-academic/blob/main/qaoa-for-max-cut/Example-04.py) which calls up the example graph from [Lab 2](2_One-level-divide-and-conquer-QAOA.ipynb) with random weights assigned to the vertices. In particular fill in your code between the lines `# Edit the code above` and `# Edit the code below` for the functions: `hamiltonian_max_cut`, `merger_graph_penalties`, and `cutvalue`. Make sure to save the file. Run the MPI call below to see how the algorithm performs. You may notice the results are not competitive with the classical methods, as is. \n", - "\n", - "For the assessment, make modifications to the Example-04.py to improve performance by making some adjustments as discussed at the end of [Lab 3](3_Recursive-divide-and-conquer.ipynb). 
Here are a few recommendations:\n", - "\n", - "* Modify the layer count for the QAOA max cut (line 822) and the QAOA merger calls (line 505).\n", - "* Try different seeds to generate different initial parameters for the optimizer for the QAOA for max cut (line 823) and for the merger stage (line 507). Better yet, replace the random intitial parameters of the optimizer with the optimal parameters found in earlier runs of the algorithm. We've added a print command to [Example-04.py](Example-04.py) to view the optimal parameters of the max cut QAOA calls at each stage. For instance try initializing the optimzer with ( `[-1.8964004059756836, 1.0646218219788401]*layer_count`).\n", - "* Swap out the COYBLA optimizer with another [optimizer supported by CUDA Quantum](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.optimizers.optimizer) on line 184 and line 520. Depending on your choice of optimizer you may need to add in a variable for the gradient and make adjustments to the `vqe` calls (lines 191 and 526). \n", - "* Replace the QAOA kernel with a multi-angle kernel. In addition to editing the `kernel_qaoa` function (line 113), you will need to adjust the parameter_count variables (lines 181 and 340) accordingly.\n", - "\n", - "Feel free to experiment with one or all of these suggestions, or try out your own ideas! You can also play around with different graph instances by editing the lines 709 to 750. " - ] - }, - { - "cell_type": "markdown", - "id": "cf5e3d75", - "metadata": {}, - "source": [ - "**Important** Before proceeding, you will need to switch to a runtime with access to a GPU. If you do restart your kernel, make sure to reload the packages below. If you are running on Google Colab and switch to a GPU runtime, you'll need to reinstall CUDA-Q by commenting out the indicated code. 
" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "497318c8", - "metadata": {}, - "outputs": [], - "source": [ - "# Instructions for Google Colab. You can ignore this cell if you already have cuda-q set up and are working in a GPU runtime\n", - "# with all the necessary files\n", - "# Run this cell in a GPU runtime\n", - "\n", - "#!pip install cudaq\n", - "\n", - "#!wget -q -O Example-04.py https://raw.githubusercontent.com/NVIDIA/cuda-q-academic/main/qaoa-for-max-cut/Example-04.py" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d47a62bd", - "metadata": {}, - "outputs": [], - "source": [ - "#@title Execute this cell to reload the necessary packages\n", - "import networkx as nx\n", - "from networkx import algorithms\n", - "from networkx.algorithms import community\n", - "import cudaq\n", - "from cudaq import spin\n", - "from cudaq.qis import *\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "from typing import List\n", - "import sys" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b61327d4", - "metadata": {}, - "outputs": [], - "source": [ - "#@title Execute this cell to install mpi4py if necessary\n", - "%pip install mpi4py" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "949fb85c", - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "# MPI call\n", - "print(sys.executable)\n", - "python_path = sys.executable\n", - "!mpiexec -np 4 --oversubscribe --allow-run-as-root {python_path} Example-04.py" - ] - }, - { - "cell_type": "markdown", - "id": "925a5b68", - "metadata": {}, - "source": [ - "## 4.5 Next" - ] - }, - { - "cell_type": "markdown", - "id": "45b67f41", - "metadata": {}, - "source": [ - "To learn more about CUDA Quantum, check out our online [tutorials](https://nvidia.github.io/cuda-quantum/latest/using/tutorials.html)." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1541691b", - "metadata": {}, - "outputs": [], - "source": [ - "import IPython\n", - "app = IPython.Application.instance()\n", - "app.kernel.do_shutdown(True)" - ] - }, - { - "cell_type": "markdown", - "id": "3231f912", - "metadata": {}, - "source": [ - "![](images/nvidia-logo.png)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.12" - } - }, - "nbformat": 4, - "nbformat_minor": 5 + "nbformat": 4, + "nbformat_minor": 5 } diff --git a/qaoa-for-max-cut/Example-02-step-2-Solution.py b/qaoa-for-max-cut/Example-02-step-2-Solution.py index d45e674..e12b741 100644 --- a/qaoa-for-max-cut/Example-02-step-2-Solution.py +++ b/qaoa-for-max-cut/Example-02-step-2-Solution.py @@ -18,6 +18,7 @@ from networkx import algorithms from networkx.algorithms import community import cudaq +import cudaq_solvers as solvers from cudaq import spin from cudaq.qis import * import numpy as np @@ -253,19 +254,17 @@ def find_optimal_parameters(G, layer_count, seed): # Each layer of the QAOA kernel contains 2 parameters parameter_count : int = 2*layer_count - # Specify the optimizer and its initial parameters. - optimizer = cudaq.optimizers.COBYLA() + # Specify the initial parameters. np.random.seed(seed) - optimizer.initial_parameters = np.random.uniform(-np.pi, np.pi, - parameter_count) - - # Pass the kernel, spin operator, and optimizer to `cudaq.vqe`. 
- optimal_expectation, optimal_parameters = cudaq.vqe( - kernel=kernel_qaoa, - spin_operator=hamiltonian_max_cut(qubit_src, qubit_tgt), - argument_mapper=lambda parameter_vector: (qubit_count, layer_count, qubit_src, qubit_tgt, parameter_vector), - optimizer=optimizer, - parameter_count=parameter_count) + initial_parameters = np.random.uniform(-np.pi, np.pi, + parameter_count).tolist() + + # Pass the kernel, spin operator, and optimizer to `solvers.vqe`. + optimal_expectation, optimal_parameters, _ = solvers.vqe( + lambda thetas: kernel_qaoa(qubit_count, layer_count, qubit_src, qubit_tgt, thetas), + hamiltonian_max_cut(qubit_src, qubit_tgt), + initial_parameters, + optimizer='cobyla') return optimal_parameters def qaoa_for_graph(G, layer_count, shots, seed): diff --git a/qaoa-for-max-cut/Example-02-step-2.py b/qaoa-for-max-cut/Example-02-step-2.py index 1dedcae..4760bb1 100644 --- a/qaoa-for-max-cut/Example-02-step-2.py +++ b/qaoa-for-max-cut/Example-02-step-2.py @@ -18,6 +18,7 @@ from networkx import algorithms from networkx.algorithms import community import cudaq +import cudaq_solvers as solvers from cudaq import spin from cudaq.qis import * import numpy as np @@ -253,19 +254,17 @@ def find_optimal_parameters(G, layer_count, seed): # Each layer of the QAOA kernel contains 2 parameters parameter_count : int = 2*layer_count - # Specify the optimizer and its initial parameters. - optimizer = cudaq.optimizers.COBYLA() + # Specify the initial parameters. np.random.seed(seed) - optimizer.initial_parameters = np.random.uniform(-np.pi, np.pi, - parameter_count) - - # Pass the kernel, spin operator, and optimizer to `cudaq.vqe`. 
- optimal_expectation, optimal_parameters = cudaq.vqe( - kernel=kernel_qaoa, - spin_operator=hamiltonian_max_cut(qubit_src, qubit_tgt), - argument_mapper=lambda parameter_vector: (qubit_count, layer_count, qubit_src, qubit_tgt, parameter_vector), - optimizer=optimizer, - parameter_count=parameter_count) + initial_parameters = np.random.uniform(-np.pi, np.pi, + parameter_count).tolist() + + # Pass the kernel, spin operator, and optimizer to `solvers.vqe`. + optimal_expectation, optimal_parameters, _ = solvers.vqe( + lambda thetas: kernel_qaoa(qubit_count, layer_count, qubit_src, qubit_tgt, thetas), + hamiltonian_max_cut(qubit_src, qubit_tgt), + initial_parameters, + optimizer='cobyla') return optimal_parameters def qaoa_for_graph(G, layer_count, shots, seed): diff --git a/qaoa-for-max-cut/Example-02-step-3-Solution.py b/qaoa-for-max-cut/Example-02-step-3-Solution.py index 2b08ad4..bda97b2 100644 --- a/qaoa-for-max-cut/Example-02-step-3-Solution.py +++ b/qaoa-for-max-cut/Example-02-step-3-Solution.py @@ -18,6 +18,7 @@ from networkx import algorithms from networkx.algorithms import community import cudaq +import cudaq_solvers as solvers from cudaq import spin from cudaq.qis import * import numpy as np @@ -255,19 +256,17 @@ def find_optimal_parameters(G, layer_count, seed): # Each layer of the QAOA kernel contains 2 parameters parameter_count : int = 2*layer_count - # Specify the optimizer and its initial parameters. - optimizer = cudaq.optimizers.COBYLA() + # Specify the initial parameters. np.random.seed(seed) - optimizer.initial_parameters = np.random.uniform(-np.pi, np.pi, - parameter_count) + initial_parameters = np.random.uniform(-np.pi, np.pi, + parameter_count).tolist() - # Pass the kernel, spin operator, and optimizer to `cudaq.vqe`. 
- optimal_expectation, optimal_parameters = cudaq.vqe( - kernel=kernel_qaoa, - spin_operator=hamiltonian_max_cut(qubit_src, qubit_tgt), - argument_mapper=lambda parameter_vector: (qubit_count, layer_count, qubit_src, qubit_tgt, parameter_vector), - optimizer=optimizer, - parameter_count=parameter_count) + # Pass the kernel, spin operator, and optimizer to `solvers.vqe`. + optimal_expectation, optimal_parameters, _ = solvers.vqe( + lambda thetas: kernel_qaoa(qubit_count, layer_count, qubit_src, qubit_tgt, thetas), + hamiltonian_max_cut(qubit_src, qubit_tgt), + initial_parameters, + optimizer='cobyla') return optimal_parameters def qaoa_for_graph(G, layer_count, shots, seed): @@ -529,13 +528,11 @@ def createMergerGraph(border, subgraphs): layer_count_merger = 1 # set arbitrarily parameter_count_merger: int = 2 * layer_count_merger - # Specify the optimizer and its initial parameters. Make it repeatable. + # Specify the initial parameters. Make it repeatable. cudaq.set_random_seed(101) - optimizer_merger = cudaq.optimizers.COBYLA() np.random.seed(101) - optimizer_merger.initial_parameters = np.random.uniform(-np.pi, np.pi, - parameter_count_merger) - optimizer_merger.max_iterations=150 + initial_parameters_merger = np.random.uniform(-np.pi, np.pi, + parameter_count_merger).tolist() merger_nodes = list(mergerGraph.nodes()) qubit_count = len(merger_nodes) @@ -546,14 +543,14 @@ def createMergerGraph(border, subgraphs): merger_edge_src.append(merger_nodes.index(u)) merger_edge_tgt.append(merger_nodes.index(v)) - # Pass the kernel, spin operator, and optimizer to `cudaq.vqe`. - optimal_expectation, optimal_parameters = cudaq.vqe( - kernel=kernel_qaoa, - spin_operator=mHamiltonian(mergerGraph), - argument_mapper=lambda parameter_vector: (qubit_count, layer_count, merger_edge_src, merger_edge_tgt, parameter_vector), - optimizer=optimizer_merger, - parameter_count=parameter_count_merger, - shots = 10000) + # Pass the kernel, spin operator, and optimizer to `solvers.vqe`. 
+ optimal_expectation, optimal_parameters, _ = solvers.vqe( + lambda thetas: kernel_qaoa(qubit_count, layer_count, merger_edge_src, merger_edge_tgt, thetas), + mHamiltonian(mergerGraph), + initial_parameters_merger, + optimizer='cobyla', + max_iterations=150, + shots=10000) # Print the optimized value and its parameters print("Optimal value = ", optimal_expectation) diff --git a/qaoa-for-max-cut/Example-03.py b/qaoa-for-max-cut/Example-03.py index c2f48a1..f87ff69 100644 --- a/qaoa-for-max-cut/Example-03.py +++ b/qaoa-for-max-cut/Example-03.py @@ -17,6 +17,7 @@ from networkx import algorithms from networkx.algorithms import community import cudaq +import cudaq_solvers as solvers from cudaq import spin from cudaq.qis import * import numpy as np @@ -167,20 +168,18 @@ def find_optimal_parameters(G, layer_count, seed): # Each layer of the QAOA kernel contains 2 parameters parameter_count : int = 2*layer_count - # Specify the optimizer and its initial parameters. - optimizer = cudaq.optimizers.COBYLA() + # Specify the initial parameters. np.random.seed(seed) cudaq.set_random_seed(seed) - optimizer.initial_parameters = np.random.uniform(-np.pi, np.pi, - parameter_count) + initial_parameters = np.random.uniform(-np.pi, np.pi, + parameter_count).tolist() - # Pass the kernel, spin operator, and optimizer to `cudaq.vqe`. - optimal_expectation, optimal_parameters = cudaq.vqe( - kernel=kernel_qaoa, - spin_operator=hamiltonian_max_cut(qubit_src, qubit_tgt), - argument_mapper=lambda parameter_vector: (qubit_count, layer_count, qubit_src, qubit_tgt, parameter_vector), - optimizer=optimizer, - parameter_count=parameter_count) + # Pass the kernel, spin operator, and optimizer to `solvers.vqe`. 
+ optimal_expectation, optimal_parameters, _ = solvers.vqe( + lambda thetas: kernel_qaoa(qubit_count, layer_count, qubit_src, qubit_tgt, thetas), + hamiltonian_max_cut(qubit_src, qubit_tgt), + initial_parameters, + optimizer='cobyla') return optimal_parameters @@ -501,21 +500,19 @@ def merging(G, graph_dictionary, merger_graph): # The number of qubits we'll need is the same as the number of vertices in our graph qubit_count_merger : int = len(nodes_merger) - # Specify the optimizer and its initial parameters. Make it repeatable. + # Specify the initial parameters. Make it repeatable. cudaq.set_random_seed(12345) - optimizer_merger = cudaq.optimizers.COBYLA() np.random.seed(4321) - optimizer_merger.initial_parameters = np.random.uniform(-np.pi, np.pi, - parameter_count_merger) - optimizer_merger.max_iterations=150 - # Pass the kernel, spin operator, and optimizer to `cudaq.vqe`. - optimal_expectation, optimal_parameters = cudaq.vqe( - kernel=kernel_qaoa, - spin_operator=merger_Hamiltonian, - argument_mapper=lambda parameter_vector: (qubit_count_merger, layer_count_merger, merger_edge_src, merger_edge_tgt, parameter_vector), - optimizer=optimizer_merger, - parameter_count=parameter_count_merger, - shots = 20000) + initial_parameters_merger = np.random.uniform(-np.pi, np.pi, + parameter_count_merger).tolist() + # Pass the kernel, spin operator, and optimizer to `solvers.vqe`. 
+ optimal_expectation, optimal_parameters, _ = solvers.vqe( + lambda thetas: kernel_qaoa(qubit_count_merger, layer_count_merger, merger_edge_src, merger_edge_tgt, thetas), + merger_Hamiltonian, + initial_parameters_merger, + optimizer='cobyla', + max_iterations=150, + shots=20000) # Sample the circuit using the optimized parameters # Sample enough times to distinguish the most_probable outcome for diff --git a/qaoa-for-max-cut/Example-04-Solution.py b/qaoa-for-max-cut/Example-04-Solution.py index 95b65c3..83e46b7 100644 --- a/qaoa-for-max-cut/Example-04-Solution.py +++ b/qaoa-for-max-cut/Example-04-Solution.py @@ -17,6 +17,7 @@ from networkx import algorithms from networkx.algorithms import community import cudaq +import cudaq_solvers as solvers from cudaq import spin from cudaq.qis import * import numpy as np @@ -170,20 +171,18 @@ def find_optimal_parameters(G, layer_count, seed): # Each layer of the QAOA kernel contains 2 parameters parameter_count : int = 2*layer_count - # Specify the optimizer and its initial parameters. - optimizer = cudaq.optimizers.COBYLA() + # Specify the initial parameters. np.random.seed(seed) cudaq.set_random_seed(seed) - optimizer.initial_parameters = np.random.uniform(-np.pi, np.pi, - parameter_count) + initial_parameters = np.random.uniform(-np.pi, np.pi, + parameter_count).tolist() - # Pass the kernel, spin operator, and optimizer to `cudaq.vqe`. - optimal_expectation, optimal_parameters = cudaq.vqe( - kernel=kernel_qaoa, - spin_operator=hamiltonian_max_cut(qubit_src, qubit_tgt, weights), - argument_mapper=lambda parameter_vector: (qubit_count, layer_count, qubit_src, qubit_tgt, parameter_vector), - optimizer=optimizer, - parameter_count=parameter_count) + # Pass the kernel, spin operator, and optimizer to `solvers.vqe`. 
+ optimal_expectation, optimal_parameters, _ = solvers.vqe( + lambda thetas: kernel_qaoa(qubit_count, layer_count, qubit_src, qubit_tgt, thetas), + hamiltonian_max_cut(qubit_src, qubit_tgt, weights), + initial_parameters, + optimizer='cobyla') return optimal_parameters @@ -505,21 +504,19 @@ def merging(G, graph_dictionary, merger_graph): # The number of qubits we'll need is the same as the number of vertices in our graph qubit_count_merger : int = len(nodes_merger) - # Specify the optimizer and its initial parameters. Make it repeatable. + # Specify the initial parameters. Make it repeatable. cudaq.set_random_seed(merger_seed) - optimizer_merger = cudaq.optimizers.COBYLA() np.random.seed(merger_seed) - optimizer_merger.initial_parameters = np.random.uniform(-np.pi, np.pi, - parameter_count_merger) - optimizer_merger.max_iterations=150 - # Pass the kernel, spin operator, and optimizer to `cudaq.vqe`. - optimal_expectation, optimal_parameters = cudaq.vqe( - kernel=kernel_qaoa, - spin_operator=merger_Hamiltonian, - argument_mapper=lambda parameter_vector: (qubit_count_merger, layer_count_merger, merger_edge_src, merger_edge_tgt, parameter_vector), - optimizer=optimizer_merger, - parameter_count=parameter_count_merger, - shots = 20000) + initial_parameters_merger = np.random.uniform(-np.pi, np.pi, + parameter_count_merger).tolist() + # Pass the kernel, spin operator, and optimizer to `solvers.vqe`. 
+ optimal_expectation, optimal_parameters, _ = solvers.vqe( + lambda thetas: kernel_qaoa(qubit_count_merger, layer_count_merger, merger_edge_src, merger_edge_tgt, thetas), + merger_Hamiltonian, + initial_parameters_merger, + optimizer='cobyla', + max_iterations=150, + shots=20000) # Sample the circuit using the optimized parameters # Sample enough times to distinguish the most_probable outcome for diff --git a/qaoa-for-max-cut/Example-04.py b/qaoa-for-max-cut/Example-04.py index c9bff58..e577377 100644 --- a/qaoa-for-max-cut/Example-04.py +++ b/qaoa-for-max-cut/Example-04.py @@ -17,6 +17,7 @@ from networkx import algorithms from networkx.algorithms import community import cudaq +import cudaq_solvers as solvers from cudaq import spin from cudaq.qis import * import numpy as np @@ -169,20 +170,18 @@ def find_optimal_parameters(G, layer_count, seed): # Each layer of the QAOA kernel contains 2 parameters parameter_count : int = 2*layer_count - # Specify the optimizer and its initial parameters. - optimizer = cudaq.optimizers.COBYLA() + # Specify the initial parameters. np.random.seed(seed) cudaq.set_random_seed(seed) - optimizer.initial_parameters = np.random.uniform(-np.pi, np.pi, - parameter_count) + initial_parameters = np.random.uniform(-np.pi, np.pi, + parameter_count).tolist() - # Pass the kernel, spin operator, and optimizer to `cudaq.vqe`. - optimal_expectation, optimal_parameters = cudaq.vqe( - kernel=kernel_qaoa, - spin_operator=hamiltonian_max_cut(qubit_src, qubit_tgt, weights), - argument_mapper=lambda parameter_vector: (qubit_count, layer_count, qubit_src, qubit_tgt, parameter_vector), - optimizer=optimizer, - parameter_count=parameter_count) + # Pass the kernel, spin operator, and optimizer to `solvers.vqe`. 
+ optimal_expectation, optimal_parameters, _ = solvers.vqe( + lambda thetas: kernel_qaoa(qubit_count, layer_count, qubit_src, qubit_tgt, thetas), + hamiltonian_max_cut(qubit_src, qubit_tgt, weights), + initial_parameters, + optimizer='cobyla') return optimal_parameters @@ -415,7 +414,7 @@ def merger_graph_penalties(mergerGraph, subgraph_dictionary, G): - # Edit the code below + # Edit the code above return mergerGraph @@ -504,21 +503,19 @@ def merging(G, graph_dictionary, merger_graph): # The number of qubits we'll need is the same as the number of vertices in our graph qubit_count_merger : int = len(nodes_merger) - # Specify the optimizer and its initial parameters. Make it repeatable. + # Specify the initial parameters. Make it repeatable. cudaq.set_random_seed(merger_seed) - optimizer_merger = cudaq.optimizers.COBYLA() np.random.seed(merger_seed) - optimizer_merger.initial_parameters = np.random.uniform(-np.pi, np.pi, - parameter_count_merger) - optimizer_merger.max_iterations=150 - # Pass the kernel, spin operator, and optimizer to `cudaq.vqe`. - optimal_expectation, optimal_parameters = cudaq.vqe( - kernel=kernel_qaoa, - spin_operator=merger_Hamiltonian, - argument_mapper=lambda parameter_vector: (qubit_count_merger, layer_count_merger, merger_edge_src, merger_edge_tgt, parameter_vector), - optimizer=optimizer_merger, - parameter_count=parameter_count_merger, - shots = 20000) + initial_parameters_merger = np.random.uniform(-np.pi, np.pi, + parameter_count_merger).tolist() + # Pass the kernel, spin operator, and optimizer to `solvers.vqe`. 
+ optimal_expectation, optimal_parameters, _ = solvers.vqe( + lambda thetas: kernel_qaoa(qubit_count_merger, layer_count_merger, merger_edge_src, merger_edge_tgt, thetas), + merger_Hamiltonian, + initial_parameters_merger, + optimizer='cobyla', + max_iterations=150, + shots=20000) # Sample the circuit using the optimized parameters # Sample enough times to distinguish the most_probable outcome for diff --git a/qaoa-for-max-cut/README.md b/qaoa-for-max-cut/README.md index 6f992e3..dd89b1d 100644 --- a/qaoa-for-max-cut/README.md +++ b/qaoa-for-max-cut/README.md @@ -8,23 +8,6 @@ to an application of a divide-and-conquer QAOA algorithm to a large max cut prob * Completion of the [Quick Start to Quantum Computing with CUDA-Q](https://github.com/NVIDIA/cuda-q-academic/tree/main/quick-start-to-quantum) course or equivalent familiarity with variational quantum algorithms (e.g. VQE or QAOA). ## Notebooks -The Jupyter notebooks in this folder are designed to run in an environment with CUDA-Q with Python. For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). A Dockerfile and requirements.txt are also included in the main directory of the repository to help get you set up. +The Jupyter notebooks in this folder are designed to run in an environment with CUDA-Q with Python. For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). -Otherwise, if you have set up an account in any of the platforms listed below, -simply click on the icons below to run the notebooks on the listed platform. 
- - - -| Notebook |qBraid[^1] | Brev | Google Colab[^2] | -| ----------- | ----------- | ----------- | ----------- | -| Lab 0: Start Here | Launch On qBraid | [![ Click here to deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5) | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/qaoa-for-max-cut/00_StartHere.ipynb)| -|Lab 1: Max Cut with QAOA |Launch On qBraid | [![ Click here to deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5)| [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/qaoa-for-max-cut/01_Max-Cut-with-QAOA.ipynb)| -| Lab 2: One level Divide and Conquer[^3] |Launch On qBraid |[![ Click here to deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5)| [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/qaoa-for-max-cut/02_One-level-divide-and-conquer-QAOA.ipynb)| -| Lab 3: Recursive Divide and Conquer[^4] |Launch On qBraid | [![ Click here to deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5)| [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/qaoa-for-max-cut/03_Recursive-divide-and-conquer.ipynb)| -| Lab 4: Assessment[^5] |Launch On qBraid | [![ Click here to 
deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5)| [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/qaoa-for-max-cut/04_Assessment.ipynb)| - -[^1]:If using qBraid Lab, use the [Environment Manager](https://docs.qbraid.com/lab/user-guide/environments) to install the CUDA-Q environment and then activate it in your notebook. In qBraid Lab you can switch to a GPU instance using the [Compute Manager](https://docs.qbraid.com/lab/user-guide/compute-manager). To run the optional interactive widgets in the notebooks, you'll need to copy the interactive widget folder into your environment. -[^2]:You will need to uncomment out the `pip install cudaq` code in each notebook to run on Google CoLab. To run the optional interactive widgets in the notebooks, you'll need to copy the interactive widget folder into your environment. -[^3]: The majority of the code in this lab can be executed on CPU. However, the final example employing parallelization does require a GPU. If you don't have GPU access, you can still appreciate and learn from the code without executing it. -[^4]: See the footnote above. -[^5]: The assessment does require a GPU to execute. If you don't have GPU access, you can still appreciate and learn from the code without executing it. +Otherwise, explore our [Learning Pathways page](https://nvidia.github.io/cuda-q-academic/learningpath.html) for additional cloud-based options to run these notebooks. 
diff --git a/qaoa-for-max-cut/images/QAOA-flowchart.png b/qaoa-for-max-cut/images/QAOA-flowchart.png index c48199e..3556149 100644 Binary files a/qaoa-for-max-cut/images/QAOA-flowchart.png and b/qaoa-for-max-cut/images/QAOA-flowchart.png differ diff --git a/qaoa-for-max-cut/qaoa-divide-and-conquer.py b/qaoa-for-max-cut/qaoa-divide-and-conquer.py new file mode 100644 index 0000000..72c6d94 --- /dev/null +++ b/qaoa-for-max-cut/qaoa-divide-and-conquer.py @@ -0,0 +1,529 @@ +# SPDX-License-Identifier: Apache-2.0 AND CC-BY-NC-4.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import networkx as nx +from networkx.algorithms import community +import cudaq +from cudaq import spin +import cudaq_solvers as solvers +import numpy as np +from mpi4py import MPI +from typing import List + + +cudaq.set_target("nvidia") +target = cudaq.get_target() + +comm = MPI.COMM_WORLD +rank = comm.Get_rank() +num_qpus = comm.Get_size() + + +# ─── Graph utilities (from Lab 2) ────────────────────────────────── + +def subgraph_of_vertex(graph_dictionary, vertex): + """Return the key of the subgraph that contains *vertex*, + or '' if the vertex is not found in any subgraph. 
+ + Parameters + ---------- + graph_dictionary : dict of networkX.Graph with str as keys + vertex : int + + Returns + ------- + str + """ + location = '' + for key in graph_dictionary: + if vertex in graph_dictionary[key].nodes(): + location = key + return location + + +def border(G, subgraph_dictionary): + """Return the subgraph of G containing only edges that cross + between distinct subgraphs. + + Parameters + ---------- + G : networkX.Graph + subgraph_dictionary : dict of networkX.Graph with str as keys + + Returns + ------- + networkX.Graph + """ + borderGraph = nx.Graph() + for u, v in G.edges(): + is_border = True + for key in subgraph_dictionary: + SubG = subgraph_dictionary[key] + if (u, v) in list(nx.edges(SubG)): + is_border = False + if is_border: + borderGraph.add_edge(u, v) + return borderGraph + + +def cutvalue(G): + """Return the cut value of G based on the 'color' attribute of each node. + + Parameters + ---------- + G : networkX.Graph + + Returns + ------- + int + """ + cut = 0 + for u, v in G.edges(): + if str(G.nodes[u]['color']) != str(G.nodes[v]['color']): + cut += 1 + return cut + + +def subgraphpartition(G, n, name, globalGraph): + """Divide G into at most *n* subgraphs using greedy modularity. + + Parameters + ---------- + G : networkX.Graph + n : int + name : str + globalGraph : networkX.Graph + + Returns + ------- + dict of str : networkX.Graph + """ + greedy_partition = community.greedy_modularity_communities( + G, weight=None, resolution=1.1, cutoff=1, best_n=n) + graph_dictionary = {} + for i, part in enumerate(greedy_partition): + subgraphname = f"{name}:{i}" + nodelist = sorted(list(part)) + graph_dictionary[subgraphname] = nx.subgraph(globalGraph, nodelist) + return graph_dictionary + + +# ─── Max-cut QAOA via solvers ─────────────────────────────────────── + +def qaoa_for_graph(G, layer_count, shots, seed): + """Find an approximate max cut of G using ``solvers.qaoa()``. 
+ + Parameters + ---------- + G : networkX.Graph + layer_count : int + shots : int + (Kept for API compatibility; solvers.qaoa handles sampling internally.) + seed : int + + Returns + ------- + str + Binary string representing the max-cut colouring of the vertices. + """ + if nx.number_of_nodes(G) == 1 or nx.number_of_edges(G) == 0: + results = '' + for u in list(nx.nodes(G)): + np.random.seed(seed) + results += str(np.random.randint(0, 1)) + return results + + # Remap to 0-indexed nodes so qubit indices match + G_mapped = nx.convert_node_labels_to_integers(G, ordering='sorted') + hamiltonian = solvers.get_maxcut_hamiltonian(G_mapped) + parameter_count = solvers.get_num_qaoa_parameters(hamiltonian, layer_count) + + np.random.seed(seed) + cudaq.set_random_seed(seed) + initial_parameters = np.random.uniform( + -np.pi, np.pi, parameter_count).tolist() + + optimal_value, optimal_parameters, sample_result = solvers.qaoa( + hamiltonian, layer_count, initial_parameters) + + print("Optimal value =", optimal_value) + print("most_probable outcome =", sample_result.most_probable()) + return str(sample_result.most_probable()) + + +# ─── Merger graph construction and QAOA ───────────────────────────── + +def createMergerGraph(border_graph, subgraphs): + """Build a graph with one vertex per subgraph and edges where + the corresponding subgraphs are connected by border edges. + + Parameters + ---------- + border_graph : networkX.Graph + subgraphs : dict of networkX.Graph with str as keys + + Returns + ------- + networkX.Graph + """ + M = nx.Graph() + for u, v in border_graph.edges(): + su = subgraph_of_vertex(subgraphs, u) + sv = subgraph_of_vertex(subgraphs, v) + if su != sv: + M.add_edge(su, sv) + return M + + +def merger_graph_penalties(mergerGraph, subgraph_dictionary, G): + """Compute penalty weights for each edge in the merger graph. 
+ + Parameters + ---------- + mergerGraph : networkX.Graph + subgraph_dictionary : dict of networkX.Graph with str as keys + G : networkX.Graph + + Returns + ------- + networkX.Graph + """ + nx.set_edge_attributes(mergerGraph, int(0), 'penalty') + for i, j in mergerGraph.edges(): + penalty_ij = 0 + for u in nx.nodes(subgraph_dictionary[i]): + for neighbor_u in nx.all_neighbors(G, u): + if neighbor_u in nx.nodes(subgraph_dictionary[j]): + if str(G.nodes[u]['color']) != str(G.nodes[neighbor_u]['color']): + penalty_ij += 1 + else: + penalty_ij -= 1 + mergerGraph[i][j]['penalty'] = penalty_ij + return mergerGraph + + +def merger_hamiltonian(merger_edge_src, merger_edge_tgt, penalty): + """Build the weighted ZZ Hamiltonian for the merger optimisation. + + This is *not* a standard max-cut Hamiltonian — the weights come + from the penalty structure of the subgraph partition. + + Parameters + ---------- + merger_edge_src : List[int] + merger_edge_tgt : List[int] + penalty : List[int] + + Returns + ------- + cudaq.SpinOperator + """ + H = 0 + for i in range(len(merger_edge_src)): + H += -penalty[i] * spin.z(merger_edge_src[i]) * spin.z(merger_edge_tgt[i]) + return H + + +def merging(G, graph_dictionary, merger_graph): + """Use ``solvers.qaoa()`` on the merger graph to determine which + subgraphs should have their colours flipped. + + Parameters + ---------- + G : networkX.Graph + graph_dictionary : dict of networkX.Graph with str as keys + merger_graph : networkX.Graph + + Returns + ------- + str + Binary string (one bit per subgraph); '1' means flip that subgraph. 
+ """ + mg = merger_graph_penalties(merger_graph, graph_dictionary, G) + has_nontrivial = any(mg[u][v]['penalty'] != 0 for u, v in nx.edges(mg)) + + if not has_nontrivial: + print('Merging stage is trivial') + return '0' * nx.number_of_nodes(merger_graph) + + merger_nodes = sorted(list(mg.nodes())) + merger_edge_src = [] + merger_edge_tgt = [] + penalty = [] + for u, v in nx.edges(mg): + merger_edge_src.append(merger_nodes.index(u)) + merger_edge_tgt.append(merger_nodes.index(v)) + penalty.append(mg[u][v]['penalty']) + + H = merger_hamiltonian(merger_edge_src, merger_edge_tgt, penalty) + layer_count_merger = 3 + parameter_count = solvers.get_num_qaoa_parameters(H, layer_count_merger) + + cudaq.set_random_seed(12345) + np.random.seed(4321) + initial_parameters = np.random.uniform( + -np.pi, np.pi, parameter_count).tolist() + + _, _, sample_result = solvers.qaoa( + H, layer_count_merger, initial_parameters) + + return str(sample_result.most_probable()) + + +# ─── Colouring utilities ──────────────────────────────────────────── + +def unaltered_colors(G, graph_dictionary, max_cuts): + """Colour G's vertices based on per-subgraph max-cut results. + + Parameters + ---------- + G : networkX.Graph + graph_dictionary : dict of networkX.Graph with str as keys + max_cuts : dict of str + + Returns + ------- + networkX.Graph + """ + for key in graph_dictionary: + SubG = graph_dictionary[key] + sorted_nodes = sorted(list(nx.nodes(SubG))) + for v in sorted_nodes: + G.nodes[v]['color'] = max_cuts[key][sorted_nodes.index(v)] + return G + + +def new_colors(graph_dictionary, G, mergerGraph, flip_colors): + """Flip subgraph colours according to the merger QAOA result. 
+ + Parameters + ---------- + graph_dictionary : dict of networkX.Graph with str as keys + G : networkX.Graph + mergerGraph : networkX.Graph + flip_colors : str + + Returns + ------- + (networkX.Graph, str) + """ + mergerNodes = sorted(list(nx.nodes(mergerGraph))) + flipGraphColors = {} + for u in mergerNodes: + flipGraphColors[u] = int(flip_colors[mergerNodes.index(u)]) + + for key in graph_dictionary: + if flipGraphColors[key] == 1: + for u in graph_dictionary[key].nodes(): + G.nodes[u]['color'] = str(1 - int(G.nodes[u]['color'])) + + revised_colors = '' + for u in sorted(G.nodes()): + revised_colors += str(G.nodes[u]['color']) + return G, revised_colors + + +# ─── Recursive divide-and-conquer ─────────────────────────────────── + +def subgraph_solution(G, key, vertex_limit, subgraph_limit, + layer_count, global_graph, seed): + """Recursively find max-cut approximations of the subgraphs. + + Parameters + ---------- + G : networkX.Graph + key : str + vertex_limit : int + subgraph_limit : int + layer_count : int + global_graph : networkX.Graph + seed : int + + Returns + ------- + str + """ + results = {} + seed = 123 + + if nx.number_of_nodes(G) < vertex_limit + 1: + print('Working on finding max cut approximations for', key) + result = qaoa_for_graph(G, layer_count=layer_count, + shots=10000, seed=seed) + results[key] = result + nodes_of_G = sorted(list(G.nodes())) + for u in G.nodes(): + global_graph.nodes[u]['color'] = results[key][nodes_of_G.index(u)] + return result + + # Recursively apply the algorithm for large graphs + subgraph_limit = min(subgraph_limit, nx.number_of_nodes(G)) + subgraph_dictionary = subgraphpartition( + G, subgraph_limit, str(key), global_graph) + + for skey in subgraph_dictionary: + results[skey] = subgraph_solution( + subgraph_dictionary[skey], skey, vertex_limit, + subgraph_limit, layer_count, global_graph, seed) + + print('Found max cut approximations for', + list(subgraph_dictionary.keys())) + + G = unaltered_colors(G, 
subgraph_dictionary, results) + unaltered_cut_value = cutvalue(G) + print('prior to merging, the max cut value of', key, 'is', + unaltered_cut_value) + + print('Merging these solutions together for a solution to', key) + bordergraph = border(G, subgraph_dictionary) + merger_graph = createMergerGraph(bordergraph, subgraph_dictionary) + + try: + merger_results = merging(G, subgraph_dictionary, merger_graph) + except Exception: + merger_results = '0' * nx.number_of_nodes(merger_graph) + print('Merging subroutine opted out with an error for', key) + + alteredG, new_color_list = new_colors( + subgraph_dictionary, G, merger_graph, merger_results) + newcut = cutvalue(alteredG) + print('the merger algorithm produced a new coloring of', key, + 'with cut value,', newcut) + + return new_color_list + + +########################################################################### +# Main algorithm +########################################################################### + +if rank == 0: + n = 30 + m = 70 + seed = 20160 + sampleGraph3 = nx.gnm_random_graph(n, m, seed=seed) + + subgraph_dictionary = subgraphpartition( + sampleGraph3, 12, 'Global', sampleGraph3) + + number_of_subgraphs = len(sorted(subgraph_dictionary)) + number_of_subgraphs_per_qpu = int( + np.ceil(number_of_subgraphs / num_qpus)) + + keys_on_qpu = {} + for q in range(num_qpus): + keys_on_qpu[q] = [] + for k in range(number_of_subgraphs_per_qpu): + if k * num_qpus + q < number_of_subgraphs: + key = sorted(subgraph_dictionary)[k * num_qpus + q] + keys_on_qpu[q].append(key) + + print('Subgraph problems to be computed on each processor ' + 'have been assigned') + + for i in range(num_qpus): + subgraph_to_qpu = {k: subgraph_dictionary[k] + for k in keys_on_qpu[i]} + if i != 0: + comm.send(subgraph_to_qpu, dest=i, tag=rank) + else: + assigned_subgraph_dictionary = subgraph_to_qpu +else: + assigned_subgraph_dictionary = comm.recv(source=0, tag=0) + print(f"Processor {rank} received " + 
f"{assigned_subgraph_dictionary} from processor 0") + + +########################################################################### +# Solve assigned subgraph problems +########################################################################### +num_subgraphs = 11 +num_qubits = 9 +layer_count = 2 +results = {} + +for key in assigned_subgraph_dictionary: + G = assigned_subgraph_dictionary[key] + newcoloring_of_G = subgraph_solution( + G, key, num_subgraphs, num_qubits, layer_count, G, seed=13) + results[key] = newcoloring_of_G + + +########################################################################### +# Gather results on rank 0 +########################################################################### +if rank != 0: + comm.send(results, dest=0, tag=0) + print(f"{results} sent by processor {rank}") + +else: + for j in range(1, num_qpus): + colors = comm.recv(source=j, tag=0) + print(f"Received {colors} from processor {j}") + for key in colors: + results[key] = colors[key] + print("The results dictionary on GPU 0 =", results) + + # Colour the full graph using subgraph solutions + for key in subgraph_dictionary: + SubG = subgraph_dictionary[key] + color_list = [int(c) for c in results[key]] + for v in sorted(list(nx.nodes(SubG))): + idx = sorted(list(nx.nodes(SubG))).index(v) + SubG.nodes[v]['color'] = color_list[idx] + sampleGraph3.nodes[v]['color'] = SubG.nodes[v]['color'] + + print('The divide-and-conquer QAOA unaltered cut approximation ' + 'of the graph, prior to the final merge, is', + cutvalue(sampleGraph3)) + + # Final merger across all subgraphs + borderGraph = border(sampleGraph3, subgraph_dictionary) + mergerGraph = createMergerGraph(borderGraph, subgraph_dictionary) + merger_results = merging( + sampleGraph3, subgraph_dictionary, mergerGraph) + maxcutSampleGraph3, G_colors_with_maxcut = new_colors( + subgraph_dictionary, sampleGraph3, mergerGraph, merger_results) + + for node, attributes in maxcutSampleGraph3.nodes(data=True): + print(f"Node: 
{node}, Attributes: {attributes}") + + print('The divide-and-conquer QAOA max cut approximation ' + 'of the graph is', cutvalue(maxcutSampleGraph3)) + print('The divide and conquer max cut coloring is', + G_colors_with_maxcut) + + # Compare with classical greedy approximation + number_of_approx = 10 + randomlist = np.random.choice(3000, number_of_approx) + minapprox = nx.algorithms.approximation.one_exchange( + sampleGraph3, initial_cut=None, seed=int(randomlist[0]))[0] + maxapprox = minapprox + sum_of_approximations = 0 + for i in range(number_of_approx): + seed = int(randomlist[i]) + ith_approximation = nx.algorithms.approximation.one_exchange( + sampleGraph3, initial_cut=None, seed=seed)[0] + if ith_approximation < minapprox: + minapprox = ith_approximation + if ith_approximation > maxapprox: + maxapprox = ith_approximation + sum_of_approximations += ith_approximation + + average_approx = sum_of_approximations / number_of_approx + print('This compares to a few runs of the greedy modularity ' + 'maximization algorithm gives an average approximate ' + 'Max Cut value of', average_approx) + print('with approximations ranging from', minapprox, + 'to', maxapprox) diff --git a/qec101/01_QEC_Intro.ipynb b/qec101/01_QEC_Intro.ipynb index 6a450b8..65d67af 100644 --- a/qec101/01_QEC_Intro.ipynb +++ b/qec101/01_QEC_Intro.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "id": "eead95ce", "metadata": { "id": "eead95ce" @@ -27,47 +27,52 @@ { "cell_type": "markdown", "id": "135f6e3e-e55f-4b5c-80cb-f8979bba94e0", - "metadata": { - "id": "135f6e3e-e55f-4b5c-80cb-f8979bba94e0" - }, + "metadata": {}, "source": [ - "# QEC 101 Lab 1 - The Basics of Classical and Quantum Error Correction\n", + "# QEC 101 — Lab 1: The Basics of Classical and Quantum Error Correction\n", + "$\\renewcommand{\\ket}[1]{|#1\\rangle}\\renewcommand{\\bra}[1]{\\langle#1|}$\n", "\n", - "$\n", - "\\renewcommand{\\ket}[1]{|{#1}\\rangle}\n", - 
"\\renewcommand{\\bra}[1]{\\langle{#1}|}\n", - "$\n", "---\n", - "## Overview\n", - "One of the biggest challenges in realizing practical quantum computing is the noisy nature of qubits, making quantum error correction (QEC) essential for detecting and fixing errors in real time. In this lab, you’ll explore the fundamentals of error correction (EC) concepts and terminology, walk through examples of classical EC codes, examine how QEC differs from classical methods, and ultimately get hands-on experience coding your first QEC procedure.\n", + "\n", + "**What You Will Do:**\n", + "* Define the five aspects common to all error correction procedures\n", + "* Implement the classical repetition code and analyze its performance\n", + "* Construct the generator and parity check matrices for the Hamming code\n", + "* Identify the challenges that distinguish quantum error correction from classical methods\n", + "* Implement the three-qubit quantum repetition code using CUDA-Q\n", "\n", "**Prerequisites:**\n", - "Learners should have familiarity with Jupyter notebooks and programming in Python and CUDA-Q. It is assumed the reader has some familiarity already with quantum computation and is comfortable with braket notation and the concepts of qubits, quantum circuits, measurement, and circuit sampling. 
The CUDA-Q Academic course entitled \"[Quick Start to Quantum Computing with CUDA-Q](https://github.com/NVIDIA/cuda-q-academic/tree/main/quick-start-to-quantum)\" provide a walkthrough of this prerequisite knowledge if the reader is new to quantum computing and CUDA-Q or needs refreshing.\n", - "\n", - "The list below outlines what you'll be doing in each section of this lab:\n", - "\n", - "* **1.1** Define the basics of EC, including the 5 aspects common to EC procedures\n", - "* **1.2** Code the classical repetition code\n", - "* **1.3** Code the classical Hamming code\n", - "* **1.4** Experiment with noisy qubits to understand what makes QEC challenging\n", - "* **1.5** Explore why there is still hope for QEC\n", - "* **1.6** Learn the theory for the quantum repetition code\n", - "* **1.7** Implement the quantum repetition code in CUDA-Q\n", - "\n", - "Terminology and notation you'll use\n", - "* encoder, decoder, logical codewords, codespace, error space, noisy channel, logical error, logical error rate\n", - "* repetition code, Hamming code, $[n,k,d]$-codes\n", - "* syndrome" - ] - }, - { - "cell_type": "markdown", - "id": "b5046650", - "metadata": { - "id": "b5046650" - }, - "source": [ - "Execute the cells below to load all the necessary packages for this lab." 
+ "* Python and Jupyter familiarity\n", + "* Basic knowledge of quantum computing (qubits, gates, measurement, circuit sampling)\n", + "* Familiarity with braket notation ($\\ket{\\psi}$, $\\bra{\\psi}$)\n", + "* Completion of [Quick Start to Quantum Computing with CUDA-Q](https://github.com/NVIDIA/cuda-q-academic/tree/main/quick-start-to-quantum) or equivalent knowledge\n", + "\n", + "**Key Terminology:**\n", + "* Encoder\n", + "* Decoder\n", + "* Logical Codewords\n", + "* Codespace\n", + "* Error Space\n", + "* Noisy Channel\n", + "* Logical Error\n", + "* Logical Error Rate\n", + "* Repetition Code\n", + "* Hamming Code\n", + "* $[n,k,d]$-codes\n", + "* Syndrome\n", + "* Parity Checks\n", + "* Distance\n", + "\n", + "**CUDA-Q Syntax:**\n", + "* [`@cudaq.kernel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.kernel) — defines a quantum kernel function\n", + "* [`cudaq.qvector`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.qvector) — allocates a register of qubits\n", + "* [`cudaq.sample`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.sample) — samples measurement outcomes from a kernel\n", + "* [`cudaq.set_target`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.set_target) — selects simulation or hardware backend\n", + "* [`cudaq.NoiseModel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.NoiseModel) — defines a quantum noise model\n", + "* [`cudaq.BitFlipChannel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.KrausChannel) — bit-flip noise channel\n", + "* [`cudaq.register_operation`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.register_operation) — registers a custom unitary gate\n", + "\n", + "**Solutions:** [`Solutions/01_QEC_Intro_Solution.ipynb`](Solutions/01_QEC_Intro_Solution.ipynb)" ] }, { @@ -77,78 +82,51 @@ 
"metadata": {}, "outputs": [], "source": [ - "## Instructions for Google Colab. You can ignore this cell if you have cuda-q set up and have \n", - "# all the dependent files on your system\n", - "# Uncomment the lines below and execute the cell to install cuda-q\n", - "\n", - "#!pip install cudaq\n", + "## Instructions for Google Colab. You can ignore this cell if you have CUDA-Q\n", + "## set up locally with all required files on your system.\n", + "## Uncomment the lines below and execute this cell to install CUDA-Q.\n", "\n", + "#!pip install cudaq -q\n", + "#\n", "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", "#!unzip -q main.zip\n", - "#!mv cuda-q-academic-main/qec101/Images ./Images\n" + "#!mv cuda-q-academic-main/qec101/Images ./Images" ] }, { - "cell_type": "code", - "execution_count": null, - "id": "3783a385", + "cell_type": "markdown", + "id": "b5046650", "metadata": {}, - "outputs": [], "source": [ - "# install `qutip` and `ipywidgets` in the current Python kernel. Skip this if they are already installed.\n", - "# `matplotlib` is required for all visualization tasks.\n", - "# Make sure to restart your kernel if you execute this!\n", - "# In a Jupyter notebook, go to the menu bar > Kernel > Restart Kernel.\n", - "# In VSCode, click on the Restart button in the Jupyter toolbar.\n", - "\n", - "# The '\\' before the '>' operator is so that the shell does not misunderstand\n", - "# the '>' qualifier for the bash pipe operation.\n", - "\n", - "import sys\n", - "\n", - "try:\n", - " import matplotlib.pyplot as plt\n", - " import qutip\n", - " import ipywidgets as widgets\n", - " import matplotlib_venn\n", - "\n", - "except ImportError:\n", - " print(\"Tools not found, installing. Please restart your kernel after this is done.\")\n", - " !{sys.executable} -m pip install qutip\\>=5 matplotlib\\>=3.5 matplotlib_venn\n", - " !{sys.executable} -m pip install ipywidgets\n", - " print(\"\\nNew libraries have been installed. 
Please restart your kernel!\")" + "> **Note:** Run the cell below to import all required packages.\n", + "> If you installed packages above, restart the kernel first\n", + "> (**Runtime → Restart session** in Colab, or **Kernel → Restart** in Jupyter)." ] }, { "cell_type": "code", "execution_count": null, "id": "644b9c82", - "metadata": { - "id": "644b9c82" - }, + "metadata": {}, "outputs": [], "source": [ - "import cudaq\n", - "from cudaq import spin\n", - "from cudaq.qis import *\n", - "import numpy as np\n", "import random\n", - "import matplotlib.pyplot as plt\n", "\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", "\n", - "from typing import List\n", - "import ipywidgets as widgets\n", - "from ipywidgets import interact, Output, VBox, HBox\n", - "from IPython.display import display" + "import cudaq\n", + "from cudaq import spin\n", + "from cudaq.qis import *" ] }, { "cell_type": "markdown", "id": "c2fa8e57-2f20-4d1b-a8c1-a8291da83fdc", - "metadata": { - "id": "c2fa8e57-2f20-4d1b-a8c1-a8291da83fdc" - }, + "metadata": {}, "source": [ + "---\n", + "\n", "## 1.1 The Basics of Error Correction" ] }, @@ -185,6 +163,8 @@ "id": "1d55edc7-6827-4a0b-93ed-199e19ae7a22", "metadata": {}, "source": [ + "---\n", + "\n", "## 1.2 The Repetition Code" ] }, @@ -193,7 +173,7 @@ "id": "261cb846-2e18-4ab1-bf50-800bc7c0abc2", "metadata": {}, "source": [ - "The most basic EC code is called the repetition code. \n", + "The most basic EC code is called the **repetition code**. \n", "\n", "Consider encoding the information in a single bit (0 or 1). The repetition code simply adds more bits which are in the same state. So a 3-bit repetition code encodes the logical 0 state ($0_L$) as 000 and the logical 1 state ($1_L$) as 111, making 000 and 111 the logical codewords.\n", "\n", @@ -224,15 +204,16 @@ "\n", "The table below shows the likelihood of the four possible scenarios below. 
Notice the three bit repetition code with majority count will transmit the message with 0.972 probability of success, a significant improvement over the original probability of 0.9. The **logical error rate** is equal to $1-p$, where $p$ is the probability of success. In the case of the 3-bit repetition code, the logical error rate is 0.028.\n", "\n", - "\"Drawing\"\n", + "\"Table\n", + "\n", + "\n", "\n", + "
\n", "\n", + "**Exercise 1:** The Repetition Code\n", + "\n", + "You now know enough to code up the repetition code. The exercise below will require you to generalize the repetition code so it will work with $n$ bits. Fill in the `##TODO##` sections and then observe the plots that are generated. What conclusions can you draw from the code performance using more bits? What do you notice about the logical error rate relative to the physical error rate?\n", "\n", - "
\n", - "

Exercise 1 - The repetition code:

\n", - "

\n", - "You now know enough to code up the repetition code. The exercise below will require you to generalize the repetition code so it will work with $n$ bits. Fill in the #TODO sections and then observe the plots that are generated. What conclusions can you draw from the code performance using more bits? What do you notice about the logical error rate relative to the physical error rate? \n", - "

\n", "
" ] }, @@ -243,93 +224,92 @@ "metadata": {}, "outputs": [], "source": [ - "def encode(bit, n):\n", - " \"\"\"Function that encodes a single bit rendundantly n times\n", + "# EXERCISE 1\n", + "\n", + "def encode(bit: int, n: int) -> list[int]:\n", + " \"\"\"Function that encodes a single bit redundantly n times.\n", "\n", " Parameters\n", " ----------\n", - " bit: int\n", + " bit : int\n", " Input bit (1 or 0)\n", " n : int\n", - " repetitions to use for encoding\n", + " Repetitions to use for encoding\n", "\n", " Returns\n", " -------\n", - " str\n", - " string of length n redundantly encoding bit\n", + " list[int]\n", + " List of length n redundantly encoding bit\n", " \"\"\"\n", - " #TODO\n", + " ##TODO##\n", "\n", "\n", - "\n", - "def decode(bits):\n", - " \"\"\"Function that decodes a message using majority voting to determine the closest codeword\n", + "def decode(bits: list[int]) -> int:\n", + " \"\"\"Function that decodes a message using majority voting to determine the closest codeword.\n", "\n", " Parameters\n", " ----------\n", - " bits: str\n", - " bitstring corresponding to message that has passed through noisy channel\n", + " bits : list[int]\n", + " List corresponding to message that has passed through noisy channel\n", "\n", " Returns\n", " -------\n", " int\n", " 1 or 0 corresponding to decoded codeword\n", " \"\"\"\n", - " #TODO\n", + " ##TODO##\n", "\n", "\n", + "def transmit(bits: list[int], p_error: float) -> list[int]:\n", + " \"\"\"Function that receives a codeword and randomly flips each bit with probability p_error\n", + " to emulate transmission through a noisy channel.\n", "\n", - "def transmit(bits, p_error):\n", - " \"\"\"Function that receives a codeword, and randomly flips each bit with probability p_error to emulate transmission through noisy channel\n", - " \n", " Parameters\n", " ----------\n", - " bits: str\n", - " bitstring corresponding to an encoded message without noise\n", - " p_error: float\n", - " probability that a bit will 
flip through transmission\n", + " bits : list[int]\n", + " List corresponding to an encoded message without noise\n", + " p_error : float\n", + " Probability that a bit will flip through transmission\n", "\n", " Returns\n", " -------\n", - " int\n", - " 1 or 0 corresponding to decoded codeword\n", + " list[int]\n", + " List corresponding to the message after noisy transmission\n", " \"\"\"\n", - " #TODO\n", + " ##TODO##\n", "\n", "\n", - "def simulate_logical_error_rate(n, p_error, trials):\n", + "def simulate_logical_error_rate(n: int, p_error: float, trials: int) -> float:\n", " \"\"\"Function to determine the logical error rate of an n-bit repetition code over specified number of trials.\n", - " \n", + "\n", " Parameters\n", " ----------\n", - " n: int\n", - " specifies n-bit repetition code to use\n", - " p_error: float\n", - " probability that a bit will flip through transmission\n", - " trials: int\n", - " number of trials used to determine logical error rate\n", + " n : int\n", + " Specifies n-bit repetition code to use\n", + " p_error : float\n", + " Probability that a bit will flip through transmission\n", + " trials : int\n", + " Number of trials used to determine logical error rate\n", "\n", " Returns\n", " -------\n", " float\n", - " The logical error rate `n_errors/trials`\n", - " \"\"\" \n", - "#TODO\n", + " The logical error rate (n_errors / trials)\n", + " \"\"\"\n", + " ##TODO##\n", "\n", - " \n", "\n", - "def plot_logical_vs_physical_error_rate(n, trials):\n", + "def plot_logical_vs_physical_error_rate(n: int, trials: int) -> None:\n", " \"\"\"Function to plot logical vs physical error rate for fixed n and number of trials.\n", - " \n", + "\n", " Parameters\n", " ----------\n", - " n: int\n", - " specifies n-bit repetition code to use\n", - " trials: int\n", - " number of trials used to determine logical error rate\n", - " \"\"\" \n", - "\n", - " #TODO\n", + " n : int\n", + " Specifies n-bit repetition code to use\n", + " trials : int\n", + " 
Number of trials used to determine logical error rate\n", + " \"\"\"\n", + " ##TODO##\n", "\n", " plt.figure(figsize=(10, 6))\n", " plt.plot(p_values, logical_error_rates, marker='o')\n", @@ -339,29 +319,30 @@ " plt.grid(True)\n", " plt.show()\n", "\n", - "# Plot 2: Logical Error Rate vs n \n", - "def plot_logical_vs_repetitions(p_error, max_n, trials):\n", - " \"\"\"Function to plot logical error rate vs bits used for redundant encoding\n", - " \n", + "\n", + "def plot_logical_vs_repetitions(p_error: float, max_n: int, trials: int) -> None:\n", + " \"\"\"Function to plot logical error rate vs bits used for redundant encoding.\n", + "\n", " Parameters\n", " ----------\n", - " max_n: int\n", - " specifies the maximum n-bit repetition code to use\n", - " p_error: float\n", - " probability that a bit will flip through transmission\n", - " trials: int\n", - " number of trials used to determine logical error rate\n", - " \"\"\" \n", - "\n", - " #TODO\n", + " p_error : float\n", + " Probability that a bit will flip through transmission\n", + " max_n : int\n", + " Specifies the maximum n-bit repetition code to use\n", + " trials : int\n", + " Number of trials used to determine logical error rate\n", + " \"\"\"\n", + " ##TODO##\n", + "\n", " plt.figure(figsize=(10, 6))\n", " plt.plot(n_values, logical_error_rates, marker='o')\n", - " plt.title('Logical Error Rate vs n')\n", - " plt.xlabel('n')\n", + " plt.title('Logical Error Rate vs Number of Repetitions (n)')\n", + " plt.xlabel('Number of Repetitions (n)')\n", " plt.ylabel('Logical Error Rate')\n", " plt.grid(True)\n", " plt.show()\n", "\n", + "\n", "# Example Usage\n", "n = 3 # Number of repetitions for the first plot\n", "p_error = 0.1 # Physical error rate for the second plot\n", @@ -377,10 +358,10 @@ { "cell_type": "markdown", "id": "b394caa4", - "metadata": { - "id": "b394caa4" - }, + "metadata": {}, "source": [ + "---\n", + "\n", "## 1.3 More Efficient EC Codes (The Hamming Code)\n", "\n", "There are many clever 
ways to improve the efficiency of EC codes. One common way is to make use of a concept called **parity checks**. Parity checks provide a clever way to index where errors occur, without a brute force statistical approach like the repetition code. \n", @@ -389,223 +370,18 @@ "\n", "This is accomplished by each parity bit encoding a parity, or the mod2 sum of a subset of the data bits. The [Venn diagram](https://en.wikipedia.org/wiki/Hamming_code) below depicts the encoding. In this example, $p_1$ encodes the parity of $d_1$, $d_2$, and $d_4$. If our data bits ($d_1d_2d_3d_4$) were 0110, then $p_1$ would be calculated to be 1.\n", "\n", - "\"Drawing\"\n", + "\"Venn\n", "\n", - "Either using the static Venn diagram above or the interactive one generated by executing the cell below, \n", - "reason through the following example:\n", + "Using the Hamming code widget [here](https://nvidia.github.io/cuda-q-academic/interactive_widgets/hamming.html), reason through the following example:\n", "\n", "> If you wanted to send the message 0110 (here $d_1 = 0$, $d_2 = 1$, $d_3 = 1$, and $d_4 = 0$), appending the three parity bits to the end of the original bitstring would produce the logical codeword: 0110110 (where $p_1 = 1$, $p_2 = 1$, and $p_3 = 0$). Note, this is a slight deviation from the traditional placement of the bits in the Hamming code done for simplicity.\n", ">\n", ">Errors could occur on any of the data or parity bits. Assume an error occurs on $d_2$ and the recipient receives 0010110. To produce the syndrome, the recipient can take the received data bits, 0010, and compute the expected parity. This is then compared to the parity that was sent, 110. The parity bits that disagree flag an error. \n", ">\n", ">In this case, the received message has parity bits 011 which disagrees with 110. Here, $p_1$ and $p_3$ are flagged. This syndrome can only correspond to an error on $d_2$ based on the Venn diagram. 
\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8b233a2a", - "metadata": {}, - "outputs": [], - "source": [ - "import matplotlib.pyplot as plt\n", - "from matplotlib_venn import venn3\n", - "import ipywidgets as widgets\n", - "from IPython.display import display, HTML, clear_output\n", - "from ipywidgets import VBox, HBox\n", - "\n", - "# Function to calculate parity bits\n", - "def calculate_parity_bits(data_bits):\n", - " d1, d2, d3, d4 = data_bits\n", - " p1 = d1 ^ d2 ^ d4\n", - " p2 = d1 ^ d3 ^ d4\n", - " p3 = d2 ^ d3 ^ d4\n", - " return [p1, p2, p3]\n", - "\n", - "# Function to update the Venn diagram labels based on data bits\n", - "def update_venn_labels(data_bits):\n", - " # Clear the previous output\n", - " clear_output(wait=True)\n", - "\n", - " # Clear the computed parity bit outputs\n", - " output_p1.clear_output()\n", - " output_p2.clear_output()\n", - " output_p3.clear_output()\n", - "\n", - " # Display the widgets again\n", - " display(VBox([title, data_bits_widget, HBox([button_p1, button_p2, button_p3], layout=widgets.Layout(justify_content='space-between')), HBox([output_p1, output_p2, output_p3])]))\n", - "\n", - " # Create the Venn diagram\n", - " plt.figure(figsize=(8, 8))\n", - " venn = venn3(subsets=(1, 1, 1, 1, 1, 1, 1), set_labels=('p1', 'p2', 'p3'))\n", - "\n", - " # Set colors for the circles using NVIDIA color palette\n", - " venn.get_patch_by_id('100').set_color('#76B900') # Green\n", - " venn.get_patch_by_id('010').set_color('#7A1FA2') # Purple\n", - " venn.get_patch_by_id('001').set_color('#F9A825') # Yellow\n", - "\n", - " # Set colors for the intersections\n", - " venn.get_patch_by_id('110').set_color('#A3A3A3') # Light Gray\n", - " venn.get_patch_by_id('101').set_color('#A3A3A3') # Light Gray\n", - " venn.get_patch_by_id('011').set_color('#A3A3A3') # Light Gray\n", - " venn.get_patch_by_id('111').set_color('#A3A3A3') # Light Gray\n", - "\n", - " # Set transparency for the circles\n", - " 
venn.get_patch_by_id('100').set_alpha(0.5)\n", - " venn.get_patch_by_id('010').set_alpha(0.5)\n", - " venn.get_patch_by_id('001').set_alpha(0.5)\n", - "\n", - " # Label the intersections with data bits\n", - " venn.get_label_by_id('100').set_text(f'')\n", - " venn.get_label_by_id('010').set_text(f'')\n", - " venn.get_label_by_id('001').set_text(f'')\n", - " venn.get_label_by_id('110').set_text(f'd1={data_bits[0]}')\n", - " venn.get_label_by_id('101').set_text(f'd2={data_bits[1]}')\n", - " venn.get_label_by_id('011').set_text(f'd3={data_bits[2]}')\n", - " venn.get_label_by_id('111').set_text(f'd4={data_bits[3]}')\n", - "\n", - " plt.show()\n", - "\n", - "# Function to update the Venn diagram and display the messages\n", - "def update_venn(data_bits, parity_bit):\n", - " parity_bits = calculate_parity_bits(data_bits)\n", - "\n", - " # Clear the previous output\n", - " clear_output(wait=True)\n", - "\n", - " # Display the widgets again\n", - " display(VBox([title, data_bits_widget, HBox([button_p1, button_p2, button_p3], layout=widgets.Layout(justify_content='space-between')), HBox([output_p1, output_p2, output_p3])]))\n", - "\n", - " # Create the Venn diagram\n", - " plt.figure(figsize=(8, 8))\n", - " venn = venn3(subsets=(1, 1, 1, 1, 1, 1, 1), set_labels=('p1', 'p2', 'p3'))\n", - "\n", - " # Set colors for the circles using NVIDIA color palette\n", - " venn.get_patch_by_id('100').set_color('#76B900') # Green\n", - " venn.get_patch_by_id('010').set_color('#7A1FA2') # Purple\n", - " venn.get_patch_by_id('001').set_color('#F9A825') # Yellow\n", - "\n", - " # Set colors for the intersections\n", - " venn.get_patch_by_id('110').set_color('#A3A3A3') # Light Gray\n", - " venn.get_patch_by_id('101').set_color('#A3A3A3') # Light Gray\n", - " venn.get_patch_by_id('011').set_color('#A3A3A3') # Light Gray\n", - " venn.get_patch_by_id('111').set_color('#A3A3A3') # Light Gray\n", - "\n", - "\n", - " # Set transparency for the circles\n", - " 
venn.get_patch_by_id('100').set_alpha(0.5)\n", - " venn.get_patch_by_id('010').set_alpha(0.5)\n", - " venn.get_patch_by_id('001').set_alpha(0.5)\n", - "\n", - " # Label the intersections with data bits\n", - " venn.get_label_by_id('100').set_text(f'')\n", - " venn.get_label_by_id('010').set_text(f'')\n", - " venn.get_label_by_id('001').set_text(f'')\n", - " venn.get_label_by_id('110').set_text(f'd1={data_bits[0]}')\n", - " venn.get_label_by_id('101').set_text(f'd2={data_bits[1]}')\n", - " venn.get_label_by_id('011').set_text(f'd3={data_bits[2]}')\n", - " venn.get_label_by_id('111').set_text(f'd4={data_bits[3]}')\n", - "\n", - " # Highlight the selected parity bit and relevant data bits\n", - " if parity_bit == 'p1':\n", - " venn.get_patch_by_id('100').set_edgecolor('black')\n", - " venn.get_patch_by_id('100').set_linewidth(5)\n", - " venn.get_patch_by_id('110').set_color('#76B900') # Green\n", - " venn.get_patch_by_id('101').set_color('#76B900') # Green\n", - " venn.get_patch_by_id('111').set_color('#76B900') # Green\n", - " venn.get_patch_by_id('110').set_edgecolor('black')\n", - " venn.get_patch_by_id('110').set_linewidth(5)\n", - " venn.get_patch_by_id('101').set_edgecolor('black')\n", - " venn.get_patch_by_id('101').set_linewidth(5)\n", - " venn.get_patch_by_id('111').set_edgecolor('black')\n", - " venn.get_patch_by_id('111').set_linewidth(5)\n", - " output_p1.clear_output()\n", - " with output_p1:\n", - " display(HTML(f\"p1 = d1 + d2 + d4 (mod 2)= {data_bits[0]} + {data_bits[1]} + {data_bits[3]} (mod 2) = {parity_bits[0]}\"))\n", - " elif parity_bit == 'p2':\n", - " venn.get_patch_by_id('010').set_edgecolor('black')\n", - " venn.get_patch_by_id('010').set_linewidth(5)\n", - " venn.get_patch_by_id('110').set_color('#7A1FA2') # Purple\n", - " venn.get_patch_by_id('011').set_color('#7A1FA2') # Purple\n", - " venn.get_patch_by_id('111').set_color('#7A1FA2') # Purple\n", - " venn.get_patch_by_id('110').set_edgecolor('black')\n", - " 
venn.get_patch_by_id('110').set_linewidth(5)\n", - " venn.get_patch_by_id('011').set_edgecolor('black')\n", - " venn.get_patch_by_id('011').set_linewidth(5)\n", - " venn.get_patch_by_id('111').set_edgecolor('black')\n", - " venn.get_patch_by_id('111').set_linewidth(5)\n", - " output_p2.clear_output()\n", - " with output_p2:\n", - " display(HTML(f\"p2 = d1 + d3 + d4 (mod 2)= {data_bits[0]} + {data_bits[2]} + {data_bits[3]} (mod 2) = {parity_bits[1]}\"))\n", - " elif parity_bit == 'p3':\n", - " venn.get_patch_by_id('001').set_edgecolor('black')\n", - " venn.get_patch_by_id('001').set_linewidth(5)\n", - " venn.get_patch_by_id('101').set_color('#F9A825') # Yellow\n", - " venn.get_patch_by_id('011').set_color('#F9A825') # Yellow\n", - " venn.get_patch_by_id('111').set_color('#F9A825') # Yellow\n", - " venn.get_patch_by_id('101').set_edgecolor('black')\n", - " venn.get_patch_by_id('101').set_linewidth(5)\n", - " venn.get_patch_by_id('011').set_edgecolor('black')\n", - " venn.get_patch_by_id('011').set_linewidth(5)\n", - " venn.get_patch_by_id('111').set_edgecolor('black')\n", - " venn.get_patch_by_id('111').set_linewidth(5)\n", - " output_p3.clear_output()\n", - " with output_p3:\n", - " display(HTML(f\"p3 = d2 + d3 + d4 (mod 2)= {data_bits[1]} + {data_bits[2]} + {data_bits[3]} (mod 2) = {parity_bits[2]}\"))\n", - "\n", - " plt.show()\n", - "\n", - "# Create a title widget\n", - "title = widgets.Label(value=\"Hamming Code Visualization: Computing parity bits (p1, p2, p3)\")\n", - "\n", - "\n", - "# Create widgets for user input\n", - "data_bits_widget = widgets.Dropdown(\n", - " options=['0000', '0001', '0010', '0011', '0100', '0101', '0110', '0111', '1000', '1001', '1010', '1011', '1100', '1101', '1110', '1111'],\n", - " value='1001',\n", - " description='Data Bits (d1, d2, d3, d4):', style={'description_width': 'initial'}\n", - ")\n", - "\n", - "# Create buttons for parity bits\n", - "button_p1 = widgets.Button(description='Compute p1', 
layout=widgets.Layout(width='150px'), style=widgets.ButtonStyle(button_color='#BBE07F')) # Green\n", - "button_p2 = widgets.Button(description='Compute p2', layout=widgets.Layout(width='150px'), style=widgets.ButtonStyle(button_color='#BD8FD1')) # Purple\n", - "button_p3 = widgets.Button(description='Compute p3', layout=widgets.Layout(width='150px'), style=widgets.ButtonStyle(button_color='#FCD492')) # Yellow\n", - "\n", - "# Create output areas for parity bit results\n", - "output_p1 = widgets.Output(layout=widgets.Layout(width='300px'))\n", - "output_p2 = widgets.Output(layout=widgets.Layout(width='300px'))\n", - "output_p3 = widgets.Output(layout=widgets.Layout(width='300px'))\n", - "\n", - "# Define the button click events\n", - "def on_button_p1_click(b):\n", - " data_bits_list = [int(bit) for bit in data_bits_widget.value]\n", - " update_venn(data_bits_list, 'p1')\n", "\n", - "def on_button_p2_click(b):\n", - " data_bits_list = [int(bit) for bit in data_bits_widget.value]\n", - " update_venn(data_bits_list, 'p2')\n", "\n", - "def on_button_p3_click(b):\n", - " data_bits_list = [int(bit) for bit in data_bits_widget.value]\n", - " update_venn(data_bits_list, 'p3')\n", - "\n", - "button_p1.on_click(on_button_p1_click)\n", - "button_p2.on_click(on_button_p2_click)\n", - "button_p3.on_click(on_button_p3_click)\n", - "\n", - "# Define the dropdown change event\n", - "def on_data_bits_change(change):\n", - " data_bits_list = [int(bit) for bit in change['new']]\n", - " update_venn_labels(data_bits_list)\n", - "\n", - "data_bits_widget.observe(on_data_bits_change, names='value')\n", - "\n", - "# Display the widgets\n", - "display(VBox([title, data_bits_widget, HBox([button_p1, button_p2, button_p3], layout=widgets.Layout(justify_content='space-between')), HBox([output_p1, output_p2, output_p3])]))\n", - "\n", - "# Initial update of the Venn diagram labels\n", - "update_venn_labels([int(bit) for bit in data_bits_widget.value])\n" + "" ] }, { @@ -626,11 +402,12 @@ "id": 
"e9129bd4-a100-4061-86dd-e69fde915617", "metadata": {}, "source": [ - "
\n", - "

Exercise 2 - The matrix form of the Hamming code:

\n", - "

\n", - "The Hamming code is commonly constructed with special matrices so a few simple linear algebra operations can encode and decode messages. The next two cells will have you define these matrices and see if you can reproduce the example above. \n", - "

\n", + "
\n", + "\n", + "**Exercise 2:** The Matrix Form of the Hamming Code\n", + "\n", + "The Hamming code is commonly constructed with special matrices so a few simple linear algebra operations can encode and decode messages. The next two cells will have you define these matrices and see if you can reproduce the example above.\n", + "\n", "
\n", "\n", "\n", @@ -645,11 +422,13 @@ "metadata": {}, "outputs": [], "source": [ + "# EXERCISE 2\n", + "\n", "message = np.array([0, 1, 1, 0])\n", "\n", "# The G matrix should properly encode the message when the following calculation is performed\n", "G = np.array([\n", - "#FILL IN G. \n", + " ##TODO## Fill in G\n", "])\n", "\n", "encoded = np.dot(message, G) % 2\n", @@ -671,15 +450,15 @@ "metadata": {}, "outputs": [], "source": [ - "received = np.array([0, 0, 1, 0,1,1,0])\n", + "received = np.array([0, 0, 1, 0, 1, 1, 0])\n", "print(received)\n", "\n", "# Define the parity check matrix H which takes a message and determines the syndrome.\n", "H = np.array([\n", - "#FILL IN H\n", + " ##TODO## Fill in H\n", "])\n", "\n", - "decoded = np.dot(H, recieved) % 2\n", + "decoded = np.dot(H, received) % 2\n", "\n", "# Should print [0 1 1]\n", "print(decoded)" @@ -690,6 +469,8 @@ "id": "354b601a-b343-46d7-9aa5-5d2f5725637c", "metadata": {}, "source": [ + "---\n", + "\n", "## 1.4 What Makes QEC so Hard?" ] }, @@ -702,13 +483,14 @@ " \n", "1. Continuous Errors - Classical errors are always discrete bit flips. Quantum errors are continuous and can manifest in an infinite number of ways, potentially shifting a qubit's state to any point on the Bloch sphere. For instance, the figure below illustrates many possible errors that affect a qubit starting in the $\\ket{0}$ state. Errors can perturb states incoherently (from environmental effects) or coherently from slight hardware imperfections. This invites the question, \"Does QEC require an infinite amount of resources to correct errors?\"\n", " \n", - "\"Drawing\"\n", + "\"Bloch\n", "\n", "2. No Cloning - Quantum states cannot be copied. That is to say that the following expression holds:$~\\nexists U \\text{ such that } U(\\ket{\\psi} \\otimes \\ket{\\rho}) = \\ket{\\psi} \\otimes\\ket{\\psi}$. This means we cannot just send multiple copies of the quantum state through the noisy channel like the classical repetition code. 
\n", "\n", "3. Destructive Measurement - In classical EC, the state can be accessed at any time, making decoding much easier. Measuring a quantum state collapses it, making the EC moot if the state is destroyed. Therefore, more clever ways to extract syndromes are required. A secondary consequence of this fact is sampling error. Even if an algorithm could perform perfectly ensuring no sources of error, many applications require statistical sampling of the resulting state. If we sampled $\\ket{\\psi} = \\alpha\\ket{0} + \\beta\\ket{1}$ the frequency of 0's would be close to $\\alpha^2$ but deviate based on the number of samples per the Central Limit Theorem.\n", "\n", - "4. Scalability - Though scalability is an issue for classical EC, it is far more severe for QEC. Today's noisy intermediate scale quantum devices are very difficult to control, so each additional qubit required for QEC comes at great cost. Qubits also have short coherence times, so QEC procedures must complete within strict time constraints which gets harder at scale. Finally, the threshold theorem is in play. In classical EC, adding more bits always reduces the logical error rate. This is not true for quantum - physical qubits must have noise below a specific threshold in order for scaling the code to improve the error rates, otherwise, the results just get worse.\n" + "4. Scalability - Though scalability is an issue for classical EC, it is far more severe for QEC. Today's noisy intermediate scale quantum devices are very difficult to control, so each additional qubit required for QEC comes at great cost. Qubits also have short coherence times, so QEC procedures must complete within strict time constraints which gets harder at scale. Finally, the threshold theorem is in play. In classical EC, adding more bits always reduces the logical error rate. 
This is not true for quantum - physical qubits must have noise below a specific threshold in order for scaling the code to improve the error rates, otherwise, the results just get worse.\n", + "" ] }, { @@ -716,7 +498,9 @@ "id": "30bbbc72-eec4-475c-be5f-53f42fb47963", "metadata": {}, "source": [ - "## 1.5 There is still hope for QEC!" + "---\n", + "\n", + "## 1.5 There is Still Hope for QEC!" ] }, { @@ -726,101 +510,19 @@ "source": [ "The challenges discussed above are daunting but there are many ingenious techniques developed to help circumvent them. That said, practical QEC remains difficult to realize and is an extremely active research field - viewed as one of the most important prerequisites for useful quantum computing. This section will begin to bridge the gap between classical EC and QEC.\n", "\n", + "### Syndrome Extraction\n", "\n", - "### Digitization of errors\n", - "\n", - "Errors can perturb states incoherently from environmental effects or coherently from slight hardware imperfections. While both types of errors can be addressed, we’ll focus on coherent errors first because they’re often easier to isolate and analyze.\n", + "The no cloning principle means quantum states cannot be copied for QEC. We'll need a clever way to extract syndromes from the logical state that does not rely on repetition. But, how is this done without destroying the information that is being protected?\n", "\n", - "For instance, a rotation gate that should be at an angle of $\\frac{\\pi}{16} \\approx 0.196 $ ends up being more like 0.17. This may seem inconsequential, but imperfections like this accumulate and quickly ruin the outcome of a quantum algorithm. Execute the code block below and use the slider to change the number of rotation gates executed to see how the error can become substantial. Feel free to experiment with different values for the `angle`, `noisy_angle`, and the rotation axis in the `rotation_kernel`. 
\n", + "The solution involves **stabilizers** which are specially designed operators that act on a logical state without changing it, but still enable us to learn about errors by performing projective measurement of ancilla qubits. The next notebook in this series will introduce stabilizers with more mathematical rigor, and the following section of this lab will provide a more concrete example of a simple stabilizer in action. Essentially, stabilizers perform parity checks and project the quantum state into the $1\\ket{\\psi}$ state if the parity check passes and $-1\\ket{\\psi}$ if the parity check is violated. So, you return the same state either way, and with enough atabilizers, can identify which errors occured and fix them.\n", "\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e4384ef7", - "metadata": {}, - "outputs": [ - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "c4245b45f7814e80a159c01104f1a6cf", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "interactive(children=(IntSlider(value=1, continuous_update=False, description='n:', max=20, min=1), Output()),…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Angles of rotation of a qubit\n", - "angle = np.pi / 16 \n", - "noisy_angle = 0.17 \n", "\n", - "# Kernel to initialize a qubit in the zero ket state and rotate it about the x axis by given angle n times\n", - "@cudaq.kernel\n", - "def rotation_kernel(n: int, angle: float):\n", - " qubit = cudaq.qubit()\n", - " for _ in range(n):\n", - " rx(angle, qubit) # CHANGE THE ROTATION AXIS\n", - "\n", - "# Function to plot sample results\n", - "def plot_results(results1, results2):\n", - " # Convert the sample results to a dictionary\n", - " result_dictionary1 = {k: v for k, v in results1.items()}\n", - " 
result_dictionary2 = {k: v for k, v in results2.items()}\n", - " \n", - " # Get all unique x-values from both dictionaries\n", - " all_keys = set(result_dictionary1.keys()).union(set(result_dictionary2.keys()))\n", - " all_keys = sorted(all_keys)\n", - "\n", - " # Convert the dictionary to lists for x and y values\n", - " x1 = list(all_keys)\n", - " y1 = list(result_dictionary1.values())\n", - " y2 = list(result_dictionary2.values())\n", - "\n", - " # Create the combined histogram\n", - " bar_width = 0.35\n", - " x_indices = range(len(x1))\n", - "\n", - " plt.bar(x_indices, y1, width=bar_width, color='#76B900', label='Noise-Free Results')\n", - " plt.bar([i + bar_width for i in x_indices], y2, width=bar_width, color='#484848', label='Noisy Results')\n", - "\n", - " # Add title and labels\n", - " plt.title('Comparing sampling results of n applications of a noise-free gate with a noisy version')\n", - " plt.xlabel(\"Basis States\")\n", - " plt.ylabel(\"Frequency\")\n", - " plt.xticks([i + bar_width / 2 for i in x_indices], x1)\n", - " plt.legend()\n", - "\n", - " # Show the plot\n", - " plt.tight_layout()\n", - " plt.show()\n", + "### Digitization of errors\n", "\n", - "# Function to update the plot based on the slider value\n", - "def update_plot(num_rotations):\n", - " expected_result = cudaq.sample(rotation_kernel, num_rotations, angle)\n", - " noisy_result = cudaq.sample(rotation_kernel, num_rotations, noisy_angle)\n", - " plot_results(expected_result, noisy_result)\n", + "Errors can perturb states incoherently from environmental effects or coherently from slight hardware imperfections. 
While both types of errors can be addressed, we’ll focus on coherent errors first because they’re often easier to isolate and analyze.\n", "\n", - "# Create an interactive slider\n", - "slider = widgets.IntSlider(min=1, max=20, step=1, value=1, description='n:', continuous_update=False)\n", - "interact(update_plot, num_rotations=slider)" + "For instance, a rotation gate that should be at an angle of $\\frac{\\pi}{16} \\approx 0.196 $ ends up being more like 0.17. This may seem inconsequential, but imperfections like this accumulate and quickly ruin the outcome of a quantum algorithm.\n", + "\n" ] }, { @@ -828,76 +530,11 @@ "id": "959da618", "metadata": {}, "source": [ - "Among the various coherent errors that can occur on a qubit storing the quantum state $\\ket{\\psi} = \\alpha \\ket{0}+\\beta\\ket{1}$, we will focus on three specific types:\n", - "* **Bit flip errors** swap a qubit's amplitudes, transforming $\\ket{\\psi}$ to $\\beta\\ket{0}+\\alpha\\ket{1}$.\n", - "* **Phase flip errors** introduce a sign change in one of the amplitudes, transforming $\\ket{\\psi}$ to $\\alpha\\ket{0}-\\beta\\ket{1}$.\n", - "* **Combining a bit flip with a phase flip error** swaps amplitudes and applies a sign change, transforming $\\ket{\\psi}$ to $\\beta\\ket{0}-\\alpha\\ket{1}$.\n", + "We will focus on three different coherent errors that can occur on a qubit storing the quantum state $\\ket{\\psi} = \\alpha \\ket{0}+\\beta\\ket{1}$:\n", "\n", - "Run the cell below to open an interactive tool that allows you to visualize the impact of different error types on various quantum states. Observe how some error types may not alter the state. Why do you think that happens? What patterns can you identify?" 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1ba829f0", - "metadata": {}, - "outputs": [], - "source": [ - "# Execute this cell to see the interactive widget\n", - "# Don't concern yourself with the code below this line\n", - "# Function to update and display the Bloch sphere\n", - "def update_bloch_sphere(theta, phi, error_type):\n", - " alpha = np.cos(theta / 2)\n", - " beta = np.sin(theta / 2) * np.exp(1j * phi)\n", - " coefficients = [complex(alpha, 0), complex(0, beta)]\n", - " error_types = ['Bit Flip', 'Phase Flip', 'Bit & Phase Flip']\n", - " @cudaq.kernel\n", - " def initial_state_kernel(coefficients: list[complex]):\n", - " qubit = cudaq.qvector(coefficients)\n", - "\n", - " @cudaq.kernel\n", - " def initial_state_error(coefficients: list[complex], error: int):\n", - " qubit = cudaq.qvector(coefficients)\n", - " if error == 0 or error == 2:\n", - " # bit flip error\n", - " x(qubit)\n", - " if error == 1 or error == 2:\n", - " # phase flip error\n", - " z(qubit)\n", - "\n", - " state_no_error = cudaq.get_state(initial_state_kernel, coefficients)\n", - " state_with_error = cudaq.get_state(initial_state_error, coefficients, error_type)\n", - "\n", - " blochSphereList = []\n", - " # Define a sphere object representing the state of the single qubit\n", - " sphere = cudaq.add_to_bloch_sphere(state_no_error)\n", - " blochSphereList.append(sphere)\n", - " sphere = cudaq.add_to_bloch_sphere(state_with_error)\n", - " blochSphereList.append(sphere)\n", - "\n", - " # Create output widgets for the Bloch spheres and text\n", - " out1 = Output()\n", - " out2 = Output()\n", - " text1 = Output()\n", - " text2 = Output()\n", - "\n", - " with out1:\n", - " cudaq.show([blochSphereList[0]], nrows=1, ncols=1)\n", - " with out2:\n", - " cudaq.show([blochSphereList[1]], nrows=1, ncols=1)\n", - " with text1:\n", - " print(f\"|ψ> = cos(θ/2)|0⟩ + e^(iφ)sin(θ/2)|1⟩\")\n", - " with text2:\n", - " print(\"|ψ⟩ with a \", error_types[error_type], \" 
error\")\n", - "\n", - " display(VBox([HBox([VBox([text1, out1]), VBox([text2, out2])])]))\n", - "\n", - "# Create the interactive widget\n", - "theta_slider = widgets.FloatSlider(value=np.pi/2, min=0, max=2*np.pi, step=0.01, description='θ (radians):')\n", - "phi_slider = widgets.FloatSlider(value=0, min=0, max=np.pi, step=0.01, description='φ (radians):')\n", - "error_selector = widgets.Dropdown(options=[('None', -1), ('Bit Flip', 0), ('Phase Flip', 1), ('Bit & Phase Flip', 2)], value=-1, description='Error Type:')\n", - "\n", - "interact(update_bloch_sphere, theta=theta_slider, phi=phi_slider, error_type=error_selector)\n" + "* **Bit flip errors (X)** swap a qubit's amplitudes, transforming $\\ket{\\psi}$ to $\\beta\\ket{0}+\\alpha\\ket{1}$.\n", + "* **Phase flip errors (Z)** introduce a sign change in one of the amplitudes, transforming $\\ket{\\psi}$ to $\\alpha\\ket{0}-\\beta\\ket{1}$.\n", + "* **Combining a bit flip with a phase flip error (X and Z)** swaps amplitudes and applies a sign change, transforming $\\ket{\\psi}$ to $\\beta\\ket{0}-\\alpha\\ket{1}$." ] }, { @@ -907,18 +544,20 @@ "source": [ "Once we have identified one of these errors, we can correct it. For instance, if a qubit has undergone a bit flip error, we can correct it by applying an $X$ gate. Similarly, to correct a qubit that has experienced a phase flip error, we simply apply a $Z$ gate. How would you correct a qubit that has been identified as having undergone a bit flip error followed by a phase flip error? \n", "\n", - "We can address all coherent errors with a key insight: although the Bloch sphere suggests errors can occur through infinitely many possible rotations, all such errors can be broken down into three basic forms — bit flips, phase flips, or a combination of both bit flips and phase flips. 
\n", + "We can address all coherent errors with a key insight: although the Bloch sphere suggests errors can occur through infinitely many possible rotations, all such errors can be broken down into three basic forms — bit flips, phase flips, or a combination of both bit flips and phase flips. In other words, any error state can be reached by some combination of $X$, $Y$, and $Z$ rotations. So, why are there not an infinite number of corrections?\n", "\n", - "If you'd like an explanation of why this decomposition works, consult the optional section below. For now, remember that by detecting and correcting these three core error types, we can effectively handle any coherent noise. \n", + "The math below explains how an arbitrary error results in a finite set of possible error states. \n", + "\n", + " Consider a qubit in the following normalized state.\n", + " $$ \\ket{\\psi} = \\cos\\frac{\\theta}{2}\\ket{0} + e^{i\\phi}\\sin\\frac{\\theta}{2}\\ket{1} $$\n", + " \n", + " Coherent errors can be represented by the application of a Unitary $U(\\delta\\theta,\\delta\\phi)$ which acts on the ideal state and perturbs it.\n", + "$$ U(\\delta\\theta,\\delta\\phi)\\ket{\\psi} = \\cos\\frac{\\theta +\\delta\\theta}{2}\\ket{0} + e^{i\\phi+\\delta\\phi}\\sin\\frac{\\theta+\\delta\\theta}{2}\\ket{1} $$\n", + " Using the fact that the Pauli matrices form a basis for any 2x2 unitary matrix and taking advantage of the identity $Y=iXZ$, the operation can be rewritten as\n", + " $$ U(\\delta\\theta,\\delta\\phi) \\ket{\\psi} = \\alpha_II\\ket{\\psi} +\\alpha_X X\\ket{\\psi}+\\alpha_Z Z\\ket{\\psi}+\\alpha_{XZ}XZ\\ket{\\psi} $$\n", + " This means that any coherent error can be **digitized** into X-type bit flip errors ($X \\ket{\\psi} = \\alpha X\\ket{0} + \\beta X\\ket{1} = \\alpha\\ket{1} + \\beta\\ket{0}$), Z-type phase flip errors ($Z\\ket{\\psi} = \\alpha Z\\ket{0} + \\beta Z\\ket{1} = \\alpha\\ket{0} - \\beta\\ket{1}$), or a combination of the two (XZ). 
This makes correction much more tractable, as there are only three types of errors to consider.\n", "\n", - "> **Optional:** Consider a qubit in the following normalized state.\n", - "> $$ \\ket{\\psi} = \\cos\\frac{\\theta}{2}\\ket{0} + e^{i\\phi}\\sin\\frac{\\theta}{2}\\ket{1} $$\n", - "> \n", - "> Coherent errors can be represented by the application of a Unitary $U(\\delta\\theta,\\delta\\phi)$ which acts on the ideal state and perturbs it.\n", - ">$$ U(\\delta\\theta,\\delta\\phi)\\ket{\\psi} = \\cos\\frac{\\theta +\\delta\\theta}{2}\\ket{0} + e^{i\\phi+\\delta\\phi}\\sin\\frac{\\theta+\\delta\\theta}{2}\\ket{1} $$\n", - "> Using the fact that the Pauli matrices form a basis for any 2x2 unitary matrix and taking advantage of the identity $Y=iXZ$, the operation can be rewritten as\n", - "> $$ U(\\delta\\theta,\\delta\\phi) \\ket{\\psi} = \\alpha_II\\ket{\\psi} +\\alpha_X X\\ket{\\psi}+\\alpha_Z Z\\ket{\\psi}+\\alpha_{XZ}XZ\\ket{\\psi} $$\n", - "> This means that any coherent error can be **digitized** into X-type bit flip errors ($X \\ket{\\psi} = \\alpha X\\ket{0} + \\beta X\\ket{1} = \\alpha\\ket{1} + \\beta\\ket{0}$), Z-type phase flip errors ($Z\\ket{\\psi} = \\alpha Z\\ket{0} + \\beta Z\\ket{1} = \\alpha\\ket{0} - \\beta\\ket{1}$), or a combination of the two (XZ). This makes correction much more tractable, as there are only three types of errors to consider." + " Try the widget [linked here](https://nvidia.github.io/cuda-q-academic/interactive_widgets/error_digitization.html) to see a concrete example of this for an $X$ rotation (bitflip error) impacting the state $\\ket{000}$ and how stabilizers are the key to correcting the error." ] }, { "cell_type": "markdown", "id": "b8ddc3f4-6d1e-4ed2-b34c-5018220ef617", "metadata": {}, "source": [ - "### Syndrome Extraction\n", - "\n", - "The no cloning principle means quantum states cannot be copied for QEC. We'll need a clever way to extract syndromes from the logical state that does not rely on repetition. 
But, how is this done without destroying the information that is being protected?\n", + "### Better QEC Codes and AI Solutions\n", "\n", - "The solution involves **stabilizers** which are specially designed operators that act on a logical state without changing it, but still enable us to learn about errors by performing projective measurement of ancilla qubits. The next notebook in this series will introduce stabilizers with more mathematical rigor, and the example in section 1.6 of this lab will provide a more concrete example of a simple stabilizer in action.\n", - "\n", - "### Better QEC codes and AI solutions\n", - "\n", - "Finally, overcoming the QEC scaling challenges will require breakthroughs on many fronts. Significant research efforts are targeting discovery of more efficient QEC codes that require fewer qubits. AI is already showing great promise as a tool to help find new QEC codes, and accelerate decoding. Later notebooks will explore AI for QEC applications.\n" + "Finally, overcoming the QEC scaling challenges will require breakthroughs on many fronts. Significant research efforts are targeting discovery of more efficient QEC codes that require fewer qubits. AI is already showing great promise as a tool to help find new QEC codes, and accelerate decoding. Later notebooks will explore AI for QEC applications." ] }, { @@ -942,6 +575,8 @@ "id": "d82e4b87-2d90-499e-91bc-c4f2af3312eb", "metadata": {}, "source": [ + "---\n", + "\n", "## 1.6 The Quantum Repetition Code" ] }, @@ -952,11 +587,11 @@ "source": [ "A quantum state cannot be cloned, but it can be redundantly encoded across additional entangled qubits. 
Let's start with a generic normalized qubit state $\\ket{\\psi}$:\n", "\n", - "$$\\ket{\\psi} = \\alpha\\ket{0} +\\beta\\ket{1}.$$ \n", + "$$\\ket{\\psi} = \\alpha\\ket{0} +\\beta\\ket{1}.$$\n", "\n", "The 0 and 1 states can be encoded into a logical state making use of the larger 8-dimensional Hilbert space of three qubits: \n", "\n", - "$$\\ket{\\psi}_L = \\alpha\\ket{000} +\\beta\\ket{111} = \\alpha\\ket{0}_L +\\beta\\ket{1}_L.$$ \n", + "$$\\ket{\\psi}_L = \\alpha\\ket{000} +\\beta\\ket{111} = \\alpha\\ket{0}_L +\\beta\\ket{1}_L.$$\n", "\n", "Note that this is *not* equivalent to $\\ket{\\psi} \\otimes \\ket{\\psi} \\otimes \\ket{\\psi}$.\n", "\n", @@ -1001,7 +636,7 @@ "\n", "\n", "\n", - "\"Drawing\"\n", + "\"Quantum\n", "\n", "\n", "\n", @@ -1024,7 +659,8 @@ "$$ \\ket{0}(\\frac{1+Z_1Z_2}{2})\\ket{011} + \\ket{1}(\\frac{1-Z_1Z_2}{2})\\ket{011} = \\ket{0}(\\frac{1+ -1}{2})\\ket{011} + \\ket{1}(\\frac{1--1}{2})\\ket{011} = \\ket{1}\\ket{011}. $$\n", "\n", "A similar analysis will show that the second ancilla qubit will be measured as 0 with certainty without distubring the data qubits. Accordoing to the syndrome table, \n", - "this will trigger an application of the $X$ gate on the first qubit to correct the error.\n" + "this will trigger an application of the $X$ gate on the first qubit to correct the error.\n", + "" ] }, { @@ -1032,86 +668,74 @@ "id": "f5cbe2ef-044a-46ae-abf3-26f87ba30a12", "metadata": {}, "source": [ - "## 1.7 Exercise 3: Coding the Quantum Repetition Code\n", - "\n", - "
\n", - "

Exercise 3 - The matrix form of the Hamming code:

\n", - "

\n", - " Now that you understand the quantum repetition code, try to code it using CUDA-Q. Fill in each of the steps below marked \"#TODO\". CUDA-Q contains a couple of features particularly helpful for building QEC workflows. First, and already completed for you, is the definition of a custom noise model which produces custom identity operations that can randomly perform bit flips on specific qubits. Second, you can measure the ancilla qubits within the kernel and use the result to perform a correction operation. The documentation example on building kernels and mid-circuit measurement may be helpful for this exercise.\n", - "

\n", - " Try to code all the steps and then sample the kernel to determine the logical error rate.\n", - "

\n", - "
\n", - "\n" + "---\n", + "\n", + "## 1.7 Coding the Quantum Repetition Code\n", + "\n", + "
\n", + "\n", + "**Exercise 3:** The Quantum Repetition Code\n", + "\n", + "Now that you understand the quantum repetition code, try to code it using CUDA-Q. Fill in each of the steps below marked `##TODO##`. CUDA-Q contains a couple of features particularly helpful for building QEC workflows. First, and already completed for you, is the definition of a custom noise model which produces custom identity operations that can randomly perform bit flips on specific qubits. Second, you can measure the ancilla qubits within the kernel and use the result to perform a correction operation. The documentation example on [building kernels](https://nvidia.github.io/cuda-quantum/latest/using/examples/building_kernels.html) and [mid-circuit measurement](https://nvidia.github.io/cuda-quantum/latest/examples/python/measuring_kernels.html) may be helpful for this exercise.\n", + "\n", + "Try to code all the steps and then sample the kernel to determine the logical error rate.\n", + "\n", + "
" ] }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "id": "8d662290-b51c-4009-8e7c-d9d5f7df40ad", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{ \n", - " __global__ : { 0:276 00:468 1:256 }\n", - " b0 : { 0:468 1:532 }\n", - "}\n", - "\n" - ] - } - ], + "outputs": [], "source": [ - "import cudaq\n", - "import numpy as np\n", + "# EXERCISE 3\n", "\n", "cudaq.set_target('density-matrix-cpu')\n", "\n", - "\n", - "#First, create an empty noise model\n", + "# First, create an empty noise model\n", "noise_model = cudaq.NoiseModel()\n", "p = 0.1\n", "\n", - "#Build a custom gate which applies the identity operation\n", + "# Build a custom gate which applies the identity operation\n", "cudaq.register_operation(\"custom_i\", np.array([1, 0, 0, 1]))\n", "\n", - "#Add a bitflip noise channel to the custom_i gate applied to each qubit\n", + "# Add a bitflip noise channel to the custom_i gate applied to each qubit\n", "noise_model.add_channel(\"custom_i\", [0], cudaq.BitFlipChannel(p))\n", "noise_model.add_channel(\"custom_i\", [1], cudaq.BitFlipChannel(p))\n", "noise_model.add_channel(\"custom_i\", [2], cudaq.BitFlipChannel(p))\n", "\n", "@cudaq.kernel\n", - "def three_qubit_repetition_code():\n", - " \"\"\"Prepares a kernel for the 3-bit quantum repetition code\n", - "\n", - " Returns\n", - " -------\n", - " cudaq.kernel\n", - " Kernel for running the 3-bit quantum repetition code\n", - " \n", - " \"\"\" \n", + "def three_qubit_repetition_code() -> list[int]:\n", + " \"\"\"Prepares a kernel for the 3-bit quantum repetition code.\"\"\"\n", "\n", " # Create register for data and ancilla qubits\n", - " # TODO\n", + " ##TODO##\n", "\n", " # Initialize the logical |1> state as |111>\n", - " # TODO\n", + " ##TODO##\n", "\n", " # Apply custom_i to induce random bitflip errors\n", - " # TODO\n", - " \n", + " ##TODO##\n", + "\n", " # Extract Syndromes\n", - " # TODO\n", - " \n", + " ##TODO##\n", + "\n", 
" # Correct errors based on syndromes\n", - " # TODO\n", + " ##TODO##\n", "\n", "# Run the kernel and observe results\n", - "# The percent of samples that are 000 corresponds to the logical error rate\n", - "result = cudaq.sample(three_qubit_repetition_code, noise_model=noise_model)\n", - "print(result)" + "# The percent of samples that are 000 on first three qubits (data qubits) corresponds to the logical error rate\n", + "result = cudaq.run(three_qubit_repetition_code, noise_model=noise_model, shots_count=100)\n", + "\n", + "logical_errors = 0\n", + "for run in result:\n", + " if not (run[0] or run[1] or run[2]):\n", + " logical_errors += 1\n", + " print(run)\n", + "\n", + "print(\"Logical Error Rate:\", logical_errors / 100)" ] }, { "cell_type": "markdown", @@ -1121,15 +745,22 @@ "source": [ "## Conclusion\n", "\n", - "You now have a basic understanding of EC and QEC. The next lab will explore stabilizers in more detail and equip you to code two of the most famous and fundamental QEC codes: the Shor code and the Steane code." + "You now have a basic understanding of error correction (EC) and quantum error correction (QEC). You explored the five aspects common to all EC procedures, implemented the classical repetition code and Hamming code, identified the unique challenges of QEC, and built the three-qubit quantum repetition code in CUDA-Q. The next lab will explore **stabilizers** in more detail and equip you to code two of the most famous and fundamental QEC codes: the Shor code and the Steane code. Future labs will cover more advanced topics like decoding and other specific QEC codes."
+ ] + }, + { + "cell_type": "markdown", + "id": "ca54ebb72627406f", + "metadata": {}, + "source": [ + "**Related Notebooks:**\n", + "* [QEC 101 Lab 2 — Stabilizers](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) — introduces the stabilizer formalism for quantum error correction\n", + "* [Quick Start to Quantum — Notebook 1](https://github.com/NVIDIA/cuda-q-academic/blob/main/quick-start-to-quantum/01_quick_start_to_quantum.ipynb) — prerequisite notebook covering qubits, gates, and measurement in CUDA-Q\n", + "* [QEC 101 Lab 3 — Noisy Simulation](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/03_QEC_Noisy_Simulation.ipynb) — applies QEC codes with realistic noise models in CUDA-Q" ] } ], "metadata": { - "colab": { - "include_colab_link": true, - "provenance": [] - }, "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", @@ -1146,8 +777,23 @@ "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.12.3" + }, + "learning_goals": { + "cfqt_domain": "QCS", + "cfqt_subdomain": [ + "QCS.ERR", + "QCS.SW", + "QCS.ALG" + ], + "cfqt_proficiency": "A2", + "qist_competency_areas": [ + "quantum_error_correction", + "quantum_software", + "quantum_algorithms" + ], + "application_domain": "error_correction" } }, "nbformat": 4, "nbformat_minor": 5 -} +} \ No newline at end of file diff --git a/qec101/02_QEC_Stabilizers.ipynb b/qec101/02_QEC_Stabilizers.ipynb index 6cec4fa..26c2c8b 100644 --- a/qec101/02_QEC_Stabilizers.ipynb +++ b/qec101/02_QEC_Stabilizers.ipynb @@ -4,9 +4,7 @@ "cell_type": "code", "execution_count": null, "id": "eead95ce", - "metadata": { - "id": "eead95ce" - }, + "metadata": {}, "outputs": [], "source": [ "# SPDX-License-Identifier: Apache-2.0 AND CC-BY-NC-4.0\n", @@ -27,47 +25,63 @@ { "cell_type": "markdown", "id": "135f6e3e-e55f-4b5c-80cb-f8979bba94e0", - "metadata": { - "id": "135f6e3e-e55f-4b5c-80cb-f8979bba94e0" - }, + "metadata": {}, "source": [ - "# QEC 101\n", - 
"## Lab 2 - Stabilizers, the Shor code, and the Steane code\n", - "\n", - "$\n", - "\\renewcommand{\\ket}[1]{|{#1}\\rangle}\n", - "\\renewcommand{\\bra}[1]{\\langle{#1}|}\n", - "$\n", - "This lab introduces the stabilizer formalism, a powerful tool for working with more sophisticated quantum error correction (QEC) codes. After a brief introduction to the theory, the lab will walk through the Shor and Steane codes with interactive coding exercises. \n", + "# QEC 101 — Lab 2: Stabilizers, the Shor Code, and the Steane Code\n", + "$\\renewcommand{\\ket}[1]{|{#1}\\rangle}\\renewcommand{\\bra}[1]{\\langle{#1}|}$\n", + "\n", + "---\n", + "\n", + "**What You Will Do:**\n", + "* Define stabilizers and explain their role in quantum error correction\n", + "* Implement the Steane code encoding and syndrome measurement in CUDA-Q\n", + "* Perform a code capacity analysis on the Steane code using CUDA-Q QEC\n", + "* Implement the Shor code encoding, error detection, and correction in CUDA-Q\n", + "\n", + "**Prerequisites:**\n", + "* Python and Jupyter familiarity\n", + "* Basic knowledge of quantum computing (qubits, gates, measurement)\n", + "* Completion of QEC 101 Lab 1 (classical and quantum repetition codes, Hamming code)\n", + "* Familiarity with the Pauli matrices and tensor products\n", + "\n", + "**Key Terminology:**\n", + "* Stabilizer\n", + "* Codespace\n", + "* Logical Operator\n", + "* Syndrome\n", + "* CSS Code (Calderbank-Shor-Steane)\n", + "* Steane Code\n", + "* Shor Code\n", + "* Pauli Group\n", + "* Code Capacity\n", + "* Color Code\n", + "\n", + "**CUDA-Q Syntax:**\n", + "* [`@cudaq.kernel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.kernel) — defines a quantum kernel function\n", + "* [`cudaq.qvector`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.qvector) — allocates a register of qubits\n", + "* 
[`cudaq.sample`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.sample) — samples measurement outcomes from a kernel\n", + "* [`cudaq.set_target`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.set_target) — selects simulation or hardware backend\n", + "* [`cudaq_qec`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — CUDA-Q Quantum Error Correction library\n", + "\n", + "**Solutions:** [`Solutions/02_QEC_Stabilizers_Solution.ipynb`](Solutions/02_QEC_Stabilizers_Solution.ipynb)\n", + "\n", + "This lab introduces the stabilizer formalism, a powerful tool for working with more sophisticated quantum error correction (QEC) codes. After a brief introduction to the theory, the lab will walk through the Shor and Steane codes with interactive coding exercises.\n", "\n", "This lab was motivated by content from \"[Quantum Error Correction: an Introductory Guide](https://arxiv.org/abs/1907.11157)\" and \"[Quantum Error Correction for Dummies](https://arxiv.org/abs/2304.08678)\", both excellent resources we refer readers to for additional detail. For a more technical introduction, see chapter 10 of \"[Quantum Computation and Quantum Information](https://books.google.com/books?hl=en&lr=&id=-s4DEy7o-a0C&oi=fnd&pg=PR17&dq=quantum+computation+and+quantum+information&ots=NJ4KdqnzZt&sig=uKTETo5LLjWB9F_PV_zf0Sw3bvk#v=onepage&q=quantum%20computation%20and%20quantum%20information&f=false)\" or the [PhD thesis](https://arxiv.org/abs/quant-ph/9705052) where the concept of stabilizer codes was introduced.\n", "\n", - "This is the second lab in the QEC series. 
If you are not familiar with the basics of classical or quantum error correction (EC), please complete the first lab in this series.\n", - "\n", - "The list below outlines what you'll be doing in each section of this lab:\n", - "\n", - "* **2.1** Define stabilizers and why they are important\n", - "* **2.2** Interactively Learn and Code the Steane Code in CUDA-Q.\n", - "* **2.3** Perform Steane Code Capacity Analysis with CUDA-QX\n", - "* **2.4** Interactively Learn and Code the Shor Code in CUDA-Q.\n", - "\n", - "\n", - "\n", - "Lab 2 Learning Objectives:\n", - "* Understand what a stabilizer is, how it works, and why it is important\n", - "* Understand the approach of the Shor and Steane codes\n", - "* Understand logical operators\n", - "* Code the Shor and Steane codes in CUDA-Q" + "This is the second lab in the QEC series. If you are not familiar with the basics of classical or quantum error correction (EC), please complete the first lab in this series." ] }, { "cell_type": "markdown", - "id": "b5046650", - "metadata": { - "id": "b5046650" - }, + "id": "59346183e90b45b2", + "metadata": {}, "source": [ - "Execute the cells below to load all the necessary packages for this lab." + "
\n", + "\n", + "**⚡ GPU Required:** Parts of this notebook require a GPU.\n", + "\n", + "
" ] }, { @@ -77,33 +91,45 @@ "metadata": {}, "outputs": [], "source": [ - "## Instructions for Google Colab. You can ignore this cell if you have cuda-q set up and have \n", - "# all the dependent files on your system\n", - "# Uncomment the lines below and execute the cell to install cuda-q\n", - "\n", - "#!pip install cudaq\n", - "#!pip install cudaq_qec\n", + "## Instructions for Google Colab. You can ignore this cell if you have CUDA-Q\n", + "## set up locally with all required files on your system.\n", + "## Uncomment the lines below and execute this cell to install CUDA-Q.\n", "\n", + "#!pip install cudaq -q\n", + "#\n", "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", "#!unzip -q main.zip\n", - "#!mv cuda-q-academic-main/qec101/Images ./Images\n" + "#!mv cuda-q-academic-main/qec101/Images ./Images" + ] + }, + { + "cell_type": "markdown", + "id": "b5046650", + "metadata": {}, + "source": [ + "> **Note:** Run the cell below to import all required packages.\n", + "> If you installed packages above, restart the kernel first\n", + "> (**Runtime → Restart session** in Colab, or **Kernel → Restart** in Jupyter)." 
] }, { "cell_type": "code", "execution_count": null, "id": "644b9c82", - "metadata": { - "id": "644b9c82" - }, + "metadata": {}, "outputs": [], "source": [ + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "from typing import List\n", + "\n", "import cudaq\n", "from cudaq import spin\n", "from cudaq.qis import *\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "from typing import List" + "\n", + "## To install cudaq-qec (if not already installed), uncomment and run:\n", + "## !pip install cudaq-qec -q\n", + "import cudaq_qec as qec" ] }, { @@ -111,6 +137,8 @@ "id": "b3e18790-87ab-4180-b349-a4210abca136", "metadata": {}, "source": [ + "---\n", + "\n", "## 2.1 Stabilizers and Logical Operators" ] }, @@ -126,7 +154,7 @@ "An operation $s$ acting on a state $\\ket{\\psi}$ is said to be a stabilizer of the state if the state is a +1 eigenstate of the operation $s \\ket{\\psi} = +1 \\ket{\\psi}$. The high-level intuiton here is that if small errors have accumulated in a logically encoded state, the action of applying this stabilizer is to project the state back to a perfectly error-free state, and we measure $+1$. Sometimes larger errors occur, and we do not measure $+1$, which informs us something has gone wrong.\n", "\n", "\n", - "In lab 1, the codespace was defined by the set of basis codewords, such as $\\ket{000}$ and $\\ket{111}$ for the 3-qubit quantum repetition code. In that lab the codewords were provided to you for each code, but in a stabilizer code, we can equivalently define the codespace by providing the stabilizers which stabilize each basis codeword. In practice, this process of defining a code by the stabilizers is much more efficient and scalable as the codes grow larger.\n", + "In lab 1, the **codespace** was defined by the set of basis codewords, such as $\\ket{000}$ and $\\ket{111}$ for the 3-qubit quantum repetition code. 
In that lab the codewords were provided to you for each code, but in a stabilizer code, we can equivalently define the codespace by providing the stabilizers which stabilize each basis codeword. In practice, this process of defining a code by the stabilizers is much more efficient and scalable as the codes grow larger.\n", "\n", "The codespace $C$ can be defined as formed by all $\\ket{\\psi}$ such that $s_i\\ket{\\psi} = +1 \\ket{\\psi}$ for each $s_i\\in S$, where these $s_i$ are stabilizers which form a group $S$ (note: in some texts this group $S$ is called the stabilizer, not the elements). That is, the codespace is the joint +1 eigenspace fixed by the stabilizers. \n", @@ -158,7 +186,7 @@ "\n", "Three key properties for $[[n,k,d]]$ stabilizers:\n", "\n", - "1. Here we consider only to Pauli product stabilizers, that is, $s_i$ needs to be a Pauli-group element. The n-qubit Pauli group $G_n$ is a special group constructed from the Pauli matrices:\n", + "1. Here we consider only Pauli product stabilizers, that is, $s_i$ needs to be a Pauli-group element. The n-qubit **Pauli group** $G_n$ is a special group constructed from the Pauli matrices:\n", "\n", " $$ I = \\begin{pmatrix} 1 & 0 \\\\ 0 & 1 \\end{pmatrix}, \\quad X = \\begin{pmatrix} 0 & 1 \\\\ 1 & 0 \\end{pmatrix}, \\quad Y = \\begin{pmatrix} 0 & -i \\\\ i & 0 \\end{pmatrix}, \\quad Z = \\begin{pmatrix} 1 & 0 \\\\ 0 & -1 \\end{pmatrix}$$\n", "\n", @@ -192,9 +220,11 @@ "id": "458cbf66-7cdf-4aaf-83ec-8597d941ab3e", "metadata": {}, "source": [ + "---\n", + "\n", "## 2.2 The Steane Code\n", "\n", - "The Steane code is a famous QEC code that is the quantum version of the [7,4,3] Hamming code introduced in the first QEC lab. One immediate difference is that the Steane code encodes a single logical qubit making it a [[7,1,3]] code.\n", + "The **Steane code** is a famous QEC code that is the quantum version of the [7,4,3] Hamming code introduced in the first QEC lab. 
One immediate difference is that the Steane code encodes a single logical qubit making it a [[7,1,3]] code.\n", "\n", "Remember, that the Hamming code adds additional parity bits that help \"triangulate\" where an error occurred. In the lab 1 exercises you constructed the generator matrix $G$ and used it to produce the logical codewords in the classical Hamming code. For example, $b=0110$ was encoded as\n", "\n", @@ -215,7 +245,7 @@ "\\end{bmatrix}\n", "$$\n", "\n", - "Any logically encoded state, $c$, could then be multiplied by the parity check matrix ($H$) to determine if any syndromes were triggered or not. \n", + "Any logically encoded state, $c$, could then be multiplied by the parity check matrix ($H$) to determine if any **syndromes** were triggered or not. \n", "\n", "\n", "$$\n", @@ -276,17 +306,19 @@ "\n", "The encoding circuit to produce the logical codewords is shown below, and is based off the constraints imposed by the parity check matrix. \n", "\n", - "\"Drawing\"\n", + "\"Quantum\n", + "\n", "\n", "\n", "\n", + "
\n", + "\n", + "**Exercise 1:**\n", "\n", - "
\n", - "

Exercise 1 - The Steane Code:

\n", - "

\n", "In the cell below, build a CUDA-Q kernel to encode the logical 0 state using the Steane code. Sample the circuit to prove that you indeed created the appropriate superposition. In the cells following, complete the entire Steane code by adding stabilizer checks and code to measure the logical state. Complete the numbered tasks as well to confirm your code works as expected.\n", - "

\n", - "
\n" + "\n", + "
\n", + "" ] }, { @@ -296,28 +328,23 @@ "metadata": {}, "outputs": [], "source": [ + "# EXERCISE 1\n", "@cudaq.kernel\n", "def steane_code():\n", - " \"\"\"Prepares a kernel for the Steane Code\n", - " Returns\n", - " -------\n", - " cudaq.kernel\n", - " Kernel for running the Steane code\n", - " \"\"\" \n", + " \"\"\"Prepares a kernel for the Steane Code\"\"\"\n", "\n", - " #Initialize Registers\n", - " #TODO\n", + " # Initialize Registers\n", + " ##TODO##\n", "\n", " # Create a superposition over all possible combinations of parity check bits\n", - " #TODO\n", + " ##TODO##\n", "\n", - " #Entangle states to enforce constraints of parity check matrix (circuit above)\n", - " #TODO\n", + " # Entangle states to enforce constraints of parity check matrix (circuit above)\n", + " ##TODO##\n", "\n", "\n", - "\n", - "results = cudaq.sample(steane_code, shots_count=10000)\n", - "print(results) " + "results = cudaq.sample(steane_code, shots_count=1000)\n", + "print(results)" ] }, { @@ -334,9 +361,11 @@ "The syndromes can be visually interpreted by putting a colored X on the syndromes that are flagged. Each coloring of this graph uniquely corresponds to an error on a specific qubit which is why the Steane code is often referred to as a **color code**.\n", "\n", "\n", - "\"Drawing\"\n", + "\"Diagram\n", + "\n", + "You are now ready to code the rest of the Steane code. After encoding, introduce an $X$ error and $Z$ error on the qubits of your choice. Try performing the $X$ and $Z$ syndrome measurements using the same three ancilla qubits and resetting them in between. Make your code such that you can measure the data qubits and confirm the state of the logical qubit. \n", "\n", - "You are now ready to code the rest of the Steane code. After encoding, introduce an $X$ error and $Z$ error on the qubits of your choice. Try performing the $X$ and $Z$ syndrome measurements using the same three ancilla qubits and resetting them in between. 
Make your code such that you can measure the data qubits and confirm the state of the logical qubit. " + "Note, to return only the measurements of the `data_qubits` register, use `cudaq.run`. This requires specification of a return statement within the kernel and a return type when the kernel is defined (done for you below). Note: `run` will be a bit slower than `sample` as it much launch a new kernel each time." ] }, { @@ -346,50 +375,36 @@ "metadata": {}, "outputs": [], "source": [ - "import cudaq\n", + "# EXERCISE 1\n", "@cudaq.kernel\n", - "def steane_code():\n", + "def steane_code() -> list[int]:\n", " \"\"\"Prepares a kernel for the Steane Code\n", " Returns\n", " -------\n", - " cudaq.kernel\n", - " Kernel for running the Steane code\n", - " \"\"\" \n", + " list[int]: list of measurements of the data_qubits register\n", + " \"\"\"\n", + " ##TODO## Initialize Registers (data_qubits and ancilla_qubits)\n", "\n", - " #Initialize Registers\n", - " #TODO\n", + " ##TODO## Create a superposition over all possible combinations of parity check bits\n", "\n", - " # Create a superposition over all possible combinations of parity check bits\n", - " #TODO\n", - "\n", - " #Entangle states to enforce constraints of parity check matrix (circuit above)\n", - " #TODO\n", - "\n", - " #Add Errors (Optional)\n", - " #TODO\n", + " ##TODO## Entangle states to enforce constraints of parity check matrix\n", "\n", - " \n", - " \n", - " # Perform Stabilizer checks for Z errors\n", - " #TODO\n", + " ##TODO## Add Errors (Optional)\n", "\n", + " ##TODO## Perform Stabilizer checks for Z errors\n", "\n", - " # Perform Stabilizer checks for X errors\n", - " #TODO\n", + " ##TODO## Perform Stabilizer checks for X errors\n", "\n", + " ##TODO## Correct X errors\n", "\n", - " # Correct X errors\n", - " #TODO\n", + " ##TODO## Correct Z errors\n", "\n", - " # Correct Z errors\n", - " #TODO\n", + " ##TODO## Return measurement of data_qubits\n", "\n", + "results = cudaq.run(steane_code, 
shots_count=100)\n", + "print(results)\n", "\n", - "results = cudaq.sample(steane_code, shots_count=1000)\n", - "print(results) \n", - "\n", - "#Post-process Results\n", - "#TODO" + "##TODO## Post-process Results" ] }, { @@ -399,9 +414,9 @@ "source": [ "Now, test your code! Just measure in the $Z$ basis as the same procedure could be performed with the $X$ basis. \n", "\n", - "1. Try adding single $X$ errors, guess which stabilizers should flag and confrm they do.\n", + "1. Try adding single $X$ errors, guess which stabilizers should flag and confirm they do.\n", "2. Add two errors. Confirm the code cannot correct the errors and a logical bitflip occurs.\n", - "3. It turns out that like the Shor code, there are alternate choices for $\\bar{X}$. Modify your counting code above and test if $X_0X_1X_4$ or $X_0X_4X_5$ are valid choices for $\\bar{X}$. " + "3. It turns out there are alternate choices for $\\bar{X}$. Modify your counting code above and test if $X_0X_1X_4$ or $X_0X_4X_5$ are valid choices for $\\bar{X}$. " ] }, { @@ -409,38 +424,31 @@ "id": "c9245417-c131-4d5a-9b05-cfd5940b0464", "metadata": {}, "source": [ + "---\n", + "\n", "## 2.3 Steane Code Capacity Analysis with CUDA-Q QEC\n", "\n", "\n", "[CUDA-QX](https://developer.nvidia.com/cuda-qx) is set of libraries that enable easy acceleration of quantum application development. One of the libraries, [CUDA-Q QEC](https://nvidia.github.io/cudaqx/components/qec/introduction.html), is focused on error correction and can help expedite much of the work done above. This final section will demonstrate how to run a code capacity memory experiment with the Steane code.\n", "\n", - "A memory experiment is a procedure to test how well a protocol can preserve quantum information. Such an experiment can help assess the quality of a QEC code but is often limited by assumptions that deviate from a realistic noise model. One such example is a code capacity experiment. 
A code capacity procedure determines the logical error rate of a QEC code under strict assumptions such as perfect gates or measurement. Code capacity experiments can help put an upper bound on a procedure's threshold and is therefore a good starting place to compare new codes.\n", + "A memory experiment is a procedure to test how well a protocol can preserve quantum information. Such an experiment can help assess the quality of a QEC code but is often limited by assumptions that deviate from a realistic noise model. One such example is a code capacity experiment. A **code capacity** procedure determines the logical error rate of a QEC code under strict assumptions such as perfect gates or measurement. Code capacity experiments can help put an upper bound on a procedure's threshold and is therefore a good starting place to compare new codes.\n", "\n", "The process is outlined in the diagram below. Assume the 0000000 bitstring is the baseline (no error). Bitflips are then randomly introduced and produce errors in the data vector to produce results like 0100010. If this were a real test on a physical quantum device, the data vector would not be known and a user could only proceed through the bottom path in the figure - performing syndrome extraction and then decoding the result to see if a logical flip occurred. In a code capacity experiment, the data vector with errors is known, so it can be used to directly compute if a logical state flip occurred or not. Dividing the number of times the actual (top path) and predicted (bottom path) results agree by the total number of rounds provides an estimate of the logical error rate for the code being tested. \n", "\n", "\n", - "\"Drawing\"\n", + "\"Flowchart\n", "\n", "\n", - "
\n", - "

Exercise 2 - CUDA-Q QEC Code Capacity Experiment:

\n", - "

\n", - "CUDA-Q QEC allows researchers to streamline experiments like this with just a few lines of code. Try running the cells below to compute the logical error rate of the Steane code under code capacity assumptions given probability of error $p$.\n", - "

\n", + "
\n", + "\n", + "**Exercise 2:**\n", + "\n", + "CUDA-Q QEC allows researchers to streamline experiments like this with just a few lines of code. Try running the cells below to compute the logical error rate of the Steane code under code capacity assumptions given probability of error $p$.\n", + "\n", "
\n", "\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "60073994-dd9c-496f-93a0-feb1ec6fcb39", - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np \n", - "import cudaq_qec as qec " + "\n", + "" ] }, { @@ -520,7 +528,7 @@ "syndromes, data = qec.sample_code_capacity(Hz, nShots, p)\n", "\n", "for x in range(nShots):\n", - " print(\"Data Qubits\", data[x], \"Syndromes\", syndromes[x])" + " print(\"Data Qubits:\", data[x], \"Syndromes:\", syndromes[x])" ] }, { @@ -532,7 +540,7 @@ "\n", "If the experiment is repeated many times with different $p$ values, a plot can be generated like the one shown below. The purple line is the $y=x$ and corresponds to the case that the logical error rate is identical to the physical error rate. Anywhere the green line is below the purple line indicates that the Steane code was able to produce a logical error rate that is less than the physical error rate of the data qubits. When the green line is above the purple, the Steane code produced a worse logical error rate indicating that it would have been better to just use the data qubits and avoid the QEC procedure. The crossover point is an estimate for the code's threshold. Refining this estimate would require more sophisticated circuit level noise models that more accurately represent the performance of the Steane code under realistic conditions. \n", "\n", - "\"Drawing\"\n", + "\"Plot\n", "\n", "Though code capacity has much room to improve, it is a great example of the utility of CUDA-Q QEC and how simple procedures can be streamlined so users can focus on testing codes rather than coding up the details of each test." ] @@ -542,9 +550,11 @@ "id": "29eefa9f-dc3b-4e68-b54e-aee51f15cc5f", "metadata": {}, "source": [ + "---\n", + "\n", "## 2.4 The Shor Code\n", "\n", - "The first QEC code was proposed by Peter Shor in 1995, known as the [Shor code]((https://journals.aps.org/pra/abstract/10.1103/PhysRevA.52.R2493)). 
The Shor code is a [[9,1,3]] code which uses 9 qubits to encode a single qubit, but can correct single $X$ or $Z$-type errors.\n", + "The first QEC code was proposed by Peter Shor in 1995, known as the **[Shor code](https://journals.aps.org/pra/abstract/10.1103/PhysRevA.52.R2493)**. The Shor code is a [[9,1,3]] code which uses 9 qubits to encode a single qubit, but can correct single $X$ or $Z$-type errors.\n", "\n", "\n", "The motivation for the code, is that the 3-qubit repetition code can correct bit flip errors but not phase flip errors. We can consider why this is by examining the encoded $\\ket{+}_L$ state, which looks like the following:\n", @@ -568,7 +578,8 @@ "\n", "This encoding of $\\psi = \\alpha \\ket{0} + \\beta \\ket{1}$ can be implemented with the following quantum circuit:\n", "\n", - "\"Drawing\"\n" + "\"Quantum\n", + "" ] }, { @@ -592,7 +603,8 @@ "Can you see what the logical operators need to be? \n", "\n", "\n", - "For a logical bit flip to occur ($\\bar{X}$) the phase of each block needs to change. This is accomplished by performing a $Z $ operation on one of the qubits in each block, thus $\\bar{X} = Z_1Z_4Z_7$ is a valid choice, though not the only choice as others like $\\bar{X} = Z_2Z_5Z_8$ or even $\\bar{X} = Z_1Z_2Z_3Z_4Z_5Z_6Z_7Z_8Z_9$ also work. Similarly, for $\\bar{Z}$ to take $\\ket{1}_L$ to $-\\ket{1}_L$ (and $\\ket{0}_L$ to itself) all of the bits need to flip, thus $\\bar{Z} = X_1X_2X_3X_4X_5X_6X_7X_8X_9$. The curious reader can confirm that the anticommutativity holds between these logical operators and that they commute with each stabilizer discussed below.\n" + "For a logical bit flip to occur ($\\bar{X}$) the phase of each block needs to change. This is accomplished by performing a $Z $ operation on one of the qubits in each block, thus $\\bar{X} = Z_1Z_4Z_7$ is a valid choice, though not the only choice as others like $\\bar{X} = Z_2Z_5Z_8$ or even $\\bar{X} = Z_1Z_2Z_3Z_4Z_5Z_6Z_7Z_8Z_9$ also work. 
Similarly, for $\\bar{Z}$ to take $\\ket{1}_L$ to $-\\ket{1}_L$ (and $\\ket{0}_L$ to itself) all of the bits need to flip, thus $\\bar{Z} = X_1X_2X_3X_4X_5X_6X_7X_8X_9$. The curious reader can confirm that the anticommutativity holds between these logical operators and that they commute with each stabilizer discussed below.\n", + "" ] }, { @@ -645,13 +657,15 @@ "\n", "\n", "\n", + "
\n", + "\n", + "**Exercise 3:**\n", "\n", - "
\n", - "

Exercise 3 - The Shor Code:

\n", - "

\n", "Now you have all of the backgound necessary to code the Shor code in CUDA-Q. Fill in the sections below to build up a kernel that performs Shor code encoding and syndrome checks. The kernel should be constructed such that you can apply errors and select mesurement in the $Z$ or $X$ basis. Complete the tasks listed below to ensure your code works.\n", - "

\n", - "
\n" + "\n", + "
\n", + "\n", + "" ] }, { @@ -661,64 +675,45 @@ "metadata": {}, "outputs": [], "source": [ - "import cudaq\n", - "\n", + "# EXERCISE 3\n", "@cudaq.kernel\n", - "def shor_code(error_qubit: list[int], error_location: list[int], measure: int):\n", + "def shor_code(error_type: list[int], error_location: list[int], measure: int):\n", " \"\"\"Prepares a kernel for the Shor Code\n", "\n", " Parameters\n", " -----------\n", - " error_qubit: list[int]\n", - " a list where each element is an applied error designated as 1 =x or 2 =z\n", + " error_type: list[int]\n", + " a list where each element is an applied error designated as 1 = x or 2 = z\n", " error_location: list[int]\n", " each element corresponds to the index of the qubit which the error occurs on\n", " measure: int\n", - " Option to measure in the z basis (0) or the x basis (1)\n", - "\n", - " Returns\n", - " -------\n", - " cudaq.kernel\n", - " Kernel for running the Shor code\n", - " \"\"\" \n", + " Option to measure in the z basis (1) or the x basis (2)\n", + " \"\"\"\n", "\n", - " #Encode the data qubits with Shor encoding circuit. 
Hint: It might be helpful to create separate registers for the data and ancilla qubits\n", - " #TODO\n", + " ##TODO## Encode the data qubits with Shor encoding circuit.\n", + " # Hint: It might be helpful to create separate registers for the data and ancilla qubits\n", "\n", " # Initial Psi (25/75) distribution in Z and X basis\n", - " ry(1.04772,data_qubits[0])\n", + " ry(1.04772, data_qubits[0])\n", " rz(1.521, data_qubits[0])\n", "\n", - " # Apply optional single qubit errors \n", - " #TODO\n", - " \n", - " # Apply Hadamard gate to ancilla qubits \n", - " #TODO\n", - " \n", - " # Apply the Bit Flip syndromes \n", - " #TODO\n", + " ##TODO## Apply optional single qubit errors\n", + "\n", + " ##TODO## Apply Hadamard gate to ancilla qubits\n", "\n", - " # Apply the phase flip syndromes \n", - " #TODO\n", + " ##TODO## Apply the Bit Flip syndromes\n", "\n", - " # Apply Hadamard gate to ancilla qubits \n", - " #TODO\n", + " ##TODO## Apply the phase flip syndromes\n", "\n", - " # Perform mid-circuit measurements to determine syndromes \n", - " #TODO\n", - " \n", - " \n", - " # Apply the appropriate corrections based on the results from the syndrome measurements\n", - " #TODO\n", + " ##TODO## Apply Hadamard gate to ancilla qubits\n", "\n", + " ##TODO## Perform mid-circuit measurements to determine syndromes\n", "\n", - " #Perform Hadamard on data qubits to rotate out of X basis (because of concatonated code)\n", - " #TODO\n", - " \n", - " #Measure in X or Z basis depending on kernel input\n", - " h(data_qubits) # put a Hadamard before the measurement to transform back into the Z basis\n", - " # An X basis measurement can be obtained by applying a second Hadamard before a Z basis measurement\n", - " #TODO" + " ##TODO## Apply the appropriate corrections based on the results from the syndrome measurements\n", + "\n", + " ##TODO## Perform Hadamard on data qubits to rotate out of X basis (because of concatenated code)\n", + "\n", + " ##TODO## Measure in X or Z basis depending 
on kernel input" ] }, { @@ -742,14 +737,16 @@ "metadata": {}, "outputs": [], "source": [ - "def post_process(results):\n", - " \"\"\"takes results from a CUDA-Q sample and prints the results and the number of 0's and 1's by computing the parity of the bitstrings.\n", + "# EXERCISE 3\n", + "def post_process(results: cudaq.SampleResult) -> None:\n", + " \"\"\"Takes results from a CUDA-Q sample and prints the number of 0's and 1's by computing the parity of the bitstrings.\n", "\n", " Parameters\n", " -----------\n", " results: cudaq.SampleResult\n", - " A dictionary of the results from sampling the quantum state\n", - " \"\"\"\n" + " A dictionary of the results from sampling the quantum state\n", + " \"\"\"\n", + " ##TODO## Compute parity of each measurement bitstring and count zeros/ones" ] }, { @@ -777,16 +774,35 @@ "metadata": {}, "outputs": [], "source": [ - "# TODO\n", - "# Run tests on your Shor Code" + "# EXERCISE 3\n", + "##TODO## Run tests on your Shor Code" + ] + }, + { + "cell_type": "markdown", + "id": "130329ac73b74863", + "metadata": {}, + "source": [ + "## Conclusion\n", + "\n", + "In this lab, you explored the stabilizer formalism and applied it to implement two foundational quantum error correction codes in CUDA-Q. You learned how stabilizers define a codespace through their joint +1 eigenspace, eliminating the need to explicitly enumerate basis codewords. You implemented the Steane code — a [[7,1,3]] CSS code — and verified its syndrome extraction for both $X$ and $Z$ errors using ancilla qubits with mid-circuit measurement. Using CUDA-Q QEC, you performed a code capacity analysis to estimate the Steane code's error correction threshold. Finally, you built the Shor code — a [[9,1,3]] concatenated code — and explored how it independently corrects bit flip and phase flip errors by leveraging two layers of repetition codes.\n", + "\n", + "In the next lab, you will explore noisy simulation of QEC codes using CUDA-Q's noise modeling capabilities." 
+ ] + }, + { + "cell_type": "markdown", + "id": "436620bc88954196", + "metadata": {}, + "source": [ + "**Related Notebooks:**\n", + "* [QEC 101 — Lab 3: Noisy Simulation](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/03_QEC_Noisy_Simulation.ipynb) — continues the QEC series with circuit-level noise simulation\n", + "* [QEC 101 — Lab 1: Introduction to QEC](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/01_QEC_Intro.ipynb) — covers the prerequisite classical and quantum repetition codes\n", + "* [QEC 101 — Lab 6: Topological Codes](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/06_QEC_Topological_Codes.ipynb) — extends stabilizer formalism to surface and toric codes" ] } ], "metadata": { - "colab": { - "include_colab_link": true, - "provenance": [] - }, "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", @@ -800,9 +816,22 @@ "file_extension": ".py", "mimetype": "text/x-python", "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.10.0" + }, + "learning_goals": { + "cfqt_domain": "QCS", + "cfqt_subdomain": [ + "QCS.ERR", + "QCS.SW", + "QCS.ALG" + ], + "cfqt_proficiency": "B1", + "qist_competency_areas": [ + "quantum_error_correction", + "quantum_software", + "quantum_algorithms" + ], + "application_domain": "error_correction" } }, "nbformat": 4, diff --git a/qec101/03_QEC_Noisy_Simulation.ipynb b/qec101/03_QEC_Noisy_Simulation.ipynb index 5ead926..0379c1f 100644 --- a/qec101/03_QEC_Noisy_Simulation.ipynb +++ b/qec101/03_QEC_Noisy_Simulation.ipynb @@ -1,46 +1,91 @@ { "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "e182f30007b64a15", + "metadata": {}, + "outputs": [], + "source": [ + "# SPDX-License-Identifier: Apache-2.0 AND CC-BY-NC-4.0\n", + "#\n", + "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you may not use this file except in compliance with the License.\n", + "# You may 
obtain a copy of the License at\n", + "#\n", + "# http://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." + ] + }, { "cell_type": "markdown", "id": "29c92634-d993-4c37-a7c7-ab9ff1384e2d", "metadata": {}, "source": [ - "# Lab 3 - Simulating Quantum Noise #\n", - "$\n", - "\\renewcommand{\\ket}[1]{|{#1}\\rangle}\n", - "\\renewcommand{\\bra}[1]{\\langle{#1}|}\n", - "$\n", - "---\n", - "## Overview\n", - "Noise is the enemy of useful quantum computing and the reason quantum error correction (QEC) is necessary in the first place. Noise from either the environment or the unavoidable imperfections of device controls produce errors that corrupt the quantum information stored on the qubits and ruin algorithm results. \n", + "# Simulating Quantum Noise — QEC101: Lab 3\n", + "$\\renewcommand{\\ket}[1]{|#1\\rangle}\\renewcommand{\\bra}[1]{\\langle#1|}$\n", "\n", - "Though all QPUs share this reality, each QPU exhibits a unique noise profile depending on its qubit modality and a variety of other design factors. Understanding a device's noise is critical for guiding algorithm development, discovery of new QEC techniques, and improvements to the hardware itself. \n", - "\n", - "\n", - "Simulating noisy QPUs can be extremely helpful for this task and complements the valuable but limited experimental data. For example, researchers can train QEC methods that rely on AI with massive simulated data sets. Similarly, insights from simulation can inform design improvements. 
Simulation can also be used to model the physics of the individual qubits and identify specific sources of noise, much like NVIDIA uses GPUs to simulate digital twins for next generation GPUs in the design process.\n", + "---\n", "\n", - "This lab will provide an overview of simulating noisy quantum circuits. You will learn how to use CUDA-Q to perform a number of different simulations and apply the results to different uses cases.\n", + "**What You Will Do:**\n", + "* Define quantum noise channels using density matrices and Kraus operators\n", + "* Simulate noisy quantum circuits with both density matrix and trajectory-based methods in CUDA-Q\n", + "* Analyze the impact of different noise patterns on a quantum chemistry algorithm (VQE for H₂)\n", + "* Implement zero noise extrapolation as a quantum error mitigation technique\n", + "* Run noisy QEC experiments on the Steane code using the Stim simulator\n", + "* Build a noise model from dynamical simulation of a superconducting transmon qubit\n", "\n", "**Prerequisites:**\n", - "Learners should have familiarity with Jupyter notebooks and programming in Python and CUDA-Q. It is assumed the reader has some familiarity already with quantum computation and is comfortable with braket notation and the concepts of qubits, quantum circuits, measurement, and circuit sampling. The CUDA-Q Academic course entitled \"[Quick Start to Quantum Computing with CUDA-Q](https://github.com/NVIDIA/cuda-q-academic/tree/main/quick-start-to-quantum)\" provides a walkthrough of this prerequisite knowledge if the reader is new to quantum computing and CUDA-Q or needs refreshing. 
Learners would also benefit from completing the first two notebooks in this series [The Basics of Classical and Quantum Error Correction](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/01_QEC_Intro.ipynb) and [Stabilizers, the Shor code, and the Steane code](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb).\n", - "\n", - "The list below outlines what you'll be doing in each section of this lab:\n", - "\n", - "* **3.1** Define a quantum noise channel, the density matrix, and Kraus operators\n", - "* **3.2** Learn two ways to simulate noise: density matrix and trajectory simulation\n", - "* **3.3** Lean how to use noisy simulation for three different uses cases\n", - " * **3.3a** Study the impact of noise on a standard quantum chemistry algorithm \n", - " * **3.3b** Use noise modeling to implement a noise mitigation technique\n", - " * **3.3c** Run QEC experiments with noise models\n", - "* **3.4** Perform dynamical simulation of a qubit to build a noise model for quantum circuit simulation\n", + "* Python and Jupyter familiarity\n", + "* Basic quantum computing concepts (qubits, gates, measurement, braket notation) — see [Quick Start to Quantum Computing with CUDA-Q](https://github.com/NVIDIA/cuda-q-academic/tree/main/quick-start-to-quantum)\n", + "* Familiarity with CUDA-Q kernel syntax and `cudaq.sample`\n", + "* Completion of [01 — The Basics of Classical and Quantum Error Correction](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/01_QEC_Intro.ipynb)\n", + "* Completion of [02 — Stabilizers, the Shor Code, and the Steane Code](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb)\n", + "\n", + "**Key Terminology:**\n", + "* noise channel\n", + "* density matrix\n", + "* trajectory simulation\n", + "* density matrix simulation\n", + "* Kraus operator\n", + "* quantum error mitigation\n", + "* zero noise extrapolation\n", + "* circuit-level noise\n", + "* dynamical 
simulation\n", + "* amplitude damping\n", + "* pure state\n", + "* mixed state\n", + "\n", + "**CUDA-Q Syntax:**\n", + "* [`@cudaq.kernel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.kernel) — defines a quantum kernel function\n", + "* [`cudaq.qvector`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.qvector) — allocates a register of qubits\n", + "* [`cudaq.sample`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.sample) — samples measurement outcomes\n", + "* [`cudaq.observe`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.observe) — computes expectation value of a spin operator\n", + "* [`cudaq.get_state`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.get_state) — returns the statevector or density matrix\n", + "* [`cudaq.set_target`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.set_target) — selects simulation backend\n", + "* [`cudaq.NoiseModel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.NoiseModel) — defines a quantum noise model\n", + "* [`cudaq.SpinOperator`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.SpinOperator) — Pauli spin operator (Hamiltonian)\n", + "* [`cudaq_solvers.create_molecule`](https://nvidia.github.io/cuda-quantum/latest/api/solvers/python_api.html#cudaq_solvers.create_molecule) — builds molecular Hamiltonian from geometry\n", + "* [`cudaq.evolve`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.evolve) — runs dynamical time evolution of a quantum system\n", + "\n", + "**Solutions:** [`Solutions/03_QEC_Noisy_Simulation_Solution.ipynb`](Solutions/03_QEC_Noisy_Simulation_Solution.ipynb)" + ] + }, + { + "cell_type": "markdown", + "id": "5ed8e305", + "metadata": {}, + "source": [ + "
\n", "\n", - "Terminology and notation you'll use\n", - "* noise channel, density matrix, trajectory simulation, density matrix simulation, Kraus operator\n", - "* quantum error mitigation, zero noise extrapolation\n", - "* circuit-level noise experiments\n", - "* dynamical simulation, amplitude damping\n", + "**⚡ GPU Required:** Parts of this notebook require a GPU.\n", "\n", - "Before we get started, excecute the cells below to load the necessary packages." + "
" ] }, { @@ -50,33 +95,25 @@ "metadata": {}, "outputs": [], "source": [ - "### Instructions for Google Colab. You can ignore this cell if you have cuda-q set up and have \n", - "# all the dependent files on your system\n", - "# Uncomment the lines below and execute the cell to install cuda-q\n", - "\n", - "#!pip install cudaq\n", + "## Instructions for Google Colab. You can ignore this cell if you have CUDA-Q\n", + "## set up locally with all required files on your system.\n", + "## Uncomment the lines below and execute this cell to install CUDA-Q.\n", "\n", + "#!pip install cudaq -q\n", + "#\n", "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", "#!unzip -q main.zip\n", "#!mv cuda-q-academic-main/qec101/Images ./Images" ] }, { - "cell_type": "code", - "execution_count": null, - "id": "e10523fd-5b1c-4b84-a0c1-9fd1506936be", + "cell_type": "markdown", + "id": "fc00a28e75e448f2", "metadata": {}, - "outputs": [], "source": [ - "# If you are working in an environment that does not have cudaqx installed, \n", - "# uncomment the code below to install cudaq-solvers and the required dependencies. \n", - "# Then restart the kernel before executing the next cell.\n", - "#!sudo apt-get update && sudo apt-get install -y gfortran\n", - "#!pip install cudaq-solvers -q\n", - "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", - "#!unzip -q main.zip\n", - "#!mkdir Images\n", - "#!mv cuda-q-academic-main/qec101/Images/noisy ./Images/noisy" + "> **Note:** Run the cell below to import all required packages.\n", + "> If you installed packages above, restart the kernel first\n", + "> (**Runtime → Restart session** in Colab, or **Kernel → Restart** in Jupyter)." 
] }, { @@ -86,14 +123,22 @@ "metadata": {}, "outputs": [], "source": [ - "import cudaq\n", - "import numpy as np\n", - "import cudaq_solvers as solvers\n", "from typing import List, Optional\n", + "\n", + "import numpy as np\n", + "\n", "import matplotlib.pyplot as plt\n", + "\n", + "import cudaq\n", "from cudaq import spin, operators, ScalarOperator, Schedule, ScipyZvodeIntegrator\n", - "import cupy as cp\n", - "import os" + "\n", + "## To install cudaq-solvers (if not already installed), uncomment and run:\n", + "## !pip install cudaq-solvers -q\n", + "## Note: cudaq-solvers requires libgfortran. If you see an ImportError, run:\n", + "## !apt-get install -y libgfortran5\n", + "import cudaq_solvers as solvers\n", + "\n", + "import cupy as cp" ] }, { @@ -101,22 +146,24 @@ "id": "c43db24e-0cc1-41bc-8a00-5922541a1bdd", "metadata": {}, "source": [ - "## 3.1 Quantum Noise Channels ## \n", + "---\n", + "\n", + "## 3.1 Quantum Noise Channels\n", "\n", "In the first lab of this series, the concept of a **noise channel** was introduced. A noise channel is a mathematical model used to describe how a quantum state is impacted by the presence of noise. \n", "\n", - "\"Drawing\"\n", + "\"Diagram\n", "\n", "A noise channel can correspond to application of a gate to physical qubits, a qubit's interaction with another nearby qubit, or simply the passage of time and the resulting decay of the quantum state as it interacts with anything else from the environment. QEC is a promising solution to this problem as a logically encoded quantum state can go through the noise channel, impacting each data qubit, while providing a means for the original state to be restored.\n", "\n", - "\"Drawing\"\n", + "\"Diagram\n", "\n", - "However, as previous labs have emphasized, QEC is hard to implement, and the development of new QEC protocols is still an active research field. 
In practice, experimental data obtained from the QPU can help measure quantities like gate fidelity and inform a **noise model** which captures all of the noise channels present in the device. This noise model can then be used to simulate data that emulates the performance of the QPU. \n", + "However, as previous labs have emphasized, QEC is hard to implement, and the development of new QEC protocols is still an active research field. In practice, experimental data obtained from the QPU can help measure quantities like gate fidelity and inform a noise model which captures all of the noise channels present in the device. This noise model can then be used to simulate data that emulates the performance of the QPU. \n", "\n", "There are many practical benefits to this that will be explored in this lab. A recent example of this is [NVIDIA's work with QuEra](https://developer.nvidia.com/blog/nvidia-and-quera-decode-quantum-errors-with-ai/) to build an AI decoder. Training this model required a massive amount of data which could be obtained efficiently via simulation. Noisy circuit simulation allowed for millions of syndromes to be obtained with their associated errors, something not possible to do with experimental data. \n", "\n", "\n", - "### The Density Matrix ###\n", + "### The Density Matrix\n", "\n", "Before discussing some of the ways to simulate noise, it is necessary to take a step back and consider representation of a quantum state using the **density matrix**. The density matrix ($\\rho$) is a mathematical object that completely describes a quantum state and has the following properties. \n", "\n", @@ -124,20 +171,19 @@ "2. It is Hermitian: $\\rho = \\rho ^{\\dagger}$\n", "3. It is positive semi-definite. 
(All eigenvalues are positive.)\n", "\n", - "If a quantum system is in one of a any quantum states $\\ket{\\psi_i}$ with probability $p_i$, then the density matrix is defined as a linear combination of outer products of those states with probability coefficients:\n", + "If a quantum system is in one of any quantum states $\\ket{\\psi_i}$ with probability $p_i$, then the density matrix is defined as a linear combination of outer products of those states with probability coefficients:\n", "\n", "$$\\rho = \\sum_i p_i \\ket{\\psi_i}\\bra{\\psi_i} $$\n", "\n", "\n", "\n", + "
\n", "\n", - "
\n", - "

Exercise 1:

\n", - "

\n", - "use CUDA-Q's $\\texttt{get\\_state}$ function and the density matrix simulator (more on that later) to produce any three qubit density matrix. Write code to check that the three properties listed above are met. Make sure to set tolerances on these checks so that, for example, an eigenvalue of zero is not wrongfully flagged as `-1.2e-20`. \n", - "

\n", - "
\n", - "\n" + "**Exercise 1:**\n", + "\n", + "Use CUDA-Q’s `get_state` function and the density matrix simulator (more on that later) to produce any three qubit density matrix. Write code to check that the three properties listed above are met. Make sure to set tolerances on these checks so that, for example, an eigenvalue of zero is not wrongfully flagged as `-1.2e-20`.\n", + "\n", + "
" ] }, { @@ -147,26 +193,28 @@ "metadata": {}, "outputs": [], "source": [ - "import cudaq\n", - "import numpy as np\n", - "\n", + "# EXERCISE 1\n", "cudaq.set_target(\"density-matrix-cpu\")\n", "\n", - "#Build Kernel and get state\n", - "#TODO \n", + "@cudaq.kernel\n", + "def test():\n", + " reg = cudaq.qvector(3)\n", + " h(reg)\n", + "\n", + "print(\"State vector:\")\n", + "print(cudaq.get_state(test))\n", "\n", - "#get density matrix\n", - "#TODO\n", + "# get density matrix\n", + "##TODO##\n", "\n", "# Compute Trace\n", - "#TODO\n", + "##TODO##\n", "\n", "# Check if Hermitian\n", - "#TODO\n", + "##TODO##\n", "\n", "# Check if positive semi-definite\n", - "#TODO\n", - "\n" + "##TODO##" ] }, { @@ -174,25 +222,22 @@ "id": "1c7b3175-0d51-4d49-b36a-7211389730dc", "metadata": {}, "source": [ - "
\n", - "
\n", - "\n", - "Statevectors correspond to **pure states**, while the density matrix can describe **mixed states**, that is an overall state composed of a combination of pure states.\n", + "Statevectors correspond to **pure states**, while the **density matrix** can describe **mixed states**, that is an overall state composed of a combination of pure states.\n", "\n", "A state is considered pure if the trace of $\\rho^2$ is equal to 1.\n", "\n", "This can be a bit confusing because a pure state can actually be a superposition state and a mixed state can be a combination of two states that do not describe superpositions. The following exercise will make this more clear.\n", "\n", "\n", - "
\n", - "

Exercise 2 :

\n", - "

\n", - "Consider the density matrix $\\rho = \\frac{1}{2}\\ket{00}\\bra{00} + \\frac{1}{2}\\ket{11}\\bra{11}$. \n", + "

\n", "\n", - "Using CUDA-Q build kernels for the $\\ket{00}$ state and the $\\ket{11}$ state, using these kernels and the $\\texttt{get\\_state}$ command define the density matrix $\\rho$, and compute trace($\\rho^2$). Is the state pure?\n", - "

\n", - "
\n", - "\n" + "**Exercise 2:**\n", + "\n", + "Consider the density matrix $\\rho = \\frac{1}{2}\\ket{00}\\bra{00} + \\frac{1}{2}\\ket{11}\\bra{11}$.\n", + "\n", + "Using CUDA-Q build kernels for the $\\ket{00}$ state and the $\\ket{11}$ state, using these kernels and the `get_state` command define the density matrix $\\rho$, and compute trace($\\rho^2$). Is the state pure?\n", + "\n", + "
" ] }, { @@ -202,7 +247,8 @@ "metadata": {}, "outputs": [], "source": [ - "#TODO\n" + "# EXERCISE 2\n", + "##TODO##" ] }, { @@ -220,7 +266,7 @@ "metadata": {}, "outputs": [], "source": [ - "#TODO\n" + "##TODO##" ] }, { @@ -228,40 +274,36 @@ "id": "ecdeafb0-ae88-4197-bab3-7bb68c3e9b1c", "metadata": {}, "source": [ - "
\n", - "
\n", + "A mixed state means that there is classical uncertainty about which quantum state defines the system, even if both quantum states are deterministic like $\\ket{00}$ and $\\ket{11}$. However, a Bell state is pure, meaning that the overall quantum state is known with certainty, even if the state describes a superposition with inherent uncertainty. Another key term is completely mixed state, which refers to a density matrix where all of the eigenvalues are the same, meaning the density matrix describes the state with the theoretical maximum of uncertainty. \n", "\n", - "A mixed state means that there is classical uncertainly about which quantum state defines the system, even if both quantum states are deterministic like $\\ket{00}$ and $\\ket{11}$. However, a bell state is pure, meaning that the overall quantum state is known with certainly, even if the state describes a superposition with inherent uncertainty. Another key term is **completely mixed state**, which refers to a density matrix where all of the eigenvalues are the same, meaning the density matrix describes the state with the theoretical maximum of uncertainty. \n", + "### Kraus Operators\n", "\n", - "### Kraus Operators ###\n", - "\n", - "Now, why the business about density matrices? The answer is that a noise channel needs to be an effective model that can generalize to impact mixed states. In fact, many noise channels will produce a mixed state from a pure state.\n", + "Now, why the business about density matrices? The answer is that a noise channel needs to be an effective model that can generalize to impact mixed states. 
In fact, many noise channels will produce a mixed state from a pure state.\n", "\n", "Mathematically this is done with **Kraus operators** ($K_i$) that evolve the density matrix as the state proceeds through a noisy channel $\\epsilon$.\n", "\n", "$$ \\epsilon(\\rho) = \\sum_i K_i\\rho K_i^{\\dagger} $$\n", "\n", - "Kraus operators have the condition that $ \\sum_i K_i K_i^{\\dagger} =1 $ so the trace of the density matrix is preserved.\n", + "**Kraus operators** have the condition that $ \\sum_i K_i K_i^{\\dagger} =1 $ so the trace of the density matrix is preserved.\n", "\n", "For example, a valid set of operators is $K_0 = \\sqrt{1-p} I $ and $K_1 = \\sqrt{p}X$ which will perform a bitflip error with probability $p$ and apply the identity (no change) with probability $1-p$. Let's apply this to the density matrix, $\\rho_0$, for the $\\ket{0}$ state. The result becomes $ \\epsilon(\\rho_0) = (1-p)I\\rho_0 I + pX\\rho_0 X $. Notice the result is now mixed state. \n", "\n", "The table below summarizes some of the channels included in CUDA-Q which you will use in later exercises. Notice too, that each noise channel can be geometrically represented as a deformation of the Bloch sphere.\n", "\n", "\n", - "\"Drawing\"\n", + "\"Table\n", "\n", " \n", "\n", "By applying any number of Kraus operators to the density matrix, it is possible to evolve it and sample the resulting state to determine how noise impacts the output. This is easily accomplished in CUDA-Q with the `density-matrix-cpu` backend. You can then build a noise model consisting of noisy channels applied to specific gate operations with select probabilities. The exercise below will get you started with the syntax.\n", "\n", - "
\n", - "

Exercise 3 :

\n", - "

\n", - "You will be using CUDA-Q's built in noise channel tools throughout this lab. Get a sense for how it works by building a two qubit kernel and perform an $X$ operation on each qubit. Edit the code block below to build a noise model consisting of two bitflip channels with probabilities of .10 and .25 on the $X$ gate for qubit 0 and 1, respectively. Does the sample distribution agree with what you would expect? \n", - "

\n", - "
\n", + "
\n",
    "\n",
+    "**Exercise 3:**\n",
+    "\n",
+    "You will be using CUDA-Q’s built-in noise channel tools throughout this lab. Get a sense for how it works by building a two qubit kernel and performing an $X$ operation on each qubit. Edit the code block below to build a noise model consisting of two bitflip channels with probabilities of .10 and .25 on the $X$ gate for qubit 0 and 1, respectively. Does the sample distribution agree with what you would expect?\n",
+    "\n",
+    "
" ] }, { @@ -271,10 +313,10 @@ "metadata": {}, "outputs": [], "source": [ - "noise = cudaq.NoiseModel() # #Defines an empty noise model\n", + "# EXERCISE 3\n", + "noise = cudaq.NoiseModel()\n", + "##TODO## Add two BitFlipChannel noise channels\n", "\n", - "noise.add_channel('x', [0], cudaq.BitFlipChannel(.1)) # Adds a bitflip error channel (p=0.1) to X gates on qubit 0.\n", - "#TODO Add noise channel for second qubit\n", "\n", "@cudaq.kernel\n", "def test():\n", @@ -290,53 +332,22 @@ "id": "7a1bcb92-ea4d-4e00-868d-11158003d18f", "metadata": {}, "source": [ - "## 3.2 Two Ways to Simulate Noise ##\n", - "\n", - "Density matrix simulation can produce exact results with the quality of simulation limited only by the accuracy of the underlying noise model. Unfortunately, density matrix simulation is expensive and requires storage of the entire $2^N \\times 2^N $ matrix, limiting it to a smaller number of qubits. \n", + "---\n", "\n", - "This scalability problem can be circumvented with a method called trajectory based simulation which allows for approximate noise simulation at much larger scales. Unlike density matrix simulation that applies Kraus operators to every state, trajectory based simulation assumes the Kraus operators occur as a Markov process. \n", + "## 3.2 Two Ways to Simulate Noise\n", "\n", - "The assumption of a Markov process is that the application of each Kraus operator is independent from the others. This is usually a reasonable assumption as a physical QPU might, for example, only apply gates in an isolated gate zone. \n", + "**Density matrix simulation** can produce exact results with the quality of simulation limited only by the accuracy of the underlying noise model. Unfortunately, **density matrix simulation** is expensive and requires storage of the entire $2^N \\times 2^N $ matrix, limiting it to a smaller number of qubits. \n", "\n", - "The code blocks below will make it clear how the two approaches differ. 
Consider a very basic circuit that prepares the $\\ket{111}$ state with bitflip errors on each qubit corrupting the result. First, run the cell below. Notice that `get_state` returns the same density matrix each time you run the code. This density matrix describes the mixture of all possible pure states and returns the sample distribution below." + "This scalability problem can be circumvented with a method called **trajectory simulation** which allows for approximate noise simulation at much larger scales. In trajectory simulation, a statevector is evolved through the circuit and at each noisy gate, a Kraus operator is selected at random based on its probability. This is sometimes called Monte Carlo trajectory simulation. This produces one trajectory per shot, and in the limit of many trajectories, the results converge to those of the density matrix simulation. The CUDA-Q density matrix simulator and GPU-accelerated statevector simulator can each produce equivalent results in noise simulations. Run the density matrix version below and look at the output." 
] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "id": "0131ab05-87c0-40fb-a327-b87f273eef98", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(0.008,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0.032,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0.032,0) (0,0) (0,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0.128,0) (0,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0,0) (0.032,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0,0) (0,0) (0.128,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0.128,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0.512,0)\n", - "\n", - "(0.008,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0.032,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0.032,0) (0,0) (0,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0.128,0) (0,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0,0) (0.032,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0,0) (0,0) (0.128,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0.128,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0.512,0)\n", - "\n", - "{ 000:12 001:27 010:32 011:113 100:41 101:114 110:147 111:514 }\n", - "\n" - ] - } - ], + "outputs": [], "source": [ - "import cudaq\n", - "\n", "cudaq.set_target(\"density-matrix-cpu\")\n", "\n", "noise = cudaq.NoiseModel()\n", @@ -362,38 +373,9 @@ "id": "771b889c-3ddc-4bab-8294-e376b4964a55", "metadata": {}, "source": [ - "Trajectory based simulation can run in CUDA-Q by simply changing the target to `nvidia`. If the kernel below had no noise, the statevector (output from `get_state`) should be [0,0,0,0,0,0,0,1] corresponding to the $\\ket{111}$ state. When sampling is performed with the trajectory based simulator, the Kraus operators are applied based on their probabilities to produce a new state vector for each shot. 
The widget below allows you to explore the possible outcomes and their associated probabilities. \n", - "\n", - "Try running the CUDA-Q simulation above with two or three different bitflip error probabilities and set the slider below to match. Confirm that the results from the density matrix simulations above match the expected distribution from the trajectory-based approach. You will need to move the `Images > noisy > trajectory_widget.py` file from the [CUDA-Q Academic github repository](https://github.com/NVIDIA/cuda-q-academic) into your working directory to execute this optional cell." - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "dd5ff45e-f5d0-4b5e-ae0e-4c90bc07b219", - "metadata": {}, - "outputs": [ - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "055d2c969ab6448fbdf6fc21574c90e6", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "VBox(children=(FloatSlider(value=0.1, continuous_update=False, description='P(error):', max=0.2, step=0.01), T…" - ] - }, - "execution_count": 1, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from Images.noisy.trajectory_widget import show_error_tree_widget\n", + "**Trajectory simulation** can run in CUDA-Q by simply changing the target to `nvidia`. If the kernel below had no noise, the statevector (output from `get_state`) should be [0,0,0,0,0,0,0,1] corresponding to the $\\ket{111}$ state. When sampling is performed with the trajectory based simulator, the Kraus operators are applied based on their probabilities to produce a new state vector for each shot. The widget below allows you to explore the possible outcomes and their associated probabilities. 
\n", "\n", - "# this will render the entire widget\n", - "show_error_tree_widget()" + "Try running the CUDA-Q simulation above with two or three different bitflip error probabilities and use the [interactive trajectory noise widget](https://nvidia.github.io/cuda-q-academic/interactive_widgets/trajectory_noise_demo.html) to confirm that the results from the density matrix simulations above match the expected distribution from the trajectory-based approach." ] }, { @@ -404,76 +386,13 @@ "Running the code below, notice `get_state` produces a different state vector each time. Because the number of possible trajectories is small, trajectory based sampling can reproduce the same distribution that would be obtained from density matrix simulation." ] }, - { - "cell_type": "markdown", - "id": "5ed8e305", - "metadata": {}, - "source": [ - "⚠️\n", - "\n", - "Just a heads-up: The rest of this notebook is designed to be run on an environment with a GPU. If you don't have access to a GPU, feel free to read through the cells and explore the content without executing them. 
Enjoy learning!\n", - "\n", - "⚠️" - ] - }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "id": "28a9093d-db01-495c-a672-4a31b8247456", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (1,0), (0,0), (0,0), (0,0), (0,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (1,0), (0,0), (0,0), (0,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (1,0), (0,0), (0,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "{ 000:7 001:20 010:36 011:133 100:30 101:132 110:118 111:524 }\n", - "\n" - ] - } - ], + "outputs": [], "source": [ - "import cudaq\n", - "\n", "cudaq.set_target(\"nvidia\")\n", "\n", "noise = cudaq.NoiseModel()\n", @@ 
-499,7 +418,7 @@ "source": [ "Another benefit of trajectory based simulation is that it can be used with tensor network based simulators to simulate circuits that would be far too large for density matrix or statevector simulation. CUDA-Q can run exact tensor network or approximate Matrix Product State (MPS) simulations with trajectory based simulation to simulate systems of hundreds to thousands of qubits.\n", "\n", - "Clever sampling algorithms can also be used to filter trajectories and exclude certain types of errors or focus on sampling only a subset of the most likely errors. A [recent paper published by NVIDIA research]()https://arxiv.org/pdf/2504.16297 explains this in greater detail and described how methods like this can sample trillions of noisy samples in just a few hours using an AI supercomputer. This is extremely helpful for training AI QEC decoders where experimental data cannot be obtained in sufficient volume." + "Clever sampling algorithms can also be used to filter trajectories and exclude certain types of errors or focus on sampling a particular type of error. This technique is called importance sampling and is another active area of research for making noisy simulation more practical and beneficial to QEC researchers." ] }, { @@ -507,9 +426,11 @@ "id": "abbc20c4-ee4d-4749-85f6-9580e11cc127", "metadata": {}, "source": [ - " ## 3.3 Use cases for Noisy Simulations ##\n", + "---\n", "\n", - "This section will explore three use cases of noisy simulation used to model the impact of noise patterns on algorithms, perform quantum error mitigation, and run QEC experiments." + "## 3.3 Use Cases for Noisy Simulations\n", + "\n", + "This section will explore three use cases of noisy simulation used to model the impact of noise patterns on algorithms, perform **quantum error mitigation**, and run QEC experiments." 
] }, { @@ -517,7 +438,7 @@ "id": "c46a3fbb-c6bf-4075-9047-505abe3f891b", "metadata": {}, "source": [ - "### 3.3a: Understanding How Noise Impacts Algorithm Results ##3\n", + "### 3.3a: Understanding How Noise Impacts Algorithm Results\n", "\n", "A natural application of noisy simulation is to explore how different noise patterns might impact the results of an algorithm. Such simulations can be beneficial for a number of reasons. This section along with the following two will explore three use cases for noisy circuit simulation. \n", "\n", @@ -531,148 +452,7 @@ "execution_count": null, "id": "73ebc8a8-2867-4b8d-b445-9628a9f02559", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Note: you may need to restart the kernel to use updated packages.\n", - "{ 000:9 001:39 010:33 011:128 100:39 101:132 110:130 111:490 }\n", - "\n", - "-1.0404318468267384\n", - " ╭───╮╭───────────╮ ╭────────────╮╭───╮»\n", - "q0 : ┤ x ├┤ rx(1.571) ├──●──────────────────────────●──┤ rx(-1.571) ├┤ h ├»\n", - " ├───┤╰───────────╯╭─┴─╮ ╭─┴─╮╰────────────╯╰───╯»\n", - "q1 : ┤ x ├─────────────┤ x ├──●────────────────●──┤ x ├───────────────────»\n", - " ├───┤ ╰───╯╭─┴─╮╭──────────╮╭─┴─╮├───┤╭───────────╮ »\n", - "q2 : ┤ h ├──────────────────┤ x ├┤ rz(-0.1) ├┤ x ├┤ h ├┤ rx(1.571) ├──────»\n", - " ├───┤ ╰───╯╰──────────╯╰───╯╰───╯╰───────────╯ »\n", - "q3 : ┤ h ├────────────────────────────────────────────────────────────────»\n", - " ╰───╯ »\n", - "\n", - "################################################################################\n", - "\n", - " ╭───╮ ╭───╮ »\n", - "──●─────────────────────────────●───────────┤ h ├────┤ h ├─────────────────»\n", - "╭─┴─╮ ╭─┴─╮ ╭───┴───┴───╮╰───╯ »\n", - "┤ x ├──●───────────────●──────┤ x ├─────┤ rx(1.571) ├──●───────────────────»\n", - "╰───╯╭─┴─╮╭─────────╮╭─┴─╮╭───┴───┴────╮╰───────────╯╭─┴─╮ »\n", - "─────┤ x ├┤ rz(0.1) ├┤ x ├┤ rx(-1.571) ├─────────────┤ x ├──●──────────────»\n", - " 
╰───╯╰─────────╯╰───╯╰────────────╯ ╰───╯╭─┴─╮╭──────────╮»\n", - "──────────────────────────────────────────────────────────┤ x ├┤ rz(-0.1) ├»\n", - " ╰───╯╰──────────╯»\n", - "\n", - "################################################################################\n", - "\n", - " »\n", - "─────────────────────────────────────────────────────────────────────»\n", - " ╭────────────╮╭───╮ »\n", - "───────●──┤ rx(-1.571) ├┤ h ├──●─────────────────────────────●───────»\n", - " ╭─┴─╮╰────────────╯╰───╯╭─┴─╮ ╭─┴─╮ »\n", - "──●──┤ x ├───────────────────┤ x ├──●───────────────●──────┤ x ├─────»\n", - "╭─┴─╮├───┤╭───────────╮ ╰───╯╭─┴─╮╭─────────╮╭─┴─╮╭───┴───┴────╮»\n", - "┤ x ├┤ h ├┤ rx(1.571) ├───────────┤ x ├┤ rz(0.1) ├┤ x ├┤ rx(-1.571) ├»\n", - "╰───╯╰───╯╰───────────╯ ╰───╯╰─────────╯╰───╯╰────────────╯»\n", - "\n", - "################################################################################\n", - "\n", - " »\n", - "────────────────────●─────────────────────────────────────────────────»\n", - " ╭───╮ ╭───╮╭─┴─╮ »\n", - "────┤ h ├────┤ h ├┤ x ├──●───────────────────────────────●────────────»\n", - " ├───┤ ╰───╯╰───╯╭─┴─╮ ╭─┴─╮ ╭───╮»\n", - "────┤ h ├──────────────┤ x ├──●─────────────────●──────┤ x ├─────┤ h ├»\n", - "╭───┴───┴───╮ ╰───╯╭─┴─╮╭───────────╮╭─┴─╮╭───┴───┴────╮├───┤»\n", - "┤ rx(1.571) ├───────────────┤ x ├┤ rz(0.025) ├┤ x ├┤ rx(-1.571) ├┤ h ├»\n", - "╰───────────╯ ╰───╯╰───────────╯╰───╯╰────────────╯╰───╯»\n", - "\n", - "################################################################################\n", - "\n", - " »\n", - "────────────────────────────────────────────────────●─────────────────────────»\n", - " ╭─┴─╮ ╭───╮╭───────────╮»\n", - "───────────────●───────────────────────────●──────┤ x ├─────┤ h ├┤ rx(1.571) ├»\n", - "╭───────────╮╭─┴─╮ ╭─┴─╮╭───┴───┴────╮├───┤╰───────────╯»\n", - "┤ rx(1.571) ├┤ x ├──●─────────────────●──┤ x ├┤ rx(-1.571) ├┤ h ├─────────────»\n", - "╰───────────╯╰───╯╭─┴─╮╭───────────╮╭─┴─╮╰───╯╰────────────╯╰───╯ 
»\n", - "──────────────────┤ x ├┤ rz(0.025) ├┤ x ├─────────────────────────────────────»\n", - " ╰───╯╰───────────╯╰───╯ »\n", - "\n", - "################################################################################\n", - "\n", - " »\n", - "──●────────────────────────────────────────────────────────────────────────»\n", - "╭─┴─╮ »\n", - "┤ x ├──●────────────────────────────●──────────────────────────────●───────»\n", - "╰───╯╭─┴─╮ ╭─┴─╮ ╭───╮ ╭───────────╮╭─┴─╮ »\n", - "─────┤ x ├──●──────────────────●──┤ x ├────┤ h ├────┤ rx(1.571) ├┤ x ├──●──»\n", - " ╰───╯╭─┴─╮╭────────────╮╭─┴─╮├───┤╭───┴───┴───╮╰───────────╯╰───╯╭─┴─╮»\n", - "──────────┤ x ├┤ rz(-0.025) ├┤ x ├┤ h ├┤ rx(1.571) ├──────────────────┤ x ├»\n", - " ╰───╯╰────────────╯╰───╯╰───╯╰───────────╯ ╰───╯»\n", - "\n", - "################################################################################\n", - "\n", - " ╭───╮ ╭───────────╮ »\n", - "─────────────────────────●──────┤ h ├─────┤ rx(1.571) ├──●────────────»\n", - " ╭─┴─╮╭───┴───┴────╮╰───┬───┬───╯╭─┴─╮ »\n", - "────────────────────●──┤ x ├┤ rx(-1.571) ├────┤ h ├────┤ x ├──●───────»\n", - " ╭─┴─╮╰───╯╰────────────╯ ╰───╯ ╰───╯╭─┴─╮ »\n", - "───────────────●──┤ x ├─────────────────────────────────────┤ x ├──●──»\n", - "╭───────────╮╭─┴─╮╰───╯ ╰───╯╭─┴─╮»\n", - "┤ rz(0.025) ├┤ x ├───────────────────────────────────────────────┤ x ├»\n", - "╰───────────╯╰───╯ ╰───╯»\n", - "\n", - "################################################################################\n", - "\n", - " »\n", - "───────────────────────────────────────────────────────────────────────────»\n", - " »\n", - "────────────────────────●────────────────────────────●─────────────────────»\n", - " ╭─┴─╮ ╭────────────╮╭───╮╭─┴─╮ »\n", - "───────────────●──────┤ x ├─────┤ rx(-1.571) ├┤ h ├┤ x ├──●────────────────»\n", - "╭───────────╮╭─┴─╮╭───┴───┴────╮╰───┬───┬────╯╰───╯╰───╯╭─┴─╮╭────────────╮»\n", - "┤ rz(0.025) ├┤ x ├┤ rx(-1.571) ├────┤ h ├───────────────┤ x ├┤ rz(-0.025) ├»\n", - 
"╰───────────╯╰───╯╰────────────╯ ╰───╯ ╰───╯╰────────────╯»\n", - "\n", - "################################################################################\n", - "\n", - " »\n", - "────────────────●──────────────────────────●───────────────────────────────»\n", - " ╭─┴─╮ ╭───╮╭───────────╮╭─┴─╮ »\n", - "───────●──────┤ x ├────┤ h ├┤ rx(1.571) ├┤ x ├──●──────────────────────────»\n", - " ╭─┴─╮ ╰───╯ ╰───╯╰───────────╯╰───╯╭─┴─╮ »\n", - "──●──┤ x ├────────────────────────────────────┤ x ├──●──────────────────●──»\n", - "╭─┴─╮├───┤╭───────────╮ ╰───╯╭─┴─╮╭────────────╮╭─┴─╮»\n", - "┤ x ├┤ h ├┤ rx(1.571) ├────────────────────────────┤ x ├┤ rz(-0.025) ├┤ x ├»\n", - "╰───╯╰───╯╰───────────╯ ╰───╯╰────────────╯╰───╯»\n", - "\n", - "################################################################################\n", - "\n", - " »\n", - "──────────────────────────────────────────────────────────────────»\n", - " »\n", - "──────●───────────────────────────●────────────────────────────●──»\n", - " ╭─┴─╮ ╭───╮╭───────────╮╭─┴─╮ ╭─┴─╮»\n", - "────┤ x ├─────┤ h ├┤ rx(1.571) ├┤ x ├──●──────────────────●──┤ x ├»\n", - "╭───┴───┴────╮├───┤╰───────────╯╰───╯╭─┴─╮╭────────────╮╭─┴─╮├───┤»\n", - "┤ rx(-1.571) ├┤ h ├──────────────────┤ x ├┤ rz(-0.025) ├┤ x ├┤ h ├»\n", - "╰────────────╯╰───╯ ╰───╯╰────────────╯╰───╯╰───╯»\n", - "\n", - "################################################################################\n", - "\n", - " ╭────────────╮\n", - "──────●───────┤ rx(-1.571) ├\n", - " ╭─┴─╮ ├────────────┤\n", - "────┤ x ├─────┤ rx(-1.571) ├\n", - "╭───┴───┴────╮╰────────────╯\n", - "┤ rx(-1.571) ├──────────────\n", - "╰────────────╯ \n", - "────────────────────────────\n", - " \n", - "\n" - ] - } - ], + "outputs": [], "source": [ "cudaq.set_target(\"nvidia\")\n", "print(cudaq.sample(test, noise_model = noise))\n", @@ -685,16 +465,18 @@ "\n", "numQubits = molecule.n_orbitals * 2\n", "numElectrons = molecule.n_electrons\n", - "spin = 0\n", + "mol_spin = 0\n", "initialX = [-.2] * 
solvers.stateprep.get_num_uccsd_parameters(\n",
    "    numElectrons, numQubits)\n",
    "\n",
    "@cudaq.kernel\n",
    "def uccsd():\n",
-    "    q = cudaq.qvector(numQubits)\n",
-    "    for i in range(numElectrons):\n",
-    "        x(q[i])\n",
-    "    solvers.stateprep.uccsd(q, initialX, numElectrons, spin)\n",
+    "    reg = cudaq.qvector(numQubits)\n",
+
+    "    for i in range(numElectrons):\n",
+    "        x(reg[i])\n",
+    "    solvers.stateprep.uccsd(reg, initialX, numElectrons, mol_spin)\n",
+    "\n",
    "#noiseless value\n",
    "print(cudaq.observe(uccsd, molecule.hamiltonian, noise_model = noise_empty).expectation())\n",
@@ -716,12 +498,13 @@
    "id": "56c3fa72-0c52-4f47-afc3-2cb91b8c29ed",
    "metadata": {},
    "source": [
-    "
\n", - "

Exercise 4 :

\n", - "

\n", + "

\n", + "\n", + "**Exercise 4:**\n", + "\n", "Now, write a function that computes the expectation values for various configurations of errors. The function comments will guide you on the inputs and what the function should return.\n", - "

\n", - "
\n" + "\n", + "
" ] }, { @@ -731,7 +514,8 @@ "metadata": {}, "outputs": [], "source": [ - "def get_noisy_data(e_type =[], gate=[] , qubit=[], prob=[], shots=-1, trajectories=None):\n", + "# EXERCISE 4\n", + "def get_noisy_data(e_type: list[int] = [], gate: list[str] = [], qubit: list[int] = [], prob: list[float] = [], shots: int = -1, trajectories: int | None = None) -> list[float]:\n", " \"\"\"The function takes in various configurations of noise channels, builds a noise model, uses the noise model to obtain 40 expectation values,\n", " and the returns a list of the difference between the noisy expectation values and the noiseless.\n", "\n", @@ -751,15 +535,17 @@ " shots: int\n", " Designates the number of shots used to compute the expectation value. Default (-1) is exact, non-shot based result.\n", " trajectories: int\n", - " Designates the number of trajecttores sampled when computing the expectation value.\n", + " Designates the number of trajectories sampled when computing the expectation value.\n", "\n", " Returns\n", " -------\n", " list[float]\n", - " List of length 40 where each elementis the diffence between the noisy and noiseless value.\n", + " List of length 40 where each element is the difference between the noisy and noiseless value.\n", " \"\"\"\n", "\n", - " #TODO" + " ##TODO##\n", + "\n", + " return normalized_data" ] }, { @@ -772,14 +558,11 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "id": "08cc39a7-ac88-4a4f-860c-e953541929ae", "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "\n", "def plot_data(datasets, categories, labels=None):\n", " \"\"\"\n", " Plots the mean and ±1 SD error bars for one or more datasets,\n", @@ -838,7 +621,8 @@ " if len(datasets) > 1:\n", " plt.legend()\n", "\n", - " plt.show()\n" + " plt.show()\n", + "" ] }, { @@ -870,7 +654,8 @@ "labels =['series 1']\n", "\n", "plot_data([data], categories, labels)\n", - "\n" + "\n", + "" ] }, { @@ -894,7 
+679,7 @@ "metadata": {}, "outputs": [], "source": [ - "#TODO" + "##TODO##" ] }, { @@ -917,7 +702,7 @@ "metadata": {}, "outputs": [], "source": [ - "#TODO" + "##TODO##" ] }, { @@ -939,7 +724,7 @@ "metadata": {}, "outputs": [], "source": [ - "#TODO" + "##TODO##" ] }, { @@ -961,7 +746,7 @@ "metadata": {}, "outputs": [], "source": [ - "#TODO" + "##TODO##" ] }, { @@ -969,9 +754,9 @@ "id": "7e2da1e1-78b2-4aca-9748-918c9a43dea2", "metadata": {}, "source": [ - "### 3.3b: Zero Noise Extrapolation ###\n", + "### 3.3b: Zero Noise Extrapolation\n", "\n", - "QPU results today are sometimes improved using **quantum error mitigation (QEM)** techniques. QEM techniques use classical postprocessing to improve results without the utilization of proper QEC protocols. One such QEM technique is **zero noise extrapolation (ZNE)**. The idea behind ZNE is that it is really hard to remove noise from an algorithm run on a physical QPU, but it is very easy to add noise. \n", + "QPU results today are sometimes improved using quantum error mitigation (QEM) techniques. QEM techniques use classical postprocessing to improve results without the utilization of proper QEC protocols. One such QEM technique is **zero noise extrapolation (ZNE)**. The idea behind ZNE is that it is really hard to remove noise from an algorithm run on a physical QPU, but it is very easy to add noise. \n", "\n", "The ZNE process works by applying increasing factors of error through clever application of the identity operator. For example, consider a circuit composed of a single layer of $R_X$ rotations of $\\pi$ radians. Applying the same gate three times is mathematically the same as applying it once and should have no impact on the result. \n", "\n", @@ -979,24 +764,25 @@ "\n", "Experimentally, this is truly the identity operation as each gate is a noise channel and the total noise factor is increased from 1x to 3x. If this procedure is continued (5x, 7x, 9x, ...) 
the data can be fit to a curve and extrapolated back to estimate the experimentally inaccessible case of a 0x noise factor! So, paradoxically, adding noise can improve the result. \n", "\n", - "\"Drawing\"\n", + "\"Plot\n", "\n", "\n", "ZNE is a useful technique because it can be used experimentally. Noisy circuit simulation can demonstrate its effectiveness and help benchmark the effectiveness of ZNE when used on a physical QPU, help refine noise models, and test other QEM techniques before running experiments. \n", "\n", "\n", - "
\n", - "

Exercise 5 :

\n", - "

\n", - "You will now code an ZNE example by following the steps below:\n", + "

\n", + "\n", + "**Exercise 5:**\n", + "\n", + "You will now code a ZNE example by following the steps below:\n", "\n", "1. Create a Random Hamiltonian for a larger (20 qubit circuit)\n", "2. Define a quantum circuit with a layer of $R_x(\\pi/2)$ gates followed by a layer of $X$ gates.\n", - "3. Put a bitflip error on the $X$ gates and an Amplitudes Damping error on the $R_X$ gates.\n", - "4. Perform ZNE to obtain a correction for each. (Hint: use the $\\texttt{np.poly1d()}$ to fit a polynomial.)\n", + "3. Put a bitflip error on the $X$ gates and an **amplitude damping** error on the $R_X$ gates.\n", + "4. Perform ZNE to obtain a correction for each. (Hint: use the `np.poly1d()` to fit a polynomial.)\n", "5. Apply the correction to the original noisy circuit and calculate the percent error of the noisy circuit and the ZNE corrected result relative to the noiseless case.\n", - "

\n", - "
\n" + "\n", + "
" ] }, { @@ -1006,11 +792,12 @@ "metadata": {}, "outputs": [], "source": [ - "#Make Hamiltonian\n", - "#TODO\n", + "# EXERCISE 5\n", + "# Make Hamiltonian\n", + "##TODO##\n", "\n", - "#Simulate noise and fit extrapolations\n", - "#TODO\n", + "# Simulate noise and fit extrapolations\n", + "##TODO##\n", "\n", "\n", "def plot_zero_noise_extrapolation(noise_factors, measurements, poly_fit):\n", @@ -1018,16 +805,12 @@ " Plot the original data vs. noise factor and the polynomial fit extended \n", " down to noise=0 to show the extrapolation result.\n", " \"\"\"\n", - " # Create a range of noise values from 0 to slightly beyond the largest noise factor\n", " x_range = np.linspace(0, max(noise_factors) + 0.5, 50)\n", " y_fit = poly_fit(x_range)\n", "\n", - " # Plot measured data points\n", " plt.scatter(noise_factors, measurements, label='Measured Data', color='blue')\n", - " # Plot polynomial fit\n", " plt.plot(x_range, y_fit, label='Fit (degree = {})'.format(poly_fit.order), color='red')\n", "\n", - " # Highlight the zero-noise extrapolation point\n", " extrapolated_value = poly_fit(0)\n", " plt.scatter([0], [extrapolated_value], color='green', zorder=5, \n", " label='Zero-Noise Extrapolation = {:.3f}'.format(extrapolated_value))\n", @@ -1042,11 +825,7 @@ " print(f\"Percent Error of ZNE Estimate {(extrapolated_value - noiseless)/noiseless*100} %\")\n", "\n", "\n", - "print(f\"Percent Error of Uncorrected Noisy Circuit: {(results[0] - noiseless)/noiseless*100} %\")\n", - "\n", - "\n", - "plot_zero_noise_extrapolation(factors, results, linear)\n", - "plot_zero_noise_extrapolation(factors, results, quadratic)\n" + "##TODO## Run ZNE and plot results" ] }, { @@ -1054,8 +833,7 @@ "id": "62f0113a-c9e5-4da6-8fd5-9906243a04e6", "metadata": {}, "source": [ - "### 3.3c: QEC Experiments ###\n", - "\n" + "### 3.3c: QEC Experiments" ] }, { @@ -1063,18 +841,20 @@ "id": "51443267-658a-4a75-8f15-5f1eefae39b9", "metadata": {}, "source": [ - "Noisy circuit simulation is perhaps most useful 
as a tools for QEC researchers. One can test how a code will perform in a variety of different noise conditions. Assuming an accurate noise model, this can be a great way to assess characteristics of new codes. Below you will add noise to the Steane code you prepared in lab 2. \n", + "Noisy circuit simulation is perhaps most useful as a tool for QEC researchers. One can test how a code will perform in a variety of different noise conditions. Assuming an accurate noise model, this can be a great way to assess characteristics of new codes. Below you will add noise to the Steane code you prepared in lab 2. \n", "\n", "\n", - "
\n", - "

Exercise 6 :

\n", - "

\n", + "

\n", + "\n", + "**Exercise 6:**\n", + "\n", "Apply noise to the Steane code in the following three ways and determine which case produces the best and worst logical error rates, keeping the probability of error fixed at 0.05. In which cases is the logical error rate an improvement over the 0.05 error rate?\n", - "1. Use $\\texttt{cudaq.apply\\_noise(cudaq.XError, p, data\\_qubits[j])}$ to manually apply Kraus operators following encoding of the Steane code but before the stabilizer checks are run. These errors are not tied to gates but model errors induced while the system idles.\n", - "2. Now, use $\\texttt{cudaq.apply\\_noise(cudaq.Depolarization2, p, data\\_qubits[i], data\\_qubits[j])}$ to apply a depolarization error following all of the two qubit gates in the encoding circuit, whee q and r are the two qubits involved in the gate operation.\n", - "3. Apply a bitflip noise channel to all $\\texttt{mz}$ measurements. In this case, errors are also possible in measurements performed on the ancillas. This helps model situations where measurements are performed in a way that is not fault tolerant.\n", - "

\n", - "
\n" + "\n", + "1. Use `cudaq.apply_noise(cudaq.XError, p, data_qubits[j])` to manually apply Kraus operators following encoding of the Steane code but before the stabilizer checks are run. These errors are not tied to gates but model errors induced while the system idles.\n", + "2. Now, use `cudaq.apply_noise(cudaq.Depolarization2, p, data_qubits[i], data_qubits[j])` to apply a depolarization error following all of the two qubit gates in the encoding circuit, where i and j are the two qubits involved in the gate operation.\n", + "3. Apply a bitflip noise channel to all `mz` measurements. In this case, errors are also possible in measurements performed on the ancillas. This helps model situations where measurements are performed in a way that is not fault tolerant.\n", + "\n", + "
" ] }, { @@ -1233,7 +1013,44 @@ "\n", "logical_rate = ones/(zeros+ones)\n", " \n", - "print(f\"logical error rate:{logical_rate}\")\n" + "print(f\"logical error rate:{logical_rate}\")\n", + "" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d69f9e2d-2094-47cf-9721-987b389f9813", + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"\n", + "The different error sources should get increasingly worse, with the worst case involving measurement which cannot be fixed by stabilizer measurements.\n", + "\n", + "Case 1 is performed by adding the following code right after the encoding circuit and should produce a result around 0.04.\n", + "\n", + "\n", + " for j in range(7):\n", + " cudaq.apply_noise(cudaq.XError, p, data_qubits[j])\n", + "\"\"\"\n", + "\n", + "\"\"\"\n", + "**Answer:** \n", + "Case 2 is performed by adding individual errors following each gate in the encoding circuit. See example below.\n", + "The logical error rate should be around 0.11\n", + "\n", + " x.ctrl(data_qubits[0],data_qubits[1])\n", + " cudaq.apply_noise(cudaq.Depolarization2, p, data_qubits[0], data_qubits[1])\n", + "\"\"\"\n", + "\n", + "\n", + "\"\"\"\n", + "Case 3 is performed by adding the following line before the CUDA-Q kernel. This should produce a logical error rate of around 0.45. \n", + "\n", + " noise = cudaq.NoiseModel()\n", + " noise.add_all_qubit_channel(\"mz\", cudaq.BitFlipChannel(0.1))\n", + "\"\"\"\n", + "" ] }, { @@ -1241,40 +1058,34 @@ "id": "4cfd73b9-21d0-44ae-b314-35b6e4963b22", "metadata": {}, "source": [ - "## 3.4: Using Dynamical Simulations to Build a Noise Model ###\n", + "---\n", "\n", - "The noise models used thus far are meant to mimic the underlying physics of physical qubits. Often, noise models are heavily informed by experiment, but extracting meaningful insights can be extremely difficult for such complex systems. 
\n", + "## 3.4 Using Dynamical Simulations to Build a Noise Model\n", "\n", - "\"Drawing\"\n", + "The noise models used thus far are meant to mimic the underlying physics of physical qubits. Often, noise models are heavily informed by experiment, but extracting meaningful insights can be extremely difficult for such complex systems. \n", "\n", - "To help with this task, the physics of the qubits can also be simulated to better understand noise sources and improve interpretation of experimental data. This sort of simulation is known as dynamical simulation and models the evolution of a quantum system over time as the system interacts with its environment. \n", + "\"Flowchart\n", "\n", - "Exercise:\n", + "To help with this task, the physics of the qubits can also be simulated to better understand noise sources and improve interpretation of experimental data. This sort of simulation is known as **dynamical simulation** and models the evolution of a quantum system over time as the system interacts with its environment. \n", "\n", - "The code below will help you walk through an example of using dynamical simulation to produce a noise model for a single qubit amplitude damping channel. Recall, the corresponding noise channel looks like this. \n", + "The code below will help you walk through an example of using dynamical simulation to produce a noise model for a single qubit **amplitude damping** channel. Recall, the corresponding noise channel looks like this. \n", "\n", - "$$ \\epsilon(\\rho) = \\sqrt{1-p}*\\rho + \\sqrt{p}*\\rho*0.5*(X+iY) $$\n", + "$$ \\epsilon(\\rho) = \\sqrt{1-p} \\cdot \\rho + \\sqrt{p} \\cdot \\rho \\cdot 0.5 \\cdot (X+iY) $$\n", "\n", "Thus, the goal is to simulate a simple qubit system to determine what $p$, the probability of energy loss resulting in decay to the ground state, is.\n", "\n", "Dynamical simulation is its own topic that warrants a detailed introduction that will not be provided here. 
Instead, the steps of the dynamical simulation will be discussed at a high level while curious readers can explore the CUDA-Q dynamics page for more information and more detailed examples.\n", "\n", - "To get started, import the following functions and libraries. This example will use the CUDA-Q dynamics backend, set like any other backend. " + "To get started, this example will use the CUDA-Q dynamics backend, set like any other backend." ] }, { "cell_type": "code", - "execution_count": 18, + "execution_count": null, "id": "779f4d94-c0f1-4417-a8ed-417c2b0366f7", "metadata": {}, "outputs": [], "source": [ - "import cudaq\n", - "from cudaq import spin, operators, ScalarOperator, Schedule, ScipyZvodeIntegrator\n", - "import numpy as np\n", - "import cupy as cp\n", - "import os\n", - "import matplotlib.pyplot as plt\n", "cudaq.set_target(\"dynamics\")" ] }, @@ -1294,12 +1105,13 @@ "The code below sets up the the problem Hamiltonian, defines the dimensions of the system and specifies the initial ground state. The terms have more meaning than described above, but their details are not relevant for the purposes of this exercise.\n", "

\n", "
\n", - "\n" + "\n", + "" ] }, { "cell_type": "code", - "execution_count": 19, + "execution_count": null, "id": "33b1b507-26d2-4753-9179-344a9111c2ff", "metadata": {}, "outputs": [], @@ -1327,7 +1139,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": null, "id": "8b205e7e-6a71-4b25-8537-f401cf049d08", "metadata": {}, "outputs": [], @@ -1349,31 +1161,10 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": null, "id": "3316d5fb-7d4d-42ad-aec4-2cd4ac199cc2", "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Text(0.5, 1.0, 'No Decoherence')" - ] - }, - "execution_count": 21, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjcAAAHHCAYAAABDUnkqAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjEsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvc2/+5QAAAAlwSFlzAAAPYQAAD2EBqD+naQAAUNdJREFUeJzt3XlYVPXiBvB3FmaGHRQBWWRxX1ExCEvNokjN8ppLWmpq2uI1i+stLdPMa5iZaenNq6VmZZpWdktTCzXNSNxwwwVRREH2ZVhnYOb7+8Ocn1xRGRw4M8P7eZ55ZM6cM/POUWZez/mec2RCCAEiIiIiOyGXOgARERGRJbHcEBERkV1huSEiIiK7wnJDREREdoXlhoiIiOwKyw0RERHZFZYbIiIisissN0RERGRXWG6IiIjIrrDcEJFd2rNnD2QyGTZv3ix1FCJqZCw3RDZu7dq1kMlk0Gg0yMjIuOnxBx54AF26dLHY67399tuQyWSmm5OTE1q1aoXBgwdjzZo10Ol0FnstIqL6YLkhshM6nQ4LFixotNf75JNP8MUXX+Djjz/Gc889h4KCAkyYMAERERG4fPlyo+UgIvpfLDdEdqJ79+5YtWoVMjMzG+X1hg0bhmeeeQYTJ07E7NmzsX//fnz55Zc4efIkhg8f3igZrEFZWZnUEYjof7DcENmJN954AwaDoU5bb6qrqzFv3jy0bt0aarUawcHBeOONN+56l9LTTz+N5557DgcOHMAvv/xS47EDBw7g0Ucfhbu7O5ycnNCvXz/s37//pufIyMjAxIkT4efnB7VajZCQELz44ovQ6/WmeS5cuIDhw4ejWbNmcHJywr333outW7fWmsloNGL+/PkICAiARqPBQw89hPPnz980X13yXd8ll5ycjNGjR8PT0xP333+/6fEvv/wS4eHhcHR0RLNmzfDUU0/dtBXr+m7C5ORk9O/fH05OTvD398fChQtvylRZWYm3334b7dq1g0ajQcuWLTF06FCkpqbWeH9LlixB586dodFo4OPjg+effx6FhYW1rg+ipoDlhshOhISEYOzYsXXaevPcc89h9uzZ6NmzJz788EP069cPcXFxeOqpp+46x5gxYwAAO3fuNE3btWsX+vbtC61Wizlz5uDdd99FUVERHnzwQSQmJprmy8zMREREBDZs2ICRI0fio48+wpgxY/Dbb7+hvLwcAJCdnY3evXtjx44deOmllzB//nxUVlbi8ccfx/fff39TngULFuD777/H9OnTMXPmTPz5
5594+umna8xT13zXDR8+HOXl5Xj33XcxadIkAMD8+fMxduxYtG3bFosXL8Yrr7yC+Ph49O3bF0VFRTWWLywsxKOPPoqwsDB88MEH6NChA15//XX8/PPPpnkMBgMee+wxzJ07F+Hh4fjggw8wbdo0FBcX4+TJk6b5nn/+efzzn//Efffdh6VLl2L8+PH46quvEBMTg6qqqrr+tRHZF0FENm3NmjUCgDh48KBITU0VSqVSvPzyy6bH+/XrJzp37my6n5SUJACI5557rsbzTJ8+XQAQu3btuu3rzZkzRwAQubm5tT5eWFgoAIi//e1vQgghjEajaNu2rYiJiRFGo9E0X3l5uQgJCREPP/ywadrYsWOFXC4XBw8evOl5ry/7yiuvCABi3759psdKSkpESEiICA4OFgaDQQghxO7duwUA0bFjR6HT6UzzLl26VAAQJ06cMDvf9fc+atSoGtnS0tKEQqEQ8+fPrzH9xIkTQqlU1pjer18/AUCsW7fONE2n0wlfX1/x5JNPmqatXr1aABCLFy++5brYt2+fACC++uqrGo9v37691ulETQW33BDZkdDQUIwZMwYrV67E1atXa51n27ZtAIDY2Nga0//xj38AwC1379SVi4sLAKCkpAQAkJSUhJSUFIwePRr5+fnIy8tDXl4eysrK8NBDD2Hv3r0wGo0wGo3YsmULBg8ejF69et30vDKZzJQ/IiKixu4gFxcXTJ48GWlpaUhOTq6x3Pjx46FSqUz3+/TpA+Dari1z8t3ohRdeqHH/u+++g9FoxIgRI0zL5+XlwdfXF23btsXu3btvWkfPPPOM6b5KpUJERIQpEwB8++238PLywtSpU2+5LjZt2gR3d3c8/PDDNV43PDwcLi4uN70uUVOhlDoAEVnWrFmz8MUXX2DBggVYunTpTY9funQJcrkcbdq0qTHd19cXHh4euHTp0l29fmlpKQDA1dUVAJCSkgIAGDdu3C2XKS4uhl6vh1arveNh65cuXUJkZORN0zt27Gh6/MbnaNWqVY35PD09AcA0JqWu+a4vB1zbBXijlJQUCCHQtm3bWpd3cHCocT8gIMBUUG7Mdfz4cdP91NRUtG/fHkrlrT+mU1JSUFxcDG9v71ofz8nJueWyRPaM5YbIzoSGhuKZZ57BypUrMWPGjFvO979frpZyfTzI9fJ0favH+++/j+7du9e6jIuLCwoKChokj0KhqHW6EMKsfDdydHSscd9oNEImk+Hnn3+u9fX+d/k7Zaoro9EIb29vfPXVV7U+3qJFC7Oej8hesNwQ2aFZs2bhyy+/xHvvvXfTY0FBQTAajUhJSTFt7QCuDdQtKipCUFDQXb32F198AQCIiYkBALRu3RoA4Obmhujo6Fsu16JFC7i5udUYLFuboKAgnD179qbpZ86cMT1ujrrmu9NzCCEQEhKCdu3a1es5anvOAwcOoKqq6qYtPzfO8+uvv+K+++67qXARNWUcc0Nkh1q3bo1nnnkG//nPf5CVlVXjsYEDBwIAlixZUmP64sWLAQCDBg2q9+uuX78en376KaKiovDQQw8BAMLDw9G6dWssWrTItMvqRrm5uQAAuVyOIUOG4Mcff8ShQ4dumu/6Vo2BAwciMTERCQkJpsfKysqwcuVKBAcHo1OnTmZlrmu+2xk6dCgUCgXmzp1709YXIQTy8/PNygQATz75JPLy8rBs2bKbHrv+GiNGjIDBYMC8efNumqe6uvqmo7SImgpuuSGyU2+++Sa++OILnD17Fp07dzZNDwsLw7hx47By5UoUFRWhX79+SExMxOeff44hQ4agf//+dXr+zZs3w8XFBXq9HhkZGdixYwf279+PsLAwbNq0yTSfXC7Hp59+igEDBqBz584YP348/P39kZGRgd27d8PNzQ0//vgjAODdd9/Fzp070a9fP0yePBkdO3bE1atXsWnTJvz+++/w8PDAjBkz8PXXX2PAgAF4+eWX0axZM3z++ee4ePEivv32W8jl5v2fzZx8
t9K6dWv861//wsyZM5GWloYhQ4bA1dUVFy9exPfff4/Jkydj+vTpZuUaO3Ys1q1bh9jYWCQmJqJPnz4oKyvDr7/+ipdeeglPPPEE+vXrh+effx5xcXFISkrCI488AgcHB6SkpGDTpk1YunQphg0bZtbrEtkF6Q7UIiJLuPFQ8P81btw4AaDGoeBCCFFVVSXmzp0rQkJChIODgwgMDBQzZ84UlZWVd3y964dDX79pNBoREBAgHnvsMbF69epbPsfRo0fF0KFDRfPmzYVarRZBQUFixIgRIj4+vsZ8ly5dEmPHjhUtWrQQarVahIaGiilTptQ4nDs1NVUMGzZMeHh4CI1GIyIiIsRPP/1U43muHwq+adOmGtMvXrwoAIg1a9aYne9Oh8F/++234v777xfOzs7C2dlZdOjQQUyZMkWcPXvWNM//Hpp/3bhx40RQUFCNaeXl5eLNN980/T35+vqKYcOGidTU1BrzrVy5UoSHhwtHR0fh6uoqunbtKl577TWRmZlZa04ieycTwswRbERERERWjGNuiIiIyK6w3BAREZFdYbkhIiIiu8JyQ0RERHaF5YaIiIjsCssNERER2ZUmdxI/o9GIzMxMuLq6Nti1dYiIiMiyhBAoKSmBn5/fHU/W2eTKTWZmJgIDA6WOQURERPVw+fJlBAQE3HaeJlduXF1dAVxbOW5ubhKnISIiorrQarUIDAw0fY/fTpMrN9d3Rbm5ubHcEBER2Zi6DCnhgGIiIiKyKyw3REREZFdYboiIiMiusNwQERGRXWG5ISIiIrvCckNERER2heWGiIiI7ArLDREREdkVlhsiIiKyK5KWm71792Lw4MHw8/ODTCbDli1b7rjMnj170LNnT6jVarRp0wZr165t8JxERERkOyQtN2VlZQgLC8Py5cvrNP/FixcxaNAg9O/fH0lJSXjllVfw3HPPYceOHQ2clIiIiGyFpNeWGjBgAAYMGFDn+VesWIGQkBB88MEHAICOHTvi999/x4cffoiYmJiGiklEREQ2xKbG3CQkJCA6OrrGtJiYGCQkJEiUiIiIiABAX21EXqkOGUUVyNZWSprFpq4KnpWVBR8fnxrTfHx8oNVqUVFRAUdHx5uW0el00Ol0pvtarbbBcxIREdkTIQTySvVILyhDWl45LuWX4VJBOdLyy5FVXIHiiipUVhlN84cHeeLbF3tLltemyk19xMXFYe7cuVLHICIianRCCBiMAkYBGIWA+OtPgxAwGK79aTQKVBsFSiqrUViuR2GZHrmlOlwuKMel/HKkF5TjckE5yvSGOr2mWimHg0LWwO/s9myq3Pj6+iI7O7vGtOzsbLi5udW61QYAZs6cidjYWNN9rVaLwMDABs1JRETUmHJLdDhwMR9nrpbgTJYW6QXlKCirQlG5HtVGYZHXkMkAP3dHBDV3+uvmjODmTvD3cIKHkwPcNA5w0SihkEtbbAAbKzdRUVHYtm1bjWm//PILoqKibrmMWq2GWq1u6GhERESNKq9Uhy1HM/BDUiZOZBTX+3lkMkAhk8FVo4Snkwqezio0c1Yh0PNaiWnVzAmBzZwQ4OkIjYPCgu+g4UhabkpLS3H+/HnT/YsXLyIpKQnNmjVDq1atMHPmTGRkZGDdunUAgBdeeAHLli3Da6+9hgkTJmDXrl345ptvsHXrVqneAhERUaPKLKrAit9SseHgZeir/3+cS6eWbggLdEcHXzeEtnBGc2c1PJ0d4OiggEwmg1wGyGWya2VGLoNCJoNCLoNMJv2WFkuTtNwcOnQI/fv3N92/vvto3LhxWLt2La5evYr09HTT4yEhIdi6dSteffVVLF26FAEBAfj00095GDgREdk9bWUVFu88h68OXEKV4dqupm4B7hgeHoBHu7REC1fupbhOJoSwzM44G6HVauHu7o7i4mK4ublJHYeIiOi2hBDYeuIq3vkxGTkl147+vTe0GV5+qC2iQpvb5ZaX2pjz/W1TY26IiIiakuLyKsz47jh+PpkF
AAjxcsY7T3RGn7YtJE5m3VhuiIiIrNDhSwV4+eskZBRVwEEhw5T+bfBCv9Y2M6hXSiw3REREVsRoFPjkt1Qs/uUcDEaBoOZOWDaqJ7oGuEsdzWaw3BAREVmJnJJKxG48ht/P5wEAnujuh38N6QJXjYPEyWwLyw0REZEV+O1cLv7xTRLySvVwdFDgnSc6Y1h4QJMZMGxJLDdEREQS0lcbsWjnWazcewEA0MHXFctG90QbbxeJk9kulhsiIiKJXMgtxSsbk3D8yrUzDI+LCsLMgR05aPgusdwQERE1Mn21Ef/5LRUf7z4PfbURHk4OWPhkNzzS2VfqaHaB5YaIiKiRCCGw60wOFvx8Bik5pQCAfu1aIG5oV/h51H4BaDIfyw0REVED01cb8fPJq1i9Pw3HLhcBAJo7qzB7cCc8HubHQcMWxnJDRERkYZVVBqQXlCPpchH+OJ+H+DM5KKmsBgBoHOR4tncIXugXCg8nlcRJ7RPLDRER0W0YjAJJl4twMqMYKTklKCyrQnFFFUp01aiqNqLaaES1QUBv+P8/C8r0Nz2Pj5saoyOCMCoyEN6uGgneSdPBckNERFSLc9klWLM/DdtPXkVheZXZyzupFOji544eQR6I7uiDnq08oZBz91NjYLkhIiK6QWZRBf61NRnbTmSZprk7OuCeYE+083GFj5sGbo5KuKgdoFLK4SCXQamQQ6mQQfXXn96uGng6OXAsjURYboiIiP6y8WA65v6YjHK9ATIZENPJF2OjghAR0gxKhVzqeFRHLDdERNTk6aoNeGvLSXxz6AoAoFeQJ+YN6YKOLd0kTkb1wXJDRERNWrm+Gs9/cRj7UvIglwH/eKQ9XuzXGnKOj7FZLDdERNRklemqMW51Ig5dKoSTSoEVz4Sjb7sWUseiu8RyQ0RETZK+2ogXvjyMQ5cK4aZRYu2ECPRs5Sl1LLIAlhsiImpyhBD45+Zj2JeSByeVAl9MjERYoIfUschCOPSbiIianHUJl/BDUiaUchlWPBPOYmNnWG6IiKhJOX6lCPO3ngYAvDmoI8fY2CGWGyIiajKKK6owZf0R6A1GxHT2wbO9g6WORA2A5YaIiJoEIQRe33wclwsqEODpiIXDwngGYTvFckNERE3C53+kYfupLDgoZFg+uifcHR2kjkQNhOWGiIjs3vErRZi/7do4mzcGduQAYjvHckNERHbt+jibKoPAo519Oc6mCWC5ISIiu3XjOJvAZo54b1g3jrNpAlhuiIjIbnGcTdPEckNERHbpxnE2bw7siG4BHtIGokbDckNERHbnf8fZjOM4myaF5YaIiOyKEAKvbT7GcTZNGMsNERHZlX/vScWOU9kcZ9OEsdwQEZHd2H02B4t2ngUAzH28C8fZNFEsN0REZBfS8sow7eujEAIYHdkKoyNbSR2JJMJyQ0RENi+3RIdn1yRCW1mN8CBPvD24s9SRSEIsN0REZNO0lVV4dk0i0vLLEeDpiE+e7gmVkl9vTZlS6gBERET1lV+qw7NrDuJUphZeLip8MTES3m4aqWORxFhuiIjIJl3MK8PEzw/iQm4Zmjmr8PmECIR4OUsdi6wAyw0REdkUIQR+SMrEm9+fQJneAD93DdZNjEQbbxepo5GVYLkhIiKbIITA0ctFWLj9DP68UAAAiAxpho9G9YAPd0XRDVhuiIjIalQbjCivMqBCb0CZrhrlegMKyvQ4fKkQO5OzcfqqFgCgVsrx0gNtMKV/aygVHDxMNbHcEBFRoyrXV2PXmRwcvlSI5Ewtckt0KCzXo0xvgL7aeNtl1Uo5Huvmh1cfbosAT6dGSky2huWGiIgaRY62EkviU/DD0QyU6Q23nVchl8HJQQFHlQIuGiW6+rvj3tDmGNilJdydeDkFuj2WGyIialBCCHz2+0V8+Ms5U6kJau6E/u290S3AHX4ejmjurIKTWmkqNGqlnBe7pHpjuSEiogZTpqvGa5uPY+uJqwCA7oEeeP3RDrg3tBnLCzUYlhsiImoQ
JZVVGPNZIpIuF8FBIcPsxzrh6cggyOUsNdSwWG6IiMjiyvXVmLD2IJIuF8HDyQGfju2FXsHNpI5FTQTLDRERWZQQAq9uTMLBtEK4apT4cmIkuvi7Sx2LmhCeHICIiCzqk99SseNUNlQKOdaOv4fFhhodyw0REVlMQmo+Fu04CwCY+0RnhAdxVxQ1PpYbIiKyiJLKKkzfdAxGAQwPD8CoiFZSR6ImiuWGiIgs4t1tp5FRVIEAT0fMebyz1HGoCWO5ISKiu3YorQBfJ14GALw/LAwuah6vQtJhuSEiorsihMD8bacBAE/dE4io1s0lTkRNHcsNERHdlW0nsnA0vQhOKgViH24ndRwilhsiIqo/fbURC3ecAQBM6hMKbzeNxImIrKDcLF++HMHBwdBoNIiMjERiYuJt51+yZAnat28PR0dHBAYG4tVXX0VlZWUjpSUioht9+eclXMovRwtXNSb3DZU6DhEAicvNxo0bERsbizlz5uDIkSMICwtDTEwMcnJyap1//fr1mDFjBubMmYPTp0/js88+w8aNG/HGG280cnIiIiquqMJHu1IAAK9Gt4MzBxGTlZC03CxevBiTJk3C+PHj0alTJ6xYsQJOTk5YvXp1rfP/8ccfuO+++zB69GgEBwfjkUcewahRo+64tYeIiCzv33vOo6i8Cm28XTCiV4DUcYhMJCs3er0ehw8fRnR09P+HkcsRHR2NhISEWpfp3bs3Dh8+bCozFy5cwLZt2zBw4MBbvo5Op4NWq61xIyKiu3OlsBxr9qcBAGYO6AClQvJRDkQmkm1DzMvLg8FggI+PT43pPj4+OHPmTK3LjB49Gnl5ebj//vshhEB1dTVeeOGF2+6WiouLw9y5cy2anYioqftg5znoq424N7QZHuzgLXUcohpsqmrv2bMH7777Lv7973/jyJEj+O6777B161bMmzfvlsvMnDkTxcXFptvly5cbMTERkf05mVGM749mAADeHNgJMplM4kRENUm25cbLywsKhQLZ2dk1pmdnZ8PX17fWZd566y2MGTMGzz33HACga9euKCsrw+TJk/Hmm29CLr+5q6nVaqjVasu/ASKiJkgIgXf/OmHfE9390DWAV/wm6yPZlhuVSoXw8HDEx8ebphmNRsTHxyMqKqrWZcrLy28qMAqFAsC1XzgiImpYe87m4o/UfKgUckx/pL3UcYhqJelxe7GxsRg3bhx69eqFiIgILFmyBGVlZRg/fjwAYOzYsfD390dcXBwAYPDgwVi8eDF69OiByMhInD9/Hm+99RYGDx5sKjlERNQwqg1GxP18bavN+PuCEdjMSeJERLWTtNyMHDkSubm5mD17NrKystC9e3ds377dNMg4PT29xpaaWbNmQSaTYdasWcjIyECLFi0wePBgzJ8/X6q3QETUZGw+fAXnskvh4eSAl/q3kToO0S3JRBPbn6PVauHu7o7i4mK4ublJHYeIyCaU66vxwPt7kFOiw1uPdcLE+0OkjkRNjDnf3zZ1tBQREUlj1d6LyCnRoVUzJ4y5N0jqOES3xXJDRES3laOtxH/2pgIAXnu0PVRKfnWQdeO/UCIiuq2FO86iXG9A90APDOraUuo4RHfEckNERLd07HIRNh++AgCYM5gn7CPbwHJDRES1EkLg7R9PAQCe7BmAHq08JU5EVDcsN0REVKstSRk4ml4EZ5UCrz/KE/aR7WC5ISKimxSXV+HdbdcuYjzlwTbwdtNInIio7lhuiIjoJu/8lIzcEh1at3DmOW3I5rDcEBFRDbvP5ODbI1cgkwELh4VBreTlbci2sNwQEZFJVnEl/rn5GABg4n0hCA/iIGKyPSw3REQEAKgyGDH16yPIK9Wjg68rpsdwEDHZJpYbIiKCEAJvbTmJg2mFcFUr8ckz4dA4cHcU2SaWGyKiJk4IgQU/n8GGg5chlwEfjAhDiJez1LGI6k0pdQAiIpKOvtqIOf89ha8T0wEAC4Z2wyOdfSVORXR3WG6IiJqo8zmlmPnd
cRxMK4RMBrw9uDNG3BModSyiu8ZyQ0TUhAghcDJDi68PpuObg5dRbRRwUSux9KnueKijj9TxiCyC5YaIyA6U6qqRnKlFam4pCsv1KKmshq7KiMpqg+lPbUUVkjO1yC/Tm5aL7uiN2Y91RqvmThKmJ7IslhsiIhtlNArsOJWFjYcuY++5XBhF3ZZTKeWI6eyLURGB6N3aq2FDEkmA5YaIyAadyizGrC0ncTS9yDStpbsGHXxd0dxFDVeNEhoHBdRKuelPZ5US7Xxd0cHXlYd5k11juSEisjHfHr6CGd8dR5VBwFmlwLjewRjeK5CHbxP9heWGiMiGLN99Hu/vOAsAeLiTD+Y90QW+7rxiN9GNWG6IiGzEl39eMhWbFx9ojX8+0h5yuUziVETWh+WGiMgG7DqTjdk/nAQAvBrdDtOi20qciMh68fILRERW7mpxBV7deAxGAYzoFYCXH2ojdSQiq8ZyQ0RkxYxGgembjqG4ogrdAtwx/29dIZNxVxTR7bDcEBFZsc8T0rD/fD4cHRRYMrI7HBT82Ca6E/6WEBFZqZySSizeeQ4A8Magjght4SJxIiLbwHJDRGSlFmw7gxJdNcIC3PF0RCup4xDZDJYbIiIrdPhSIb47mgGZDHjniS485JvIDCw3RERWaNFf57MZ1jMAYYEe0oYhsjEsN0REVuaP83lIuJAPlUKOVx5uJ3UcIpvDckNEZEWEEFi089pWm9GRreDv4ShxIiLbw3JDRGRF9pzNxZH0IqiVcrz0QGup4xDZJJYbIiIrIYTA4l+uHfo9rncwvN14QUyi+mC5ISKyEntT8nAioxhOKgWe7xsqdRwim8VyQ0RkJVbuTQUAPHVPKzR3UUuchsh2sdwQEVmBkxnF2H8+Hwq5DBPuD5Y6DpFNY7khIrICK/deAAA81q0lAjydJE5DZNtYboiIJHalsBxbT1wFAEzmWBuiu8ZyQ0Qksc9+vwiDUeD+Nl7o7OcudRwim8dyQ0QkoeLyKmw8eBkAt9oQWQrLDRGRhL48cAnlegM6tnRDn7ZeUschsgssN0REEqmsMmDN/jQAwOS+IZDJeOVvIktguSEiksiWoxnIK9XBz12Dx7r5SR2HyG6w3BARScBoFFi579rh3xPuD4GDgh/HRJbC3yYiIgnEn8nBhdwyuGqUeCqildRxiOwKyw0RUSMTQuCTPecBAE9HBsFFrZQ4EZF9YbkhImpkiRcLcCS9CCqlnJdaIGoALDdERI3sk9+uXSBzeHgAvF01Eqchsj8sN0REjehUZjH2nM2FXMaT9hE1FJYbIqJGtOK36xfI9ENQc2eJ0xDZp3qNYtPpdDhw4AAuXbqE8vJytGjRAj169EBISIil8xER2Y2LeWXYejwTAPBCv9YSpyGyX2aVm/3792Pp0qX48ccfUVVVBXd3dzg6OqKgoAA6nQ6hoaGYPHkyXnjhBbi6ujZUZiIim7Tk13MwCuChDt7o5OcmdRwiu1Xn3VKPP/44Ro4cieDgYOzcuRMlJSXIz8/HlStXUF5ejpSUFMyaNQvx8fFo164dfvnll4bMTURkU85mleC/x65ttXn14XYSpyGyb3XecjNo0CB8++23cHBwqPXx0NBQhIaGYty4cUhOTsbVq1ctFpKIyNZ9+Ms5CAEM7OqLLv7uUschsmsyIYSQOkRj0mq1cHd3R3FxMdzcuFmYiBpe4sUCjPhPAmQyYOcrfdHWh7vticxlzve35EdLLV++HMHBwdBoNIiMjERiYuJt5y8qKsKUKVPQsmVLqNVqtGvXDtu2bWuktERE5jEYBeb+eAoA8NQ9rVhsiBqBRcvNsWPHoFAo6jz/xo0bERsbizlz5uDIkSMICwtDTEwMcnJyap1fr9fj4YcfRlpaGjZv3oyzZ89i1apV8Pf3t9RbICKyqE2HLuNUphauGiWmP8KxNkSNweIXNDFnL9fixYsxadIkjB8/HgCwYsUKbN26FatXr8aMGTNumn/16tUoKCjAH3/8YRr7Exwc
bJHcRESWlq2tRNzPZwAA0x5qi+YuaokTETUNZpWboUOH3vbx4uJiyGSyOj2XXq/H4cOHMXPmTNM0uVyO6OhoJCQk1LrMf//7X0RFRWHKlCn44Ycf0KJFC4wePRqvv/76LbcY6XQ66HQ6032tVlunfEREd0MIgRnfHkdxRRW6+rtjXO9gqSMRNRlm7Zb68ccfUVlZCXd391pvLi4udX6uvLw8GAwG+Pj41Jju4+ODrKysWpe5cOECNm/eDIPBgG3btuGtt97CBx98gH/961+3fJ24uLgaGQMDA+uckYiovr46kI7dZ3OhUsrxwYgwOCgkH+JI1GSYteWmY8eOePLJJzFx4sRaH09KSsJPP/1kkWC1MRqN8Pb2xsqVK6FQKBAeHo6MjAy8//77mDNnTq3LzJw5E7Gxsab7Wq2WBYeIGlRCaj7e/u+1QcT/fKQ92nEQMVGjMuu/EuHh4Thy5MgtH1er1WjVqlWdnsvLywsKhQLZ2dk1pmdnZ8PX17fWZVq2bIl27drV2AXVsWNHZGVlQa/X3zKTm5tbjRsRUUNJyS7Bi18dRrVRYHCYH57rw8vSEDU2s8rNihUr8P7779/y8Y4dO+LixYt1ei6VSoXw8HDEx8ebphmNRsTHxyMqKqrWZe677z6cP38eRqPRNO3cuXNo2bIlVCpVHd8FEVHDOJpeiOH/SUBReRXCAtzx/rBudR6HSESWY1a5UavVcHJystiLx8bGYtWqVfj8889x+vRpvPjiiygrKzMdPTV27NgaA45ffPFFFBQUYNq0aTh37hy2bt2Kd999F1OmTLFYJiIicxmMAp/uu4BRq/68VmwCPbBmfAQ0DnU/NQYRWU6dx9yUlZXB2dm5zk9cl/lHjhyJ3NxczJ49G1lZWejevTu2b99uGmScnp4Oufz/+1dgYCB27NiBV199Fd26dYO/vz+mTZuG119/vc65iIgsJb9Uh+2nsvBFwiWcySoBAPRv3wLLRveEs9riZ9ogojqq8+UXWrZsiWnTpmHcuHFo2bJlrfMIIfDrr79i8eLF6Nu3b42tLtaCl18goju5WlyBg2mFSMkuQV6pDtqKauiqDaisMqKyyoDKagMKy6qQUVRhWsZVo8QbAzviqXsCuSuKqAGY8/1d5/9a7NmzB2+88QbefvtthIWFoVevXvDz84NGo0FhYSGSk5ORkJAApVKJmTNn4vnnn7/rN0JE1FiMRoGdyVlY/XsaEtMK6rxcB19XDO8ViCd7+sPDiWP/iKyB2RfOTE9Px6ZNm7Bv3z5cunQJFRUV8PLyQo8ePRATE4MBAwaYdQmGxsYtN0T0vzKLKvDPzcew/3w+AEAuAzr7uaNrgDt8XDVwc1RC46CAxkEOjVIBjYMCzmolOrR0hZvGQeL0RE2DOd/fvCo4ETVph9IKMGHtQWgrq6FxkGPCfSEYGxUMX3eN1NGI6AYNslvqRrt370b//v3rFY6IyFr8kZqH5z4/hHK9AWEB7vhwZHeEtqj7mdaJyDrV63zgjz76KFq3bo1//etfuHz5sqUzERE1uDNZWkxce63Y9GnrhQ2To1hsiOxEvcpNRkYG/v73v2Pz5s0IDQ1FTEwMvvnmm1ueJZiIyJoUlesxed1hVFQZcH8bL3w6rhccVdY7VpCIzFOvcuPl5YVXX30VSUlJOHDgANq1a4eXXnoJfn5+ePnll3Hs2DFL5yQisgghBKZvOob0gnIEeDri41E9oFay2BDZk7u+TG3Pnj0xc+ZM/P3vf0dpaSlWr16N8PBw9OnTB6dOnbJERiIii/khKRO/ns6BSiHHf8aEw9OZh28T2Zt6l5uqqips3rwZAwcORFBQEHbs2IFly5YhOzsb58+fR1BQEIYPH27JrEREdyW/VIe5P177T9fLD7VBZz93iRMRUUOo19FSU6dOxddffw0hBMaMGYOFCxeiS5cupsednZ2xaNEi+Pn5WSwoEdHdenfbGRSWV6GDryue79da6jhE1EDqVW6Sk5Px8ccfY+jQ
oVCr1bXO4+Xlhd27d99VOCIiSzmVWYzvjl4BAMQN7QoHxV3vlSciK1Wv3+45c+Zg+PDhNxWb6upq7N27FwCgVCrRr1+/u09IRHSXhBCI23YGQgCDw/zQo5Wn1JGIqAHVq9z0798fBQU3X3uluLiYJ/cjIquzNyUPv5/Pg0ohx2sx7aWOQ0QNrF7lRghR61Vv8/Pz4ezsfNehiIgsRQiBD385BwAYExWEwGZOEiciooZm1piboUOHAgBkMhmeffbZGrulDAYDjh8/jt69e1s2IRHRXfj9fB6SLhdBrZTjBQ4iJmoSzCo37u7XDpsUQsDV1RWOjo6mx1QqFe69915MmjTJsgmJiOpJCIGlv6YAAJ6ODEIL19oPgCAi+2JWuVmzZg0AIDg4GNOnT+cuKCKyaokXC3DoUiFUSjme7xcqdRwiaiT1OhR8zpw5ls5BRGRxq/dfBAA82TMAPm4aidMQUWOpc7np2bMn4uPj4enpiR49etQ6oPi6I0eOWCQcEVF9peeXY2dyNgBgwn3B0oYhokZV53LzxBNPmAYQDxkypKHyEBFZxOcJaRAC6NuuBdr6uEodh4gakUwIIaQO0Zi0Wi3c3d1RXFwMNzc3qeMQUQMoqaxCVNwulOqqsXb8PXigvbfUkYjoLpnz/c3zjxOR3dl8+ApKddUIbeGMvm1bSB2HiBpZnXdLeXp63naczY1qO3sxEVFjMBoF1v6RBgAYf18I5PK6fW4Rkf2oc7lZsmRJA8YgIrKMXWdycCm/HG4aJZ7s6S91HCKSQJ3Lzbhx4xoyBxGRRVw//HtUZCs4qep1tgsisnF1/s3XarWmATxarfa283KgLhFJ4fRVLf5IzYdCLsPYqGCp4xCRRMwac3P16lV4e3vDw8Oj1vE31y+oaTAYLBqSiKgu1u5PAwA82sUX/h6Ot5+ZiOxWncvNrl270KxZMwDA7t27GywQEVF95Jfq8H1SBgCetI+oqatzuenXr1+tPxMRWYP1B9KhrzYiLMAdPVt5Sh2HiCRU79F2hYWF+Oyzz3D69GkAQKdOnTB+/HjT1h0iosairzZi3Z+XAAAT7g+p82kriMg+1eskfnv37kVwcDA++ugjFBYWorCwEB999BFCQkKwd+9eS2ckIrqtrScykVuig4+bGgO6tJQ6DhFJrF5bbqZMmYKRI0fik08+gUKhAAAYDAa89NJLmDJlCk6cOGHRkEREtyKEwKf7rh3+PebeIKiUPPE6UVNXr0+B8+fP4x//+Iep2ACAQqFAbGwszp8/b7FwRER3kpCaj1OZWjg6KPB0ZJDUcYjICtSr3PTs2dM01uZGp0+fRlhY2F2HIiKqq5X7LgAARvQKgKezSuI0RGQN6rxb6vjx46afX375ZUybNg3nz5/HvffeCwD4888/sXz5cixYsMDyKYmIanE2qwR7zuZCLrs2kJiICABkQghRlxnlcjlkMhnuNLu1n8TPnEumE5F1++emY9h0+AoGdPHFJ8+ESx2HiBqQOd/fdd5yc/HixbsORkRkKdnaSmz566R9k/qGSpyGiKxJnctNUBAH6hGR9VjxWyqqDAK9gjx50j4iquGuLpmbnJyM9PR06PX6GtMff/zxuwpFRHQ7OdpKrD+QDgCYFt1W4jREZG3qVW4uXLiAv/3tbzhx4kSNcTjXzwpqzWNuiMj2ffJbKnTVRoQHeeL+Nl5SxyEiK1OvQ8GnTZuGkJAQ5OTkwMnJCadOncLevXvRq1cv7Nmzx8IRiYj+X0ZRhWmrzavR7XipBSK6Sb223CQkJGDXrl3w8vKCXC6HXC7H/fffj7i4OLz88ss4evSopXMSEQEAFm4/A121EZEhzXBfm+ZSxyEiK1SvLTcGgwGurq4AAC8vL2RmZgK4Nuj47NmzlktHRHSDI+mF+CEpEzIZ8NZjnbjVhohqVa8tN126dMGxY8cQEhKCyMhILFy4ECqVCitXrkRoKA/JJCLLMxgF5v73FABgWM8AdPF3lzgREVmrepWbWbNm
oaysDADwzjvv4LHHHkOfPn3QvHlzbNy40aIBiYgAYNW+Czh2pRiuaiX+GdNe6jhEZMXqVW5iYmJMP7dp0wZnzpxBQUEBPD09uZmYiCwuJbsEi3eeAwC8NbgTvN00EiciImt2V+e5AYDLly8DAAIDA+86DBHR/yqprMKU9UegNxjRv30LDA8PkDoSEVm5eg0orq6uxltvvQV3d3cEBwcjODgY7u7umDVrFqqqqiydkYiaKINRYNqGJJzLLoW3qxoLnuzGrcNEdEf12nIzdepUfPfdd1i4cCGioqIAXDs8/O2330Z+fj4++eQTi4YkoqanymDEa5uPY9eZHKiVcqwa2ws+3B1FRHVQ56uC38jd3R0bNmzAgAEDakzftm0bRo0aheLiYosFtDReFZzI+hWU6fHqxiT8di4XSrkMH43qgYFdW0odi4gk1CBXBb+RWq1GcHDwTdNDQkKgUqnq85RERKisMmDL0Qy8t/0MCsur4OigwL+f6Yn+7b2ljkZENqRe5ebvf/875s2bhzVr1kCtVgMAdDod5s+fj7///e8WDUhEtk8IgfwyPXK0OpRUVkFXbfzrZoCuyoiCMj1OZhZj95kcaCurAQAdfF2xaHgYz2dDRGarc7kZOnRojfu//vorAgICEBYWBgA4duwY9Ho9HnroIcsmJCKbZDAK7DiVhf8mZSIxrQAFZfo6Lefv4Yhnewfj2fuC4aCo1zEPRNTE1bncuLvX/N/Tk08+WeM+DwUnout+Sc7GvJ+SkV5QbpomkwHNnVVwc3SAWqmAWim/dnNQwEWtQEdfN4QHeeLe0OaQy3lEFBHVX53LzZo1axoyBxHZgcoqA97+7ylsOHjt/FeeTg4YHdkKD3bwRld/D6iU3BJDRA3vrk7il5uba7pQZvv27dGiRQuLhCIi21OhN2DyF4ewLyUPMhkwuU8oXoluB0eVQupoRNTE1Ou/UWVlZZgwYQJatmyJvn37om/fvvDz88PEiRNRXl5+5yf4H8uXL0dwcDA0Gg0iIyORmJhYp+U2bNgAmUyGIUOGmP2aRGQ5umoDxq9NxL6UPDipFFg3IQIzB3ZksSEiSdSr3MTGxuK3337Djz/+iKKiIhQVFeGHH37Ab7/9hn/84x9mPdfGjRsRGxuLOXPm4MiRIwgLC0NMTAxycnJuu1xaWhqmT5+OPn361OctEJGFCCEwe8sp/HmhAC5qJdZNiECfttyKS0TSqddJ/Ly8vLB582Y88MADNabv3r0bI0aMQG5ubp2fKzIyEvfccw+WLVsGADAajQgMDMTUqVMxY8aMWpcxGAzo27cvJkyYgH379qGoqAhbtmyp0+vxJH5ElvXVgUt48/uTkMuAteMj0Lcdiw0RWZ4539/12nJTXl4OHx+fm6Z7e3ubtVtKr9fj8OHDiI6O/v9Acjmio6ORkJBwy+XeeecdeHt7Y+LEiXd8DZ1OB61WW+NGRJaRmluKuT8mAwD+GdOBxYaIrEK9yk1UVBTmzJmDyspK07SKigrMnTvXdK2pusjLy4PBYLipKPn4+CArK6vWZX7//Xd89tlnWLVqVZ1eIy4uDu7u7qYbD1knsgyjUeD1zcehrzaib7sWeKFfqNSRiIgA1PNoqSVLluDRRx+96SR+Go0GO3bssGjAG5WUlGDMmDFYtWoVvLy86rTMzJkzERsba7qv1WpZcIgs4Is/L+HQpUI4qxSIG9qVV+smIqtRr3LTtWtXpKSk4KuvvsKZM2cAAKNGjcLTTz8NR0fHOj+Pl5cXFAoFsrOza0zPzs6Gr6/vTfOnpqYiLS0NgwcPNk0zGo3X3ohSibNnz6J169Y1llGr1aZLRBCRZRSU6bFo57XTQMwY0AH+HnX/vSciamhml5uqqip06NABP/30EyZNmnRXL65SqRAeHo74+HjT4dxGoxHx8fG1XqOqQ4cOOHHiRI1ps2bNQklJCZYuXcotMkSNZOmv51BSWY1OLd0wOjJI6jhERDWYXW4cHBxqjLW5W7GxsRg3
bhx69eqFiIgILFmyBGVlZRg/fjwAYOzYsfD390dcXBw0Gg26dOlSY3kPDw8AuGk6ETWM8zml+PJAOgBg1qCOUPBSCURkZeq1W2rKlCl477338Omnn0KpvKuTHGPkyJHIzc3F7NmzkZWVhe7du2P79u2mQcbp6emQy3nKdiJr8cHOszAYBaI7+qB3m7qNfSMiakz1Os/N3/72N8THx8PFxQVdu3aFs7Nzjce/++47iwW0NJ7nhqj+TmUWY9BHv0MmA3a+0hdtfVyljkRETYQ539/12uzi4eFx01XBicj+Lf01BQDwWDc/FhsislpmlRuj0Yj3338f586dg16vx4MPPoi3337brCOkiMg2ncwoxs7kbMhlwLSH2kodh4jolswazDJ//ny88cYbcHFxgb+/Pz766CNMmTKlobIRkRVZ8VsqAGBwmB/aeLtInIaI6NbMKjfr1q3Dv//9b+zYsQNbtmzBjz/+iK+++sp0rhkisk/p+eXYduIqAOCFfq3vMDcRkbTMKjfp6ekYOHCg6X50dDRkMhkyMzMtHoyIrMenv1+AUQD92rVAx5YciE9E1s2sclNdXQ2NRlNjmoODA6qqqiwaioisR36pDt8cugwAeL4vrx9FRNbPrAHFQgg8++yzNS5nUFlZiRdeeKHG4eDWfCg4EZlnXcIlVFYZ0dXfHVGtm0sdh4jojswqN+PGjbtp2jPPPGOxMERkXcr11ViXkAYAeL5fKC+OSUQ2waxys2bNmobKQURWaNOhKygsr0KrZk54tPPNF7MlIrJGvK4BEdWq2mDEqn0XAACT+oRAqeDHBRHZBn5aEVGtfj6ZhSuFFWjmrMKw8ECp4xAR1RnLDRHdRAiBlXuvbbUZc28QHFUKiRMREdUdyw0R3eTPCwU4kVEMtVKOsVFBUschIjILyw0R3eT6WJvhvQLQ3EV9h7mJiKwLyw0R1ZCSXYJdZ3IgkwET7+dJ+4jI9rDcEFEN18faxHTyRYiX8x3mJiKyPiw3RGSSo63ElqQMAMDkftxqQ0S2ieWGiEw+238RVQaBXkGe6NnKU+o4RET1wnJDRACuXSDzi4RLAIAXH2gtcRoiovpjuSEiAMDKfRdQrjegq787HuzgLXUcIqJ6Y7khIuSX6rDuj2tbbV6JbssLZBKRTWO5ISKs3HcBFVUGdAvgVhsisn0sN0RNXB632hCRnWG5IWriPth5DhVVBoQFuKN/e261ISLbx3JD1IQlZ2qx8WA6AODNQZ241YaI7ALLDVETJYTAOz+dglEAg7q2RERIM6kjERFZBMsNURO16dAV/HmhACqlHDMGdJA6DhGRxbDcEDVBmUUVmPdTMgDgHw+3Q2AzJ4kTERFZDssNURNjMAq8tvk4SnTV6NHKA8/14TWkiMi+sNwQNTELfj6N38/nQeMgx/vDwqCQcxAxEdkXlhuiJmRDYjpW7bsIAFg0PAxtvF0kTkREZHksN0RNxLqENMz47gQA4O/92+Cxbn4SJyIiahhKqQMQUcMq11djwc9nsO6vK36Pvy8YsQ+3kzgVEVHDYbkhslMVegO+P5qBFb+lIr2gHAAw7aG2vMQCEdk9lhsiG5OjrcSFvDJkaytRUlmNyioDKvQGVFRdu1VWGZCWV44j6YXQVRsBAC3dNXh/WBjub+slcXoioobHckNkA9Lzy/HlgUvYcSoLl/LL67xcYDNHPNs7BCPvCYSLmr/uRNQ08NOOyIoVV1Rhwc+nseHgZQhxbZpcBgQ3d4a3mxpuGgc4qhRwdFBA46CA018/e7mqcU+wJ1q3cOEuKCJqclhuiKzUobQCTFl/BNlaHQCgb7sWeDqyFaJaN4ebxkHidERE1ovlhsgKbT95FS9vSIK+2ohQL2fEDe2KyNDmUsciIrIJLDdEVmb7ySy8+NURCAFEd/TGR6N6wEnFX1UiorriJyaRFTl8qQDTNhyFEMCw8AAsGNoVSgXPtUlEZA5+ahJZiavFFXju80PQVRsR3dGbxYaIqJ74yUlk
BQxGgWkbklBYXoXOfm74aFQPFhsionripyeRFVi26zwSLxbAWaXAstE9OcaGiOgusNwQSez0VS0+3pUCAJj/t64I8XKWOBERkW1juSGSkMEoMOO7E6g2CsR09sGQHv5SRyIisnksN0QS+vLPSzh2uQiuaiXeeaKL1HGIiOwCyw2RRIorqvDhr+cAAK8N6AAfN43EiYiI7APLDZFEVvyWiqLyKrT1dsGoewKljkNEZDdYbogkcLW4Aqt/vwgAeP3RDjzsm4jIgviJSiSBD385B121ERHBzfBQR2+p4xAR2RWWG6JGdi67BJsPXwEAzBjYATKZTOJERET2heWGqJG99/MZGAXwaGdf9GzlKXUcIiK7w3JD1IgOXMhH/JkcKOQy/PPR9lLHISKySyw3RI1ECIG4n88AAJ66JxCtW7hInIiIyD6x3BA1kp9PZiHpchGcVApMi24rdRwiIrtlFeVm+fLlCA4OhkajQWRkJBITE28576pVq9CnTx94enrC09MT0dHRt52fyBpUGYx4f8dZAMBzfULh7coT9hERNRTJy83GjRsRGxuLOXPm4MiRIwgLC0NMTAxycnJqnX/Pnj0YNWoUdu/ejYSEBAQGBuKRRx5BRkZGIycnqrsNiem4mFcGLxcVJvcNlToOEZFdkwkhhJQBIiMjcc8992DZsmUAAKPRiMDAQEydOhUzZsy44/IGgwGenp5YtmwZxo4de8f5tVot3N3dUVxcDDc3t7vOT3QnpbpqPPD+buSV6vHOE50xNipY6khERDbHnO9vSbfc6PV6HD58GNHR0aZpcrkc0dHRSEhIqNNzlJeXo6qqCs2aNWuomER3ZdXeC8gr1SO4uRNGRbSSOg4Rkd1TSvnieXl5MBgM8PHxqTHdx8cHZ86cqdNzvP766/Dz86tRkG6k0+mg0+lM97Vabf0DE5kpp6QSq/ZdAAD8M6YDHHiZBSKiBmfTn7QLFizAhg0b8P3330OjqX2AZlxcHNzd3U23wEBeoJAaz0fxKSjXGxAW6IGBXX2ljkNE1CRIWm68vLygUCiQnZ1dY3p2djZ8fW//RbBo0SIsWLAAO3fuRLdu3W4538yZM1FcXGy6Xb582SLZie7kTJYW6w+kAwBmDuBlFoiIGouk5UalUiE8PBzx8fGmaUajEfHx8YiKirrlcgsXLsS8efOwfft29OrV67avoVar4ebmVuNG1NCEEHjnx2QYBTCgiy/uDW0udSQioiZD0jE3ABAbG4tx48ahV69eiIiIwJIlS1BWVobx48cDAMaOHQt/f3/ExcUBAN577z3Mnj0b69evR3BwMLKysgAALi4ucHHhGV/JOuxMzsYfqflQKeV4Y2BHqeMQETUpkpebkSNHIjc3F7Nnz0ZWVha6d++O7du3mwYZp6enQy7//w1Mn3zyCfR6PYYNG1bjeebMmYO33367MaMT1UpXbcD8racBAJP6hCCwmZPEiYiImhbJz3PT2HieG2poH8WnYPEv5+Dtqsbu6Q/AWS35/yGIiGyezZznhsjenMsuwce7UgAAbw7qyGJDRCQBlhsiCzEYBV7bfBxVBoGHOnjj8TA/qSMRETVJLDdEFrI0PgVJl4vgqlZi/t+68tBvIiKJsNwQWcDec7mm3VHzhnSBrzuv+k1EJBWWG6K7dDGvDK9sTIIQwKiIVhjSw1/qSERETRrLDdFdyNZWYsxnB1BQpkcXfzfMGdxJ6khERE0eyw1RPaXnl2P0qj9xpbACQc2dsObZCGgcFFLHIiJq8nicKlE9/JGah6nrjyK/TI+W7hp8MSESLVzVUsciIiKw3BCZJaOoAkt/PYdvDl0BAHT2c8PqZ++BjxsHEBMRWQuWG6K/GIwCFVUGVOgNqKwymH4u1xtwNkuLfSl52H02B8a/zuk9OrIV3hzIE/UREVkbfipTk1NtMOKP1HzsT83D8cvFuFxYjrxSHSqrjHVavnfr5oh9uB16BTdr4KRERFQfLDfUZJTpqvHZ7xexLuES8kp1
t53X0UEBR5UCjg4KaBzk8Pd0QmRIM8R09kEbb9dGSkxERPXBckNNwk/HM/H2f08hr1QPAPB0csAjnXzRM8gDbbxd0MJFA1eNEo4qBdRKOc8uTERkw1huyK5VVhkwa8tJbD58bQBwUHMnxD7cDgO7toSDgmdCICKyRyw3ZLeKyvV47vNDOHSpEHIZMKV/G7z8UFuWGiIiO8dyQ3Ypr1SHUSv/REpOKdw0SqwYE47erb2kjkVERI2A5YbsTnFFFcZ+loiUnFL4umnw+YQItPflIGAioqaC5Ybsir7aiEnrDiH5qhZeLiqsnxSJ0BYuUsciIqJGxMEHZFfe/vEUEi8WwFWtxLoJLDZERE0Ryw3Zja8OXML6A+mQyYCPRvVAJz83qSMREZEEWG7ILhxKK8CcH04BAKY/0h79O3hLnIiIiKTCckM2r7BMj6lfH0W1UWBQt5Z46YHWUkciIiIJsdyQTRNC4PVvj+NqcSVCvZyx8MluPLswEVETx3JDNu3LA+nYmZwNlUKOj0b14BW6iYiI5YZs15ksLeb9lAwAeH1AB3Txd5c4ERERWQOWG7JJFXoDpq4/Cn21Ef3bt8CE+4KljkRERFaC5YZs0js/JSMlpxQtXNV4f3gYx9kQEZEJyw3ZnK3Hr+LrxGvns1kysju8XNRSRyIiIivCckM25XJBOWZ8dxwA8GK/1rivDS+GSURENbHckM2oMhgxbcNRlFRWo0crD7z6cDupIxERkRViuSGb8eEv53AkvQiuGiU+eqoHHBT850tERDfjtwPZhK3Hr+Lfe1IBAHFDuyKwmZPEiYiIyFqx3JDVO5VZjOmbjgEAnrs/BI9185M4ERERWTOWG7JqeaU6TF53GBVVBvRp64UZAzpIHYmIiKwcyw1ZLW1lFZ5dk4iMogqEeDlj2aieUHKcDRER3QG/KcgqlemqMWHNQZzM0KK5swqfjusFdycHqWMREZEN4FUGyerklugwYe1BnMgohptGiXUTI9C6hYvUsYiIyEaw3JBVOX6lCFPWH8Hlggo0c1ZhzbP3oLMfL4hJRER1x3JDVqGyyoBVey9gaXwKqo0Cgc0csW5CJEK8nKWORkRENoblhiRVWKbHlqQMrNp7AZnFlQCAgV19Efe3bhxjQ0RE9cJyQxZTZTCiqLwKJZVVKNVVo8ogYBQCBuMNNyGQW6LDpfwyHEorxNH0IugNRgCAn7sGrz3aAU909+NVvomIqN5YbqjecrSV2HUmB3+k5uNkZjEu5ZfDYBRmP0+nlm54KiIQI3oFQuOgaICkRETUlLDckFmEENhzNhdr/0jDvpRc1NZlXNVKuGiUcFDIoZDLIJcBSrkccrkMCjng6aRCgKcTugW4IyKkGY+EIiIii2K5oTr7PSUPcT+fxqlMrWla90AP9G3XAr2CPNHWxwXerhoo5NylRERE0mG5oTvKK9Xhze9PYMepbACAk0qBpyNb4enIIATzaCYiIrIyLDd0Wwmp+Zi24ShySnRQyGUYGxWElx9sC09nldTRiIiIasVyQ7UyGAU+3pWCj+JTYBRAG28XfDyqBzq2dJM6GhER0W2x3NBNcrSVeGVjEv5IzQcADA8PwNwnOsNJxX8uRERk/fhtRTXsS8nFqxuTkFeqh5NKgX8N6YKhPQOkjkVERFRnLDcEAKg2GLHk1xQs33MeQgAdfF2xbHRPtPHmYdpERGRbWG4IV4srMO3rJCSmFQAARke2wuzHOvGEekREZJNYbpq4naey8Pq3x1FYXgUXtRJxQ7ticJif1LGIiIjqjeWmiSrXV2PeT8n4OvEyAKCLvxuWjerJ89YQEZHNY7lpYoQQ+CU5G/O2JuNyQQVkMmBy31D84+H2UCnlUscjIiK6ayw3Tcjpq1rE/XwGe8/lArh2Fe5FI8LQu7WXxMmIiIgsh+XGzlUZjNh7LhfrD6Qj/kwOAEClkOO5PiGY0r8NnNX8J0BERPaF32w2QAiBKoOAgIAQ//vYX3/i2g+lldVIzS3D+dxS
HLxYgN/P56GgTA8AkMmAgV1aYnpMe4RwbA0REdkpqyg3y5cvx/vvv4+srCyEhYXh448/RkRExC3n37RpE9566y2kpaWhbdu2eO+99zBw4MBGTGw51QYjzueW4mSGFqm5pcgsqsDVokpkFlegVFeNyioDdNXGm0qNOZo7q/BEd388c28rhLbgeWuIiMi+SV5uNm7ciNjYWKxYsQKRkZFYsmQJYmJicPbsWXh7e980/x9//IFRo0YhLi4Ojz32GNavX48hQ4bgyJEj6NKliwTvoO4qqwxIyS7FqcxinMgoxslMLc5c1UJXbbTYa8hlQGAzJ7Ru4YLOfm7o07YFerTygIOCg4WJiKhpkAlxN9sE7l5kZCTuueceLFu2DABgNBoRGBiIqVOnYsaMGTfNP3LkSJSVleGnn34yTbv33nvRvXt3rFix4o6vp9Vq4e7ujuLiYri5We4ikPpqI/LLdCjXG1CuM6BEV4WrRZW4UliBi3mlSL6qRWpuGQzGm1e3i1qJTn5uaO/jigBPR/h5OMLPQwN3RweolQpoHBRQKeWQy67NL5Nd++Gvu5Bdnw4ZlAoZiwwREdkdc76/Jd1yo9frcfjwYcycOdM0TS6XIzo6GgkJCbUuk5CQgNjY2BrTYmJisGXLllrn1+l00Ol0pvtarfbug9fiaHohRq78847zeTo5oLOfOzr7u6GLnzu6+LsjqJkT5NebCxEREd0VSctNXl4eDAYDfHx8akz38fHBmTNnal0mKyur1vmzsrJqnT8uLg5z5861TODbcFIpoZTL4KRSwEmlhJNagZbuGvh7OKJVMyd08nNDp5bu8HFTm7a8EBERkeVJPuamoc2cObPGlh6tVovAwECLv04Xfzecf9c2BzUTERHZE0nLjZeXFxQKBbKzs2tMz87Ohq+vb63L+Pr6mjW/Wq2GWq22TODb4NYYIiIi6yDpyFOVSoXw8HDEx8ebphmNRsTHxyMqKqrWZaKiomrMDwC//PLLLecnIiKipkXy3VKxsbEYN24cevXqhYiICCxZsgRlZWUYP348AGDs2LHw9/dHXFwcAGDatGno168fPvjgAwwaNAgbNmzAoUOHsHLlSinfBhEREVkJycvNyJEjkZubi9mzZyMrKwvdu3fH9u3bTYOG09PTIZf//wam3r17Y/369Zg1axbeeOMNtG3bFlu2bLH6c9wQERFR45D8PDeNraHOc0NEREQNx5zvb57tjYiIiOwKyw0RERHZFZYbIiIisissN0RERGRXWG6IiIjIrrDcEBERkV1huSEiIiK7wnJDREREdoXlhoiIiOyK5JdfaGzXT8is1WolTkJERER1df17uy4XVmhy5aakpAQAEBgYKHESIiIiMldJSQnc3d1vO0+Tu7aU0WhEZmYmXF1dIZPJLPrcWq0WgYGBuHz5Mq9b1YC4nhsH13Pj4HpuPFzXjaOh1rMQAiUlJfDz86txQe3aNLktN3K5HAEBAQ36Gm5ubvzFaQRcz42D67lxcD03Hq7rxtEQ6/lOW2yu44BiIiIisissN0RERGRXWG4sSK1WY86cOVCr1VJHsWtcz42D67lxcD03Hq7rxmEN67nJDSgmIiIi+8YtN0RERGRXWG6IiIjIrrDcEBERkV1huSEiIiK7wnJjpuXLlyM4OBgajQaRkZFITEy87fybNm1Chw4doNFo0LVrV2zbtq2Rkto2c9bzqVOn8OSTTyI4OBgymQxLlixpvKA2zpz1vGrVKvTp0weenp7w9PREdHT0Hf/90zXmrOfvvvsOvXr1goeHB5ydndG9e3d88cUXjZjWdpn7+Xzdhg0bIJPJMGTIkIYNaEfMWddr166FTCarcdNoNA0bUFCdbdiwQahUKrF69Wpx6tQpMWnSJOHh4SGys7NrnX///v1CoVCIhQsXiuTkZDFr1izh4OAgTpw40cjJbYu56zkxMVFMnz5dfP3118LX11d8+OGHjRvYRpm7nkePHi2WL18ujh49Kk6fPi2effZZ4e7u
Lq5cudLIyW2Luet59+7d4rvvvhPJycni/PnzYsmSJUKhUIjt27c3cnLbYu56vu7ixYvC399f9OnTRzzxxBONE9bGmbuu16xZI9zc3MTVq1dNt6ysrAbNyHJjhoiICDFlyhTTfYPBIPz8/ERcXFyt848YMUIMGjSoxrTIyEjx/PPPN2hOW2fuer5RUFAQy00d3c16FkKI6upq4erqKj7//POGimgX7nY9CyFEjx49xKxZsxoint2oz3qurq4WvXv3Fp9++qkYN24cy00dmbuu16xZI9zd3Rsp3TXcLVVHer0ehw8fRnR0tGmaXC5HdHQ0EhISal0mISGhxvwAEBMTc8v5qX7rmcxnifVcXl6OqqoqNGvWrKFi2ry7Xc9CCMTHx+Ps2bPo27dvQ0a1afVdz++88w68vb0xceLExohpF+q7rktLSxEUFITAwEA88cQTOHXqVIPmZLmpo7y8PBgMBvj4+NSY7uPjg6ysrFqXycrKMmt+qt96JvNZYj2//vrr8PPzu6nA0/+r73ouLi6Gi4sLVCoVBg0ahI8//hgPP/xwQ8e1WfVZz7///js+++wzrFq1qjEi2o36rOv27dtj9erV+OGHH/Dll1/CaDSid+/euHLlSoPlbHJXBSeiu7dgwQJs2LABe/bsafiBgU2Qq6srkpKSUFpaivj4eMTGxiI0NBQPPPCA1NHsQklJCcaMGYNVq1bBy8tL6jh2LyoqClFRUab7vXv3RseOHfGf//wH8+bNa5DXZLmpIy8vLygUCmRnZ9eYnp2dDV9f31qX8fX1NWt+qt96JvPdzXpetGgRFixYgF9//RXdunVryJg2r77rWS6Xo02bNgCA7t274/Tp04iLi2O5uQVz13NqairS0tIwePBg0zSj0QgAUCqVOHv2LFq3bt2woW2UJT6jHRwc0KNHD5w/f74hIgLgbqk6U6lUCA8PR3x8vGma0WhEfHx8jUZ6o6ioqBrzA8Avv/xyy/mpfuuZzFff9bxw4ULMmzcP27dvR69evRojqk2z1L9no9EInU7XEBHtgrnruUOHDjhx4gSSkpJMt8cffxz9+/dHUlISAgMDGzO+TbHEv2mDwYATJ06gZcuWDRWTh4KbY8OGDUKtVou1a9eK5ORkMXnyZOHh4WE6pG3MmDFixowZpvn3798vlEqlWLRokTh9+rSYM2cODwWvA3PXs06nE0ePHhVHjx4VLVu2FNOnTxdHjx4VKSkpUr0Fm2Duel6wYIFQqVRi8+bNNQ7pLCkpkeot2ARz1/O7774rdu7cKVJTU0VycrJYtGiRUCqVYtWqVVK9BZtg7nr+Xzxaqu7MXddz584VO3bsEKmpqeLw4cPiqaeeEhqNRpw6darBMrLcmOnjjz8WrVq1EiqVSkRERIg///zT9Fi/fv3EuHHjasz/zTffiHbt2gmVSiU6d+4stm7d2siJbZM56/nixYsCwE23fv36NX5wG2POeg4KCqp1Pc+ZM6fxg9sYc9bzm2++Kdq0aSM0Go3w9PQUUVFRYsOGDRKktj3mfj7fiOXGPOas61deecU0r4+Pjxg4cKA4cuRIg+aTCSFEw20XIiIiImpcHHNDREREdoXlhoiIiOwKyw0RERHZFZYbIiIisissN0RERGRXWG6IiIjIrrDcEBERkV1huSEim/Lss89iyJAhUscgIivGC2cSkdWQyWS3fXzOnDlYunQpeO5RIrodlhsishpXr141/bxx40bMnj0bZ8+eNU1zcXGBi4uLFNGIyIZwtxQRWQ1fX1/Tzd3dHTKZrMY0FxeXm3ZLPfDAA5g6dSpeeeUVeHp6wsfHB6tWrUJZWRnGjx8PV1dXtGnTBj///HON1zp58iQGDBgAFxcX+Pj4YMyYMcjLy2vkd0xEDYHlhohs3ueffw4vLy8kJiZi6tSpePHFFzF8+HD07t0bR44cwSOPPIIxY8agvLwcAFBUVIQHH3wQPXr0wKFDh7B9+3ZkZ2djxIgREr8TIrIElhsisnlhYWGY
NWsW2rZti5kzZ0Kj0cDLywuTJk1C27ZtMXv2bOTn5+P48eMAgGXLlqFHjx5499130aFDB/To0QOrV6/G7t27ce7cOYnfDRHdLY65ISKb161bN9PPCoUCzZs3R9euXU3TfHx8AAA5OTkAgGPHjmH37t21jt9JTU1Fu3btGjgxETUklhsisnkODg417stkshrTrh+FZTQaAQClpaUYPHgw3nvvvZueq2XLlg2YlIgaA8sNETU5PXv2xLfffovg4GAolfwYJLI3HHNDRE3OlClTUFBQgFGjRuHgwYNITU3Fjh07MH78eBgMBqnjEdFdYrkhoibHz88P+/fvh8FgwCOPPIKuXbvilVdegYeHB+RyfiwS2TqZ4Kk+iYiIyI7wvyhERERkV1huiIiIyK6w3BAREZFdYbkhIiIiu8JyQ0RERHaF5YaIiIjsCssNERER2RWWGyIiIrIrLDdERERkV1huiIiIyK6w3BAREZFdYbkhIiIiu/J/GT4ZH/DHhxQAAAAASUVORK5CYII=", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "evolution_result = cudaq.evolve(hamiltonian,\n", " dimensions,\n", @@ -1409,31 +1200,10 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": null, "id": "40abb26a-e966-4dc2-aa07-11ef4248364d", "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Text(0.5, 1.0, 'Decoherence')" - ] - }, - "execution_count": 22, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjcAAAHHCAYAAABDUnkqAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjEsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvc2/+5QAAAAlwSFlzAAAPYQAAD2EBqD+naQAATuxJREFUeJzt3XlcVOXiBvBnZmBmZBtAZBUBQcV9ASXNtUhS00wrs3LLLEttoe4vLZVs08xKTW+WN61s0TIrczdcKsMNdxYVFcEFEJBdZpiZ9/cHOTeuaAwynFme7+czn+TMOTPPnBQe3vOec2RCCAEiIiIiOyGXOgARERFRQ2K5ISIiIrvCckNERER2heWGiIiI7ArLDREREdkVlhsiIiKyKyw3REREZFdYboiIiMiusNwQERGRXWG5ISKrlpmZCZlMhgULFkgdhYhsBMsNkQP7/PPPIZPJTA+1Wo3AwEDExcVh8eLFKC0tlToiEZHZnKQOQETSe+ONNxAWFoaqqirk5ORg165deOGFF/DBBx9g/fr16NSpk9QRiYjqjOWGiDBo0CBER0ebvp4xYwZ27NiB++67D8OGDUNaWhqaNGkiYULLKy8vh6urq9QxiKgB8LAUEdXqrrvuwqxZs3D+/Hl89dVXpuXp6el48MEH4e3tDbVajejoaKxfv/6G7YuKivDiiy8iNDQUKpUKzZs3x9ixY5Gfn29aJy8vDxMnToSfnx/UajU6d+6ML7744qaZPv30U4SHh0OlUqF79+44cODADevUJd/1w3G7d+/Gs88+C19fXzRv3tz0/ObNm9GnTx+4urrC3d0dQ4YMQUpKSo3XGD9+PNzc3HDx4kUMHz4cbm5uaNasGV5++WUYDIYa6xqNRixatAgdO3aEWq1Gs2bNcO+99+LgwYM11vvqq68QFRWFJk2awNvbG4888giys7Nvuj+IqHYsN0R0U2PGjAEAbNu2DQCQkpKCO+64A2lpaZg+fTref/99uLq6Yvjw4fjxxx9N25WVlaFPnz746KOPMHDgQCxatAiTJ09Geno6Lly4AAC4du0a+vfvj1WrVuGxxx7De++9B41Gg/Hjx2PRokU3ZPnmm2/w3nvv4emnn8Zbb72FzMxMjBgxAlVVVaZ16prvumeffRapqamYPXs2pk+fDgBYtWoVhgwZAjc3N7z77ruYNWsWUlNT0bt3b2RmZtbY3mAwIC4uDk2bNsWCBQvQr18/vP/++/j0009rrDdx4kS88MILCA4Oxrvvvovp06dDrVZj7969pnXefvttjB07Fq1atcIHH3yAF154AYmJiejbty+KiorM+L9GRBBE5LBWrlwpAIgDBw7cdB2NRiO6du0qhBDi7rvvFh07dhSVlZWm541Go+jVq5do1aqVadns2bMFALFu3bobXs9oNAohhFi4cKEAIL766ivTczqdTvTs2VO4ubmJkpISIYQQ586dEwBE06Z
NRWFhoWndn3/+WQAQv/zyi2lZXfNd/9y9e/cWer3etLy0tFR4enqKSZMm1cick5MjNBpNjeXjxo0TAMQbb7xRY92uXbuKqKgo09c7duwQAMRzzz13032RmZkpFAqFePvtt2s8f/z4ceHk5HTDciK6NY7cENEtubm5obS0FIWFhdixYwcefvhhlJaWIj8/H/n5+SgoKEBcXBxOnz6NixcvAgB++OEHdO7cGQ888MANryeTyQAAmzZtgr+/P0aPHm16ztnZGc899xzKysqwe/fuGtuNGjUKXl5epq/79OkDADh79iwAmJXvukmTJkGhUJi+3r59O4qKijB69GjT9vn5+VAoFIiJicHOnTtv+DyTJ0+u8XWfPn1Mma7vC5lMhoSEhJvui3Xr1sFoNOLhhx+u8b7+/v5o1apVre9LRDfHCcVEdEtlZWXw9fVFRkYGhBCYNWsWZs2aVeu6eXl5CAoKwpkzZzBy5Mhbvu758+fRqlUryOU1f8dq27at6fm/a9GiRY2vrxedq1evAoBZ+a4LCwur8fzp06cBVM83qo2Hh0eNr6/Pn/nfXNczAcCZM2cQGBgIb2/vWl/z+vsKIdCqVatan3d2dr7ptkR0I5YbIrqpCxcuoLi4GBERETAajQCAl19+GXFxcbWuHxERYbEsfx9h+TshBADUK9//ngF2/TVWrVoFf3//G7Z3cqr5LfNmmcxlNBohk8mwefPmWl/Tzc2tQd6HyFGw3BDRTa1atQoAEBcXh5YtWwKoHkWIjY295Xbh4eE4ceLELdcJCQnBsWPHYDQaa4zepKenm543hzn5biY8PBwA4OvrW+/XqO01t27disLCwpuO3oSHh0MIgbCwMLRu3bpB3pfIkXHODRHVaseOHXjzzTcRFhaGxx57DL6+vujfvz8++eQTXL58+Yb1r1y5YvrzyJEjcfTo0VrPULo+0jJ48GDk5ORgzZo1puf0ej0++ugjuLm5oV+/fmblNSffzcTFxcHDwwPvvPNOjbOwzHmN/zVy5EgIITBnzpwbnru+L0aMGAGFQoE5c+aYlv19nYKCArPfl8iRceSGiLB582akp6dDr9cjNzcXO3bswPbt2xESEoL169dDrVYDAJYuXYrevXujY8eOmDRpElq2bInc3FwkJSXhwoULOHr0KADgX//6F9auXYuHHnoITzzxBKKiolBYWIj169dj2bJl6Ny5M5566il88sknGD9+PJKTkxEaGoq1a9diz549WLhwIdzd3c3+HHXNdzMeHh74+OOPMWbMGHTr1g2PPPIImjVrhqysLGzcuBF33nknlixZYlamAQMGYMyYMVi8eDFOnz6Ne++9F0ajEb///jsGDBiAqVOnIjw8HG+99RZmzJiBzMxMDB8+HO7u7jh37hx+/PFHPPXUU3j55ZfN3h9EDkuq07SISHrXT4m+/lAqlcLf31/cc889YtGiRabTsf/uzJkzYuzYscLf3184OzuLoKAgcd9994m1a9fWWK+goEBMnTpVBAUFCaVSKZo3by7GjRsn8vPzTevk5uaKCRMmCB8fH6FUKkXHjh3FypUra7zO9VPB33vvvRuyABAJCQlm5/unU+B37twp4uLihEajEWq1WoSHh4vx48eLgwcPmtYZN26ccHV1vWHbhIQE8b/fWvV6vXjvvfdEZGSkUCqVolmzZmLQoEEiOTm5xno//PCD6N27t3B1dRWurq4iMjJSTJkyRZw8ebLWnERUO5kQ/zMGSkRERGTDOOeGiIiI7ArLDREREdkVlhsiIiKyKyw3REREZFdYboiIiMiusNwQERGRXXG4i/gZjUZcunQJ7u7upjvyEhERkXUTQqC0tBSBgYE33HD3fzlcubl06RKCg4OljkFERET1kJ2djebNm99yHYcrN9cv6Z6dnQ0PDw+J0xAREVFdlJSUIDg4uE63ZnG4cnP9UJSHhwfLDRERkY2py5QSTigmIiIiu8JyQ0RERHaF5YaIiIjsCssNERER2RWWGyIiIrI
rLDdERERkV1huiIiIyK6w3BAREZFdYbkhIiIiu8JyQ0RERHaF5YaIiIjsCssNERER2RWHu3EmERGRNdLpjbimM+BalQECAkqFHEonOVROCiidOBZhDpYbIiIiCyrT6pFTfA2XiiqRU1yJnJJK5JZUIrdEi7zS6j8XlOmgN4qbvoabygk+bko0dVOhmZsK/ho1AjRq+GvU8PdQI0DTBL4eKqidFY34yawXyw0REVEdCCFQoTOg6FoVrpbrUHytCsXXqlDy13+Lr1WhpLIKxdf0KKrQIbekEpeLK1FaqTfrfRRyGWRAjbJTptWjTKtHZkHFLbf1dlX+VXbU8HJVQukkN40AVRmMqKwyQltlQKXeYBoluv7fCl31n3V6I1TOCripFNC4KBHW1AUtm7mhfaAHuod5w0PtXJ/d16hYboiIiP6mQqfHiYslOJpdhJO5pcgqrEB2YQXyy7SoMtx8dOVW3NVOCNQ0MY20+GnU8PNQwc9dDT8PNXzclXBROqGJ838PQRmNAjqDEdoqIwrKtcgv06GgTIvckkrklGj/Kk/XTKNBlVVGFJbrUFiuQ+rlktvaB6VaPfLLABRU4Gh2kWm5XAZ0D/XGfZ0DcX+XQKstOjIhRP3+T9mokpISaDQaFBcXw8PDQ+o4RERkBbIKKrAtNQfbUnORfP4qDLc4RKRUyOHp4gxNE2d4ujjDQ139Z4+/Hpq/Hn4eKgT8VWjcVJYdSxBCoPhaFS7/VXRyiitRVFEFnd4IncGAKoOAk1wGtbMCamd59X+dFGiiVKCJswIuSgXUyur/KhVyVFYZUaHT40qpFucKynEmrxzJ5wtrjBy5qZwwqnswxvcKRbC3i0U/H2Dez2+WGyIickhVBiO2p+biq73n8eeZghrP+Xuo0TlYg/aBGoQ0dUGwtwv8PdTwdHFGE2cFZDKZRKmllV1Ygc0nLuO7gxeQkVcGAHCSy/D4HSF47u5W8HZVWuy9WW5ugeWGiMix6fRGfJ+cjaU7MnCpuBIAIJMBPVs2xT3t/BDb1q9RRiJsmRACu09dwfLfz2JPRnUxdFc74fm7W2F8r1A4KRr+7C6Wm1tguSEickxGo8BPRy7i/W2ncLHoGgDAx02FR7oH45EewWjuxUJTH3sy8vHWxjSk/TXPp12AB+aO6IjOwZ4N+j4sN7fAckNE5HiOXyhGwvoTOJRVBADwdVfhmf7hGN2jBU+fbgAGo8D3B7Mxd3M6iq9VwcdNiT9euatB9605P795thQREdmtgjItFmw7idUHsiEE4KJUYOpdEXjizjCWmgakkMvwSI8WiG3nh7c3pqF3hI+k+5flhoiI7I7BKPDNvvN4b+tJlPx1nZnhXQIxY3Bb+HmoJU5nv3zcVPhwVBepY7DcEBGRfTmUdRWzfz6BExf/Owdkzv3t0T3UW+Jk1FhYboiIyC7kl2nx3paTWHMwGwDgoXbCy3Ft8FhMCBRyxzx121Gx3BARkU0rrazC8t/P4bPfz6JcZwAAPBTVHK8MioSPm0ridCQFlhsiIrJJuSWV+GrveXy19zyuVlQBADo11yBhaDtEhfAQlCNjuSEiIptRrtVj18kr2HT8Mram5JhuLtmymSv+NbAN7u3g77BXD6b/YrkhIiKrVHytChevXkNmQTmOXSjGsQtFSD5/FVq90bRO91AvTLgzDAPb+Vnkqrhkm1huiIio0RVfq0JGXiky8sqQWVCBgjItCst1yC+rvqt1QZnWNH/mf4U0dcG97f0xtHMgOgRpGjk52QKWGyIiahQZeaVYd+gidp+6gpRLJXXaxttVieZeTdA+0AOdm3uiW4gXWvm68dAT3RLLDRERWYwQAnsyCrA48TT2ZxbWeC5Ao0aErxta+riimbsK3q4qNHVToqmrEt6uSvhr1HBR8scUmY9/a4iIyCIy88sx86cT+CMjH0D1JfoHtGmGIZ0CcGeED3zdeaVgsgyWGyIialBCCHy19zze2pgGrd4IpZMcj/Zogcn9wuGvYaEhy2O
5ISKiBnNNZ8D0dcfw85FLAIA7I5ri7eEdEerjKnEyciQsN0RE1CAKy3WY+MUBHM4qgkIuw/R7I/FknzBO/qVGx3JDRES3La+kEqOX78WZK+XQNHHGp2OiENOyqdSxyEFJfsWjpUuXIjQ0FGq1GjExMdi/f/8t11+4cCHatGmDJk2aIDg4GC+++CIqKysbKS0REf2v/DItHv3PPpy5Uo4AjRprJ/dksSFJSVpu1qxZg/j4eCQkJODQoUPo3Lkz4uLikJeXV+v633zzDaZPn46EhASkpaXhs88+w5o1a/Dqq682cnIiIgKAogodHv/PPmTklSFAo8Z3T/dEKz93qWORg5O03HzwwQeYNGkSJkyYgHbt2mHZsmVwcXHBihUral3/zz//xJ133olHH30UoaGhGDhwIEaPHv2Poz1ERNTwrukMGLfyANJzStHMXYWvn4xBsLeL1LGIpCs3Op0OycnJiI2N/W8YuRyxsbFISkqqdZtevXohOTnZVGbOnj2LTZs2YfDgwTd9H61Wi5KSkhoPIiK6PUajwItrjuBodhE8XZzxzZMxaNnMTepYRAAknFCcn58Pg8EAPz+/Gsv9/PyQnp5e6zaPPvoo8vPz0bt3bwghoNfrMXny5Fselpo7dy7mzJnToNmJiBzdu1vSsSUlB0qFHMvHRvNQFFkVyScUm2PXrl1455138O9//xuHDh3CunXrsHHjRrz55ps33WbGjBkoLi42PbKzsxsxMRGR/fl633l88ttZAMB7D3VC91BviRMR1STZyI2Pjw8UCgVyc3NrLM/NzYW/v3+t28yaNQtjxozBk08+CQDo2LEjysvL8dRTT+G1116DXH5jV1OpVFCpVA3/AYiIHNDuU1cw++cUAED8Pa1xf5cgiRMR3UiykRulUomoqCgkJiaalhmNRiQmJqJnz561blNRUXFDgVEoFACqL/dNRESWk55TgilfH4LBKDCiWxCm3RUhdSSiWkl6Eb/4+HiMGzcO0dHR6NGjBxYuXIjy8nJMmDABADB27FgEBQVh7ty5AIChQ4figw8+QNeuXRETE4OMjAzMmjULQ4cONZUcIiJqeHkllXhi5QGUafWICfPGvBGdeOVhslqSlptRo0bhypUrmD17NnJyctClSxds2bLFNMk4KyurxkjNzJkzIZPJMHPmTFy8eBHNmjXD0KFD8fbbb0v1EYiI7F6FTo+JXxzEpeJKtGzmik/GREHpZFNTNsnByISDHc8pKSmBRqNBcXExPDw8pI5DRGTVDEaBp1cdxK9pefB2VeLHZ3shpClvgkmNz5yf36zeRERUKyEE3tyQil/T8qB0qj7lm8WGbAHLDRER1eqD7afw+Z+ZAIAPH+6CqBAvaQMR1RHLDRER3WDpzgx8tCMDAPD60HYY0ilA4kREdSfphGIiIrIuQggs230W7209CQCYPigS4+8MkzgVkXlYboiICED1/aLe2piGFXvOAQBeiG2Fyf3CJU5FZD6WGyIiwtVyHeK/O4KdJ68AAF4b3BaT+raUOBVR/bDcEBE5uJ3peXjtx+O4VFwJlZMc8x/sxNsqkE1juSEiclApl4qx6NfT2JZafY+/MB9XLH20G9oF8hpgZNtYboiIHMjl4mvYdfIK1h+5hKSzBQAAhVyGJ+4MxQuxreGq4o8Fsn38W0xEZGeulutwrqAc2YUVuHD1Gi5crUB24TWcyy/HxaJrpvXkMuC+ToGYMiACbfzdJUxM1LBYboiIbJgQAimXSrAzPQ97zxXgZE4Z8su0N11fLgM6BGkwqEMAhnYOQHMvl0ZMS9Q4WG6IiGyQwSjw0+GLWLHnHFIuldzwfIBGjWAvFzT3boLmXi4I9mqCYG8XdAjSwI2HnsjO8W84EZGNOZx1FbN+PoETF6tLjcpJjn6tm6Fv62boEKRBhK8bCww5NP7tJyKyEUIIfPrbWczfehIGo4C72gmT+4Xj0R4t4OWqlDoekdVguSEisgFVBiNe+u4o1h+9BAAY1jkQs4e
2g4+bSuJkRNaH5YaIyMpp9QZM/eYwtqfmwlkhQ8LQ9ngspgVkMpnU0YisEssNEZEVMxoFXlxzBNtTc6F0kuOTMVEY0MZX6lhEVo3lhojIir27JR2bjudAqZDjs3HR6NOqmdSRiKyeXOoARERUu7XJF/DJb2cBAPMf7MRiQ1RHLDdERFboVG4pZv50HADw/N2tMLwrb2RJVFcsN0REVuaazoApXx9CZZURfVr54Pm7W0kdicimsNwQEVmZxTtO43ReGXzdVfhwVBfI5TwrisgcLDdERFbkVG4plv81z+btBzryOjZE9cByQ0RkJYxGgdd+PA69UWBgOz/c085P6khENonlhojISqxNvoADmVfholTg9WHtpY5DZLNYboiIrEBBmRbvbE4DAMTf0xqBnk0kTkRku1huiIiswNzN6SiqqELbAA+M7xUqdRwim8ZyQ0Qksb1nC7A2+QJkMuDtBzrAScFvzUS3g/+CiIgkpNMbMfOnEwCAR3u0QLcWXhInIrJ9LDdERBJa/vtZZOSVwcdNif+Li5Q6DpFdYLkhIpLI+YJyLE48DQCYdV87aFycJU5EZB9YboiIJCCEwOyfU6DVG9E7wgfDOgdKHYnIbrDcEBFJYNPxHOw+dQVKJzneHN4BMhlvsUDUUFhuiIgaWUllFeb8kgIAeLZ/OMJ8XCVORGRfWG6IiBrZB9tOIa9UizAfV0zuFy51HCK7w3JDRNSIjmYX4cukTADAW8M7QO2skDYQkR1iuSEiaiQ6vRGv/HAMRgE80DUId0b4SB2JyC6x3BARNZKPd51Bek4pvF2VmHVfO6njENktlhsiokZwKrcUS3ZWX9Pm9WHt4e2qlDgRkf1iuSEisjC9wYh/rT2GKoNAbFtfDO0UIHUkIrvGckNEZGGLEk/jaHYR3FVOvKYNUSNguSEisqCkMwVYsjMDAPDOiI4I0DSROBGR/WO5ISKykAtXKzDt20MQAhgVHYyhvMUCUaNguSEisoByrR5PfnEQ+WU6tA3wQMIwnh1F1FhYboiIGlhllQGTv0pGek4pfNxU+M+4aLgonaSOReQwWG6IiBpQZZUBk748iN9P58NFqcCnY6MQ5Ml5NkSNib9KEBE1kEtF1/D0qmQcv1iMJs4KrBzfHd1aeEkdi8jhsNwQEd0mIQTWH72EN35JRUG5Dl4uzvhkTDR6hHlLHY3IIbHcEBHVk9EosPvUFXy8+wz2nysEALQN8MCnY6IQ7O0icToix8VyQ0RUR5VVBmTklSE9pxSHs64iMS0POSWVAAClkxzTBkTgqX4toXLinb6JpMRyQ0RUi2s6Aw5nX8WJi8U4dqEYaZdLkFlQAYNR1FjPXeWEUd2DMaF3GCcOE1kJlhsior8IIbAnowBf7zuPXSev4FqV4YZ1PF2c0cbPHW0DPNCvdTP0DG8KtTNHaoisCcsNERGAo9lFSFifgiPZRaZlARo1ugR7omNzDdoFeKBtgAd83VW8NxSRlWO5ISKHZjAKLPz1FP696wwMRgGVkxyPdA/GQ9HBaB/owSJDZINYbojIYVVWGfDct4exLTUXADC0cyBm3dcWvu5qiZMR0e1guSEih1Sh02Pciv04kHkVSic53nuwE+7vEiR1LCJqACw3RORw9AYjpn1zGAcyr8JD7YT/jOvOC+4R2RGWGyJyOHN+SUVieh5UTnKsnNAdUSEsNkT2hDfOJCKHsv7oJazaex4yGbDoka4sNkR2iOWGiBxGZn45Xl13HAAwdUAE7u3gL3EiIrIElhsicghGo8DL3x9FmVaPHqHeeP7uVlJHIiILYbkhIofw9f4sHDx/Fa5KBT58pAucFPz2R2Sv+K+biOxeTnEl5m9OBwD8K64N7wFFZOdYbojI7s3dnIZSrR5dgj0xpmeo1HGIyMJYbojIriWfv4qfj1yCTAa8NbwDFHLeToHI3rHcEJHdMhoF3tyQCgB4OCoYHYI0EiciosYgeblZunQpQkNDoVarERMTg/37999y/aKiIky
ZMgUBAQFQqVRo3bo1Nm3a1EhpiciWbD6RgyPZRXBVKvBSXGup4xBRI5H0CsVr1qxBfHw8li1bhpiYGCxcuBBxcXE4efIkfH19b1hfp9Phnnvuga+vL9auXYugoCCcP38enp6ejR+eiKza9bt9A8CTfVryZphEDkTScvPBBx9g0qRJmDBhAgBg2bJl2LhxI1asWIHp06ffsP6KFStQWFiIP//8E87OzgCA0NDQxoxMRDZi4/HLOJ1XBg+1Eyb2CZM6DhE1IskOS+l0OiQnJyM2Nva/YeRyxMbGIikpqdZt1q9fj549e2LKlCnw8/NDhw4d8M4778BgMNz0fbRaLUpKSmo8iMi+GYwCi/4atZnUpyU81M4SJyKixiRZucnPz4fBYICfn1+N5X5+fsjJyal1m7Nnz2Lt2rUwGAzYtGkTZs2ahffffx9vvfXWTd9n7ty50Gg0pkdwcHCDfg4isj6/HL2EM1fK4enijPF3hkodh4gameQTis1hNBrh6+uLTz/9FFFRURg1ahRee+01LFu27KbbzJgxA8XFxaZHdnZ2IyYmosamNxixKPE0gOpRG3eO2hA5HMnm3Pj4+EChUCA3N7fG8tzcXPj7134zu4CAADg7O0OhUJiWtW3bFjk5OdDpdFAqlTdso1KpoFKpGjY8EVmtn49cwrn8cni7KjGuV6jUcYhIApKN3CiVSkRFRSExMdG0zGg0IjExET179qx1mzvvvBMZGRkwGo2mZadOnUJAQECtxYaIHIveYMTiHdWjNk/1bQk3laTnTBCRRCQ9LBUfH4/ly5fjiy++QFpaGp555hmUl5ebzp4aO3YsZsyYYVr/mWeeQWFhIZ5//nmcOnUKGzduxDvvvIMpU6ZI9RGIyIqsO3wR5wsq0NRVibE9Q6SOQ0QSkfTXmlGjRuHKlSuYPXs2cnJy0KVLF2zZssU0yTgrKwty+X/7V3BwMLZu3YoXX3wRnTp1QlBQEJ5//nm88sorUn0EIrISVQYjPvpr1GZyv3C4KDlqQ+SoZEIIIXWIxlRSUgKNRoPi4mJ4eHhIHYeIGsjq/VmYvu44fNxU+P3/BqCJUvHPGxGRzTDn57dNnS1FRFQbnd6Ij3ZkAACe6R/OYkPk4FhuiMjmfZ+cjYtF1+DrrsJjMS2kjkNEEmO5ISKbptUbsOSvUZtn+4dD7cxRGyJHx3JDRDZt9f5sXC6uhL+HGo/04KgNEbHcEJENK9fqTXNtptwVwVEbIgLAckNENmzlnnPIL9MipKkLHunO+8YRUTWWGyKySVfLdfhk91kAQPw9reGs4LczIqrG7wZEZJOW7T6DUq0ekf7uGNopUOo4RGRFWG6IyOZcLLqGz//MBAD8371tIJfLpA1ERFaF5YaIbM47m9Kg1RvRI8wbA9r4Sh2HiKwMyw0R2ZQ/z+Rj47HLkMuA14e2h0zGURsiqonlhohshk5vxJz1qQCAx2JC0C6Q94cjohux3BCRzVi6MwMnc0vh7arESwNbSx2HiKwUyw0R2YSUS8VYurP6gn1v3N8eni5KiRMRkbVyqs9GWq0W+/btw/nz51FRUYFmzZqha9euCAsLa+h8REQo1+rx4poj0BsFBnXwx5COAVJHIiIrZla52bNnDxYtWoRffvkFVVVV0Gg0aNKkCQoLC6HVatGyZUs89dRTmDx5Mtzd3S2VmYgciBAC09cdx6ncMvi6q/DG/R04iZiIbqnOh6WGDRuGUaNGITQ0FNu2bUNpaSkKCgpw4cIFVFRU4PTp05g5cyYSExPRunVrbN++3ZK5ichBfPLbWfxy9BKc5DIsfawbmrmrpI5ERFauziM3Q4YMwQ8//ABnZ+dan2/ZsiVatmyJcePGITU1FZcvX26wkETkmL7edx7zNqcDAF4b0hbdQ70lTkREtkAmhBBSh2hMJSUl0Gg0KC4uhocHTyMlskZCCHz2xzm8vSkNQgDP9A/HK/dGSh2LiCRkzs/vek0oJiKylDKtHq+vT8Ha5AsAgPG9QvF
/cW0kTkVEtqRBy83Ro0fRrVs3GAyGhnxZInIAeoMRG45dxtzNacgt0UIuA14b0g5P3BnKCcREZJYGH7lxsKNcRHQbSiqrcDCzEPvOFuKXo5dwqbgSABDS1AVzH+iIXhE+EickIltkVrkZMWLELZ8vLi7mb1hEdFNFFTrsO1ddZvadK0Da5RIY//b7kI+bEuN7heLJPi2hdlZIF5SIbJpZ5eaXX37BPffcAz8/v1qf5+EoIvpfV8t1WJt8AZtPXMaR7KIaZQYAQpu6ICasKXpFNEVce3+WGiK6bWaVm7Zt22LkyJGYOHFirc8fOXIEGzZsaJBgRGTbiq9VYeGvp/DNvixo9UbT8ghfN8SEeSOmZVPEhHnDz0MtYUoiskdmlZuoqCgcOnTopuVGpVKhRYsWDRKMiGzXtpQcTF93HIXlOgBA+0APPNKjBe6O9EWgZxOJ0xGRvTPrOjdarRYGgwEuLi6WzGRRvM4NkeUYjQILfz2FxTuqb3AZ4euG2fe1Q59WPpyPR0S3xWLXuVGpeNlzIqqdEAKz15/AV3uzAFRfn+a1IW3hrKjzXV6IiBpEnctNeXk5XF1d6/zC5q5PRLZLCIG3Nqbhq71ZkMmAd0d0wsPdg6WORUQOqs6/UkVERGDevHm3vGeUEALbt2/HoEGDsHjx4gYJSETW75v9Wfjsj3MAWGyISHp1HrnZtWsXXn31Vbz++uvo3LkzoqOjERgYCLVajatXryI1NRVJSUlwcnLCjBkz8PTTT1syNxFZicNZV/H6+hQAwP/d24bFhogkZ/aNM7OysvD999/j999/x/nz53Ht2jX4+Piga9euiIuLw6BBg6BQWO91KjihmKjhlGn1iPvwN1wsuoa49n5Y9ngUJw4TkUWY8/ObdwUnonp77cfj+HpfFoK9m2DTc33grnaWOhIR2Slzfn7X6zSGnTt31isYEdmPPzPy8fW+6jOj3h3ZicWGiKxGvcrNvffei/DwcLz11lvIzs5u6ExEZOV0eiNm/nQCAPBYTAv0CucNLonIetSr3Fy8eBFTp07F2rVr0bJlS8TFxeG7776DTqdr6HxEZIW+TMrE2fxy+Lgp8cqgSKnjEBHVUK9y4+PjgxdffBFHjhzBvn370Lp1azz77LMIDAzEc889h6NHjzZ0TiKyEvllWixKPA0A+FdcG3jwcBQRWZnbvnRot27dMGPGDEydOhVlZWVYsWIFoqKi0KdPH6SkpDRERiKyIkt2ZKC0Uo8OQR54MIqnfROR9al3uamqqsLatWsxePBghISEYOvWrViyZAlyc3ORkZGBkJAQPPTQQw2ZlYgkll+mxeoD1ZOIX7k3Ego5T/smIutj1r2lrps2bRq+/fZbCCEwZswYzJ8/Hx06dDA97+rqigULFiAwMLDBghKR9D774xwqq4zoHOyJ3hGcRExE1qle5SY1NRUfffQRRowYcdObafr4+PCUcSI7UlxRhVVJ5wEAUwdE8GJ9RGS16nVYKiEhAQ899NANxUav1+O3334DADg5OaFfv363n5CIrMIXSZko0+oR6e+OuyN9pY5DRHRT9So3AwYMQGFh4Q3Li4uLMWDAgNsORUTWpVyrx4o91TfGfHZABOSca0NEVqxe5UYIUeuQdEFBAVxdXW87FBFZl2/2ZaGoogqhTV0wpGOA1HGIiG7JrDk3I0aMAADIZDKMHz++xmEpg8GAY8eOoVevXg2bkIgkVVllwKe/nwUAPNM/nGdIEZHVM6vcaDQaANUjN+7u7mjSpInpOaVSiTvuuAOTJk1q2IREJKnvky/gSqkWgRo1HujaXOo4RET/yKxys3LlSgBAaGgoXn75ZR6CIrJzVQYjlu06AwB4qm9LKJ1u+7qfREQWV69TwRMSEho6BxFZoZ+PXMLFomvwcVPikR4tpI5DRFQndS433bp1Q2JiIry8vNC1a9dbXuPi0KFDDRKOiKRjMAr8e1cGAGBi75ZQOyskTkREVDd1Ljf333+/aQLx8OHDLZWHiKz
ElhM5OHulHB5qJzx+B0dtiMh2yIQQQuoQjamkpAQajQbFxcXw8PCQOg6RVRJCYPDiP5B2uQTP3d0K8fe0ljoSETk4c35+c3YgEd1g58k8pF0ugYtSgQm9QqWOQ0RkljoflvLy8qrzvWRqu3oxEdkGIQSW7Kiea/P4HSHwclVKnIiIyDx1LjcLFy60YAwishZJZwtwKKsISic5nuwdJnUcIiKz1bncjBs3zpI5iMhKLN1ZPWozKjoYvh5qidMQEZmvzuWmpKTENIGnpKTklutyoi6RbTqcdRV7MgrgJJfh6X4tpY5DRFQvZs25uXz5Mnx9feHp6Vnr/JvrN9Q0GAwNGpKIGsdHf821Gd41CM29XCROQ0RUP3UuNzt27IC3tzcAYOfOnRYLRETSOJJdhB3peVDIZZgyIELqOERE9VbnctOvX79a/0xE9mHhr6cAAMO7BCHMh/eNIyLbVa97SwHA1atX8dlnnyEtLQ0A0K5dO0yYMME0ukNEtuNQ1lXsOnkFCrkMz93NURsism31uojfb7/9htDQUCxevBhXr17F1atXsXjxYoSFheG3335r6IxEZGELfz0NABjRNQghTTlqQ0S2rV4jN1OmTMGoUaPw8ccfQ6GovpmewWDAs88+iylTpuD48eMNGpKILOdAZiF+O1U9ajP1Lo7aEJHtq9fITUZGBl566SVTsQEAhUKB+Ph4ZGRkNFg4IrIso1HgrY3Vh5Yfjm7OURsisgv1KjfdunUzzbX5u7S0NHTu3Pm2QxFR4/jl2CUczS6Cq1KBF3lzTCKyE3U+LHXs2DHTn5977jk8//zzyMjIwB133AEA2Lt3L5YuXYp58+Y1fEoianAVOj3mbzkJAHimfzh83Xk1YiKyDzIhhKjLinK5HDKZDP+0urVfxM+cW6YT2bO3N6Zi+e/nEOTZBL/G90MTpeKfNyIikog5P7/rfFjq3LlzOHv2LM6dO3fLx9mzZ80OvHTpUoSGhkKtViMmJgb79++v03arV6+GTCbD8OHDzX5PIkd2/EIxPvvjHADgreEdWGyIyK7U+bBUSEiIRQKsWbMG8fHxWLZsGWJiYrBw4ULExcXh5MmT8PX1vel2mZmZePnll9GnTx+L5CKyV5VVBvxr7VEYBTC0cyAGRN783xkRkS2q82Gp2qSmpiIrKws6na7G8mHDhtX5NWJiYtC9e3csWbIEAGA0GhEcHIxp06Zh+vTptW5jMBjQt29fPPHEE/j9999RVFSEn376qU7vx8NS5Ohe+/E4vt6XhaauSmx5oS+auaukjkRE9I/M+fldr+vcnD17Fg888ACOHz9eYx7O9Ztp1nXOjU6nQ3JyMmbMmGFaJpfLERsbi6SkpJtu98Ybb8DX1xcTJ07E77//Xp+PQOSQfj5yEV/vy4JMBnw4qguLDRHZpXqdCv78888jLCwMeXl5cHFxQUpKCn777TdER0dj165ddX6d/Px8GAwG+Pn51Vju5+eHnJycWrf5448/8Nlnn2H58uV1eg+tVouSkpIaDyJHtCcjH//6vvqsxyn9I9C3dTOJExERWUa9yk1SUhLeeOMN+Pj4QC6XQy6Xo3fv3pg7dy6ee+65hs5oUlpaijFjxmD58uXw8fGp0zZz586FRqMxPYKDgy2Wj8haJZ8vxFNfHoTOYMTgjv68pg0R2bV6HZYyGAxwd3cHAPj4+ODSpUto06YNQkJCcPLkyTq/jo+PDxQKBXJzc2ssz83Nhb+//w3rnzlzBpmZmRg6dKhpmdForP4gTk44efIkwsPDa2wzY8YMxMfHm74uKSlhwSGHsvHYZbz43RHo9Eb0Cm+KD0d1gUIukzoWEZHF1KvcdOjQAUePHkVYWBhiYmIwf/58KJVKfPrpp2jZsmWdX0epVCIqKgqJiYmm07mNRiMSExMxderUG9aPjIy84b5VM2fORGlpKRYtWlRraVGpVFCpOK+AHE9pZRXmbk7HN/uyAACxbX2x6JGuUDnxtG8ism/1KjczZ85EeXk
5gOrJvffddx/69OmDpk2bYs2aNWa9Vnx8PMaNG4fo6Gj06NEDCxcuRHl5OSZMmAAAGDt2LIKCgjB37lyo1Wp06NChxvaenp4AcMNyIkdVptXj231Z+PT3s7hSqgUATOwdhlcHt+WIDRE5hHqVm7i4ONOfIyIikJ6ejsLCQnh5eZnOmKqrUaNG4cqVK5g9ezZycnLQpUsXbNmyxTTJOCsrC3J5vaYGETmMC1crkJiWh8T0POw9UwCdofpwbUhTF8wb0Qk9w5tKnJCIqPHc1nVuACA7OxsAbGYeC69zQ/ZACIHD2UX4NTUXiWl5OJlbWuP58GaueKpvSzzQtTmUTvzlgIhsn8Wvc6PX6zFnzhwsXrwYZWVlAAA3NzdMmzYNCQkJcHZ2rs/LEtE/KK6owpdJmfg++QKyCitMy+UyIDrUG3dH+uLutn4Ib+Zq9igqEZG9qFe5mTZtGtatW4f58+ejZ8+eAKpPD3/99ddRUFCAjz/+uEFDEjk6nd6IT3afwSe/nUWZVg8AcFUqcFdbP8S29UW/1s3g6aKUOCURkXWo12EpjUaD1atXY9CgQTWWb9q0CaNHj0ZxcXGDBWxoPCxFtiY9pwQvrD6C9JzqQ0+R/u54ul9L3Ns+gDe8JCKHYfHDUiqVCqGhoTcsDwsLg1LJ3x6JGsruU1cw5etDKNPq4e2qxOz72mFY50DIedYTEdFN1Wum4dSpU/Hmm29Cq9Walmm1Wrz99tu1Xp+GiMy34dglPPH5AZRp9bijpTe2vdgXw7sGsdgQEf2DOo/cjBgxosbXv/76K5o3b47OnTsDAI4ePQqdToe77767YRMSOaDdp67gxTVHYDAKjOgahHkjO/GsJyKiOqpzudFoNDW+HjlyZI2vbeVUcCJrl3a5BJNXJaPKIDC0cyAWPNSZozVERGaoc7lZuXKlJXMQEapvmfDs14dwrcqA3hE+eJ/FhojIbPWaUHzdlStXTDfKbNOmDZo1a9YgoYgckRACM9Ydx7n8cgRq1PhodFceiiIiqod6fecsLy/HE088gYCAAPTt2xd9+/ZFYGAgJk6ciIqKin9+ASK6wYZjl7Hh2GU4yWVY8lg3eLnyzEMiovqoV7mJj4/H7t278csvv6CoqAhFRUX4+eefsXv3brz00ksNnZHI7hWW6/D6+hQAwNS7ItCthZfEiYiIbFe9Dkv98MMPWLt2Lfr3729aNnjwYDRp0gQPP/wwr1BMZKa3NqSioFyHNn7ueLZ/hNRxiIhsWr1GbioqKkx37f47X19fHpYiMtOhrKtYd/giZDLg3Qd5yjcR0e2q13fRnj17IiEhAZWVlaZl165dw5w5c0z3miKifyaEwFsbUgEAD3Zrji7BntIGIiKyA/U6LLVw4ULce++9N1zET61WY+vWrQ0akMiebTx+GYeyitDEWYGX49pIHYeIyC7Uq9x07NgRp0+fxtdff4309HQAwOjRo/HYY4+hSZMmDRqQyF5VVhkwb3P1v5/J/cLh56GWOBERkX0wu9xUVVUhMjISGzZswKRJkyyRicghfJmUiQtXr8HfQ41JfcOkjkNEZDfMnnPj7OxcY64NEZmvQqfHst1nAQDxA1vDRXlb19MkIqK/qdeE4ilTpuDdd9+FXq9v6DxEDmFV0nkUlusQ0tQFI7oGSR2HiMiu1OvXxQMHDiAxMRHbtm1Dx44d4erqWuP5devWNUg4IntUodPjk9+qR22m3dUKTgqe+k1E1JDqVW48PT1vuCs4EdXN30dthncJlDoOEZHdMavcGI1GvPfeezh16hR0Oh3uuusuvP766zxDiqiOOGpDRGR5Zn1nffvtt/Hqq6/Czc0NQUFBWLx4MaZMmWKpbER2h6M2RESWZ1a5+fLLL/Hvf/8bW7duxU8//YRffvkFX3/9NYxGo6XyEdkNjtoQETUOs767ZmVlYfDgwaavY2NjIZPJcOnSpQYPRmRvvt6bxVEbIqJGYFa50ev1UKtrXkXV2dkZVVVVDRqKyN5UVhl
MozZT+kdw1IaIyILMmlAshMD48eOhUqlMyyorKzF58uQap4PzVHCimlbvz0J+mRZBnk0wnNe1ISKyKLPKzbhx425Y9vjjjzdYGCJ7pNUbTFcjntw/HEonjtoQEVmSWeVm5cqVlspBZLfWJl9ATkkl/DxUeCiqudRxiIjsHn+FJLKgKoMRH+86AwB4um841M4KiRMREdk/lhsiC/rp8EVcuHoNPm5KjO7RQuo4REQOgeWGyEIMRoF//zVq82Sflmii5KgNEVFjYLkhspANxy7hXH45PF2c8fgdIVLHISJyGCw3RBZQZTBi4a+nAQAT7wyDm6pe96glIqJ6YLkhsoDvDmbjXH45vF2VGH9nqNRxiIgcCssNUQO7pjNg0V+jNlMHRMBd7SxxIiIix8JyQ9TAVv55DnmlWjT3aoLH7uAZUkREjY3lhqgBFVXoTNe1ib+nNVROPEOKiKixsdwQNaAPt59CaaUekf7uuL8L7yFFRCQFlhuiBpJyqRir9p4HAMy+rx0UcpnEiYiIHBPLDVEDMBoFZv+cAqMA7usUgF4RPlJHIiJyWCw3RA3gy6RMJJ+/ChelAq8NaSt1HCIih8ZyQ3Sbzl4pw7wt6QCA6YMiEaBpInEiIiLHxnJDdBu0egNe/O4oKquM6B3hg8djeJsFIiKpsdwQ3YbX16fiaHYRPNROmP9gJ8g5iZiISHIsN0T1tCopE9/uz4JMBiwe3RWBnjwcRURkDVhuiOph3aELmL0+BQDw8sA26N/GV+JERER0HcsNkZm+O5iNl78/CiGAx+9ogWf7h0sdiYiI/sZJ6gBEtqLKYMS8zen47I9zAIBR0cF4Y1gHyGScZ0NEZE1Ybojq4HDWVcxYdxzpOaUAgOfvboXn727FCcRERFaI5YboJoxGgYPnr+LT387i17RcAICXizPeeaAjBnUMkDgdERHdDMsN0d8IIZByqQS/HL2EDccu42LRNQCATAaM6Nocrw1pC29XpcQpiYjoVlhuiABkF1bg++QL2HD0Es7ml5uWu6mcMLijP57uF47wZm4SJiQiorpiuSGHZTQKbE/LxVd7z+P30/mm5SonOe5u64thnQPRv40v1M4KCVMSEZG5WG7IISWdKcDbm1Jx4mKJaVmfVj4Y0S0I97Tzh5uK/zSIiGwVv4OTQ6nQ6TF3UzpW7T0PAHBVKjC2Vyge7dECwd4uEqcjIqKGwHJDDuNi0TVM/PyA6XTuR2Na4KV7WqOpm0riZERE1JBYbsghpFwqxviVB3ClVAsfNxU+HNUZfVo1kzoWERFZAMsN2b1TuaUY89l+FJbrEOnvjv+Mi0ZzLx6CIiKyVyw3ZNeyCyvw2H/2obBch07NNfjqyRh4qJ2ljkVERBbEG2eS3SrX6jHpy4O4UqpFpL87vnyiB4sNEZEDYLkhuySEQPx3R5CeUwofNxVWTugOTxdeWZiIyBGw3JBd+vzPTGxNyYVSIccnY6IQoGkidSQiImokLDdkd9Iul2DupnQAwKuDIxEV4iVxIiIiakwsN2RXdHojXlh9BDqDEXdF+mJcr1CpIxERUSNjuSG7smz3GZzMLUVTVyXee7ATZDKZ1JGIiKiRWUW5Wbp0KUJDQ6FWqxETE4P9+/ffdN3ly5ejT58+8PLygpeXF2JjY2+5PjmOjLwyLNmRAQCYPbQdrzxMROSgJC83a9asQXx8PBISEnDo0CF07twZcXFxyMvLq3X9Xbt2YfTo0di5cyeSkpIQHByMgQMH4uLFi42cnKyJ0Sjw6rrj0BmM6N+mGYZ1DpQ6EhERSUQmhBBSBoiJiUH37t2xZMkSAIDRaERwcDCmTZuG6dOn/+P2BoMBXl5eWLJkCcaOHfuP65eUlECj0aC4uBgeHh63nZ+sw7f7szBj3XG4KBXY9mJfXoGYiMjOmPPzW9KRG51Oh+TkZMTGxpqWyeVyxMbGIikpqU6vUVFRgaqqKnh7e1sqJlm54ooqzN9SfXZU/D2tWWyIiBycpLdfyM/Ph8F
ggJ+fX43lfn5+SE9Pr9NrvPLKKwgMDKxRkP5Oq9VCq9Wavi4pKal/YLJKCxNP4WpFFVr7uWE8z44iInJ4ks+5uR3z5s3D6tWr8eOPP0KtVte6zty5c6HRaEyP4ODgRk5JlnQ6txRfJp0HAMy+rz2cFDb9V5qIiBqApD8JfHx8oFAokJubW2N5bm4u/P39b7ntggULMG/ePGzbtg2dOnW66XozZsxAcXGx6ZGdnd0g2Ul6Qgi8sSEVBqPAPe380LuVj9SRiIjICkhabpRKJaKiopCYmGhaZjQakZiYiJ49e950u/nz5+PNN9/Eli1bEB0dfcv3UKlU8PDwqPEg+7AjPQ+/n86HUiHHa4PbSh2HiIishKRzbgAgPj4e48aNQ3R0NHr06IGFCxeivLwcEyZMAACMHTsWQUFBmDt3LgDg3XffxezZs/HNN98gNDQUOTk5AAA3Nze4ublJ9jmocen0Rry5IRUAMKF3KEJ9XCVORERE1kLycjNq1ChcuXIFs2fPRk5ODrp06YItW7aYJhlnZWVBLv/vANPHH38MnU6HBx98sMbrJCQk4PXXX2/M6CShz/88h8yCCvi4qTB1QITUcYiIyIpIfp2bxsbr3Ni+K6VaDFiwC2VaPeY/2AkPR3OSOBGRvbOZ69wQ1cf8Leko0+rRqbkGD3ZrLnUcIiKyMiw3ZFOOZBfh++QLAIDXh7WHXM4bYxIRUU0sN2QzjEaBhJ9PAABGdmuObi28JE5ERETWiOWGbMbaQxdw9EIx3FROeOXeNlLHISIiK8VyQzahpPK/94967u4I+HrUfkVqIiIilhuyCQu3n0Z+mQ4tfVwxvleY1HGIiMiKsdyQ1TuSXYTP/zwHAJg9tB2UTvxrS0REN8efEmTVtHoD/m/tURgFMLxLIPq38ZU6EhERWTmWG7JqS3eewancMjR1VWL20PZSxyEiIhvAckNWK+1yCf69MwMAMOf+9vB2VUqciIiIbAHLDVmlCp0e0749DL1RYGA7PwzpGCB1JCIishEsN2SVXl+fgoy8Mvh5qDB3REfIZLwSMRER1Q3LDVmdb/dn4buDFyCTAQtHdUVTN5XUkYiIyIaw3JBV2ZORj1k/Vd9iIT62NXqGN5U4ERER2RqWG7Iaxy8UY/JXydAbBe7vEoipd0VIHYmIiGwQyw1ZhZRLxXj8s30ordSje6gX3h3ZifNsiIioXpykDkC0+9QVTP36EEq1enRr4YmVE3pA7ayQOhYREdkolhuSjMEosPz3s5i/JR1GAfQI9cZ/xkfDTcW/lkREVH/8KUKSyMgrxavrTmB/ZiEAYGS35nhnRAeonDhiQ0REt4flhhrV6dxSrPwzE2sOZMNgFHBVKjDzvnZ4pHsw59gQEVGDYLkhi6usMmDT8cv4dn8WDmReNS0f2M4Ps+5rh2BvFwnTERGRvWG5IYs5mVOKb/dnYd2hCyip1AMAFHIZ7o70xcTeYYhpyWvYEBFRw2O5oQZVZTBi0/HL+OLPTBzKKjItD/JsgtE9gvFQdDD8PNTSBSQiIrvHckMNokKnx5dJ5/H5nkzklFQCAJzkMsS29cPomBboE+EDuZxzaoiIyPJYbui2CCHwffIFLNh6EnmlWgCAj5sKY+4IweiYYPi6c5SGiIgaF8sN1Vt2YQVe+eEY/jxTAABo7tUEz9/dCsO6BPKUbiIikgzLDdXLtpQcvPT9UZRW6qF2luPF2NYYf2coSw0REUmO5YbMIoTAosTTWPjraQBAtxae+ODhLgj1cZU4GRERUTWWG6ozg1Hg9fUpWLX3PADgiTvDMGNwJJwVvP8qERFZD5YbqpMqgxEvrjmCDccuQyYD5gxrj7E9Q6WORUREdAOWG/pHBqNA/HdHseHYZTgrZPjg4S4Y2jlQ6lhERES1YrmhWxJCYNbPJ/DL0UtwVsjw6ZhoDIj0lToWERHRTXGyBN3Se1tP4pt9WZDJgA9HdWGxISIiq8dyQze15kAW/r3rDADgnQc64r5OPBRFRET
Wj+WGarX/XCFm/nQCAPD83a0wukcLiRMRERHVDcsN3SC7sAKTv0pGlUFgSMcAPH93K6kjERER1RnLDdVQptXjyS8OorBchw5BHljwUGfe8JKIiGwKyw2ZGIwCL6w+jJO5pfB1V2H52Gg0UfJ2CkREZFtYbsjkva0n8WtaHpROcnw6NhoBmiZSRyIiIjIbyw0BANYduoBlu6vPjHrvwU7oEuwpbSAiIqJ6YrkhHMwsxPQfjgMApgwIx/1dgiROREREVH8sNw4uu7ACT69Khs5gRFx7P7x0TxupIxEREd0WlhsHdv3MqIJyHdoFeODDUV14ZhQREdk8lhsHpTcY8dy31WdGNXNX4bPx0XBR8lZjRERk+1huHJAQAtPXHceO9DyonORYzjOjiIjIjrDcOKC5m9OxNvkC5DLgo9FdeWYUERHZFZYbB7Nkx2l8+ttZAMC8kZ0wsL2/xImIiIgaFidZOAghBOZvPYmP/7rL96uDI/FwdLDEqYiIiBoey40DMBoFEtanYNXe8wCqi81TfcMlTkVERGQZLDd2rrSyCi+uOYpf03IhkwFvD++IR2NaSB2LiIjIYlhu7Fja5RJM+/YwMvLKoHSSY8FDnTGsc6DUsYiIiCyK5cYOVRmMWPHHOby/7RR0BiP8PdRYNiaKZ0UREZFDYLmxM7+duoI3NqQiI68MABDb1hfzRnaCj5tK4mRERESNg+XGDuj0RmxLzcGqpPPYd64QAODtqsT0eyPxUHRzyGS8pQIRETkOlhsbdrn4Gr7dl4VvD2TjSqkWAOAkl2Fsz1A8H9sKmibOEickIiJqfCw3NsZoFPjzTAFW7c3Er2l5MBgFAKCZuwqjuwfjkR4tEOjJWykQEZHjYrmxEcUVVfg+ORtf78vCufxy0/I7Wnrj8TtCMLCdP5ROvOA0ERERy42VO3ulDP/54xzWHbqAyiojAMBN5YSR3YLw2B0haO3nLnFCIiIi68JyY6WSzxdi2e6z+DUtF6L6yBMi/d0xpmcIhncJgquK/+uIiIhqw5+QVuZkTine3ZKOHel5pmWxbf0wqU8YeoR588wnIiKif8ByYyVySyrxwbZT+D45G0ZRfdbTg1HN8WSflojwdZM6HhERkc1guZGY0Sjw1b7zmL/lJMq0egDAoA7++L97IxHm4ypxOiIiItvDciOh07mlmL7uOJLPXwUAdAn2xKz72iEqxEviZERERLaL5UYCWr0BS3eewce7MlBlEHBVKvB/90bi8TtCoJBzTg0REdHtYLlpZAcyCzH9h2M4c6X6WjWxbX3xxv0deOE9IiKiBsJy00hKKqswf0s6vtqbBQDwcVNhzrD2GNzRn2dAERERNSCWm0awLSUHs39OQU5JJQDgke7BmDGoLTQuvPcTERFRQ7OK6/UvXboUoaGhUKvViImJwf79+2+5/vfff4/IyEio1Wp07NgRmzZtaqSk5snML8ekLw/iqVXJyCmpRGhTF3wzKQbzRnZisSEiIrIQycvNmjVrEB8fj4SEBBw6dAidO3dGXFwc8vLyal3/zz//xOjRozFx4kQcPnwYw4cPx/Dhw3HixIlGTn5zheU6vLkhFfd8uBvbU3OhkMvwTP9wbHmhL3qF+0gdj4iIyK7JhLh+cX9pxMTEoHv37liyZAkAwGg0Ijg4GNOmTcP06dNvWH/UqFEoLy/Hhg0bTMvuuOMOdOnSBcuWLfvH9yspKYFGo0FxcTE8PDwa7oMAuFx8Dct/O4dv92fhWpUBANCvdTO8NqQt7wFFRER0G8z5+S3pnBudTofk5GTMmDHDtEwulyM2NhZJSUm1bpOUlIT4+Pgay+Li4vDTTz/Vur5Wq4VWqzV9XVJScvvBa7H5+GU8t/owqgzVXbFDkAdeGtgGA9r4WuT9iIiIqHaSlpv8/HwYDAb4+fnVWO7n54f09PRat8nJyal1/ZycnFrXnzt3LubMmdMwgW8hKtQLMpkMPcK8MGVABPq28uFZUERERBKw+7OlZsyYUWOkp6S
kBMHBwQ3+Pr7uaiTG90Owt0uDvzYRERHVnaTlxsfHBwqFArm5uTWW5+bmwt/fv9Zt/P39zVpfpVJBpVI1TOB/wGJDREQkPUnPllIqlYiKikJiYqJpmdFoRGJiInr27FnrNj179qyxPgBs3779pusTERGRY5H8sFR8fDzGjRuH6Oho9OjRAwsXLkR5eTkmTJgAABg7diyCgoIwd+5cAMDzzz+Pfv364f3338eQIUOwevVqHDx4EJ9++qmUH4OIiIishOTlZtSoUbhy5Qpmz56NnJwcdOnSBVu2bDFNGs7KyoJc/t8Bpl69euGbb77BzJkz8eqrr6JVq1b46aef0KFDB6k+AhEREVkRya9z09gseZ0bIiIisgxzfn5LfoViIiIioobEckNERER2heWGiIiI7ArLDREREdkVlhsiIiKyKyw3REREZFdYboiIiMiusNwQERGRXWG5ISIiIrsi+e0XGtv1CzKXlJRInISIiIjq6vrP7brcWMHhyk1paSkAIDg4WOIkREREZK7S0lJoNJpbruNw95YyGo24dOkS3N3dIZPJGvS1S0pKEBwcjOzsbN63yoK4nxsH93Pj4H5uPNzXjcNS+1kIgdLSUgQGBta4oXZtHG7kRi6Xo3nz5hZ9Dw8PD/7DaQTcz42D+7lxcD83Hu7rxmGJ/fxPIzbXcUIxERER2RWWGyIiIrIrLDcNSKVSISEhASqVSuoodo37uXFwPzcO7ufGw33dOKxhPzvchGIiIiKybxy5ISIiIrvCckNERER2heWGiIiI7ArLDREREdkVlhszLV26FKGhoVCr1YiJicH+/ftvuf7333+PyMhIqNVqdOzYEZs2bWqkpLbNnP2ckpKCkSNHIjQ0FDKZDAsXLmy8oDbOnP28fPly9OnTB15eXvDy8kJsbOw//v2naubs53Xr1iE6Ohqenp5wdXVFly5dsGrVqkZMa7vM/f583erVqyGTyTB8+HDLBrQj5uzrzz//HDKZrMZDrVZbNqCgOlu9erVQKpVixYoVIiUlRUyaNEl4enqK3NzcWtffs2ePUCgUYv78+SI1NVXMnDlTODs7i+PHjzdyctti7n7ev3+/ePnll8W3334r/P39xYcffti4gW2Uufv50UcfFUuXLhWHDx8WaWlpYvz48UKj0YgLFy40cnLbYu5+3rlzp1i3bp1ITU0VGRkZYuHChUKhUIgtW7Y0cnLbYu5+vu7cuXMiKChI9OnTR9x///2NE9bGmbuvV65cKTw8PMTly5dNj5ycHItmZLkxQ48ePcSUKVNMXxsMBhEYGCjmzp1b6/oPP/ywGDJkSI1lMTEx4umnn7ZoTltn7n7+u5CQEJabOrqd/SyEEHq9Xri7u4svvvjCUhHtwu3uZyGE6Nq1q5g5c6Yl4tmN+uxnvV4vevXqJf7zn/+IcePGsdzUkbn7euXKlUKj0TRSumo8LFVHOp0OycnJiI2NNS2Ty+WIjY1FUlJSrdskJSXVWB8A4uLibro+1W8/k/kaYj9XVFSgqqoK3t7elopp8253PwshkJiYiJMnT6Jv376WjGrT6ruf33jjDfj6+mLixImNEdMu1Hdfl5WVISQkBMHBwbj//vuRkpJi0ZwsN3WUn58Pg8EAPz+/Gsv9/PyQk5NT6zY5OTlmrU/1289kvobYz6+88goCAwNvKPD0X/Xdz8XFxXBzc4NSqcSQIUPw0Ucf4Z577rF0XJtVn/38xx9/4LPPPsPy5csbI6LdqM++btOmDVasWIGff/4ZX331FYxGI3r16oULFy5YLKfD3RWciG7fvHnzsHr1auzatcvyEwMdkLu7O44cOYKysjIkJiYiPj4eLVu2RP/+/aWOZhdKS0sxZswYLF++HD4+PlLHsXs9e/ZEz549TV/36tULbdu2xSeffII333zTIu/JclNHPj4+UCgUyM3NrbE8NzcX/v7+tW7j7+9v1vpUv/1M5rud/bxgwQLMmzcPv/76Kzp16mTJmDavvvtZLpcjIiICANC
lSxekpaVh7ty5LDc3Ye5+PnPmDDIzMzF06FDTMqPRCABwcnLCyZMnER4ebtnQNqohvkc7Ozuja9euyMjIsEREADwsVWdKpRJRUVFITEw0LTMajUhMTKzRSP+uZ8+eNdYHgO3bt990farffibz1Xc/z58/H2+++Sa2bNmC6Ojoxohq0xrq77PRaIRWq7VERLtg7n6OjIzE8ePHceTIEdNj2LBhGDBgAI4cOYLg4ODGjG9TGuLvtMFgwPHjxxEQEGCpmDwV3ByrV68WKpVKfP755yI1NVU89dRTwtPT03RK25gxY8T06dNN6+/Zs0c4OTmJBQsWiLS0NJGQkMBTwevA3P2s1WrF4cOHxeHDh0VAQIB4+eWXxeHDh8Xp06el+gg2wdz9PG/ePKFUKsXatWtrnNJZWloq1UewCebu53feeUds27ZNnDlzRqSmpooFCxYIJycnsXz5cqk+gk0wdz//L54tVXfm7us5c+aIrVu3ijNnzojk5GTxyCOPCLVaLVJSUiyWkeXGTB999JFo0aKFUCqVokePHmLv3r2m5/r16yfGjRtXY/3vvvtOtG7dWiiVStG+fXuxcePGRk5sm8zZz+fOnRMAbnj069ev8YPbGHP2c0hISK37OSEhofGD2xhz9vNrr70mIiIihFqtFl5eXqJnz55i9erVEqS2PeZ+f/47lhvzmLOvX3jhBdO6fn5+YvDgweLQoUMWzScTQgjLjQsRERERNS7OuSEiIiK7wnJDREREdoXlhoiIiOwKyw0RERHZFZYbIiIisissN0RERGRXWG6IiIjIrrDcEJFNGT9+PIYPHy51DCKyYrxxJhFZDZlMdsvnExISsGjRIvDao0R0Kyw3RGQ1Ll++bPrzmjVrMHv2bJw8edK0zM3NDW5ublJEIyIbwsNSRGQ1/P39TQ+NRgOZTFZjmZub2w2Hpfr3749p06bhhRdegJeXF/z8/LB8+XKUl5djwoQJcHd3R0REBDZv3lzjvU6cOIFBgwbBzc0Nfn5+GDNmDPLz8xv5ExORJbDcEJHN++KLL+Dj44P9+/dj2rRpeOaZZ/DQQw+hV69eOHToEAYOHIgxY8agoqICAFBUVIS77roLXbt2xcGDB7Flyxbk5ubi4YcflviTEFFDYLkhIpvXuXNnzJw5E61atcKMGTOgVqvh4+ODSZMmoVWrVpg9ezYKCgpw7NgxAMCSJUvQtWtXvPPOO4iMjETXrl2xYsUK7Ny5E6dOnZL40xDR7eKcGyKyeZ06dTL9WaFQoGnTpujYsaNpmZ+fHwAgLy8PAHD06FHs3Lmz1vk7Z86cQevWrS2cmIgsieWGiGyes7Nzja9lMlmNZdfPwjIajQCAsrIyDB06FO++++4NrxUQEGDBpETUGFhuiMjhdOvWDT/88ANCQ0Ph5MRvg0T2hnNuiMjhTJkyBYWFhRg9ejQOHDiAM2fOYOvWrZgwYQIMBoPU8YjoNrHcEJHDCQwMxJ49e2AwGDBw4EB07NgRL7zwAjw9PSGX89sika2TCV7qk4iIiOwIf0UhIiIiu8JyQ0RERHaF5YaIiIjsCssNERER2RWWGyIiIrIrLDdERERkV1huiIiIyK6w3BAREZFdYbkhIiIiu8JyQ0RERHaF5YaIiIjsCssNERER2ZX/B0DDGioZVcVyAAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "gamma_sm = 1.0\n", "evolution_result_decay = cudaq.evolve(\n", @@ -1469,18 +1239,10 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": null, "id": "09db2f53-c94f-4323-8f21-5c39fbc61dc7", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Probability of |1> from the gate-level simulation with noise: 0.852\n" - ] - } - ], + "outputs": [], "source": [ "cudaq.set_target('density-matrix-cpu')\n", "cudaq.set_random_seed(13)\n", @@ -1526,10 +1288,21 @@ "id": "6431aa03-378f-4e39-86ba-fd096691d167", "metadata": {}, "source": [ - "## Summary ##\n", + "## Conclusion\n", "\n", "Noise is the greatest challenge facing quantum computers. Accurate simulations can help us understand both the sources and impacts of noise to guide development of better hardware, algorithms, and QEC codes. You now know how to utilize CUDA-Q for noise modeling as well as a number of situations where noise modeling is useful. Scaling up any of these examples makes simulation much more challenging and requires the power of CUDA-Q and AI supercomputing to usher in new advancements to the field." 
] + }, + { + "cell_type": "markdown", + "id": "af69b90635ee4ef0", + "metadata": {}, + "source": [ + "**Related Notebooks:**\n", + "* [QEC Stabilizers](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) — covers the Steane code used in this notebook’s QEC exercises\n", + "* [QEC Decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb) — next notebook in the QEC101 series\n", + "* [VQE and GQE](https://github.com/NVIDIA/cuda-q-academic/blob/main/chemistry-simulations/vqe_and_gqe.ipynb) — explores the variational chemistry algorithm used in the noise impact analysis" + ] } ], "metadata": { @@ -1549,8 +1322,24 @@ "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.12.3" + }, + "learning_goals": { + "cfqt_domain": "QCS", + "cfqt_subdomain": [ + "QCS.ERR", + "QCS.SIM", + "QCS.SW" + ], + "cfqt_proficiency": "B1", + "qist_competency_areas": [ + "quantum_error_correction", + "quantum_software", + "quantum_chemistry", + "hpc_integration" + ], + "application_domain": "error_correction" } }, "nbformat": 4, "nbformat_minor": 5 -} +} \ No newline at end of file diff --git a/qec101/04_QEC_Decoders.ipynb b/qec101/04_QEC_Decoders.ipynb index 600f859..3753bb8 100644 --- a/qec101/04_QEC_Decoders.ipynb +++ b/qec101/04_QEC_Decoders.ipynb @@ -27,34 +27,61 @@ "id": "3e33ec99-e8e6-4a88-b4ea-bcf1e8bbb0df", "metadata": {}, "source": [ - "# QEC 101\n", - "## Lab 4: Decoders\n", - "\n", - "\n", - "QEC is only effective if the codes utilized to flag errors can be interpreted to identify the errors to be fixed. This is the job of a decoder. Decoding is one of the most challenging yet critical aspects of QEC and finding good decoders is a major researcher objective in the field.\n", - "\n", - "This lab introduces the basic concepts of decoding and frames why the problem is so difficult. 
In the later sections, you will then work through a number of exercises to implement a naive \"brute force\" decoder, train an AI decoder, and explore how GPUs can power advanced decoding algorithms like belief propagation with ordered statistics decoding. \n", - "\n", - "\n", - "**Prerequisites:** This is the 4th lab in the QEC series. If you are not familiar with the basics of classical or quantum error correction (EC), please complete [\"The Basics of Classical and Quantum Error Correction\"](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/01_QEC_Intro.ipynb) first. It is also helpful to have completed [\"Stabilizers, the Shor Code, and the Steane Code\"](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) in which stabilizers and syndromes are introduced in detail. This notebook also walks you through building a Steane code implementation in CUDA-Q which is used again in this lab. \n", - "\n", - "The list below outlines what you'll be doing in each section of this lab:\n", - "\n", - "* **4.1** Understand what decoding is, why it is important, and what makes it so difficult\n", - "* **4.2** Explore Pauli frames and error tracking\n", - "* **4.3** Code a naive brute force decoder for the Steane Code\n", - "* **4.4** Train an AI decoder for the Steane Code\n", - "* **4.5** Experiment with NVIDIA's accelerated belief propagation decoder.\n", - "\n", - "Terminology and notation you'll use:\n", - "* decoders, decoding window, Pauli frames\n", - "* most likely error decoding\n", - "* AI decoding\n", - "* Belief propagation and ordered statistics decoding\n", + "# QEC 101 — Lab 4: Decoders \n", + "\n", + "---\n", + "\n", + "**What You Will Do:**\n", + "* Explore the role of decoding in quantum error correction and understand the decoding window constraint\n", + "* Implement Pauli frame tracking for multi-round error correction\n", + "* Build a brute-force maximum likelihood error decoder for the Steane code\n", + "* Train a 
neural network AI decoder using CUDA-Q simulated syndrome data\n", + "* Implement belief propagation decoding and apply NVIDIA's GPU-accelerated BP+OSD decoder to large qLDPC codes\n", + "\n", + "**Prerequisites:**\n", + "* Python and Jupyter familiarity\n", + "* Basic knowledge of quantum computing (qubits, gates, measurement)\n", + "* Completion of [QEC 101 Lab 1](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/01_QEC_Intro.ipynb) (classical and quantum error correction basics)\n", + "* Completion of [QEC 101 Lab 2](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) (stabilizers, Steane code implementation in CUDA-Q)\n", + "\n", + "**Key Terminology:**\n", + "* Decoder\n", + "* Decoding Window\n", + "* Pauli Frame\n", + "* Maximum Likelihood Decoding (MLE)\n", + "* AI Decoder\n", + "* Belief Propagation (BP)\n", + "* Ordered Statistics Decoding (OSD)\n", + "* Code Capacity\n", + "* qLDPC (Quantum Low-Density Parity Check)\n", + "\n", + "**CUDA-Q Syntax:**\n", + "* [`@cudaq.kernel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.kernel) — defines a quantum kernel function\n", + "* [`cudaq.qvector`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.qvector) — allocates a register of qubits\n", + "* [`cudaq.sample`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.sample) — samples measurement outcomes from a kernel\n", + "* [`cudaq.set_target`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.set_target) — selects simulation or hardware backend\n", + "* [`cudaq.NoiseModel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.NoiseModel) — defines a quantum noise model\n", + "* [`cudaq_qec`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — CUDA-Q Quantum Error Correction library (BP decoder, code definitions)\n", + "\n", + "**Solutions:** 
[`Solutions/04_QEC_Decoders_Solution.ipynb`](Solutions/04_QEC_Decoders_Solution.ipynb)\n", + "\n", + "QEC is only effective if the codes utilized to flag errors can be interpreted to identify the errors to be fixed. This is the job of a decoder. Decoding is one of the most challenging yet critical aspects of QEC and finding good decoders is a major researcher objective in the field.\n", + "\n", + "This lab introduces the basic concepts of decoding and frames why the problem is so difficult. In the later sections, you will then work through a number of exercises to implement a naive \"brute force\" decoder, train an AI decoder, and explore how GPUs can power advanced decoding algorithms like belief propagation with ordered statistics decoding.\n", + "\n", + "This is the 4th lab in the QEC series. If you are not familiar with the basics of classical or quantum error correction (EC), please complete [\"The Basics of Classical and Quantum Error Correction\"](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/01_QEC_Intro.ipynb) first. It is also helpful to have completed [\"Stabilizers, the Shor Code, and the Steane Code\"](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) in which stabilizers and syndromes are introduced in detail. This notebook also walks you through building a Steane code implementation in CUDA-Q which is used again in this lab." + ] + }, + { + "cell_type": "markdown", + "id": "19021cfd832544a0", + "metadata": {}, + "source": [ + "
\n", "\n", - "💻 Just a heads-up: This notebook is designed to be run on an environment with a GPU. If you don't have access to a GPU, feel free to read through the cells and explore the content without executing them. Enjoy learning! ⭐\n", + "**⚡ GPU Required:** Parts of this notebook require a GPU.\n", "\n", - "To get started, run the cells below to install the prerequisite libraries and then restart the kernel." + "
" ] }, { @@ -64,32 +91,54 @@ "metadata": {}, "outputs": [], "source": [ - "## Instructions for Google Colab. You can ignore this cell if you have cuda-q set up \n", - "# Run this notebook in a GPU runtime\n", - "# Uncomment the lines below and execute the cell to install cuda-q\n", - "\n", - "#!pip install cudaq\n", - "\n", + "## Instructions for Google Colab. You can ignore this cell if you have CUDA-Q\n", + "## set up locally with all required files on your system.\n", + "## Uncomment the lines below and execute this cell to install CUDA-Q.\n", "\n", + "#!pip install cudaq -q\n", + "#!pip install torch scikit-learn galois ipywidgets -q\n", + "#\n", "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", "#!unzip -q main.zip\n", "#!mv cuda-q-academic-main/qec101/Images ./Images" ] }, + { + "cell_type": "markdown", + "id": "45d32c6755024b5c", + "metadata": {}, + "source": [ + "> **Note:** Run the cell below to import all required packages.\n", + "> If you installed packages above, restart the kernel first\n", + "> (**Runtime → Restart session** in Colab, or **Kernel → Restart** in Jupyter)." + ] + }, { "cell_type": "code", "execution_count": null, +<<<<<<< qec_updates + "id": "c8f04c97afb04f96", +======= "id": "4a3fc629-1a82-4eec-b946-1f3014d749ea", +>>>>>>> main "metadata": {}, "outputs": [], "source": [ - "# install `torch`, 'scikit-learn', 'galois', cudaq-qec' and `ipywidgets` in the current Python kernel. 
Skip this if they are already installed.\n", - "# Make sure to restart your kernel if you execute this!\n", - "# In a Jupyter notebook, go to the menu bar > Kernel > Restart Kernel.\n", - "# In VSCode, click on the Restart button in the Jupyter toolbar.\n", - "\n", "import sys\n", + "import os\n", + "from itertools import product\n", "\n", +<<<<<<< qec_updates + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "import requests\n", + "import bz2\n", + "import galois\n", + "import torch\n", + "import torch.nn as nn\n", + "import torch.optim as optim\n", + "from sklearn.model_selection import train_test_split\n", +======= "try:\n", " import torch\n", " import scikit_learn\n", @@ -98,8 +147,17 @@ " import ipywidgets as widgets\n", " import numpy as np\n", " import cudaq_qec as qec\n", +>>>>>>> main "\n", + "import cudaq\n", + "from cudaq import spin\n", + "from cudaq.qis import *\n", "\n", +<<<<<<< qec_updates + "## To install cudaq-qec (if not already installed), uncomment and run:\n", + "## !pip install cudaq-qec -q\n", + "import cudaq_qec as qec\n", +======= "except ImportError:\n", " print(\"Tools not found, installing. Please restart your kernel after this is done.\")\n", " !{sys.executable} -m pip install --upgrade pip -q\n", @@ -110,11 +168,17 @@ " !{sys.executable} -m pip install ipywidgets -q\n", " !{sys.executable} -m pip install cudaq-qec -q\n", " print(\"\\nNew libraries have been installed. 
Please restart your kernel!\")\n", +>>>>>>> main "\n", + "from Images.decoder.decoder_widget import display_widget\n", + "from Images.decoder.bp import run_decoder, parse_csr_mat, parse_H_csr, parse_obs_csr\n", "\n", +<<<<<<< qec_updates +======= "#This lab runs a GPU accelerated decoder and requires access to a GPU\n", "import cudaq\n", "import cudaq_qec as qec\n", +>>>>>>> main "cudaq.set_target('nvidia')" ] }, @@ -123,6 +187,8 @@ "id": "f5ee9745-1d8a-4591-b1c3-0159c4dad662", "metadata": {}, "source": [ + "---\n", + "\n", "## 4.1 Decoding Decoded\n", "\n", "Remember that a QEC round involves four main steps:\n", @@ -131,12 +197,15 @@ "* Decoding the syndrome to identify where an error occurred and what instructions to send to the QPU to fix the error\n", "* Correcting the error\n", "\n", - "\"Drawing\"\n", + "\"Diagram\n", "\n", "The decoding step is very challenging and is considered one of the primary limitations for QEC. This is because decoding requires measurements on a QPU, data transfer to the supercomputer, decoding on the supercomputer, and then data transfer back to the QPU. The time available for this is called the **decoding window** and varies based on a number of factors such as the qubit modality, data transfer rates, and the volume of information that needs to be decoded.\n", "\n", - "The simulation below makes this more clear. First, set the time for the decoding window. All 50 syndromes must be decoded in this time, otherwise the QEC procedure fails. In many cases, syndromes vary in decoding difficulty, so this simulation samples random times from a normal distribution. Try changing the parameters of the distribution and see how this impacts the decoder's success rate.\n", + "Directly competing with speed is accuracy. If a decoder is inaccurate, errors will be missed or introduced each QEC round and will propagate to ruin the computation. 
High-distance codes are necessary for accuracy, but unfortunately introduce high-qubit overheads and make decoding much more challenging. Advances in QEC code design and low-latency integration between AI supercomputers and QPUs alleviate pressure on the decoding step, but it nevertheless remains the primary bottleneck of QEC.\n", "\n", +<<<<<<< qec_updates + "To learn more about decoder metrics, explore the notebook called \"[Decoder Metrics and Parallel Window Decoding](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/08_QEC_Decoder_metrics_and_parallel_window_decoding.ipynb)\"" +======= "Notice how even if a decoder is quite fast and can decode most of the syndromes in time, the worst-case scenario (i.e. hardest syndrome to decode) is usually the limiting factor." ] }, @@ -149,6 +218,7 @@ "source": [ "from Images.decoder.decoder_widget import display_widget\n", "display_widget()" +>>>>>>> main ] }, { @@ -164,24 +234,27 @@ "id": "6b77d598-b3c0-489c-9809-34bdfb7a9a65", "metadata": {}, "source": [ - "## 4.2 Pauli Frames and Error Tracking ##\n", + "---\n", + "\n", + "## 4.2 Pauli Frames and Error Tracking\n", "\n", - "In practice, when errors are identified by the decoder, they are not immediately corrected but are tracked using a Pauli frame. The Pauli frame keeps track of the corrections classically and applies them later. This approach reduces the number of gate operations required to fix errors on the QPU, thereby protecting the encoded state from additional noise introduced by each correction gate. For instance, if a bit flip error occurs on qubit 1 in the first round and another bit flip error happens on the same qubit later, the two errors cancel each other out, eliminating the need for a correction\n", + "In practice, when errors are identified by the decoder, they are not immediately corrected but are tracked using a **Pauli frame**. The Pauli frame keeps track of the corrections classically and applies them later. 
This approach reduces the number of gate operations required to fix errors on the QPU, thereby protecting the encoded state from additional noise introduced by each correction gate. For instance, if a bit flip error occurs on qubit 1 in the first round and another bit flip error happens on the same qubit later, the two errors cancel each other out, eliminating the need for a correction\n", "\n", "Often, codes are depicted using 3D images like the one below. In this case, each plane is a Steane code QEC round with flagged errors in purple. Each error is saved, and the list grows with future rounds. The final Paul frame, $[X_1, X_5, X_1]$, is the list of corrections for the three bit flip errors that have occurred over all the rounds: two on qubit 1 and one on qubit 5. In the last step, the errors can be simplified, for example, $X_1X_1 = I$, so only one of the three corrections, $X_5$, needs to be applied. This is a rather trivial example, and often diagrams like this are used to depict more complex codes and their respective error pathways.\n", "\n", - "\"Drawing\"\n", + "\"3D\n", "\n", "The dimension of time can also lend itself to more sophisticated decoding schemes. This is particularly important when measurement errors occur during the stabilizer checks. In this case, it might appear that a stabilizer flags when in fact the data qubits are fine. Multiple decoding rounds can demonstrate that the false stabilizer flag is a consequence of measurement error and not a true error, where other true errors would persist without correction. Such an approach is more powerful but requires decoding of much more complex syndromes. The diagram below demonstrates this concept with an example.\n", "\n", - "\"Drawing\"\n", + "\"Diagram\n", "Notice how, in the first case, the decoder has kept track of a measurement error and is therefore making an incorrect syndrome in the final case. 
When decoding happens over time, the decoding task must not decode a 19-bit syndrome but is able to flag measurement errors\n", "\n", "\n", "\n", - "
\n", - "

Exercise 1:

\n", - "

\n", + "

\n", + "\n", + "**Exercise 1:**\n", + "\n", "The benefit of decoding in time is that the measurement errors can be factored into the decoding process. However, the tradeoff is that the decoding problem is much harder. When decoding in time, an effective parity check matrix must be constructed as an input to the decoder. In this exercise you will build $H^{(2)}$ for a two round Steane code that includes consistency checks to flag errors between the two time steps. \n", "\n", "First, a few hints. Consider the dimensions. The number of columns still corresponds to the number of qubits, but, now we need to take into account the data qubits at time 0, the data qubits at time 1, and the three ancilla qubits used to measure syndromes between the two rounds. \n", @@ -193,7 +266,7 @@ "What do the middle three columns need to be for $H^{(2)}$ to be able to catch measurement errors?\n", "\n", "Build $H^{(2)}$, and then build an error vector $e$ of size 17 such that each entry is a 0 or a 1 if an error occurred on that qubit. Compute $H^{(2)}e^T$ for a case with an error on data qubit 1 in the first time step, an error on data qubit 1 in the second time step only, and a measurement error. Note, it is best practice to assume that the decoder will not hanndle raw syndrome outputs, but the differences between he current set of measurements and the next round. For example, after preparation the syndrome 101 might be measured. If the next round produces the same stabilizer measurerments, the decoder would see 000 not 101. This syntax makes it much easier for decoders to handle data in more complex settings.\n", - "

\n", + "\n", "
\n" ] }, @@ -204,39 +277,38 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np \n", - "\n", + "# EXERCISE 1\n", "H = np.array([\n", " [1, 1, 0, 1, 1, 0, 0],\n", " [1, 0, 1, 1, 0, 1, 0],\n", " [0, 1, 1, 1, 0, 0, 1]\n", "])\n", "\n", - "#Build 2 round parity check matrix.\n", - "H2 = np.array([#TODO\n", + "# Build 2 round parity check matrix.\n", + "H2 = np.array([##TODO##\n", "])\n", "\n", - "#Syndrome for no error\n", - "e = np.array([#TODO])\n", + "# Syndrome for no error\n", + "e = np.array([##TODO##])\n", "\n", "print(H2 @ e.T)\n", "\n", - "#syndrome for error on first data qubit in first time step\n", - "e = np.array([#TODO])\n", + "# Syndrome for error on first data qubit in first time step\n", + "e = np.array([##TODO##])\n", "\n", "print(H2 @ e.T)\n", "\n", - "#syndrome for error on first measurement qubit\n", - "e = np.array([#TODO])\n", + "# Syndrome for error on first measurement qubit\n", + "e = np.array([##TODO##])\n", "\n", "print(H2 @ e.T)\n", "\n", - "#syndrome for error on first data qubit in second time step\n", - "e = np.array([#TODO])\n", + "# Syndrome for error on first data qubit in second time step\n", + "e = np.array([##TODO##])\n", "\n", "print(H2 @ e.T)\n", "\n", - "#Try other errors " + "# Try other errors" ] }, { @@ -252,6 +324,8 @@ "id": "7cc15193-fe88-49e3-a943-c5188cd0d5d7", "metadata": {}, "source": [ + "---\n", + "\n", "## 4.3 Most Likely Error Decoding\n", "\n", "So far, decoders have been presented as black boxes. In many cases that is sufficient. If you are developing or testing new codes, you might just use a state of the art decoder and not care how it works. In other cases, the opposite is true, and you might focus on developing and tuning a decoder to work well for a specific sort of QEC situation. \n", @@ -260,7 +334,7 @@ "\n", "The starting point is to consider a naive brute forced decoder that is conceptually simple yet sets the upper bound on decoder accuracy. 
\n", "\n", - "The steps of maximum likelihood decoding are as follows (considering only bitflip errors for simplicity):\n", + "The steps of **maximum likelihood decoding** are as follows (considering only bitflip errors for simplicity):\n", "\n", "1. Select a QEC code and encode a message in the codespace with $n$ data qubits.\n", "2. Generate the $2^n$ bitstrings $\\{x_0, \\cdots, x_{2^n} \\}$ of length $n$ corresponding to all possible error situations.\n", @@ -274,11 +348,12 @@ "Notice, in Lab 2 when you coded the Steane code, we assumed a situation where one error occurs at a time, allowing your syndrome checks to fix errors. This is the same assumption made here. The problem with this approach is that it does not scale. There are $2^n$ errors that need to be computed *a priori* which is not possible for large codes. The sections below will consider more scaleable heuristics to overcome this issue.\n", "\n", "\n", - "
\n", - "

Exercise 2:

\n", - "

\n", - "Code the most likely error decoder for the Steane code below given the parity check matrix below. For each syndrome, print the associated length 7 bitstrings that produce that error, the Hamming distance from the baseline message (0000000), and the probability of that error. \n", - "

\n", + "
\n", + "\n", + "**Exercise 2:**\n", + "\n", + "Code the most likely error decoder for the Steane code below given the parity check matrix below. For each syndrome, print the associated length 7 bitstrings that produce that error, the Hamming distance from the baseline message (0000000), and the probability of that error.\n", + "\n", "
\n", " " ] @@ -290,8 +365,7 @@ "metadata": {}, "outputs": [], "source": [ - "from itertools import product\n", - "\n", + "# EXERCISE 2\n", "# Define the bit-flip probability\n", "p = 0.1\n", "\n", @@ -303,7 +377,7 @@ "])\n", "\n", "# Generate the syndromes and sort based on the instructions above\n", - "#TODO" + "##TODO##" ] }, { @@ -311,6 +385,8 @@ "id": "e16ead63-c070-4ef7-a097-f01783635be1", "metadata": {}, "source": [ + "---\n", + "\n", "## 4.4 AI Decoders\n", "\n", "One way to circumvent the scaling challenges posed by the a brute force most likely error decoder is to use tools like AI. AI is fantastic at pattern recognition, runs very quickly, and can easily scale. \n", @@ -319,19 +395,20 @@ "\n", "Recently, [NVIDIA and QuEra announced a new transformed based decoder](https://developer.nvidia.com/blog/nvidia-and-quera-decode-quantum-errors-with-ai/) tested on magic state distillation circuits used by QuEra (A 35 qubit circuit with 5 Steane code logically encoded logical qubits). The decoder showed promise by outperforming the decoder used by QuEra in terms of speed and accuracy. Additionally, the AI decoder might have the potential to scale to code distances large enough for sufficiently low logical error rates.\n", "\n", - "\"Drawing\"\n", + "\"Performance\n", + "\n", + "\n", "\n", + "
\n", "\n", + "**Exercise 3:**\n", "\n", - "
\n", - "

Exercise 3:

\n", - "

\n", "You will now build a working AI decoder for the Steane code. The goal is to build something similar to the workflow in the image below.\n", "\n", - "\"Drawing\"\n", + "\"Workflow\n", "\n", "This lab does not expect you to have experience coding AI models with tools like PyTorch, so you will focus on the data generation and learn how to prepare the data to train an AI decoder without worrying about details of the model. Follow the steps outlined below to complete the code.\n", - "

\n", + "\n", "
\n", "\n" ] @@ -343,7 +420,11 @@ "source": [ "The first step is to generate the training data. Take the Steane code circuit you coded in Lab 2, now with bitflip noise to each qubit after encoding. In this case, we can explore circuit-level noise based on simulated results rather than a contrived data set. \n", "\n", +<<<<<<< qec_updates + "Create a data set of 5000 samples. To generate this, run `cudaq.run()` 5000 times taking one shot each time. Output the measurements from the syndrome checks (without correcting any errors) and then measure all of the data qubits. Compute the parity of bits corresponding to the correct logical operator to determine the true logical state. \n", +======= "Create a data set of 5000 samples. To generate this, run `cudaq.run()` 5000 times to return the syndrome and data qubit measurements for each shot. Compute the parity of bits corresponding to the correct logical operator to determine the true logical state. \n", +>>>>>>> main "\n", "Save the syndromes and the logical states as two numpy arrays. This will be your data set." 
] @@ -355,6 +436,9 @@ "metadata": {}, "outputs": [], "source": [ +<<<<<<< qec_updates + "# EXERCISE 3\n", +======= "import cudaq\n", "import numpy as np\n", "\n", @@ -365,13 +449,18 @@ " pass\n", "\n", "# Defines noise model and probability of bit-flip error.\n", +>>>>>>> main "p = 0.05\n", "cudaq.unset_noise()\n", "noise = cudaq.NoiseModel()\n", "\n", "@cudaq.kernel\n", "def steane_code() -> list[int]:\n", +<<<<<<< qec_updates + " \"\"\"Prepares a kernel for the Steane Code\n", +======= " \"\"\"Runs the Steane code and returns measurement outcomes.\n", +>>>>>>> main " Returns\n", " -------\n", " list[int]\n", @@ -441,23 +530,54 @@ " #Requires return type when kernel defined.\n", " return [d0,d1,d2,d3,d4,d5,d6,a0,a1,a2]\n", "\n", + " d0=mz(data_qubits[0])\n", + " d1=mz(data_qubits[1])\n", + " d2=mz(data_qubits[2])\n", + " d3=mz(data_qubits[3])\n", + " d4=mz(data_qubits[4])\n", + " d5=mz(data_qubits[5])\n", + " d6=mz(data_qubits[6])\n", + " a0=mz(ancilla_qubits[0])\n", + " a1=mz(ancilla_qubits[1])\n", + " a2=mz(ancilla_qubits[2])\n", + "\n", + " return [d0,d1,d2,d3,d4,d5,d6,a0,a1,a2]\n", "\n", "# Generate Data\n", +<<<<<<< qec_updates + "##TODO## - generate the inout data \n", + "\n", + "raw_logical_error_rate =##TODO## - calculate the raw logical error rate" +======= "##TODO##" +>>>>>>> main ] }, { "cell_type": "markdown", +<<<<<<< qec_updates + "id": "956a81bf", + "metadata": {}, + "source": [ + "The previous cell is quite a bit of work, and requires you to manually construct the entire QEC code. This is not ideal when you are primarily interested in testing an AI decoder and want data generation streamlined.\n", + "\n", + "Another more efficient way to sample training data for memory experiments is directly using the parity check matrix. Random bitflips can be applied and syndromes determined via matrix multiplication. CUDA-Q QEC can do this with just a few lines of codes (shown below) and generate the same sort of data we did above with a preloaded Steane code. 
Additionally, if you want to generate data for multiple syndrome extraction rounds, you can use the sample_memory_circuit. If you want to test a new, non-standard code, you would need to define the kernels explicitly similar to the example above as shown in the docs [here](https://nvidia.github.io/cudaqx/components/qec/introduction.html#qec-code-framework-cudaq-qec-code)" +======= "id": "17ef09ea", "metadata": {}, "source": [ "Another more efficient way to sample training data for memory experiments is directly using the parity check matrix. Random bitflips can be applied and syndromes determined via matrix multiplication. CUDA-Q QEC can do this with just a few lines of codes (shown below) and generate the same sort of data we did above with a preloaded Steane code. Additionally, if you want to generate data for multiple syndrome extraction rounds, you can use the `sample_memory_circuit`. If you want to test a new, non-standard code, you would need to define the kernels explicitly similar to the example above as shown in the docs [here](https://nvidia.github.io/cudaqx/components/qec/introduction.html#core-components)" +>>>>>>> main ] }, { "cell_type": "code", "execution_count": null, +<<<<<<< qec_updates + "id": "e98a9fac", +======= "id": "43287ac3", +>>>>>>> main "metadata": {}, "outputs": [], "source": [ @@ -495,12 +615,6 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", - "import torch\n", - "import torch.nn as nn\n", - "import torch.optim as optim\n", - "import matplotlib.pyplot as plt\n", - "from sklearn.model_selection import train_test_split\n", "\n", "# This section normalizes and loads the data you defined previously\n", "syndromes = np.array(syndromes, dtype=np.float32)\n", @@ -643,7 +757,12 @@ "\n", "# Calculate accuracy for these 20 examples\n", "correct = (predicted_labels[:20] == y_test[:20]).sum().item()\n", +<<<<<<< qec_updates + "print(f\"\\nAccuracy for these 20 examples: {correct}/20 = {correct/20:.2%}\")\n", + "\n" +======= 
"print(f\"\\nAccuracy for these 20 examples: {correct}/20 = {correct/20:.2%}\")\n" +>>>>>>> main ] }, { @@ -651,6 +770,18 @@ "id": "26973394-04c5-4919-a43e-9ab2d7309fb1", "metadata": {}, "source": [ +<<<<<<< qec_updates + "You should see the model successfully train! The test set accuracy should increase while the loss functions decreases.\n", + "\n", + "There are a few key observations to discuss.\n", + "\n", + "1. If the model parameters are random and we run this training multiple times, we should see the model on average start with an accuracy of about 0.5. This means we would have as much luck flipping a coin as our decoder. It may start higher or lower depending on the initial parameters bias towards outputting 1's or 0's. So, we do demonstrate the model did train.\n", + "\n", + "2. The trained model does outperform the raw logical error rate without decoding. So, our AI decoder is an improvement. Given the simplicity of the Steane code, this is unsurprising, as there is not really any hidden insight to be gleaned as we can essentially work out the brute force MLE decoding by hand.\n", + "\n", + "3. The final output of the AI model (or any decoder) is limited by the underlying QEC code and its distance. This is what determines what errors are detectable or correctable before we even try decoding. For example, the distance three Steane code cannot correct two errors. So, we cannot expect the AI model to learn how to correct these errors either. Thus, AI decoding shines in the regime where there are many correctable errors with non-trivial syndrome patterns.\n", + "\n", +======= "You should see the model successfully train! The test set accuracy should increase while the loss functions decreases. \n", "\n", "There are a few key observations to discuss. \n", @@ -661,6 +792,7 @@ "\n", "3. The final output of the AI model (or any decoder) is limited by the underlying QEC code and its distance. 
This is what determines what errors are detectable or correctable before we even try decoding. For example, the distance three Steane code cannot correct two errors. So, we cannot expect the AI model to learn how to correct these errors either. Thus, AI decoding shines in the regime where there are many correctable errors with non-trivial syndrome patterns.\n", "\n", +>>>>>>> main "4. Because the trained decoder depends on the error model used, it is really important to have large training data sets with sufficiently realistic noise models and model often require fine tuning with experimental data to realize peak performance. If the error rate was tiny, the model may just learn to output logical 0 all the time, and learn nothing about the error patterns if it has insufficient cases to train on. When training on physical QPU data, we are not trying to learn a noise model, but the actual, unknown noise profile of the device." ] }, @@ -669,9 +801,11 @@ "id": "3b8cb708-3810-43aa-af2e-38144aba2b17", "metadata": {}, "source": [ - "## Belief Propagation Decoding\n", + "---\n", + "\n", + "## 4.5 Belief Propagation Decoding\n", "\n", - "Another state-of-the-art decoding method is belief propagation (BP). BP is a powerful technique borrowed from classical error correction that is highly flexible and can serve as a black box decoder for arbitrary QEC Codes. It is particularly useful for codes like quantum low-density parity check (qLDPC). All the user needs to do is provide a parity check matrix and then feed the decoder syndromes to decode. \n", + "Another state-of-the-art decoding method is **belief propagation (BP)**. BP is a powerful technique borrowed from classical error correction that is highly flexible and can serve as a black box decoder for arbitrary QEC Codes. It is particularly useful for codes like quantum low-density parity check (**qLDPC**). All the user needs to do is provide a parity check matrix and then feed the decoder syndromes to decode. 
\n", "\n", "NVIDIA created a GPU accelerated BP decoder which allows researchers to push QEC even further than before. This section will walk you through implementing BP and how to use NVIDIA's accelerated BP decoder. \n", "\n", @@ -687,15 +821,14 @@ " \n", "4. Finally, the final beliefs $L_{\\text{final}, i}$ are computed as $L_i + \\sum_{j \\in N(i)} L_{c_j \\to v_i}$, summing the prior beliefs with the final messages sent to each variable node. From this a decision can be made where positive numbers indicate no error and negative an error, with the magnitudes related to confidence.\n", "\n", - "Ideally, BP will converge to a solution that agrees with the original syndrome and correct the error. If BP cannot converge, it means there is still significant uncertainty whether some of the bits have errors or not and postprocessing is necessary to refine the result. This will be discussed in the following section.\n", + "Ideally, BP will converge to a solution that agrees with the original syndrome and correct the error. If BP cannot converge, it means there is still significant uncertainty whether some of the bits have errors or not and postprocessing is necessary to refine the result. This will be discussed in the following section.
\n", "\n", + "**Exercise 4:**\n", "\n", - "div style=\"background-color: #f9fff0; border-left: 6px solid #76b900; padding: 15px; border-radius: 4px;\">\n", - "

Exercise 4:

\n", - "

\n", "Below is the start of a BP implementation for decoding the 5-qubit repetition code. Fill in the sections marked \"TODO\" to complete the code. Most of the BP loops are calculated for you. Make sure to review them and understand what is going on. Then, you will complete the code by fixing the code to calculate the final belief on each qubit and determine where errors occurred.\n", - "

\n", - "
\n" + "\n", + "
\n", + "\n" ] }, { @@ -705,9 +838,7 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", - "import sys\n", - "\n", + "# EXERCISE 4\n", "physical_error_rate = 0.1\n", "max_iter = 5\n", "\n", @@ -797,16 +928,16 @@ " print(\"\")\n", "\n", "\n", - "# TODO Compute the final belief of each qubit (prior beliefs plus sum of incoming messages)\n", + "##TODO## Compute the final belief of each qubit (prior beliefs plus sum of incoming messages)\n", "\n", "\n", - "#TODO END\n", + "##TODO## END\n", "\n", "print(\"--- Final Belief ---\")\n", "print(f\"Final LLRs: {np.round(L_final, 4)}\")\n", "\n", "\n", - "estimated_error = #TODO\n", + "estimated_error = ##TODO## Determine where errors occurred based on L_final\n", "\n", "print(\"-\" * 40)\n", "print(f\"Decoder Estimated Error: {estimated_error}\")\n", @@ -836,11 +967,6 @@ "metadata": {}, "outputs": [], "source": [ - "import requests\n", - "import bz2\n", - "import os\n", - "from Images.decoder.bp import run_decoder, parse_csr_mat, parse_H_csr, parse_obs_csr\n", - "\n", "if __name__ == \"__main__\":\n", " # See other test data options in https://github.com/NVIDIA/cudaqx/releases/tag/0.2.0\n", " filename = 'osd_1008_8785_0.001.json' # lower error rate\n", @@ -880,7 +1006,6 @@ ] }, { - "attachments": {}, "cell_type": "markdown", "id": "4e3d1953-c564-4b9f-9bea-d2b2d6247613", "metadata": {}, @@ -891,7 +1016,7 @@ "\n", "In benchmarks of large code instances, the NVIDIA decoder was up to 35x faster than the industry standard implementation for benchmarks run on the [[144,12,12]](https://arxiv.org/abs/2308.07915) code. 
\n", "\n", - "\"Drawing\"\n", + "\"Bar\n", "\n" ] }, @@ -953,11 +1078,6 @@ "metadata": {}, "outputs": [], "source": [ - "import requests\n", - "import bz2\n", - "import os\n", - "from Images.decoder.bp import run_decoder, parse_csr_mat, parse_H_csr, parse_obs_csr\n", - "\n", "if __name__ == \"__main__\":\n", " # See other test data options in https://github.com/NVIDIA/cudaqx/releases/tag/0.2.0\n", " filename = 'osd_1008_8785_0.005.json' # lower error rate\n", @@ -1011,11 +1131,12 @@ "\n", "\n", "\n", - "
\n", - "

Exercise 5:

\n", - "

\n", - " The following exercise is based on a lecture by Joschka Roffe. Given the parity check matrix below and the probabilities of error from BP. Perform OSD manually and find the error profile that satisfies the syndrome. Note, all computations must be performed using mod 2 arithmetic. This can be accomplished using the galois library which creates a Galois field and allows all numpy operations to compute mod 2.\n", - "

\n", + "
\n", + "\n", + "**Exercise 5:**\n", + "\n", + "The following exercise is based on a [lecture](https://www.youtube.com/watch?v=b9N2Ps3FTto) by Joschka Roffe. Given the parity check matrix below and the probabilities of error from BP. Perform OSD manually and find the error profile that satisfies the syndrome. Note, all computations must be performed using mod 2 arithmetic. This can be accomplished using the `galois` library which creates a Galois field and allows all `numpy` operations to compute mod 2.\n", + "\n", "
\n" ] }, @@ -1026,6 +1147,7 @@ "metadata": {}, "outputs": [], "source": [ + "# EXERCISE 5\n", "GF2 = galois.GF(2) # allows mod 2 math.\n", "\n", "#parity check matrix as numpy array\n", @@ -1039,10 +1161,10 @@ "s = GF2([1,0,0,1]) #syndrome\n", "\n", "# Get indices that would sort bp_results in descending order\n", - "#TODO\n", + "##TODO##\n", "\n", "# Rearrange columns of numpy array\n", - "#TODO\n", + "##TODO##\n", "H_sorted = \n", "\n", "\n", @@ -1075,19 +1197,19 @@ "print(rref(H_sorted.copy())) # First four columns are pivot columns\n", "\n", "# Build H_s from the first full rank columns\n", - "#TODO\n", + "##TODO##\n", "\n", "# Compute Hs_inverse\n", - "#TODO\n", + "##TODO##\n", "\n", "# Calculate e_s\n", - "#TODO\n", + "##TODO##\n", "\n", "# Pad result with zeros and reorder based on colum sorting from earlier.\n", - "#TODO\n", + "##TODO##\n", "\n", "# Confirm that the errors produce the expected syndrome from the original H\n", - "#TODO\n", + "##TODO##\n", "\n", "from Images.decoder.solution3_button import show_cudaq_solution\n", "show_cudaq_solution()" @@ -1105,7 +1227,7 @@ "To summarize, syndromes are of varying difficulty. Easy syndromes are solved with BP, OSD-0 is used for moderate syndrome difficulties, and higher order OSD is used for the most challenging. \n", "\n", "\n", - "\"Drawing\"\n", + "\"Flowchart\n", "\n", "\n", "Try running the code below on the 10000 shot data set. See what happens when `osd_method` is set to 1 for OSD-0. Then try setting this variable to 3 to run a variant of higher order OSD. Does the logical error rate improve? How much more time does it take to perform higher order OSD?" @@ -1147,10 +1269,21 @@ "id": "b31e66fd-08dc-4ed5-bcf0-00858109baf6", "metadata": {}, "source": [ - "## Summary\n", + "## Conclusion\n", "\n", "After completing this notebook, you should now have an understanding for the challenges behind decoding. This begins with the brute force most likely error decoding which is incredibly inefficient. 
You now have experience using other techniques like AI and BP+OSD decoding to accelerated QEC decoding. " ] + }, + { + "cell_type": "markdown", + "id": "0ab9420760b24f57", + "metadata": {}, + "source": [ + "**Related Notebooks:**\n", + "* [QEC 101 — Lab 5: Magic State Distillation](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/05_QEC_MSD.ipynb) — continues the QEC series with magic state distillation\n", + "* [QEC 101 — Lab 3: Noisy Simulation](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/03_QEC_Noisy_Simulation.ipynb) — explores noise models that generate the syndrome data decoders must process\n", + "* [QEC 101 — Lab 8: Decoder Metrics and Parallel Window Decoding](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/08_QEC_Decoder_metrics_and_parallel_window_decoding.ipynb) — extends decoder concepts with performance metrics and parallel decoding" + ] } ], "metadata": { @@ -1167,9 +1300,31 @@ "file_extension": ".py", "mimetype": "text/x-python", "name": "python", +<<<<<<< qec_updates + "version": "3.10.0" + }, + "learning_goals": { + "application_domain": "error_correction", + "cfqt_domain": "QCS", + "cfqt_proficiency": "B2", + "cfqt_subdomain": [ + "QCS.ERR", + "QCS.SW", + "QCS.ALG", + "QCS.ML" + ], + "qist_competency_areas": [ + "quantum_error_correction", + "quantum_software", + "quantum_algorithms", + "quantum_machine_learning", + "hpc_integration" + ] +======= "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.11.5" +>>>>>>> main } }, "nbformat": 4, diff --git a/qec101/05_QEC_MSD.ipynb b/qec101/05_QEC_MSD.ipynb index d9fec34..5c536bc 100644 --- a/qec101/05_QEC_MSD.ipynb +++ b/qec101/05_QEC_MSD.ipynb @@ -4,9 +4,7 @@ "cell_type": "code", "execution_count": null, "id": "eead95ce", - "metadata": { - "id": "eead95ce" - }, + "metadata": {}, "outputs": [], "source": [ "# SPDX-License-Identifier: Apache-2.0 AND CC-BY-NC-4.0\n", @@ -27,72 +25,92 @@ { "cell_type": "markdown", "id": 
"135f6e3e-e55f-4b5c-80cb-f8979bba94e0", - "metadata": { - "id": "135f6e3e-e55f-4b5c-80cb-f8979bba94e0" - }, + "metadata": {}, "source": [ - "# QEC 101\n", - "## Lab 5- $T$ gates and Magic State Distillation\n", + "# QEC 101 — Lab 5: $T$ Gates and Magic State Distillation\n", + "$\\renewcommand{\\ket}[1]{|#1\\rangle}\\renewcommand{\\bra}[1]{\\langle#1|}$\n", "\n", - "Fault tolerant quantum computing (FTQC) requires a universal gate set from which any quantum algorithm can be implemented. The T-gate is a challenging gate to implement fault tolerantly, yet holds the key to unlocking the power of quantum computing.\n", + "---\n", "\n", - "A common procedure for producing T-gates is called magic state distillation (MSD), and will likely consume the lion's share of the resources necessary to realize FTQC. Much quantum research is currently directed at finding ways to efficiently implement MSD. \n", - "\n", - "In a recent paper titled [*Experimental Demonstration of the Logical Magic State Distillation*](https://arxiv.org/abs/2412.15165), researchers from QuEra, MIT, and Harvard showcased MSD on QuEra's neutral atom quantum processor.\n", + "**What You Will Do:**\n", + "* Explain why the Clifford + $T$ gate set is necessary for universal quantum computing\n", + "* Describe how fault-tolerant $T$ gates are applied using magic state injection\n", + "* Analyze the resource overhead of magic state distillation across multiple rounds\n", + "* Implement the [[5,1,3]] MSD protocol using CUDA-Q to distill higher-fidelity magic states\n", + "* Determine the distillation threshold by comparing input and output state fidelities\n", "\n", "**Prerequisites:**\n", + "* Python and Jupyter familiarity\n", + "* Basic quantum computing concepts (qubits, gates, measurement)\n", + "* Familiarity with quantum error correction basics ([Lab 1: Introduction to QEC](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/01_QEC_Intro.ipynb))\n", + "* Understanding of the Steane [[7,1,3]] code 
([Lab 2: Stabilizer Codes](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb))\n", + "\n", + "**Key Terminology:**\n", + "* Universal gate set\n", + "* T gate\n", + "* Transversal gates\n", + "* Eastin-Knill Theorem\n", + "* Magic state\n", + "* Magic state distillation (MSD)\n", + "* [[5,1,3]] code\n", + "* Color code\n", + "* Clifford gates\n", + "* Fault tolerant quantum computing (FTQC)\n", + "\n", + "**CUDA-Q Syntax:**\n", + "* [`@cudaq.kernel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.kernel) — defines a quantum kernel function\n", + "* [`cudaq.qvector`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.qvector) — allocates a register of qubits\n", + "* [`cudaq.get_state`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.get_state) — returns the statevector\n", + "* [`cudaq.set_target`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.set_target) — selects simulation or hardware backend\n", + "* [`cudaq.sample`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.sample) — samples measurement outcomes from a kernel\n", + "\n", + "**Solutions:** [`Solutions/05_QEC_MSD_Solution.ipynb`](Solutions/05_QEC_MSD_Solution.ipynb)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fd21d6cf34b34383", + "metadata": {}, + "outputs": [], + "source": [ + "## Instructions for Google Colab. 
You can ignore this cell if you have CUDA-Q\n", + "## set up locally with all required files on your system.\n", + "## Uncomment the lines below and execute this cell to install CUDA-Q.\n", "\n", - "If you have completed the previous labs in this series (1-4), you should have a good foundation for the basics of QEC and the fundamentals of stabilizer codes, and have already coded up the Steane code which is used in the QuEra MSD implementation.\n", - "\n", - "This lab will introduce the importance of T-gates, how MSD works, and allow you to implement a version of it yourself. \n", - "\n", - "The list below outlines what you'll be doing in each section of this lab:\n", - "\n", - "* **5.1** Learn about the Clifford + $T$ gate set.\n", - "* **5.2** Learn how fault tolerant $T$ gates are applied.\n", - "* **5.3** Learn how magic state distillation works.\n", - "* **5.4** Learn how QuEra performed MSD in their recent experimental paper.\n", - "* **5.5** Write a CUDA-Q code to perform MSD distillation.\n", - "\n", - "Terminology and notation you'll use:\n", - "- universal gate set, T-gates, transversal gates\n", - "- Eastin-Knill Theorem\n", - "- magic state, magic state distillation, [[5,1,3]] code.\n", - "- color code\n", - "\n", - "To get started, run the cell below to install the prerequisite libraries and then restart the kernel.\n", - "\n", - "💻 Just a heads-up: This notebook is designed to be run on an environment with a GPU. If you don't have access to a GPU, feel free to read through the cells and explore the content without executing them. Enjoy learning! 
⭐" + "#!pip install cudaq -q\n", + "#\n", + "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", + "#!unzip -q main.zip\n", + "#!mv cuda-q-academic-main/qec101/Images ./Images" + ] + }, + { + "cell_type": "markdown", + "id": "bd279c318aca4b05", + "metadata": {}, + "source": [ + "> **Note:** Run the cell below to import all required packages.\n", + "> If you installed packages above, restart the kernel first\n", + "> (**Runtime → Restart session** in Colab, or **Kernel → Restart** in Jupyter)." ] }, { "cell_type": "code", "execution_count": null, "id": "644b9c82", - "metadata": { - "id": "644b9c82" - }, + "metadata": {}, "outputs": [], "source": [ - "import cudaq\n", - "from cudaq import spin\n", - "from cudaq.qis import *\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", - "from typing import List\n", - "\n", - "import sys\n", "\n", "try:\n", - " import numpy as np\n", " import qutip\n", - "\n", - "\n", "except ImportError:\n", - " print(\"Tools not found, installing. Please restart your kernel after this is done.\")\n", - " !{sys.executable} -m pip install --upgrade pip\n", - " !{sys.executable} -m pip install qutip\n", - " print(\"\\nNew libraries have been installed. Please restart your kernel!\")\n", + " print(\"qutip not found, installing. Please restart your kernel after this is done.\")\n", + " !pip install qutip -q\n", + " print(\"\\nNew library installed. Please restart your kernel!\")\n", "\n", "import cudaq\n", "cudaq.set_target('nvidia')" @@ -101,74 +119,71 @@ { "cell_type": "markdown", "id": "c2fa8e57-2f20-4d1b-a8c1-a8291da83fdc", - "metadata": { - "id": "c2fa8e57-2f20-4d1b-a8c1-a8291da83fdc" - }, + "metadata": {}, "source": [ - "## 5.1 Clifford + $T$ Gates ##\n", + "---\n", "\n", + "## 1. Clifford + $T$ Gates\n", "\n", - "So far in this QEC series, all of the QEC procedures have been contextualized as critical for realizing FTQC. 
However, FTQC is broader than QEC and involves careful consideration of all operations such that errors are not propagated through a computation. That is to say, implementing syndrome extraction of a QEC code correctly is a key step for preserving quantum memory, but still requires a set of FT operations (state prep, gate, measurement, etc) to perform computations in a way that ensures a single errors do not propogate uncontrollably.\n", + "So far in this QEC series, all of the QEC procedures have been contextualized as critical for realizing **fault tolerant quantum computing (FTQC)**. However, FTQC is broader than QEC and involves careful consideration of all operations such that errors are not propagated through a computation. That is to say, implementing syndrome extraction of a QEC code correctly is a key step for preserving quantum memory, but still requires a set of FT operations (state prep, gate, measurement, etc.) to perform computations in a way that ensures single errors do not propagate uncontrollably.\n", "\n", "For example, proper FT implementation of the Steane code would require logically encoded ancilla qubits to measure the stabilizers to make sure an ancilla qubit error does not become an uncorrectable error itself.\n", "\n", - "There are many aspects of FTQC that will not be covered here. However, the concept of a universal gate set needs to be introduced before the MSD protocol can be understood. A **universal gate set** is a set of quantum gates from which any unitary operation can be approximated to arbitrary precision. The is analogous to classical computing where any digital computation can be constructed from AND, NOT, and OR gates.\n", + "There are many aspects of FTQC that will not be covered here. However, the concept of a universal gate set needs to be introduced before the MSD protocol can be understood. A **universal gate set** is a set of quantum gates from which any unitary operation can be approximated to arbitrary precision. 
This is analogous to classical computing where any digital computation can be constructed from AND, NOT, and OR gates.\n", "\n", - "Certain gates, known as **transversal gates**, are nice to have in a gate set, because they are already fault tolerant. A transversal gate can be applied individually to every data qubit encoding a logical qubit such that if any of them have an error, it only affects the target data qubit. An example of a transversal get would be the $X$ operation in the Steane code. A logical $X$ is obtained by performing an $X$ on each data qubit and if any of those result in an error, it is correctable and will not propagate to the other data qubits. \n", + "Certain gates, known as **transversal gates**, are nice to have in a gate set, because they are already fault tolerant. A transversal gate can be applied individually to every data qubit encoding a logical qubit such that if any of them have an error, it only affects the target data qubit. An example of a transversal gate would be the $X$ operation in the Steane code. A logical $X$ is obtained by performing an $X$ on each data qubit and if any of those result in an error, it is correctable and will not propagate to the other data qubits.\n", "\n", - "So, all that is needed for FTQC to be easy is a universal gate set consisting only of transversal gates. Unfortunately, no such set exists. The **Eastin-Knill** Theorem proves that no quantum error-correcting code can implement a universal set of gates using only transversal operations.\n", + "So, all that is needed for FTQC to be easy is a universal gate set consisting only of transversal gates. Unfortunately, no such set exists. The **Eastin-Knill Theorem** proves that no quantum error-correcting code can implement a universal set of gates using only transversal operations.\n", "\n", - "The most common universal gate set is the Clifford + T gates. 
The Clifford gates, such as $H$, $S$, and CNOT, are the gates that map Pauli operators to other Pauli operators under conjugation. The $T$ gate, while not part of the Clifford group, is necessary to promote the Clifford gate set to a universal gate set. In most quantum error-correcting codes, the $T$ gate cannot be implemented transversally. In the case of the Steane code, the Clifford gates can be implemented transversally (see figure below), but the $T$ gate is not.\n", + "The most common universal gate set is the Clifford + T gates. The **Clifford gates**, such as $H$, $S$, and CNOT, are the gates that map Pauli operators to other Pauli operators under conjugation. The **T gate**, while not part of the Clifford group, is necessary to promote the Clifford gate set to a universal gate set. In most quantum error-correcting codes, the $T$ gate cannot be implemented transversally. In the case of the Steane code, the Clifford gates can be implemented transversally (see figure below), but the $T$ gate is not.\n", "\n", - "\n", + "\"Diagram\n", "\n", - "Consider why the $T$-gate is necessary for the universal gate set. For any logical operation $\\bar{U}$, it must transform all stabilizers $s_i \\in S$ into another $s_j \\in S$. \n", + "Consider why the $T$-gate is necessary for the universal gate set. 
For any logical operation $\\bar{U}$, it must transform all stabilizers $s_i \\in S$ into another $s_j \\in S$.\n", "\n", "$H$ does this fine as $H \\otimes X \\otimes H ^{\\dagger} = Z \\in S$ and $H \\otimes Z \\otimes H ^{\\dagger} = X \\in S$.\n", "\n", "$T$ does transform $Z$ into a valid stabilizer, but not the $X$ stabilizers.\n", "\n", - "$$T \\otimes Z \\otimes T ^{\\dagger} = Z \\in S$$ \n", + "$$T \\otimes Z \\otimes T ^{\\dagger} = Z \\in S$$\n", "\n", - "$$T \\otimes X \\otimes T ^{\\dagger} = e^{i \\pi/4}SZX \\notin S$$ \n", + "$$T \\otimes X \\otimes T ^{\\dagger} = e^{i \\pi/4}SZX \\notin S$$\n", "\n", - "The $T$ gate ends up being the key to any advantage realized by quantum algorithms. This is because without $T$ gates, the other gates could only jump between the six pole states of the Bloch sphere, which can be simulated efficiently with classical computing. Explore this fact with the widget [linked here](https://nvidia.github.io/cuda-q-academic/qec101/Images/MSD/bloch_sphere.html). Notice how the T gate is the only operation capable of producing a non-pole state. " + "The $T$ gate ends up being the key to any advantage realized by quantum algorithms. This is because without $T$ gates, the other gates could only jump between the six pole states of the Bloch sphere, which can be simulated efficiently with classical computing. Explore this fact with the widget [linked here](https://nvidia.github.io/cuda-q-academic/qec101/Images/MSD/bloch_sphere.html). Notice how the T gate is the only operation capable of producing a non-pole state." ] }, { "cell_type": "markdown", "id": "255bcdd8", - "metadata": { - "id": "255bcdd8" - }, + "metadata": {}, "source": [ - "## 5.2 Fault Tolerant $T$ Gate Application ##\n", - "\n", - "Thankfully, there is a way to implement a fault-tolerant $T$ gate, it is just difficult and resource intensive. The procedure involves the circuit below.\n", + "---\n", "\n", + "## 2. 
Fault-Tolerant $T$ Gate Application\n", "\n", - "\n", + "Thankfully, there is a way to implement a fault-tolerant $T$ gate, it is just difficult and resource intensive. The procedure involves the circuit below.\n", "\n", + "\"Circuit\n", "\n", - "It begins with an encoded $\\ket{0}$ qubit (bottom wire) and an arbitrary logical qubit state $\\ket{\\psi}$ (top wire). Then, the bottom wire is prepared in a so called magic state, $\\frac{\\ket{0} + e^{i\\pi/4}\\ket{1}}{\\sqrt{2}}$. A fault tolerant CNOT gate is then applied with the magic state qubit as the control. The target qubit is then measured and if the result is a 0, $T$ was successfully applied to $\\ket{\\psi}$. If measurement produces a 1, then a FT SX gate is applied to the qubit and the result becomes $T$ applied to $\\ket{\\psi}$.\n", + "It begins with an encoded $\\ket{0}$ qubit (bottom wire) and an arbitrary logical qubit state $\\ket{\\psi}$ (top wire). Then, the bottom wire is prepared in a so called **magic state**, $\\frac{\\ket{0} + e^{i\\pi/4}\\ket{1}}{\\sqrt{2}}$. A fault-tolerant CNOT gate is then applied with the magic state qubit as the control. The target qubit is then measured and if the result is a 0, $T$ was successfully applied to $\\ket{\\psi}$. If measurement produces a 1, then a FT $SX$ gate is applied to the qubit and the result becomes $T$ applied to $\\ket{\\psi}$.\n", "\n", - "The \"Prepare Magic State\" subcircuit is deceptively simple. It consists of a single logical Hadamard gate followed by a noisy $T$ gate to form $\\ket{T}$. This process may seem circular as a $T$ gate is required in the process to apply a fault tolerant $T$ gate. However, the procedure is a bit more nuanced. A \"good enough\" (high enough fidelity) magic state is the output of an onerous process called magic state distillation (MSD). MSD will be discussed more in the next section. \n", + "The \"Prepare Magic State\" subcircuit is deceptively simple. 
It consists of a single logical Hadamard gate followed by a noisy $T$ gate to form $\\ket{T}$. This process may seem circular as a $T$ gate is required in the process to apply a fault-tolerant $T$ gate. However, the procedure is a bit more nuanced. A \"good enough\" (high enough fidelity) magic state is the output of an onerous process called **magic state distillation (MSD)**. MSD will be discussed more in the next section.\n", "\n", - "Another distinction is that the magic state can only be produced from an easy to prepare logical 0 state. Only by performing the fault tolerant circuit above, can the $T$ gate act on an arbitrary qubit state $\\ket{\\psi_L}$. \n", + "Another distinction is that the magic state can only be produced from an easy to prepare logical 0 state. Only by performing the fault-tolerant circuit above can the $T$ gate act on an arbitrary qubit state $\\ket{\\psi_L}$.\n", "\n", "The math is instructive for why the above circuit works. After the CNOT is applied to the magic state and $\\ket{\\psi}$ qubits, the resulting state is\n", "\n", - "$$ \\frac{1}{\\sqrt{2}} [ \\ket{0}(a\\ket{0} +b\\ket{1}) + e^{i\\pi/4}\\ket{1}(a\\ket{1} + b\\ket{0})] $$\n", + "$$\\frac{1}{\\sqrt{2}} \\left[ \\ket{0}(a\\ket{0} + b\\ket{1}) + e^{i\\pi/4}\\ket{1}(a\\ket{1} + b\\ket{0}) \\right]$$\n", "\n", "Then, if the second qubit is measured (fault tolerantly) and is a 0, the intended $T\\ket{\\psi_L}$ state is prepared.\n", "\n", - "$$ a\\ket{0} +b*e^{i\\pi/4}\\ket{1} $$\n", + "$$a\\ket{0} + b \\cdot e^{i\\pi/4}\\ket{1}$$\n", "\n", "Otherwise, if 1 is measured, the following state is prepared.\n", "\n", - "$$ b\\ket{0} + a*e^{i\\pi/4}\\ket{1} $$\n", + "$$b\\ket{0} + a \\cdot e^{i\\pi/4}\\ket{1}$$\n", "\n", - "In the latter case, the objective state can be obtained with a Clifford correction consisting of an $S$ and $X$ operation to flip the bits and amplitudes. 
such a process ensures that the $T$ gate is implemented fault tolerantly and will not propagate through the computation if it introduces an error." + "In the latter case, the objective state can be obtained with a Clifford correction consisting of an $S$ and $X$ operation to flip the bits and amplitudes. Such a process ensures that the $T$ gate is implemented fault tolerantly and will not propagate through the computation if it introduces an error." ] }, { @@ -176,25 +191,27 @@ "id": "2e9e58e5-c274-4ebf-8511-01cd9c7e90c2", "metadata": {}, "source": [ - "## 5.3 Magic State Distillation ##\n", + "---\n", "\n", - "Preparing the magic state is a very resource intensive process. First, the process will be explained at a high level and then in terms of the details used in [*Experimental Demonstration of the Logical Magic State Distillation*](https://arxiv.org/abs/2412.15165). \n", + "## 3. Magic State Distillation\n", + "\n", + "Preparing the magic state is a very resource intensive process. First, the process will be explained at a high level and then in terms of the details used in [*Experimental Demonstration of the Logical Magic State Distillation*](https://arxiv.org/abs/2412.15165).\n", "\n", "Magic states are produced by magic state factories which follow a procedure like the one pictured below. This process is decoupled from any quantum application and can be used to build and store magic states which are consumed as an application needs $T$ gates applied.\n", "\n", "1) Select a QEC code to encode a collection of logical qubits\n", "2) Prepare each logical qubit in a noisy $\\ket{T}$ state.\n", - "3) Apply a second MSD QEC code on the logical qubits. In the figure below, the [[5,1,3]] code used by QuEra is represented.\n", - "4) Measure all but one of the logical qubits to produce a syndrome to determine if the remaining qubit is in a good magic state or not. 
If the syndrome indicates no error, the process worked and proceed to 5), if an error is flagged, the process restarts at 1).\n", - "5) A less noisy (high fidelity) magic state has been prepared. It can now be used, or input into another round of MSD.\n", + "3) Apply a second MSD QEC code on the logical qubits. In the figure below, the **[[5,1,3]] code** used by QuEra is represented.\n", + "4) Measure all but one of the logical qubits to produce a syndrome to determine if the remaining qubit is in a good magic state or not. If the syndrome indicates no error, the process worked and proceed to 5); if an error is flagged, the process restarts at 1).\n", + "5) A less noisy (higher fidelity) magic state has been prepared. It can now be used, or input into another round of MSD.\n", "\n", - "\n", + "\"Flowchart\n", "\n", - "It should be noted that this is one approach to producing $T$ gates, but others exits.\n", + "It should be noted that this is one approach to producing $T$ gates, but others exist.\n", "\n", "Usually, one round of MSD is not enough, even if the resulting state does have higher fidelity. To fix this, the output from the first round can be combined with other output magic states and the procedure repeated. Each round exponentially grows the overhead required to produce a single magic state. This is likely the greatest challenge facing FTQC and will consume most of the resources for any quantum application.\n", "\n", - "" + "\"Diagram" ] }, { @@ -202,56 +219,47 @@ "id": "77046502-787f-4814-bc51-7410c48fab34", "metadata": {}, "source": [ - "## 5.4 Performing MSD with the Color Code ##\n", + "---\n", "\n", + "## 4. Performing MSD with the Color Code\n", "\n", "It is helpful to look at a concrete example of MSD in practice to better understand the above process. 
This section will follow the procedure from the QuEra paper and explain the steps they took, preparing you to code up a similar example in the following section.\n", "\n", - "Each magic state is prepared using the [[7,1,3]] color code, also known as the Steane code which was covered in the second lab of this QEC series. The image below shows the procedure which QuEra used based on the gate set of their neutral atom QPU. The bottom data qubit can be set to an arbitrary qubit state and will \"inject\" that state into the logical qubit. The circuit is slightly different from the circuit used to encode the Steane code in lab 2, but enforces equivalent stabilizer constraints.\n", - "\n", - "\n", + "Each magic state is prepared using the [[7,1,3]] **color code**, also known as the Steane code which was covered in the second lab of this QEC series. The image below shows the procedure which QuEra used based on the gate set of their neutral atom QPU. The bottom data qubit can be set to an arbitrary qubit state and will \"inject\" that state into the logical qubit. The circuit is slightly different from the circuit used to encode the Steane code in Lab 2, but enforces equivalent stabilizer constraints.\n", "\n", "\n", "\n", - "\n", - "\n", - "\n", "In this case, the prepared state is a noisy $T$ state created by starting in the $\\ket{0}$ state and applying a rotation of $\\arccos(1/\\sqrt{3})$ about the (-1,1,0) axis.\n", "\n", - "This is repeated five times to create five logically encoded noisy magic states. They are then input into a [[5, 1, 3]] code which produces one magic state with high fidelity from five low fidelity ones. This is an example, like the Shor code, where QEC codes are concatenated. That is to say, the logical qubits are themselves used in a QEC code. 
The circuit below demonstrates what logical operations need to be performed on each logical qubit to complete the MSD process.\n", - "\n", + "This is repeated five times to create five logically encoded noisy magic states. They are then input into a [[5, 1, 3]] code which produces one magic state with high fidelity from five low fidelity ones. This is an example, like the Shor code, where QEC codes are concatenated. That is to say, the logical qubits are themselves used in a QEC code. The circuit below demonstrates what logical operations need to be performed on each logical qubit to complete the MSD process.\n", "\n", "\n", "\n", + "All but the first logical qubit are measured to produce a syndrome. If the correct syndrome (which can vary by implementation) is measured, a magic state with less error has been produced. QuEra's experimental results found that a raw logically encoded magic state had a fidelity of 94.1% which increased to 98.9% when postselected based on syndrome measurements, demonstrating the procedure worked.\n", + "\n", + "
\n", "\n", - "All but the first logical qubit are measured to produce as syndrome. If the correct syndrome (which can vary by implementation) is measured, a magic state with less error has been produced. QuEra's experimental results found that a raw logically encoded magic state had a fidelity of 94.1\\% which increased to 98.9\\% when postselected based on syndrome measurements, demonstrating the procedure worked.\n", + "**Exercise 1:**\n", "\n", - "
\n", - "

Exercise 1:

\n", - "

\n", "Write a simple script to calculate the number of physical qubits needed to perform $N$ rounds of MSD given an arbitrary QEC code and MSD procedure. Assuming the [[5,1,3]] code is used for the MSD, plot how many data qubits are needed for up to 5 rounds of MSD when the Steane code is used and the other [[17,1,5]] color code shown below, also used by QuEra in the paper.\n", "\n", - "

\n", + "\"Diagram\n", "\n", - "Is it clear why FTQC is so hard? This entire process must happen for every single $T$ gate applied in a quantum circuit, which could require millions or more $T$ gates.\n", - "

\n", - "
\n", - "\n" + "*[Adapted from: Experimental Demonstration of the Logical Magic State Distillation](https://arxiv.org/abs/2412.15165)*\n", + "\n", + "Is it clear why FTQC is so hard? This entire process must happen for every single $T$ gate applied in a quantum circuit, which could require millions or more $T$ gates.\n", + "\n", + "
" ] }, { @@ -261,42 +269,36 @@ "metadata": {}, "outputs": [], "source": [ - "def qubits_for_MSD(dataq_per_logicalq, ancilla_per_logicalq, max_msd_rounds, ms_per_msd_round):\n", - " \"\"\"Function that calculates the number of physical qubits required for n rounds of MSD and plots them Parameters\n", + "# EXERCISE 1\n", + "\n", + "def qubits_for_MSD(dataq_per_logicalq: int, ancilla_per_logicalq: int,\n", + " max_msd_rounds: int, ms_per_msd_round: int) -> None:\n", + " \"\"\"Calculate the number of physical qubits required for n rounds of MSD and plot them.\n", + "\n", + " Parameters\n", " ----------\n", - " dataq_per_logicalq: int\n", - " The number of data qubits required to encode a logical qubit for the QEC code you selected \n", + " dataq_per_logicalq : int\n", + " The number of data qubits required to encode a logical qubit for the QEC code you selected.\n", " ancilla_per_logicalq : int\n", - " The number of ancilla qubits required to measure the stabilizers of a logical qubit (assume no reuse)\n", - " max_msd_rounds: int\n", - " The number of MSD distillation rounds to perform\n", - " ms_per_msd_round: int\n", - " The number of magic states that must be input for the MSD protocol selected\n", - "\n", - " Returns\n", - " -------\n", + " The number of ancilla qubits required to measure the stabilizers of a logical qubit (assume no reuse).\n", + " max_msd_rounds : int\n", + " The number of MSD distillation rounds to perform.\n", + " ms_per_msd_round : int\n", + " The number of magic states that must be input for the MSD protocol selected.\n", " \"\"\"\n", - "\n", " rounds = []\n", - " qubits= []\n", - " #TODO - Write the body of the function\n", - "\n", - " \n", - " \n", + " qubits = []\n", + " ##TODO## Write the body of the function\n", "\n", - " plt.plot(rounds, qubits, 'b-o') \n", + " plt.plot(rounds, qubits, 'b-o')\n", " plt.autoscale(tight=True)\n", - "\n", - "\n", " plt.xlabel('MSD Rounds')\n", " plt.ylabel('Physical Qubits Required')\n", " 
plt.title('Qubits Needed for MSD')\n", - "\n", - " plt.show() \n", + " plt.show()\n", "\n", "\n", - "qubits_for_MSD(7,3,6,5)\n", - " " + "qubits_for_MSD(7, 3, 6, 5)" ] }, { @@ -304,21 +306,24 @@ "id": "31a6ae76-794a-4ba9-ab56-81e6891edb5d", "metadata": {}, "source": [ - "## 5.5 Using CUDA-Q to perform MSD ##\n", + "---\n", + "\n", + "## 5. Using CUDA-Q to Perform MSD\n", + "\n", "This section will walk you through an example using CUDA-Q to perform MSD. You will be coding the [[7,1,3]] code and following the MSD protocol of [Bravyi and Kitaev](https://arxiv.org/abs/quant-ph/0403025). Because this procedure only requires seven qubits, it can be simulated with a state vector simulator. However, remember that in practice each qubit would be a logical qubit and the procedure below would need to be performed with logical operations.\n", "\n", - "
\n", - "

Exercise 2:

\n", - "

\n", - " The first step of this protocol is to prepare a kernel that produces a noisy $\\ket{T_0}$ state. \n", + "

\n", + "\n", + "**Exercise 2:**\n", "\n", - "$$ \\ket{T_0} = \\cos (\\frac{\\theta}{2}) \\ket{0} + e^{\\frac{i\\pi}{4}}\\sin (\\frac{\\theta}{2}) \\ket{1} $$\n", + "The first step of this protocol is to prepare a kernel that produces a noisy $\\ket{T_0}$ state.\n", "\n", - "Where $\\theta = \\frac{1}{2}\\cos^{-1}(\\frac{1}{\\sqrt{3}}) = 0.4776583090622546$.\n", + "$$\\ket{T_0} = \\cos\\!\\left(\\frac{\\theta}{2}\\right) \\ket{0} + e^{\\frac{i\\pi}{4}} \\sin\\!\\left(\\frac{\\theta}{2}\\right) \\ket{1}$$\n", + "\n", + "Where $\\theta = \\frac{1}{2}\\cos^{-1}\\!\\left(\\frac{1}{\\sqrt{3}}\\right) = 0.4776583090622546$.\n", "Let the kernel take a single float as an input which applies an error to $\\theta$. The code underneath the kernel creates a list of perturbed $\\theta$s over a suggested range.\n", - "

\n", - "
\n", - "\n" + "\n", + "
" ] }, { @@ -328,17 +333,16 @@ "metadata": {}, "outputs": [], "source": [ - "cudaq.set_target(\"nvidia\")\n", + "# EXERCISE 2\n", "\n", - "#Build t0 with some error\n", "@cudaq.kernel\n", - "def noisy_t0(y:float):\n", - " #TODO Write the CUDA-Q kernel\n", + "def noisy_t0(y: float):\n", + " ##TODO## Write the CUDA-Q kernel to build a noisy T0 state\n", + " pass\n", "\n", "# Perturbation to y gate angles\n", "epsilon = 0.005\n", - "initial_thetay = [0.4776583090622546 + epsilon*i for i in range(75)] \n", - " " + "initial_thetay = [0.4776583090622546 + epsilon * i for i in range(75)]" ] }, { @@ -346,14 +350,13 @@ "id": "82a19172-4ff6-4d34-9ccc-e46ead5abb3d", "metadata": {}, "source": [ - "
\n", - "

Exercise 3:

\n", - "

\n", - "Next, initialize a perfect $\\ket{T_0}$ state and a second state with some error (It is suggested to use element 30 from the list above to get a noticeable amount of noise but still allows MSD to work). Use CUDA-Q's $\\texttt{get\\_state}$ and $\\texttt{overlap}$ commands to compute the fidelity of the noisy state and visualize both states using the Bloch spheres below. Note that fidelity is the overlap squared.\n", - "

\n", - "
\n", + "
\n", "\n", - "\n" + "**Exercise 3:**\n", + "\n", + "Next, initialize a perfect $\\ket{T_0}$ state and a second state with some error (it is suggested to use element 30 from the list above to get a noticeable amount of noise but still allow MSD to work). Use CUDA-Q's `get_state` and `overlap` commands to compute the fidelity of the noisy state and visualize both states using the Bloch spheres below. Note that fidelity is the overlap squared.\n", + "\n", + "
" ] }, { @@ -363,13 +366,13 @@ "metadata": {}, "outputs": [], "source": [ - "#TODO Save the state of the initial T0 state\n", + "# EXERCISE 3\n", "\n", - "#TODO Save the state of the error free T0 state\n", - "\n", - "#TODO compute the fidelity between them\n", + "##TODO## Save the state of the initial T0 state (with error)\n", "\n", + "##TODO## Save the state of the error-free T0 state\n", "\n", + "##TODO## Compute the fidelity between them\n", "\n", "print(\"Initial Fidelity\")\n", "print(initial_fidelity)\n", @@ -385,15 +388,16 @@ "id": "f74f93f8-cb32-4a78-83b8-f8f0fb666e14", "metadata": {}, "source": [ - "
\n", - "

Exercise 4:

\n", - "

\n", - " Now prepare a kernel to perform the [[5,1,3]] QEC code. The kernel should prepare each qubit as a noisy $\\ket{T_0}$ state, all with the same angle perturbation. Then, implement the circuit below. This circuit essentially maps the stabilizer measurements of the code to qubits 1,2,3 and 4 which need to be measured to produce a syndrome. Note that this version of the [[5,1,3]] code is slightly different from the version used by QuEra, but is more amenable to gate operations native to CUDA-Q. It is the implementation from A study of the robustness of magic state distillation against Clifford gate faults.\n", - "

\n", - " \n", - "
\n", - " One caveat to this code. It actually produces a distilled $\\ket{T_1}$. Add a Pauli Y operation followed by a Hadamard gate to the first qubit to convert back to $\\ket{T_0}$. Play around with this in the Bloch spheres above to understand what the transformation does.\n", - "

\n", + "
\n", + "\n", + "**Exercise 4:**\n", + "\n", + "Now prepare a kernel to perform the [[5,1,3]] QEC code. The kernel should prepare each qubit as a noisy $\\ket{T_0}$ state, all with the same angle perturbation. Then, implement the circuit below. This circuit essentially maps the stabilizer measurements of the code to qubits 1, 2, 3, and 4 which need to be measured to produce a syndrome. Note that this version of the [[5,1,3]] code is slightly different from the version used by QuEra, but is more amenable to gate operations native to CUDA-Q. It is the implementation from [A study of the robustness of magic state distillation against Clifford gate faults](https://dspacemainprd01.lib.uwaterloo.ca/server/api/core/bitstreams/44d06403-0b58-456f-a2d2-fd56ef2ea7b7/content).\n", + "\n", + "\"Circuit\n", + "\n", + "One caveat to this code: it actually produces a distilled $\\ket{T_1}$. Add a Pauli Y operation followed by a Hadamard gate to the first qubit to convert back to $\\ket{T_0}$. Play around with this in the Bloch spheres above to understand what the transformation does.\n", + "\n", "
" ] }, @@ -404,18 +408,15 @@ "metadata": {}, "outputs": [], "source": [ - "#Kernel implementing [[5,1,3]] code\n", + "# EXERCISE 4\n", + "\n", "@cudaq.kernel\n", "def msd(y: float):\n", - " #TODO Write the kernel for the [[5,1,3]] code\n", - "\n", - "\n", - "\n", + " ##TODO## Write the kernel for the [[5,1,3]] code\n", + " pass\n", "\n", - " \n", - "\n", - " h(reg[0]) #Apply to swap from T1 to T0\n", - " y(reg[0]) " + " h(reg[0]) # Apply to swap from T1 to T0\n", + " y(reg[0])" ] }, { @@ -423,7 +424,7 @@ "id": "a93dad50-263b-422f-b931-4b3d6fa52c6d", "metadata": {}, "source": [ - "Now, select an entry from the perturbed angle you created above and use it to run the code below. This will produce ten shots and for each print the syndrome, state, and fidelity with respect to the perfect $\\ket{T_0}$ state. How does the initial fidelity compare to the results? For results with improved fidelity, what is the resulting syndrome? If you do not get a 0000 syndrome, rerun the code. Take one of these improved states and visualize it using the Bloch sphere. Notice how it is closer to the perfect $\\ket{T_0}$ state than the state you started with." + "Now, select an entry from the perturbed angle you created above and use it to run the code below. This will produce ten shots and for each print the syndrome, state, and fidelity with respect to the perfect $\\ket{T_0}$ state. How does the initial fidelity compare to the results? For results with improved fidelity, what is the resulting syndrome? If you do not get a 0000 syndrome, rerun the code. Take one of these improved states and visualize it using the Bloch sphere. Notice how it is closer to the perfect $\\ket{T_0}$ state than the state you started with." 
] }, { @@ -433,22 +434,22 @@ "metadata": {}, "outputs": [], "source": [ - "print(\"\\n\") \n", + "print(\"\\n\")\n", "for i in range(10):\n", - " print(\"\\n\") \n", + " print(\"\\n\")\n", " print(f\"shot: {i}\")\n", - " distilled_ms = cudaq.get_state(msd, initial_thetay[30]) # uses arbitrary selection \n", + " distilled_ms = cudaq.get_state(msd, initial_thetay[30])\n", " distilled_ms = np.array(distilled_ms)\n", "\n", " indices = np.nonzero(distilled_ms)[0]\n", " for j in np.nonzero(distilled_ms)[0]:\n", - " syndrome=j\n", + " syndrome = j\n", " print(f\"syndrome: {np.binary_repr(syndrome, width=5)[0:4]}\")\n", "\n", " distilled_ms = distilled_ms[np.nonzero(distilled_ms)]\n", " print(f\"state:\\n{distilled_ms}\")\n", - " print(f\"fidelity: {perfect_t0.overlap(distilled_ms)**2}\") \n", - " print(\"\\n\") " + " print(f\"fidelity: {perfect_t0.overlap(distilled_ms)**2}\")\n", + " print(\"\\n\")" ] }, { @@ -456,12 +457,13 @@ "id": "cca10e2a-3a59-4715-9726-e01dc9ceaf0b", "metadata": {}, "source": [ - "
\n", - "

Exercise 5:

\n", - "

\n", - "Now, build a loop below that performs the MSD protocol for initial states corresponding to each error in the initial list of $\\theta$'s. Plot this against the line $y=x$ to determine the threshold required for the input state? That is, the minimum fidelity of the input state such that the procedure works and does not produce worse results. From the graph, estimate how many rounds of MSD would be needed (assuming the same procedure) to distill a state above .98 starting from .94.\n", - "

\n", - "
\n" + "
\n", + "\n", + "**Exercise 5:**\n", + "\n", + "Now, build a loop below that performs the MSD protocol for initial states corresponding to each error in the initial list of $\\theta$'s. Plot this against the line $y=x$ to determine the threshold required for the input state. That is, the minimum fidelity of the input state such that the procedure works and does not produce worse results. From the graph, estimate how many rounds of MSD would be needed (assuming the same procedure) to distill a state above 0.98 starting from 0.94.\n", + "\n", + "
" ] }, { @@ -471,47 +473,49 @@ "metadata": {}, "outputs": [], "source": [ + "# EXERCISE 5\n", + "\n", "input_fidelity = []\n", "output_fidelity = []\n", "\n", "for error in initial_thetay:\n", + " ##TODO## Write a loop to perform MSD for each error in the list of thetas\n", + " pass\n", "\n", - "# TODO Write a loop to perform MSD for each error in the list of thetas\n", - " \n", - "import matplotlib.pyplot as plt\n", - "\n", - "# Create the plot\n", "plt.figure(figsize=(8, 6))\n", - "plt.plot(input_fidelity, input_fidelity, label=\"Y=X\", marker='.') # First series: y=x\n", - "plt.plot(input_fidelity, output_fidelity, label=\"Input vs Output\", marker='x') # Second series: input vs output\n", - "\n", - "# Add labels, title, and legend\n", + "plt.plot(input_fidelity, input_fidelity, label=\"Y=X\", marker='.')\n", + "plt.plot(input_fidelity, output_fidelity, label=\"Input vs Output\", marker='x')\n", "plt.xlabel(\"X-axis\")\n", "plt.ylabel(\"Y-axis\")\n", "plt.title(\"Plot of Two Series\")\n", "plt.legend()\n", - "\n", - "# Show the plot\n", "plt.grid(True)\n", "plt.show()" ] }, { "cell_type": "markdown", - "id": "a0c96916-8d1e-4efe-89a7-f8e2b32eacf7", + "id": "412c6459-5c10-49f8-b12c-9992149d28d2", "metadata": {}, "source": [ "## Conclusion\n", "\n", - "after completing this notebook you should now have a sense of how MSD works, why it is needed, and what makes it so challenging. As you continue with QEC 101 content, remember that there are many aspects of FTQC beyond simply the QEC code selected." + "After completing this notebook you should now have a sense of how MSD works, why it is needed, and what makes it so challenging. The Eastin-Knill Theorem means that no QEC code can implement a universal gate set transversally, making the $T$ gate the critical bottleneck for fault-tolerant quantum computing. Magic state distillation provides a path forward, but at an exponential resource cost that represents one of the greatest challenges facing practical FTQC. 
As you continue with QEC 101 content, remember that there are many aspects of FTQC beyond simply the QEC code selected." + ] + }, + { + "cell_type": "markdown", + "id": "28d100b289c04eb8", + "metadata": {}, + "source": [ + "**Related Notebooks:**\n", + "* [QEC 101 — Lab 2: Stabilizer Codes](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) — covers the Steane code used as the inner code for MSD in this lab\n", + "* [QEC 101 — Lab 6: Topological Codes](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/06_QEC_Topological_Codes.ipynb) — the next notebook in the QEC 101 series, exploring surface and toric codes\n", + "* [QEC 101 — Lab 4: Decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb) — introduces quantum decoding strategies that complement the distillation approach" ] } ], "metadata": { - "colab": { - "include_colab_link": true, - "provenance": [] - }, "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", @@ -525,11 +529,24 @@ "file_extension": ".py", "mimetype": "text/x-python", "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.10.0" + }, + "learning_goals": { + "cfqt_domain": "QCS", + "cfqt_subdomain": [ + "QCS.ERR", + "QCS.ALG", + "QCS.SW" + ], + "cfqt_proficiency": "B1", + "qist_competency_areas": [ + "quantum_error_correction", + "quantum_algorithms", + "quantum_software" + ], + "application_domain": "error_correction" } }, "nbformat": 4, "nbformat_minor": 5 -} +} \ No newline at end of file diff --git a/qec101/06_QEC_Topological_Codes.ipynb b/qec101/06_QEC_Topological_Codes.ipynb index 11c70fc..d98a5c5 100644 --- a/qec101/06_QEC_Topological_Codes.ipynb +++ b/qec101/06_QEC_Topological_Codes.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "id": "eead95ce", "metadata": { "id": "eead95ce" @@ -27,110 +27,122 @@ { "cell_type": 
"markdown", "id": "135f6e3e-e55f-4b5c-80cb-f8979bba94e0", - "metadata": { - "id": "135f6e3e-e55f-4b5c-80cb-f8979bba94e0" - }, + "metadata": {}, "source": [ - "# QEC 101\n", - "## Lab 6 - Topological Codes\n", - "\n", - "This lab builds on the stabilizer formalism introduced in [lab 2]() towards much more complex yet powerful QEC codes: the topological codes. [Recent announcements](https://research.google/blog/making-quantum-error-correction-work/) from Google about their new Willow chip have brought a specific topological code (the surface code) center stage, as it is the primary code being explored by Google today and a very promising candidate for useful QEC.\n", - "\n", - "The toric code is one of the earliest topological QEC codes and a precursor to the surface code. It is a more complex code relative to the repetiton code and Steane code, but also elegant and worth the time to understand. This lab introduces you to the conceptual aspects of the toric code and enables you to code an implementation yourself. \n", - "\n", - "The list below outlines what you'll be doing in each section of this lab:\n", - "\n", - "* **6.1** Learn about topological codes and why they are so promising.\n", - "* **6.2** Learn the toric code layout and its stabilizers\n", - "* **6.3** Learn the toric code logical operators\n", - "* **6.4** Learn how to code the toric code in CUDA-Q\n", - "* **6.5** Learn how to decode the toric code errors\n", - "* **6.6** Implement a minimum weight perfect matching decoder\n", - "* **6.7** Learn about the planar and surface codes\n", - "\n", - "Lab 5 Learning Objectives:\n", - "* What are topological codes and why are they so promising. 
\n", - "* Understand the basics of the toric code and how to construct it visually\n", - "* Understand how to intupret toric code errors\n", - "* Understand how to code the toric code in CUDA-Q\n", - "* Understand how minimum-weight perfect matching decoding works\n", - "\n", - "\n", - "Terminology and notation you'll use:\n", - "\n", - "* topological code, toric code, surface code, threshold\n", - "* stabilizers, logical operators, syndromes\n", - "* minimum weight perfect matching\n", + "# Topological Codes — QEC 101: Lab 6\n", + "$\\renewcommand{\\ket}[1]{|#1\\rangle}\\renewcommand{\\bra}[1]{\\langle#1|}$\n", + "\n", + "---\n", + "\n", + "**What You Will Do:**\n", + "* Explore topological quantum error correction codes and their advantages over simpler codes\n", + "* Analyze the toric code's stabilizer structure, including plaquette and vertex stabilizers\n", + "* Identify logical operators on the torus using topological equivalence classes\n", + "* Implement the toric code encoding and syndrome measurement in CUDA-Q\n", + "* Apply minimum weight perfect matching (MWPM) decoding to toric code syndromes\n", + "* Compare the toric, planar, and rotated surface code architectures\n", + "\n", + "**Prerequisites:**\n", + "* Python and Jupyter familiarity\n", + "* Basic knowledge of quantum computing (qubits, gates, measurement) ([Quick Start to Quantum](https://github.com/NVIDIA/cuda-q-academic/blob/main/quick-start-to-quantum/01_quick_start_to_quantum.ipynb))\n", + "* Stabilizer formalism and syndrome measurement ([Lab 2: Stabilizers](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb))\n", + "* Familiarity with CUDA-Q kernels and `cudaq.sample` ([Lab 3: Noisy Simulation](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/03_QEC_Noisy_Simulation.ipynb))\n", + "\n", + "**Key Terminology:**\n", + "* Topological code\n", + "* Toric code\n", + "* Surface code\n", + "* Threshold\n", + "* Stabilizer\n", + "* Logical operator\n", + 
"* Syndrome\n", + "* Minimum weight perfect matching (MWPM)\n", + "* Plaquette stabilizer\n", + "* Vertex stabilizer\n", + "* Planar code\n", + "* Rotated surface code\n", + "* Equivalence class\n", + "* Topology\n", + "\n", + "**CUDA-Q Syntax:**\n", + "* [`@cudaq.kernel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.kernel) — defines a quantum kernel function\n", + "* [`cudaq.qvector`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.qvector) — allocates a register of qubits\n", + "* [`cudaq.sample`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.sample) — samples measurement outcomes from a kernel\n", + "* [`cudaq.set_target`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.set_target) — selects simulation or hardware backend\n", + "\n", + "**Solutions:** [`Solutions/06_QEC_Topological_Codes_Solution.ipynb`](Solutions/06_QEC_Topological_Codes_Solution.ipynb)\n", + "\n", + "This lab was inspired by excellent work in [*Quantum Error Correction: An Introductory Guide*](https://arxiv.org/pdf/1907.11157) and a blog entitled [*An Interactive Guide to the Surface Code*](https://arthurpesah.me/blog/2023-05-13-surface-code/#solution-of-the-exercise)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8dd39d5c17594d62", + "metadata": {}, + "outputs": [], + "source": [ + "## Instructions for Google Colab. 
You can ignore this cell if you have CUDA-Q\n", + "## set up locally with all required files on your system.\n", + "## Uncomment the lines below and execute this cell to install CUDA-Q.\n", "\n", - "This lab was inspired by excellent work in [*Quantum Error Correction an Introductory Guide*](https://arxiv.org/pdf/1907.11157) and a blog entitled [*An Interactive Guide to the Surface Code*](https://arthurpesah.me/blog/2023-05-13-surface-code/#solution-of-the-exercise)" + "#!pip install cudaq -q\n", + "#\n", + "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", + "#!unzip -q main.zip\n", + "#!mv cuda-q-academic-main/qec101/Images ./Images" ] }, { "cell_type": "markdown", "id": "b5046650", - "metadata": { - "id": "b5046650" - }, + "metadata": {}, "source": [ - "Execute the cell below to load all the necessary packages for this lab. " + "> **Note:** Run the cell below to import all required packages.\n", + "> If you installed packages above, restart the kernel first\n", + "> (**Runtime → Restart session** in Colab, or **Kernel → Restart** in Jupyter)." ] }, { "cell_type": "code", "execution_count": null, "id": "644b9c82", - "metadata": { - "id": "644b9c82" - }, + "metadata": {}, "outputs": [], "source": [ - "import cudaq\n", - "from cudaq import spin\n", - "from cudaq.qis import *\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", - "from typing import List\n", - "import sys\n", - "\n", - "try:\n", - " import networkx as nx\n", - "\n", - "except ImportError:\n", - " print(\"Tools not found, installing. Please restart your kernel after this is done.\")\n", - " !{sys.executable} -m pip install --upgrade networkx\n", - " print(\"\\nNew libraries have been installed. 
Please restart your kernel!\")\n", + "import networkx as nx\n", "\n", "import cudaq\n", + "from cudaq import spin\n", + "from cudaq.qis import *\n", + "\n", "cudaq.set_target('stim')" ] }, - { - "cell_type": "markdown", - "id": "b3e18790-87ab-4180-b349-a4210abca136", - "metadata": {}, - "source": [ - "## 6.1 What is a topological code?" - ] - }, { "cell_type": "markdown", "id": "3f64f39f-0c0c-4082-a63d-609b81fceebf", "metadata": {}, "source": [ - "The previous lab discussed stabilizers, and how the stabilizer formalism can be a helpful tool for generating more complex QEC codes, particularly at scales where specifying the codewords is impractical. The **toric code** is one of the most famous **topological** codes which combines the stabilizer formalism and principles of topology to produce a QEC code with a number of favorable properties. \n", + "---\n", "\n", - "**Topology** is a branch of mathematics that studies objects abstractly and relates them to so called equivalence classes based on how they can or cannot be transformed into one another. The prototypical example given to introduce topology proposes that a doughnut and a coffee mug are in the same class (see image below), as they each have one hole while a shape like a sphere, would be in a different equivalence class.\n", + "## 6.1. What is a Topological Code?\n", "\n", - "\n", + "The previous lab discussed **stabilizers**, and how the stabilizer formalism can be a helpful tool for generating more complex QEC codes, particularly at scales where specifying the codewords is impractical. The **toric code** is one of the most famous **topological codes** which combines the stabilizer formalism and principles of **topology** to produce a QEC code with a number of favorable properties.\n", "\n", + "Topology is a branch of mathematics that studies objects abstractly and relates them to so called **equivalence classes** based on how they can or cannot be transformed into one another. 
The prototypical example given to introduce topology proposes that a doughnut and a coffee mug are in the same class (see image below), as they each have one hole while a shape like a sphere, would be in a different equivalence class.\n", "\n", - "The coffee mug example, though certainly not rigorous and perhaps somewhat arbitrary, is an accessible glance into the sorts of distinctions topology can make to classify all sorts of geometries, even those that cannot be visualized. Though the details of topology are well outside the scope of this lab, they provide the theoretical foundation of the surface code which takes advantage of the notion of equivalence classes to perform error correction on qubits arranged on a surface.\n", + "\"Illustration\n", "\n", - "The key idea of the surface code is to define stabilizers as small repeating units that can be visually represented on a grid. Repeating such units results in a systematic way to increase the number of data qubits that encode one or more logical qubits, and increase the distance of the code. Defining the code on a grid pays dividends for leveraging topological principles to define errors and logical operations in such a way that lends itself to a nice visual interpretation. Ideally, surface code errors are localized and can be visually inspected much like one can look at an image, assuming the pixels are large enough, and quickly identify which pixels are the wrong color.\n", + "The coffee mug example, though certainly not rigorous and perhaps somewhat arbitrary, is an accessible glance into the sorts of distinctions topology can make to classify all sorts of geometries, even those that cannot be visualized. 
Though the details of topology are well outside the scope of this lab, they provide the theoretical foundation of the **surface code** which takes advantage of the notion of equivalence classes to perform error correction on qubits arranged on a surface.\n", + "\n", + "The key idea of the surface code is to define stabilizers as small repeating units that can be visually represented on a grid. Repeating such units results in a systematic way to increase the number of data qubits that encode one or more logical qubits, and increase the distance of the code. Defining the code on a grid pays dividends for leveraging topological principles to define errors and logical operations in such a way that lends itself to a nice visual interpretation. Ideally, surface code errors are localized and can be visually inspected much like one can look at an image, assuming the pixels are large enough, and quickly identify which pixels are the wrong color.\n", "\n", "Certain types of QPUs (such as superconducting) are well suited for the surface code as qubits are already arranged on surface with nearest neighbor connectivity.\n", "\n", - "The surface code also has a high **threshold**. The threshold of a QEC code is the error rate required of physical qubits such that adding more physical qubits improves the logical error rate and does not make it worse. A high threshold means that the surface code can tolerate physical qubits of lower relative quality than other codes. \n" + "The surface code also has a high **threshold**. The threshold of a QEC code is the error rate required of physical qubits such that adding more physical qubits improves the logical error rate and does not make it worse. A high threshold means that the surface code can tolerate physical qubits of lower relative quality than other codes." 
] }, { @@ -138,45 +150,48 @@ "id": "1a0bf241-9978-4c88-ad2e-21233dbce218", "metadata": {}, "source": [ - "## 6.2 The Toric Code and its Stabilizers\n", + "---\n", + "\n", + "## 6.2. The Toric Code and its Stabilizers\n", "\n", - "The starting point for the toric code is a grid, the lines of which correspond to lines on the surface of a torus (doughnut shape). The horizontal grid lines (like the blue line below) wrap around the equator of the torus. The vertical lines (like the red line) wrap inside of the \"doughnut hole\" of the torus. In order for the grid to appropriately resemble the torus, the endpoints of any grid line connect. Notice that the point A on the torus corresponds to both the left and right endpoint of the blue horizontal line.\n", + "The starting point for the toric code is a grid, the lines of which correspond to lines on the surface of a torus (doughnut shape). The horizontal grid lines (like the blue line below) wrap around the equator of the torus. The vertical lines (like the red line) wrap inside of the \"doughnut hole\" of the torus. In order for the grid to appropriately resemble the torus, the endpoints of any grid line connect. Notice that the point A on the torus corresponds to both the left and right endpoint of the blue horizontal line.\n", "\n", - "\n", + "\"Diagram\n", "\n", "

Image adapted from https://en.wikipedia.org/wiki/Torus#/media/File:Tesseract_torus.png

\n", "\n", "\n", "Each edge (line connecting two vertices) of the grid corresponds to a data qubit depicted by grey squares. The toric code is a $[[2L^2, 2, L]]$ code, encoding two logical qubits, where $L$ is the number of vertical/horizontal edges in each row/column.\n", "\n", - "\n", + "\"Toric\n", "\n", "\n", - "### Stabilizers \n", + "### Stabilizers\n", "\n", - "Stabilizers come in two varieties: plaquette and vertex. A plaquette stabilizer corresponds to the an ancilla qubit (black circle) which sits in the center of each face. The stabilizer consists of four $X$ operators acting on the four adjacent (green edge) qubits. Similarly, each vertex stabilizers corresponds to an ancilla qubit placed at each grid vertex and consists of four $Z$ operators acting on the four adjacent (purple edge) qubits.\n", + "Stabilizers come in two varieties: plaquette and vertex. A **plaquette stabilizer** corresponds to an ancilla qubit (black circle) which sits in the center of each face. The stabilizer consists of four $X$ operators acting on the four adjacent (green edge) qubits. Similarly, each **vertex stabilizer** corresponds to an ancilla qubit placed at each grid vertex and consists of four $Z$ operators acting on the four adjacent (purple edge) qubits.\n", "\n", - "\n", + "\"Diagram\n", "\n", "\n", "### Confirming Stabilizers Commute\n", "\n", "Remember from Lab 2 that all stabilizers must commute with one another. Consider the three possible cases to confirm that the commutation relationships hold.\n", "\n", - "1. *The stabilizers do not intersect*: This is the trivial case as two stabilizers acting on completely different qubits will trivially commute.\n", + "1. *The stabilizers do not intersect*: This is the trivial case as two stabilizers acting on completely different qubits will trivially commute.\n", "\n", - "2. 
*Two stabilizers of the same sort intersect*: Considering the plaquette stabilizers only as the same logic holds for vertex stabilizers, notice that the only way for two different plaquette stabilizers to intersect is if they share one data qubit. That is, two green squares share an edge. In the example below, the $X_2$ operations cancel out and the commutation relationship holds for the other operators acting on different qubits.\n", + "2. *Two stabilizers of the same sort intersect*: Considering the plaquette stabilizers only as the same logic holds for vertex stabilizers, notice that the only way for two different plaquette stabilizers to intersect is if they share one data qubit. That is, two green squares share an edge. In the example below, the $X_2$ operations cancel out and the commutation relationship holds for the other operators acting on different qubits.\n", "\n", - " \n", + " \"Two\n", "\n", - "3. *A plaquette and vertex stabilizer intersect*: This is the most complex case. Notice, that this can only occur when a green square intersects a purple cross on two shared data qubits. When a $Z$ and an $X$ type operator act on the same qubit an anti-commutation relationship holds and a negative phase is added. However, if this occurs for a pair of operators acting on a second qubit, the anti-commutation relationships cancel and the entire stabilizer commutes.\n", + "3. *A plaquette and vertex stabilizer intersect*: This is the most complex case. Notice, that this can only occur when a green square intersects a purple cross on two shared data qubits. When a $Z$ and an $X$ type operator act on the same qubit an anti-commutation relationship holds and a negative phase is added. However, if this occurs for a pair of operators acting on a second qubit, the anti-commutation relationships cancel and the entire stabilizer commutes.\n", "\n", - "\n", + "\"A\n", "\n", + "---\n", "\n", - "## 6.3 Logical Operators\n", + "## 6.3. 
Logical Operators\n", "\n", - "In order to understand logical operators, it is necessary to consider the topology of the torus and specifically the sorts of loops that can be drawn on it. It turns out, that there are four sorts of loops that can occur on the torus, each as a distinct equivalence class. \n", + "In order to understand **logical operators**, it is necessary to consider the topology of the torus and specifically the sorts of loops that can be drawn on it. It turns out, that there are four sorts of loops that can occur on the torus, each as a distinct equivalence class.\n", "\n", "1. A trivial loop (yellow) is simply a closed loop on the surface of the torus.\n", "2. A Vertical loop (red and green) is a loop through the center hole.\n", @@ -184,51 +199,51 @@ "4. A Vertical + Horizontal loop combines the two, looping around the equator with a twist through the center hole.\n", "\n", "\n", - " \n", + " \"Four\n", "\n", - "Each sort of loop also manifest within the toric code and correspond to either stabilizers or logical operators. \n", + "Each sort of loop also manifests within the toric code and corresponds to either stabilizers or logical operators.\n", "\n", "\n", - "First consider trivial loops. In the section above, it was noted that the product of two overlapping plaquette (or vertex) stabilizers commute because the operation performed on the overlapping qubit cancels out. Another way to think about this, is the formation of a larger loop which is just another stabilizer!\n", + "First consider trivial loops. In the section above, it was noted that the product of two overlapping plaquette (or vertex) stabilizers commute because the operation performed on the overlapping qubit cancels out. 
Another way to think about this, is the formation of a larger loop which is just another stabilizer!\n", "\n", - " \n", + " \"Two\n", "\n", "\n", "One could continue to add plaquette stabilizers and expand the loop, or work backwards and cancel out the previous stabilizers to recover $\\ket{\\psi}$. This means that any closed loop on the surface is a stabilizer and in the same equivalence class with the original state.\n", "\n", - "This is not true for a vertical or horizintal loop (pictured below). There is no way to apply stabilizers to either state to recover $\\ket{\\psi}$ Therefore, such loops are in a different equivalence class. It turns out that a horizontal loop (if formed by consecutive $X$ operators) is $\\bar{X}_1$ and a vertical loop is $\\bar{X}_2$. \n", + "This is not true for a vertical or horizontal loop (pictured below). There is no way to apply stabilizers to either state to recover $\\ket{\\psi}$. Therefore, such loops are in a different equivalence class. It turns out that a horizontal loop (if formed by consecutive $X$ operators) is $\\bar{X}_1$ and a vertical loop is $\\bar{X}_2$.\n", "\n", - " \n", + " \"A\n", "\n", "Just as all stabilizers correspond to the same equivalence class, there are many ways to produce vertical and horizontal loops. 
Such loops do not need to be \"straight\" and can follow a windy path as pictured below.\n", "\n", - " \n", + " \"A\n", "\n", "\n", "It is also possible for two distinct loops to close along the same axis corresponding to $\\bar{X}_i$$\\bar{X}_i$, that is, the application of the logical operator twice.\n", "\n", - " \n", - " \n", - "Finally, if a vertical and horizontal loop close, the results is the same as both logical bitflip operators $\\bar{X}_1$$\\bar{X}_2$\n", + " \"Two\n", "\n", + "Finally, if a vertical and horizontal loop close, the result is the same as both logical bitflip operators $\\bar{X}_1$$\\bar{X}_2$.\n", "\n", - " \n", "\n", - "All of this logic is identical for the $Z$-type vertex stabilizer. The only difference is that $\\bar{Z}_1$ and $\\bar{Z}_2$ correspond to vertical and horizontal loops formed by data qubits on parallel faces of the grid (purple lines). \n", + " \"A\n", "\n", + "All of this logic is identical for the $Z$-type vertex stabilizer. The only difference is that $\\bar{Z}_1$ and $\\bar{Z}_2$ correspond to vertical and horizontal loops formed by data qubits on parallel faces of the grid (purple lines).\n", "\n", - " \n", "\n", + " \"Three\n", "\n", "\n", - " $\\bar{Z}_1$ and $\\bar{X}_1$ need to anticommute which is why a horizontal loop formed by green lines and vertical loops formed by purple lines operate on the same logical qubit. Anticommutation is presevered as a $Z$ and $X$ operator must both occur on an odd number of data qubits.\n", "\n", + " $\\bar{Z}_1$ and $\\bar{X}_1$ need to anticommute which is why a horizontal loop formed by green lines and vertical loops formed by purple lines operate on the same logical qubit. Anticommutation is preserved as a $Z$ and $X$ operator must both occur on an odd number of data qubits.\n", "\n", - " \n", "\n", + " \"Crossing\n", "\n", + "---\n", "\n", - "## 6.4 Implementing the toric code in CUDA-Q\n" + "## 6.4. 
Implementing the Toric Code in CUDA-Q" ] }, { @@ -236,20 +251,21 @@ "id": "1e33a422-f03f-4ced-a307-f9977993b03b", "metadata": {}, "source": [ - "
\n", - "

Exercise 1:

\n", - "

\n", - "You will now code the toric code in CUDA-Q. It is important to note that there are many nuances that arise when coding the toric code that have not been considered in the conceptual discussions above. For example, in the presence of errors, the order in which the stabilizer gates are applied in the encoding stage matters for the propagation of errors. Such considerations will be ignored for simplicity but are important to note regardless. \n", + "

\n", "\n", - "In the cells below, you will perform the encoding for the toric code. First, calculate the indices of the stabilizers and print them to ensure all are correct. As the L=3 toric code is an [[18,2,3]] code, only 16 stabilizers are required to reinforce the constraints as all 9 vertex and plaquette stabilizers must produce a product of 1. However, it is OK to use all 18 stabilizers for clarity despite the redundancy as the extra two stabilizers do not add additional constraints.\n", + "**Exercise 1:**\n", "\n", - "In the cell below, write code that produces the indices for each plaquette (catch $Z$ errors) and vertex (catch $X$ errors) stabilizers. Use the indexing below to match the solution key. Notice how there are 18 data and ancilla qubits. Note: Store the indicies in a single flattened list (one for each stabilizer type) so that they can be easily read into CUDA-Q kernels. \n", + "You will now code the toric code in CUDA-Q. It is important to note that there are many nuances that arise when coding the toric code that have not been considered in the conceptual discussions above. For example, in the presence of errors, the order in which the stabilizer gates are applied in the encoding stage matters for the propagation of errors. Such considerations will be ignored for simplicity but are important to note regardless.\n", + "\n", + "In the cells below, you will perform the encoding for the toric code. First, calculate the indices of the stabilizers and print them to ensure all are correct. As the L=3 toric code is an $[[18,2,3]]$ code, only 16 stabilizers are required to reinforce the constraints as all 9 vertex and plaquette stabilizers must produce a product of 1. 
However, it is OK to use all 18 stabilizers for clarity despite the redundancy as the extra two stabilizers do not add additional constraints.\n", + "\n", + "In the cell below, write code that produces the indices for each plaquette (catch $Z$ errors) and vertex (catch $X$ errors) stabilizers. Use the indexing below to match the solution key. Notice how there are 18 data and ancilla qubits. Note: Store the indices in a single flattened list (one for each stabilizer type) so that they can be easily read into CUDA-Q kernels.\n", "\n", "Complete the additional tasks in the following cells.\n", - "

\n", + "\n", "
\n", "\n", - " \n", + "\"Diagram\n", "\n", "\n", "Print the stabilizers and confirm your code works. Do they match what you would expect from the image above?" @@ -262,16 +278,25 @@ "metadata": {}, "outputs": [], "source": [ - "#TODO \n", - "#Write code here to generate the indicices of each stabilizer.\n", + "# EXERCISE 1\n", + "##TODO## Write code to generate the indices of each stabilizer.\n", + "L = 3\n", + "\n", + "stabilizers_z = [] # Plaquette stabilizers that flag Z errors\n", + "##TODO## Compute plaquette stabilizer indices\n", + "\n", + "\n", + "stabilizers_x = [] # Vertex stabilizers that flag X errors\n", + "##TODO## Compute vertex stabilizer indices\n", + "\n", "\n", "print(\"plaq\")\n", "for x in range(L**2):\n", - " print(stabilizers_z[4*x], stabilizers_z[4*x+1],stabilizers_z[4*x+2],stabilizers_z[4*x+3])\n", + " print(stabilizers_z[4*x], stabilizers_z[4*x+1], stabilizers_z[4*x+2], stabilizers_z[4*x+3])\n", "\n", "print(\"vertex\")\n", "for x in range(L**2):\n", - " print(stabilizers_x[4*x], stabilizers_x[4*x+1],stabilizers_x[4*x+2],stabilizers_x[4*x+3])" + " print(stabilizers_x[4*x], stabilizers_x[4*x+1], stabilizers_x[4*x+2], stabilizers_x[4*x+3])" ] }, { @@ -279,17 +304,17 @@ "id": "fadf8e04-6cc8-4403-99d7-c7efc4ef19c2", "metadata": {}, "source": [ - "Now, build a register and an prepare the logical zero state. For the Steane and Shor codes in previous labs, you were provided with an encoding circuit. The encoding circuit prepared a state within the codespace such that all stabilizers were satisfied and returned a 0 when measured. For the toric code, this is impractical and instead, you can take advantage of the fact that if the stabilizers run on a register of all zeros, and then the ancilla qubits are measured, this will project the state into the codespace corresponding to the logical 0 state. \n", + "Now, build a register and prepare the logical zero state. 
For the Steane and Shor codes in previous labs, you were provided with an encoding circuit. The encoding circuit prepared a state within the codespace such that all stabilizers were satisfied and returned a 0 when measured. For the toric code, this is impractical and instead, you can take advantage of the fact that if the stabilizers run on a register of all zeros, and then the ancilla qubits are measured, this will project the state into the codespace corresponding to the logical 0 state.\n", "\n", - "This means if you run the stabilizer circuit and measure a second time, the same stabilizers will return the same measurements. The downside, is that a syndrome measurement of 000...000 no longer corresponds to a lack of errors. In fact, the baseline stabilizer measurements will be different each time the toric code is prepared this way! \n", + "This means if you run the stabilizer circuit and measure a second time, the same stabilizers will return the same measurements. The downside, is that a **syndrome** measurement of 000...000 no longer corresponds to a lack of errors. In fact, the baseline stabilizer measurements will be different each time the toric code is prepared this way!\n", "\n", - "So what does it mean to flag an error? In practice, the first result of stabilizer measurements is tracked, and effectively considered as the 000....000 result. Then, any deviation from this initial pattern indicates an error. So the result, 01000.... would imply the current QEC round resulted in a deviation from the expected syndrome of the initial state due to flagging of the second stabilizer.\n", + "So what does it mean to flag an error? In practice, the first result of stabilizer measurements is tracked, and effectively considered as the 000....000 result. Then, any deviation from this initial pattern indicates an error. So the result, 01000.... 
would imply the current QEC round resulted in a deviation from the expected syndrome of the initial state due to flagging of the second stabilizer.\n", "\n", - "Note: This exercise does not consider the possibility of errors occuring while preparing the state, but a true fault-tolerant preparation would have to do so. In that case, the procedure is run for many rounds and every round the syndromes do not change builds confidence that the state was prepared correctly.\n", + "Note: This exercise does not consider the possibility of errors occurring while preparing the state, but a true fault-tolerant preparation would have to do so. In that case, the procedure is run for many rounds and every round the syndromes do not change builds confidence that the state was prepared correctly.\n", "\n", - "In the cell below, build a kernel that runs the stabilizer checks, measures and saves the ancilla measurements, resets the ancilla qubits and repeats these steps at least three times. Print the results of a single shot of the kernel including each ancilla measurement round and the data qubit measurments. Make sure the stabilizer results stay the same. If they change, the preparation is not correct.\n", + "In the cell below, build a kernel that runs the stabilizer checks, measures and saves the ancilla measurements, resets the ancilla qubits and repeats these steps at least three times. Print the results of a single shot of the kernel including each ancilla measurement round and the data qubit measurements. Make sure the stabilizer results stay the same. If they change, the preparation is not correct.\n", "\n", - "Notice, the measurement results for the data qubits change despite the stabilizers not changing. In the next code block you will confirm that this is indeed the logical zero state." + "Notice, the measurement results for the data qubits change despite the stabilizers not changing. In the next code block you will confirm that this is indeed the logical zero state." 
] }, { @@ -299,31 +324,32 @@ "metadata": {}, "outputs": [], "source": [ - "\n", + "# EXERCISE 1\n", "@cudaq.kernel\n", "def toric(plaq: list[int], vert: list[int]):\n", " \"\"\"\n", - " Function that takes a list of plaquette and vertex indices and constructs a kernel for an L=3 surface code.\n", - " The code should apply stabilizer checks and measure the ancilla qubits\n", - " \n", - " ----------\n", - " plaq: list[int]\n", - " indices for plaquette stabilizers. (see figure above)\n", - " vert: list[int]\n", - " indices for vertex stabilizers. (see figure above)\n", + " Constructs a kernel for an L=3 toric code that applies stabilizer checks,\n", + " measures ancilla qubits, and repeats for multiple rounds.\n", "\n", - " Returns\n", - " cudaq.kernel to prepare toric code.\n", - " -------\n", + " Parameters\n", + " ----------\n", + " plaq : list[int]\n", + " Indices for plaquette stabilizers.\n", + " vert : list[int]\n", + " Indices for vertex stabilizers.\n", " \"\"\"\n", "\n", - " #TODO\n", - " #Write a kernel that prepares the toric code\n", - " \n", + " ##TODO## Write a kernel that prepares the toric code:\n", + " # 1. Allocate data and ancilla qubits\n", + " # 2. Apply plaquette stabilizer checks (H-CNOT-H pattern)\n", + " # 3. Apply vertex stabilizer checks (CNOT pattern)\n", + " # 4. Measure ancilla qubits, reset, and repeat at least 3 times\n", + " # 5. 
Measure data qubits at the end\n", "\n", "\n", - "# Sample the kernel one shot at a time and print the results from the intermediate measurements to confirm stabilizer checks do not change \n", - "results = cudaq.sample(toric,stabilizers_z, stabilizers_x,shots_count=1)\n", + "# Sample the kernel one shot at a time and print the results from the\n", + "# intermediate measurements to confirm stabilizer checks do not change\n", + "results = cudaq.sample(toric, stabilizers_z, stabilizers_x, shots_count=1)\n", "\n", "print(results.get_register_counts(\"anc0\"))\n", "print(results.get_register_counts(\"anc1\"))\n", @@ -337,15 +363,15 @@ "id": "24dbc831-1aba-483f-988f-824b5bb336a2", "metadata": {}, "source": [ - "Now, let's test your code to ensure that logical operators work as expected. First, copy the code above that prepares the logical 0 state. Then, run `cudaq.sample` and produce a dictionary of bistrings form measuring the data qubits. Write a function that loops through these results and sums a subset of specified bits mod 2. \n", + "Now, let's test your code to ensure that logical operators work as expected. First, copy the code above that prepares the logical 0 state. Then, run `cudaq.sample` and produce a dictionary of bitstrings from measuring the data qubits. Write a function that loops through these results and sums a subset of specified bits mod 2.\n", "\n", - "The next ingredient is to define the logical operators for $\\bar{X}_1$, $\\bar{X}_1$, $\\bar{Z}_1$, and $\\bar{Z}_2$. \n", + "The next ingredient is to define the logical operators for $\\bar{X}_1$, $\\bar{X}_2$, $\\bar{Z}_1$, and $\\bar{Z}_2$.\n", "\n", - "Remember, these need to be horizontal and vertical loops across the torus. use the figures in the previous section to help identify a set of correct data qubits which correspond to valid logical operators. Remember, there is more than one right selection here. 
These logical operator will also inform which data qubits we measure (and sum mod 2) to produce the logical observable. The logical operators and obervables do not need to be the exact same loop, but just in the same equivilance class.\n", + "Remember, these need to be horizontal and vertical loops across the torus. Use the figures in the previous section to help identify a set of correct data qubits which correspond to valid logical operators. Remember, there is more than one right selection here. These logical operators will also inform which data qubits we measure (and sum mod 2) to produce the logical observable. The logical operators and observables do not need to be the exact same loop, but just in the same equivalence class.\n", "\n", - "Also remember that just like measuring in the $Z$ basis of a single qubit provides the $\\ket{0}$ or $\\ket{1}$ state, the $Z$ logical observables will produce the logical 0 and 1 states. Confrim the results below. If you take 1000 shots, you should see both $\\bar{Z}_1$ and $\\bar{Z}_2$ return 0 100 percent of the time. Likewise, the $X$ observable should be in a 50/50 superposition. \n", + "Also remember that just like measuring in the $Z$ basis of a single qubit provides the $\\ket{0}$ or $\\ket{1}$ state, the $Z$ logical observables will produce the logical 0 and 1 states. Confirm the results below. If you take 1000 shots, you should see both $\\bar{Z}_1$ and $\\bar{Z}_2$ return 0 100 percent of the time. Likewise, the $X$ observable should be in a 50/50 superposition.\n", "\n", - "Finally, try applying a logical bitflip gate. Make sure you can successfully apply one to both logical qubits. The solution has these gates commented out if you get stuck. " + "Finally, try applying a logical bitflip gate. Make sure you can successfully apply one to both logical qubits. The solution has these gates commented out if you get stuck." 
] }, { @@ -355,23 +381,27 @@ "metadata": {}, "outputs": [], "source": [ - "def count_summed_bits_at_indices(bit_dict, indices):\n", + "# EXERCISE 1\n", + "def count_summed_bits_at_indices(bit_dict: dict[str, int], indices: list[int]) -> tuple[int, int]:\n", " \"\"\"\n", - " Function that computes logical 0 or 1 state from toric code.\n", - " \n", + " Computes logical 0 or 1 state from toric code measurement outcomes.\n", + "\n", + " Parameters\n", " ----------\n", - " bit_dict: dictionary\n", - " measurement outcome obtained from cudaq.sample\n", - " indices: list[int]\n", - " list specifying which bits to sum in order to measure logical Z state.\n", + " bit_dict : dict[str, int]\n", + " Measurement outcome obtained from cudaq.sample.\n", + " indices : list[int]\n", + " List specifying which bits to sum in order to measure logical Z state.\n", "\n", " Returns\n", - " the number of logical 0 and logical 1 measurements\n", " -------\n", + " tuple[int, int]\n", + " The number of logical 0 and logical 1 measurements.\n", " \"\"\"\n", - " #TODO - Finish the function\n", + " ##TODO## Finish the function\n", + "\n", "\n", - "#TODO - Define operators for X1 X2 Z1 Z2\n", + "##TODO## Define logical operators for X1, X2, Z1, Z2\n", "\n", "print(\"Result of measuring X1:\")\n", "print(count_summed_bits_at_indices(results, logical_x1))\n", @@ -389,50 +419,54 @@ "id": "e159b585-6705-4ac7-a7c5-8e753aec15ba", "metadata": {}, "source": [ - "## 6.5 Decoding the Toric Code\n", + "---\n", "\n", - "Now that you know how to construct the toric code, it is important to think about errors and syndrome decoding so that the code can be put to use. \n", + "## 6.5. Decoding the Toric Code\n", "\n", - "Note that every data qubit (edge) is adjacent to exactly two faces and two vertices, hence, any single error (denoted by red text on a data quit) will produce a syndrome where two stabilizers flag (denoted as yellow squares for plaquette and orange circles for vetex stabilizers.) 
In the figure below, notice how easy it is to spot the localized errors, a key benefit of topological codes.\n", + "Now that you know how to construct the toric code, it is important to think about errors and syndrome decoding so that the code can be put to use.\n", "\n", - " \n", + "Note that every data qubit (edge) is adjacent to exactly two faces and two vertices, hence, any single error (denoted by red text on a data qubit) will produce a syndrome where two stabilizers flag (denoted as yellow squares for plaquette and orange circles for vertex stabilizers.) In the figure below, notice how easy it is to spot the localized errors, a key benefit of topological codes.\n", "\n", + " \"Toric\n", "\n", - "What gets complicated is the presence of multiple errors. Considering only $X$-type errors, Notice what happens if two adjacent errors occur. Now, the vertex stabilizer bordering both errors no longer fires, while the vertex diagonal to the first stabilizer now fires.\n", "\n", - " \n", + "What gets complicated is the presence of multiple errors. Considering only $X$-type errors, notice what happens if two adjacent errors occur. Now, the vertex stabilizer bordering both errors no longer fires, while the vertex diagonal to the first stabilizer now fires.\n", + "\n", + " \"Two\n", "\n", "This pattern can continue, placing the highlighted stabilizers in any position.\n", "\n", - " \n", + " \"A\n", "\n", - "Another way to say this is that a pair of stabilizers is connected by a string of edges with errors. So, it is very natural to understand which stabilizers fire given the error pattern. However, in practice, you start with the syndrome results and must infer whihch errors occured occured with a decoder.\n", + "Another way to say this is that a pair of stabilizers is connected by a string of edges with errors. So, it is very natural to understand which stabilizers fire given the error pattern. 
However, in practice, you start with the syndrome results and must infer which errors occurred with a decoder.\n", "\n", - "There are many ways to go about this, but we will focus on a traditional technique called **minimum weight perfect matching (MWPM)**. \n", + "There are many ways to go about this, but we will focus on a traditional technique called **minimum weight perfect matching (MWPM)**.\n", "\n", - "One of the first principles of MWPM has to do with the minimum weight. Consider a simple case where two stabilizers fire on opposite diagonals of a square in the grid. It is perfectly valid for any of the below error patterns to cause this syndrome. In fact, there are many other potential error patterns not shown. \n", + "One of the first principles of MWPM has to do with the minimum weight. Consider a simple case where two stabilizers fire on opposite diagonals of a square in the grid. It is perfectly valid for any of the below error patterns to cause this syndrome. In fact, there are many other potential error patterns not shown.\n", "\n", - " \n", + " \"Four\n", "\n", "\n", - "Assuming errors are independent of one another, then the most likely error pattern is the one with the fewest number of errors, meaning pattern 1 or 2. If X's (blue) are applied as fixes to correct the errors, there are two minimum weight solutions depending on selection of pattern 1 or 2 as the error pattern. It turns out that in this case, it does not matter which one is chosen. If pattern 1 is the true error distribution and $X$ operations are applied to the two erroneous qubits, the errors are fixed. If instead $X$'s are applied to the other half of the square, the errors are not fixed, but the fixes combined with the errors form a stabilizer and result in an overall operation that will obtain the original state either way!\n", + "Assuming errors are independent of one another, then the most likely error pattern is the one with the fewest number of errors, meaning pattern 1 or 2. 
If X's (blue) are applied as fixes to correct the errors, there are two minimum weight solutions depending on selection of pattern 1 or 2 as the error pattern. It turns out that in this case, it does not matter which one is chosen. If pattern 1 is the true error distribution and $X$ operations are applied to the two erroneous qubits, the errors are fixed. If instead $X$'s are applied to the other half of the square, the errors are not fixed, but the fixes combined with the errors form a stabilizer and result in an overall operation that will obtain the original state either way!\n", "\n", - "\n", + "\"Two\n", "\n", "If pattern 3, though much less likely, were the true error distribution, then applying the same MWPM corrections would still produce a stabilizer loop and ameliorate all the errors.\n", "\n", - "\n", + "\"A\n", + "\n", + "This would not be the case for pattern 4. If the same corrections are applied, a stabilizer loop is not formed, nor are the errors corrected. Instead, a loop corresponding to a logical operation is closed. This means, the fixes would perform an unexpected logical bitflip and result in a logical error, the worst case scenario. This is why the toric code has a distance of 5, as you need at least 5 errors to complete a vertical or horizontal loop and produce a logical error. Increasing the grid size makes logical errors less likely, but comes at the cost of $L^2$ scaling data qubit overhead.\n", "\n", - "This would not be the case for pattern 4. If the same corrections are applied, a stabilizer loop is not formed, nor are the errors corrected. Instead, a loop corresponding to a logical operation is closed. This means, the fixes would perform an unexpected logical bitflip and result in a logical error, the worst case scenario. This is why the toric code has a distance of 5, as you need at least 5 errors to complete a vertical or horizontal loop and produce a logical error. 
Increasing the grid size makes logical errors less likley, but comes at the cost of $L^2$ scaling data qubit overhead.\n", + "\"An\n", "\n", - "\n", + "The perfect matching part of MWPM concerns the fact that a valid syndrome can be composed of any even number of stabilizers firing. So, finding the MWPM solution requires solving a larger minimization problem to consider all pairs of stabilizers, and the minimum number of errors required to connect each stabilizer in each pair. This means considering all possible pairings of the stabilizers and the shortest paths for each pairing, a much more complex optimization problem.\n", "\n", - "The perfect matching part of MWPM concerns that fact that a valid syndrome can be composed of any even number of stabilizers firing. So, finding the MWPM solution requires solving a larger minimization problem to consider all pairs of stabilizers, and the minimium number of errors required to connect each stabilizer in each pair. This means considering all possible parings of the stabilizers and the shortest paths for each pairing, a much more complex optimization problem.\n", "\n", + "\"Visualization\n", "\n", - "\n", + "---\n", "\n", - "## 6.6 MQPM Decoding" + "## 6.6. MWPM Decoding" ] }, { @@ -440,18 +474,17 @@ "id": "27fed2d3-e02c-431e-a9a6-dd32fdd5be5a", "metadata": {}, "source": [ + "
\n", "\n", - "
\n", - "

Exercise 2:

\n", - "

\n", - "Use the code provided to solve the MWPM problem for the two syndrome patterns below. Determine in each case if a logical error happens or not? The `mwpm_decoder_toric` function is provided for you below. It will print the graph you are trying to optimize and return the solution. You need to fill in the function called `toric_distance` which returns the Manhattan grid distance between any two syndromes on the torus. \n", + "**Exercise 2:**\n", "\n", - "\n", + "Use the code provided to solve the MWPM problem for the two syndrome patterns below. Determine in each case if a logical error happens or not? The `mwpm_decoder_toric` function is provided for you below. It will print the graph you are trying to optimize and return the solution. You need to fill in the function called `toric_distance` which returns the Manhattan grid distance between any two syndromes on the torus.\n", "\n", - "\n", - "

\n", - "
\n", - "\n" + "\"First\n", + "\n", + "\"Second\n", + "\n", + "
" ] }, { @@ -461,41 +494,44 @@ "metadata": {}, "outputs": [], "source": [ - "def toric_distance(u, v, L):\n", - "\n", + "# EXERCISE 2\n", + "def toric_distance(u: list[int], v: list[int], L: int) -> int:\n", " \"\"\"\n", - " Function that computes Manhattan distance between two flagged syndromes on toric grid\n", - " \n", + " Computes Manhattan distance between two flagged syndromes on toric grid.\n", + "\n", + " Parameters\n", " ----------\n", - " u: list[int]\n", - " x and y corrdinate of first syndrome\n", - " v: list[int]\n", - " x and y corrdinate of second syndrome.\n", - " L: int\n", - " Size of toric surface\n", + " u : list[int]\n", + " x and y coordinate of first syndrome.\n", + " v : list[int]\n", + " x and y coordinate of second syndrome.\n", + " L : int\n", + " Size of toric surface.\n", "\n", " Returns\n", - " list of syndromes pairs obtained by MWPM\n", " -------\n", + " int\n", + " Manhattan distance between two syndrome locations on the torus.\n", " \"\"\"\n", + " ##TODO## Complete the function\n", "\n", - " # TODO - Complete the function\n", - "\n", - "def mwpm_decoder_toric(flagged_stabilizers, L):\n", "\n", + "def mwpm_decoder_toric(flagged_stabilizers: list[tuple[int, int]], L: int) -> list[tuple[tuple[int, int], tuple[int, int]]]:\n", " \"\"\"\n", - " Function that takes a list of syndrome locations and returns MWPM result\n", + " Takes a list of syndrome locations and returns MWPM result.\n", + "\n", + " Parameters\n", " ----------\n", - " u: list[list[int]]\n", - " list of lists containing flagged syndrome locations\n", - " L: int\n", - " Size of toric surface\n", + " flagged_stabilizers : list[tuple[int, int]]\n", + " List of (x, y) coordinates of flagged syndrome locations.\n", + " L : int\n", + " Size of toric surface.\n", "\n", " Returns\n", - " interger Manhattan distance between two syndrome locations\n", " -------\n", + " list[tuple[tuple[int, int], tuple[int, int]]]\n", + " List of syndrome pairs obtained by MWPM.\n", " \"\"\"\n", - 
"\n", " G = nx.Graph()\n", " # Add each flagged stabilizer as a node\n", " for i, coord in enumerate(flagged_stabilizers):\n", @@ -516,7 +552,7 @@ " plt.title(\"Toric Code Graph (Distances on Edges)\")\n", " plt.show()\n", "\n", - " #Performs MWPM\n", + " # Performs MWPM\n", " matching_indices = nx.min_weight_matching(G)\n", "\n", " # Convert node indices back to stabilizer coordinates for clarity\n", @@ -525,11 +561,12 @@ " matching_solution.append((G.nodes[i]['pos'], G.nodes[j]['pos']))\n", " return matching_solution\n", "\n", - "flagged_1 = [(2, 1), (1, 3), (2, 4), (1, 6), (4,5), (5,2)]\n", - "flagged_2 = [(1, 0), (1, 6), (2, 1), (3, 4), (5,3), (4,6)]\n", + "\n", + "flagged_1 = [(2, 1), (1, 3), (2, 4), (1, 6), (4, 5), (5, 2)]\n", + "flagged_2 = [(1, 0), (1, 6), (2, 1), (3, 4), (5, 3), (4, 6)]\n", "L = 7\n", "\n", - "print(\"MWPM solution 1:\",mwpm_decoder_toric(flagged_1, L))\n", + "print(\"MWPM solution 1:\", mwpm_decoder_toric(flagged_1, L))\n", "print(\"MWPM solution 2:\", mwpm_decoder_toric(flagged_2, L))" ] }, @@ -538,34 +575,42 @@ "id": "5ff67ed2-75ba-4f5f-b496-b93be15e1703", "metadata": {}, "source": [ - "## 6.7 The Planar and Surface Codes ##\n", + "---\n", "\n", - "One of the biggest problems with the toric code is that it is not very practical. Some qubit modalities could utilize the toric code, but many modalities such as superconducting cannot as there is no easy way to map a torus to qubits arranged on a plane. This requires some modifications to produce the famous surface code that is much more practical and the basis for many experimental QEC demonstrations today. \n", + "## 6.7. The Planar and Surface Codes\n", "\n", + "One of the biggest problems with the toric code is that it is not very practical. Some qubit modalities could utilize the toric code, but many modalities such as superconducting cannot as there is no easy way to map a torus to qubits arranged on a plane. 
This requires some modifications to produce the famous surface code that is much more practical and the basis for many experimental QEC demonstrations today.\n", "\n", - "The figure below shows the steps to get there. First, the toric code is transformed into the planar code by eliminating the periodic boundary conditions and capping one pair of parallel slides to form a smooth edge. The other edge is called a rough edge. This allows the code to now lie in a plane which is much more suitable for hardware implementation. \n", + "The figure below shows the steps to get there. First, the toric code is transformed into the **planar code** by eliminating the periodic boundary conditions and capping one pair of parallel sides to form a smooth edge. The other edge is called a rough edge. This allows the code to now lie in a plane which is much more suitable for hardware implementation.\n", "\n", - "\n", + "\"Three-step\n", "\n", - "Changing the topology does impact the logical encoding. Now it is not possible to form loops around the smooth edges (top to bottom) resulting in the loss of a degree of freedom and therefore a logical qubit. \n", + "Changing the topology does impact the logical encoding. Now it is not possible to form loops around the smooth edges (top to bottom) resulting in the loss of a degree of freedom and therefore a logical qubit.\n", "\n", - "It is possible to improve on the planar code with a few transformations that allow encoding of a logical qubit with the same distance but using fewer data qubits. If you place a green or purple square on each vertex and plaquette stabilizer, respectively, and then rotate by 45 degrees, the basis for the so called rotated surface code is formed. \n", + "It is possible to improve on the planar code with a few transformations that allow encoding of a logical qubit with the same distance but using fewer data qubits. 
If you place a green or purple square on each vertex and plaquette stabilizer, respectively, and then rotate by 45 degrees, the basis for the so called **rotated surface code** is formed.\n", "\n", - "Special boundary 2-qubit stabilizers are added around the faces of the surface to finish the code layout. Previously all error chains flagged exactly two stabilizers. Now, if an error chain starts on the boundary, it is possible to only flag a single stabilizer. Thus, this must be taken into account when decoding.\n", + "Special boundary 2-qubit stabilizers are added around the faces of the surface to finish the code layout. Previously all error chains flagged exactly two stabilizers. Now, if an error chain starts on the boundary, it is possible to only flag a single stabilizer. Thus, this must be taken into account when decoding.\n", "\n", - "The rotated surface code can now encode a single logical qubit with distance $L$ using $L^2$ fewer qubits. \n", + "The rotated surface code can now encode a single logical qubit with distance $L$ using $L^2$ fewer qubits.\n", "\n", - "## Summary ##\n", + "## Conclusion\n", "\n", - "Topological codes are central to both the history of QEC and much of the current research literature. after completing this lab, you should have a better understanding of what a topological code is, why it is used, and a basic understanding of their inner workings. In addition, coding the toric code in CUDA-Q provides deeper insight into the challenges of implementing such a code. " + "Topological codes are central to both the history of QEC and much of the current research literature. After completing this lab, you should have a better understanding of what a topological code is, why it is used, and a basic understanding of their inner workings. In addition, coding the toric code in CUDA-Q provides deeper insight into the challenges of implementing such a code." 
+ ] + }, + { + "cell_type": "markdown", + "id": "320783f27cb04eca", + "metadata": {}, + "source": [ + "**Related Notebooks:**\n", + "* [QEC 101: Lab 5 — Magic State Distillation](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/05_QEC_MSD.ipynb) — previous lab in the QEC 101 series covering magic state distillation\n", + "* [QEC 101: Lab 7 — qLDPC Codes](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/07_QEC_qLDPC.ipynb) — next lab exploring quantum low-density parity-check codes\n", + "* [QEC 101: Lab 4 — Decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb) — covers decoder fundamentals used in this lab's MWPM exercise" ] } ], "metadata": { - "colab": { - "include_colab_link": true, - "provenance": [] - }, "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", @@ -579,9 +624,22 @@ "file_extension": ".py", "mimetype": "text/x-python", "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.10.0" + }, + "learning_goals": { + "cfqt_domain": "QCS", + "cfqt_subdomain": [ + "QCS.ERR", + "QCS.SW", + "QCS.ALG" + ], + "cfqt_proficiency": "B1", + "qist_competency_areas": [ + "quantum_error_correction", + "quantum_software", + "quantum_algorithms" + ], + "application_domain": "error_correction" } }, "nbformat": 4, diff --git a/qec101/07_QEC_qLDPC.ipynb b/qec101/07_QEC_qLDPC.ipynb index 09ce96e..217411d 100644 --- a/qec101/07_QEC_qLDPC.ipynb +++ b/qec101/07_QEC_qLDPC.ipynb @@ -1,33 +1,105 @@ { "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "a1b74c6a4abe4136", + "metadata": {}, + "outputs": [], + "source": [ + "# SPDX-License-Identifier: Apache-2.0 AND CC-BY-NC-4.0\n", + "#\n", + "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# 
http://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." + ] + }, { "cell_type": "markdown", "id": "eaa3992e-c095-4bd7-9f14-60abd0740b64", "metadata": {}, "source": [ - "# QEC 101 Lab 7: qLDPC Codes #\n", - "\n", - "\n", - "One of the most promising classes of QEC codes are so called quantum low density parity check (qLDPC) codes. These codes are quite general and include well known codes like the surface code. This lab will walk through the basics of classical LDPC codes, the challenges that arise when moving to qLDPC codes, and how to construct valid qLDPC codes with favorable properties. You will eventually implement techniques from \"[Lift-Connected Surface Codes](https://arxiv.org/abs/2401.02911)\" connecting what you have leaned to state-of-the-art research.\n", - "\n", - "**Prerequisites:** This lab assumes you have a moderate knowledge of QEC and have completed the core QEC 101 courses (labs 1-4), especially the labs covering [stabilizers](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) and [decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb). 
\n", - "\n", - "The list below outlines what you'll be doing in each section of this lab:\n", - "* 7.1 Learn the basics of classical LDPC codes and how to analyze their properties.\n", - "* 7.2 Learn why quantum LDPC codes are challenging to construct and how to build hypergraph product (HGP) codes.\n", - "* 7.3 Extend the HGP procedure to produce larger qLDPC codes with improved properties.\n", - "* 7.4 Compare the quality of the codes you created using the NVIDIA BP+OSD decoder.\n", - "\n", - "Terminology you will use:\n", - "* low density parity check, encoding rate, degree\n", - "* hypergraph product\n", - "* lifted product\n", - "* circulants\n", - "\n", + "# QEC 101 Lab 7: qLDPC Codes\n", + "\n", + "---\n", + "\n", + "**What You Will Do:**\n", + "* Analyze classical LDPC code properties including node degrees, encoding rate, and four-cycles\n", + "* Construct quantum LDPC parity check matrices using the Hypergraph Product (HGP) construction\n", + "* Extend the HGP procedure to build Lifted Product (LP) codes with improved code distance\n", + "* Compare LP surface codes against surface code copies using the CUDA-Q QEC BP+OSD decoder\n", + "\n", + "**Prerequisites:**\n", + "* Python and Jupyter familiarity\n", + "* Basic knowledge of quantum computing (qubits, gates, measurement)\n", + "* Completion of QEC 101 Labs 1–4 (especially [Stabilizers](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) and [Decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb))\n", + "* Familiarity with parity check matrices and Tanner graphs\n", + "* Basic linear algebra (matrix multiplication, Kronecker product, rank)\n", + "\n", + "**Key Terminology:**\n", + "* Low Density Parity Check (LDPC)\n", + "* Encoding rate\n", + "* Variable node degree / Check node degree\n", + "* Tanner graph\n", + "* Hypergraph Product (HGP)\n", + "* Lifted Product (LP)\n", + "* Circulant\n", + "* Shannon limit\n", + "* CSS code\n", + 
"* Belief Propagation (BP)\n", + "\n", + "**CUDA-Q Syntax:**\n", + "* [`cudaq_qec`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — CUDA-Q Quantum Error Correction library\n", + "\n", + "**Solutions:** [`Solutions/07_QEC_qLDPC_Solution.ipynb`](Solutions/07_QEC_qLDPC_Solution.ipynb)" + ] + }, + { + "cell_type": "markdown", + "id": "593be360231e4a4e", + "metadata": {}, + "source": [ + "
\n", "\n", - "qLDPC codes have a number of favorable properties that make them promising for deployment within nearer term fault tolerant workflows.\n", + "**⚡ GPU Required:** Parts of this notebook require a GPU.\n", "\n", - "First run the cell below to prepare the necessary libraries." + "
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ac9b4cafa1834f54", + "metadata": {}, + "outputs": [], + "source": [ + "## Instructions for Google Colab. You can ignore this cell if you have CUDA-Q\n", + "## set up locally with all required files on your system.\n", + "## Uncomment the lines below and execute this cell to install CUDA-Q.\n", + "\n", + "#!pip install cudaq -q\n", + "#\n", + "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", + "#!unzip -q main.zip\n", + "#!mv cuda-q-academic-main/qec101/Images ./Images" + ] + }, + { + "cell_type": "markdown", + "id": "2046baf8b8d846d7", + "metadata": {}, + "source": [ + "> **Note:** Run the cell below to import all required packages.\n", + "> If you installed packages above, restart the kernel first\n", + "> (**Runtime → Restart session** in Colab, or **Kernel → Restart** in Jupyter)." ] }, { @@ -37,27 +109,19 @@ "metadata": {}, "outputs": [], "source": [ - "import sys\n", - "\n", - "try:\n", - " import time\n", - " import cudaq_qec as qec\n", - " import galois\n", - " import cudaq_qec\n", - " import ipywidgets as widgets\n", - " import numpy as np\n", - " from IPython.display import display\n", - " from itertools import combinations\n", - " from scipy.sparse import csr_matrix\n", - "\n", - "\n", - "except ImportError:\n", - " print(\"Tools not found, installing. Please restart your kernel after this is done.\")\n", - " !{sys.executable} -m pip install --upgrade pip\n", - " !{sys.executable} -m pip install galois\n", - " !{sys.executable} -m pip install cudaq-qec\n", - " !{sys.executable} -m pip install ipywidgets\n", - " print(\"\\nNew libraries have been installed. 
Please restart your kernel!\")" + "import time\n", + "from itertools import combinations\n", + "\n", + "import numpy as np\n", + "from scipy.sparse import csr_matrix\n", + "\n", + "import galois\n", + "import ipywidgets as widgets\n", + "from IPython.display import display\n", + "\n", + "## To install cudaq-qec (if not already installed), uncomment and run:\n", + "## !pip install cudaq-qec -q\n", + "import cudaq_qec as qec" ] }, { @@ -65,42 +129,48 @@ "id": "2fb1f3e2-91eb-493a-9272-c54b6599d068", "metadata": {}, "source": [ - "## 7.1 Classical LDPC Codes ##\n", + "---\n", "\n", - "Robert Gallager first conceived of low density parity check (LDPC) codes in his [1960 MIT dissertation](https://dspace.mit.edu/handle/1721.1/11804) but it was underappreciated at the time and interest resurged in the 90's as other error correction codes rose in prominence. LDPC codes are now used widely in telecommunications and computer memory applications\n", + "## 7.1. Classical LDPC Codes\n", "\n", - "An LDPC code is a classical parity check error correction code with a sparse parity check matrix $H$. The parity check matrix is often represented by a Tanner graph, which was introduced in lab 4 on decoders. A Tanner graph is drawn with check nodes on the top row and variable nodes on the bottom. The Tanner graph for the Steane code is shown below. \n", + "Robert Gallager first conceived of **Low Density Parity Check (LDPC)** codes in his [1960 MIT dissertation](https://dspace.mit.edu/handle/1721.1/11804) but it was underappreciated at the time and interest resurged in the 90's as other error correction codes rose in prominence. LDPC codes are now used widely in telecommunications and computer memory applications.\n", "\n", - "\"Drawing\"\n", + "An LDPC code is a classical parity check error correction code with a sparse parity check matrix $H$. The parity check matrix is often represented by a **Tanner graph**, which was introduced in lab 4 on decoders. 
A Tanner graph is drawn with check nodes on the top row and variable nodes on the bottom. The Tanner graph for the Steane code is shown below.\n", "\n", - "A sparse $H$ means that each variable and check node only connects to a limited number of other nodes. The **variable node degree** characterizes the maximum number of checks any (q)bit is involved in while the and **check node degree** characterizes the maximum number of (q)bits involved in any given check. Ideally, these two values are as small as possible to maintain low density.\n", + "\"Tanner\n", + "\n", + "A sparse $H$ means that each variable and check node only connects to a limited number of other nodes. The **variable node degree** characterizes the maximum number of checks any (q)bit is involved in while the **check node degree** characterizes the maximum number of (q)bits involved in any given check. Ideally, these two values are as small as possible to maintain low density.\n", "\n", "A second important property is the **encoding rate** ($r$).\n", "\n", "$$ r = \\frac{k}{n+c} $$\n", "\n", - "Where, $k$ is the number of encoded logical bits, $n$ is the number of data bits, and $c$ is the number of check bits. A high encoding rate is good and means that many logical bits can be encoded with a lower overhead. However, this competes with other properties like the code distance - i.e. the ability to correctly capture errors.\n", + "Where, $k$ is the number of encoded logical bits, $n$ is the number of data bits, and $c$ is the number of check bits. A high encoding rate is good and means that many logical bits can be encoded with a lower overhead. However, this competes with other properties like the code distance - i.e. the ability to correctly capture errors.\n", "\n", - "What $k$ is depends on the number of linearly independent constraints. To determine this, perform Gaussian elimination over GF(2). GF(2) comes from the world of abstract algebra and corresponds to a field of two elements. 
Essentially, this just means integer math governed by mod 2 arithmetic. The Gaussian elimination result can be used to determine rank($H$) which is related to $k$ by \n", + "What $k$ is depends on the number of linearly independent constraints. To determine this, perform Gaussian elimination over GF(2). GF(2) comes from the world of abstract algebra and corresponds to a field of two elements. Essentially, this just means integer math governed by mod 2 arithmetic. The Gaussian elimination result can be used to determine rank($H$) which is related to $k$ by\n", "\n", - "$$ k = n - \\mathrm{rank(}H\\mathrm{}) $$\n", + "$$ k = n - \\mathrm{rank(}H\\mathrm{)} $$\n", "\n", "\n", - "A final characteristic of a desirable LDPC code is how suited it is for decoding. Common decoders like belief propagation (BP) can struggle when the Tanner graph has 4-cycles. These form local loops (see image below) which can make it hard for the decoder to converge to a solution.\n", + "A final characteristic of a desirable LDPC code is how suited it is for decoding. Common decoders like **belief propagation (BP)** can struggle when the Tanner graph has 4-cycles. These form local loops (see image below) which can make it hard for the decoder to converge to a solution.\n", "\n", - "\"Drawing\"\n", + "\"Diagram\n", "\n", - "In most cases, it turns out that LDPC codes are very easy to generate. Random generation of $H$ usually produces a good LDPC code. This also provides flexibility as new codes can be generated as needed depending on the problem at hand. Randomly generated codes also perform well and produce results close to the Shannon limit, that is the theoretical maximum of information that can pass through a noisy channel.\n", + "In most cases, it turns out that LDPC codes are very easy to generate. Random generation of $H$ usually produces a good LDPC code. This also provides flexibility as new codes can be generated as needed depending on the problem at hand. 
Randomly generated codes also perform well and produce results close to the **Shannon limit**, that is the theoretical maximum of information that can pass through a noisy channel." + ] + }, + { + "cell_type": "markdown", + "id": "2fb1f3e2-91eb-493a-9272-c54b6599d068ex1", + "metadata": {}, + "source": [ + "
\n", "\n", + "**Exercise 1:**\n", "\n", - "
\n", - "

Exercise 1:

\n", - "

\n", - "Given the three parity check matrices below, write a function to analyze them and determine the check and variable node degrees, the encoding rate, the indices of any four cycles, and if any nodes are unchecked.\n", - "

\n", - "
\n", + "Given the three parity check matrices below, write a function to analyze them and determine the check and variable node degrees, the encoding rate, the indices of any four-cycles, and if any nodes are unchecked.\n", "\n", - "\n" + "
" ] }, { @@ -110,6 +180,8 @@ "metadata": {}, "outputs": [], "source": [ + "# EXERCISE 1\n", + "\n", "H1 = np.array([\n", " [1,1,0,0,1,0,0,0],\n", " [1,1,0,0,0,1,0,0],\n", @@ -125,105 +197,82 @@ "\n", "\n", "H3 = np.array([\n", - " [1,0,0,1,0,1,0,0,0,1, 0, 0, 1, 0, 0, 0], \n", + " [1,0,0,1,0,1,0,0,0,1, 0, 0, 1, 0, 0, 0],\n", " [0,1,0,0,1,0,1,0,0,0, 1, 0, 0, 0, 0, 0],\n", - " [0,0,1,0,0,0,0,1,0,0, 0, 1, 0, 1, 1, 0], \n", - " [1,0,0,0,1,0,0,0,1,0, 0, 0, 0, 1, 0, 0], \n", - " [0,0,0,0,0,0,1,0,1,0, 0, 1, 1, 0, 0, 0], \n", - " [0,0,1,0,0,1,0,0,0,0, 1, 0, 0, 0, 0, 1] \n", + " [0,0,1,0,0,0,0,1,0,0, 0, 1, 0, 1, 1, 0],\n", + " [1,0,0,0,1,0,0,0,1,0, 0, 0, 0, 1, 0, 0],\n", + " [0,0,0,0,0,0,1,0,1,0, 0, 1, 1, 0, 0, 0],\n", + " [0,0,1,0,0,1,0,0,0,0, 1, 0, 0, 0, 0, 1]\n", "], dtype=int)\n", "\n", "\n", - "def degrees(H):\n", - " \"\"\" \n", - " function which computes the degrees of a parity check matrix\n", - " \n", + "def degrees(H: np.ndarray) -> tuple[np.ndarray, np.ndarray]:\n", + " \"\"\"\n", + " Compute the degrees of a parity check matrix.\n", + "\n", " Args:\n", - " H (np.array): parity check matrix\n", + " H: parity check matrix\n", "\n", " Returns:\n", - " (list): list of degrees for each variable bit\n", - " (list): list of degrees for each check bit\n", + " Tuple of (variable node degrees, check node degrees)\n", " \"\"\"\n", - " \n", - " #TODO - Complete the function\n", - " \n", - "\n", - "def unchecked_vars(H):\n", - " \"\"\" \n", - " function which identifies any unchecked variable bit\n", - " \n", + " ##TODO##\n", + "\n", + "\n", + "def unchecked_vars(H: np.ndarray) -> np.ndarray:\n", + " \"\"\"\n", + " Identify any unchecked variable bits.\n", + "\n", " Args:\n", - " H (np.array): parity check matrix\n", + " H: parity check matrix\n", "\n", " Returns:\n", - " (list): list of unchecked variable bits\n", + " Array of unchecked variable bit indices\n", " \"\"\"\n", - " #TODO - complete the function\n", - " \n", + " ##TODO##\n", + "\n", + "\n", + "def four_cycles(H: 
np.ndarray) -> list[tuple[int, int, int, int]]:\n", + " \"\"\"\n", + " Identify any four-cycles in a parity check matrix.\n", "\n", - "def four_cycles(H):\n", - " \"\"\" \n", - " function which identifies any four-cycles in a parity check matrix\n", - " \n", " Args:\n", - " H (np.array): parity check matrix\n", + " H: parity check matrix\n", "\n", " Returns:\n", - " (list): list of nodes involved in a 4-cycle.\n", + " List of (var_i, var_j, check_p, check_q) tuples involved in 4-cycles\n", " \"\"\"\n", - " #TODO - complete the function\n", + " ##TODO##\n", + "\n", + "\n", + "def encoding_rate(H: np.ndarray) -> tuple[float, int]:\n", + " \"\"\"\n", + " Compute the encoding rate based on rank of H.\n", + " Uses galois for GF2 field definition to ensure computation is correct.\n", "\n", - "def encoding_rate(H):\n", - " \"\"\" \n", - " function which computes the encoding rate based on rank of H.\n", - " Note: Must use galois for GF2 field definition to ensure computation is correct\n", - " \n", " Args:\n", - " H (np.array): parity check matrix\n", + " H: parity check matrix\n", "\n", " Returns:\n", - " (float): encoding rate\n", + " Tuple of (encoding rate, number of logical bits k)\n", " \"\"\"\n", " GF2 = galois.GF(2)\n", " Hgf2 = GF2(H)\n", " n = Hgf2.shape[1]\n", - " rank = np.linalg.matrix_rank(Hgf2) \n", + " rank = np.linalg.matrix_rank(Hgf2)\n", " k = n - rank\n", " return k / n, k\n", "\n", "\n", - "def analyze(H, name): \n", - " \"\"\" \n", - " Function that organizes and prints results from previous functions\n", - " \n", - " Args:\n", - " H (np.array): parity check matrix\n", - " name (str): name of the parity chexk matrix\n", - "\n", - " Returns:\n", + "def analyze(H: np.ndarray, name: str) -> None:\n", " \"\"\"\n", - " vdeg, cdeg = degrees(H)\n", - " R, k = encoding_rate(H)\n", - " cycles = four_cycles(H)\n", - " unchk = unchecked_vars(H)\n", - "\n", - " print(f'\\n{name}:')\n", - " print(' variable degrees:', vdeg.tolist())\n", - " print(' check 
degrees:', cdeg.tolist())\n", - " print(f' rate = {R:.3f} (k = {k})')\n", - "\n", - " if cycles:\n", - " print(' 4‑cycles:')\n", - " for i, j, p, q in cycles:\n", - " print(f' vars ({i},{j}) rows ({p},{q})')\n", - " else:\n", - " print(' no 4‑cycles')\n", + " Organize and print analysis results for a parity check matrix.\n", "\n", - " if unchk.size:\n", - " print(' unchecked variables:', unchk.tolist())\n", - " else:\n", - " print(' all variables are checked')\n", + " Args:\n", + " H: parity check matrix\n", + " name: name of the parity check matrix\n", + " \"\"\"\n", + " ##TODO##\n", "\n", "\n", "for H, name in [(H1, 'H1'), (H2, 'H2'), (H3, 'H3')]:\n", @@ -235,15 +284,17 @@ "id": "9861bfe7-5840-47ed-a963-23fe56635c80", "metadata": {}, "source": [ - "## 7.2 Quantum LDPC ##\n", + "---\n", "\n", - "qLDPC codes have many similarities to their classical counterparts, particularly with respect to terms like encoding rate and degree. Unfortunately, a major difference is that valid qLDPC codes with favorable properties cannot be produced by randomly generating parity check matrices. This is because the $Z$ and $X$ parity check matrices ($H_Z$ and $H_X$) must commute ($H_ZH^T_X=0$) for a valid CSS code that can correct both types of errors. \n", + "## 7.2. Quantum LDPC\n", "\n", - "The probability of randomly producing parity check matrices that commute is vanishingly small, let alone exhibit favorable properties. Cutting edge research focused on qLDPC codes is determined to find clever ways to produce quality parity check matrices that meet these constraints. \n", + "qLDPC codes have many similarities to their classical counterparts, particularly with respect to terms like encoding rate and degree. Unfortunately, a major difference is that valid qLDPC codes with favorable properties cannot be produced by randomly generating parity check matrices. 
This is because the $Z$ and $X$ parity check matrices ($H_Z$ and $H_X$) must commute ($H_ZH^T_X=0$) for a valid **CSS code** that can correct both types of errors.\n", "\n", - "One particularly insightful approach is using [so called hypergraph product codes](https://arxiv.org/pdf/2401.02911). The idea is to take two \"good\" ( in this case a technical term meaning the codes distance scales as $n$) classical parity check matrices $H_1$ ($m_1\\times n_1$) and $H_2$ ($m_2\\times n_2$) and combine them in such a way that $H_Z$ and $H_X$ commute (i.e. $H_ZH_X^T=0$) and the resulting codes have a constant encoding rate and a distance that scales proportionally to the square root of the number of data qubits.\n", + "The probability of randomly producing parity check matrices that commute is vanishingly small, let alone exhibit favorable properties. Cutting edge research focused on qLDPC codes is determined to find clever ways to produce quality parity check matrices that meet these constraints.\n", "\n", - "The procedure works by defining the final parity check matrix $H$ as a block encoding of $H_Z$ and $H_X$. \n", + "One particularly insightful approach is using [so called **hypergraph product (HGP)** codes](https://arxiv.org/pdf/2401.02911). The idea is to take two \"good\" (in this case a technical term meaning the codes distance scales as $n$) classical parity check matrices $H_1$ ($m_1\\times n_1$) and $H_2$ ($m_2\\times n_2$) and combine them in such a way that $H_Z$ and $H_X$ commute (i.e. $H_ZH_X^T=0$) and the resulting codes have a constant encoding rate and a distance that scales proportionally to the square root of the number of data qubits.\n", + "\n", + "The procedure works by defining the final parity check matrix $H$ as a block encoding of $H_Z$ and $H_X$.\n", "\n", "\n", "$$\n", @@ -292,7 +343,7 @@ "= 2(H_1 \\otimes H_2^T) = 0\n", "$$\n", "\n", - "It may not be clear at first why the final term equals zero. 
Recall that all operations with parity check matrices occur mod 2. So, taking any binary matrix and multiplying it by 2, will make every entry 0 or 2 = 0 mod 2. " + "It may not be clear at first why the final term equals zero. Recall that all operations with parity check matrices occur mod 2. So, taking any binary matrix and multiplying it by 2, will make every entry 0 or 2 = 0 mod 2." ] }, { @@ -300,10 +351,11 @@ "id": "24e40d1e-d6f1-4cfa-a066-ad6e2141c27b", "metadata": {}, "source": [ - "
\n", - "

Exercise 2:

\n", - "

\n", - "Construct a hypergraph product code using a pair of three-qubit repetition code base matrices. That is, begin with:\n", + "

\n", + "\n", + "**Exercise 2:**\n", + "\n", + "Construct a hypergraph product code using a pair of three-qubit repetition code base matrices. That is, begin with:\n", "\n", "$$H_1 =H_2 =\\begin{pmatrix}\n", "1&1&0\\\\\n", @@ -311,10 +363,9 @@ "\\end{pmatrix}\n", "$$\n", "\n", - "Build the parity check matrices for $H_Z$ and $H_X$ and confim they commute. Note: the `galois` package is used to define the matrices over a Galois field (GF2), which ensures modular arithmetic is baked in to your computations. All operations can be performed just like you would with `numpy`.\n", - "

\n", - "
\n", - "\n" + "Build the parity check matrices for $H_Z$ and $H_X$ and confirm they commute. Note: the `galois` package is used to define the matrices over a Galois field (GF2), which ensures modular arithmetic is baked in to your computations. All operations can be performed just like you would with `numpy`.\n", + "\n", + "
" ] }, { @@ -324,59 +375,59 @@ "metadata": {}, "outputs": [], "source": [ + "# EXERCISE 2\n", + "\n", "H = np.array([[1,1,0],\n", " [0,1,1]]) #Using H as H1 = H2\n", "\n", - "def HGP(H):\n", - " \"\"\" \n", - " Function which takes classical base parity check matricies and performs hypergraph product construction\n", - " \n", + "def HGP(H: np.ndarray) -> tuple[np.ndarray, np.ndarray]:\n", + " \"\"\"\n", + " Perform hypergraph product construction from a base parity check matrix.\n", + "\n", " Args:\n", - " H (np.array): Base parity check matrix\n", + " H: Base parity check matrix\n", "\n", " Returns:\n", - " Hz (np.array): Hz matrix from HGP construction\n", - " Hx (np.array): Hx matrix from HGP construction\n", + " Tuple of (Hz, Hx) matrices from HGP construction\n", " \"\"\"\n", - " #TODO build the I matricies\n", - " \n", - " # Constructs a Galois field and updates you matricies at Galois field.\n", - " GF2 = galois.GF(2) \n", - " \n", - " H = GF2(H) \n", + " ##TODO## Build the identity matrices I_rows and I_cols\n", + "\n", + " # Constructs a Galois field and updates your matrices as Galois field.\n", + " GF2 = galois.GF(2)\n", + "\n", + " H = GF2(H)\n", " I_rows = GF2(I_rows)\n", " I_cols = GF2(I_cols)\n", "\n", - " \n", " print(\"First term in Hx\")\n", - " Hx_a = #TODO\n", + " Hx_a = ##TODO##\n", " print(Hx_a)\n", - " \n", + "\n", " print(\"\\n Second term in Hx\")\n", - " Hx_b = #TODO\n", + " Hx_b = ##TODO##\n", " print(Hx_b)\n", - " \n", + "\n", " print(\"\\n Full Hx\")\n", - " Hx = #TODO\n", + " Hx = ##TODO##\n", " print(Hx)\n", - " \n", + "\n", " print(\"\\nFirst term in Hz\")\n", - " Hz_a = #TODO\n", + " Hz_a = ##TODO##\n", " print(Hz_a)\n", - " \n", + "\n", " print(\"\\n Second term in Hz\")\n", - " Hz_b = #TODO\n", + " Hz_b = ##TODO##\n", " print(Hz_b)\n", - " \n", + "\n", " print(\"\\n Full Hz\")\n", - " Hz = #TODO\n", + " Hz = ##TODO##\n", " print(Hz)\n", - " \n", + "\n", " print(\"\\n Hz times HxT\")\n", " print(Hz @ Hx.T)\n", "\n", " return Hz, 
Hx\n", - " \n", + "\n", "HGP(H)" ] }, @@ -385,9 +436,9 @@ "id": "8ef0283b-69bc-43e5-ae84-46247cbaab6d", "metadata": {}, "source": [ - "It turns out there is a nice visual interpretation of the hypergraph product code you just generated if the Tanner graphs form a multiplication table of sorts. Each node of the product tanner graph that is the product of a check qubit with a check qubit or a data qubit with a data qubit produces a data qubit. If the top Tanner graph is a circle (data qubit) and the left Tanner graph a square (check qubit), the result is an $X$ stabilizer check. Likewise, if the top Tanner graph is a square and the left Tanner graph a circle, the result is a $Z$ parity check, producing the tanner graph below. Does it look familiar?\n", + "It turns out there is a nice visual interpretation of the hypergraph product code you just generated if the Tanner graphs form a multiplication table of sorts. Each node of the product Tanner graph that is the product of a check qubit with a check qubit or a data qubit with a data qubit produces a data qubit. If the top Tanner graph is a circle (data qubit) and the left Tanner graph a square (check qubit), the result is an $X$ stabilizer check. Likewise, if the top Tanner graph is a square and the left Tanner graph a circle, the result is a $Z$ parity check, producing the Tanner graph below. Does it look familiar?\n", "\n", - "\"Drawing\"\n", + "\"Hypergraph\n", "\n", "\n", "Remarkably, it turns out the product of two size $l$ classical repetition codes is a [[ $(l+1)^2 + l^2$, $1$, $l+1)$]] surface code! This is a great example demonstrating how two very simple classical codes can construct a more sophisticated quantum code which obeys the required commutativity constraints." 
@@ -398,20 +449,28 @@ "id": "b0e5273a-901c-4bb4-a3b3-9e92bc517ad6", "metadata": {}, "source": [ - "## 7.3 Generalizing HGP - Lifted Product Codes\n", + "---\n", "\n", - "It is possible to build upon the HGP method in a more general manner where products are taken between two parity check matrices that have non-integer entries. Such an approach is called a **lifted product (LP)** as the elements of the parity check matrix are \"lifted\" to higher order elements. LP codes can often retain the degree of checks and provide higher distance codes with a smaller qubit overhead. \n", + "## 7.3. Generalizing HGP — Lifted Product Codes\n", + "\n", + "It is possible to build upon the HGP method in a more general manner where products are taken between two parity check matrices that have non-integer entries. Such an approach is called a **lifted product (LP)** as the elements of the parity check matrix are \"lifted\" to higher order elements. LP codes can often retain the degree of checks and provide higher distance codes with a smaller qubit overhead.\n", "\n", "A LP construction still needs to ensure that $H_ZH_X^T=0$ holds as parity check matrices are modified to have non-integer elements. One way to ensure this is to replace parity check matrix elements with a commutative matrix ring, that is, a set of mathematical objects with properties that ensure multiplication of any elements commute, ensuring $H_ZH_X^T=0$ remains true (see the second term in the original proof of commutativity a few cells above). One example is $L \\times L$ **circulant** matrices which are defined as:\n", "\n", "$$ C = \\sum_{i=0}^{L-1} c_iP^{(i)} $$\n", "\n", - "Where $P^{(i)}$ are cyclic permutation matrices that shift columns of the identity matrix by $i$ spaces to the right and where $c_i$ can be either 0 or 1. The notation $B_L(P^{(i)})$ indicates the binary representation of matrix size $L$. 
\n", + "Where $P^{(i)}$ are cyclic permutation matrices that shift columns of the identity matrix by $i$ spaces to the right and where $c_i$ can be either 0 or 1. The notation $B_L(P^{(i)})$ indicates the binary representation of matrix size $L$." + ] + }, + { + "cell_type": "markdown", + "id": "0057cb5b712d4a08", + "metadata": {}, + "source": [ + "
\n", "\n", + "**Exercise 3:**\n", "\n", - "
\n", - "

Exercise 3:

\n", - "

\n", "Build the following binary matrix representations:\n", "\n", "* $B_4(P^{(2)})$\n", @@ -420,12 +479,8 @@ "I&0\\\\\n", "0&P^{(2)}\n", "\\end{pmatrix}$\n", - "

\n", - "
\n", - "\n", "\n", - "\n", - "\n" + "
" ] }, { @@ -435,13 +490,15 @@ "metadata": {}, "outputs": [], "source": [ - "arr1 = #TODO\n", + "# EXERCISE 3\n", + "\n", + "arr1 = ##TODO##\n", "\n", "\n", - "arr2 = #TODO\n", + "arr2 = ##TODO##\n", "\n", "\n", - "arr3 = #TODO\n" + "arr3 = ##TODO##" ] }, { @@ -449,25 +506,32 @@ "id": "bf2d23dc-5634-4072-bccf-ed4cc67ff8cc", "metadata": {}, "source": [ - "A LP code is built by taking a HGP of two parity check matrices where each 1 is, in this case, a circulant matrix $C$. A LP code of size $L$ will increase the number of qubits and checks by a factor of L. This is because each entry in the parity check matrix is \"lifted\" and replaced by a set of $L$ checks and qubits. \n", + "A LP code is built by taking a HGP of two parity check matrices where each 1 is, in this case, a circulant matrix $C$. A LP code of size $L$ will increase the number of qubits and checks by a factor of L. This is because each entry in the parity check matrix is \"lifted\" and replaced by a set of $L$ checks and qubits.\n", "\n", "The same HGP equations from the previous section hold, but instead, the parity check matrices are denoted with a tilde to note that their elements are circulants where each entry is otherwise a 1. That is to say, $H=LP(\\tilde{H}_1,\\tilde{H}_2) = B_L(\\tilde{H})$\n", "\n", - "The figure below, based on figure 2 of [Lift-Connected Surface Codes](https://arxiv.org/pdf/2401.02911), is helpful for understanding what the LP construction is doing. The overall procedure is similar. First, the base matrices are selected and are used in the same HGP procedure you did previously to form $\\tilde{H}_Z$ and $\\tilde{H}_X$. Then, each 1 in each parity check matrix is replaced with a circulant $C$. This simple swap takes select connections from the original Tanner graph, and lifts it to be replaced with a set of check and data qubits. 
\n", + "The figure below, based on figure 2 of [Lift-Connected Surface Codes](https://arxiv.org/pdf/2401.02911), is helpful for understanding what the LP construction is doing. The overall procedure is similar. First, the base matrices are selected and are used in the same HGP procedure you did previously to form $\\tilde{H}_Z$ and $\\tilde{H}_X$. Then, each 1 in each parity check matrix is replaced with a circulant $C$. This simple swap takes select connections from the original Tanner graph, and lifts it to be replaced with a set of check and data qubits.\n", "\n", - "\"Drawing\"\n", + "\"Diagram\n", "\n", - "If $C$ is simply the identity matrix $I$, the resulting LP codes is the result of the HGP code duplicated trivially $L$ times. Adding permutations to $C$ such as $I + P^{(1)}$ adds non-trivial checks between the HGP code copies. Notice the figure below (adapted figure 3 from [Lift-Connected Surface Codes](https://arxiv.org/pdf/2401.02911)) begins with the HGP that you performed earlier. Then, The LP construction creates four copies of the surface code and interconnects them with parity checks.\n", + "If $C$ is simply the identity matrix $I$, the resulting LP codes is the result of the HGP code duplicated trivially $L$ times. Adding permutations to $C$ such as $I + P^{(1)}$ adds non-trivial checks between the HGP code copies. Notice the figure below (adapted figure 3 from [Lift-Connected Surface Codes](https://arxiv.org/pdf/2401.02911)) begins with the HGP that you performed earlier. Then, the LP construction creates four copies of the surface code and interconnects them with parity checks.\n", "\n", - "\"Drawing\"\n", + "\"LP\n", "\n", - "It turns out that the LP surface code is a [[52,4,4]] code whereas four copies of the surface code would be [[52,4,3]]. This means that the encoding rate is the same, but the code distance improves thanks to the LP procedure. 
These sorts of clever constructions are driving qLDPC code research to continually improve code properties while maintaining the commutativity properties. \n", + "It turns out that the LP surface code is a [[52,4,4]] code whereas four copies of the surface code would be [[52,4,3]]. This means that the encoding rate is the same, but the code distance improves thanks to the LP procedure. These sorts of clever constructions are driving qLDPC code research to continually improve code properties while maintaining the commutativity properties.\n", "\n", - "Generally speaking, a LP surface code is parameterized with $l$ and $L$, where $l$ is the size of the base repetition code and $L$ is the number of copies produced by the lift procedure.\n", + "Generally speaking, a LP surface code is parameterized with $l$ and $L$, where $l$ is the size of the base repetition code and $L$ is the number of copies produced by the lift procedure." + ] + }, + { + "cell_type": "markdown", + "id": "a1bbfb033a5c4b55", + "metadata": {}, + "source": [ + "
\n", + "\n", + "**Exercise 4:**\n", "\n", - "
\n", - "

Exercise 4:

\n", - "

\n", "Build the [[52,4,4]] ( $l =2$ and $L =4$) LP surface code by performing the following steps. First, use the base matrix below which can be conveniently split into $H_{copy}$, which produces trivial copies of the surface code and ($H_{int}$), which interacts these surface code copies.\n", "\n", "$\n", @@ -499,18 +563,18 @@ "H =\\begin{pmatrix}\n", "1 & 1 & 0 \\\\\n", "0 & 1 & 1 \\\\\n", - "\\end{pmatrix} \n", + "\\end{pmatrix}\n", "$ and $H =\\begin{pmatrix}\n", "0 & 1 & 0 \\\\\n", "0 & 0 & 1 \\\\\n", - "\\end{pmatrix} \n", + "\\end{pmatrix}\n", "$. Lifting the first with $B_4(I)$ and the second with $B_4(P^{(1)})$ and summing the results will produce the parity check matrices for the [[52,4,4]] code.\n", "\n", - "Modify your HGP function to lift `Hx_a` and `Hz_a` with an arbitrary $B$ and `Hx_b` and `Hz_b` with the transpose of $B$. \n", + "Modify your HGP function to lift `Hx_a` and `Hz_a` with an arbitrary $B$ and `Hx_b` and `Hz_b` with the transpose of $B$.\n", "\n", - "Then build the [[52,4,4]] code by lifting H_copy and H_int (defined below) with $B_4(I)$ and $B_4(P^{(1)})$ , respectively. Confirm that Hz and Hx of the final result commute.\n", - "

\n", - "
\n" + "Then build the [[52,4,4]] code by lifting H_copy and H_int (defined below) with $B_4(I)$ and $B_4(P^{(1)})$, respectively. Confirm that Hz and Hx of the final result commute.\n", + "\n", + "
" ] }, { @@ -520,44 +584,45 @@ "metadata": {}, "outputs": [], "source": [ + "# EXERCISE 4\n", + "\n", "H_copy = np.array([[1,1,0],\n", - " [0,1,1]]) \n", + " [0,1,1]])\n", "\n", "B_I_4 = np.array([[1,0,0,0],\n", " [0,1,0,0],\n", " [0,0,1,0],\n", - " [0,0,0,1]]) \n", + " [0,0,0,1]])\n", "\n", "H_int = np.array([[0,1,0],\n", - " [0,0,1]]) \n", + " [0,0,1]])\n", "\n", "B_P1_4 = np.array([[0,1,0,0],\n", " [0,0,1,0],\n", " [0,0,0,1],\n", - " [1,0,0,0]]) \n", + " [1,0,0,0]])\n", "\n", "\n", "\n", - "def LP(H, B):\n", - " \"\"\" \n", - " Function which perfoms lifted product construction of base matrices H with lift matrix B\n", - " \n", + "def LP(H: np.ndarray, B: np.ndarray) -> tuple[np.ndarray, np.ndarray]:\n", + " \"\"\"\n", + " Perform lifted product construction of base matrices H with lift matrix B.\n", + "\n", " Args:\n", - " H (np.array): Base parity check matrix\n", - " B (np.array): Binary representation of lift matrix\n", + " H: Base parity check matrix\n", + " B: Binary representation of lift matrix\n", "\n", " Returns:\n", - " Hz (np.array): Hz matrix from HGP construction\n", - " Hx (np.array): Hx matrix from HGP construction\n", + " Tuple of (Hz, Hx) matrices from LP construction\n", " \"\"\"\n", "\n", " rows, cols = H.shape\n", "\n", " I_rows = np.eye(rows, dtype=int)\n", " I_cols = np.eye(cols, dtype=int)\n", - " \n", + "\n", " GF2 = galois.GF(2) # allows mod 2 math.\n", - " \n", + "\n", " H = GF2(H)\n", " I_rows = GF2(I_rows, dtype=int)\n", " I_cols = GF2(I_cols, dtype=int)\n", @@ -568,47 +633,47 @@ " print(Hx_a)\n", "\n", " print(\"First term in Hx lifted\")\n", - " Hx_a = # TODO\n", + " Hx_a = ##TODO##\n", " print(Hx_a)\n", - " \n", + "\n", " print(\"\\n Second term in Hx\")\n", " Hx_b = np.kron(H.T,I_rows)\n", " print(Hx_b)\n", "\n", " print(\"Second term in Hx lifted\")\n", - " Hx_b = # TODO\n", + " Hx_b = ##TODO##\n", " print(Hx_b)\n", - " \n", + "\n", " print(\"\\n Full Lifted Hx\")\n", - " Hx = # TODO\n", + " Hx = ##TODO##\n", " print(Hx)\n", - " 
\n", + "\n", " print(\"\\nFirst term in Hz\")\n", " Hz_a = np.kron(H, I_cols)\n", " print(Hz_a)\n", "\n", " print(\"First term in Hz lifted\")\n", - " Hz_a = # TODO\n", + " Hz_a = ##TODO##\n", " print(Hz_a)\n", - " \n", + "\n", " print(\"\\n Second term in Hz\")\n", " Hz_b = np.kron(I_rows, H.T)\n", " print(Hz_b)\n", "\n", " print(\"Second term in Hz lifted\")\n", - " Hz_b = # TODO\n", + " Hz_b = ##TODO##\n", " print(Hz_b)\n", - " \n", + "\n", " print(\"\\n Full Hz\")\n", - " Hz = # TODO\n", + " Hz = ##TODO##\n", " print(Hz)\n", - " \n", + "\n", " print(\"\\n Hz times HxT\")\n", " print(Hz @ Hx.T)\n", "\n", " return Hz, Hx\n", "\n", - "Hz_lifted_copy, Hx_lifted_copy = LP(H_copy, B_I_4)\n" + "Hz_lifted_copy, Hx_lifted_copy = LP(H_copy, B_I_4)" ] }, { @@ -637,7 +702,7 @@ "id": "5c737d2d-8a6d-4b48-a545-fcf373893379", "metadata": {}, "source": [ - "Now, analyze the Hz and Hx parity check matrices to 1) make sure they commute and 2) confirm the degrees are as expected. Each stabilizer should act on maximum 6 qubits and each qubit should be involved in no more than 6 checks (summing Z and X checks as the full parity check matrix would be a concatenation of both $H_x$ and $H_z$.). " + "Now, analyze the Hz and Hx parity check matrices to 1) make sure they commute and 2) confirm the degrees are as expected. Each stabilizer should act on maximum 6 qubits and each qubit should be involved in no more than 6 checks (summing Z and X checks as the full parity check matrix would be a concatenation of both $H_x$ and $H_z$)." 
] }, { @@ -649,14 +714,14 @@ "source": [ "#Confirm Hz and Hx still commute\n", "print(\"\\n Hz times HxT\")\n", - "# TODO\n", + "##TODO##\n", "\n", "#Confirm [[52,4,4]] code was created\n", "print(\"\\n Number of logical qubits encoded (data qubits minus checks)\")\n", - "# TODO\n", + "##TODO##\n", "\n", "#Confirm degree of code is correct (should be 6 and 6)\n", - "# TODO" + "##TODO##" ] }, { @@ -664,13 +729,13 @@ "id": "b00fa7f3-cac9-4148-a1f3-314acad888e1", "metadata": {}, "source": [ - "## 7.4 Decoding with CUDA-Q QEC\n", + "---\n", "\n", - "💻 Just a heads-up: This notebook is designed to be run on an environment with a GPU. If you don't have access to a GPU, feel free to read through the cells and explore the content without executing them. Enjoy learning! ⭐\n", + "## 7.4. Decoding with CUDA-Q QEC\n", "\n", "qLDPC codes are well suited for decoding with CUDA-Q's accelerated [BP+OSD decoder](https://nvidia.github.io/cudaqx/components/qec/introduction.html#pre-built-qec-decoders) found in the [CUDA-Q QEC library](https://nvidia.github.io/cudaqx/components/qec/introduction.html). If you want to learn more about BP+OSD decoding, complete lab 4 on decoding.\n", "\n", - "As we have not discussed logical observables for these codes, the code below will randomly generate an error vector with a 5% chance of an error on each qubit (Only assuming bit flip errors for now). The decoder will produce a logical error if the decoder cannot identify all of the errors given the syndrome. However, note that each of these errors might not produce a logical flip in practice, so this considers a worst case scenario. \n", + "As we have not discussed logical observables for these codes, the code below will randomly generate an error vector with a 5% chance of an error on each qubit (only assuming bit flip errors for now). The decoder will produce a logical error if the decoder cannot identify all of the errors given the syndrome. 
However, note that each of these errors might not produce a logical flip in practice, so this considers a worst-case scenario.\n",
    "\n",
    "In the cell below, run the decoder using the $H_z$ matrix you produced for the [[52,4,4]] LP surface code. Carefully read the code and see if you can spot where errors and syndromes are generated, where decoder options are specified, and how the decoder is called. Run the code and note what the logical error rate is."
   ]
@@ -722,7 +787,7 @@
    "for i, r in enumerate(results):\n",
    "    bp_converged_flags.append(r.converged)\n",
    "    decoded_error = np.array(r.result, dtype=np.uint8)\n",
-    "    \n",
+    "\n",
    "    # Check if error was corrected\n",
    "    if not np.array_equal(decoded_error, errors[i]):\n",
    "        num_logical_errors += 1\n",
@@ -734,7 +799,7 @@
    "\n",
    "# Optional: Single shot example\n",
    "single_syndrome = syndromes[0]\n",
-    "bp_converged, decoded_result, *_ = decoder.decode(single_syndrome.tolist())\n"
+    "bp_converged, decoded_result, *_ = decoder.decode(single_syndrome.tolist())"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "8f995a87-3bf1-464c-95f3-ca709f520a29",
   "metadata": {},
   "source": [
-    "Now, run the same code but use the `Hz_lifted_copy` parity check matrix. This code is simply four non-interacting copies of the surface code and hence a [[52,4,3]] code. How does the logical error rate compare? Can you see the benefit of the LP surface code as it adds one to the distance and outperforms copies of the surface code significantly."
+    "Now, run the same code but use the `Hz_lifted_copy` parity check matrix. This code is simply four non-interacting copies of the surface code and hence a [[52,4,3]] code. How does the logical error rate compare? Can you see the benefit of the LP surface code, as it adds one to the distance and outperforms copies of the surface code significantly?"
] }, { @@ -792,7 +857,7 @@ "for i, r in enumerate(results):\n", " bp_converged_flags.append(r.converged)\n", " decoded_error = np.array(r.result, dtype=np.uint8)\n", - " \n", + "\n", " # Check if error was corrected\n", " if not np.array_equal(decoded_error, errors[i]):\n", " num_logical_errors += 1\n", @@ -812,10 +877,10 @@ "id": "b1f1e501-11fd-4943-a7dd-0908fb4f9410", "metadata": {}, "source": [ - "Each choice of $l$ and $L$ will produce a different LP surface code. It varies case by case if the LP approach is better than copies of the surface code. Examine the table below from [Lift-Connected Surface Codes](https://arxiv.org/pdf/2401.02911) where the code parameters were obtained via numerical simulation. Note, how the entries highlighted in green are cases where the LP construction results (top entry) in a code with the same overhead but a higher code distance compared to surface code copies (bottom entry).\n", + "Each choice of $l$ and $L$ will produce a different LP surface code. It varies case by case if the LP approach is better than copies of the surface code. Examine the table below from [Lift-Connected Surface Codes](https://arxiv.org/pdf/2401.02911) where the code parameters were obtained via numerical simulation. Note how the entries highlighted in green are cases where the LP construction results (top entry) in a code with the same overhead but a higher code distance compared to surface code copies (bottom entry).\n", "\n", "\n", - "\"Drawing\"" + "\"Table" ] }, { @@ -823,9 +888,20 @@ "id": "03ba3a34-2173-4df2-9224-09044ad68675", "metadata": {}, "source": [ - "## Summary\n", + "## Conclusion\n", "\n", - "You now have a foundational understanding of qLDPC codes and how they differ from their classical counterparts. qLDPC codes are quite promising and will continue to be an active field of research. 
The methods covered in this work are just a sample of the different ways to construct qLDPC code parity check matrices yet lay the groundwork for you to understand other state of the art techniques like [bivariate bicycle codes](https://arxiv.org/abs/2308.07915)." + "You now have a foundational understanding of qLDPC codes and how they differ from their classical counterparts. qLDPC codes are quite promising and will continue to be an active field of research. The methods covered in this work are just a sample of the different ways to construct qLDPC code parity check matrices yet lay the groundwork for you to understand other state of the art techniques like [bivariate bicycle codes](https://arxiv.org/abs/2308.07915)." + ] + }, + { + "cell_type": "markdown", + "id": "f49279a6cb904b95", + "metadata": {}, + "source": [ + "**Related Notebooks:**\n", + "* [QEC 101 Lab 6: Topological Codes](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/06_QEC_Topological_Codes.ipynb) — covers surface codes which are a special case of qLDPC codes\n", + "* [QEC 101 Lab 8: Decoder Metrics and Parallel Window Decoding](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/08_QEC_Decoder_metrics_and_parallel_window_decoding.ipynb) — extends the decoding concepts introduced in this lab\n", + "* [QEC 101 Lab 4: Decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb) — prerequisite lab covering belief propagation and Tanner graphs" ] } ], @@ -843,11 +919,24 @@ "file_extension": ".py", "mimetype": "text/x-python", "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.10.0" + }, + "learning_goals": { + "cfqt_domain": "QCS", + "cfqt_subdomain": [ + "QCS.ERR", + "QCS.SW", + "QCS.ALG" + ], + "cfqt_proficiency": "B1", + "qist_competency_areas": [ + "quantum_error_correction", + "quantum_software", + "quantum_algorithms" + ], + "application_domain": "error_correction" } }, 
"nbformat": 4, "nbformat_minor": 5 -} +} \ No newline at end of file diff --git a/qec101/08_QEC_Decoder_metrics_and_parallel_window_decoding.ipynb b/qec101/08_QEC_Decoder_metrics_and_parallel_window_decoding.ipynb index 9e423a2..e761c6c 100644 --- a/qec101/08_QEC_Decoder_metrics_and_parallel_window_decoding.ipynb +++ b/qec101/08_QEC_Decoder_metrics_and_parallel_window_decoding.ipynb @@ -1,33 +1,102 @@ { "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "790ebf04544e4a8f", + "metadata": {}, + "outputs": [], + "source": [ + "# SPDX-License-Identifier: Apache-2.0 AND CC-BY-NC-4.0\n", + "#\n", + "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# http://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." + ] + }, { "cell_type": "markdown", "id": "194fa8fa-d624-442e-8f54-7b49be807a1d", "metadata": {}, "source": [ - "# Decoder Metrics and (Temporal) Parallel Window Decoding\n", + "# Decoder Metrics and (Temporal) Parallel Window Decoding — QEC 101\n", + "$\\renewcommand{\\ket}[1]{|#1\\rangle}$\n", + "\n", + "---\n", "\n", - "Decoders are at the heart of QEC. In the previous QEC 101 lesson called \"[Decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb)\" you learned about the role of decoders and some of the different approaches to decoding.\n", + "Decoders are at the heart of QEC. 
In the previous QEC 101 lesson called \"[Decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb)\" you learned about the role of decoders and some of the different approaches to decoding.\n", "\n", - "As QEC matures as a field, research is expanding from its origins in pure theory to experimental demonstrations of QEC workflows on physical QPUs. As QPUs continue to scale, it is becoming increasingly important to consider how every aspect of a QEC workflow scales such that it is possible to run fault-tolerant logic on devices with millions of qubits.\n", + "As QEC matures as a field, research is expanding from its origins in pure theory to experimental demonstrations of QEC workflows on physical QPUs. As QPUs continue to scale, it is becoming increasingly important to consider how every aspect of a QEC workflow scales such that it is possible to run fault-tolerant logic on devices with millions of qubits.\n", "\n", - "Decoders are likely the primary bottleneck of QEC, so it becomes critically important to understand the nature of these bottlenecks and what sorts of solutions will scale or not. \n", + "Decoders are likely the primary bottleneck of QEC, so it becomes critically important to understand the nature of these bottlenecks and what sorts of solutions will scale or not.\n", "\n", - "This notebook will explore the key metrics of decoders, giving you a better intuition for some of the most important practical considerations for decoders and why they matter. It will also explore a scalable decoding scheme called temporal parallel window decoding which has promise to help overcome some of challenges faced with QEC decoding.\n", + "This notebook will explore the key metrics of decoders, giving you a better intuition for some of the most important practical considerations for decoders and why they matter. 
It will also explore a scalable decoding scheme called temporal parallel window decoding which has promise to help overcome some of the challenges faced with QEC decoding.\n", "\n", - "**Prerequisites:** This lab assumes you have a moderate knowledge of QEC and have completed the core QEC 101 courses (labs 1-4), especially the labs covering [stabilizers](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) and [decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb). This course also mentions magic states, so the [notebook on magic state distillation](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/05_QEC_MSD.ipynb) would also provide helpful context. \n", + "**What You Will Do:**\n", + "* Explore the three primary decoder metrics: accuracy, throughput, and reaction time\n", + "* Simulate the exponential backlog problem using a QEC decoder simulator\n", + "* Compare sliding window and parallel window decoding strategies\n", + "* Analyze how reaction time and latency impact QPU clock speed\n", "\n", - "**What you will do:**\n", - "* Learn the three primary decoder metrics: accuracy, throughput, and reaction time.\n", - "* Run simulations to understand the throughput backlog problem.\n", - "* Learn about sliding and temporal parallel window decoding.\n", - "* Understand how reaction time impacts QPU clock speed.\n", + "**Prerequisites:**\n", + "* Python and Jupyter familiarity\n", + "* Completion of QEC 101 labs 1–4, especially [Stabilizers](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) and [Decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb)\n", + "* Familiarity with magic state distillation ([Lab 5](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/05_QEC_MSD.ipynb)) is helpful\n", "\n", - "**Terminology you will use:**\n", - "* throughput, accuracy, reaction time\n", - "* the exponential backlog 
problem\n", - "* sliding window decoding\n", - "* parallel window decoding\n" + "**Key Terminology:**\n", + "* Accuracy\n", + "* Throughput\n", + "* Reaction time\n", + "* Exponential backlog\n", + "* Sliding window decoding\n", + "* Parallel window decoding\n", + "* Magic state distillation (MSD)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6b7e741a4f034a31", + "metadata": {}, + "outputs": [], + "source": [ + "## Instructions for Google Colab. You can ignore this cell if you have CUDA-Q\n", + "## set up locally with all required files on your system.\n", + "## Uncomment the lines below and execute this cell to install CUDA-Q.\n", + "\n", + "#!pip install cudaq -q\n", + "#\n", + "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", + "#!unzip -q main.zip\n", + "#!mv cuda-q-academic-main/qec101/Images ./Images" + ] + }, + { + "cell_type": "markdown", + "id": "6da893fac4ce4ed3", + "metadata": {}, + "source": [ + "> **Note:** Run the cell below to import all required packages.\n", + "> If you installed packages above, restart the kernel first\n", + "> (**Runtime → Restart session** in Colab, or **Kernel → Restart** in Jupyter)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bad1457557214d51", + "metadata": {}, + "outputs": [], + "source": [ + "from Images.parallel.decoder_simulator import QECSimulator" ] }, { @@ -35,9 +104,11 @@ "id": "46f66c66-b7a4-4ad7-aab1-0a2f93e6cb43", "metadata": {}, "source": [ - "## 8.1 The Key Decoder Metrics\n", + "---\n", "\n", - "Recall that the job of a decoder is to take syndromes measured from a quantum computer and a parity check matrix ($H$), defining the parity constraints of the QEC code, and determine where errors occurred so they can be fixed. 
Decoding a single syndrome extraction round can reveal errors in space, but certain errors, such as measurement errors, can only be caught when decoding in time, using a larger $H$ that can decode multiple rounds of syndrome data. \n",
+    "## 1. The Key Decoder Metrics\n",
+    "\n",
+    "Recall that the job of a decoder is to take syndromes measured from a quantum computer and a parity check matrix ($H$), defining the parity constraints of the QEC code, and determine where errors occurred so they can be fixed. Decoding a single syndrome extraction round can reveal errors in space, but certain errors, such as measurement errors, can only be caught when decoding in time, using a larger $H$ that can decode multiple rounds of syndrome data.\n",
    "\n",
    "Decoders can be assessed by three primary metrics:\n",
    "\n",
@@ -45,54 +116,49 @@
    "2. **Throughput** - The rate at which the decoder processes syndrome data.\n",
    "3. **Reaction time** - The time between when the last syndrome is sent from the QPU and the decoder returns a correction.\n",
    "\n",
-    "Accuracy is fairly straightforward. If a decoder makes poor predictions, logical errors occur which results in poor outcomes from the quantum algorithm. This is usually measured by a logical error rate. We will not discuss accuracy much here, but note that there is often a tradeoff between decoder accuracy and its scalability with respect to the number of syndromes decoded in a single block. \n",
-    "\n",
-    "Throughput measures how fast syndromes can be processed by the decoder. This rate ($r_{proc}$) must be faster than the rate at which syndromes arrive from the quantum computer $r_{gen}$. If syndromes arrive faster than they are processed, a backlog starts to build up and the decoder grinds to a halt. \n",
    "\n",
-    "Consider an example of the steady-state intercircuit feed-forward latency (SIFL) benchmark shown below. It begins with a preparation of the logical 1 state. Then, 10 syndrome extraction rounds are performed before a measurement occurs and a feed-forward operation determines if an $X$ get needs to be applied or not before the next measurement.\n",
+    "Accuracy is fairly straightforward. If a decoder makes poor predictions, logical errors occur, which results in poor outcomes from the quantum algorithm. This is usually measured by a logical error rate. 
We will not discuss accuracy much here, but note that there is often a tradeoff between decoder accuracy and its scalability with respect to the number of syndromes decoded in a single block.\n", "\n", - "Consider an example of the steady-state intercircuit feed-forward latency (SIFL) benchmark shown below. It begins with a preparation of the logical 1 state. Then, 10 syndrome extraction rounds are performed before a measurement occurs and a feed-forward operation determines if an $X$ get needs to be applied or not before the next measurement.\n", + "Throughput measures how fast syndromes can be processed by the decoder. This rate ($r_{proc}$) must be faster than the rate at which syndromes arrive from the quantum computer $r_{gen}$. If syndromes arrive faster than they are processed, a backlog starts to build up and the decoder grinds to a halt.\n", "\n", - "Between the grey and purple boxes, 10 syndrome extraction rounds occur to flag a bitflip error(s) that might have occurred and potentially induce a logical error when measured. The syndromes must be decoded before the decision is made to apply the $X$ gate or not. This decoding time is depicted as $L_i$ for the $i$th step. \n", + "Consider an example of the steady-state intercircuit feed-forward latency (SIFL) benchmark shown below. It begins with a preparation of the logical 1 state. Then, 10 syndrome extraction rounds are performed before a measurement occurs and a feed-forward operation determines if an $X$ gate needs to be applied or not before the next measurement.\n", "\n", + "Between the grey and purple boxes, 10 syndrome extraction rounds occur to flag a bitflip error(s) that might have occurred and potentially induce a logical error when measured. The syndromes must be decoded before the decision is made to apply the $X$ gate or not. This decoding time is depicted as $L_i$ for the $i$th step.\n", "\n", "
\n", - " \"plot\"\n", + " \"Diagram\n", "
\n", "\n", - "Though this circuit is entirley Clifford, meaning the errors could be tracked an applied at the end, it is a good model for the procedure necessary to apply non-Clifford gates like Toffoli gates or $T$ gates. The main bottleneck of a quantum algorithm is the execution of the non-Clifford gates like $T$ gates. Recall from the lesson [\"$T$ Gates and Magic State Distillation\"](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/05_QEC_MSD.ipynb) that $T$ gates are generally prepared by using an asynchronously generated $\\ket{T}$ state via a costly process called magic state distillation (MSD). The $\\ket{T}$ states are stored and applied as needed via the teleportation circuit shown below. \n", + "Though this circuit is entirely Clifford, meaning the errors could be tracked and applied at the end, it is a good model for the procedure necessary to apply non-Clifford gates like Toffoli gates or $T$ gates. The main bottleneck of a quantum algorithm is the execution of the non-Clifford gates like $T$ gates. Recall from the lesson [\"$T$ Gates and Magic State Distillation\"](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/05_QEC_MSD.ipynb) that $T$ gates are generally prepared by using an asynchronously generated $\\ket{T}$ state via a costly process called **magic state distillation (MSD)**. The $\\ket{T}$ states are stored and applied as needed via the teleportation circuit shown below.\n", "\n", "
\n", - " \"plot\"\n", + " \"Circuit\n", "
\n", "\n", - "As demonstrated in the SIFL figure, if $r_{gen}$ < $r_{proc}$, then each step continues with a constant decoder latency $L_i$ (Top). If $r_{gen}$ > $r_{proc}$, a buildup of unprocessed syndromes occurs (bottom) and each subsequent decoder step becomes slower and slower ($L_i$< $L_{i+1}$) before the next $X$ gate is applied. \n", - "\n", - "In Therhal's paper \"[Quantum Error Correction for Quantum Memories\n", - "](https://arxiv.org/pdf/1302.3428)\", the argument is made that this backlog becomes a serious problem and grows exponentially with the number of qubits. This means that a decoder with a sufficient throughput is a non-negotiable for QEC.\n", + "As demonstrated in the SIFL figure, if $r_{gen}$ < $r_{proc}$, then each step continues with a constant decoder latency $L_i$ (Top). If $r_{gen}$ > $r_{proc}$, a buildup of unprocessed syndromes occurs (bottom) and each subsequent decoder step becomes slower and slower ($L_i$< $L_{i+1}$) before the next $X$ gate is applied.\n", "\n", + "In Terhal's paper \"[Quantum Error Correction for Quantum Memories](https://arxiv.org/pdf/1302.3428)\", the argument is made that this backlog becomes a serious problem and grows exponentially with the number of qubits. This means that a decoder with a sufficient throughput is a non-negotiable for QEC.\n", "\n", - "The final metric \"reaction time\" essentially sets the limit on the wall clock time for the QPU. It is likely that FTQC circuits will be overwhelmingly limited by application of $T$ or other non-Clifford gates that require feed-forward information from the decoder before proceeding to the next gate.\n", + "The final metric \"reaction time\" essentially sets the limit on the wall clock time for the QPU. 
It is likely that FTQC circuits will be overwhelmingly limited by application of $T$ or other non-Clifford gates that require feed-forward information from the decoder before proceeding to the next gate.\n", "\n", - "\n", - "The reaction time is composed of two main components. First, the latency which is the time from the last syndrome produced by the QPU to the decoder returning its result ($T_{decode}$). The second is the communication overhead from the decoder to the QPU via the classical control system ($T_{latency}$). \n", + "The reaction time is composed of two main components. First, the latency which is the time from the last syndrome produced by the QPU to the decoder returning its result ($T_{decode}$). The second is the communication overhead from the decoder to the QPU via the classical control system ($T_{latency}$).\n", "\n", "$$ T_{reaction}=T_{decode}+T_{latency}$$\n", "\n", "Though reaction time is not as critical as throughput, it may be a major practical limitation for completing quantum algorithms with many non-Clifford gates in a reasonable amount of time.\n", "\n", + "A final honorable mention in the discussion of metrics is scalability. Though, not a metric per se, it is important to remember that any decoder, no matter how promising, needs to scale. Thus, today's research efforts should be directed towards the methods which have the greatest potential to scale and avoid those with proven limitations.\n", + "\n", + "
\n", "\n", - "A final honorable mention in the discussion of metrics is scalability. Though, not a metric per se, it is important to remember that any decoder, no matter how promising, needs to scale. Thus, today's research efforts should be directed towards the methods which have the greatest potential to scale and avoid those with proven limitations. \n", + "**Exercise 1:**\n", "\n", + "Use the `QECSimulator` below to explore the **exponential backlog** problem. The simulator essentially tracks the system load (white line) which consists of all the syndromes currently being decoded and the backlog as a function of wall clock time (of a hypothetical simulated QPU) based on the device parameters you set. The green bars are the time for each decoding round.\n", "\n", + "To setup the simulation, input the `syndrome_time_us` which is the time in microseconds for a single syndrome extraction from the QPU (this defines $r_{gen}$). The `min_batch_size` specifies the number of syndromes needed to send a batch to the decoder. `decode_func` is a lambda expression which computes how long it will take to decode all $n$ syndromes. This is an important variable to toggle, as the most accurate algorithmic decoders can scale exponentially in terms of the number of syndromes they are decoding. `max_batches_to_run` ensures the simulation stops at a reasonable point. Keep `n_processors` equal to 1 for now.\n", "\n", - "
\n", - "

Exercise 1:

\n", - "

\n", - "Use the $\\texttt{QECSimulator}$ below to explore the exponential backlog problem. The simulator essentially tracks the system load (white line) which consists of all the syndromes currently being decoded and the backlog as a function of wall clock time (of a hypothetical simulated QPU) based on the device parameters you set. The green bars are the time for each decoding round.\n", - " \n", - "To setup the simulation, input the $\\texttt{syndrome\\_time\\_us}$ which is the time in microseconds for a single syndrome extraction from the QPU (this defines $r_{gen}$ ). The $\\texttt{min\\_batch\\_size}$ specifies the number of syndromes needed to send a batch to the decoder. $\\texttt{decode\\_fun}$ is a lambda expression which computes how long it will take to decode all $n$ syndromes. This is an important variable to toggle, as the most accurate algorithmic decoders can scale exponentially in terms of the number of syndromes they are decoding. $\\texttt{max\\_batches\\_to\\_run}$ ensures the simulation stops at a reasonable point. Keep $\\texttt{n\\_processors}$ equal to 1 for now.\n", + "Run the simulation for the different situations presented below. In the first cell, run the case where the decoder scales linearly with the number of syndromes to decode but is faster than the syndrome generation rate. What happens to the system load and the decoding time?\n", "\n", - "Run the simulation for the different situations presented below. In the first cell, run the case where the decoder scales linearly with the number of syndromes to decode but is faster than the syndrome generation rate. What happens to the system load and the decoding time? \n", - "

\n" + "
" ] }, { @@ -102,7 +168,7 @@ "metadata": {}, "outputs": [], "source": [ - "from Images.parallel.decoder_simulator import QECSimulator\n", + "# EXERCISE 1\n", "\n", "print(\"Running Scenario 1...\")\n", "sim1 = QECSimulator(\n", @@ -121,7 +187,7 @@ "id": "29f5b1c0-1f03-4287-94ff-c4f8619b5a84", "metadata": {}, "source": [ - "Now, keep the decode function linear, but change the scaling factor so it is slightly slower than the syndrome generation rate. What happens to the system load and batch decoding times?" + "Now, keep the decode function linear, but change the scaling factor so it is slightly slower than the syndrome generation rate. What happens to the system load and batch decoding times?" ] }, { @@ -148,7 +214,7 @@ "id": "925cac34-c7bc-4e27-873a-0cae94baec55", "metadata": {}, "source": [ - "Now, make the decoder quadratic with the following decode function to better resemble a a decoder in practice deployment: $f(n) = 0.01 *n^2$." + "Now, make the decoder quadratic with the following decode function to better resemble a decoder in practice deployment: $f(n) = 0.01 *n^2$." ] }, { @@ -175,7 +241,7 @@ "id": "bf656bf1-6346-4582-ac2e-ea0659413b68", "metadata": {}, "source": [ - "Even a quadratic scaling decoder can perform well if it is handling a small enough batch of syndromes to keep up, but try changing the prefactor to $f(n) = n^2$ and see what happens. " + "Even a quadratic scaling decoder can perform well if it is handling a small enough batch of syndromes to keep up, but try changing the prefactor to $f(n) = n^2$ and see what happens." ] }, { @@ -202,9 +268,9 @@ "id": "b86e9e4e-109e-490e-8995-4b8401ab71d0", "metadata": {}, "source": [ - "The simulation can decode the first batch reasonably fast, but the backlog is so severe, the second batch takes 160,000 microseconds! And the simulation continues to blow up after this. 
Clearly, a more effective method is needed when accurate decoders are required and $r_{proc} < r_{gen}$.\n", + "The simulation can decode the first batch reasonably fast, but the backlog is so severe, the second batch takes 160,000 microseconds! And the simulation continues to blow up after this. Clearly, a more effective method is needed when accurate decoders are required and $r_{proc} < r_{gen}$.\n", "\n", - "One additional note. The 1 microsecond times for syndrome extraction are in the ballpark for superconducting devices. For slower modalities like ion traps, the same decoder setup may work fine simply because it take so much more time to perform syndrome extraction. Try running the analysis one more time but using a syndrome extraction time of 200 microseconds." + "One additional note. The 1 microsecond times for syndrome extraction are in the ballpark for superconducting devices. For slower modalities like ion traps, the same decoder setup may work fine simply because it takes so much more time to perform syndrome extraction. Try running the analysis one more time but using a syndrome extraction time of 200 microseconds." ] }, { @@ -239,69 +305,65 @@ "id": "896b09cf-ae80-4923-a281-1a101252df10", "metadata": {}, "source": [ - "## Parallel Window Decoding\n", + "---\n", + "\n", + "## 2. Parallel Window Decoding\n", "\n", - "The throughput problem is serious, and finding clever ways to increase decoder throughput is necessary to ensure the viability of real-time decoding. Accelerated computing has helped solve simulation problem across all domains of science by massively parallelizing scientific computing and boosting throughput by orders of magnitude. The QEC field is primed to benefit from such an approach as well and this section will build towards a scheme called parallel window decoding which can ameliorate the throughput problem. 
\n", + "The throughput problem is serious, and finding clever ways to increase decoder throughput is necessary to ensure the viability of real-time decoding. Accelerated computing has helped solve simulation problems across all domains of science by massively parallelizing scientific computing and boosting throughput by orders of magnitude. The QEC field is primed to benefit from such an approach as well and this section will build towards a scheme called parallel window decoding which can ameliorate the throughput problem.\n", "\n", - "Building up from a simple model. If a circuit was entirely composed of Clifford gates, it could be run and all syndromes be decoded after its completion as a single postprocessing step. Unfortunately, the benefits of quantum algorithms comes from non-Clifford gates like $T$ gates which require feedback from a decoder at each application. This means a decoder needs to work alongside the QPU in realtime. The magic states required for application of $T$ gates can also decohere if the decoder backlog is too high. Thus, clever parallel decoding schemes are required to solve the problem.\n", + "Building up from a simple model. If a circuit was entirely composed of Clifford gates, it could be run and all syndromes be decoded after its completion as a single postprocessing step. Unfortunately, the benefits of quantum algorithms comes from non-Clifford gates like $T$ gates which require feedback from a decoder at each application. This means a decoder needs to work alongside the QPU in realtime. The magic states required for application of $T$ gates can also decohere if the decoder backlog is too high. Thus, clever parallel decoding schemes are required to solve the problem.\n", "\n", "Let's consider decoding for a single set of 6 stabilizer rounds performed before some $T$ gate. One approach to decoding these could be waiting for all 6 rounds to finish and then decoding them all at once. (The `QECSimulator` above does currently.) 
This block decoding will be the slowest approach possible as the decoder must process a massive parity (that covers all syndromes) at once.\n", "\n", "
\n", - " \"plot\"\n", + " \"Diagram\n", "
\n", "\n", - "Aside from the more difficult decoding task, this approach cannot start until all of the syndromes are collected so there is no chance for a headstart. \n", - "\n", - "A second, far more common approach is called **sliding window**. The idea is to decode slices of the syndrome data and feed the results into the next decoding task. One advantage is that the decoding task is much smaller as the \"sliding window\" covers fewer syndromes at the same time. A second advantage is that the decoder can start working on the first window as soon as it is ready rather than waiting for all of the syndrome data to be generated.\n", + "Aside from the more difficult decoding task, this approach cannot start until all of the syndromes are collected so there is no chance for a headstart.\n", "\n", + "A second, far more common approach is called **sliding window decoding**. The idea is to decode slices of the syndrome data and feed the results into the next decoding task. One advantage is that the decoding task is much smaller as the \"sliding window\" covers fewer syndromes at the same time. A second advantage is that the decoder can start working on the first window as soon as it is ready rather than waiting for all of the syndrome data to be generated.\n", "\n", "
\n", - " \"plot\"\n", + " \"Diagram\n", "
\n", "\n", "Even though the decoder gets a head start, the decoding must still occur in serial as each decoding step cannot begin until the previous is finished. Thus, sliding window decoding will quickly run into scaling problems.\n", "\n", - "The solution to this is **parallel window** decoding as presented in the paper entitled [\"Parallel window decoding enables scalable fault tolerant quantum computation\"](https://www.nature.com/articles/s41467-023-42482-1). Parallel window decoding can be applied with respect to time (temporal) or space (spatial). For this lesson we will focus on temporal parallelism only, but know that similar techniqies could be used to break down a large QEC code patch spatially.\n", + "The solution to this is **parallel window decoding** as presented in the paper entitled [\"Parallel window decoding enables scalable fault tolerant quantum computation\"](https://www.nature.com/articles/s41467-023-42482-1). Parallel window decoding can be applied with respect to time (temporal) or space (spatial). For this lesson we will focus on temporal parallelism only, but know that similar techniques could be used to break down a large QEC code patch spatially.\n", "\n", "In temporal parallel window decoding, the entire block of syndrome history is obtained and then broken down into sub-blocks that can run in parallel on $N_{proc}$ number of processors in two steps: decoding and cleanup.\n", "\n", - "\n", - "\n", - "\n", "
\n", - " \"plot\"\n", + " \"Diagram\n", "
\n", "\n", - "The decoding step first commits error assignments in specififed regions while the second cleans up the boundaries to rectify errors between the commit regions as shown in the image below.\n", - "\n", + "The decoding step first commits error assignments in specified regions while the second cleans up the boundaries to rectify errors between the commit regions as shown in the image below.\n", "\n", "
\n", - " \"plot\"\n", + " \"Schematic\n", "
\n", "\n", - "Consider the central commit region in green. There are $n_{com}$ syndromes needed for the decode step. The commit regions are then flanked by buffer regions where syndromes are partially processed, but the results cannot be certain yet. The commit regions are the solved boundaries for the syndromes processed in the second cleanup step. Here, $n_w$ syndromes are processed to determine errors in the two overlapping buffer regions plus any addition syndromes between them. \n", + "Consider the central commit region in green. There are $n_{com}$ syndromes needed for the decode step. The commit regions are then flanked by buffer regions where syndromes are partially processed, but the results cannot be certain yet. The commit regions are the solved boundaries for the syndromes processed in the second cleanup step. Here, $n_w$ syndromes are processed to determine errors in the two overlapping buffer regions plus any additional syndromes between them.\n", "\n", - "As you can see from the figure, this pattern repeats for as many processors and sub-blocks as necessary. For counting purposes, the smallest sub-block hat can be processed in this manner consists of $n_{com} +n_W$ syndromes. There are many potential choices for the size of these regions. For example, the [\"Parallel window decoding enables scalable fault tolerant quantum computation\"](https://www.nature.com/articles/s41467-023-42482-1) paper sets $n_{com} =d$ and $n_w = 3*d$. Techniqes like \"[temporal encoding of lattice surgery](https://journals.aps.org/prxquantum/abstract/10.1103/PRXQuantum.3.010331)\" can allow for a smaller number of syndrome extraction rounds to be used, improving the throughput of the decoder even more by minimizing the sub-block decoding task.\n", + "As you can see from the figure, this pattern repeats for as many processors and sub-blocks as necessary. For counting purposes, the smallest sub-block that can be processed in this manner consists of $n_{com} + n_w$ syndromes. 
There are many potential choices for the size of these regions. For example, the [\"Parallel window decoding enables scalable fault tolerant quantum computation\"](https://www.nature.com/articles/s41467-023-42482-1) paper sets $n_{com} = d$ and $n_w = 3d$. Techniques like \"[temporal encoding of lattice surgery](https://journals.aps.org/prxquantum/abstract/10.1103/PRXQuantum.3.010331)\" can allow for a smaller number of syndrome extraction rounds to be used, improving the throughput of the decoder even more by minimizing the sub-block decoding task.\n", "\n", - "To avoid an exponential backlog if we have a single decoder, the following inequality must hold. \n", + "To avoid an exponential backlog if we have a single decoder, the following inequality must hold.\n", "\n", - "$$ (n_{com} + n_w)t_{syndrome-extraction} \\geq 2 * t_{decode} $$\n", + "$$ (n_{com} + n_w)t_{\\text{syndrome-extraction}} \\geq 2 \\cdot t_{\\text{decode}} $$\n", "\n", - "At face value this seems like a more challenging decoding task relative to the standard approach as $t_{decode}$ now has a factor of 2. The trick is that this construction, though requiring two steps, can be attacked with any number of processors $N_{proc}$ so instead the following must hold.\n", + "At face value this seems like a more challenging decoding task relative to the standard approach as $t_{\\text{decode}}$ now has a factor of 2. The trick is that this construction, though requiring two steps, can be attacked with any number of processors $N_{proc}$ so instead the following must hold.\n", "\n", - "$$ N_{proc}(n_{com} + n_w)t_{syndrome-extraction} \\geq 2 * t_{decode} $$\n", + "$$ N_{proc}(n_{com} + n_w)t_{\\text{syndrome-extraction}} \\geq 2 \\cdot t_{\\text{decode}} $$\n", "\n", - "This means that a slow decoder using parallel window decoding can still avoid the exponential backlog problem by using an arbitrary number of decoder in parallel up to the limit of the smallest sub-block decoding task. 
Such an approach has replaced the viability of sliding window and provides a much more promising path to scalable QEC.\n", + "This means that a slow decoder using parallel window decoding can still avoid the exponential backlog problem by using an arbitrary number of decoders in parallel up to the limit of the smallest sub-block decoding task. Such an approach has replaced the viability of sliding window and provides a much more promising path to scalable QEC.\n", "\n", + "
\n", "\n", - "
\n", - "

Exercise 2:

\n", - "

\n", - "Use the QEC Simulator above and determine how many processors are required to keep up with the quadratic scaling decoder you tested earlier. Note, that the simulator considers the fact that two decoding rounds are required for parallel window decoding, however the simulator is simplified and divides the syndromes to process into $N$ blocks approximating the procedure to form a buffer zone. \n", - "

\n", + "**Exercise 2:**\n", "\n", - "\n" + "Use the QEC Simulator above and determine how many processors are required to keep up with the quadratic scaling decoder you tested earlier. Note that the simulator considers the fact that two decoding rounds are required for parallel window decoding, however the simulator is simplified and divides the syndromes to process into $N$ blocks approximating the procedure to form a buffer zone.\n", + "\n", + "
" ] }, { @@ -311,6 +373,8 @@ "metadata": {}, "outputs": [], "source": [ + "# EXERCISE 2\n", + "\n", "print(\"Running Scenario 6...\")\n", "sim1 = QECSimulator(\n", " syndrome_time_us=1.0,\n", @@ -328,7 +392,7 @@ "id": "a4216d9c-f2e7-4d03-aedb-5f4bdb05cf38", "metadata": {}, "source": [ - "The key takeaway here is that it is often necessary to take a massive decoding task and spend the overhead to distribute it across AI supercomputing resources to avoid the exponential backlog. This is a primary motivation for why GPUs are powerful tools for QEC, even if they might have greater latency than other alternatives." + "The key takeaway here is that it is often necessary to take a massive decoding task and spend the overhead to distribute it across AI supercomputing resources to avoid the exponential backlog. This is a primary motivation for why GPUs are powerful tools for QEC, even if they might have greater latency than other alternatives." ] }, { @@ -336,11 +400,13 @@ "id": "c51cd73a-a421-45cc-8fc3-935281c5b34d", "metadata": {}, "source": [ - "## Decoder Latency and Reaction Time\n", + "---\n", "\n", - "Assuming the throughput problem is solved, latency becomes the next critical factor. The latency is the time it takes from the QPU producing the last syndrome to the when the decoder obtains a result for this syndrome. This is closely related to a similar quantity called reaction time which also includes the time it takes to transmit the result through the classical control systems.\n", + "## 3. Decoder Latency and Reaction Time\n", "\n", - "Rerun your parallel decoding simulation above, but this time use 100 decoders. What happens to the decode time? \n" + "Assuming the throughput problem is solved, latency becomes the next critical factor. The latency is the time it takes from the QPU producing the last syndrome to when the decoder obtains a result for this syndrome. 
This is closely related to a similar quantity called reaction time which also includes the time it takes to transmit the result through the classical control systems.\n", + "\n", + "Rerun your parallel decoding simulation above, but this time use 100 decoders. What happens to the decode time?" ] }, { @@ -367,31 +433,33 @@ "id": "d6bdd5a0-bff1-4335-a0b3-fe46fc071931", "metadata": {}, "source": [ - "Notice each decoder task is much faster so the result can also be returned to the QPU much faster and progress the quantum computation. Note that more processors can improve the latency, but they are still limited by twice the time it takes to decoder a fundamental block. Though this simulator can arbitrarily compute smaller times to give a qualitative depiction of this, in practice, the previous discussion about the size of the buffer and commit regions determines the limit on latency.\n", - "\n", - "A quantum algorithm will be constrained regardless, but there are a number of tradeoffs that can be considered. If the MSD process is slow and few $\\ket{T}$ states are on hand, then MSD becomes the bottleneck for QPU computation speed. If the number of $\\ket{T}$ states is abundant, then the decoder's reaction time becomes the bottleneck as the decoder must return its result before the next $T$ gate can be applied.\n", + "Notice each decoder task is much faster so the result can also be returned to the QPU much faster and progress the quantum computation. Note that more processors can improve the latency, but they are still limited by twice the time it takes to decode a fundamental block. 
Though this simulator can arbitrarily compute smaller times to give a qualitative depiction of this, in practice, the previous discussion about the size of the buffer and commit regions determines the limit on latency.\n", "\n", - "This means that decoding will usually be the primary bottleneck for the QPU wall clock time and might be the difference between an algorithm completing in a reasonable amount of time or not. This is why decoders need to be as fast as possible (even when the throughput problem is solved) and connect to the quantum control devices with interconnects optimized for latency such as [NVIDIA's NVQLink](https://www.nvidia.com/en-us/solutions/quantum-computing/nvqlink/).\n", + "A quantum algorithm will be constrained regardless, but there are a number of tradeoffs that can be considered. If the MSD process is slow and few $\\ket{T}$ states are on hand, then MSD becomes the bottleneck for QPU computation speed. If the number of $\\ket{T}$ states is abundant, then the decoder's reaction time becomes the bottleneck as the decoder must return its result before the next $T$ gate can be applied.\n", "\n", - "Additional space and time tradeoffs can be considered with methods like autocorrected $T$ gates, where $T$ gates proceed without resolving the necessary Clifford correction which is instead tracked as part of the quantum algorithm, speeding up the wall clock time. However, this time savings requires a space cost in the form of additional ancillas for every $T$ gate. Neither approaches resolves the underlaying constraints but, like many aspects of quantum computing, allows for selection of tradeoffs depending on the application.\n", + "This means that decoding will usually be the primary bottleneck for the QPU wall clock time and might be the difference between an algorithm completing in a reasonable amount of time or not. 
This is why decoders need to be as fast as possible (even when the throughput problem is solved) and connect to the quantum control devices with interconnects optimized for latency such as [NVIDIA's NVQLink](https://www.nvidia.com/en-us/solutions/quantum-computing/nvqlink/).\n", "\n", + "Additional space and time tradeoffs can be considered with methods like autocorrected $T$ gates, where $T$ gates proceed without resolving the necessary Clifford correction which is instead tracked as part of the quantum algorithm, speeding up the wall clock time. However, this time savings requires a space cost in the form of additional ancillas for every $T$ gate. Neither approach resolves the underlying constraints but, like many aspects of quantum computing, allows for selection of tradeoffs depending on the application.\n", "\n", - "## Summary\n", + "## Conclusion\n", "\n", - "After completing this lab, you should now have a better understanding for why decoding is so difficult and what specific metrics need to be considered when using a decoder in practice. A key takeaway is that the throughput problem is an absolute dealbreaker for decoders. If a decoder cannot process syndromes as fast as they are obtained from the QPU, it will result in an exponential backlog that grinds everything to a halt. \n", + "After completing this lab, you should now have a better understanding of why decoding is so difficult and what specific metrics need to be considered when using a decoder in practice. A key takeaway is that the throughput problem is an absolute dealbreaker for decoders. If a decoder cannot process syndromes as fast as they are obtained from the QPU, it will result in an exponential backlog that grinds everything to a halt.\n", "\n", - "Parallel window decoding is an innovative solution to this problem that allows many processors working in tandem to solve the throughput problem. 
\n", + "Parallel window decoding is an innovative solution to this problem that allows many processors working in tandem to solve the throughput problem.\n", "\n", "Response time and accuracy are also very important and might impose severe practical limitations on a QEC workflow, but these are secondary to throughput." ] }, { - "cell_type": "code", - "execution_count": null, - "id": "5afad8d5-4c0c-431b-a716-3ca669a0abd4", + "cell_type": "markdown", + "id": "74c698b892e74165", "metadata": {}, - "outputs": [], - "source": [] + "source": [ + "**Related Notebooks:**\n", + "* [QEC Decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb) — covers decoder fundamentals that this notebook builds upon\n", + "* [QEC Magic State Distillation](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/05_QEC_MSD.ipynb) — explores T gates and MSD referenced throughout this notebook\n", + "* [QEC Topological Codes](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/06_QEC_Topological_Codes.ipynb) — introduces surface codes where these decoding strategies are applied" + ] } ], "metadata": { @@ -411,8 +479,22 @@ "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.12.3" + }, + "learning_goals": { + "cfqt_domain": "QCS", + "cfqt_subdomain": [ + "QCS.ERR", + "QCS.SW" + ], + "cfqt_proficiency": "B1", + "qist_competency_areas": [ + "quantum_error_correction", + "quantum_software", + "hpc_integration" + ], + "application_domain": "error_correction" } }, "nbformat": 4, "nbformat_minor": 5 -} +} \ No newline at end of file diff --git a/qec101/09_QEC_Detector_Error_Models.ipynb b/qec101/09_QEC_Detector_Error_Models.ipynb new file mode 100644 index 0000000..b046d86 --- /dev/null +++ b/qec101/09_QEC_Detector_Error_Models.ipynb @@ -0,0 +1,786 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "790ebf04544e4a8f", + "metadata": {}, + "outputs": [], + "source": [ + "# SPDX-License-Identifier: 
Apache-2.0 AND CC-BY-NC-4.0\n", + "#\n", + "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# http://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." + ] + }, + { + "cell_type": "markdown", + "id": "b40ae642-189f-4b06-a2ed-fd66286f295d", + "metadata": {}, + "source": [ + "# Detector Error Models and Real-Time Decoding — QEC 101\n", + "$\\renewcommand{\\ket}[1]{|#1\\rangle}$\n", + "\n", + "---\n", + "\n", + "So far the QEC 101 labs have focused primarily on QEC memory — the specific construction of logical qubit encodings with techniques like the repetition code, Steane and Shor codes, the surface code, and qLDPC codes. Though memory is foundational to QEC, it can only correct errors from a simple noise model that ignores multiple sources of noise when an algorithm or QEC routine is run.\n", + "\n", + "Similarly, we have not explored deployment of any codes in a real-time QEC workflow. 
This includes consideration of decoder metrics like throughput and reaction time covered in \"[Decoder Metrics and (Temporal) Parallel Window Decoding](https://github.com/nvidia/cuda-q-academic/blob/main/qec101/08_QEC_Decoder_metrics_and_parallel_window_decoding.ipynb)\" but also specific implementation details like what APIs are necessary to perform QEC, keeping track of the correct data, and performing computations on the right processor.\n", + "\n", + "This lab will explore the limitations of memory experiments and motivate the need for a more robust tool for decoding how errors arise in practice during QEC rounds. The detector error model (DEM), introduced in a paper by Eisert and coworkers called [\"Designing fault-tolerant circuits using detector error models\"](https://quantum-journal.org/papers/q-2025-11-06-1905/pdf/), will be presented as a solution to this problem. You will explore some of the theory behind DEMs and some of the practical aspects of deploying them for real-time decoding.\n", + "\n", + "**What You Will Do:**\n", + "* Explore the limitations of QEC memory and why code capacity assumptions are insufficient for real-time decoding\n", + "* Construct detector error models (DEMs) for the repetition code under different noise models\n", + "* Compute detector error matrices and verify error identification using syndromes\n", + "* Analyze observables and undetectable error patterns using Tanner graphs\n", + "* Initialize a real-time decoder in CUDA-Q QEC using a DEM for the Steane code\n", + "\n", + "**Prerequisites:**\n", + "* Python and Jupyter familiarity\n", + "* Completion of QEC 101 labs 1–4, especially [Stabilizers](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) and [Decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb)\n", + "* Familiarity with the Steane code and repetition code encodings\n", + "* Basic understanding of noise models in quantum computing\n", 
+ "**Key Terminology:**\n", + "* Detector error model (DEM)\n", + "* Detector\n", + "* Detector matrix\n", + "* Measurement syndrome matrix\n", + "* Phenomenological noise model\n", + "* Code capacity\n", + "* Circuit-level noise model\n", + "\n", + "**CUDA-Q Syntax:**\n", + "* [`@cudaq.kernel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.kernel) — defines a quantum kernel function\n", + "* [`cudaq.qvector`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.qvector) — allocates a register of qubits\n", + "* [`cudaq.sample`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.sample) — samples measurement outcomes from a kernel\n", + "* [`cudaq.set_target`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.set_target) — selects simulation or hardware backend\n", + "* [`cudaq_qec.patch`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — logical qubit register with data and ancilla sub-registers\n", + "* [`cudaq_qec.get_code`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — loads a pre-built QEC code definition\n", + "* [`cudaq_qec.z_dem_from_memory_circuit`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — builds a detector error model from a memory circuit\n", + "* [`cudaq_qec.decoder_config`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — configures decoder parameters\n", + "* [`cudaq_qec.enqueue_syndromes`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — sends syndrome data to a decoder\n", + "* [`cudaq_qec.get_corrections`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — retrieves logical corrections from a decoder\n", + "* [`cudaq_qec.configure_decoders_from_file`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — loads decoder configuration from YAML\n", + "\n", + 
"**Solutions:** [`Solutions/09_QEC_Detector_Error_Models_Solution.ipynb`](Solutions/09_QEC_Detector_Error_Models_Solution.ipynb)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6b7e741a4f034a31", + "metadata": {}, + "outputs": [], + "source": [ + "## Instructions for Google Colab. You can ignore this cell if you have CUDA-Q\n", + "## set up locally with all required files on your system.\n", + "## Uncomment the lines below and execute this cell to install CUDA-Q.\n", + "\n", + "#!pip install cudaq -q\n", + "#\n", + "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", + "#!unzip -q main.zip\n", + "#!mv cuda-q-academic-main/qec101/Images ./Images" + ] + }, + { + "cell_type": "markdown", + "id": "6da893fac4ce4ed3", + "metadata": {}, + "source": [ + "> **Note:** Run the cell below to import all required packages.\n", + "> If you installed packages above, restart the kernel first\n", + "> (**Runtime → Restart session** in Colab, or **Kernel → Restart** in Jupyter)." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "a418336d-c945-4fff-87a4-e65d661f50de", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "import numpy as np\n", + "\n", + "import cudaq\n", + "\n", + "## To install cudaq-qec (if not already installed), uncomment and run:\n", + "## !pip install cudaq-qec -q\n", + "import cudaq_qec as qec" + ] + }, + { + "cell_type": "markdown", + "id": "5b77ef22-3ad5-4be9-a5b2-7a65166fe133", + "metadata": {}, + "source": [ + "---\n", + "\n", + "## 1. Real-Time Decoding and the Limits of Code Capacity\n", + "\n", + "Real-time decoding is an extremely difficult problem that requires a number of key ingredients. First, a QEC code is needed that efficiently encodes logical qubits. There are lots of considerations here, including qubit topology, qubit count, code distance, etc. 
Recall that in general, the goal is a logical qubit encoding scheme that can capture as many errors as possible using as few physical qubits as possible.\n", + "\n", + "Next, the logical qubits need to perform the logic necessary to run fault-tolerant algorithms and ensure that QEC cycles can continuously run to catch errors. These cycles involve measuring syndromes from the QPU, decoding them in the decoder, and sending the identified errors back to the QPU as shown in the figure below.\n", + "\n", + "
\n", + " \"Diagram\n", + "
\n", + "\n", + "The notebook on [\"Decoder Metrics and Parallel Window Decoding\"](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/08_QEC_Decoder_metrics_and_parallel_window_decoding.ipynb) explored the specific constraints that arise here in detail. For example, the throughput of the decoder must be greater than the rate at which syndromes are generated from the QPU. If not, the process grinds to a halt. Likewise, the reaction time, or how quickly the decoder can return results to the QPU, will determine QPU wall clock speed.\n", + "\n", + "A problem not yet discussed in a practical setting is what actually goes into preparation of the decoder. In previous notebooks, the parity check matrix that defined the QEC code memory was always the input. This is perfectly fine if we prepare say $\\ket{0}_L$ with the Steane code, allow an error to occur, and *then* run a stabilizer round to check for the error (i.e., **code capacity** assumptions). The issue is that assuming errors only occur on the data qubits between state prep and the stabilizer rounds is quite unrealistic. In a physical QPU, errors can occur on any qubit at any time!\n", + "\n", + "Explore the [Steane Code Error Models widget](https://nvidia.github.io/cuda-q-academic/interactive_widgets/steane_code_error_models.html) to test this with the Steane code. Confirm an error before the first stabilizer begins can be correctly identified. Next, see what happens when an error occurs before an ancilla measurement (choose circuit-level noise model). 
Finally, see what happens when an error occurs between the stabilizer extractions (choose phenomenological noise model).\n", + "\n", + "It should now be clear that quantum memory is not enough and any practical QEC routine needs to feed the decoder something more robust than the parity check matrix for encoding the logical qubits.\n", + "\n", + "Part of the solution has already been explored in the notebook on [decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb). Recall that decoders do not just work in space but also decode in time, taking multiple syndrome extraction circuits at once. Consider the Steane code again. Assume that only a single measurement error occurs on the first of five syndrome extraction cycles. If only the first round were decoded in isolation, it would incorrectly be assumed that an error occurred. If all five are examined at once, it would instead be clear that a one-time measurement error occurred.\n", + "\n", + "This is just one example demonstrating why the decoder needs to be provided with an object more robust than a parity check matrix that can account for errors occurring anywhere in the circuit and the results of multiple stabilizer measurement rounds." + ] + }, + { + "cell_type": "markdown", + "id": "1545f893-b910-4126-932d-b0fc0c23e734", + "metadata": {}, + "source": [ + "---\n", + "\n", + "## 2. Detector Error Models (DEMs)\n", + "\n", + "**Detector error models (DEMs)** are powerful constructs that solve the problem described above by constructing a more robust detector error matrix that can initialize a decoder to flag errors at any possible location. To fully appreciate what a DEM is and why it works, it is worth walking through some of the theory. 
DEMs were first described in [\"Designing fault-tolerant circuits using detector error models\"](https://quantum-journal.org/papers/q-2025-11-06-1905/pdf/) and the next few sections of this lab will cover some of the the key definitions and let you try a number of exercises drawn from the paper.\n", + "\n", + "The first piece we need is to define a **detector** $d_i$. A detector is a sum of measurements corresponding to a parity constraint that arises from a specific circuit. For example, $m_1 \\oplus m_2 \\oplus m_3 =0$. The detector can be represented by a vector of length $m$ where $m$ is the number of measurements in a circuit and 1's indicate measurements included in the sum.\n", + "\n", + "$$d_1 =\\begin{bmatrix}\n", + "1 \\\\\n", + "1 \\\\\n", + "1\n", + "\\end{bmatrix}$$\n", + "\n", + "Stated otherwise, if $d_1$ is a valid detector for some circuit, it will always be true (absent of noise) that all three measurements result in a binary sum of $b_i$, where $b_i$ is the expected binary sum (Usually 0 for the examples in this lesson).\n", + "\n", + "
\n", + "\n", + "**Exercise 1:**\n", + "\n", + "Code the circuit below in CUDA-Q. Prove (by sampling or by stabilizer tracking) that $d_1$ is a detector for this circuit with an expected $b_i$=0.\n", + "\n", + "\n", + "\"Three-qubit\n", + "\n", + "
" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "9917061e-3cba-4ce6-90d8-6834c089da56", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{ 000:231 011:263 101:248 110:258 }\n", + "\n" + ] + } + ], + "source": [ + "# EXERCISE 1\n", + "\n", + "@cudaq.kernel\n", + "def example1():\n", + " reg = cudaq.qvector(3)\n", + "\n", + " h(reg[1])\n", + " x(reg[2])\n", + " h(reg[2])\n", + "\n", + " x.ctrl(reg[1], reg[0])\n", + " x.ctrl(reg[2], reg[1])\n", + "\n", + "print(cudaq.sample(example1))" + ] + }, + { + "cell_type": "markdown", + "id": "a3e7e471-eb42-4d8b-8527-487051a32af5", + "metadata": {}, + "source": [ + "The only way for this not to hold true for this circuit would be if an error occurred. We say that a detector is violated if\n", + "$d_i^Tm\\neq b_i$, where $m$ is a vector of measurement outcomes.\n", + "\n", + "More complex circuits can have multiple detectors which can combine to form a **detector matrix** where each row of the matrix is a different detector defined in the absence of noise. These detectors must be linearly independent otherwise they are providing redundant constraints.\n", + "\n", + "
\n", + "\n", + "**Exercise 2:**\n", + "\n", + "Consider the repetition code circuit below. Define a detector matrix which has four detectors. Use this detector matrix to prove that a measurement error would violate at least one detector. Hint: assume all qubits begin in the 0 state.\n", + "\n", + "
\n", + "\n", + "
\n", + " \"Circuit\n", + "
" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "5380ea4e-4eca-4ea7-904e-5f7c1f681a6f", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Single measurement error analysis:\n", + " Error on m_1: violated detectors = [1]\n", + " Error on m_2: violated detectors = [2]\n", + " Error on m_3: violated detectors = [3]\n", + " Error on m_4: violated detectors = [3 4]\n", + " Error on m_5: violated detectors = [4]\n" + ] + } + ], + "source": [ + "# EXERCISE 2\n", + "\n", + "D = np.array([[1, 0, 0, 0, 0],\n", + " [0, 1, 0 ,0, 0], \n", + " [0, 0, 1, 1, 0],\n", + " [0, 0, 0, 1, 1]])\n", + "\n", + "\n", + "print(\"Single measurement error analysis:\")\n", + "for i in range(D.shape[1]):\n", + " m = np.zeros(D.shape[1], dtype=int)\n", + " m[i] = 1 # apply measurement error on qubit i\n", + " violated = (D @ m) % 2\n", + " print(f\" Error on m_{i+1}: violated detectors = {np.where(violated)[0] + 1}\")" + ] + }, + { + "cell_type": "markdown", + "id": "32b5d458-8e99-4cdd-b8d2-0d3ea96cf165", + "metadata": {}, + "source": [ + "With detectors in hand, we can begin to expand our noise model and build out a few more pieces of the DEM. Consider the same circuit now with errors added before the stabilizers and before any measurement. This is a **circuit-level noise model** where \"any n-qubit Pauli error can occur after an n-qubit gate and any single Pauli error after state preparation.\"\n", + "\n", + "
\n", + " \"Circuit\n", + "
\n", + "\n", + "With this, or any other noise model, we can define a circuit error vector $e$ which has a 1 if error $E_i$ occurred and 0 if not. This allows us to construct the **measurement syndrome matrix** $\\Omega$ which maps errors to measurements. Each row corresponds to the measurements in the circuit, 5 in this case. Each column corresponds to one of the 8 possible error locations. 1's are populated if error $j$ would flip measurement $i$.\n", + "\n", + "In practice, $\\Omega$ needs to be computed via Pauli propagation for large and more complex circuits. Once obtained, we can compute a **detector error matrix** $H = D\\Omega$. $H$ is similar to what we previously called a parity check matrix, but now contains circuit-level information in addition to QEC memory information. The structure of $H$ is such that the rows correspond to detectors and the columns to errors. A 1 entry means error $j$ would violate detector $i$.\n", + "\n", + "Similar to a standard parity check matrix, we now call $s$ a syndrome obtained from $s = He$. This means that we can now identify errors based on which detectors are flagged, capturing information about QEC memory and the circuit-level noise!\n", + "\n", + "Similarly, DEMs can be extended to encompass an even broader **phenomenological noise model** where errors can happen between syndrome extraction gates. We will not cover this here, but the authors present so-called measurement schedules which can cleverly design syndrome extraction circuits such that a given routine is fault-tolerant by design and error will not propogate uncontrollably.\n", + "\n", + "
\n", + "\n", + "**Exercise 3:**\n", + "\n", + "Complete $\\Omega$ for the circuit-level noise model of the repetition code (image above with the 8 error locations). Use $\\Omega$ to compute $H$. For every weight-1 error, compute the syndrome and prove that each error corresponds to a unique syndrome. Note that we can capture more types of errors using a DEM, but are still constrained by the code distance ($d=3$) for how many errors can be flagged.\n", + "\n", + "
" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "fc4b4f28-de2f-43a3-92f0-ffaea8416a55", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[1 1 0 1 0 0 0 0]\n", + " [0 1 1 0 1 0 0 0]\n", + " [1 1 0 0 0 1 1 0]\n", + " [0 1 1 0 0 0 1 1]]\n", + "Weight 1 Error Syndromes\n", + "Error Location i = 0\n", + "[1. 0. 1. 0.]\n", + "Error Location i = 1\n", + "[1. 1. 1. 1.]\n", + "Error Location i = 2\n", + "[0. 1. 0. 1.]\n", + "Error Location i = 3\n", + "[1. 0. 0. 0.]\n", + "Error Location i = 4\n", + "[0. 1. 0. 0.]\n", + "Error Location i = 5\n", + "[0. 0. 1. 0.]\n", + "Error Location i = 6\n", + "[0. 0. 1. 1.]\n", + "Error Location i = 7\n", + "[0. 0. 0. 1.]\n" + ] + } + ], + "source": [ + "# EXERCISE 3\n", + "\n", + "omega = np.array([[1, 1, 0, 1, 0, 0, 0, 0],\n", + " [0, 1, 1 ,0, 1, 0, 0 ,0], \n", + " [1, 0, 0, 0, 0, 1, 0, 0],\n", + " [0, 1, 0, 0, 0, 0, 1, 0],\n", + " [0, 0, 1, 0, 0, 0, 0, 1]]) \n", + "\n", + "H = D @ omega\n", + "\n", + "print(H)\n", + "\n", + "\n", + "print(\"Weight 1 Error Syndromes\")\n", + "for i in range(8):\n", + " error = np.zeros(8)\n", + " error[i] = 1\n", + " print(\"Error Location i =\", i)\n", + " print(H @ error)" + ] + }, + { + "cell_type": "markdown", + "id": "f8aba453-e3a1-4040-a7db-cabbcbd3c8a6", + "metadata": {}, + "source": [ + "---\n", + "\n", + "## 3. Observables and DEMs\n", + "\n", + "We can utilize the DEM model to explore computation of observables where an observable $o_i$ is \"a binary sum of measurements, which equals the outcome of measuring a logical operator, for any logical state.\" For example, the figure below is a repetition code with two QEC cycles. A valid choice for a logical $Z$ observable is $o = m_5$. Without noise, a single measurement of any of the data qubits determines if the state is 0 if $\\ket{0_L}$ or 1 if $\\ket{1_L}$.\n", + "\n", + "
\n", + " \"Circuit\n", + "
\n", + "\n", + "Other choices are valid, for example, $m_5 \\oplus m_6 \\oplus m_7$ would also work. The key is to select a constraint that depends on the logical state where the other detectors do not depend on the logical state.\n", + "\n", + "
\n", + "\n", + "**Exercise 4:**\n", + "\n", + "Use the following detectors to build $D$ for the two-round repetition code circuit. Also code the circuit in CUDA-Q. Using the Tanner graph below, try to identify an error pattern that flips the observable and does not trip any detectors. How many errors are required for this? Test one of these cases using your CUDA-Q code by manually entering bitflips, sampling the circuit, and confirming the resulting bitstring(s) do not flag any detectors.\n", + "\n", + "$$d_1: m_1 = 0$$\n", + "$$d_2: m_2 = 0$$\n", + "$$d_3: m_1 \\oplus m_3 = 0$$\n", + "$$d_4: m_2 \\oplus m_4 = 0$$\n", + "$$d_5: m_3 \\oplus m_5 \\oplus m_6 = 0$$\n", + "$$d_6: m_4 \\oplus m_6 \\oplus m_7 = 0$$\n", + "\n", + "
\n", + "\n", + "
\n", + " \"Tanner\n", + "
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cfa43393-e0cb-4ca9-bbb8-656f87ef0a96", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{ 0000111:1000 }\n", + "\n", + "[0 0 0 0 0 0]\n" + ] + } + ], + "source": [ + "# EXERCISE 4\n", + "\n", + "D = np.array([[1, 0, 0, 0, 0, 0, 0],\n", + " [0, 1, 0 ,0, 0, 0, 0], \n", + " [1, 0, 1, 0, 0, 0, 0],\n", + " [0, 1, 0, 1, 0, 0, 0],\n", + " [0, 0, 1, 0, 1, 1, 0],\n", + " [0, 0, 0, 1, 0, 1, 1]]) \n", + "\n", + "\n", + "# Example with E_7 E_8 E_9 E_11\n", + "\n", + "@cudaq.kernel\n", + "def exercise4():\n", + " ancilla = cudaq.qvector(4)\n", + " reg = cudaq.qvector(3)\n", + "\n", + " #state prep 0_L\n", + " x.ctrl(reg[0], reg[1])\n", + " x.ctrl(reg[0], reg[2])\n", + " \n", + " #M1\n", + " x.ctrl(reg[0], ancilla[0])\n", + " x.ctrl(reg[1], ancilla[0])\n", + "\n", + " #M2\n", + " x.ctrl(reg[1], ancilla[1])\n", + " x.ctrl(reg[2], ancilla[1])\n", + "\n", + " x(reg[1])\n", + " x(reg[2])\n", + "\n", + " #M3\n", + " x.ctrl(reg[0], ancilla[2])\n", + " x.ctrl(reg[1], ancilla[2])\n", + "\n", + " #M4\n", + " x.ctrl(reg[1], ancilla[3])\n", + " x.ctrl(reg[2], ancilla[3])\n", + "\n", + " x(reg[0]) # E9\n", + " x(ancilla[2]) #E11\n", + " \n", + "sample = cudaq.sample(exercise4)\n", + "print(sample)\n", + "\n", + "for bitstring in sample:\n", + " bitstring_np = np.fromiter((int(b) for b in bitstring), dtype=int)\n", + "\n", + "print((D @ bitstring_np)%2)\n" + ] + }, + { + "cell_type": "markdown", + "id": "d67d7571-31f9-4cb8-8840-ba1c1be72eb0", + "metadata": {}, + "source": [ + "---\n", + "\n", + "## 4. Initializing a Decoder in CUDA-Q with a DEM for Real-Time Decoding\n", + "\n", + "Aside from when the decoding occurs, what differentiates real-time decoding from how we think about offline decoding?\n", + "\n", + "The answer revolves around how the workflow is implemented in practice and where specific computations occur. Recall the workflow below. 
Though it is correct in general, it is oversimplified and hides some of the nuance.\n", + "\n", + "
\n", + " \"Diagram\n", + "
\n", + "\n", + "For example, the decoder actually decodes syndromes that are XOR'd with the previous syndrome in order to flag differences (often called detector events). Where does this XOR calculation occur? It cannot be the QPU which only outputs raw syndrome measurements. Similarly, the decoding process might output inferred errors, but all that matters is whether a logical flip occurred. At some point this needs to be determined from the actual decoding solution.\n", + "\n", + "A more slightly more realistic (but still simplified) real-time workflow is the following where the QPU only produces measurements and receives determination if logical flips occurred or not. The decoder must then handle everything else.\n", + "\n", + "
\n", + " \"Diagram\n", + "
\n", + "\n", + "This means when writing code for quantum algorithms, we need APIs that can be called within a quantum kernel to send data to a decoder and retrieve results when finished. CUDA-Q QEC enables real-time decoding workflows that look like the following.\n", + "\n", + "The first key function is `qec.enqueue_syndromes` which takes measurement data from the QPU and sends it to a preconfigured decoder. The second is `qec.get_corrections` which simply returns the logical flips so they can be applied and the algorithm can proceed. Everything else is handled within a preinitialized decoder. Such a construct is important because a more complex quantum algorithm may be sending data to multiple decoders depending on the scenario. Each can be initialized and prepared to use any technique necessary for the job.\n", + "\n", + "So, what does this have to do with DEMs? A real-time decoding workflow involves preparation of a decoder using a DEM. This results in a robust decoder that can handle a range of error models and process syndromes from the QPU with a single function call.\n", + "\n", + "
\n", + "\n", + "**Exercise 5:**\n", + "\n", + "Work through and run the code cells below to initialize a decoder with a DEM in CUDA-Q QEC for the Steane code. Along the way, you might be asked to fix the code or enter values as prompted.\n", + "\n", + "
\n", + "\n", + "First, we need to define a few helper functions that prepare the logical zero state and measure the stabilizer checks for the Steane code. We need to define two kernels, one to prepare the Steane code $\\ket{0_L}$ state and another to define measurement of the $Z$ stabilizers to check for $X$ errors. (We are ignoring $Z$ errors in this example for simplicity.) Both kernels should take a `qec.patch` object as an input and act on its `logical.data[i]`, `logical.ancz[i]`, or `logical.ancx[i]` registers. You can directly import the Steane code from CUDA-Q QEC, but you can also define it manually which is helpful if you bring your own code.\n", + "\n", + "Both of these kernels take a `qec.patch` object as input which is similar to providing a register, but contains three registers for $X$ and $Z$ check ancillas and the data qubits." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "17c380b7-cd97-4b16-aef5-716c09290648", + "metadata": {}, + "outputs": [], + "source": [ + "# EXERCISE 5\n", + "\n", + "os.environ[\"CUDAQ_DEFAULT_SIMULATOR\"] = \"stim\"\n", + "\n", + "# Prepare logical |0⟩\n", + "@cudaq.kernel\n", + "def prep0(logical: qec.patch):\n", + " h(logical.data[4])\n", + " h(logical.data[5])\n", + " h(logical.data[6])\n", + "\n", + " x.ctrl(logical.data[0],logical.data[1])\n", + " x.ctrl(logical.data[0],logical.data[2])\n", + "\n", + " x.ctrl(logical.data[4],logical.data[0])\n", + " x.ctrl(logical.data[4],logical.data[1])\n", + " x.ctrl(logical.data[4],logical.data[3])\n", + "\n", + " x.ctrl(logical.data[5],logical.data[0])\n", + " x.ctrl(logical.data[5],logical.data[2])\n", + " x.ctrl(logical.data[5],logical.data[3])\n", + "\n", + " x.ctrl(logical.data[6],logical.data[1])\n", + " x.ctrl(logical.data[6],logical.data[2])\n", + " x.ctrl(logical.data[6],logical.data[3])\n", + "\n", + "\n", + "# Measure Z stabilizers for Steane code\n", + "@cudaq.kernel\n", + "def measure_stabilizers_z(logical: qec.patch) -> list[bool]:\n", + "\n", 
+ " for i in range(logical.ancz.size()):\n", + " reset(logical.ancz[i])\n", + "\n", + " h(logical.ancz)\n", + "\n", + " z.ctrl(logical.ancz[0],logical.data[0])\n", + " z.ctrl(logical.ancz[0],logical.data[1])\n", + " z.ctrl(logical.ancz[0],logical.data[3])\n", + " z.ctrl(logical.ancz[0],logical.data[4])\n", + "\n", + " z.ctrl(logical.ancz[1],logical.data[0])\n", + " z.ctrl(logical.ancz[1],logical.data[2])\n", + " z.ctrl(logical.ancz[1],logical.data[3])\n", + " z.ctrl(logical.ancz[1],logical.data[5])\n", + "\n", + " z.ctrl(logical.ancz[2],logical.data[1])\n", + " z.ctrl(logical.ancz[2],logical.data[2])\n", + " z.ctrl(logical.ancz[2],logical.data[3])\n", + " z.ctrl(logical.ancz[2],logical.data[6])\n", + "\n", + " h(logical.ancz)\n", + "\n", + " return [mz(logical.ancz[0]), mz(logical.ancz[1]), mz(logical.ancz[2])]" + ] + }, + { + "cell_type": "markdown", + "id": "3fc11bcb-34e0-4916-975c-d9f1dc196f1a", + "metadata": {}, + "source": [ + "Next, the main QEC circuit is prepared. This prepares a set of registers for the data and ancilla qubits called a patch stored as the variable `logical`. Enter the correct number of qubits for each register.\n", + "\n", + "After preparing the initial state, three syndrome extraction cycles are run. The syndromes are simply obtained from the measurement outcomes of the stabilizer circuit you defined above. These are fed into the function `enqueue_syndromes` which takes the decoder ID, the syndromes, and an optional flag for debugging. Note that in complex QEC workflows, it is likely that multiple decoders will be used, hence a command is needed to send syndromes to any specified decoder.\n", + "\n", + "The decoder then follows any instructions it has and completes decoding.\n", + "\n", + "Finally, the corrections are obtained from the same decoder ID with `get_corrections` and the number of logical observables (3 in the case of three rounds). The corrections are then applied to the data qubits before measurement." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3dd7790e-912a-4d15-aafc-e3412be4e21d", + "metadata": {}, + "outputs": [], + "source": [ + "@cudaq.kernel\n", + "def qec_circuit() -> list[bool]:\n", + " qec.reset_decoder(0)\n", + "\n", + " data = cudaq.qvector(7)\n", + " ancz = cudaq.qvector(3)\n", + " ancx = cudaq.qvector(0) # Keep 0 as we are ignoring Z errors\n", + " logical = patch(data, ancx, ancz)\n", + "\n", + " prep0(logical)\n", + "\n", + " # 3 rounds of syndrome measurement\n", + " for _ in range(3):\n", + " syndromes = measure_stabilizers_z(logical)\n", + " qec.enqueue_syndromes(0, syndromes, 0)\n", + "\n", + " # Get corrections and apply them\n", + " corrections = qec.get_corrections(0, 3, False)\n", + " for i in range(3):\n", + " if corrections[i]:\n", + " x(data[i])\n", + "\n", + " return mz(data)" + ] + }, + { + "cell_type": "markdown", + "id": "1c30a75f-99dd-4076-99b0-e0e940cdb1bd", + "metadata": {}, + "source": [ + "Now that the prerequisites are complete, we can begin the main workflow and decoder initialization. First, use `get_code` to load information about the Steane code from the pre-built code information.\n", + "\n", + "Next, define a noise model. In the cell below, a depolarization error model is applied to all CNOT gates with a probability of 0.01.\n", + "\n", + "Then, call `z_dem_from_memory_circuit` to build a DEM given the QEC code, the logical state prep circuit, the number of rounds, and the noise model. Under the hood, this calculates the syndrome measurement matrix ($\\Omega$) and selects a set of detectors. The set of detectors picked by this function is valid, but may not be optimal." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "23481a15-f72c-4c80-8a11-2e0d47508cd6", + "metadata": {}, + "outputs": [], + "source": [ + "code = qec.get_code(\"steane\", distance=3)\n", + "\n", + "# [Begin DEM Generation]\n", + "print(\"Step 1: Generating DEM...\")\n", + "cudaq.set_target(\"stim\")\n", + "\n", + "noise = cudaq.NoiseModel()\n", + "noise.add_all_qubit_channel(\"x\", cudaq.Depolarization2(0.01), 1)\n", + "\n", + "dem = qec.z_dem_from_memory_circuit(code, qec.operation.prep0, 3, noise)\n", + "\n", + "print(dem.detector_error_matrix)" + ] + }, + { + "cell_type": "markdown", + "id": "e0622e0b-cc6e-41cc-b898-5d7673669723", + "metadata": {}, + "source": [ + "The decoder can be set up by setting a number of configurations and saving them in a YAML file. Notice these settings involve selecting the decoder type, how large the decoding problem is, sparse representation of the DEM, etc. The main idea is that you can have lots of flexibility when defining a decoder to balance the many different tradeoffs and that many of the settings come directly from the structure of the DEM. The [CUDA-Q QEC docs](https://nvidia.github.io/cudaqx/components/qec/introduction.html) provide details on all of the different settings." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "029b9078-5119-48c8-b094-963bebebad90", + "metadata": {}, + "outputs": [], + "source": [ + "config = qec.decoder_config() \n", + "config.id = 0 # Sets decoder ID\n", + "config.type = \"multi_error_lut\" # Specifies decoding algorithm\n", + "config.block_size = dem.detector_error_matrix.shape[1]\n", + "config.syndrome_size = dem.detector_error_matrix.shape[0]\n", + "config.H_sparse = qec.pcm_to_sparse_vec(dem.detector_error_matrix)\n", + "config.O_sparse = qec.pcm_to_sparse_vec(dem.observables_flips_matrix)\n", + "\n", + "# Calculate numRounds from DEM (we send 1 additional round, so add 1)\n", + "num_syndromes_per_round = 3 \n", + "num_rounds = dem.detector_error_matrix.shape[0] // num_syndromes_per_round + 1\n", + "config.D_sparse = qec.generate_timelike_sparse_detector_matrix(num_syndromes_per_round, num_rounds, False)\n", + "lut_config = qec.multi_error_lut_config()\n", + "lut_config.lut_error_depth = 2\n", + "config.set_decoder_custom_args(lut_config)\n", + "\n", + "multi_config = qec.multi_decoder_config()\n", + "multi_config.decoders = [config]\n", + "\n", + "with open(\"config.yaml\", 'w') as f:\n", + " f.write(multi_config.to_yaml_str(200))\n", + "print(\"Saved config to config.yaml\")" + ] + }, + { + "cell_type": "markdown", + "id": "4919a40a-0000-49ca-a70f-ff10f6cb1319", + "metadata": {}, + "source": [ + "With the decoder settings specified, all you need to do is load the configuration file with `configure_decoders_from_file()` and then use `cudaq.run()` specifying the `qec_circuit` you defined above. Recall that in the CUDA-Q kernel, you designated a decoder ID, so your main kernel could send syndromes to multiple decoders each with different settings depending on the needs.\n", + "\n", + "This experiment is run on the `stim` backend, but it can easily retarget to a physical QPU to perform real-time error correction on a device!" 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "39e107cb-6fb0-42d4-bd08-a0509d2426fc", + "metadata": {}, + "outputs": [], + "source": [ + "print(\"\\nStep 2: Running circuit with decoding...\")\n", + "\n", + "qec.configure_decoders_from_file(\"config.yaml\")\n", + "\n", + "run_result = cudaq.run(qec_circuit, shots_count=10)\n", + "\n", + "print(\"Ran 10 shots\")\n", + "\n", + "qec.finalize_decoders()\n", + "\n", + "print(\"\\nDone!\")" + ] + }, + { + "cell_type": "markdown", + "id": "51e645f5-6162-4b78-963a-2ab5a56017af", + "metadata": {}, + "source": [ + "A similar workflow was used to obtain the first ever real-time decoding results from a physical QPU. NVIDIA partnered with Quantinuum to use a relay-BP decoder to decode a 30-qubit qLDPC code on Quantinuum's Helios device. Experiments resulted in a median decode time of 67 microseconds and over a 5X error reduction from 4.95 to 0.925 thanks to decoding. This was also enabled by NVIDIA's low-latency NVQLink interconnect. You can read more about the work in the blog [here](https://developer.nvidia.com/blog/nvidia-nvqlink-architecture-integrates-accelerated-computing-with-quantum-processors/).\n", + "\n", + "## Conclusion\n", + "\n", + "As real-time error correction continues to mature, it is critically important to develop intuition for what such a procedure requires. You now have an understanding of DEMs and how they provide a robust means to capture a whole host of different errors rather than just errors within the encoded QEC memory. CUDA-Q QEC provides the infrastructure to prepare a decoder with a DEM (among other settings) and run real-time error correction through simple API calls within a kernel.\n", + "\n", + "Everything discussed in this notebook complements previous lessons you have completed as it is agnostic of which specific QEC code you are using in a real-time experiment." 
+ ] + }, + { + "cell_type": "markdown", + "id": "74c698b892e74165", + "metadata": {}, + "source": [ + "**Related Notebooks:**\n", + "* [QEC Decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb) — covers decoder fundamentals that this notebook builds upon\n", + "* [QEC Decoder Metrics and Parallel Window Decoding](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/08_QEC_Decoder_metrics_and_parallel_window_decoding.ipynb) — explores decoder throughput and reaction time metrics referenced in this notebook\n", + "* [QEC Stabilizers](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) — introduces stabilizer formalism used to construct detectors" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv (3.11.5)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.5" + }, + "learning_goals": { + "application_domain": "error_correction", + "cfqt_domain": "QCS", + "cfqt_proficiency": "B1", + "cfqt_subdomain": [ + "QCS.ERR", + "QCS.SW", + "QCS.ALG" + ], + "qist_competency_areas": [ + "quantum_error_correction", + "quantum_software", + "hpc_integration" + ] + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} \ No newline at end of file diff --git a/qec101/Images/dem/dem_rep.png b/qec101/Images/dem/dem_rep.png new file mode 100644 index 0000000..2ffa6d7 Binary files /dev/null and b/qec101/Images/dem/dem_rep.png differ diff --git a/qec101/Images/dem/dem_rep_clevel.png b/qec101/Images/dem/dem_rep_clevel.png new file mode 100644 index 0000000..43292f5 Binary files /dev/null and b/qec101/Images/dem/dem_rep_clevel.png differ diff --git a/qec101/Images/dem/dem_two_rep.png b/qec101/Images/dem/dem_two_rep.png new file mode 100644 index 
0000000..f9e60f4 Binary files /dev/null and b/qec101/Images/dem/dem_two_rep.png differ diff --git a/qec101/Images/dem/exercise1_circuit.png b/qec101/Images/dem/exercise1_circuit.png new file mode 100755 index 0000000..8d5f952 Binary files /dev/null and b/qec101/Images/dem/exercise1_circuit.png differ diff --git a/qec101/Images/dem/qec_workflow.png b/qec101/Images/dem/qec_workflow.png new file mode 100644 index 0000000..cb14aa7 Binary files /dev/null and b/qec101/Images/dem/qec_workflow.png differ diff --git a/qec101/Images/dem/rt_qec_workflow.png b/qec101/Images/dem/rt_qec_workflow.png new file mode 100644 index 0000000..e70117c Binary files /dev/null and b/qec101/Images/dem/rt_qec_workflow.png differ diff --git a/qec101/Images/dem/tanner.png b/qec101/Images/dem/tanner.png new file mode 100644 index 0000000..8fda8c3 Binary files /dev/null and b/qec101/Images/dem/tanner.png differ diff --git a/qec101/README.md b/qec101/README.md index 969f0d3..fee7703 100644 --- a/qec101/README.md +++ b/qec101/README.md @@ -15,24 +15,6 @@ Learners should have familiarity with Jupyter notebooks and programming in Pytho --- ## Notebooks -The Jupyter notebooks in this folder are designed to run in an environment with CUDA-Q and Python. For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). A Dockerfile and requirements.txt are also included in the main directory of the repository to help get you set up. +The Jupyter notebooks in this folder are designed to run in an environment with CUDA-Q and Python. For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). -Otherwise, if you have set up an account in any of the platforms listed below, -simply log in to the account, then click on the icons below to run the notebooks on the listed platform. 
- - -| Notebook |qBraid[^1] | Brev | Google Colab[^2] | -| ----------- | ----------- | ----------- | ----------- | -|Lab 1 - The Basics of Classical and Quantum Error Correction |Launch On qBraid | [![ Click here to deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5)| [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/qec101/01_QEC_Intro.ipynb)| -| Lab 2 - Stabilizers, the Shor code, and the Steane code |Launch On qBraid | [![ Click here to deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5) | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb)| -| Lab 3 - Noisy Simulation[^4] |Launch On qBraid | [![ Click here to deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5) | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/qec101/03_QEC_Noisy_Simulation.ipynb)| -| Lab 4 - Decoders[^4] |Launch On qBraid |[![ Click here to deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5) | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb)| -| Lab 5 - Magic State Distillation |Launch On qBraid |[![ Click here to 
deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5) | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/qec101/05_QEC_MSD.ipynb)| -| Lab 6 - The Toric and Surface Codes |Launch On qBraid |[![ Click here to deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5) | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/qec101/06_QEC_Topological_Codes.ipynb)| -| Lab 7 - qLDPC Codes |Launch On qBraid |[![ Click here to deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5) | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/qec101/07_QEC_qLDPC.ipynb)| -| Lab 8 - Decoder Metrics and (Temporal) Parallel Window Decoding |Launch On qBraid |[![ Click here to deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5) | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/qec101/08_QEC_Decoder_metrics_and_parallel_window_decoding.ipynb)| - - -[^1]:If using qBraid Lab, use the [Environment Manager](https://docs.qbraid.com/lab/user-guide/environments) to install the CUDA-Q environment and then activate it in your notebook. In qBraid Lab you can switch to a GPU instance using the [Compute Manager](https://docs.qbraid.com/lab/user-guide/compute-manager). 
-[^2]:You will need to run the command `!pip install cudaq` in a python code block in each notebook to run on Google CoLab. -[^3]:You will need to move the Images > noisy folder to your working environment to run the optional interactive widget. +Otherwise, explore our [Learning Pathways page](https://nvidia.github.io/cuda-q-academic/learningpath.html) for additional cloud-based options to run these notebooks. diff --git a/qec101/Solutions/01_QEC_Intro_Solution.ipynb b/qec101/Solutions/01_QEC_Intro_Solution.ipynb index 2e53dbe..1f80c61 100644 --- a/qec101/Solutions/01_QEC_Intro_Solution.ipynb +++ b/qec101/Solutions/01_QEC_Intro_Solution.ipynb @@ -27,128 +27,104 @@ { "cell_type": "markdown", "id": "135f6e3e-e55f-4b5c-80cb-f8979bba94e0", - "metadata": { - "id": "135f6e3e-e55f-4b5c-80cb-f8979bba94e0" - }, + "metadata": {}, "source": [ - "# QEC 101 Lab 1 - The Basics of Classical and Quantum Error Correction\n", + "# QEC 101 — Lab 1: The Basics of Classical and Quantum Error Correction — Solutions\n", + "$\\renewcommand{\\ket}[1]{|#1\\rangle}\\renewcommand{\\bra}[1]{\\langle#1|}$\n", "\n", - "$\n", - "\\renewcommand{\\ket}[1]{|{#1}\\rangle}\n", - "\\renewcommand{\\bra}[1]{\\langle{#1}|}\n", - "$\n", "---\n", - "## Overview\n", - "One of the biggest challenges in realizing practical quantum computing is the noisy nature of qubits, making quantum error correction (QEC) essential for detecting and fixing errors in real time. 
In this lab, you’ll explore the fundamentals of error correction (EC) concepts and terminology, walk through examples of classical EC codes, examine how QEC differs from classical methods, and ultimately get hands-on experience coding your first QEC procedure.\n", + "\n", + "**What You Will Do:**\n", + "* Define the five aspects common to all error correction procedures\n", + "* Implement the classical repetition code and analyze its performance\n", + "* Construct the generator and parity check matrices for the Hamming code\n", + "* Identify the challenges that distinguish quantum error correction from classical methods\n", + "* Implement the three-qubit quantum repetition code using CUDA-Q\n", "\n", "**Prerequisites:**\n", - "Learners should have familiarity with Jupyter notebooks and programming in Python and CUDA-Q. It is assumed the reader has some familiarity already with quantum computation and is comfortable with braket notation and the concepts of qubits, quantum circuits, measurement, and circuit sampling. 
The CUDA-Q Academic course entitled \"[Quick Start to Quantum Computing with CUDA-Q](https://github.com/NVIDIA/cuda-q-academic/tree/main/quick-start-to-quantum)\" provide a walkthrough of this prerequisite knowledge if the reader is new to quantum computing and CUDA-Q or needs refreshing.\n", - "\n", - "The list below outlines what you'll be doing in each section of this lab:\n", - "\n", - "* **1.1** Define the basics of EC, including the 5 aspects common to EC procedures\n", - "* **1.2** Code the classical repetition code\n", - "* **1.3** Code the classical Hamming code\n", - "* **1.4** Experiment with noisy qubits to understand what makes QEC challenging\n", - "* **1.5** Explore why there is still hope for QEC\n", - "* **1.6** Learn the theory for the quantum repetition code\n", - "* **1.7** Implement the quantum repetition code in CUDA-Q\n", - "\n", - "Terminology and notation you'll use\n", - "* encoder, decoder, logical codewords, codespace, error space, noisy channel, logical error, logical error rate\n", - "* repetition code, Hamming code, $[n,k,d]$-codes\n", - "* syndrome" - ] - }, - { - "cell_type": "markdown", - "id": "b5046650", - "metadata": { - "id": "b5046650" - }, - "source": [ - "Execute the cells below to load all the necessary packages for this lab." 
+ "* Python and Jupyter familiarity\n", + "* Basic knowledge of quantum computing (qubits, gates, measurement, circuit sampling)\n", + "* Familiarity with braket notation ($\\ket{\\psi}$, $\\bra{\\psi}$)\n", + "* Completion of [Quick Start to Quantum Computing with CUDA-Q](https://github.com/NVIDIA/cuda-q-academic/tree/main/quick-start-to-quantum) or equivalent knowledge\n", + "\n", + "**Key Terminology:**\n", + "* Encoder\n", + "* Decoder\n", + "* Logical Codewords\n", + "* Codespace\n", + "* Error Space\n", + "* Noisy Channel\n", + "* Logical Error\n", + "* Logical Error Rate\n", + "* Repetition Code\n", + "* Hamming Code\n", + "* $[n,k,d]$-codes\n", + "* Syndrome\n", + "* Parity Checks\n", + "* Distance\n", + "\n", + "**CUDA-Q Syntax:**\n", + "* [`@cudaq.kernel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.kernel) — defines a quantum kernel function\n", + "* [`cudaq.qvector`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.qvector) — allocates a register of qubits\n", + "* [`cudaq.sample`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.sample) — samples measurement outcomes from a kernel\n", + "* [`cudaq.set_target`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.set_target) — selects simulation or hardware backend\n", + "* [`cudaq.NoiseModel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.NoiseModel) — defines a quantum noise model\n", + "* [`cudaq.BitFlipChannel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.KrausChannel) — bit-flip noise channel\n", + "* [`cudaq.register_operation`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.register_operation) — registers a custom unitary gate" ] }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "id": "622f2dcb", "metadata": {}, "outputs": [], "source": [ 
- "## Instructions for Google Colab. You can ignore this cell if you have cuda-q set up and have \n", - "# all the dependent files on your system\n", - "# Uncomment the lines below and execute the cell to install cuda-q\n", - "\n", - "#!pip install cudaq\n", + "## Instructions for Google Colab. You can ignore this cell if you have CUDA-Q\n", + "## set up locally with all required files on your system.\n", + "## Uncomment the lines below and execute this cell to install CUDA-Q.\n", "\n", + "#!pip install cudaq -q\n", + "#\n", "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", "#!unzip -q main.zip\n", - "#!mv cuda-q-academic-main/qec101/Images ./Images\n" + "#!mv cuda-q-academic-main/qec101/Images ./Images" ] }, { - "cell_type": "code", - "execution_count": 3, - "id": "3783a385", + "cell_type": "markdown", + "id": "b5046650", "metadata": {}, - "outputs": [], "source": [ - "# install `qutip` and `ipywidgets` in the current Python kernel. Skip this if they are already installed.\n", - "# `matplotlib` is required for all visualization tasks.\n", - "# Make sure to restart your kernel if you execute this!\n", - "# In a Jupyter notebook, go to the menu bar > Kernel > Restart Kernel.\n", - "# In VSCode, click on the Restart button in the Jupyter toolbar.\n", - "\n", - "# The '\\' before the '>' operator is so that the shell does not misunderstand\n", - "# the '>' qualifier for the bash pipe operation.\n", - "\n", - "import sys\n", - "\n", - "try:\n", - " import matplotlib.pyplot as plt\n", - " import qutip\n", - " import ipywidgets as widgets\n", - " import matplotlib_venn\n", - "\n", - "except ImportError:\n", - " print(\"Tools not found, installing. Please restart your kernel after this is done.\")\n", - " !{sys.executable} -m pip install qutip\\>=5 matplotlib\\>=3.5 matplotlib_venn\n", - " !{sys.executable} -m pip install ipywidgets\n", - " print(\"\\nNew libraries have been installed. 
Please restart your kernel!\")" + "> **Note:** Run the cell below to import all required packages.\n", + "> If you installed packages above, restart the kernel first\n", + "> (**Runtime → Restart session** in Colab, or **Kernel → Restart** in Jupyter)." ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "id": "644b9c82", - "metadata": { - "id": "644b9c82" - }, + "metadata": {}, "outputs": [], "source": [ - "import cudaq\n", - "from cudaq import spin\n", - "from cudaq.qis import *\n", - "import numpy as np\n", "import random\n", - "import matplotlib.pyplot as plt\n", "\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", "\n", - "from typing import List\n", - "import ipywidgets as widgets\n", - "from ipywidgets import interact, Output, VBox, HBox\n", - "from IPython.display import display" + "import cudaq\n", + "from cudaq import spin\n", + "from cudaq.qis import *" ] }, { "cell_type": "markdown", "id": "c2fa8e57-2f20-4d1b-a8c1-a8291da83fdc", - "metadata": { - "id": "c2fa8e57-2f20-4d1b-a8c1-a8291da83fdc" - }, + "metadata": {}, "source": [ + "---\n", + "\n", "## 1.1 The Basics of Error Correction" ] }, @@ -185,6 +161,8 @@ "id": "1d55edc7-6827-4a0b-93ed-199e19ae7a22", "metadata": {}, "source": [ + "---\n", + "\n", "## 1.2 The Repetition Code" ] }, @@ -193,7 +171,7 @@ "id": "261cb846-2e18-4ab1-bf50-800bc7c0abc2", "metadata": {}, "source": [ - "The most basic EC code is called the repetition code. \n", + "The most basic EC code is called the **repetition code**. \n", "\n", "Consider encoding the information in a single bit (0 or 1). The repetition code simply adds more bits which are in the same state. So a 3-bit repetition code encodes the logical 0 state ($0_L$) as 000 and the logical 1 state ($1_L$) as 111, making 000 and 111 the logical codewords.\n", "\n", @@ -224,56 +202,28 @@ "\n", "The table below shows the likelihood of the four possible scenarios below. 
Notice the three bit repetition code with majority count will transmit the message with 0.972 probability of success, a significant improvement over the original probability of 0.9. The **logical error rate** is equal to $1-p$, where $p$ is the probability of success. In the case of the 3-bit repetition code, the logical error rate is 0.028.\n", "\n", - "\"Drawing\"\n", + "\"Table\n", "\n", "\n", "\n", - "
\n", - "

Exercise 1 - The repetition code:

\n", - "

\n", - "You now know enough to code up the repetition code. The exercise below will require you to generalize the repetition code so it will work with $n$ bits. Fill in the #TODO sections and then observe the plots that are generated. What conclusions can you draw from the code performance using more bits? What do you notice about the logical error rate relative to the physical error rate? \n", - "

\n", + "
\n", + "\n", + "**Exercise 1:** The Repetition Code\n", + "\n", + "You now know enough to code up the repetition code. The exercise below will require you to generalize the repetition code so it will work with $n$ bits. Fill in the `##TODO##` sections and then observe the plots that are generated. What conclusions can you draw from the code performance using more bits? What do you notice about the logical error rate relative to the physical error rate?\n", + "\n", "
" ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "id": "0118be65-9328-4a6a-9c94-debb6864cf89", "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAA04AAAIjCAYAAAA0vUuxAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjcsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvTLEjVAAAAAlwSFlzAAAPYQAAD2EBqD+naQAAeShJREFUeJzt3Xd4VGX6xvF7JpWEJNQUipRQQ8BQhA0KolRLEBcVC4LYQWzRdWV1gdiwIGJbUFZARQW7PywBRFkbEiG0EECBUJQUIJCEhLSZ8/sjZmRImwlJZpJ8P9eVS+ec95x5ZnhFbs57nmMyDMMQAAAAAKBCZlcXAAAAAADujuAEAAAAAFUgOAEAAABAFQhOAAAAAFAFghMAAAAAVIHgBAAAAABVIDgBAAAAQBUITgAAAABQBYITAAAAAFSB4ASgQRs2bJiGDRtWa+fv2LGjbrrpplo7P+ofk8mk6dOn1+l7Ms8BoPYRnADUiaVLl8pkMmnjxo2uLsUlTCZThT933nmnq8sr17p16+zq9PDwUHBwsK666irt3Lmz2ud96qmn9Omnn9ZcoXXk9O/CbDarTZs2GjVqlNatW+fq0twG8/wv9XWeA6iYp6sLAIDatHr1aleXYDNy5EhNmjSpzPZu3bq5oBrH3XPPPTrvvPNUVFSkbdu2aeHChVq3bp2SkpIUGhrq9PmeeuopXXXVVRo3blzNF1vLSn8NDcNQSkqK/vOf/+jiiy/WF198oUsuucRldTHPzx7zHEBVCE4AGjRvb29Xl2DTrVs3TZw40enj8vLy5OfnV2Z7cXGxrFbrWX3G3Nxc+fv7VzpmyJAhuuqqq2yvu3fvrqlTp+qtt97SQw89VO33ro/O/DW88sor1adPH82fP9+lwYl5XjnmOYCawFI9AG5l8+bNuuSSSxQYGKimTZtq+PDh+vnnn8uM27Ztmy688EI1adJE7dq10xNPPKElS5bIZDJp//79tnHl3fuRn5+v2bNnq1u3bvL19VVYWJj+/ve/a+/evbYxc+fO1eDBg9WyZUs1adJE/fv314cfflhbH9uu3sjISG3atElDhw6Vn5+f/vWvf2n//v0ymUyaO3eu5s+fr/DwcPn4+Cg5OVmS9M0332jIkCHy9/dXs2bNdMUVV5RZZjR79myZTCYlJyfr+uuvV/PmzXXBBRc4XeOQIUMkye77khz7zkwmk3Jzc/Xmm2/alkadfu/MH3/8oZtvvlkhISHy8fFRr169tHjx4iprioyM1EUXXVRmu9VqVdu2be3+QLx8+XL1799fAQEBCgwMVO/evfXiiy868xXY9O7dW61atVJKSkqZfZ9++qkiIyNtnyM+Pt6279tvv5XJZNInn3xS5rh3331XJpNJ69evlySlpaVpypQpateunXx8fBQWFqYrrriCed4I5zkA1+KKEwC3sWPHDg0ZMkSBgYF66KGH5OXlpddee03Dhg3T//73Pw0aNEhSyR86LrroIplMJs2YMUP+/v7673//Kx8fnyrfw2Kx6PLLL9fatWt17bXX6t5771VOTo7WrFmjpKQkhYeHS5JefPFFjR07VjfccIMKCwu1fPlyXX311fr888912WWXVevz5efn6+jRo2W2BwYG2v1t+rFjx3TJJZfo2muv1cSJExUSEmLbt2TJEuXn5+v222+Xj4+PWrRooa+//lqXXHKJOnfurNmzZ+vUqVN6+eWXdf755ysxMVEdO3a0e7+rr75aXbt21VNPPSXDMJz+HKV/YG/evLnddke+s7ffflu33nqrBg4cqNtvv12SbN95enq6/va3v9maK7Ru3Vpfff
WVbrnlFmVnZ+u+++6rsKYJEyZo9uzZSktLs1tW9cMPP+jw4cO69tprJUlr1qzRddddp+HDh+uZZ56RJO3cuVM//vij7r33Xqe/i+PHj+v48ePq0qWL3fYffvhBH3/8saZNm6aAgAC99NJLGj9+vA4ePKiWLVtq2LBhat++vd555x1deeWVdse+8847Cg8PV3R0tCRp/Pjx2rFjh+6++2517NhRGRkZWrNmjQ4ePFjm17YU87xhznMALmYAQB1YsmSJIcn45ZdfKhwzbtw4w9vb29i7d69t2+HDh42AgABj6NChtm133323YTKZjM2bN9u2HTt2zGjRooUhyUhJSbFtv/DCC40LL7zQ9nrx4sWGJGPevHll3t9qtdr+PS8vz25fYWGhERkZaVx88cV22zt06GBMnjy5ws9USlKFP++9955dvZKMhQsX2h2fkpJiSDICAwONjIwMu31RUVFGcHCwcezYMdu2rVu3Gmaz2Zg0aZJt26xZswxJxnXXXVdlvYZhGN9++60hyVi8eLFx5MgR4/Dhw0Z8fLzRpUsXw2QyGQkJCXbjHf3O/P39y/3ObrnlFiMsLMw4evSo3fZrr73WCAoKKnP+0+3evduQZLz88st226dNm2Y0bdrUduy9995rBAYGGsXFxVV+/jNJMm655RbjyJEjRkZGhrFhwwZj+PDhhiTj+eeftxvn7e1t7Nmzx7Zt69atZeqbMWOG4ePjY5w4ccK2LSMjw/D09DRmzZplGIZhHD9+3JBkPPfcc5XWxjxvHPMcgGuxVA+AW7BYLFq9erXGjRunzp0727aHhYXp+uuv1w8//KDs7GxJUnx8vKKjoxUVFWUb16JFC91www1Vvs9HH32kVq1a6e677y6zz2Qy2f69SZMmtn8/fvy4srKyNGTIECUmJlbn40mSrrjiCq1Zs6bMz5lLzHx8fDRlypRyzzF+/Hi1bt3a9jo1NVVbtmzRTTfdpBYtWti29+nTRyNHjtSXX35Z5hzOdje7+eab1bp1a7Vp00ZjxoxRVlaW3n77bZ133nl2487mOzMMQx999JFiYmJkGIaOHj1q+xk9erSysrIqPU+3bt0UFRWlFStW2LZZLBZ9+OGHiomJsdXWrFkz5ebmas2aNU59B6XeeOMNtW7dWsHBwRo0aJB+/PFHxcbGlrlKMGLECNsVBqnk1yMwMFD79u2zbZs0aZIKCgrslnmtWLFCxcXFtnuEmjRpIm9vb61bt07Hjx93uE7mecOc5wBci6V6ANzCkSNHlJeXp+7du5fZ17NnT1mtVh06dEi9evXSgQMHbMuYTnfmcqny7N27V927d5enZ+W//X3++ed64okntGXLFhUUFNi2n/6HTme1a9dOI0aMqHJc27ZtK7wRvlOnTnavDxw4IEkVfm+rVq0qc2P8meeoysyZMzVkyBCdPHlSn3zyiZYvXy6zuezfu53Nd3bkyBGdOHFCr7/+ul5//fVyx2RkZFR6jgkTJuhf//qX/vjjD7Vt21br1q1TRkaGJkyYYBszbdo0vf/++7rkkkvUtm1bjRo1Stdcc43GjBlTZY1SSSiYPn26TCaTAgIC1KtXr3KbDpxzzjlltjVv3twu/PTo0UPnnXee3nnnHd1yyy2SSpbp/e1vf7PNZR8fHz3zzDN64IEHFBISor/97W+6/PLLNWnSpEo7vTHPG+48B+A6BCcAOMP333+vsWPHaujQofrPf/6jsLAweXl5acmSJXr33Xdr/f1P/xttZ/bVxPnL07t3b9sfhMeNG6e8vDzddtttuuCCC9S+fXtJZ/+dWa1WSdLEiRM1efLkcsf06dOn0nNMmDBBM2bM0AcffKD77rtP77//voKCguxCUXBwsLZs2aJVq1bpq6++0ldffaUlS5Zo0qRJevPNN6us09FQ4OHhUe5244x7bSZNmqR7771Xv//+uwoKCvTzzz/rlVdesRtz3333KSYmRp9++qlWrVqlf//735ozZ46++eYb9e3bt8paKsI8t1
df5jkA1yE4AXALrVu3lp+fn3bv3l1m365du2Q2m21/eOnQoYP27NlTZlx5284UHh6uDRs2qKioSF5eXuWO+eijj+Tr66tVq1bZNZxYsmSJox+nznTo0EGSKvzeWrVqVWUbZmc9/fTT+uSTT/Tkk09q4cKFkpz7zsr7m/nWrVsrICBAFovFoWBSnk6dOmngwIFasWKFpk+fro8//ljjxo0r0zTE29tbMTExiomJkdVq1bRp0/Taa6/p3//+t0NXLWvStddeq9jYWL333ns6deqUvLy87K6QlQoPD9cDDzygBx54QL/99puioqL0/PPPa9myZeWel3l+9tx1ngNwHe5xAuAWPDw8NGrUKH322Wd2bZbT09P17rvv6oILLlBgYKAkafTo0Vq/fr22bNliG5eZmal33nmnyvcZP368jh49WuZv9aW/rgZ4eHjIZDLJYrHY9u3fv1+ffvpp9T5cLQoLC1NUVJTefPNNnThxwrY9KSlJq1ev1qWXXlrj7xkeHq7x48dr6dKlSktLk+Tcd+bv729Xa+nx48eP10cffaSkpKQyxxw5csSh2iZMmKCff/5Zixcv1tGjR8uEkGPHjtm9NpvNtr/hP33ZVV1p1aqVLrnkEi1btkzvvPOOxowZo1atWtn25+XlKT8/3+6Y8PBwBQQEVFov8/zsufM8B+AaXHECUKcWL15s9zybUvfee6+eeOIJrVmzRhdccIGmTZsmT09PvfbaayooKNCzzz5rG/vQQw9p2bJlGjlypO6++25bO/JzzjlHmZmZld5rMGnSJL311luKjY1VQkKChgwZotzcXH399deaNm2arrjiCl122WWaN2+exowZo+uvv14ZGRl69dVX1aVLF23btq3an/3XX38t9wpBSEiIRo4cWe3zPvfcc7rkkksUHR2tW265xdamOSgoSLNnz672eSvzj3/8Q++//77mz5+vp59+2qnvrH///vr66681b948tWnTRp06ddKgQYP09NNP69tvv9WgQYN02223KSIiQpmZmUpMTNTXX3+tzMzMKuu65ppr9OCDD+rBBx9UixYtyvyt/q233qrMzExdfPHFateunQ4cOKCXX35ZUVFR6tmzZ41+R46aNGmS7TlTjz/+uN2+X3/9VcOHD9c111yjiIgIeXp66pNPPlF6erqtxXpF52Senz13necAXMSFHf0ANCKl7cgr+jl06JBhGIaRmJhojB492mjatKnh5+dnXHTRRcZPP/1U5nybN282hgwZYvj4+Bjt2rUz5syZY7z00kuGJCMtLc027sw2zYZR0k74kUceMTp16mR4eXkZoaGhxlVXXWXXBv2NN94wunbtavj4+Bg9evQwlixZYmtzfLqaaNN8en0XXnih0atXrzLHl7Zprqgt9ddff22cf/75RpMmTYzAwEAjJibGSE5OthtTWv+RI0eqrNcw/mrT/MEHH5S7f9iwYUZgYKCtnbaj39muXbuMoUOHGk2aNDEk2X1/6enpxl133WW0b9/e9mszfPhw4/XXX3eoZsMwjPPPP9+QZNx6661l9n344YfGqFGjjODgYMPb29s455xzjDvuuMNITU2t8rySjLvuuqva4yqaKwUFBUbz5s2NoKAg49SpU3b7jh49atx1111Gjx49DH9/fyMoKMgYNGiQ8f7779uNY57/pbHMcwB1z2QY1XgqHAC4ofvuu0+vvfaaTp48WeHN+YC7KS4uVps2bRQTE6M33njD1eUAACrAPU4A6qVTp07ZvT527JjefvttXXDBBYQm1Cuffvqpjhw5okmTJrm6FABAJbjiBKBeioqK0rBhw9SzZ0+lp6frjTfe0OHDh7V27VoNHTrU1eUBVdqwYYO2bdumxx9/XK1ateLBpwDg5mgOAaBeuvTSS/Xhhx/q9ddfl8lkUr9+/fTGG28QmlBvLFiwQMuWLVNUVJSWLl3q6nIAAFXgihMAAAAAVIF7nAAAAACgCgQnAAAAAKhCo7vHyWq16vDhwwoICKj0IZ
kAAAAAGjbDMJSTk6M2bdrIbK78mlKjC06HDx9W+/btXV0GAAAAADdx6NAhtWvXrtIxjS44BQQESCr5cgIDA11cjVRUVKTVq1dr1KhR8vLycnU5cHPMFziLOQNnMWfgLOYMnOVOcyY7O1vt27e3ZYTKNLrgVLo8LzAw0G2Ck5+fnwIDA10+ceD+mC9wFnMGzmLOwFnMGTjLHeeMI7fw0BwCAAAAAKpAcAIAAACAKhCcAAAAAKAKBCcAAAAAqALBCQAAAACqQHACAAAAgCoQnAAAAACgCgQnAAAAAKgCwQkAAAAAquAWwenVV19Vx44d5evrq0GDBikhIaHCsUuXLpXJZLL78fX1rcNqAQAAADQ2Lg9OK1asUGxsrGbNmqXExESde+65Gj16tDIyMio8JjAwUKmpqbafAwcO1GHFAAAAABoblwenefPm6bbbbtOUKVMUERGhhQsXys/PT4sXL67wGJPJpNDQUNtPSEhIHVYMAAAAoLHxdOWbFxYWatOmTZoxY4Ztm9ls1ogRI7R+/foKjzt58qQ6dOggq9Wqfv366amnnlKvXr3KHVtQUKCCggLb6+zsbElSUVGRioqKauiTVF9pDe5QC9wf8wXOYs7AWcwZOIs5A2e505xxpgaXBqejR4/KYrGUuWIUEhKiXbt2lXtM9+7dtXjxYvXp00dZWVmaO3euBg8erB07dqhdu3Zlxs+ZM0dxcXFltq9evVp+fn4180FqwJo1a1xdAuoR5gucxZyBs5gzcBZzBo6wGtLebJOyi0z67cOvFR5oyGxyXT15eXkOj3VpcKqO6OhoRUdH214PHjxYPXv21GuvvabHH3+8zPgZM2YoNjbW9jo7O1vt27fXqFGjFBgYWCc1V6aoqEhr1qzRyJEj5eXl5epy4OaYL3AWcwbOYs7AWcwZOGrVjnTN+XKX0rL/Wg0WGuijRy/todG9XHPrTelqNEe4NDi1atVKHh4eSk9Pt9uenp6u0NBQh87h5eWlvn37as+ePeXu9/HxkY+PT7nHudN/3O5WD9wb8wXOYs7AWcwZOIs5g8rEJ6Xq7uVbZZyxPT27QHcv36oFE/tpTGRYndflzJx1aXMIb29v9e/fX2vXrrVts1qtWrt2rd1VpcpYLBZt375dYWF1/0UDAAAAqJzFaihuZXKZ0CTJti1uZbIs1vJGuA+Xd9WLjY3VokWL9Oabb2rnzp2aOnWqcnNzNWXKFEnSpEmT7JpHPPbYY1q9erX27dunxMRETZw4UQcOHNCtt97qqo8AAAAAoAIJKZlKzcqvcL8hKTUrXwkpmXVXVDW4/B6nCRMm6MiRI5o5c6bS0tIUFRWl+Ph4W8OIgwcPymz+K98dP35ct912m9LS0tS8eXP1799fP/30kyIiIlz1EQAAAABUICOn4tBUnXGu4vLgJEnTp0/X9OnTy923bt06u9cvvPCCXnjhhTqoCgAAAMDZOpHnWMvv4ADfWq7k7LhFcAIAAADQsFithl77bp+eW1X+Y4ZKmSSFBvlqYKcWdVNYNRGcAAAAANSooycLFPv+Vn336xFJ0oAOzbXpwHFJsmsSUfoIp1kxEfJw5QOdHEBwAgAAAFBjftp7VPct36KMnAL5epk1O6aXJpzXXqt2pCluZbJdo4jQIF/NiolwSStyZxGcAAAAADjFYjWUkJKpjJx8BQf8tczupbW/6aVvfpNhSF2Dm+qV6/upe2iAJGlMZJhGRoRq/Z4Mrf5+g0YNGaToLsFuf6WpFMEJAAAAgMPik1LLXDkKDvBRUBMv/ZZxUpJ0zYB2mj22l/y87eOGh9mkQZ1a6NhOQ4M6tag3oUkiOAEAAABwUHxSqqYuSyzzMNuMnAJl5BTIx9OsZ8b30bi+bV1SX21y+QNwAQAAALg/i9VQ3MrkMqHpdIFNvBRzbps6q6kuEZwAAAAAVCkhJdNueV55juQUKCEls44qqlsEJwAAAABVysipPDQ5O66+ITgBAAAAqFJwgG+NjqtvaA4BAAAAoF
KGYSjx4PFKx5hU8lym0tbkDQ3BCQAAAECFCoot+tfHSfoo8XfbNpNk1ySitKn4rJiIetVi3BkEJwAAAADlOnayQHcu26Rf9h+X2STNiumlkECfMs9xCg3y1ayYCI2JDHNhtbWL4AQAAACgjF/Tc3Tz0l/0+/FTCvDx1Cs39NOF3VpLkkZGhCohJVMZOfkKDihZntdQrzSVIjgBAAAAsPPt7gzd/e5mnSwo1jkt/LT4pgHqEhxg2+9hNik6vKULK6x7BCcAAACgkbJYDbsrR+d1bK431x/Qk18ky2pIAzu10GsT+6u5v7erS3U5ghMAAADQCMUnpZa5V6mJt4dOFVokSdcMaKcnxvWWtydPMJIITgAAAECjE5+UqqnLEu0640myhabx/drqmfF9ZDI17PuWnEF8BAAAABoRi9VQ3MrkMqHpdD/tPSZrZQMaIYITAAAA0IgkpGTaLc8rT2pWvhJSMuuoovqB4AQAAAA0Ihk5lYcmZ8c1FgQnAAAAoBEJDvCt0XGNBcEJAAAAaEQ6tvSr9GG1JklhQSUPtcVfCE4AAABAI3Eir1A3LflFlgo6P5TGqVkxEZWGq8aI4AQAAAA0AicLijV5yS/anZ6j4AAfPX5FpMKC7JfjhQb5asHEfhoTGeaiKt0Xz3ECAAAAGrj8IotuffMXbT10Qs39vLTs1kHqFhKg6wedo4SUTGXk5Cs4oGR5HleaykdwAgAAABqwwmKrpr2TqJ/3Zaqpj6feurkkNEmSh9mk6PCWLq6wfmCpHgAAANBAWayGYt/fom92ZcjXy6zFN52n3u2CXF1WvURwAgAAABogwzD0yCfb9fm2VHl5mLRwYn865Z0FghMAAADQwBiGoSe/2KnlvxyS2SS9eG1fDese7Oqy6jXucQIAAADqOYvVsGvy8PO+Y/rvDymSpKfH99GlvemSd7YITgAAAEA9Fp+UqriVyUrNyi+zb1ZMhK4Z0N4FVTU8BCcAAACgnopPStXUZYkq/3G2KvOcJlQf9zgBAAAA9ZDFaihuZXKFockkKW5lsizWikbAGQQnAAAAoB5KSMksd3leKUNSala+ElIy666oBozgBAAAANRDGTkVh6bqjEPlCE4AAABAPWMYhnal5jg0NjiA+5xqAs0hAAAAgHrkeG6h/vXJdn2VlFbpOJOk0CBfHnpbQ7jiBAAAANQT//v1iEbP/05fJaXJ02zSFVFtZFJJSDpd6etZMRHyMJ+5F9XBFScAAADAzZ0qtOjpr3bqzfUHJEnhrf01f0Jf9W4XpEsiQ8s8xyk0yFezYiI0JpIH39YUghMAAADgJixWQwkpmcrIyVdwQMkyu52p2bp3+WbtPZIrSZoc3UEPX9JTTbw9JEljIsM0MiK0zHFcaapZBCcAAADADcQnpZa5ctTUx1N5hcWyGlJwgI+eu/pcXditdZljPcwmRYe3rMtyGx2CEwAAAOBi8UmpmrossczDbE8WFEuS+rZvpsU3nafm/t51Xxwk0RwCAAAAcCmL1VDcyuQyoel0adn5CmziVWc1oSyCEwAAAOBCCSmZdsvzypOala+ElMw6qgjlITgBAAAALpSRU3locnYcagfBCQAAAHCh3D/vY6pKcIBvLVeCytAcAgAAAHCRlVsPK27ljkrHmFTyXKaBnVrUTVEoF1ecAAAAgDpmtRqau2q37n5vswqKDUWEBcqkkpB0utLXs2IieC6TixGcAAAAgDp0sqBYdyzbpFe+3SNJun1oZ628+wItmNhPoUH2y/FCg3y1YGI/jYkMc0WpOA1L9QAAAIA6cvBYnm596xf9mn5S3p5mPf333vp7v3aSpDGRYRoZEaqElExl5OQrOKBkeR5XmtwDwQkAAACoYRarUSYAbUg5pmnvJOpEXpGCA3z02o391fec5nbHeZhNig5v6aKqURmCEwAAAFCD4pNSFbcy2e7ZTIG+njpZUCyrIZ3bLkiv3TigzLI8uDeCEwAAAFBD4pNSNXVZoowztmfnl7QcH9ixud
66ZZB8vTzqvjicFZpDAAAAADXAYjUUtzK5TGg63aHjp+TlwR/B6yN+1QAAAIAakJCSabc8rzypWflKSMmso4pQkwhOAAAAQA3IyKk8NDk7Du6F4AQAAADUgOAAx5o9ODoO7oXgBAAAANSA8zo2VxPvips+mCSFBZW0Jkf9Q3ACAAAAasCb6w/oVKGl3H2lj7CdFRPBA23rKYITAAAAcJa+3ZWhJ79IliRd1a+tws54RlNokK8WTOynMZFhrigPNYDnOAEAAABnYXdaju5+b7OshjRhQHs9Pb63rEZJl72MnHwFB5Qsz+NKU/1GcAIAAACq6djJAt3y5i86WVCsQZ1a6PFxkTKZTPIwSdHhLV1dHmoQS/UAAACAaigotuiOtzfp9+On1KGlnxZO7C9vT/543VDxKwsAAAA4yTAMzfh4uzYeOK4AX0+9Mfk8Nff3dnVZqEUEJwAAAMBJC/+3Tx8n/iEPs0mvXt9PXYKburok1DKCEwAAAOCEVTvS9OyqXZJK2osP7dbaxRWhLhCcAAAAAAftOJyl+5ZvkWFIN/6tgyZFd3R1SagjdNUDAAAAKmCxGra24t6eZsX93w6dKrLogi6tNCsmwtXloQ4RnAAAAIByxCelKm5lslKz8u22hwT66NUb+snTg8VbjQm/2gAAAMAZ4pNSNXVZYpnQJEnp2QVav/eoC6qCKxGcAAAAgNNYrIbiVibLqGC/SVLcymRZrBWNQENEcAIAAABOk5CSWe6VplKGpNSsfCWkZNZdUXA5ghMAAABwmv3HTjo0LiOn4nCFhsctgtOrr76qjh07ytfXV4MGDVJCQoJDxy1fvlwmk0njxo2r3QIBAADQ4GXlFWne6t2KW5ns0PjgAN9argjuxOVd9VasWKHY2FgtXLhQgwYN0vz58zV69Gjt3r1bwcHBFR63f/9+PfjggxoyZEgdVgsAAID65vSW4sEBvhrYqYU8zCbb/uz8Ii3+IUVv/JCinPxiSZKn2aTiCu5hMkkKDSo5DxoPlwenefPm6bbbbtOUKVMkSQsXLtQXX3yhxYsX6+GHHy73GIvFohtuuEFxcXH6/vvvdeLEiTqsGAAAAPVFeS3Fw4J8NSsmQud3aaWlP+7Xou/3KfvPwNQ9JED3jegqw5DuejdRkuyaRJTGrVkxEXbhCw2fS4NTYWGhNm3apBkzZti2mc1mjRgxQuvXr6/wuMcee0zBwcG65ZZb9P3331f6HgUFBSooKLC9zs7OliQVFRWpqKjoLD/B2SutwR1qgftjvsBZzBk4izkDZ7nznFm1I113L99apjteWla+7lyWKD9vD+UVWiRJXVr76+6LwjWmV4jMfwail689V098uUtp2X/9WTI0yEePXNJDw7u3csvPXB+405xxpgaXBqejR4/KYrEoJCTEbntISIh27dpV7jE//PCD3njjDW3ZssWh95gzZ47i4uLKbF+9erX8/Pycrrm2rFmzxtUloB5hvsBZzBk4izkDZ7nbnLEaUlyix5+hyf7KUGmQyiu0qLWPoUvOsapvyyzpUKLiD9mf558R0t5sk7KLpEAvKTwwV5YDm/TlgTr4EA2cO8yZvLw8h8e6fKmeM3JycnTjjTdq0aJFatWqlUPHzJgxQ7GxsbbX2dnZat++vUaNGqXAwMDaKtVhRUVFWrNmjUaOHCkvLy9XlwM3x3yBs5gzcBZzBs5y1zmzISVTJ37eWOW4udcN0ODwlnVQEUq505wpXY3mCJcGp1atWsnDw0Pp6el229PT0xUaGlpm/N69e7V//37FxMTYtlmtVkmSp6endu/erfDwcLtjfHx85OPjU+ZcXl5eLv+FOp271QP3xnyBs5gzcBZzBs5ytzlzLK/YoXEn8i1uVXdj4g5zxpn3d2k7cm9vb/Xv319r1661bbNarVq7dq2io6PLjO/Ro4e2b9+uLVu22H7Gjh2riy66SFu2bFH79u3rsnwAAAC4KUdbhdNSHI5y+VK92NhYTZ48WQMGDNDAgQM1f/
585ebm2rrsTZo0SW3bttWcOXPk6+uryMhIu+ObNWsmSWW2AwAAoPEa2KmFmvl56URe+Tf/01IcznJ5cJowYYKOHDmimTNnKi0tTVFRUYqPj7c1jDh48KDMZrd4Ti8AAADqidU70pR9quLQJNFSHM5xeXCSpOnTp2v69Onl7lu3bl2lxy5durTmCwIAAEC99dX2VN393mZZDWlgx+Y6mHlKadl/Pccp9M/nOI2JDHNhlahv3CI4AQAAADWhNDQVWw2Ni2qj56+JkiQlpGQqIydfwQEly/O40gRnEZwAAADQIMQnlQ1NpQEpmpbjOEvcPAQAAIB6Lz4pVdPfLT80ATWB4AQAAIB6jdCEusBSPQAAANQLFqtR5l6lNclphCbUCYITAAAA3F58UqriViYrNeuv7njN/LyUfapIVkOEJtQ6ghMAAADcWnxSqqYuS5RxxvbSh9sO7Nic0IRaxz1OAAAAcFsWq6G4lcllQtPpDh0/VWf1oPEiOAEAAMBtJaRk2i3PK09qVr4SUjLrqCI0VgQnAAAAuK2MnMpDk7PjgOoiOAEAAMBtBQf41ug4oLoITgAAAHBbAzu1UKum3hXuN0kKCyppTQ7UJoITAAAA3FZ6dr4s1vJbQ5T20JsVE0FHPdQ6ghMAAADcUmZuoW58Y4OO5xUpJNBHwQE+dvtDg3y1YGI/jYkMc1GFaEx4jhMAAADczsmCYk1ZkqC9R3LVJshXH04drJBAXyWkZCojJ1/BASXL87jShLpCcAIAAIBbKSi26I63N2rr71lq7uelt24ZpDbNmkiSosNburg6NFYs1QMAAIDbsFgN3bd8i37cc0z+3h5aOmWgugQ3dXVZAMEJAAAA7sEwDD366XZ9lZQmbw+zXp80QOe2b+bqsgBJBCcAAAC4iedW7dZ7CYdkNkkvXRel87u0cnVJgA3BCQAAAC636Lt9+s+6vZKkp67sTac8uB2aQwAAAKBOWayGXXe8g5m5evLLnZKkf47poWsHnuPiCoGyCE4AAACoM/FJqYpbmazUrPwy+24f2ll3XtjZBVUBVSM4AQAAoE7EJ6Vq6rJEGRXs79u+mUwmnssE98Q9TgAAAKh1FquhuJXJFYYmk6THPk+WxVrRCMC1CE4AAACodQkpmeUuzytlSErNyldCSmbdFQU4geAEAACAWmUYhtbtznBobEZOxeEKcCXucQIAAEC1WKyGNqRkatNRk1qmZCq6S7A8zH/do1RsseqL7alasG6vdqXlOHTO4ADf2ioXOCsEJwAAADjNvjueh976baPCgnw1KyZCw7oH68NNv+v17/bpYGaeJMnPyyyTyaTcQku55zNJCg3y1cBOLeruQwBOIDgBAADAKRV1x0vLytedyxIV4OupnPxiSVILf29NGdxRN0Z30M/7jmnqskRJsju29BrVrJgIuytWgDshOAEAAMBhlXXHK92Wk1+sNkG+un1oZ11zXnv5eZf8kXNMZJgWTOxX5jlOoX9eqRoTGVb7HwCoJoITAAAAHFZVd7xSz17VRxd0bV1m+5jIMI2MCFVCSqYycvIVHFCyPI8rTXB3BCcAAAA4zNGud8dyCyvc52E2KTq8ZU2VBNQJ2pEDAADAYYG+Xg6NozseGhquOAEAAMAhP+09qn9/tr3SMXTHQ0NFcAIAAEClThYU6+mvdmrZzwclSS38vJWZVyiT6I6HxoPgBAAAgAr9uOeoHvpwm/44cUqSdMOgczTj0p764bcjdMdDo0JwAgAAaOQsVqNMl7u8wmLN+WqX3t1QcpWpXfMmenZ8Hw3u0krSX93x1u/J0OrvN2jUkEGK7hLMlSY0WAQnAACARiw+KbXMlaMWft4yZOh4XpEkaVJ0B/1zTA/5+9j/0dHDbNKgTi10bKehQbQURwNHcAIAAGik4pNSNXVZYpmH2WbmlbQSb9nUW69c14/W4YBoRw4AANAoWayG4lYmlwlNp/Mym+mOB/yJ4AQAANAIJaRk2i3PK09adr4SUjLrqCLAvR
GcAAAAGqGMnMpDk7PjgIaO4AQAANAIBQf41ug4oKEjOAEAADRCAzu1UAt/7wr3mySFBflyjxPwJ4ITAABAI5STXySL1VruvtKm4rNiImgxDvyJ4AQAANDIGIahf32yXVmnihUS6KPQQB+7/aFBvlowsZ/GRIa5qELA/fAcJwAAgEbm48Q/9OX2NHmaTVo0aYB6tQlSQkqmMnLyFRxQsjyPK02APYITAABAI3IoM0+z/m+HJOn+kd3Up10zSeIht0AVWKoHAADQSFishu5fsUUnC4o1oENz3XlhuKtLAuoNghMAAEAjsfB/e7XxwHE19fHUCxOiWI4HOIHgBAAA0Ahs+/2EXljzqyQpbmwvtW/h5+KKgPqF4AQAANDA5RUW677lW1RsNXRZ7zD9vV9bV5cE1DsEJwAAgAbuqS93at/RXIUE+ujJKyNlMrFED3AWwQkAAKAB+2ZXupb9fFCS9PzVUWrm5+3iioD6ieAEAADQQB09WaCHPtwmSbrlgk66oGsrF1cE1F8EJwAAgAbIMAw9/NE2HT1ZqO4hAfrH6O6uLgmo13gALgAAQANhsRpKSMlURk6+tv+epa93Zsjbw6z510bJ18vD1eUB9RrBCQAAoAGIT0pV3MpkpWbl220fe26YeoYFuqgqoOFgqR4AAEA9F5+UqqnLEsuEJkn6KPEPxSeluqAqoGEhOAEAANRjFquhuJXJMioZE7cyWRZrZSMAVIXgBAAAUI8lpGSWe6WplCEpNStfCSmZdVcU0AARnAAAAOqxjJyKQ1N1xgEoH8EJAACgHgsO8K3RcQDKV63g9Pbbb+v8889XmzZtdODAAUnS/Pnz9dlnn9VocQAAAKjc9j9OVLrfJCksyFcDO7Wok3qAhsrp4LRgwQLFxsbq0ksv1YkTJ2SxWCRJzZo10/z582u6PgAAAFTgtf/t1VNf7rK9Np2xv/T1rJgIeZjP3AvAGU4Hp5dfflmLFi3SI488Ig+Pvx6kNmDAAG3fvr1GiwMAAED5Xv12j+Z8VRKa7hneVQtu6KfQIPvleKFBvlowsZ/GRIa5okSgQXH6AbgpKSnq27dvme0+Pj7Kzc2tkaIAAABQsRe//k0vfP2rJCl2ZDfdM7yrJGlUr1AlpGQqIydfwQEly/O40gTUDKeDU6dOnbRlyxZ16NDBbnt8fLx69uxZY4UBAADAnmEYemHNr3rpmz2SpIfGdNe0YV1s+z3MJkWHt3RVeUCD5nRwio2N1V133aX8/HwZhqGEhAS99957mjNnjv773//WRo0AAACNnmEYem7Vbv1n3V5J0r8u7aHbh4a7uCqg8XA6ON16661q0qSJHn30UeXl5en6669XmzZt9OKLL+raa6+tjRoBAAAaFYvVsFtyd17H5np21W69/t0+SdK/L4/QLRd0cnGVQOPidHCSpBtuuEE33HCD8vLydPLkSQUHB9d0XQAAAI1SfFKq4lYmKzXrrwfW+nl7KK+wpJPxY1f00qToji6qDmi8nO6qd/HFF+vEiROSJD8/P1toys7O1sUXX1yjxQEAADQm8Umpmros0S40SbKFpusHtic0AS7idHBat26dCgsLy2zPz8/X999/XyNFAQAANDYWq6G4lckyKhnz7e4jslgrGwGgtji8VG/btm22f09OTlZaWprttcViUXx8vNq2bVuz1QEAADQSCSmZZa40nSk1K18JKZl0zgNcwOErTlFRUerbt69MJpMuvvhiRUVF2X769++vJ554QjNnzqxWEa+++qo6duwoX19fDRo0SAkJCRWO/fjjjzVgwAA1a9ZM/v7+ioqK0ttvv12t9wUAAHAXGTmVhyZnxwGoWQ5fcUpJSZFhGOrcubMSEhLUunVr2z5vb28FBwfLw8PD6QJWrFih2NhYLVy4UIMGDdL8+fM1evRo7d69u9ymEy1atNAjjzyiHj16yNvbW59//rmmTJmi4OBgjR492un3BwAAcAfBAb41Og5AzXI4OJU+8NZqtdZoAfPmzdNtt9
2mKVOmSJIWLlyoL774QosXL9bDDz9cZvywYcPsXt97771688039cMPPxCcAABAvTWwUwsFNfFS1qmicvebJIUG+WpgpxZ1WxgASdVsRy6V3Od08ODBMo0ixo4d6/A5CgsLtWnTJs2YMcO2zWw2a8SIEVq/fn2VxxuGoW+++Ua7d+/WM888U+6YgoICFRQU2F5nZ2dLkoqKilRUVP5vTHWptAZ3qAXuj/kCZzFn4CzmjOus3Zmh7EpCkyQ9ckl3WS3Fslrqrq6qMGfgLHeaM87U4HRw2rdvn6688kpt375dJpNJhlHS2cVkKvlP2mJx/L/ko0ePymKxKCQkxG57SEiIdu3aVeFxWVlZatu2rQoKCuTh4aH//Oc/GjlyZLlj58yZo7i4uDLbV69eLT8/P4drrW1r1qxxdQmoR5gvcBZzBs5iztStvdnSgmQPGTKpa6BVR/JNOlFosu0P8jb0945WWQ5s0pcHXFhoJZgzcJY7zJm8vDyHxzodnO6991516tRJa9euVadOnZSQkKBjx47pgQce0Ny5c509XbUEBARoy5YtOnnypNauXavY2Fh17ty5zDI+SZoxY4ZiY2Ntr7Ozs9W+fXuNGjVKgYGBdVJvZYqKirRmzRqNHDlSXl5eri4Hbo75AmcxZ+As5kzd25WWo0ff+EVFRrGG92itV649VyaTSRsPHFdGToGCA3w0oENzeZhNVZ/MBZgzcJY7zZnS1WiOcDo4rV+/Xt98841atWols9kss9msCy64QHPmzNE999yjzZs3O3yuVq1aycPDQ+np6Xbb09PTFRoaWuFxZrNZXbp0kVTS7W/nzp2aM2dOucHJx8dHPj4+ZbZ7eXm5/BfqdO5WD9wb8wXOYs7AWcyZunEoM0+3vJWonPxindexuV69ob98vUqabV3QLaSKo90LcwbOcoc548z7O/0AXIvFooCAAEklwefw4cOSSppH7N6926lzeXt7q3///lq7dq1tm9Vq1dq1axUdHe3weaxWq919TAAAAO7u6MkC3fjGBmXkFKhHaID+O/k8W2gC4H6cvuIUGRmprVu3qlOnTho0aJCeffZZeXt76/XXX1fnzp2dLiA2NlaTJ0/WgAEDNHDgQM2fP1+5ubm2LnuTJk1S27ZtNWfOHEkl9ywNGDBA4eHhKigo0Jdffqm3335bCxYscPq9AQAAXOFkQbGmLPlF+4/lqV3zJnrz5oEKasLVGsCdOR2cHn30UeXm5kqSHnvsMV1++eUaMmSIWrZsqeXLlztdwIQJE3TkyBHNnDlTaWlpioqKUnx8vK1hxMGDB2U2/3VhLDc3V9OmTdPvv/+uJk2aqEePHlq2bJkmTJjg9HsDAADUtYJii+54e6O2/5Gllv7eeuvmgQoJ5NlMgLtzOjid/qykLl26aNeuXcrMzFTz5s1tnfWcNX36dE2fPr3cfevWrbN7/cQTT+iJJ56o1vsAAAC4ksVqKHbFVv2455j8vT20dMpAdW7d1NVlAXCA0/c4ladFixZKS0urMPwAAAA0RharofV7j+mzLX9o/d6jmvlZkr7YnipvD7NenzRAvdsFubpEAA5y6orTjh079O2338rb21vXXHONmjVrpqNHj+qJJ57Qa6+9Vq17nAAAABqi+KRUxa1MVmpWfpl9L0yI0vldWrmgKgDV5XBw+r//+z9dddVVKi4uliQ9++yzWrRoka655hr1799fn3zyicaMGVNrhQIAANQX8UmpmrosUUYF+z1qZM0PgLrk8H+2TzzxhO666y5lZ2dr3rx52rdvn+655x59+eWXio+PJzQBAIAGy37J3TFZrBVFopKxcSuTKwxNJklxK5MrPQcA9+PwFafdu3fr3XffVdOmTXX33XfrwQcf1AsvvKDzzjuvNusDAABwqfKW3IUF+WpWTITGRIaVGf91clq5y/NKGZJSs/KVkJKp6PCWtVEygFrgcHDKyclRYGCgJMnDw0NNmjThniYAANCgVbTkLi0rX1OXJWrBxH7q36
GFNqQc04Z9mdqQcky/pp906NwZORWHKwDux6nmEKtWrVJQUEn3F6vVqrVr1yopKcluzNixY2uuOgAAABepbMld6ba73t1c7SV3wQE8uwmoT5wKTpMnT7Z7fccdd9i9NplMslgsZ18VAACAiyWkZFa65E6SLTT1CA3Q3zq31N86t1C/c5rrild/VFpWfrmhyyQpNMhXAzu1qPmiAdQah4OT1WqtzToAAADciqNL6eb8PVLXDexgt21WTISmLkuUSbILT6bT9nuYTQJQf9AMEwAAoBzBAT4OjevYsmmZbWMiw7RgYj+FBtkvxwsN8tWCif3KbSoBwL05tVQPAACgMcgrLNZ7CQcrHVPVkrsxkWEaGRGqhJRMZeTkKzigZCxXmoD6ieAEAABwmn1HTmrqskTtTs+R2SRZDVV7yZ2H2UTLcaCBIDgBAAD8KT4pTQ9+sFUnC4rVOsBHr1zXV8fzCss8xym0kuc4AWiYnApOFotFP/74o/r06aNmzZrVUkkAAAB1q9hi1XOrd+u1/+2TJJ3Xsblevb6fggNL7lFiyR0Ap4KTh4eHRo0apZ07dxKcAABAvWOxGmUCUGZuoe55b7PW7zsmSbrlgk56+JIe8vL4q4cWS+4AOL1ULzIyUvv27VOnTp1qox4AAIBaEZ+UWmbJXUt/bxVbrco6VSw/bw89e1UfXd6njQurBOCunA5OTzzxhB588EE9/vjj6t+/v/z9/e32BwYG1lhxAAAANSE+KVVTlyWWeSDtsdxCSVJIoI/euXWQugQH1H1xAOoFp4PTpZdeKkkaO3asTKa/1vYahiGTySSLxVJz1QEAAJwli9VQ3MrkMqHpdCaZ1KlV2ecxAUApp4PTt99+Wxt1AAAA1IqElEy75XnlScvOV0JKJvcxAaiQ08HpwgsvrI06AAAAakVGTuWhydlxABqnaj3H6cSJE3rjjTe0c+dOSVKvXr108803KygoqEaLAwAAOFvBAT4OjvOt5UoA1GfmqofY27hxo8LDw/XCCy8oMzNTmZmZmjdvnsLDw5WYmFgbNQIAAFRLbkGx3vxpf6VjTJLCgkpakwNARZy+4nT//fdr7NixWrRokTw9Sw4vLi7Wrbfeqvvuu0/fffddjRcJAADgrP1Hc3X72xv1a/pJeZgli7UkJJ3eJKK0zdWsmAgeaAugUk4Hp40bN9qFJkny9PTUQw89pAEDBtRocQAAANWxbneG7nlvs7LzixUc4KMFE/vrSE5+mec4hQb5alZMhMZEhrmwWgD1gdPBKTAwUAcPHlSPHj3sth86dEgBATz7AAAAuI5hGPrPur2au3q3DEPqd04zLZjYXyGBJfcvjYwIVUJKpjJy8hUcULI8jytNABzhdHCaMGGCbrnlFs2dO1eDBw+WJP3444/6xz/+oeuuu67GCwQAAHBEbkGx/vHhVn25PU2SdN3A9po9tpd8PD1sYzzMJlqOA6gWp4PT3LlzZTKZNGnSJBUXF0uSvLy8NHXqVD399NM1XiAAAMCZLFbD7spRSKCPpi5L1O70HHl5mDR7bC/dMKiDq8sE0IA4FZwsFot+/vlnzZ49W3PmzNHevXslSeHh4fLz86uVAgEAAE4Xn5Ra5l6l0qYPrQN8tOCGfhrQkQ55AGqWU8HJw8NDo0aN0s6dO9WpUyf17t27tuoCAAAoIz4pVVOXJdp1xpP+6pT3wMhuhCYAtcLp5zhFRkZq3759tVELAABAhSxWQ3Erk8uEplImSS+u/U0Wa0UjAKD6nA5OTzzxhB588EF9/vnnSk1NVXZ2tt0PAABAbUhIybRbnncmQ1JqVr4SUjLrrigAjYbTzSEuvfRSSdLYsWNlMv3VvtMwDJlMJlkslpqrDgAA4E8ZORWHpuqMAwBnOB2cvv3229qoAwAAoFKHj59yaFxwgG8tVwKgMXIqOBUVFemxxx7TwoUL1bVr19qqCQAAwM5b6/fr2VW7Kx1jkhQaVPJQWwCoaU7d4+Tl5aVt27bVVi0AAAB2Sh
pC7NDMz3bIkDQ4vKVMKglJpyt9PSsmQh7mM/cCwNlzujnExIkT9cYbb9RGLQAAADa5BcW64+2NWvLjfknSQ2O6651bB2nBxH4KDbJfjhca5KsFE/tpTGSYCyoF0Bg4fY9TcXGxFi9erK+//lr9+/eXv7+/3f558+bVWHEAAKBxSsvK1y1v/qIdh7Pl42nWvGuidFmfklA0JjJMIyNClZCSqYycfAUHlCzP40oTgNrkdHBKSkpSv379JEm//vqr3b7Tu+wBAABUx47DWbpl6UalZeerVVNvvT5pgPqd09xujIfZpOjwli6qEEBjRFc9AADgEharUeaq0brdGbr7vc3KK7SoS3BTLbnpPLVv4efqUgHA+eBUmYyMDAUHB9fkKQEAQAMUn5SquJXJdg+0DfT1VE5+sQxJ53dpqf/c0F9BTbxcVyQAnMbh5hB+fn46cuSI7fVll12m1NRU2+v09HSFhXFDJgAAqFx8UqqmLku0C02SlF0amsJbaumUgYQmAG7F4eCUn58vwzBsr7/77judOmX/ILrT9wMAAJyppL14sir7E8O+o7kyc980ADfjdDvyytAcAgAAVCYhJbPMlaYzpWblKyEls44qAgDH1GhwAgAAqExGTuWhydlxAFBXHA5OJpPJ7orSma8BAACqEhzgW/UgJ8YBQF1xuKueYRjq1q2bLSydPHlSffv2ldlstu0HAACoTN9zmsnH06yCYmu5+02SQoNKWpMDgDtxODgtWbKkNusAAAANXLHFqgc+2FppaJKkWTER8jCzqgWAe3E4OE2ePLk26wAAAA2Y1WrooQ+36YttqfLyMOn2oZ31ceIfdo0iQoN8NSsmQmMiebwJAPdTow/ABQAAOJNhGHrk0yR9vPkPeZhNevm6fhoTGarYkd2VkJKpjJx8BQeULM/jShMAd0VwAgAAtcYwSp7b9F7CQZlN0gsTojQmMlSS5GE2KTq8pYsrBADH0I4cAADUCsMw9Ez8bi39ab8k6ZnxfTT23DauLQoAqongBAAAasWLa3/Twv/tlSQ9Pi5SVw9o7+KKAKD6WKoHAADOisVqlLlXadH3+zT/698kSY9e1lM3/q2Di6sEgLPjUHCKjY11+ITz5s2rdjEAAKB+iU9KVdzKZLvueIG+nsrOL5Yk/WN0d906pLOrygOAGuNQcNq8ebNDJyt9OC4AAGj44pNSNXVZoowztpeGpksiQ3XXRV3qvjAAqAUOBadvv/22tusAAAAuZrEa2pCSqU1HTWqZkqnoLsEVtge3WEu65Z0Zmk635dAJWawGLcYBNAjc4wQAAM5Ycueht37bqLBKHkibkJJptzyvPKlZ+UpIyaTlOIAGoVrBaePGjXr//fd18OBBFRYW2u37+OOPa6QwAABQNypacpeWla+pyxI19+pzdU5LP/2anqPf0k9qd1qOkv444dC5M3IqD1cAUF84HZyWL1+uSZMmafTo0Vq9erVGjRqlX3/9Venp6bryyitro0YAAFBLKltyV7rtgQ+2Vvv8wQG+1T4WANyJ08Hpqaee0gsvvKC77rpLAQEBevHFF9WpUyfdcccdCgsreykfAAC4L0eW3ElSS38vRbZtpu6hAeoa3FThrZtq6jublJFdUG7oMkkKDSppTQ4ADYHTwWnv3r267LLLJEne3t7Kzc2VyWTS/fffr4svvlhxcXE1XiQAAKgdji6lmxnTS1dEtbXbFje2l6YuS5RJsgtPpa0gZsVE0BgCQINhdvaA5s2bKycnR5LUtm1bJSUlSZJOnDihvLy8mq0OAADUKkeX0pU3bkxkmBZM7KfQIPt9oUG+WjCxX7lNJQCgvnL6itPQoUO1Zs0a9e7dW1dffbXuvfdeffPNN1qzZo2GDx9eGzUCAIBacm77IHl7mFVosZa7v6old2MiwzQyIlQJKZnKyMlXcEDJWK40AWhonA5Or7zyivLzSy7rP/LII/Ly8tJPP/2k8ePH69FHH63xAgEAQO2wWA09+MHWSkOTVPWSOw
+ziZbjABo8p4NTixZ//Y2T2WzWww8/XKMFAQCA2mcYhh79NElfbk+Tl4dJ04Z10fsbD9k1igit5DlOANDYOB2cvvzyS3l4eGj06NF221evXi2LxaJLLrmkxooDAAC1Y+7q3Xov4aBMJunFa/vq0t5humd4V63fk6HV32/QqCGDFN0lmCV3APAnp5tDPPzww7JYLGW2W61Wrj4BAFAP/Pf7fXr1272SpCfH9dalvUuuKHmYTRrUqYX6tzI0iPuUAMCO08Hpt99+U0RERJntPXr00J49e2qkKAAAUDs+3PS7nvhipyTpH6O76/pB57i4IgCoH5wOTkFBQdq3b1+Z7Xv27JG/v3+NFAUAAGremuR0/fOjbZKkWy/opGnDwl1cEQDUH04HpyuuuEL33Xef9u7da9u2Z88ePfDAAxo7dmyNFgcAAGrGz/uO6a53E2WxGhrfr53+dWlPmUwsxQMARzndHOLZZ5/VmDFj1KNHD7Vr106S9Pvvv2vIkCGaO3dujRcIAACcY7Eads9V8vP20G1vblRhsVUjeobomfG9Zeb+JQBwitPBKSgoSD/99JPWrFmjrVu3qkmTJurTp4+GDh1aG/UBAAAnxCelKm5lsl1bcbNJshrSwE4t9Mr1feXp4fSCEwBo9JwOTpJkMpk0atQojRo1qqbrAQAA1RSflKqpyxJlnLHd+ueGa89rL18vjzqvCwAaAoeC00svvaTbb79dvr6+eumllyode88999RIYQAAwHEWq6G4lcllQtPpnlu1W1dEtaXNOABUg0PB6YUXXtANN9wgX19fvfDCCxWOM5lM1QpOr776qp577jmlpaXp3HPP1csvv6yBAweWO3bRokV66623lJSUJEnq37+/nnrqqQrHAwDQGCSkZNotzytPala+ElIyFR3eso6qAoCGw6HglJKSUu6/14QVK1YoNjZWCxcu1KBBgzR//nyNHj1au3fvVnBwcJnx69at03XXXafBgwfL19dXzzzzjEaNGqUdO3aobdu2NVobAAD1RUZO5aHJ2XEAAHsuvzt03rx5uu222zRlyhRFRERo4cKF8vPz0+LFi8sd/84772jatGmKiopSjx499N///ldWq1Vr166t48oBAHAfwQG+NToOAGDP6eYQsbGx5W43mUzy9fVVly5ddMUVV6hFixZVnquwsFCbNm3SjBkzbNvMZrNGjBih9evXO1RPXl6eioqKKny/goICFRQU2F5nZ2dLkoqKilRUVOTQe9Sm0hrcoRa4P+YLnMWcaTz6tgtQgK+ncvKLy91vkhQa5KO+7QIqnQ/MGTiLOQNnudOccaYGk2EYld1HWsZFF12kxMREWSwWde/eXZL066+/ysPDQz169NDu3btlMpn0ww8/KCIiotJzHT58WG3bttVPP/2k6Oho2/aHHnpI//vf/7Rhw4Yq65k2bZpWrVqlHTt2yNe37N+izZ49W3FxcWW2v/vuu/Lz86vy/AAA1AfbMk16Y7dZJRHJ+POfpUr+V39zN6vObenU//YBoEHLy8vT9ddfr6ysLAUGBlY61ukrTqVXk5YsWWI7eVZWlm699VZdcMEFuu2223T99dfr/vvv16pVq6r3CRz09NNPa/ny5Vq3bl25oUmSZsyYYXeVLDs7W+3bt9eoUaOq/HLqQlFRkdasWaORI0fKy8vL1eXAzTFf4CzmTOOw5dAJ/XPJRklWnR/eUnuPnFRa9l+rLcKCfPXIJT00uldIledizsBZzBk4y53mTOlqNEc4HZyee+45rVmzxi50BAUFafbs2Ro1apTuvfdezZw506FnPLVq1UoeHh5KT0+3256enq7Q0NBKj507d66efvppff311+rTp0+F43x8fOTj41Nmu5eXl8t/oU7nbvXAvTFf4CzmTMN14Fiu7nxni/KLrLqoe2stmjRAJpNJCSmZysjJV3CArwZ2auF0C3LmDJzFnIGz3GHOOPP+TjeHyMrKUkZGRpntR44csSW2Zs2aqbCwsMpzeXt7q3
///naNHUobPZy+dO9Mzz77rB5//HHFx8drwIABzn4EAAAahOO5hbppyS86lluoyLaBeuX6fvL0MMvDbFJ0eEtdEdVW0eEteW4TANSAai3Vu/nmm/X888/rvPPOkyT98ssvevDBBzVu3DhJUkJCgrp16+bQ+WJjYzV58mQNGDBAAwcO1Pz585Wbm6spU6ZIkiZNmqS2bdtqzpw5kqRnnnlGM2fO1LvvvquOHTsqLS1NktS0aVM1bdrU2Y8DAEC9lF9k0W1vbVTK0Vy1bdZEiyefJ38fp/+3DgBwkNO/w7722mu6//77de2116q4uKRzj6enpyZPnmx7OG5pm3BHTJgwQUeOHNHMmTOVlpamqKgoxcfHKySkZB32wYMHZTb/dWFswYIFKiws1FVXXWV3nlmzZmn27NnOfhwAAOodq9XQA+9v1cYDxxXg66klU85TcCBtxgGgNjkdnJo2bapFixbphRde0L59+yRJnTt3trvaExUV5dQ5p0+frunTp5e7b926dXav9+/f79S5AQBoaJ6J36UvtqfKy8Ok127sr24hAa4uCQAavGpf02/atKnt2UkskQMAoG68tX6/Xvuu5C8un72qjwaHt3JxRQDQODjdHMJqteqxxx5TUFCQOnTooA4dOqhZs2Z6/PHHZbVaa6NGAAAaJYvV0Pq9x/TZlj+0fu8xrU5K0+z/2yFJenBUN13Zt52LKwSAxsPpK06PPPKI3njjDT399NM6//zzJUk//PCDZs+erfz8fD355JM1XiQAAI1NfFKq4lYmKzUrv8y+a89rr7su6uKCqgCg8XI6OL355pv673//q7Fjx9q29enTR23bttW0adMITgAAnKX4pFRNXZYoo4L9F3RtJZOJFuMAUJecXqqXmZmpHj16lNneo0cPZWZm1khRAAA0VharobiVyRWGJpOkJ7/YKYu1ohEAgNrgdHA699xz9corr5TZ/sorr+jcc8+tkaIAAGisElIyy12eV8qQlJqVr4QU/rISAOqS00v1nn32WV122WX6+uuvFR0dLUlav369Dh06pC+//LLGCwQAoDHJyK44NNmNy3FsHACgZjh9xenCCy/Ur7/+qiuvvFInTpzQiRMn9Pe//127d+/WkCFDaqNGAAAaPMMw9O3uDL30zW8OjQ8O4IG3AFCXqvUcpzZt2pRpAvH777/r9ttv1+uvv14jhQEA0FBYrIYSUjKVkZOv4ABfDezUQh7mkuYOhmHom10Zemntb9r6e1aV5zJJCg0qOQcAoO5U+wG4Zzp27JjeeOMNghMAAKcpr614WJCvZl4eIQ+zSS9985uS/siWJDXx8tCN0R0U3tpfD3+0XZLsmkSU9tGbFRNhC14AgLpRY8EJAADYq6iteGpWvqa+k2h77eftoUnRHXXrkE5q1dRHkhTUxKtM4AoN8tWsmAiNiQyri/IBAKchOAEAUAuqaisulVxBunNYZ902JFwt/L3t9o2JDNPIiNAKl/gBAOoWwQkAAAdVdq/SmapqKy6VLMMb2jW4TGgq5WE2KTq85dmWDQCoAQ4Hp7///e+V7j9x4sTZ1gIAgNuq6F6lM5fO5RUW67tfj2rxjykOnZe24gBQPzgcnIKCgqrcP2nSpLMuCAAAd1PRvUppWfmauixRT4/vLashrUlO1w97jqqw2OrwuWkrDgD1g8PBacmSJbVZBwAAbqmye5VKt/3zzw54pdq3aKLhPUL0f1sP63huYbnH0lYcAOoX7nECAKASjtyrJEnhrf31937tNKJniLqFNJXJZNLfOrfQ1GWJMom24gBQ35ldXQAAAO7M0XuQ7hneVXdd1EXdQwNkMpWEoTGRYVowsZ9Cg+yX44UG+WrBxH60FQeAeoQrTgAAVMLRe5AqGkdbcQBoGAhOAABUYmCnFmru56XjeUXl7nfkXiXaigNA/cdSPQAAKrH9jyydLCgudx/3KgFA40FwAgCgAocy83Trm7+oyGKoV5tAhQZyrxIANFYs1QMAoBxZp4o0Ze
kvOnqyUD3DArXijmg18fLgXiUAaKQITgAAnKGw2KqpyzZpT8ZJhQb6avFNA9TUp+R/mdyrBACNE0v1AAA4jWEYmvHxdv2095j8vT20+KbzFBbUxNVlAQBcjOAEAMBpXv5mjz5K/F0eZpNeuaGfItoEurokAIAbIDgBAPCnTzf/oXlrfpUkxY3tpYu6B7u4IgCAuyA4AQAg6ed9x/TQh9skSXcM7ayJf+vg4ooAAO6E4AQAaPT2HjmpO97epEKLVZf2DtU/x/RwdUkAADdDVz0AQKNjsRq2tuK+nmY98cVOZZ0qUt9zmmneNVEy02IcAHAGghMAoFGJT0pV3MpkpWbl221v2dRbiyYNkK+Xh4sqAwC4M5bqAQAajfikVE1dllgmNEnSsZOF2rg/0wVVAQDqA4ITAKBRsFgNxa1MllHBfpOkuJXJslgrGgEAaMwITgCARiEhJbPcK02lDEmpWflKSOGqEwCgLIITAKBROHwiz6FxGTkVhysAQONFcAIANHg/7T2quat/dWhscIBvLVcDAKiP6KoHAGiwjuQU6Kkvd+qTzX9IkswmqaJbmEySQoN8NbBTi7orEABQbxCcAAD11unPYwoOKAk9HmaTrFZD7yYc1LPxu5SdXyyTSZo4qIOizmmmB9/fKkl2TSJKn9o0KyZCHjzDCQBQDoITAKBeKu95TGFBvpoyuKO+SErT1kMnJEmRbQP1xLjeimrfTJLk7+1R5rjQIF/NionQmMiwuvwIAIB6hOAEAKh3Sp/HdOaqu9SsfD311S5JUlMfTz04qptujO5odxVpTGSYRkaElnulCgCAihCcAAD1SlXPY5IkXy+zVt8/VG2aNSl3v4fZpOjwlrVTIACgQaKrHgCgXqnqeUySlF9k1YFjjrUfBwDAEQQnAEC9suXQcYfG8TwmAEBNYqkeAMDlKuqOVyojJ1//t+WwPkr8QztTsx06J89jAgDUJIITAMClKuqON+OSHjKZTPo48Xd999tRWf58AJOX2SQPs0n5xdZyz8fzmAAAtYHgBABwmcq6492zfIvdtr7nNNPf+7VTTJ8w/bzvmKYuS5TE85gAAHWD4AQAcAlHuuN5mKQ7h4VrfL926ty6qW37mMgwLZjYj+cxAQDqDMEJAOASjnTHsxjSBV1a24WmUjyPCQBQlwhOAACXcLTrXWXjeB4TAKCu0I4cAOASuQXFDo2jOx4AwB1wxQkAUOd+2nNUT36xs9IxdMcDALgTrjgBAOrU59sO66Ylvyi30KKuwU1l0l/d8ErRHQ8A4G4ITgCAOvPmT/t193ubVWix6pLIUK28+wItmNhPoUH2y/FCg3y1YGI/uuMBANwGS/UAALXOMAw9v/pXvfLtHknSxL+do7ixkfIwm+iOBwCoFwhOAIBaVWyx6l+fbNf7G3+XJMWO7Ka7L+4ik+mvYER3PACAuyM4AQBqjMVq2F056t02SPet2Kyvd2bIbJKevLK3rht4jqvLBADAaQQnAECNiE9KVdzKZLuH2np5mFRkMeTjadZL1/XV6F6hLqwQAIDqIzgBAM5afFKqpi5LlHHG9iJLyZa7L+5CaAIA1Gt01QMAnBWL1VDcyuQyoel072w4KIu1shEAALg3ghMA4KwkpGTaLc8rT2pWvhJSMuuoIgAAah5L9QAAds5s8FBRa3DDMJScmq2lP6Y4dN6MnMrDFQAA7ozgBACwKa/BQ1iQr2bFRGhMZJisVkObDx1XfFKa4nek6VDmKYfPHRzgW/UgAADcFMEJACCp4gYPaVn5unNZooZ0baVdaTk6klNg2+fjadbQrq2UsP+4sk8VlXufk0lSaFDJlSsAAOorghMAoNIGD6Xbvv/tqCQpwMdTw3sGa0xkqIZ2ay0/b09b6DKdNl4qCU2SNCsmotzlfgAA1BcEJwCAQw0eJOnhS3ro5vM7ydvTvrfQmMgwLZjYr8wyv9DTlvkBAFCfEZwAoIGyWA1tSMnUpqMmtUzJVHSX4HKv+qRl5eudDQccOm
dYkG+Z0FRqTGSYRkaEOtRYAgCA+obgBAANkH2TBw+99dtGuyYP+UUWrU5O14ebftcPvx2Ro49YqqrBg4fZpOjwlmf/AQAAcDMEJwBoYKpq8nBBl1ba+vsJ5eQX2/ad16G5fs04SYMHAAAqQHACgAbEkSYPP+wpafLQtlkTje/fTuP7tVWHlv40eAAAoBIEJwBoQBxt8vDoZT118/mdZD4tCNHgAQCAihGcAKABycipOjRJUusAH7vQVIoGDwAAlI/gBAANiLdH+R3vzlRZkwcaPAAAUBbBCQAaAMMw9MGm3/X4yh2VjqPJAwAA1UNwAoB67lBmnv71yXZ9/1tJ04dzWvjpYGYeTR4AAKhBjq3pqEWvvvqqOnbsKF9fXw0aNEgJCQkVjt2xY4fGjx+vjh07ymQyaf78+XVXKAC4iMVqaP3eY/psyx9av/eYLH8+dMlqNbTkxxSNnv+dvv/tqHw8zZpxSQ9988CFWjixn0KD7JfjhQb5asHEfjR5AACgGlx6xWnFihWKjY3VwoULNWjQIM2fP1+jR4/W7t27FRwcXGZ8Xl6eOnfurKuvvlr333+/CyoGgLpl/yDbEmFBvrp9aGd9vi1Vmw4clyQN7NRCT/+9tzq3birpryYP6/dkaPX3GzRqyCBFdwnmShMAANXk0itO8+bN02233aYpU6YoIiJCCxculJ+fnxYvXlzu+PPOO0/PPfecrr32Wvn4+NRxtQBQt0qfq3Rme/HUrHzFrUzWpgPH5e/tocfHRWr5bX+zhaZSHmaTBnVqof6tDA2iMx4AAGfFZVecCgsLtWnTJs2YMcO2zWw2a8SIEVq/fn2NvU9BQYEKCgpsr7OzsyVJRUVFKioqqrH3qa7SGtyhFrg/5kvjYbEamv1/O8p9kG0pH0+zVt4VrfYt/GSxFMtiKTuGOQNnMWfgLOYMnOVOc8aZGlwWnI4ePSqLxaKQkBC77SEhIdq1a1eNvc+cOXMUFxdXZvvq1avl5+dXY+9zttasWePqElCPMF8avt+yTErL9qh0TEGxVR+v+p+6BlUWr0owZ+As5gycxZyBs9xhzuTl5Tk8tsF31ZsxY4ZiY2Ntr7Ozs9W+fXuNGjVKgYGBLqysRFFRkdasWaORI0fKy8vL1eXAzTFfGo+V21Kl5O1VjuvcK0qX9qm42QNzBs5izsBZzBk4y53mTOlqNEe4LDi1atVKHh4eSk9Pt9uenp6u0NDQGnsfHx+fcu+H8vLycvkv1OncrR64N+ZLwxfWzN/hcY7MBeYMnMWcgbOYM3CWO8wZZ97fZc0hvL291b9/f61du9a2zWq1au3atYqOjnZVWQDgFnq1CZS3Z8W/RZtU0l2PB9kCAFA3XLpULzY2VpMnT9aAAQM0cOBAzZ8/X7m5uZoyZYokadKkSWrbtq3mzJkjqaShRHJysu3f//jjD23ZskVNmzZVly5dXPY5AKAmZeYWasqSBBUWW8vdz4NsAQCoey4NThMmTNCRI0c0c+ZMpaWlKSoqSvHx8baGEQcPHpTZ/NffuB4+fFh9+/a1vZ47d67mzp2rCy+8UOvWravr8gGgxv1x4pRufGOD9h3JVXM/L90+NFxvrd9v15I8NMhXs2IieJAtAAB1yOXNIaZPn67p06eXu+/MMNSxY0cZRtXdowCgPtqTkaMb30hQala+2gT56q1bBqpLcIBuH9pZCSmZysjJV3BAyfI8rjQBAFC3XB6cAADS5oPHNWXpLzqRV6QuwU311s0D1aZZE0klD7KNDm/p4goBAGjcCE4A4GL/+/WI7nx7k04VWRTVvpmW3HSemvt7u7osAABwGoITANQRi9Uos+Tui+2peuD9LSqyGBrStZUWTuwvfx9+awYAwN3wf2cAqAPxSamKW5ls1+Qh0NdT2fnFkqSYc9vo+avPrbQFOQAAcB2CEwA4qbwrR5U1a4hPStXUZYk6s7VNaWga1q21XpwQJTMNHwAAcFsEJwBwQnlXjsIqaQ9usRqKW5lcJjSdbn
d6TqX7AQCA67EmBAAcVHrl6PTQJElpWfmauixR8Umptm0FxRbtO3JSr3+3t8z4M6Vm5SshJbNWagYAADWDK04A4IDKrhyVbrt/xVYt/iFFvx8/pdTsfDnz2LmMnMrDFQAAcC2CEwA4ICEls8orR6eKLErYf9z22s/bQy39vXXo+Kkqzx8c4HvWNQIAgNpDcAIABzh6RWhSdAdd2betzmnhpxb+3rIa0gXPfKO0rPxyr1aZJIUGlTSYAAAA7ot7nADAAc2aeDk07pLIMPU9p7laNvWRyWSSh9mkWTERkkpC0ulKX8+Kiai0Kx8AAHA9ghMAVGFPxkk9+cXOSseYVNJdr7wrR2Miw7RgYj+FBtkvxwsN8tWCif3K7cYHAADcC0v1AKASn235QzM+3q68QosCfD2Vk18sk2S37M6RK0djIsM0MiLUqec/AQAA90FwAoBy5BdZFLcyWe8lHJQkRXduqRevi1LigeNlnuMUWslznE7nYTYpOrxlrdYNAABqB8EJAM6QcjRX095J1M7UbJlM0t0XddG9I7rJw2ziyhEAAI0UwQlAo2WxGmUC0JfbU/XwR9uUW2hRS39vvTAhSkO7tbY7jitHAAA0PgQnAI1SfFJqmSV3ft4eyiu0SJIGdmqhl6/rq5BAnq8EAAAITgAaofikVE1dlljmuUqloWlMZIheua6fPD1oPAoAAErwpwIAjYrFaihuZXK5D6MttfVQlkwm7lkCAAB/ITgBaFQSUjLtlueVJzUrXwkpmXVUEQAAqA8ITgAalb1HTjo0LiOn8nAFAAAaF+5xAtAo5BdZtPjHFL209jeHxgcH0BQCAAD8heAEoEEzDEOfb0vV01/t0h8nTkmSvDxMKrKUf5eTSSUPtB3YqUUdVgkAANwdwQlAvVfe85g8zCZtPXRCj3+erI0HjkuSQgN99dCY7vL19NBd7yZKkl2TiNJ2ELNiInigLQAAsENwAlCvlfc8puAAH3Vs5W9r8NDEy0N3XNhZtw/tLD/vkt/2Fpj7lTkuNMhXs2IiNCYyrG4/BAAAcHsEJwD1VkXPY8rIKVBGToEk6e/92uqh0T0UGmR/z9KYyDCNjAgt90oVAADAmQhOANxGRUvuKhpb1fOYWjX11nNXnVvhOTzMJkWHt6yBygEAQENHcALgFspbchdWztK5gmKLkv7I1ieJv1f5PKajJwuVkJJJOAIAAGeN4ATA5SpacpeWla87lyVq6oXhsspQ4oHj2vp7lgqLrQ6fm+cxAQCAmkBwAuBSlS25K9224H977ba38PdWx5b+Sjx4vMrz8zwmAABQEwhOAFwqISWzyiV3knRR99a6tHeY+ndork6t/GU1pAue+UZpWfnlhi6exwQAAGqS2dUFAGjcUo6edGjcuL5tdfWA9urcuqlMJpM8zCbNiomQ9Nfzl0rxPCYAAFDTCE4AXOJUoUWvfrtHj3+e7ND48pbcjYkM04KJ/cq0Gg8N8tWCif14HhMAAKgxLNUDUOMqaytebLHqg02/64U1v9qeteRpNqnYWn5j8aqW3PE8JgAAUBcITgBqVEVtxWdeXrJs7tlVu7Uno2R5XrvmTfSP0d3lZTbrrncTJcnufiVHl9zxPCYAAFDbCE4AakxFbcVTs/I19Z1E2+vmfl6afnFXTfzbOfLx9JAkLTD3KxO4Qst5jhMAAIArEJwA1IjK2oqfbuqwzpo6rIsCfb3strPkDgAAuDOCE4Aa4Whb8aFdg8uEplIsuQMAAO6KrnoAakRGTtWhyZlxAAAA7oTgBOCsHcrM06eb/3BobHltxQEAANwdS/UAVKiytuKSdOBYrl75Zo8+3vyHLBW0Ey9VVVtxAAAAd0ZwAlCuitqKz4qJULeQAL3y7R59tuWwLTAN7dZagzo119xVv0qqXltxAAAAd0VwAlBGZW3F71yWKJP+CkYXdW+tu4d3Vb9zmkuSwls3pa04AABocAhOAOw40lbckDS8R2vdM7ybzm3fzG4fbcUBAEBDRHACYM
fRtuK3DgkvE5pK0VYcAAA0NAQnoBGoqslDKcMwtCHlmEPnpK04AABoTAhOQANXWZOH0nuOdqfl6LMtf2jltsM6lHnKofPSVhwAADQmBCegAauoyUPan00erohqo12pOdqdnmPb18TLLENSfpG13HPSVhwAADRGBCegnrBYDW1IydSmoya1TMlUdJfgShsuVNbkoXTbZ1sOS5K8PEwa1j1YV0S10fAeIfrfrxmauizRbqxEW3EAANB4EZyAesB+uZ2H3vptY5nldmf6Zme6Q00ebh/aSXcN66ogPy/btjGRYVowsR9txQEAAP5EcALcXGXL7aYuS9Sjl0eobTNf7T2Sq5Sjudp35KRSjubqeF6RQ+fv1SbILjSVoq04AADAXwhOgBtzZLnd458nn9V7VNbkgbbiAAAAJQhOQB1ztDW4JH2zy7HlduGt/NWnfTN1auWvzq391amVv85p4adRL3yntKz8coMXTR4AAAAcR3AC6pAjrcH/OHFKa3akaXVyutbvc+yZSveM6KorotqW2T4rJkJTlyXKJJo8AAAAnA2CE1BHqmoNPrZPmPYdy1XSH9lOn7ui5XY0eQAAAKgZBCegDjhyr9L/bUuVJJlM0nkdWmhUrxAN7xGi6//781ktt6PJAwAAwNkjOAHV5My9SgkpmQ7dq3TH0M66bWhntWrqY9tWE8vtaPIAAABwdghOQDU4cq+SYRjaeyRX3+xK1/sbDzl03og2gXahSWK5HQAAgDsgOAFOquq5StMv7qKc/GJ9sytDBzPznDp3ZfcqjYwI1fo9GVr9/QaNGjJI0V2CWW4HAABQRwhOgBMcuVfp5W/22LZ5e5g1qHMLXdS9tRas26ejJwuqfa+Sh9mkQZ1a6NhOQ4O4RwkAAKBOEZwAJzh6r9JF3Vvr2oHn6IIureTvU/KfWZtmTWgNDgAAUE+ZXV0A4GoWq6H1e4/psy1/aP3eY7JYy14TMgxDOw5naemPKQ6dc1zfthrdK9QWmqS/7lUKDbJfjhca5KsFE/txrxIAAIAb44oTGrXKmjyMigjV5kPHFZ+UpvgdaTqUecrh81Z1rxKtwQEAAOoXghMaDGfag0sVN3lI/fOBtIG+nsrOL7Zt9/Uya2jXVtqQclzZp4rO6l4lWoMDAADULwQnNAiOtAc/XWVNHkpl5xerqY+HRvQM0ZjIUA3t1lp+3p62wMW9SgAAAI0HwQn1XlXtwUvvHzIMQ78fP6Vtv2cpfkeqQ00eFtzQX0O6tbbbxnOVAAAAGh+CE+o1R9qDP/jBNr274aCSDmcrM7fQqfNn5pU/nnuVAAAAGheCE9yOM/cqOdIe/GRBsb777agkycvDpB6hgQoJ9NHXOzOqrKWiJg8S9yoBAAA0JgQnuBVH71U6nluoTQeO671fDjp03qsHtNPEQR3UIyxAPp4eslgNXfDMN0rLyq92kwcAAAA0HgQn1Jqa6nKX9meXu5sGd1BBsaGN+zP1W8ZJp2r5e992Ord9M9trD7NJs2IiaPIAAAAAhxCcUCVnA5BUs13uSrct/emA3fYuwU3Vv0MzxSelK+tUUbl1VHbliCYPAAAAcBTBCZVyNgCVHlNVl7vzu7TS/qN5SjmWq/1Hc/WLA/cqSdLlfcI09tw26t+huVo29ZEkXdS95P0k568c0eQBAAAAjiA41UPVuQJUneMcbfN95ntUdeVo2juJslb2AKVKjIwI0aheoXbbzvbKEU0eAAAAUBWCkwtZrIY2pGRq01GTWqZkKrpLcI0vgavucY4EoH99kqSsU0U6erJQaVn5Ss/O194jJ6u8clQamlo19VbHlv7q1MpfZrNJK345VOlxUsVd7rhyBAAAgNrkFsHp1Vdf1XPPPae0tDSde+65evnllzVw4MAKx3/wwQf697//rf3796tr16565plndOmll9ZhxWfPPsh46K3fNtbIErjyjnXmuGKLValZ+Vq9I63KAJSZW6h/frTdwU9s75nxvTXhvHNsry1WQ9/9euSsut
xx5QgAAAC1xeXBacWKFYqNjdXChQs1aNAgzZ8/X6NHj9bu3bsVHBxcZvxPP/2k6667TnPmzNHll1+ud999V+PGjVNiYqIiIyNd8AmcVxtL4EyS4lYma2REqN1VFkeuHN2/YquW/JiiP07kKzUrXxYn1tH1CA1Q77ZBCgn0VUiQr07kFur5Nb9Wedw5LfztXtPlDgAAAO7M5cFp3rx5uu222zRlyhRJ0sKFC/XFF19o8eLFevjhh8uMf/HFFzVmzBj94x//kCQ9/vjjWrNmjV555RUtXLiwTmuvDkeCzD8/2qY/TpyS1SoVWa2yWAylHMut9AqQISk1K1+XvfS9fLw8VFBkUaHFquy8Ih3NLay0plNFFm1IOW577e1hVkt/b6VmV92sYVZML7urPBaroXcTDlbryhFd7gAAAOCuXBqcCgsLtWnTJs2YMcO2zWw2a8SIEVq/fn25x6xfv16xsbF220aPHq1PP/203PEFBQUqKCiwvc7OzpYkFRUVqaio/BbWtWmDA93jsk4V6/HPd1br/LvScqp13A0D2yumT6jaNW+i1k19ZEga9vx3Ss8uqCQA+ahvu4Ay3+Mjl3TX3cu3Vnjl6JFLustqKZbVUva8w7u30rCuQ7TxwHFl5BQoOMBHAzo0l4fZ5JJfL3dT+h3wXcBRzBk4izkDZzFn4Cx3mjPO1ODS4HT06FFZLBaFhITYbQ8JCdGuXbvKPSYtLa3c8WlpaeWOnzNnjuLi4spsX716tfz8/KpZefVtOmqS5FHluI5NrWrlK3mYSn6yi6Sk4+YqjxvdzqJz/CVPs+RpNpSWa9IH+6t+v+Yn9yt9R4rST9t2aahJi7NL3/P0JXKGDEmXhORpVfxX5Z5vSjeTPt5v1onCv44L8jb0945WWQ5s0pcHyj3MjoekY5JWVS9DNmhr1qxxdQmoZ5gzcBZzBs5izsBZ7jBn8vLyHB7r8qV6tW3GjBl2V6iys7PVvn17jRo1SoGBgXVeT8uUTL3128Yqxz1xzUANOm05m8VqOHQF6MXbhpa5x+l7B46bPmFomfuHLpXUb0e6nvhyl9Ky/7pqFxbkq0cu6aHRvewD7JnHPmQ1yr1yhOorKirSmjVrNHLkSHl5ebm6HNQDzBk4izkDZzFn4Cx3mjOlq9Ec4dLg1KpVK3l4eCg9Pd1ue3p6ukJDQ8s9JjQ01KnxPj4+8vHxKbPdy8vLJb9Q0V2CFRbkW+U9QGe2JveSNHtsryqaJ/SSr4+33fmqe1ypy6Pa6ZI+bavV5ttL0gXdKg5XqD5XzV/UX8wZOIs5A2cxZ+Asd5gzzrx/1Wu/apG3t7f69++vtWvX2rZZrVatXbtW0dHR5R4THR1tN14qucxX0Xh3U9o9TrJf/Hb664q6x5U2TwgNsn+WUWiQb4WtyM/muNNrjg5vqSui2io6vCVXjQAAANDouHypXmxsrCZPnqwBAwZo4MCBmj9/vnJzc21d9iZNmqS2bdtqzpw5kqR7771XF154oZ5//nlddtllWr58uTZu3KjXX3/dlR/DKWfTPa66D3rlAbEAAABA9bk8OE2YMEFHjhzRzJkzlZaWpqioKMXHx9saQBw8eFBm818XxgYPHqx3331Xjz76qP71r3+pa9eu+vTTT+vNM5xKlQaZ9XsytPr7DRo1ZFCZ5XkVqe6DXnlALAAAAFA9Lg9OkjR9+nRNnz693H3r1q0rs+3qq6/W1VdfXctV1T4Ps0mDOrXQsZ2GBnH1BwAAAHBbLr3HCQAAAADqA4ITAAAAAFSB4AQAAAAAVSA4AQAAAEAVCE4AAAAAUAWCEwAAAABUgeAEAAAAAFUgOAEAAABAFQhOAAAAAFAFghMAAAAAVIHgBAAAAABVIDgBAAAAQBUITgAAAABQBU9XF1DXDMOQJGVnZ7u4khJFRUXKy8tTdna2vLy8XF0O3BzzBc5izsBZzBk4izkDZ7nTnCnNBKUZoTKNLjjl5ORIktq3b+/iSgAAAAC4g5ycHAUFBVU6xm
Q4Eq8aEKvVqsOHDysgIEAmk8nV5Sg7O1vt27fXoUOHFBgY6Opy4OaYL3AWcwbOYs7AWcwZOMud5oxhGMrJyVGbNm1kNld+F1Oju+JkNpvVrl07V5dRRmBgoMsnDuoP5gucxZyBs5gzcBZzBs5ylzlT1ZWmUjSHAAAAAIAqEJwAAAAAoAoEJxfz8fHRrFmz5OPj4+pSUA8wX+As5gycxZyBs5gzcFZ9nTONrjkEAAAAADiLK04AAAAAUAWCEwAAAABUgeAEAAAAAFUgOAEAAABAFQhOtezVV19Vx44d5evrq0GDBikhIaHS8R988IF69OghX19f9e7dW19++WUdVQp34cyc2bFjh8aPH6+OHTvKZDJp/vz5dVco3IYzc2bRokUaMmSImjdvrubNm2vEiBFV/r6EhseZOfPxxx9rwIABatasmfz9/RUVFaW33367DquFO3D2zzOlli9fLpPJpHHjxtVugXA7zsyZpUuXymQy2f34+vrWYbWOITjVohUrVig2NlazZs1SYmKizj33XI0ePVoZGRnljv/pp5903XXX6ZZbbtHmzZs1btw4jRs3TklJSXVcOVzF2TmTl5enzp076+mnn1ZoaGgdVwt34OycWbduna677jp9++23Wr9+vdq3b69Ro0bpjz/+qOPK4SrOzpkWLVrokUce0fr167Vt2zZNmTJFU6ZM0apVq+q4criKs3Om1P79+/Xggw9qyJAhdVQp3EV15kxgYKBSU1NtPwcOHKjDih1koNYMHDjQuOuuu2yvLRaL0aZNG2POnDnljr/mmmuMyy67zG7boEGDjDvuuKNW64T7cHbOnK5Dhw7GCy+8UIvVwR2dzZwxDMMoLi42AgICjDfffLO2SoSbOds5YxiG0bdvX+PRRx+tjfLghqozZ4qLi43Bgwcb//3vf43JkycbV1xxRR1UCnfh7JxZsmSJERQUVEfVVR9XnGpJYWGhNm3apBEjRti2mc1mjRgxQuvXry/3mPXr19uNl6TRo0dXOB4NS3XmDBq3mpgzeXl5KioqUosWLWqrTLiRs50zhmFo7dq12r17t4YOHVqbpcJNVHfOPPbYYwoODtYtt9xSF2XCjVR3zpw8eVIdOnRQ+/btdcUVV2jHjh11Ua5TCE615OjRo7JYLAoJCbHbHhISorS0tHKPSUtLc2o8GpbqzBk0bjUxZ/75z3+qTZs2Zf7SBg1TdedMVlaWmjZtKm9vb1122WV6+eWXNXLkyNouF26gOnPmhx9+0BtvvKFFixbVRYlwM9WZM927d9fixYv12WefadmyZbJarRo8eLB+//33uijZYZ6uLgAA4BpPP/20li9frnXr1rnlTbhwHwEBAdqyZYtOnjyptWvXKjY2Vp07d9awYcNcXRrcTE5Ojm688UYtWrRIrVq1cnU5qCeio6MVHR1tez148GD17NlTr732mh5//HEXVmaP4FRLWrVqJQ8PD6Wnp9ttT09Pr/Am/tDQUKfGo2GpzpxB43Y2c2bu3Ll6+umn9fXXX6tPnz61WSbcSHXnjNlsVpcuXSRJUVFR2rlzp+bMmUNwagScnTN79+7V/v37FRMTY9tmtVolSZ6entq9e7fCw8Nrt2i4VE38ecbLy0t9+/bVnj17aqPEamOpXi3x9vZW//79tXbtWts2q9WqtWvX2iXq00VHR9uNl6Q1a9ZUOB4NS3XmDBq36s6ZZ599Vo8//rji4+M1YMCAuigVbqKmfp+xWq0qKCiojRLhZpydMz169ND27du1ZcsW28/YsWN10UUXacuWLWrfvn1dlg8XqInfZywWi7Zv366wsLDaKrN6XN2doiFbvny54ePjYyxdutRITk42br/9dqNZs2ZGWlqaYRiGceONNxoPP/ywbfyPP/5oeHp6GnPnzjV27txpzJo1y/Dy8jK2b9/uqo+AOubsnCkoKDA2b95sbN682QgLCzMefPBBY/PmzcZvv/3mqo+AOubsnHn66acNb29v48MPPzRSU1NtPzk5Oa
76CKhjzs6Zp556yli9erWxd+9eIzk52Zg7d67h6elpLFq0yFUfAXXM2TlzJrrqNT7Ozpm4uDhj1apVxt69e41NmzYZ1157reHr62vs2LHDVR+hXCzVq0UTJkzQkSNHNHPmTKWlpSkqKkrx8fG2m+UOHjwos/mvi36DBw/Wu+++q0cffVT/+te/1LVrV3366aeKjIx01UdAHXN2zhw+fFh9+/a1vZ47d67mzp2rCy+8UOvWravr8uECzs6ZBQsWqLCwUFdddZXdeWbNmqXZs2fXZelwEWfnTG5urqZNm6bff/9dTZo0UY8ePbRs2TJNmDDBVR8BdczZOQM4O2eOHz+u2267TWlpaWrevLn69++vn376SREREa76COUyGYZhuLoIAAAAAHBn/PUAAAAAAFSB4AQAAAAAVSA4AQAAAEAVCE4AAAAAUAWCEwAAAABUgeAEAAAAAFUgOAEAAABAFQhOAAAAAFAFghMAoFxLly5Vs2bNau3869atk8lk0okTJ2rkfPv375fJZNKWLVtq5HwAAJyO4AQAjdRNN90kk8kkk8kkb29vdenSRY899piKi4vr5P0HDx6s1NRUBQUF1cn7SdKwYcNsn/n0nzvvvLPOaijP0qVLbbWYzWaFhYVpwoQJOnjwoFPnmT17tqKiomqnSABo5DxdXQAAwHXGjBmjJUuWqKCgQF9++aXuuusueXl5acaMGbX+3t7e3goNDa319znTbbfdpscee8xum5+fX4Xji4qK5OXlZbetsLBQ3t7eTr93ZccFBgZq9+7dMgxDKSkpmjZtmq6++mpt2LDB6fcBANQ8rjgBQCPm4+Oj0NBQdejQQVOnTtWIESP0f//3f3ZjVq1apZ49e6pp06YaM2aMUlNTJUnfffedvLy8lJaWZjf+vvvu05AhQyRJBw4cUExMjJo3by5/f3/16tVLX375paTyl+r9+OOPGjZsmPz8/NS8eXONHj1ax48flyTFx8frggsuULNmzdSyZUtdfvnl2rt3r9Of2c/PT6GhoXY/gYGBkv5a7rdixQpdeOGF8vX11TvvvKObbrpJ48aN05NPPqk2bdqoe/fukqTt27fr4osvVpMmTdSyZUvdfvvtOnnypO29KjquPCaTSaGhoQoLC9PgwYN1yy23KCEhQdnZ2bYx//znP9WtWzf5+fmpc+fO+ve//62ioiJJJVet4uLitHXrVtvVq6VLl0qSTpw4oVtvvVWtW7dWYGCgLr74Ym3dutXp7w4AGjOCEwDApkmTJiosLLS9zsvL09y5c/X222/ru+++08GDB/Xggw9KkoYOHarOnTvr7bffto0vKirSO++8o5tvvlmSdNddd6mgoEDfffedtm/frmeeeUZNmzYt9723bNmi4cOHKyIiQuvXr9cPP/ygmJgYWSwWSVJubq5iY2O1ceNGrV27VmazWVdeeaWsVmuNfw8PP/yw7r33Xu3cuVOjR4+WJK1du1a7d+/WmjVr9Pnnnys3N1ejR49W8+bN9csvv+iDDz7Q119/renTp9ud68zjHJGRkaFPPvlEHh4e8vDwsG0PCAjQ0qVLlZycrBdffFGLFi3SCy+8IEmaMGGCHnjgAfXq1UupqalKTU3VhAkTJElXX321MjIy9NVXX2nTpk3q16+fhg8frszMzJr4ugCgcTAAAI3S5MmTjSuuuMIwDMOwWq3GmjVrDB8fH+PBBx80DMMwlixZYkgy9uzZYzvm1VdfNUJCQmyvn3nmGaNnz5621x999JHRtGlT4+TJk4ZhGEbv3r2N2bNnl/v+3377rSHJOH78uGEYhnHdddcZ559/vsP1HzlyxJBkbN++3TAMw0hJSTEkGZs3b67wmAsvvNDw8vIy/P397X6WLVtmd4758+fbHTd58mQjJCTEKCgosG17/fXXjebNm9s+q2EYxhdffGGYzWYjLS2twuPKU/pd+/v7G35+foYkQ5Jxzz33VHrcc889Z/Tv39/2etasWca5555rN+b77783AgMDjf
z8fLvt4eHhxmuvvVbp+QEAf+EeJwBoxD7//HM1bdpURUVFslqtuv766zV79mzbfj8/P4WHh9teh4WFKSMjw/b6pptu0qOPPqqff/5Zf/vb37R06VJdc8018vf3lyTdc889mjp1qlavXq0RI0Zo/Pjx6tOnT7m1bNmyRVdffXWFtf7222+aOXOmNmzYoKNHj9quNB08eFCRkZEOf+YbbrhBjzzyiN22kJAQu9cDBgwoc1zv3r3t7k/auXOnzj33XNtnlaTzzz9fVqtVu3fvtp3zzOMqEhAQoMTERBUVFemrr77SO++8oyeffNJuzIoVK/TSSy9p7969OnnypIqLi23LDCuydetWnTx5Ui1btrTbfurUqWotdQSAxorgBACN2EUXXaQFCxbI29tbbdq0kaen/f8WzmyKYDKZZBiG7XVwcLBiYmK0ZMkSderUSV999ZXWrVtn23/rrbdq9OjR+uKLL7R69WrNmTNHzz//vO6+++4ytTRp0qTSWmNiYtShQwctWrRIbdq0kdVqVWRkpN3SQkcEBQWpS5culY45PQxVts0Rjh5nNpttdfXs2VN79+7V1KlTbUsh169frxtuuEFxcXEaPXq0goKCtHz5cj3//POVnvfkyZMKCwuz+3UpVZvt5gGgoeEeJwBoxPz9/dWlSxedc845ZUKTo2699VatWLFCr7/+usLDw3X++efb7W/fvr3uvPNOffzxx3rggQe0aNGics/Tp08frV27ttx9x44d0+7du/Xoo49q+PDh6tmzp61phKv07NlTW7duVW5urm3bjz/+KLPZXGkTCEc9/PDDWrFihRITEyVJP/30kzp06KBHHnlEAwYMUNeuXXXgwAG7Y7y9vW33hJXq16+f0tLS5OnpqS5dutj9tGrV6qzrBIDGguAEADgro0ePVmBgoJ544glNmTLFbt99992nVatWKSUlRYmJifr222/Vs2fPcs8zY8YM/fLLL5o2bZq2bdumXbt2acGCBTp69KiaN2+uli1b6vXXX9eePXv0zTffKDY2tlr15uXlKS0tze6nOiHshhtukK+vryZPnqykpCR9++23uvvuu3XjjTeWWfpXHe3bt9eVV16pmTNnSpK6du2qgwcPavny5dq7d69eeuklffLJJ3bHdOzYUSkpKdqyZYuOHj2qgoICjRgxQtHR0Ro3bpxWr16t/fv366efftIjjzyijRs3nnWdANBYEJwAAGfFbDbrpptuksVi0aRJk+z2WSwW3XXXXerZs6fGjBmjbt266T//+U+55+nWrZtWr16trVu3auDAgYqOjtZnn30mT09Pmc1mLV++XJs2bVJkZKTuv/9+Pffcc9Wqd9GiRQoLC7P7ue6665w+j5+fn1atWqXMzEydd955uuqqqzR8+HC98sor1aqrPPfff7+++OILJSQkaOzYsbr//vs1ffp0RUVF6aefftK///1vu/Hjx4/XmDFjdNFFF6l169Z67733ZDKZ9OWXX2ro0KGaMmWKunXrpmuvvVYHDhyokYAHAI2FyTh9sToAANVwyy236MiRI2WeAQUAQENBcwgAQLVlZWVp+/btevfddwlNAIAGjeAEAKi2K664QgkJCbrzzjs1cuRIV5cDAECtYakeAAAAAFSB5hAAAAAAUAWCEwAAAABUgeAEAAAAAFUgOAEAAABAFQhOAAAAAFAFghMAAAAAVIHgBAAAAABVIDgBAAAAQBX+H2nruQBzuX3ZAAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAA1cAAAIjCAYAAADvBuGTAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjcsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvTLEjVAAAAAlwSFlzAAAPYQAAD2EBqD+naQAAgPdJREFUeJzt3Xd8U9X/x/F30t3SAZRuloDsISjIRgSKIsOByFdluAcunLhQ8CeKivh1gKgg6ldBVHCgZQmigiJLZAqI7LJKW7rb5P7+KA2EprSBNEnb1/Px4NHm5tybT05vS989555rMgzDEAAAAADgvJg9XQAAAAAAVAaEKwAAAABwAcIVAAAAALgA4QoAAAAAXIBwBQAAAAAuQLgCAAAAABcgXAEAAACACxCuAAAAAMAFCFcAAAAA4AKEKwBepUePHurRo0e5Hb9evXoaMWJEuR0fKIt69erpqquu8nQZZfbKK6/oggsukI+Pj9q0aePpcsrNsmXLZDKZtGzZslLb/vvvvzKZTPrwww/Lva5zlZGRoaioKP3vf/9zet9jx44pJCRE33//fTlUBlRehCsADn344YcymUxavXq1p0vxCJPJVOK/u+66y9PlOVT0i2HRPx8fH0VFRem6667Tli1bzvm4L774oubNm+e6Qt2kqB9ee+21Ys9V9fPbGQsXLtRjjz2mzp07a8aMGXrxxRdLbDtixAi7czAgIEAXXnihnn32WeXk5Lix6rN75513yhyKPv30U02ePLlc6ykvb7zxhkJDQ3XDDTc4vW/NmjV122236ZlnnimHyoDKy9fTBQDA6RYuXOjpEmx69+6tYcOGFdt+4YUXeqCasrv//vt1ySWXKD8/Xxs2bNDUqVO1bNkybdy4UTExMU4f78UXX9R1112nQYMGub5YN3jllVd09913Kzg42NOlVEg//vijzGazPvjgA/n7+5faPiAgQO+//74kKS0tTV9//bXGjx+vnTt3ntMISnl45513FBkZWWwUu1u3bsrOzrZ7n59++qk2btyoBx980K5t3bp1lZ2dLT8/PzdU7Lz8/Hy98cYbeuihh+Tj43NOx7jrrrv03//+Vz/++KN69uzp4gqByolwBcCrlOWXN3e58MILddNNNzm9X1ZWlsNf5AsKCmS1Ws/rPWZmZiokJOSsbbp27arrrrvO9rhx48a6++679dFHH+mxxx4759euiNq0aaP169dr6tSpGj16tKfLcStXnG+SdPjwYQUFBZX5OL6+vnbfN/fcc486deqkzz77TJMmTVJ0dPR51VOezGazAgMDy9TWZDKVua0nfPfddzpy5Iiuv/76cz5G06ZN1aJFC3344YeEK6CMmBYI4LysW7dOV1xxhcLCwlStWjVdfvnl+u2334q127Bhg7p3766goCAlJCTohRde0IwZM2QymfTvv//a2jm65ionJ0fPPfecLrzwQgUGBio2NlbXXHONdu7caWvz6quvqlOnTqpZs6aCgoLUrl07ffHFF+X1tu3qbdGihdasWaNu3bopODhYTz75pO16jFdffVWTJ09WgwYNFBAQoM2bN0sqHA3o2rWrQkJCFBERoYEDBxabuvfcc8/JZDJp8+bN+s9//qPq1aurS5cuTtfYtWtXSbLrL6lsfWYymZSZmamZM2fapnqd/tf+/fv365ZbblF0dLQCAgLUvHlzTZ8+vdSaWrRoocsuu6zYdqvVqvj4eLtwOGvWLLVr106hoaEKCwtTy5Yt9cYbb5TpvXfu3Fk9e/bUxIkTlZ2dfda2JV3vN2LECNWrV8/2+PSv7dtvv60LLrhAwcHB6tOnj/bu3SvDMDR+/HglJCQoKChIAwcOVEpKisPXXLhwodq0aaPAwEA1a9ZMX331VbE2qampevDBB1W7dm0FBASoYcOGev
nll2W1Wh3W5Oh8c6SgoEDjx4+3ta1Xr56efPJJ5ebm2tqYTCbNmDFDmZmZtq+/s9cYmUwmdenSRYZh6J9//rF77ocffrB9H4SGhqpfv37atGmTXZsRI0aoWrVq+ueff5SYmKiQkBDFxcVp3LhxMgzDrq3VatXkyZPVvHlzBQYGKjo6WnfeeaeOHz9ua1OvXj1t2rRJP/30k+09FX3dz7zmqkePHpo/f752795ta1t0LpR0zZUz39s7duzQiBEjFBERofDwcI0cOVJZWVl2bRctWqQuXbooIiJC1apVU+PGjfXkk0+W2u/z5s1TvXr11KBBA4f9uX//fg0aNEjVqlVTrVq19Mgjj8hisRQ7Tu/evfXtt98W62sAjjFyBeCcbdq0SV27dlVYWJgee+wx+fn56d1331WPHj30008/qUOHDpIKfwG/7LLLZDKZNGbMGIWEhOj9999XQEBAqa9hsVh01VVXacmSJbrhhhv0wAMP6MSJE1q0aJE2btxo+8XhjTfe0IABA3TjjTcqLy9Ps2bN0uDBg/Xdd9+pX79+5/T+cnJydPTo0WLbw8LC7P6Kf+zYMV1xxRW64YYbdNNNN9n9ZX7GjBnKycnRHXfcoYCAANWoUUOLFy/WFVdcoQsuuEDPPfecsrOz9eabb6pz585au3at3S/ykjR48GA1atRIL7744jn9glMUXqtXr263vSx99vHHH+u2225T+/btdccdd0iSrc8PHTqkSy+9VCaTSaNGjVKtWrX0ww8/6NZbb1V6enqxaVSnGzJkiJ577jklJyfbTVX85ZdfdODAAds1IosWLdLQoUN1+eWX6+WXX5YkbdmyRb/++qseeOCBMr3/5557Tt26ddOUKVNcOnr1v//9T3l5ebrvvvuUkpKiiRMn6vrrr1fPnj21bNkyPf7449qxY4fefPNNPfLII8VC5/bt2zVkyBDdddddGj58uGbMmKHBgwcrKSlJvXv3llQ4Ctq9e3ft379fd955p+rUqaMVK1ZozJgxOnjwYLFrgRydbyW57bbbNHPmTF133XV6+OGH9fvvv2vChAnasmWL5s6dK6nw6z9t2jStWrXKNtWvU6dOTveVo3Pw448/1vDhw5WYmKiXX35ZWVlZmjJlirp06aJ169bZfR9YLBb17dtXl156qSZOnKikpCSNHTtWBQUFGjdunK3dnXfeqQ8//FAjR47U/fffr127dumtt97SunXr9Ouvv8rPz0+TJ0/Wfffdp2rVqumpp56SpBJH05566imlpaVp3759ev311yVJ1apVK/F9Ovu9ff3116t+/fqaMGGC1q5dq/fff19RUVG2c33Tpk266qqr1KpVK40bN04BAQHasWOHfv3111L7fMWKFWrbtq3D5ywWixITE9WhQwe9+uqrWrx4sV577TU1aNBAd999t13bdu3a6fXXX9emTZvUokWLUl8XqPIMAHBgxowZhiTjjz/+KLHNoEGDDH9/f2Pnzp22bQcOHDBCQ0ONbt262bbdd999hslkMtatW2fbduzYMaNGjRqGJGPXrl227d27dze6d+9uezx9+nRDkjFp0qRir2+1Wm2fZ2Vl2T2Xl5dntGjRwujZs6fd9rp16xrDhw8v8T0VkVTiv88++8yuXknG1KlT7fbftWuXIckICwszDh8+bPdcmzZtjKioKOPYsWO2bX/++adhNpuNYcOG2baNHTvWkGQMHTq01HoNwzCWLl1qSDKmT59uHDlyxDhw4ICRlJRkNGzY0DCZTMaqVavs2pe1z0JCQhz22a233mrExsYaR48etdt+ww03GOHh4cWOf7pt27YZkow333zTbvs999xjVKtWzbbvAw88YISFhRkFBQWlvv8zSTLuvfdewzAM47LLLjNiYmJsx3V0fp957hUZPny4UbduXdvjoq9trVq1jNTUVNv2MWPGGJKM1q1bG/n5+bbtQ4cONfz9/Y2cnBzbtrp16xqSjC+//NK2LS0tzYiNjTUuuugi27bx48cbIS
Ehxt9//21X0xNPPGH4+PgYe/bssavJ0fnmyPr16w1Jxm233Wa3/ZFHHjEkGT/++KPd+w8JCSn1mKe3PXLkiHHkyBFjx44dxquvvmqYTCajRYsWtu/ZEydOGBEREcbtt99ut39ycrIRHh5ut3348OGGJOO+++6zbbNarUa/fv0Mf39/48iRI4ZhGMbPP/9sSDL+97//2R0zKSmp2PbmzZs7/FoXfQ8tXbrUtq1fv352X/8iRX0+Y8YM2zZnv7dvueUWu2NeffXVRs2aNW2PX3/9dUOS7T2WVX5+vmEymYyHH3642HNF/Tlu3Di77RdddJHRrl27Yu1XrFhhSDJmz57tVA1AVcW0QADnxGKxaOHChRo0aJAuuOAC2/bY2Fj95z//0S+//KL09HRJUlJSkjp27Gi3hHONGjV04403lvo6X375pSIjI3XfffcVe85kMtk+DwoKsn1+/PhxpaWlqWvXrlq7du25vD1J0sCBA7Vo0aJi/86czhYQEKCRI0c6PMa1116rWrVq2R4fPHhQ69ev14gRI+xGFVq1aqXevXs7XPbY2dUJb7nlFtWqVUtxcXHq27ev0tLS9PHHH+uSSy6xa3c+fWYYhr788kv1799fhmHo6NGjtn+JiYlKS0s763EuvPBCtWnTRrNnz7Zts1gs+uKLL9S/f39bbREREcrMzNSiRYuc6oMzFY2STZ069byOc7rBgwcrPDzc9rhopPamm26Sr6+v3fa8vDzt37/fbv+4uDhdffXVtsdhYWEaNmyY1q1bp+TkZEnSnDlz1LVrV1WvXt2uj3v16iWLxaLly5fbHfPM860kRefZmSN5Dz/8sCRp/vz5pR6jJJmZmapVq5Zq1aqlhg0b6pFHHlHnzp319ddf275nFy1apNTUVA0dOtTuffn4+KhDhw5aunRpseOOGjXK9nnRaGleXp4WL14sqbCvwsPD1bt3b7tjtmvXTtWqVXN4TFdyxfd2165ddezYMdvPzoiICEnS119/bTcNtDQpKSkyDKPYaHVpr33mtE3p1Gijo1F8AMUxLRDAOTly5IiysrLUuHHjYs81bdpUVqtVe/fuVfPmzbV792517NixWLuGDRuW+jo7d+5U48aN7X5ZdeS7777TCy+8oPXr1xe7ZuRcJSQkqFevXqW2i4+PL/Fi//r169s93r17tySV2G8LFiwotmjFmccozbPPPquuXbsqIyNDc+fO1axZs2Q2F/9b2vn02ZEjR5Samqpp06Zp2rRpDtscPnz4rMcYMmSInnzySe3fv1/x8fFatmyZDh8+rCFDhtja3HPPPfr88891xRVXKD4+Xn369NH111+vvn37llrj6bp166bLLrtMEydOdNlS+nXq1LF7XBS0ateu7XD76df9SIXn/5l9XbQS5b///quYmBht375dGzZsKDEwndnHZT1Xdu/eLbPZXOx7MCYmRhEREbbz9FwEBgbq22+/lSTt27dPEydOtC2KUWT79u2SVOIiCWFhYXaPzWaz3R9xJPu+KjpmWlqaoqKiHB6ztPPxfJ3L9/aZ51BRkDl+/LjCwsI0ZMgQvf/++7rtttv0xBNP6PLLL9c111yj6667zuH39JmMEqYRBwYGFjunqlevXuwcPf0Y5/OzFKhKCFcAKryff/5ZAwYMULdu3fTOO+8oNjZWfn5+mjFjhj799NNyf/3Tf2l05jlXHN+Rli1b2kLhoEGDlJWVpdtvv11dunSx/eJ/vn1W9Ff0m266ScOHD3fYplWrVmc9xpAhQzRmzBjNmTNHDz74oD7//HOFh4fbBaeoqCitX79eCxYs0A8//KAffvhBM2bM0LBhwzRz5swy9UeRsWPHqkePHnr33XdtIwKnM5lMDn8ZdXSRv6QSl7cuaXtJv+iejdVqVe/evUtc5fHM2wI4e66Uxy/MPj4+dn+USExMVJMmTXTnnXfqm2++kXTq/Pn4448d3h6gtD+mOGK1Ws96w9yyjOi5W2nnSlBQkJYvX66lS5dq/vz5Sk
pK0uzZs9WzZ08tXLiwxP1r1Kghk8nkMCyd7XUdKTpGZGRkmfcBqjLCFYBzUqtWLQUHB2vbtm3Fntu6davMZrPtF/m6detqx44dxdo52namBg0a6Pfff1d+fn6J95P58ssvFRgYqAULFtgtkjFjxoyyvh23qVu3riSV2G+RkZGlLrXurJdeeklz587V//3f/9mmxTnTZ45+Aa9Vq5ZCQ0NlsVjKNLrnSP369dW+fXvNnj1bo0aN0ldffaVBgwYVW+jE399f/fv3V//+/WW1WnXPPffo3Xff1TPPPFOm0c8i3bt3V48ePfTyyy/r2WefLfZ89erVHU6LOp9RnLPZsWOHDMOw69+///5bkmwLHzRo0EAZGRnn3MclqVu3rqxWq7Zv366mTZvath86dEipqam289QVYmNj9dBDD+n555/Xb7/9pksvvdS2KEpUVFSZ3pvVatU///xjFyYd9dXixYvVuXPnUkOmM6GyrG3L63vbbDbr8ssv1+WXX65JkybpxRdf1FNPPaWlS5eW2He+vr5q0KCBdu3a5fTrnanoGKefJwBKxjVXAM6Jj4+P+vTpo6+//tpuKfVDhw7p008/VZcuXWxTexITE7Vy5UqtX7/e1i4lJaVMNxS99tprdfToUb311lvFniv6666Pj49MJpPdCMO///6refPmndubK0exsbFq06aNZs6cqdTUVNv2jRs3auHChbryyitd/poNGjTQtddeqw8//NB2LY8zfRYSEmJXa9H+1157rb788ktt3Lix2D5HjhwpU21DhgzRb7/9punTp+vo0aN2UwKlwpUYT2c2m20jYqdPZSyromuvHE1lbNCggbZu3WpX+59//lmmldnOxYEDB2yr8klSenq6PvroI7Vp08Y2mnP99ddr5cqVWrBgQbH9U1NTVVBQcE6vXXSenbna4KRJkyTpnFfYLMl9992n4OBgvfTSS5IKfyaEhYXpxRdfVH5+frH2js6f038GGIaht956S35+frr88sslFfaVxWLR+PHji+1bUFBgdw47OqdLEhISorS0tFLblcf3tqMl/IuuXS3t/O/YsaNWr17t9Gueac2aNQoPD1fz5s3P+1hAVcDIFYCzmj59upKSkoptf+CBB/TCCy/Y7sFyzz33yNfXV++++65yc3M1ceJEW9vHHntMn3zyiXr37q377rvPthR7nTp1lJKScta/DA8bNkwfffSRRo8erVWrVqlr167KzMzU4sWLdc8992jgwIHq16+fJk2apL59++o///mPDh8+rLffflsNGzbUhg0bzvm9//333/rkk0+KbY+OjrYtlX0uXnnlFV1xxRXq2LGjbr31VttyzeHh4XruuefO+bhn8+ijj+rzzz/X5MmT9dJLLznVZ+3atdPixYs1adIkxcXFqX79+urQoYNeeuklLV26VB06dNDtt9+uZs2aKSUlRWvXrtXixYtLvLfT6a6//no98sgjeuSRR1SjRo1if4m/7bbblJKSop49eyohIUG7d+/Wm2++qTZt2pzTX9K7d++u7t2766effir23C233KJJkyYpMTFRt956qw4fPqypU6eqefPmtgUGXOnCCy/Urbfeqj/++EPR0dGaPn26Dh06ZDd6+Oijj+qbb77RVVddpREjRqhdu3bKzMzUX3/9pS+++EL//vvvOU3Xat26tYYPH65p06YpNTVV3bt316pVqzRz5kwNGjTI4T3IzkfNmjU1cuRIvfPOO9qyZYuaNm2qKVOm6Oabb1bbtm11ww03qFatWtqzZ4/mz5+vzp0724WpwMBAJSUlafjw4erQoYN++OEHzZ8/X08++aRtul/37t115513asKECVq/fr369OkjPz8/bd++XXPmzNEbb7xhu39au3btNGXKFL3wwgtq2LChoqKiSrz+q127dpo9e7ZGjx6tSy65RNWqVVP//v0dtnX19/a4ceO0fPly9evXT3Xr1tXhw4f1zjvvKCEhodR73g0cOFAff/yx/v7772LTR52xaN
Ei9e/fn2uugLLy0CqFALxc0VLVJf3bu3evYRiGsXbtWiMxMdGoVq2aERwcbFx22WXGihUrih1v3bp1RteuXY2AgAAjISHBmDBhgvHf//7XkGQkJyfb2jlaDjsrK8t46qmnjPr16xt+fn5GTEyMcd1119ktAf/BBx8YjRo1MgICAowmTZoYM2bMsC13fDpXLMV+en3du3c3mjdvXmz/omWaX3nlFYfHX7x4sdG5c2cjKCjICAsLM/r3729s3rzZrk1R/WVdhrloGek5c+Y4fL5Hjx5GWFiYbfnwsvbZ1q1bjW7duhlBQUGGJLv+O3TokHHvvfcatWvXtn1tLr/8cmPatGllqtkwDKNz584OlwU3DMP44osvjD59+hhRUVGGv7+/UadOHePOO+80Dh48WOpxddpS7Kcr6ic5uNXAJ598YlxwwQWGv7+/0aZNG2PBggUlLsV+5te2pP53tOx73bp1jX79+hkLFiwwWrVqZfsaOPranThxwhgzZozRsGFDw9/f34iMjDQ6depkvPrqq0ZeXt5Zazqb/Px84/nnn7d9X9WuXdsYM2aM3ZLxhnFuS7E7snPnTsPHx8fu/Fm6dKmRmJhohIeHG4GBgUaDBg2MESNGGKtXry52zJ07dxp9+vQxgoODjejoaGPs2LGGxWIp9jrTpk0z2rVrZwQFBRmhoaFGy5Ytjccee8w4cOCArU1ycrLRr18/IzQ01O572tFS7BkZGcZ//vMfIyIiwpBkOxccLcVuGOf3vV10rhTdnmLJkiXGwIEDjbi4OMPf39+Ii4szhg4dWmxpfkdyc3ONyMhIY/z48XbbS/oaOfre37JliyHJWLx4camvB6CQyTC45TYAz3jwwQf17rvvKiMjw6kLrAFUHSNGjNAXX3yhjIwMT5dS4YwfP14zZszQ9u3bz+ln7IMPPqjly5drzZo1jFwBZcQ1VwDcIjs72+7xsWPH9PHHH6tLly4EKwAoBw899JAyMjI0a9Ysp/c9duyY3n//fb3wwgsEK8AJXHMFwC06duyoHj16qGnTpjp06JA++OADpaen65lnnvF0aQBQKVWrVu2c7+9Vs2ZNRguBc0C4AuAWV155pb744gtNmzZNJpNJbdu21QcffKBu3bp5ujQAAACX4JorAAAAAHABrrkCAAAAABcgXAEAAACAC3DNlQNWq1UHDhxQaGgoK+QAAAAAVZhhGDpx4oTi4uJkNp99bIpw5cCBAwdUu3ZtT5cBAAAAwEvs3btXCQkJZ21DuHIgNDRUUmEHhoWFebiayi0/P18LFy5Unz595Ofn5+lyqgT63L3ob/ejz92PPncv+tv96HP386Y+T09PV+3atW0Z4WwIVw4UTQUMCwsjXJWz/Px8BQcHKywszOPfOFUFfe5e9Lf70efuR5+7F/3tfvS5+3ljn5flciEWtAAAAAAAFyBcAQAAAIALEK4AAAAAwAUIVwAAAADgAoQrAAAAAHABwhUAAAAAuIDHw9Xbb7+tevXqKTAwUB06dNCqVatKbLtp0yZde+21qlevnkwmkyZPnnzexwQAAAAAV/BouJo9e7ZGjx6tsWPHau3atWrdurUSExN1+PBhh+2zsrJ0wQUX6KWXXlJMTIxLjgkAAAAAruDRcDVp0iTdfvvtGjlypJo1a6apU6cqODhY06dPd9j+kksu0SuvvKIbbrhBAQEBLjkmAAAAALiCr6deOC8vT2vWrNGYMWNs28xms3r16qWVK1e69Zi5ubnKzc21PU5PT5dUeGfo/Pz8c6oFZVPUv/Sz+9Dn7kV/ux997n70uXvR3+5Hn7ufN/W5MzV4LFwdPXpUFotF0dHRdtujo6O1detWtx5zwoQJev7554ttX7hwoYKDg8+pFjhn0aJFni6hyqHP3Yv+dj/63P3oc/eiv92PPnc/b+jzrKysMrf1WLjyJmPGjNHo0aNtj9PT01W7dm316dNHYWFhHqys8svPz9eiRYvUu3dv+fn5ebqcKoE+dy/62/3oc/ejz9
2L/nY/+tz9vKnPi2a1lYXHwlVkZKR8fHx06NAhu+2HDh0qcbGK8jpmQECAw2u4/Pz8PP7FrCroa/ejz92L/nY/+tz96HP3or/djz53P2/oc2de32MLWvj7+6tdu3ZasmSJbZvVatWSJUvUsWNHrzkm7FmshlbuPKav1+/Xyp3HZLEani4JAAAA8AoenRY4evRoDR8+XBdffLHat2+vyZMnKzMzUyNHjpQkDRs2TPHx8ZowYYKkwgUrNm/ebPt8//79Wr9+vapVq6aGDRuW6Zg4d0kbD+r5bzfrYFqObVtseKDG9m+mvi1iPVgZAAAA4HkeDVdDhgzRkSNH9Oyzzyo5OVlt2rRRUlKSbUGKPXv2yGw+Nbh24MABXXTRRbbHr776ql599VV1795dy5YtK9MxcW6SNh7U3Z+s1ZnjVMlpObr7k7WaclNbAhYAAACqNI8vaDFq1CiNGjXK4XNFgalIvXr1ZBilT0M72zHhPIvV0PPfbi4WrCTJkGSS9Py3m9W7WYx8zCY3VwcAAAB4B4/eRBgVw6pdKXZTAc9kSDqYlqNVu1LcVxQAAADgZQhXOKttySc0bfnOMrU9fKLkAAYAAABUdh6fFgjvk5adr2/+PKAvVu/Vn/vSyrxfVGhgOVYFAAAAeDfCFSRJVquhFTuP6fPVe7VgU7JyC6ySJF+zSZc3idKqf1OUmpXv8Lork6SY8EC1r1/DrTUDAAAA3oRwVcXtTcnSnDX79OWafdqfmm3b3iQmVNe1S9DVF8WrZrUA22qBJskuYBUtXzG2fzMWswAAAECVRriqgrLzLPph40HNWb1PK/85ZtseFuirgW3iNfjiBLWMD5fJdCos9W0Rqyk3tS12n6sY7nMFAAAASCJcVRmGYWjd3lTNWb1P3/15QCdyCyRJJpPUpWGkBl9cW32aRSvQz6fEY/RtEavezWK0cudR3Tx9lQxD+vLuToqLCHLX2wAAAAC8FuGqkjt8Ikdz1+7XnDX7tONwhm17nRrBuq5dgq5tl6B4J8KRj9mkLo1qKS48SPtTs3UwLZtwBQAAAIhwVSnlW6z6cethzVm9T0u3HZbFWniVVKCfWVe2jNXgdrXVoX4Nmc/jGqmE6oXhat/xbLWr66rKAQAAgIqLcFWJbEs+oTmr92re+v06mpFn2962ToQGX1xbV7WKVWign0teK6F6sH7flaJ9x7NLbwwAAABUAYQrL2axGlq1K0WHT+QoKrRwqfMzV+RLy87Xt38e0Jwz7klVKzRA17SN1+B2CWoYFery2uKrF04F3Hc8y+XHBgAAACoiwpWXStp4sNjKfLEnV+br0yxGK3Ye05w1e5W08Yx7UjWN0vUX11b3C2vJ18dcbvUl2MIVI1cAAACARLjySkX3lDrzhr0H03J01ydrVSPYXylZp6b9NY4O1eCLEzToonhFVgtwS41F4Wo/4QoAAACQRLjyOharoee/3VwsWJ0uJStPoQE+GnRRgsN7UrlD7erBkqR9qdmyWo3zWhwDAAAAqAwIV15m1a4Uu6mAJXnrxrbqfmGUGypyLCY8UGaTlFdg1dGMXEWFBXqsFgAAAMAblN9FOTgnh0+UHqwkKTUrv5wrOTs/H7NiwwunBu5laiAAAABAuPI2UaFlGwEqa7vyVHTzYVYMBAAAAAhXXqd9/RqKDQ9USVcwmVS4amD7+jXcWZZDtkUtUhm5AgAAAAhXXsbHbNLY/s0kqVjAKno8tn+zYve78gSWYwcAAABOIVx5ob4tYjXlpraKCbef+hcTHqgpN7VV3xaxHqrMXkLRioGEKwAAAIDVAr1V3xax6t0sRqt2pejwiRxFhRZOBfSGEasip0auuOYKAAAAIFx5MR+zSR0b1PR0GSWKP+1GwoZhuP1eWwAAAIA3YVogzllseJBMJim3wKqjGXmeLgcAAADwKMIVzpm/r1kxJ28ezNRAAAAAVHWEK5wXVgwEAAAAChGucF5YMRAAAAAoRLjCeYmPYMVAAAAAQCJc4TwVTQ
vcn8rIFQAAAKo2whXOC9MCAQAAgEKEK5yX028kbBiGh6sBAAAAPIdwhfMSGxEok0nKybfqWCb3ugIAAEDVRbjCeQnw9VF0aNG9rpgaCAAAgKqLcIXzFl+dFQMBAAAAwhXOm23FQEauAAAAUIURrnDeTi1qQbgCAABA1UW4wnk7tRw70wIBAABQdRGucN4YuQIAAAAIV3CB+IhT4Yp7XQEAAKCqIlzhvMWdDFfZ+RYdz8r3cDUAAACAZxCucN4C/XwUFRogieuuAAAAUHURruASXHcFAACAqo5wBZdgxUAAAABUdYQruEQ8I1cAAACo4ghXcImiaYH7CVcAAACooghXcIlT0wIJVwAAAKiaCFdwiVMLWmRxrysAAABUSYQruETRjYQz8yxK5V5XAAAAqIIIV3CJQD8fRVYrutcVUwMBAABQ9RCu4DK2RS1SWY4dAAAAVQ/hCi7DjYQBAABQlRGu4DKsGAgAAICqjHAFlzl9xUAAAACgqiFcwWWYFggAAICqjHAFlzk9XHGvKwAAAFQ1hCu4THxE4TVXGbkFSs8u8HA1AAAAgHsRruAyQf4+iqzmL0nay3VXAAAAqGIIV3CpeFYMBAAAQBVFuIJLsWIgAAAAqirCFVwqIYIVAwEAAFA1Ea7gUkUjV/tTCVcAAACoWghXcKkErrkCAABAFUW4gktxzRUAAACqKsIVXCr+ZLg6kVOgtOx8D1cDAAAAuA/hCi4V7O+rGiGF97pi9AoAAABVCeEKLmdb1ILrrgAAAFCFEK7gcqeuuyJcAQAAoOogXMHlWDEQAAAAVRHhCi7HioEAAACoighXcDmmBQIAAKAqIlzB5eIjiqYFMnIFAACAqoNwBZcrutdVek6B0nO41xUAAACqBsIVXK5agK+qB/tJYjl2AAAAVB2EK5QLVgwEAABAVUO4QrlgxUAAAABUNR4PV2+//bbq1aunwMBAdejQQatWrTpr+zlz5qhJkyYKDAxUy5Yt9f3339s9n5GRoVGjRikhIUFBQUFq1qyZpk6dWp5vAQ7ER7BiIAAAAKoWj4ar2bNna/To0Ro7dqzWrl2r1q1bKzExUYcPH3bYfsWKFRo6dKhuvfVWrVu3ToMGDdKgQYO0ceNGW5vRo0crKSlJn3zyibZs2aIHH3xQo0aN0jfffOOutwWdGrnimisAAABUFR4NV5MmTdLtt9+ukSNH2kaYgoODNX36dIft33jjDfXt21ePPvqomjZtqvHjx6tt27Z66623bG1WrFih4cOHq0ePHqpXr57uuOMOtW7dutQRMbiW7ZqrVKYFAgAAoGrw9dQL5+Xlac2aNRozZoxtm9lsVq9evbRy5UqH+6xcuVKjR4+225aYmKh58+bZHnfq1EnffPONbrnlFsXFxWnZsmX6+++/9frrr5dYS25urnJzc22P09PTJUn5+fnKz2cp8XMRE1q4WuC+lOyz9mHRc/Sz+9Dn7kV/ux997n70uXvR3+5Hn7ufN/W5MzV4LFwdPXpUFotF0dHRdtujo6O1detWh/skJyc7bJ+cnGx7/Oabb+qOO+5QQkKCfH19ZTab9d5776lbt24l1jJhwgQ9//zzxbYvXLhQwcHBzrwtnJRTIEm+Ss3O11fffK/AUs60RYsWuaMsnIY+dy/62/3oc/ejz92L/nY/+tz9vKHPs7LKPhPLY+GqvLz55pv67bff9M0336hu3bpavny57r33XsXFxalXr14O9xkzZozdiFh6erpq166tPn36KCwszF2lVzovbvxRadkFat6+qxrHhDpsk5+fr0WLFql3797y8/Nzc4VVE33uXvS3+9Hn7kefuxf97X70uft5U58XzWorC4+Fq8jISPn4+OjQoUN22w8dOqSYmBiH+8TExJy1fXZ2tp588knNnTtX/fr1kyS1atVK69ev16uvvlpiuAoICFBAQECx7X5+fh7/YlZkCdWDlZadrkMZ+WpRSj/S1+5Hn7sX/e1+9Ln70efuRX+7H33uft7Q5868vs
cWtPD391e7du20ZMkS2zar1aolS5aoY8eODvfp2LGjXXupcKiwqH3RNVJms/3b8vHxkdVqdfE7QGlO3euKFQMBAABQ+Xl0WuDo0aM1fPhwXXzxxWrfvr0mT56szMxMjRw5UpI0bNgwxcfHa8KECZKkBx54QN27d9drr72mfv36adasWVq9erWmTZsmSQoLC1P37t316KOPKigoSHXr1tVPP/2kjz76SJMmTfLY+6yqbCsGciNhAAAAVAEeDVdDhgzRkSNH9Oyzzyo5OVlt2rRRUlKSbdGKPXv22I1CderUSZ9++qmefvppPfnkk2rUqJHmzZunFi1a2NrMmjVLY8aM0Y033qiUlBTVrVtX//d//6e77rrL7e+vqmPkCgAAAFWJxxe0GDVqlEaNGuXwuWXLlhXbNnjwYA0ePLjE48XExGjGjBmuKg/n4dTIFeEKAAAAlZ9HbyKMyi0+onDkan8q4QoAAACVH+EK5Sb+5LTAlMw8ZeYWeLgaAAAAoHwRrlBuwoP8FHby7sGMXgEAAKCyI1yhXLFiIAAAAKoKwhXKFSsGAgAAoKogXKFcxROuAAAAUEUQrlCuiqYF7idcAQAAoJIjXKFcnZoWyDVXAAAAqNwIVyhXXHMFAACAqoJwhXJVNC3wWGaesvK41xUAAAAqL8IVylV4kJ9CA07e64rRKwAAAFRihCuUO9uKgdxIGAAAAJUY4Qrl7tSNhAlXAAAAqLwIVyh3rBgIAACAqoBwhXLHioEAAACoCghXKHeEKwAAAFQFhCuUu6JrrlgtEAAAAJUZ4Qrlrmjk6mhGrnLyLR6uBgAAACgfhCuUu/AgP1U7ea8rpgYCAACgsiJcodyZTCZWDAQAAEClR7iCW7CoBQAAACo7whXcIj6iMFztTyVcAQAAoHIiXMEtilYMZOQKAAAAlRXhCm7BNVcAAACo7AhXcAtGrgAAAFDZEa7gFkUjV0dOcK8rAAAAVE6EK7hFRLCfgv19JLGoBQAAAConwhXc4vR7Xe1naiAAAAAqIcIV3IbrrgAAAFCZEa7gNqwYCAAAgMqMcAW3ORWuGLkCAABA5UO4gtvERxRNC2TkCgAAAJUP4QpuY1vQgtUCAQAAUAkRruA2ReHqUHqucgu41xUAAAAqF8IV3KZGiL+C/ArvdXUgNcfD1QAAAACuRbiC25x+ryuuuwIAAEBlQ7iCW7FiIAAAACorwhXcKr5oUQvCFQAAACoZwhXcKqE6y7EDAACgciJcwa2YFggAAIDKinAFtzo1ckW4AgAAQOVCuIJb2e51dSJHeQVWD1cDAAAAuA7hCm5VM8RfgX5mGYZ0MI3RKwAAAFQehCu4lclkUnwE110BAACg8iFcwe1YMRAAAACVEeEKbseKgQAAAKiMCFdwO1YMBAAAQGVEuILbxdtGrpgWCAAAgMqDcAW3K5oWuJ+RKwAAAFQihCu4XVG4Sk7nXlcAAACoPAhXcLta1QIU4GuW1SgMWAAAAEBlQLiC25lMJtt1V/tTmRoIAACAyoFwBY84tWIgI1cAAACoHAhX8Ij4iMKRqwOMXAEAAKCSIFzBIxKYFggAAIBKhnAFjygKV/tSmRYIAACAyoFwBY8ouuaKe10BAACgsiBcwSNqn3avKwu3ugIAAEAlQLiCR0RWC5C/T+G9rlLzPF0NAAAAcP4IV/AIs/nUva5Sck0ergYAAAA4f4QreEyCLVx5uBAAAADABQhX8JgERq4AAABQiRCu4DFFKwYycgUAAIDKgHAFj4mPYOQKAAAAlQfhCh7DNVcAAACoTAhX8JiiaYGpuVIBN7sCAABABXdO4erjjz9W586dFRcXp927d0uSJk+erK+//tqlxaFyiwoNkJ+PSVaZdOgEw1cAAACo2JwOV1OmTNHo0aN15ZVXKjU1VRaLRZIUERGhyZMnu7o+VGJms0lx4YVTA/cdz/ZwNQAAAMD5cTpcvfnmm3rvvff01FNPycfHx7b94osv1l9//eXS4lD5xVcPlCTtTy
VcAQAAoGJzOlzt2rVLF110UbHtAQEByszMdElRqDqKVgzcn5rj4UoAAACA8+N0uKpfv77Wr19fbHtSUpKaNm3qippQhZwKV4xcAQAAoGLzdXaH0aNH695771VOTo4Mw9CqVav02WefacKECXr//ffLo0ZUYgkRJ6cFcs0VAAAAKjinw9Vtt92moKAgPf3008rKytJ//vMfxcXF6Y033tANN9xQHjWiEos/ea+rfUwLBAAAQAXndLiSpBtvvFE33nijsrKylJGRoaioKFfXhSqiaFpgclqOLFZDPmaThysCAAAAzo3T11z17NlTqampkqTg4GBbsEpPT1fPnj1dWhwqv6jQAJlNhgqshg6lM3oFAACAisvpcLVs2TLl5eUV256Tk6Off/7Z6QLefvtt1atXT4GBgerQoYNWrVp11vZz5sxRkyZNFBgYqJYtW+r7778v1mbLli0aMGCAwsPDFRISoksuuUR79uxxujaUPx+zSdX9Cz/nXlcAAACoyMocrjZs2KANGzZIkjZv3mx7vGHDBq1bt04ffPCB4uPjnXrx2bNna/To0Ro7dqzWrl2r1q1bKzExUYcPH3bYfsWKFRo6dKhuvfVWrVu3ToMGDdKgQYO0ceNGW5udO3eqS5cuatKkiZYtW6YNGzbomWeeUWBgoFO1wX1qBBiSpH3HszxcCQAAAHDuynzNVZs2bWQymWQymRxO/wsKCtKbb77p1ItPmjRJt99+u0aOHClJmjp1qubPn6/p06friSeeKNb+jTfeUN++ffXoo49KksaPH69Fixbprbfe0tSpUyVJTz31lK688kpNnDjRtl+DBg2cqgvuVSOg8CMjVwAAAKjIyhyudu3aJcMwdMEFF2jVqlWqVauW7Tl/f39FRUXJx8enzC+cl5enNWvWaMyYMbZtZrNZvXr10sqVKx3us3LlSo0ePdpuW2JioubNmydJslqtmj9/vh577DElJiZq3bp1ql+/vsaMGaNBgwaVWEtubq5yc3Ntj9PT0yVJ+fn5ys/PL/N7gvPy8/NVM7Bw5GrPsUz62w2K+pi+dg/62/3oc/ejz92L/nY/+tz9vKnPnamhzOGqbt26kgoDjCscPXpUFotF0dHRdtujo6O1detWh/skJyc7bJ+cnCxJOnz4sDIyMvTSSy/phRde0Msvv6ykpCRdc801Wrp0qbp37+7wuBMmTNDzzz9fbPvChQsVHBx8Lm8PTqgeULhC4Iade/X997s9XE3VsWjRIk+XUKXQ3+5Hn7sffe5e9Lf70efu5w19npVV9ktXzmkpdqnwuqs9e/YUW9xiwIAB53rI81YU/AYOHKiHHnpIUuF0xhUrVmjq1KklhqsxY8bYjYilp6erdu3a6tOnj8LCwsq/8CosPz9fO74s/KbJ8QnRlVd29XBFlV9+fr4WLVqk3r17y8/Pz9PlVHr0t/vR5+5Hn7sX/e1+9Ln7eVOfF81qKwunw9U///yjq6++Wn/99ZdMJpMMo3BKl8lUOPpgsVjKdJzIyEj5+Pjo0KFDdtsPHTqkmJgYh/vExMSctX1kZKR8fX3VrFkzuzZNmzbVL7/8UmItAQEBCggIKLbdz8/P41/MqqDomquDaTky+/hyrys34fx2L/rb/ehz96PP3Yv+dj/63P28oc+deX2nl2J/4IEHVL9+fR0+fFjBwcHatGmTli9frosvvljLli0r83H8/f3Vrl07LVmyxLbNarVqyZIl6tixo8N9OnbsaNdeKhwqLGrv7++vSy65RNu2bbNr8/fff9umNcL7hPtLvmaT8i2GDp/gXlcAAAComJweuVq5cqV+/PFHRUZGymw2y2w2q0uXLpowYYLuv/9+rVu3rszHGj16tIYPH66LL75Y7du31+TJk5WZmWlbPXDYsGGKj4/XhAkTJBUGu+7du+u1115Tv379NGvWLK1evVrTpk2zHfPRRx/VkCFD1K1bN1122WVKSkrSt99+61Twg3v5mKSY8EDtO56tfcezFRse5OmSAA
AAAKc5PXJlsVgUGhoqqXAa3oEDByQVLnhx5ohRaYYMGaJXX31Vzz77rNq0aaP169crKSnJtmjFnj17dPDgQVv7Tp066dNPP9W0adPUunVrffHFF5o3b55atGhha3P11Vdr6tSpmjhxolq2bKn3339fX375pbp06eLsW4UbxUcU3oeMe10BAACgonJ65KpFixb6888/Vb9+fXXo0EETJ06Uv7+/pk2bpgsuuMDpAkaNGqVRo0Y5fM7RaNPgwYM1ePDgsx7zlltu0S233OJ0LfCc+IggSce1n3tdAQAAoIJyOlw9/fTTyszMlCSNGzdOV111lbp27aqaNWtq1qxZLi8QVUNCROFUQG4kDAAAgIrK6XCVmJho+7xhw4baunWrUlJSVL16dduKgYCz4qsXTQskXAEAAKBicvqaK0dq1Kih5OTkEqf3AaWJt41ccc0VAAAAKianRq42bdqkpUuXyt/fX9dff70iIiJ09OhRvfDCC3r33XfP6ZorQJISqheGqwOpObJaDZm51xUAAAAqmDKPXH3zzTe66KKLdP/99+uuu+7SxRdfrKVLl6pp06baunWr5s6dq02bNpVnrajEokMD5GM2Kc9i1ZGMXE+XAwAAADitzOHqhRde0L333qv09HRNmjRJ//zzj+6//359//33SkpKUt++fcuzTlRyvj5mxYSxHDsAAAAqrjKHq23btunee+9VtWrVdN9998lsNuv111/XJZdcUp71oQopmhrIohYAAACoiMocrk6cOKGwsDBJko+Pj4KCgrjGCi6VUD1YEuEKAAAAFZNTC1osWLBA4eHhkiSr1aolS5Zo48aNdm0GDBjguupQpZwauWJaIAAAACoep8LV8OHD7R7feeeddo9NJpMsFsv5V4UqKZ5pgQAAAKjAyhyurFZredYB2Eau9hOuAAAAUAG55CbCgCvULrrmKjVbVqvh4WoAAAAA5xCu4DViwgNlNkl5BVYd5V5XAAAAqGAIV/Aafj5mxYYXTg3cy9RAAAAAVDCEK3iV+IiT112lEq4AAABQsTgVriwWi5YvX67U1NRyKgdVHcuxAwAAoKJyKlz5+PioT58+On78eHnVgyougeXYAQAAUEE5PS2wRYsW+ueff8qjFkAJRSsGEq4AAABQwTgdrl544QU98sgj+u6773Tw4EGlp6fb/QPOB9MCAQAAUFGV+SbCRa688kpJ0oABA2QymWzbDcOQyWSSxWJxXXWocopGrvYfz7adUwAAAEBF4HS4Wrp0aXnUAUgqvNeVySTlFlh1NCNPtUIDPF0SAAAAUCZOh6vu3buXRx2AJMnf16yYsEAdTMvRvuNZhCsAAABUGE6HK0lKTU3VBx98oC1btkiSmjdvrltuuUXh4eEuLQ5VU0L1oJPhKlsX1anu6XIAAACAMnF6QYvVq1erQYMGev3115WSkqKUlBRNmjRJDRo00Nq1a8ujRlQxrBgIAACAisjpkauHHnpIAwYM0HvvvSdf38LdCwoKdNttt+nBBx/U8uXLXV4kqhZWDAQAAEBF5HS4Wr16tV2wkiRfX1899thjuvjii11aHKqm+IjCcLU/lZErAAAAVBxOTwsMCwvTnj17im3fu3evQkNDXVIUqjamBQIAAKAicjpcDRkyRLfeeqtmz56tvXv3au/evZo1a5Zuu+02DR06tDxqRBVz+rRAwzA8XA0AAABQNk5PC3z11VdlMpk0bNgwFRQUSJL8/Px0991366WXXnJ5gah6YiMK73WVk2/Vscw8RVZjOXYAAAB4P6fClcVi0W+//abnnntOEyZM0M6dOyVJDRo0UHBwcLkUiKonwNdH0aGBSk4vXI6dcAUAAICKwKlpgT4+PurTp49SU1MVHBysli1bqmXLlgQruFz8yamB+7nuCgAAABWE09dctWjRQv/880951ALYsBw7AAAAKhqnw9ULL7ygRx55RN99950OHjyo9PR0u3+AK5wKV4xcAQAAoGJwekGLK6+8UpI0YMAAmUwm23
bDMGQymWSxWFxXHaqsU8uxM3IFAACAisHpcLV06dLyqAOww8gVAAAAKhqnwlV+fr7GjRunqVOnqlGjRuVVE2B3I+GiUVEAAADAmzl1zZWfn582bNhQXrUANrHhgZKk7HyLjmfle7gaAAAAoHROL2hx00036YMPPiiPWgCbQD8fRYUW3t+K664AAABQETh9zVVBQYGmT5+uxYsXq127dgoJCbF7ftKkSS4rDlVbQvUgHT6Rq33Hs9UqIcLT5QAAAABn5XS42rhxo9q2bStJ+vvvv+2e47oYuFJC9WCt3ZPKyBUAAAAqBFYLhNdixUAAAABUJE5fc3U2hw8fduXhUMXFnwxX+wlXAAAAqADKHK6Cg4N15MgR2+N+/frp4MGDtseHDh1SbGysa6tDlXb6cuwAAACAtytzuMrJyZFhGLbHy5cvV3a2/S+9pz8PnK9T0wKzOLcAAADg9Vw6LZAFLeBK8RGF4Sozz6JU7nUFAAAAL+fScAW4UqCfj2rZ7nXF1EAAAAB4tzKHK5PJZDcydeZjoDwUjV7tT2U5dgAAAHi3Mi/FbhiGLrzwQlugysjI0EUXXSSz2Wx7HnC1hOpBWr83lZErAAAAeL0yh6sZM2aUZx2AQ6wYCAAAgIqizOFq+PDh5VkH4NDpKwYCAAAA3owFLeDVToUrRq4AAADg3QhX8GpF4Wr/8Wyu6wMAAIBXI1zBq8VHFF5zdSK3QOnZBR6uBgAAACgZ4QpeLcjfR5HV/CVJe7nuCgAAAF6McAWvF8+KgQAAAKgAyrRa4OjRo8t8wEmTJp1zMYAjCdWD9OfeVFYMBAAAgFcrU7hat25dmQ5WdINhwJVYMRAAAAAVQZnC1dKlS8u7DqBECREnVwxMJVwBAADAe3HNFbxeAtdcAQAAoAIo08jVmVavXq3PP/9ce/bsUV5ent1zX331lUsKA4qcmhbINVcAAADwXk6PXM2aNUudOnXSli1bNHfuXOXn52vTpk368ccfFR4eXh41ooqLPxmuTuQUKC0738PVAAAAAI45Ha5efPFFvf766/r222/l7++vN954Q1u3btX111+vOnXqlEeNqOKC/X1VM6TwXleMXgEAAMBbOR2udu7cqX79+kmS/P39lZmZKZPJpIceekjTpk1zeYGAdGr0aj/XXQEAAMBLOR2uqlevrhMnTkiS4uPjtXHjRklSamqqsrIYVUD5YDl2AAAAeDunF7To1q2bFi1apJYtW2rw4MF64IEH9OOPP2rRokW6/PLLy6NGgBUDAQAA4PWcDldvvfWWcnJyJElPPfWU/Pz8tGLFCl177bV6+umnXV4gILFiIAAAALyf0+GqRo0ats/NZrOeeOIJlxYEOMK0QAAAAHg7p6+5+v7777VgwYJi2xcuXKgffvjBJUUBZ4qPKJwWuD+VcAUAAADv5HS4euKJJ2SxWIptt1qtjGKh3BStFpiWna/0HO51BQAAAO/jdLjavn27mjVrVmx7kyZNtGPHDpcUBZypWoCvqgf7SWI5dgAAAHgnp8NVeHi4/vnnn2Lbd+zYoZCQEJcUBTjCioEAAADwZk6Hq4EDB+rBBx/Uzp07bdt27Nihhx9+WAMGDHBpccDpWDEQAAAA3szpcDVx4kSFhISoSZMmql+/vurXr6+mTZuqZs2aevXVV8ujRkDSqXDFtEAAAAB4I6eXYg8PD9eKFSu0aNEi/fnnnwoKClKrVq3UrVu38qgPsImPYDl2AAAAeC+nw5UkmUwm9enTR3369HF1PUCJbNdcpTItEAAAAN6nTNMC//vf/yonJ8f2+dn+nYu3335b9erVU2BgoDp06KBVq1adtf2cOXPUpEkTBQYGqmXLlvr+++9LbHvXXXfJZDJp8uTJ51QbvEdCDUauAAAA4L3KNHL1+uuv68Ybb1RgYKBef/31EtuZTCbdf//9ThUwe/ZsjR49WlOnTlWHDh00efJkJSYmatu2bYqKiirWfsWKFRo6dKgmTJigq666Sp9++qkGDRqktW
vXqkWLFnZt586dq99++01xcXFO1QTvVDQtMDUrXydy8hUa6OfhigAAAIBTyhSudu3a5fBzV5g0aZJuv/12jRw5UpI0depUzZ8/X9OnT3d4U+I33nhDffv21aOPPipJGj9+vBYtWqS33npLU6dOtbXbv3+/7rvvPi1YsED9+vU7aw25ubnKzc21PU5PT5ck5efnKz+fG9aWp6L+LUs/B/pIEUF+Ss3O1+4jJ9Q4JrS8y6uUnOlznD/62/3oc/ejz92L/nY/+tz9vKnPnanhnK65cpW8vDytWbNGY8aMsW0zm83q1auXVq5c6XCflStXavTo0XbbEhMTNW/ePNtjq9Wqm2++WY8++qiaN29eah0TJkzQ888/X2z7woULFRwcXMZ3g/OxaNGiMrWrZvZRqkz6eskvalHdKOeqKrey9jlcg/52P/rc/ehz96K/3Y8+dz9v6POsrLJf7+90uDoz2BQxmUwKDAxUw4YNNXDgQNWoUaPUYx09elQWi0XR0dF226Ojo7V161aH+yQnJztsn5ycbHv88ssvy9fXt8xTFMeMGWP3vtLT01W7dm316dNHYWFhZToGzk1+fr4WLVqk3r17y8+v9Gl+36Wu174thxXboLmuvLSOGyqsfJztc5wf+tv96HP3o8/di/52P/rc/bypz4tmtZWF0+Fq3bp1Wrt2rSwWixo3bixJ+vvvv+Xj46MmTZronXfe0cMPP6xffvlFzZo1c/bw523NmjV64403tHbtWplMpjLtExAQoICAgGLb/fz8PP7FrCrK2td1aoZIkg6m5/K1OU+c3+5Ff7sffe5+9Ll70d/uR5+7nzf0uTOv7/RNhAcOHKhevXrpwIEDWrNmjdasWaN9+/apd+/eGjp0qPbv369u3brpoYceKvVYkZGR8vHx0aFDh+y2Hzp0SDExMQ73iYmJOWv7n3/+WYcPH1adOnXk6+srX19f7d69Ww8//LDq1avn7NuFlym6kTArBgIAAMDbOB2uXnnlFY0fP95uulx4eLiee+45TZw4UcHBwXr22We1Zs2aUo/l7++vdu3aacmSJbZtVqtVS5YsUceOHR3u07FjR7v2UuFczKL2N998szZs2KD169fb/sXFxenRRx/VggULnH278DK2e10RrgAAAOBlnJ4WmJaWpsOHDxeb8nfkyBHbfMSIiAjl5eWV6XijR4/W8OHDdfHFF6t9+/aaPHmyMjMzbasHDhs2TPHx8ZowYYIk6YEHHlD37t312muvqV+/fpo1a5ZWr16tadOmSZJq1qypmjVr2r2Gn5+fYmJibNMYUXEVLce+P5VwBQAAAO/idLgaOHCgbrnlFr322mu65JJLJEl//PGHHnnkEQ0aNEiStGrVKl144YVlOt6QIUN05MgRPfvss0pOTlabNm2UlJRkW7Riz549MptPDbB16tRJn376qZ5++mk9+eSTatSokebNm1fsHleonOJPTgtMycxTZm6BQgI8uuAlAAAAYOP0b6bvvvuuHnroId1www0qKCgoPIivr4YPH267wXCTJk30/vvvl/mYo0aN0qhRoxw+t2zZsmLbBg8erMGDB5f5+P/++2+Z28K7hQf5KSzQV+k5Bdqfmq0Lo7nXFQAAALyD0+GqWrVqeu+99/T666/rn3/+kSRdcMEFqlatmq1NmzZtXFYgcKaE6sHafDBd+45nEa4AAADgNZxe0KJItWrVVKNGDdWoUcMuWAHljRUDAQAA4I2cDldWq1Xjxo1TeHi46tatq7p16yoiIkLjx4+X1WotjxoBO0UrBu4nXAEAAMCLOD0t8KmnntIHH3ygl156SZ07d5Yk/fLLL3ruueeUk5Oj//u//3N5kcDp4hm5AgAAgBdyOlzNnDlT77//vgYMGGDb1qpVK8XHx+uee+4hXKHcnZoWmOXhSgAAAIBTnJ4WmJKSoiZNmhTb3qRJE6WkpLikKOBsuOYKAAAA3sjpcNW6dWu99dZbxba/9dZbat26tUuKAs6m6JqrY5l5ys
or8HA1AAAAQCGnpwVOnDhR/fr10+LFi9WxY0dJ0sqVK7V37159//33Li8QOFN4kJ9CA311IqdAB1Kz1TCK5dgBAADgeU6PXHXv3l1///23rr76aqWmpio1NVXXXHONtm3bpq5du5ZHjUAx8RGFUwP3MjUQAAAAXsLpkStJiouLK7Zwxb59+3THHXdo2rRpLikMOJuE6sHamnyC664AAADgNc75JsJnOnbsmD744ANXHQ44K1YMBAAAgLdxWbgC3IkVAwEAAOBtCFeokIpWDCRcAQAAwFsQrlAhFY1c7SdcAQAAwEuUeUGLa6655qzPp6amnm8tQJkVhaujGbnKybco0M/HwxUBAACgqitzuAoPDy/1+WHDhp13QUBZhAf5qVqArzJyC7TveLYaRlXzdEkAAACo4socrmbMmFGedQBOMZlMSqgedHI59izCFQAAADyOa65QYbFiIAAAALwJ4QoVVtGKgftTCVcAAADwPMIVKqz4CEauAAAA4D0IV6iwTk0LzPJwJQAAAADhChUYNxIGAACANyFcocIqGrk6cqLwXlcAAACAJxGuUGFFBPspxL/w5sEHWNQCAAAAHka4QoVlMpkUz3LsAAAA8BKEK1RoXHcFAAAAb0G4QoXGioEAAADwFoQrVGgJTAsEAACAlyBcoUI7NS2QkSsAAAB4FuEKFVp8ROHI1X5WCwQAAICHEa5QoRVNCzyUnqvcAu51BQAAAM8hXKFCqxHiryC/ontd5Xi4GgAAAFRlhCtUaCaTiRUDAQAA4BUIV6jwWDEQAAAA3oBwhQov/mS42k+4AgAAgAcRrlDhsRw7AAAAvAHhChUe0wIBAADgDQhXqPBiwwvD1fbDGVq585gsVsPDFQEAAKAqIlyhQkvaeFB3f7JGkpSWna+h7/2mLi//qKSNBz1cGQAAAKoawhUqrMJgtVaHT+TabU9Oy9Hdn6wlYAEAAMCtCFeokCxWQ89/u1mOJgAWbXv+281MEQQAAIDb+Hq6AOBcrNqVooNpOSU+b0g6mJajsd9s1MV1a6hWaIAiqwWoVmiAIoL8ZDabyrU+i9XQql0pOnwiR1GhgWpfv4Z8yvk1AQAA4FmEK1RIh0+UHKxO98lve/TJb3vstvmaTapZzV+1QgNUq9qp0HV6ACv6PCzQVyaTc6EoaeNBPf/tZrvwFxseqLH9m6lvi1injgUAAICKg3CFCikqNLBM7To3qClD0pETuTqakavjWfkqsBo6lJ6rQ+m5pe7v72suDGAng1itUP+TH4sHsZAAX9t1YGdORiy6DmzKTW0JWAAAAJUU4QoVUvv6NRQbHqjktByH112ZJMWEB+qjWzvYTcfLK7AqJTNPR07k6khGjo6eyNORjNyTjws/Hj35+YmcAuUVWLU/NVv7U0u/h1aQn1l5FqPE68BMKrwOrHezGKYIAgAAVEKEK1RIPmaTxvZvprs/WSuTZBdoimLL2P7NioUYf1+zYsIDFRMeKCn8rK+Rk2+xjXgVha/CMJZzcvvJkHYiV9n5FmXnW896vKLrwFbtSlHHBjWdfcsAAADwcoQrVFh9W8Rqyk1ti13fFOOi65sC/XxUu0awatcILrVtZm6BZq3ao/Hzt5TatqzXiwEAAKBiIVyhQuvbIla9m8V4fGW+kABfNYs7+0hYkbJeLwYAAICKhXCFCs/HbPKKaXZlvQ6sff0a7i4NAAAAbsBNhAEXKboOTDp13dfpDDm+DgwAAACVA+EKcKGi68AKF8ywFxroqx6NozxQFQAAANyBaYGAi515HVjNEH89OudPHUzP1Vdr9+s/Hep4ukQAAACUA0augHJQdB3YwDbx6tKolm7r1kCSNG35Tlmsjq7IAgAAQEVHuALc4IZLais8yE//HsvSwk3Jni4HAAAA5YBwBbhBSICvhnWsK0ma+tNOGQajVwAAAJUN4Qpwk+Gd6inA16w/96Xpt39SPF0OAAAAXIxwBbhJZLUADb44QVLh6BUAAAAqF8IV4EZ3dG0gs0
n66e8j2nwg3dPlAAAAwIUIV4Ab1akZrCtbxkoqXDkQAAAAlQfhCnCzu7oXLsv+7YaD2puS5eFqAAAA4CqEK8DNWsSHq0vDSFmshj74ZZenywEAAICLEK4AD7iz+wWSpNl/7NXxzDwPVwMAAABXIFwBHtClYaSax4UpO9+imSv/9XQ5AAAAcAHCFeABJpNJd5689mrmin+VnWfxcEUAAAA4X4QrwEOubBGj2jWCdDwrX5+v3uvpcgAAAHCeCFeAh/j6mHVH18Jrr977+R8VWKwerggAAADng3AFeNB17WqrRoi/9h3P1vy/Dnq6HAAAAJwHwhXgQUH+PhrRqZ4k6d2f/pFhGJ4tCAAAAOeMcAV42M2X1lWQn482H0zXz9uPerocAAAAnCPCFeBh1UP8dUP72pKkqT/t9HA1AAAAOFeEK8AL3Nb1AvmYTVqx85g27Ev1dDkAAAA4B4QrwAvERwRpQOs4SYXXXgEAAKDiIVwBXuLO7oXLsv+w8aD+PZrp4WoAAADgLMIV4CWaxISpR+NashqF970CAABAxeIV4ertt99WvXr1FBgYqA4dOmjVqlVnbT9nzhw1adJEgYGBatmypb7//nvbc/n5+Xr88cfVsmVLhYSEKC4uTsOGDdOBAwfK+20A5+2u7g0kSXPW7NORE7kergYAAADO8Hi4mj17tkaPHq2xY8dq7dq1at26tRITE3X48GGH7VesWKGhQ4fq1ltv1bp16zRo0CANGjRIGzdulCRlZWVp7dq1euaZZ7R27Vp99dVX2rZtmwYMGODOtwWckw71a6h17QjlFVg1c8W/ni4HAAAATvB4uJo0aZJuv/12jRw5Us2aNdPUqVMVHBys6dOnO2z/xhtvqG/fvnr00UfVtGlTjR8/Xm3bttVbb70lSQoPD9eiRYt0/fXXq3Hjxrr00kv11ltvac2aNdqzZ4873xrgNJPJpLtPXnv10cp/lZFb4OGKAAAAUFa+nnzxvLw8rVmzRmPGjLFtM5vN6tWrl1auXOlwn5UrV2r06NF22xITEzVv3rwSXyctLU0mk0kREREOn8/NzVVu7qkpWOnp6ZIKpxjm5+eX8d3gXBT1L/18So9GNVW/ZrB2HcvSp7/9q5Gd6rr0+PS5e9Hf7kefux997l70t/vR5+7nTX3uTA0eDVdHjx6VxWJRdHS03fbo6Ght3brV4T7JyckO2ycnJztsn5OTo8cff1xDhw5VWFiYwzYTJkzQ888/X2z7woULFRwcXJa3gvO0aNEiT5fgVdqHm7TrmI/eWbJVNVM2ybccxpjpc/eiv92PPnc/+ty96G/3o8/dzxv6PCsrq8xtPRquylt+fr6uv/56GYahKVOmlNhuzJgxdqNh6enpql27tvr06VNiIINr5Ofna9GiRerdu7f8/Pw8XY7XuDzfoh8n/awjGXmyxLfRgIviXHZs+ty96G/3o8/djz53L/rb/ehz9/OmPi+a1VYWHg1XkZGR8vHx0aFDh+y2Hzp0SDExMQ73iYmJKVP7omC1e/du/fjjj2cNSQEBAQoICCi23c/Pz+NfzKqCvrbn5+enkV3qa2LSNr3/67+67uI6MptNLn8N+tx96G/3o8/djz53L/rb/ehz9/OGPnfm9T26oIW/v7/atWunJUuW2LZZrVYtWbJEHTt2dLhPx44d7dpLhcOFp7cvClbbt2/X4sWLVbNmzfJ5A0A5urFDXVUL8NXfhzK07G/Hq2cCAADAe3h8tcDRo0frvffe08yZM7VlyxbdfffdyszM1MiRIyVJw4YNs1vw4oEHHlBSUpJee+01bd26Vc8995xWr16tUaNGSSoMVtddd51Wr16t//3vf7JYLEpOTlZycrLy8vI88h6BcxEe5Kf/dKgjSZq6jJsKAwAAeDuPX3M1ZMgQHTlyRM8++6ySk5PVpk0bJSUl2Rat2LNnj8zmUxmwU6dO+vTTT/X000/rySefVKNGjTRv3jy1aNFCkrR//3598803kqQ2bdrYvdbSpU
vVo0cPt7wvwBVu6VxfM37dpVX/pmjN7uNqV7e6p0sCAABACTweriRp1KhRtpGnMy1btqzYtsGDB2vw4MEO29erV0+GYbiyPMBjYsIDdfVF8fp89T69+9NOTRt2sadLAgAAQAk8Pi0QwNnd0a3wpsKLthzSjsMZHq4GAAAAJSFcAV6uYVSoejeLlmFI7y3n2isAAABvRbgCKoC7uheOXs1dt1+H0nM8XA0AAAAcIVwBFUC7ujV0Sb3qyrNYNf2XXZ4uBwAAAA4QroAK4q7uDSRJ//t9j9Jz8j1cDQAAAM5EuAIqiMsaR6lRVDVl5Bbof7/t8XQ5AAAAOAPhCqggzGaT7jw5ejX9113KLbB4uCIAAACcjnAFVCADWscpNjxQR07kau7a/Z4uBwAAAKchXAEViL+vWbd2qS9Jmrb8H1mt3DAbAADAWxCugArmhvZ1FBboq3+OZmrh5kOeLgcAAAAnEa6ACqZagK9u7lhXkjT1p50yDEavAAAAvAHhCqiARnSqL39fs9bvTdWqXSmeLgcAAAAiXAEVUq3QAF3XLkFS4egVAAAAPI9wBVRQd3S9QCaTtHTbEW1NTvd0OQAAAFUe4QqooOpFhuiKFjGSpGk//ePhagAAAEC4Aiqwu07eVPibPw9of2q2h6sBAACo2ghXQAXWKiFCnRrUVIHV0Ac/7/J0OQAAAFUa4Qqo4O48OXo16489Ss3K83A1AAAAVRfhCqjgujWKVNPYMGXlWfTxyt2eLgcAAKDKIlwBFZzJZNJd3S+QJH244l/l5Fs8XBEAAEDVRLgCKoF+LWMVHxGkY5l5mrNmn6fLAQAAqJIIV0Al4Otj1u1d60uS3lv+jwosVg9XBAAAUPUQroBK4vpLaqt6sJ/2pGQpaVOyp8sBAACocghXQCUR7O+r4Z3qSZKm/rRThmF4tiAAAIAqhnAFVCLDOtZToJ9ZG/en69cdxzxdDgAAQJVCuAIqkRoh/rrhkjqSpHeX7/RwNQAAAFUL4QqoZG7tUl8+ZpN+3n5UG/enebocAACAKoNwBVQytWsE66pWsZKkd5f/4+FqAAAAqg7CFVAJ3dGt8KbC8zcc0J5jWR6uBgAAoGogXAGVUPO4cHW7sJashvT+L4xeAQAAuAPhCqik7upeOHr1+eq9OpaR6+FqAAAAKj/CFVBJdbygplolhCsn36qZK/71dDkAAACVHuEKqKRMJpPu6t5AkjRz5W5l5hZ4uCIAAIDKjXAFVGKJzWNUr2aw0rLzNfuPvZ4uBwAAoFIjXAGVmI/ZpNtPrhz4wS+7lG+xergiAACAyotwBVRy17ZNUGQ1f+1PzdZ3Gw54uhwAAIBKi3AFVHKBfj4a2bm+JGnqsp367Z9jWnPUpN93pchiNTxcHQAAQOVBuAKqgJs61FWAr1nbDmXo5hlr9NF2H900fbW6vPyjkjYe9HR5AAAAlQLhCqgCVv5zVLkFxa+3Sk7L0d2frCVgAQAAuADhCqjkLFZDz3+72eFzRZMCn/92M1MEAQAAzpOvpwsAUL5W7UrRwbScEp83JB1My9GQd1eqVUKEatcIUkL1YNvHagHl+2PCYjW0aleKDp/IUVRooNrXryEfs6lcXxMAAKA8EK6ASu7wiZKD1elW7z6u1buPF9tePdjPLmzVrm4fvgL9fM65tqSNB/X8t5vtwl9seKDG9m+mvi1iz/m4AAAAnkC4Aiq5qNDAMrUb0ame/HxM2puSrX2pWdqbkq207Hwdz8rX8aw0/bU/zeF+kdUCSgxecRGBCvB1HL6SNh7U3Z+s1ZmTEYuuA5tyU1sCFgAAqFAIV0Al175+DcWGByo5LadYkJEkk6SY8EA9c1WzYtPx0nPytS8lW/uOZ2nv8ZMfTz7edzxbGbkFOpqRq6MZuVq3J7X4sU1SdGhgsfAVFxGoZ77e5LAe42RNz3+7Wb2bxTBFEAAAVBiEK6CS8zGbNLZ/M939yVqZJLtAUxRbxvYvHqwkKSzQT83i/NQsLqzYc4ZhKC
073xa29p4MXHtTTn48nqWcfKuS03OUnJ6jP/4tPuWwJEXXga3alaKODWo69X4BAAA8hXAFVAF9W8Rqyk1ti13fFHMe1zeZTCZFBPsrIthfLRPCiz1vGIaOZebZha2i8LU1+YSOnMgt9TXKer0YAACANyBcAVVE3xax6t0sRit3HNbCn39Xn64d1LFhVLlNuzOZTIqsFqDIagG6qE51u+dW7jymoe/9Vuoxynq9GAAAgDfgPldAFeJjNqlD/RpqF2mogweXPC+6Dqy0V/9151Fl5RW4pSYAAIDzRbgC4HZF14FJOmvAeuvHHer56k/6ev1+GQY3OQYAAN6NcAXAI4quA4sJt5/6FxseqCk3ttWUG9sqoXqQktNz9MCs9bpu6kpt2JfqmWIBAADKgGuuAHhM0XVgq3al6PCJHEWFBqr9adMVL2sSpfd//kfvLNupNbuPa8Bbv+q6dgl6LLGxosK4HgsAAHgXwhUAj/Ixm0pcbj3Qz0ejejbSde1qa2LSVn21br++WLNPP/x1UKN6NtItXeqVeJNiAAAAd2NaIACvFxMeqElD2uirezqpde0IZeZZ9HLSVvV5fbkWbErmeiwAAOAVCFcAKoy2dapr7t2dNOn61ooKDdDuY1m68+M1uumD37Ut+YSnywMAAFUc4QpAhWI2m3RN2wQtfaSH7r2sgfx9zfp1xzFd8cZyPfv1Rh3PzPN0iQAAoIoiXAGokEICfPVoYhMtGd1dV7SIkdWQPlq5Wz1eXaYPf92lfIvV0yUCAIAqhnAFoEKrXSNYU25qp09v76AmMaFKy87Xc99u1pVv/Kyftx/xdHkAAKAKIVwBqBQ6NYjUd/d10QuDWqh6sJ+2H87QzR+s0m0zV+vfo5meLg8AAFQBhCsAlYavj1k3XVpXyx65TLd0ri9fs0mLtxxS79d/0oQftuhETr6nSwQAAJUY4QpApRMe7Kdn+zdT0oNd1e3CWsq3GHr3p3902as/6fM/9spqZel2AADgeoQrAJVWw6hQzRx5iaaPuFj1I0N0NCNXj325QQPf/lWr/03xdHkAAKCSIVwBqNRMJpN6NonWgge76el+TRUa4Ku/9qfpuqkrdf9n63QgNdvTJQIAgEqCcAWgSvD3Neu2rhdo6aM9NLR9bZlM0jd/HlDP15Zp8uK/lZ1nsbW1WA2t3HlMX6/fr5U7j8nCNEIAAFAGvp4uAADcKbJagCZc00o3dqircd9t1qpdKZq8eLs+/2OvxlzZVL5mk8Z9t1kH03Js+8SGB2ps/2bq2yLWY3VbrIZW7UrR4RM5igoNVPv6NeRjNnmsHgAAUBzhCkCV1CI+XLPvuFTf/5WsF7/fov2p2brvs3UO2yan5ejuT9Zqyk1tPRKwkjYe1PPfel/gAwAA9pgWCKDKMplM6tcqVkse7q4HezUqsZ1x8t8zX2/S3pQspWTmKSuvwC2rDiZtPKi7P1lrF6ykU4EvaePBcq8BAACUDSNXAKq8QD8fdahfU9L2s7Y7ciJXXScutdvm72NWgJ9ZgX4+CvAt/BjoZ1agr48C/Xzk72PS8aNmLcv+S0EBfie3n9butP0C7J7zka/ZpGfmbZKjCGdIMkl6/tvN6t0shimCAAB4AcIVAEg6fCKn9EaSfEyS5bS0k2exKs9i1YmcgrPsZda6Y64fYTIkHUzL0apdKerYoKbLjw8AAJxDuAIASVGhgWVq98ltl+qSetWVW2BVTr5FOUUf8y22bbn5Rc9ZlJmTr7V//qUGFzZRvlXKOflcboHF9vnp++bkW5VTUHiMtOw8ZeRaSq3p3k/XqGODSLVOCFfrhAi1iA9XSAA/3gEAcDf+9wUASe3r11BseKCS03IcTsMzSYoJP7VKn6+PuUwBJj8/X6GHN+jKrvXl5+fnVE0rdx7T0Pd+K7VdSma+5m84qPkbCkfHzCapYVQ1tUqIKAxctSPUJCZM/r6V/zJbi9XQ77tStOaoSTV3pahjwyimTAIA3IZwBQCSfMwmje3fTH
d/slYmyS5gFf1qPrZ/M7f+ol6WwBcVFqBXrm2tvw6kacO+VG3Yl6aDaTn6+1CG/j6UoS/W7JNUeG1Y09hQtUqIUKuEcLWpHaELalWrVMHDflVFH320fTWrKgIA3IpwBQAn9W0Rqyk3tS227HmMh35BL0vge35Ac3VrXEvdGteyPXc4PUd/7isMW0UfU7Py9ee+NP25L83WLsTfRy3iC0e2Wp2cUphQPUgmU9kClzfde6toVcUzQ6inl9EHAFQthCsAOE3fFrHq3SzGa0LDuQS+qLBA9W4WqN7NoiVJhmFob0q21u9L1Ya9haNbf+1PU2aeRb/vStHvu1Js+9YI8VerhHDblMJWCRGqFRpQ7DW86d5bFquh57/d7LWrKnpTCAUAlC/CFQCcwcds8qrV98438JlMJtWpGaw6NYM1oHWcpMJf+HccztCfe1P158nphFuT05WSmadl245o2bYjtv3jI4LsAteh9ByN/vzP8x4lMgxDOflWZeYVKCvXUvgxz6KsvAJl5p78mGdRVu5p208+zsyzKDuvcJ+jJ3KL3QfM7nVUuKri6Nnr1bp2YVisFRqgyGqFH8MCfcs8WucsbwqhpyPwlQ39BMBZhCsAqABcHfh8zCY1jglV45hQXX9JbUlSTr5FW5NP2AWunUcytD81W/tTs/XDxuSzHrMobD0850/9suOosvOsys4/LSidFpiKgpFR/vdhtvn6zwP6+s8Dxbb7+5pVq1qAIkMDVKuaf2H4sj22D2LOrMLorVMVCXxl48395G2Ltnjb186b66Kmstfkbed5WXlFuHr77bf1yiuvKDk5Wa1bt9abb76p9u3bl9h+zpw5euaZZ/Tvv/+qUaNGevnll3XllVfanjcMQ2PHjtV7772n1NRUde7cWVOmTFGjRo3c8XYAoEIK9PNRm9oRalM7wrbtRE6+Nu5PPxm2UrXqnxQdzcw763Eycy365Lc9Tr12sL+Pgv19FRJQ+LHwsY9C/H0VHHDqY7DfqTZFH3cfzdQL328p9TUSm0fLz8esIydydSQjV0dP5Co9p0B5BVZbgCxLnUVBqzCA+atWtcCTAczfFsRqhPh75VRFAl/Z6/H+fvKORVu87WvnzXVR07nU5B3nuTM8Hq5mz56t0aNHa+rUqerQoYMmT56sxMREbdu2TVFRUcXar1ixQkOHDtWECRN01VVX6dNPP9WgQYO0du1atWjRQpI0ceJE/fe//9XMmTNVv359PfPMM0pMTNTmzZsVGFi2e9kAAKTQQD91bFDTNmr29fr9emDW+lL3S2wWrYvqVleI/2lhKcDX9vj0EBXk5yPzeQQMi9XQB7/uKnUZ/XdubFcsyOTkW3Q0I1dHM/IKQ9eJXB3NyLX//OTjwqmJFu1JydKelKxzrlc6NVXxgVnrlFA9WD5mycdkktlssn00m0zyMevkx8J/ts+L2p7+/Gn7+5iLjnHquJL01NyNJQY+SRr7zSZdekFNBfr5yN/HfF5fl7LytiDjrdfweVs/eWtN3loXNVXcmpxlMgx3TsoorkOHDrrkkkv01ltvSZKsVqtq166t++67T0888USx9kOGDFFmZqa+++4727ZLL71Ubdq00dSpU2UYhuLi4vTwww/rkUcekSSlpaUpOjpaH374oW644YZSa0pPT1d4eLjS0tIUFhbmoncKR/Lz8/X999/ryiuvdPoeQDg39Ll7Vbb+Luu9tz67/VK3XrdW9B+y5HhVRVf8h5yZW2ALXqcHsCNFwezkaNiRjFzlFVjP67W8hY/ZJD8fk/zMZvn5mgs/9zHL38csPx+z/HwLH/uaTUo7fkyx0VHy9/U5+bzpZBuz/WMfs/xPHstsMum/S7YrPaegxBoigvz0xBVNZKgw+FgNQxarcdrncrDNkMUwZLWeer5o+6m2ctDW0NGMXK3dk1pq37StU121Qv0dh97Twm9Jobl429P3ly0g+5hMkkma8P1WpWXnl1hP9WA//d+gFm4JxJJktRp6ct5GpWZ5T0
3lWVdBgUVr165V27Zt5evr4xU1nY+KWFPRH8p+ebyn26cIOpMNPDpylZeXpzVr1mjMmDG2bWazWb169dLKlSsd7rNy5UqNHj3abltiYqLmzZsnSdq1a5eSk5PVq1cv2/Ph4eHq0KGDVq5c6TBc5ebmKjc31/Y4PT1dUuEvRfn5JZ90OH9F/Us/uw997l6Vrb8vSghVTFiADqXnnmWUKEAXJYS69T1f3jhSb97QWi98v1XJ6ad+nseEB+ipK5ro8saR512Pv1mKC/NXXJi/pNAS2xmGoaXbjujO/60v9ZhXtohWdFig3S/9RYHh9F/4DaPwsaN2xUKDYchqPRVEitql5+TraMbZp3SeqSiw5Mgq5ZbW2qy/0446dfyySM3O1xNf/eXy456vtXuOe7oEO8ez8nXPp+s8XYYdb6xJOp+6fDT97z9dXo/knX3lbTUVjfiv3HFYHerXcOtrO/P/h0fD1dGjR2WxWBQdHW23PTo6Wlu3bnW4T3JyssP2ycnJtueLtpXU5kwTJkzQ888/X2z7woULFRwcXLY3g/OyaNEiT5dQ5dDn7lWZ+vvKGJOmp5tPPjr9r4eGDElXRGdpQdIPHqhMeryZtDPdpPR8KcxPahCWKcvuNfp+t3vrsBpShL+PUvMk+z4qYijCX+pdbb/MRXPNTJLMDpq6yPY0k97aXPpf3O9ualG90MJAVmBIFkMqsBZ+PP3zAsN0MgSerY1OtjHZPi86psUqHc6RdmeU/qbjg62qHiCZTYVddHIwp/DxGZ8XPe/4c6PUdkeypaUHS++ny2Itigws/IXPahT+K/65qfCxJOPk9rN9bjVOPj7j89RcaX9W6f1UK9BQNTcNkGfkS0dySh89cGdNknfWRU1lU9aaFv78u45tce/Eu6yssk8F9/g1V95gzJgxdqNh6enpql27tvr06cO0wHKWn5+vRYsWqXfv3pViylRFQJ+7V2Xs7ysltd10qNgoUWx4oJ66ookSm0eXvLMbeEuf+9U7pPtmFf6Vu/hURZNeuKa1W/vKYjX0xWvLSx11fOCGbk5PuTnXPv99V4pumr661HYv39DebX+ptlgN9ShDP025y/l+Oldl7afX/3OJ2/rJG2uSyq+u8/m54o19VZFr6tO1g9tHropmtZWFR8NVZGSkfHx8dOjQIbvthw4dUkxMjMN9YmJiztq+6OOhQ4cUGxtr16ZNmzYOjxkQEKCAgOI3yfTz86s0vwx5O/ra/ehz96ps/X1VmwRd0Sre65bvPZ2n+/yqNgny9fVx6gbQ5clP0nMDmuvuT9bKJMfXpo3t31yBAf7n/hpO9nnHhlGKDQ8sdTESdy7D7I5+cpY39pM31uSOus7l54o39hU1OceZr3k5TkAonb+/v9q1a6clS5bYtlmtVi1ZskQdO3Z0uE/Hjh3t2kuF022K2tevX18xMTF2bdLT0/X777+XeEwAgPOK7r01sE28Ojao6VXBylv0bRGrXx7vqc9uv1Rv3NBGn91+qX55vKfHVrvq2yJWU25qq5hw+5VzY8IDPbIKl4/ZpLH9m0kqPnnyVJBp5vZzi36qmDV5a13UVHFrOhcenxY4evRoDR8+XBdffLHat2+vyZMnKzMzUyNHjpQkDRs2TPHx8ZowYYIk6YEHHlD37t312muvqV+/fpo1a5ZWr16tadOmSZJMJpMefPBBvfDCC2rUqJFtKfa4uDgNGjTIU28TAFBFufoG0Oerb4tY9W4W4zWjjkVBxltG+E6vi36qeDV5a13UVHFrcpbHw9WQIUN05MgRPfvss0pOTlabNm2UlJRkW5Biz549MptPDbB16tRJn376qZ5++mk9+eSTatSokebNm2e7x5UkPfbYY8rMzNQdd9yh1NRUdenSRUlJSdzjCgAAEfjKylv7aeWOw1r48+/q07WDR6ZIOarJ27523lgXNTlXkzed587weLiSpFGjRmnUqFEOn1u2bFmxbYMHD9bgwYNLPJ7JZNK4ceM0btw4V5UIAADKkb
cFGW/lYzapQ/0aOrbFUAcvCDFFNXnj184b66KmsvHG87ysPHrNFQAAAABUFoQrAAAAAHABwhUAAAAAuADhCgAAAABcgHAFAAAAAC5AuAIAAAAAFyBcAQAAAIALEK4AAAAAwAUIVwAAAADgAoQrAAAAAHABwhUAAAAAuADhCgAAAABcgHAFAAAAAC7g6+kCvJFhGJKk9PR0D1dS+eXn5ysrK0vp6eny8/PzdDlVAn3uXvS3+9Hn7kefuxf97X70uft5U58XZYKijHA2hCsHTpw4IUmqXbu2hysBAAAA4A1OnDih8PDws7YxGWWJYFWM1WrVgQMHFBoaKpPJ5OlyKrX09HTVrl1be/fuVVhYmKfLqRLoc/eiv92PPnc/+ty96G/3o8/dz5v63DAMnThxQnFxcTKbz35VFSNXDpjNZiUkJHi6jColLCzM4984VQ197l70t/vR5+5Hn7sX/e1+9Ln7eUuflzZiVYQFLQAAAADABQhXAAAAAOAChCt4VEBAgMaOHauAgABPl1Jl0OfuRX+7H33ufvS5e9Hf7kefu19F7XMWtAAAAAAAF2DkCgAAAABcgHAFAAAAAC5AuAIAAAAAFyBcAQAAAIALEK5QbiZMmKBLLrlEoaGhioqK0qBBg7Rt27az7vPhhx/KZDLZ/QsMDHRTxRXfc889V6z/mjRpctZ95syZoyZNmigwMFAtW7bU999/76ZqK7569eoV62+TyaR7773XYXvOb+ctX75c/fv3V1xcnEwmk+bNm2f3vGEYevbZZxUbG6ugoCD16tVL27dvL/W4b7/9turVq6fAwEB16NBBq1atKqd3UPGcrc/z8/P1+OOPq2XLlgoJCVFcXJyGDRumAwcOnPWY5/KzqSop7TwfMWJEsf7r27dvqcflPHestP529HPdZDLplVdeKfGYnOMlK8vvgzk5Obr33ntVs2ZNVatWTddee60OHTp01uOe68//8ka4Qrn56aefdO+99+q3337TokWLlJ+frz59+igzM/Os+4WFhengwYO2f7t373ZTxZVD8+bN7frvl19+KbHtihUrNHToUN16661at26dBg0apEGDBmnjxo1urLji+uOPP+z6etGiRZKkwYMHl7gP57dzMjMz1bp1a7399tsOn584caL++9//aurUqfr9998VEhKixMRE5eTklHjM2bNna/To0Ro7dqzWrl2r1q1bKzExUYcPHy6vt1GhnK3Ps7KytHbtWj3zzDNau3atvvrqK23btk0DBgwo9bjO/Gyqako7zyWpb9++dv332WefnfWYnOclK62/T+/ngwcPavr06TKZTLr22mvPelzOccfK8vvgQw89pG+//VZz5szRTz/9pAMHDuiaa64563HP5ee/WxiAmxw+fNiQZPz0008ltpkxY4YRHh7uvqIqmbFjxxqtW7cuc/vrr7/e6Nevn922Dh06GHfeeaeLK6saHnjgAaNBgwaG1Wp1+Dzn9/mRZMydO9f22Gq1GjExMcYrr7xi25aammoEBAQYn332WYnHad++vXHvvffaHlssFiMuLs6YMGFCudRdkZ3Z546sWrXKkGTs3r27xDbO/myqyhz1+fDhw42BAwc6dRzO87Ipyzk+cOBAo2fPnmdtwzledmf+Ppiammr4+fkZc+bMsbXZsmWLIclYuXKlw2Oc689/d2DkCm6TlpYmSapRo8ZZ22VkZKhu3bqqXbu2Bg4cqE2bNrmjvEpj+/btiouL0wUXXKAbb7xRe/bsKbHtypUr1atXL7ttiYmJWrlyZXmXWenk5eXpk08+0S233CKTyVRiO85v19m1a5eSk5PtzuHw8HB16NChxHM4Ly9Pa9assdvHbDarV69enPfnKC0tTSaTSREREWdt58zPJhS3bNkyRUVFqXHjxrr77rt17NixEttynrvOoUOHNH/+fN16662ltuUcL5szfx9cs2aN8vPz7c7XJk2aqE6dOiWer+fy899dCFdwC6vVqgcffFCdO3dWixYtSmzXuH
FjTZ8+XV9//bU++eQTWa1WderUSfv27XNjtRVXhw4d9OGHHyopKUlTpkzRrl271LVrV504ccJh++TkZEVHR9tti46OVnJysjvKrVTmzZun1NRUjRgxosQ2nN+uVXSeOnMOHz16VBaLhfPeRXJycvT4449r6NChCgsLK7Gdsz+bYK9v37766KOPtGTJEr388sv66aefdMUVV8hisThsz3nuOjNnzlRoaGipU9Q4x8vG0e+DycnJ8vf3L/YHmrOdr+fy899dfD366qgy7r33Xm3cuLHU+ccdO3ZUx44dbY87deqkpk2b6t1339X48ePLu8wK74orrrB93qpVK3Xo0EF169bV559/Xqa/uuHcffDBB7riiisUFxdXYhvOb1Qm+fn5uv7662UYhqZMmXLWtvxsOj833HCD7fOWLVuqVatWatCggZYtW6bLL7/cg5VVftOnT9eNN95Y6uJDnONlU9bfBysyRq5Q7kaNGqXvvvtOS5cuVUJCglP7+vn56aKLLtKOHTvKqbrKLSIiQhdeeGGJ/RcTE1NsNZ5Dhw4pJibGHeVVGrt379bixYt12223ObUf5/f5KTpPnTmHIyMj5ePjw3l/noqC1e7du7Vo0aKzjlo5UtrPJpzdBRdcoMjIyBL7j/PcNX7++Wdt27bN6Z/tEue4IyX9PhgTE6O8vDylpqbatT/b+XouP//dhXCFcmMYhkaNGqW5c+fqxx9/VP369Z0+hsVi0V9//aXY2NhyqLDyy8jI0M6dO0vsv44dO2rJkiV22xYtWmQ3uoLSzZgxQ1FRUerXr59T+3F+n5/69esrJibG7hxOT0/X77//XuI57O/vr3bt2tntY7VatWTJEs77MioKVtu3b9fixYtVs2ZNp49R2s8mnN2+fft07NixEvuP89w1PvjgA7Vr106tW7d2el/O8VNK+32wXbt28vPzsztft23bpj179pR4vp7Lz3+38ehyGqjU7r77biM8PNxYtmyZcfDgQdu/rKwsW5ubb77ZeOKJJ2yPn3/+eWPBggXGzp07jTVr1hg33HCDERgYaGzatMkTb6HCefjhh41ly5YZu3btMn799VejV69eRmRkpHH48GHDMIr396+//mr4+voar776qrFlyxZj7Nixhp+fn/HXX3956i1UOBaLxahTp47x+OOPF3uO8/v8nThxwli3bp2xbt06Q5IxadIkY926dbaV6V566SUjIiLC+Prrr40NGzYYAwcONOrXr29kZ2fbjtGzZ0/jzTfftD2eNWuWERAQYHz44YfG5s2bjTvuuMOIiIgwkpOT3f7+vNHZ+jwvL88YMGCAkZCQYKxfv97uZ3tubq7tGGf2eWk/m6q6s/X5iRMnjEceecRYuXKlsWvXLmPx4sVG27ZtjUaNGhk5OTm2Y3Cel11pP1cMwzDS0tKM4OBgY8qUKQ6PwTledmX5ffCuu+4y6tSpY/z444/G6tWrjY4dOxodO3a0O07jxo2Nr776yva4LD//PYFwhXIjyeG/GTNm2Np0797dGD58uO3xgw8+aNSpU8fw9/c3oqOjjSuvvNJYu3at+4uvoIYMGWLExsYa/v7+Rnx8vDFkyBBjx44dtufP7G/DMIzPP//cuPDCCw1/f3+jefPmxvz5891cdcW2YMECQ5Kxbdu2Ys9xfp+/pUuXOvw5UtSvVqvVeOaZZ4zo6GgjICDAuPzyy4t9LerWrWuMHTvWbtubb75p+1q0b9/e+O2339z0jrzf2fp8165dJf5sX7p0qe0YZ/Z5aT+bqrqz9XlWVpbRp08fo1atWoafn59Rt25d4/bbby8WkjjPy660nyuGYRjvvvuuERQUZKSmpjo8Bud42ZXl98Hs7GzjnnvuMapXr24EBwcbV199tXHw4MFixzl9n7L8/PcEk2EYRvmMiQEAAABA1cE1VwAAAADgAoQrAAAAAHABwhUAAAAAuADhCgAAAABcgHAFAAAAAC5AuAIAAAAAFyBcAQAAAIALEK4AAAAAwAUIVw
CAc/bvv//KZDJp/fr1ni7FZuvWrbr00ksVGBioNm3aeLqcc/Lhhx8qIiKi1HYmk0nz5s0r93pKc/PNN+vFF18sc/ujR48qKipK+/btK8eqAMD9CFcAUIGNGDFCJpNJL730kt32efPmyWQyeagqzxo7dqxCQkK0bds2LVmyxGGbon4zmUzy8/NT/fr19dhjjyknJ8fN1Ur16tXT5MmT7bYNGTJEf//9t+3xc8895zAoHjx4UFdccUU5V3h2f/75p77//nvdf//9Zd4nMjJSw4YN09ixY8uxMgBwP8IVAFRwgYGBevnll3X8+HFPl+IyeXl557zvzp071aVLF9WtW1c1a9YssV3fvn118OBB/fPPP3r99df17rvves0v+0FBQYqKiiq1XUxMjAICAtxQUcnefPNNDR48WNWqVXNqv5EjR+p///ufUlJSyqkyAHA/whUAVHC9evVSTEyMJkyYUGIbRyMfkydPVr169WyPR4wYoUGDBunFF19UdHS0IiIiNG7cOBUUFOjRRx9VjRo1lJCQoBkzZhQ7/tatW9WpUycFBgaqRYsW+umnn+ye37hxo6644gpVq1ZN0dHRuvnmm3X06FHb8z169NCoUaP04IMPKjIyUomJiQ7fh9Vq1bhx45SQkKCAgAC1adNGSUlJtudNJpPWrFmjcePGyWQy6bnnniuxTwICAhQTE6PatWtr0KBB6tWrlxYtWmT3WhMmTFD9+vUVFBSk1q1b64svvrA9v2zZMplMJs2fP1+tWrVSYGCgLr30Um3cuNHudX755Rd17dpVQUFBql27tu6//35lZmba3vfu3bv10EMP2UbSJPtpgR9++KGef/55/fnnn7Y2H374oe39nj4t8K+//lLPnj0VFBSkmjVr6o477lBGRobt+aKv8auvvqrY2FjVrFlT9957r/Lz821t3nnnHTVq1EiBgYGKjo7WddddV2IfWiwWffHFF+rfv7/d9nr16unFF1/ULbfcotDQUNWpU0fTpk2za9O8eXPFxcVp7ty5JR4fACoawhUAVHA+Pj568cUX9eabb573NSw//vijDhw4oOXLl2vSpEkaO3asrrrqKlWvXl2///677rrrLt15553FXufRRx/Vww8/rHXr1qljx47q37+/jh07JklKTU1Vz549ddFFF2n16tVKSkrSoUOHdP3119sdY+bMmfL399evv/6qqVOnOqzvjTfe0GuvvaZXX31VGzZsUGJiogYMGKDt27dLKpwm17x5cz388MM6ePCgHnnkkTK9740bN2rFihXy9/e3bZswYYI++ugjTZ06VZs2bdJDDz2km266qVhwfPTRR/Xaa6/pjz/+UK1atdS/f39bWNm5c6f69u2ra6+9Vhs2bNDs2bP1yy+/aNSoUZKkr776SgkJCRo3bpwOHjyogwcPFqttyJAhevjhh9W8eXNbmyFDhhRrl5mZqcTERFWvXl1//PGH5syZo8WLF9teq8jSpUu1c+dOLV26VDNnztSHH35oC2urV6/W/fffr3Hjxmnbtm1KSkpSt27dSuy3DRs2KC0tTRdffHGx51577TVdfPHFWrdune655x7dfffd2rZtm12b9u3b6+effy7x+ABQ4RgAgApr+PDhxsCBAw3DMIxLL73UuOWWWwzDMIy5c+cap/+IHzt2rNG6dWu7fV9//XWjbt26dseqW7euYbFYbNsaN25sdO3a1fa4oKDACAkJMT777DPDMAxj165dhiTjpZdesrXJz883EhISjJdfftkwDMMYP3680adPH7vX3rt3ryHJ2LZtm2EYhtG9e3fjoosuKvX9xsXFGf/3f/9nt+2SSy4x7rnnHtvj1q1bG2PHjj3rcYYPH274+PgYISEhRkBAgCHJMJvNxhdffGEYhmHk5OQYwcHBxooVK+z2u/XWW42hQ4cahmEYS5cuNSQZs2bNsj1/7NgxIygoyJg9e7at/R133GF3jJ9//tkwm81Gdna2YRiGUbduXeP111+3azNjxgwjPD
zc9tjR188wDEOSMXfuXMMwDGPatGlG9erVjYyMDNvz8+fPN8xms5GcnGx733Xr1jUKCgpsbQYPHmwMGTLEMAzD+PLLL42wsDAjPT39rP1XZO7cuYaPj49htVrtttetW9e46aabbI+tVqsRFRVlTJkyxa7dQw89ZPTo0aNMrwUAFYGvJ4MdAMB1Xn75ZfXs2bPMozWONG/eXGbzqUkN0dHRatGihe2xj4+PatasqcOHD9vt17FjR9vnvr6+uvjii7VlyxZJhQseLF261OE1OTt37tSFF14oSWrXrt1Za0tPT9eBAwfUuXNnu+2dO3fWn3/+WcZ3eMpll12mKVOmKDMzU6+//rp8fX117bXXSpJ27NihrKws9e7d226fvLw8XXTRRXbbTn/vNWrUUOPGje3e+4YNG/S///3P1sYwDFmtVu3atUtNmzZ1uu6SbNmyRa1bt1ZISIhtW+fOnWW1WrVt2zZFR0dLKvwa+/j42NrExsbqr7/+kiT17t1bdevW1QUXXKC+ffuqb9++uvrqqxUcHOzwNbOzsxUQEOBw8ZRWrVrZPjeZTIqJiSl23gQFBSkrK+vc3zQAeBnCFQBUEt26dVNiYqLGjBmjESNG2D1nNptlGIbdttOvsyni5+dn97hoNb0zt1mt1jLXlZGRof79++vll18u9lxsbKzt89NDgTuEhISoYcOGkqTp06erdevW+uCDD3TrrbfarlOaP3++4uPj7fZzZgGJjIwM3XnnnQ5X0qtTp855VH/uzvb1DA0N1dq1a7Vs2TItXLhQzz77rJ577jn98ccfDpeGj4yMVFZWlvLy8uymVJb2OkVSUlJUq1YtF7wrAPAOXHMFAJXISy+9pG+//VYrV660216rVi0lJyfbBSxX3pvqt99+s31eUFCgNWvW2EZl2rZtq02bNqlevXpq2LCh3T9nAlVYWJji4uL066+/2m3/9ddf1axZs/Oq32w268knn9TTTz+t7OxsNWvWTAEBAdqzZ0+xmmvXrm237+nv/fjx4/r777/t3vvmzZuLHaNhw4a2MOLv7y+LxXLW+srSpmnTpvrzzz9ti2VIhX1jNpvVuHHjMveFr6+vevXqpYkTJ2rDhg36999/9eOPPzpsW7RIyubNm8t8/NNt3Lix2EggAFRkhCsAqERatmypG2+8Uf/973/ttvfo0UNHjhzRxIkTtXPnTr399tv64YcfXPa6b7/9tubOnautW7fq3nvv1fHjx3XLLbdIku69916lpKRo6NCh+uOPP7Rz504tWLBAI0eOLDUwnOnRRx/Vyy+/rNmzZ2vbtm164okntH79ej3wwAPn/R4GDx4sHx8fvf322woNDdUjjzyihx56SDNnztTOnTu1du1avfnmm5o5c6bdfuPGjdOSJUu0ceNGjRgxQpGRkRo0aJAk6fHHH9eKFSs0atQorV+/Xtu3b9fXX39tt8hEvXr1tHz5cu3fv99uBcXT1atXT7t27dL69et19OhR5ebmFmtz4403KjAwUMOHD9fGjRu1dOlS3Xfffbr55pttUwJL89133+m///2v1q9fr927d+ujjz6S1WotMZzVqlVLbdu21S+//FKm458uKytLa9asUZ8+fZzeFwC8FeEKACqZcePGFZt+1bRpU73zzjt6++231bp1a61ateq8rs0600svvaSXXnpJrVu31i+//KJvvvlGkZGRkmQbbbJYLOrTp49atmypBx98UBEREXbXd5XF/fffr9GjR+vhhx9Wy5YtlZSUpG+++UaNGjU67/fg6+urUaNGaeLEicrMzNT48eP1zDPPaMKECWratKn69u2r+fPnq379+sXe+wMPPKB27dopOTlZ3377rW1UqlWrVvrpp5/0999/q2vXrrrooov07LPPKi4uzrb/uHHj9O+//6pBgwYlTpG79tpr1bdvX1122WWqVauWPvvss2JtgoODtWDBAqWkpOiSSy7Rddddp8svv1xvvfVWmfsgIiJCX331lXr27KmmTZtq6t
Sp+uyzz9S8efMS97ntttvsrikrq6+//lp16tRR165dnd4XALyVyThzEj4AACjVsmXLdNlll+n48eMOr0eqKrKzs9W4cWPNnj3bbnGP0lx66aW6//779Z///KccqwMA92LkCgAAnLOgoCB99NFHJU5pdOTo0aO65pprNHTo0HKsDADcj5ErAADOASNXAIAzEa4AAAAAwAWYFggAAAAALkC4AgAAAAAXIFwBAAAAgAsQrgAAAADABQhXAAAAAOAChCsAAAAAcAHCFQAAAAC4AOEKAAAAAFzg/wGnrl9B9tiB6gAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "'Encoding the data with more bits improves the logical error rate to the point where logical errors are virtually nonexistent. \\nNotice how going from n to n+1 when n is odd does not add much improvement as even numbers can result in voting where a tie occurs.'" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ + "# EXERCISE 1\n", + "\n", "def encode(bit, n):\n", " \"\"\"Function that encodes a single bit rendundantly n times\n", "\n", @@ -413,10 +363,10 @@ { "cell_type": "markdown", "id": "b394caa4", - "metadata": { - "id": "b394caa4" - }, + "metadata": {}, "source": [ + "---\n", + "\n", "## 1.3 More Efficient EC Codes (The Hamming Code)\n", "\n", "There are many clever ways to improve the efficiency of EC codes. One common way is to make use of a concept called **parity checks**. Parity checks provide a clever way to index where errors occur, without a brute force statistical approach like the repetition code. \n", @@ -425,248 +375,18 @@ "\n", "This is accomplished by each parity bit encoding a parity, or the mod2 sum of a subset of the data bits. The [Venn diagram](https://en.wikipedia.org/wiki/Hamming_code) below depicts the encoding. In this example, $p_1$ encodes the parity of $d_1$, $d_2$, and $d_4$. 
If our data bits ($d_1d_2d_3d_4$) were 0110, then $p_1$ would be calculated to be 1.\n", "\n", - "\"Drawing\"\n", + "\"Venn\n", "\n", - "Either using the static Venn diagram above or the interactive one generated by executing the cell below, \n", - "reason through the following example:\n", + "Using the Hamming code widget [here](https://nvidia.github.io/cuda-q-academic/interactive_widgets/hamming.html), reason through the following example:\n", "\n", "> If you wanted to send the message 0110 (here $d_1 = 0$, $d_2 = 1$, $d_3 = 1$, and $d_4 = 0$), appending the three parity bits to the end of the original bitstring would produce the logical codeword: 0110110 (where $p_1 = 1$, $p_2 = 1$, and $p_3 = 0$). Note, this is a slight deviation from the traditional placement of the bits in the Hamming code done for simplicity.\n", ">\n", ">Errors could occur on any of the data or parity bits. Assume an error occurs on $d_2$ and the recipient receives 0010110. To produce the syndrome, the recipient can take the received data bits, 0010, and compute the expected parity. This is then compared to the parity that was sent, 110. The parity bits that disagree flag an error. \n", ">\n", ">In this case, the received message has parity bits 011 which disagrees with 110. Here, $p_1$ and $p_3$ are flagged. This syndrome can only correspond to an error on $d_2$ based on the Venn diagram. 
\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "8b233a2a", - "metadata": {}, - "outputs": [ - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "371571b399ca459bbf4588b5e425797c", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "VBox(children=(Label(value='Hamming Code Visualization: Computing parity bits (p1, p2, p3)'), Dropdown(descrip…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAoAAAAJrCAYAAABwTiJiAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjcsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvTLEjVAAAAAlwSFlzAAAPYQAAD2EBqD+naQAAoqhJREFUeJzs/Xm0ZPtd1/8/91i75rnOfE7Pw53nS0ajEJQkQDAxAkaBgPwCIopLwOkLujQqIYILNXwNsgijQvwaAY0KSIKAkDk3uUPu2OMZa56r9vj749xz0n3T3beHqrPrVL0fazXc7j5d592dqr1f+zO8P0oQBAFCCCGEEGJmqGEXIIQQQgghDpYEQCGEEEKIGSMBUAghhBBixkgAFEIIIYSYMRIAhRBCCCFmjARAIYQQQogZIwFQCCGEEGLGSAAUQgghhJgxEgCFEEIIIWaMBEAhhBBCiBkjAVAIIYQQYsZIABRCCCGEmDESAIUQQgghZowEQCGEEEKIGSMBUAghhBBixkgAFEIIIYSYMRIAhRBCCCFmjARAIYQQQogZIwFQCCGEEGLGSAAUQgghhJgxEgCFEEIIIWaMBEAhhBBCiBkjAVAIIYQQYsZIABRCCCGEmDESAIUQQgghZowEQCGEEEKIGSMBUAghhBBixkgAFEIIIYSYMRIAhRBCCCFmjARAIYQQQogZIwFQCCGEEGLGSAAUQgghhJgxEgCFEEIIIWaMBEAhhBBCiBkjAVAIIYQQYsZIABRCCCGEmDESAIUQQgghZowEQCGEEEKIGSMBUAghhBBixkgAFEIIIYSYMRIAxU37nd/5Hb77u7+be+65B03TOHLkSNglCSGEeIVer8e/+3f/jq//+q9nYWGBZDLJgw8+yM/+7M/ieV7Y5YkJIQFQ3LRf+7Vf49d+7ddIp9MsLi6GXY4QQohreOmll/ibf/NvEgQBf+fv/B0+8IEPcPToUb7/+7+f97znPWGXJyaEEgRBEHYR4nDY2NigWCxiGAZve9vbePLJJzl//nzYZQkhhLhCpVJhe3ubu++++6pff8973sMv/MIv8Pzzz3PixImQqhOTQkYABf/4H/9jFEXhy1/+Mu9617tIpVLk83n+1t/6WwwGg/2vW1xcxDCMECsVQojZdjPX60Kh8FXhD+BbvuVbAHjmmWcOtGYxmSQAin3vete7GAwG/It/8S94y1vews/8zM/wvd/7vWGXJYQQ4hVu53q9tbUF7AZEIfSwCxCT4+jRo/zmb/4mAH/jb/wNUqkUH/zgB/m7f/fvct9994VcnRBCiD23er22bZt//a//NUePHuXRRx896HLFBJIRQLHvb/yNv3HVz//m3/ybAHzsYx8LoxwhhBDXcavX6x/4gR/g6aef5t/+23+LrsvYj5AAKK5w8uTJq35+/PhxVFWVjR5CCDFhbuV6/ZM/+ZP83M/9HP/0n/5T3vKWtxxQhWLSSQAU16UoStglCCGEuAnXu15
/+MMf5kd/9Ed573vfyz/6R//ogKsSk0wCoNj3/PPPX/XzF154Ad/3peGzEEJMmJu5Xv/mb/4m3/M938Nf/It/kX/37/7dAVcoJp0EQLHvlReIf/Nv/g0A3/AN3xBGOUIIIa7j1a7X/+f//B++9Vu/lTe+8Y386q/+Kqoqt3txNVkJKvadO3eOb/qmb+Iv/IW/wJ/8yZ/wK7/yK3z7t387999/PwBf/OIX+a3f+i1g92mz2Wzyz/7ZPwPg/vvv5xu/8RtDq10IIWbJja7XFy5c4Ju+6ZtQFIV3vvOdfOQjH7nqz953333S2UFIABRf8eu//uv82I/9GH/v7/09dF3nB37gB/jJn/zJ/d//3Oc+x//z//w/V/2ZvZ9/x3d8hwTA2xQEAV5gY3vdq3+4XfzAwfVt/MDBCxw838H1Bzh+H8cb4Afu/tofBQVe/rH7SwoqKppqYWhRDNVCVyNoqrn7/5WX/78aIaIlMPUEES2JrkZk/eeUCYIAd+hjdx2GPQe76+LaPp7t4dk+ru3hOT6u7eMOPZy+i2t7BP7LB0UFsHdmVPCV/0DVVHRLQ49oGBEN1VDRdBVVV9HN3f9vxnTMqI4R0/f/WzNVeY/doRtdr8+dO0ez2QS+ercwwI//+I9LABQSAMVXFIvFr3pSvNJ3fud38p3f+Z0HV9CU8HyXgdug79Tpu3V6Tp2+U6Nrl+m7DfpOA8fv4/kOfmDvB77dSLd7kwyu+L8KCoqioSoqX1nFceWJji9/dbD73wE+fvCVA+C/8povv56ivhwGTTTVxFBjRI0sMSNPzMgR0ZNYepqYkSNm5IkaWXTVHNO/lrgdru0xaNr0GkN6DZtB28buOvTqNr36kH7Lxum7eM7Loc/1CbyAQNl7N+wKACUARVNQ1Zd/54ov+Epme/k9FAQE/ss/gv1X+MqXBLs/VF1BM3fDoWaoGJaOlTKwUibxvEUsHcFKm0Rf/mGlTDRdpixv5EbX6ze96U3IKa/i1UgAFGIEgiDA9jq07W06w226TpmOvUNrsEHHLuP6fRx/gOcP2btBaoqBphpoym7wimhxNDWLphioinEgIyRBsBsQPd/GC2xcf8jQbdFzKmz7T+EH7t5XoioGhhrF0KLEzSJpa4mEObcfDJOReaJ6VkZ2xiQIAvpNm065T6++G/S6lT7N7R7d6gBn4OEOPDw3QFF2HwA0XUEzVDRTQzNUrISBZkbQDBVFVQ7sPea7wW74fPnHoOPQrQ3wXB/PeTkloqCb6u5ooqURy0VIz8WI5y3ieYtEwSJRiGLGdXmPCTECEgCFuEWe79C1y7thz96mNVin2n+Jrl3B9jq4/hAIUNAwtCi6ahHRUiTMEpoyWdOriqKgoKFqUQyi1/26IAjwA+flqec+9f55djrPELA7sqgqBqaWIGZkycdOkLFWSEbmSZrzxM0SmiqXmlvhuT7d6oB2uU+nPKCx0aV2vk2/OcTuuXiuvzsSrCoYloZuaURTJkZJR9UPJtjdLEVR0IzdIHojQRDg2T7OcDfI1i91KT/fwvcDFECL7E4nx3MWudUkqbkoiUKURNEinrNe9fWFEFeTq7IQr2LotmkMLtEYXKTae5Fy91mGbgvb7xIEAYqiYKpxTC1BOrKMrloTdQMeBUVR9kcqLT191e/trmF0sL0OPadKvXqBAH//3yWip8hGj1CInSBjrZKJrspI4RX2RvYa612aG12qF9rUL3UYdhzsngsBKKqCGd9dQxfP74adafv3UxQFPbK7npDU1b+3u4bRw+65tLZ6VM61dv9dFDCiOlbSpHAsSX4tRXoxRnohTiRhhPMXEeKQUAJZKCDEviAIaNtbNAeXqPcvUu5+mcbgEgO3iR84qIpOREsR0ZOYWhxV0cIueWL5gffyhpYOQ7eNF9goioalp0lFFpmL30U2eoRMdJWEOffymsbp5/sBnUqf5nqX+uUuO883aG33GLZdfD9AN9T9sGfGdFRtNv5dbofv+Th9j2HHYdBx8D0
fTVeJJA0yi3GKx9OkF+OkF2Mki9GpC81C3AkJgGLmDdwW1d6LVHsvsNH+Aq3hBkO3DQQYapSInsbSU+hqJOxSDz3Pdxh6LfpOE8fvAgoRLUkiUqQUv5ti/DSF2AliRn5qbtZBENCrD6meb1N5qcXO8w061QF214Ug2B/BiiQN2fgwAq7tMWzvBkJ34IKiEInrJOdiLNyVJX8kRX4tKSOEYuZJABQzx/Od/encrc6XKHefo+82CAIXU0sQ1bNE9KSM7h0AP/CxvTYDt8nQbREAUT1DLnacheR9FGInyUaPHLpdx67t0bjcpXK+xdYzdWoX2wxaDgCRuIGVNDATxld22oqx8b2AYdeh37Sxey6qphBNmRSOp5g7lSG3liSzFJfwLWaOBEAxE2y3S7n3LFvtJ9lof4GOvYPj9dBUk5iRI6pn0VQZEQibH3j0nQZ9t4rrD9DVKAmzxELyPorxM5TiZ4kambDLvKZ+06b8QpPyS002n67TrQ1wBx56RCOWiWClDJnOnQCe49Nv2vSbQzzXx7B211Uu3JVl7nSG4rEUZkyuBWL6SQAUU2vgttjuPM1W54tstJ6g65QJAn+/p52hxqZmmnEaBUGA4/fpOzX6bgNFUYjqORaS97GYepC5+N1EjfSrv9AY9Zs2Oy802HyqxuYzdfpNG4IAK2kSzUQwLE3eYxMsCAKcvke/MWTQtlE0lXguwsLZLHNnspROprGSh2v0WYibJQFQTJWh22a78zQb7S+w0f48PbsKCkT1HHEjj3bIphLFV/iBS8+p0rWrBPjEjDyLyftZTD1IKX7XgYXBQctm5/kmG0/X2Hq6Tq8xRFEhlrGIZUxUmUo8tDzHp1cf0GvaAMQyERbvzrFwV5bSyYysGxRTRQKgOPQ836Xc/TKXW5/lUvNTdO0dUCCm54kZBelBN4X8wKVrV+k6FSAgbhRYSD3Acuph5hP3YmjWSL+fM3DZfrbB+pdqbD5do1cfggLxbIRYJiKhbwp5rr97ikpjSAAk8hbL9xdYujdH4Xha1gyKQ08CoDiUgiCgPdxkvf15ztf/iPrgAp4/JGbkiRvSeHiWeL5Lz6nQdaooikI6ssSRzOtZSj9M1lq77SnYIAioX+6w8WSNC5/eobXdI/B3Q180G5EAMEP2GnP3Gja6qZJdjrP6SInFu3MkS9JeRhxOEgDFoTJ0O2x2vsilxifZ6nyJvtPA1BIkI/OYWizs8kTIXH9Ix96m7zaIaElKibs4knkti8kHiOjJm3qNQdtm65k6Fz5TpvxCk2HXwUqaJIoWuik7w2ed3XfplPvYPRcrZTJ3OsPqg0Xmz2Zk84g4VCQAikOhMbjMxcafcK7+f2gNt1AVlYRZIqrn5OlbfJUgCBi4Tdr2Fn7gkjDnWM18Davpx8jHTn5V02nfD6iea3H5iQqXPl+hUxmgagqJYhQreTDnMovDJQgC+g2bTnVAEAQki1GOPDbH6kMF0gvxsMsT4lVJABQTyw88tjtPc67+f1hvfZaeWyem50ia89KyRdw0z3fpOjt0nQqmGmcucTfHcm9iMfkAOAabT9U498lttp9r4PRdYtnI7nFrMsUrbpLn+HQqfXoNm2jGZOnePEceLVE6mZbWP2JiSQAUE8d2u1xufYYX65+g3H0Wz7dJmvNTdTqECMfAbdEabuC7EB0cR/3E19LfVNDQSJai0vJD3JEgCOg1hnTKAzRDpXgsxdGvmWfpvpxMD4uJIwFQTIyuXeFc/Q95qf4JWoMNVEUnbS1jajKdIkaj1+/Rbnahl0G3iygXjuI2FehESMfTRGPRsEsUU8LuubS2e3iOT3o+xpGvmePIoyXiudHuUBfidkkAFKFrD7d5qf4JXqx9go69TVTPkDQXZJpXjEQQBHS6HbqtIeogT0xdIJ9ao5Q+Ar5K+VKNyqUm/YqD39JIRtLEE3EZbRYj4bk+7Z0+g5ZNvGBx7DXzHHt8jkRRHjZEuCQAitC0Bhu8WP8EL9U+QdcuEzeLJMz
5r1qgL8TtCIKAbq9Luz5Et+dI6AsU00fJJZZQ1VduAvGpb7YoX6rT3Rni1jSSVpp4XIKgGA3fD+iU+/RqQ2L5CEcfn+PYa+ZJzUn3AhEOCYDiwDUGl3ix+nHONf6QnlMlYZRImCUUCX5iRLq9Du36EG1YJGmsMJ89SSY+96p/LggCWuUum+fKdDaHeA2NVDRLPC43aTEagR/QqQ7oVgdEMxGOPFbi2GvmySzKUhdxsCQAigPTGm7yXOV/cq7+R/TcGilznrhRkhEWMTL9fo9mo4PaL5HQllnIniSTWLjl91gQBDS2W2y9VKWzbeM3dDKJLNGoTNuJ0QiCgG51QKcyIJo2WXu0xMk/s0iqJA8b4mBIABRj13PqvFD9PZ6v/i5dp0LSXCBuFCT4iZEZDPs06x3o5YlrS8xnTl5zqvdW+b5PbaPF9rkqvR2HoGmSSWaxopERVS5mXRAE9GpD2uU+sVyEE29Y5MTrFoimZUe6GC8JgGJsbK/Hufof8uXKf6fZv0zcLJA0b300Rojrse0h9XoLulni6hKl9HEKqbU7Dn6v5Ps+lYsNdi7U6O24KB2TTDpHxJSbtBiNIAho7+yuEUwvxjj9Z5dZe6yEGZVjLcV4SAAUI+f5Dhebn+TL5f9Opfc8ES1J2lpGVeQYLTEarutQrzfwO2miLFJMH6OUPjr2M6A912fnYo3KhTr9HQ+1Z5HL5NANuUmL0fA9n+Zmj2HXoXA0xZmvXWb5/gKaIWukxWhJABQjEwQBW50neWrnv7LV+RKaYpC11tBUGSURo+H7Aa1Wk35DJxasUEgeYy57HP2AWwa5jsfO+Rrli3UGWz7RIEUqnZId7GJkPMenfrmD7/nMn8ly9s0rzJ3OyAyKGBkJgGIkOnaZp3d+i5fqn8D1h2StI5iaLGYWo9Pv92hUexj2IjnrOMuFs5hGuJsy7IHD+nPb1C52cMsamURONoqIkXL6LvX1DrqpcfTxOc6+eYV4XppJizsnAVDcEde3ean+Bzy985u0huukIyvEjFzYZYkp4rou9Xodv5Mjqa6ylL+LdLwUdllXaVY6rD+3TWfdQe1Y5LI5dF2mhcXo9BpDWps9Uosxzn7dCkceK6GbsqxG3D4JgOK2BEHATvfLPLnzX9hofZ6IliBtrcoUmBiZIAhotVr06iqWv8Jc+iTzmRMj3+AxKr7ns32uys65OoPtgDgpkumUTNmJkfH9gOZGF7vnsnBXlru/YY3icXmPidsjAVDcsp5T4+md3+bF2u/j+D1y1jEMTaa9xOgMhgMa1Q76YIGMdZTl/F1Y5uFolDvoDrn83A7NS13cqkY2UZC2MWKknIFL/VIHw9I59rp5znztMrGMvMfErZEAKG6aH/hcan6SL259hHr/HKnIEjEjL0+fYmQ836Neq+O1U8SV3enebGIh7LJuS327xcZzO3Q3XfRejGw2i6bJlJ0YjSAI6NWHtLf7ZFYS3PvWNVYelP6q4uZJABQ3pefU+OLWR3ip/gcoqOSiR6Wtixipbq9Dq+JiuSsUkydZyJ0ce1uXcfNcn60Xy5TPNxhuQyqSl2PlxEj5nk/9Uhff9zn2NfPc89Y1GQ0UN0UCoLihIAi42PwkX9r+CNX+S2Qja0SNTNhliSni+z61Wg2vlSVtHGOtdD9RMxl2WSPVbw+5+PQGzUtD9E6MXC43sWsZxeHUb9k01rvkVndHA5cfkNFAcWMSAMV19ZwaX9r+/3ix9nEUFHLRYzLqJ0ZqMOxTL/eI2KvMpc6wkD01tcHI9322Xqyy/WINe0slG5e1gWK0fM+ndqkDARx7zRz3vGWNaFreY+LaJACKrxIEAZdbn+aJrd+g2nuRrCWjfmK0giCg0WgwbFgk1WOslu4naeXDLutAtOs9Lj61SeeyQ9RJkc6kZaRGjFS/adPc6JJbS3LPW9dYvl/WaouvJgFQXMX2ejy5/VGerfwPIJBRPzFyjmNTrTTQe0sU4qdYKd5z6Nf63SrP9bn05S2q51q
4ZZ1CtohhHOxpJmK67Y0GKsCpNy1x119YlXOFxVUkAIp99f4FPrfxy6y3PkcqskTcnI0RGXFw2u0WnZpC3D/KSv5essnFsEsKVX2ryeVnyvTWXRJqlmRqutY+ivB16wPa230W78nx4F88TmbpcLRTEuMnAVDgBz7n63/EE1v/iY69QyF2El2VdSNidDzPpVKpo3RLZCPHWSveF/oxbpPCHjhceHqT5oUeQcOkkC2i6TLqLkbHHXpUz7dIFKPc+7YjrD1aQlVlSnjWSQCccUO3zRe3PsLztd9FU0yy1hFZKyJGqj/o09gZYLlHWMzcRSlzVN5jrxAEATsXamw9X2WwAdlYASsq572K0QmCgPrlLr7jc+INC9zz1jUicVl2MMskAM6wau9FPrvxS2y1v0jGWiVqZMMuSUyZVrtJr2qSUk5wdP4homYq7JImWr895PyX1mldcEgEGZkSFiPXbw5pbvSYO5PhwXccJ78m77FZJQFwBgVBwPnGH/P5zV+l51QoRE+iqWbYZYkp4vsBtXoVv5knZ53m6NyDM7fR43a5jsf5Jzeon+uitWLk8jk5Y1uMlGt7VM+3ieUiPPD2Y6w9UpRR+RkkAXDGeL7Dkzsf5emd30RVdJnyFSPnui7Vch29v8JC6m4WcqflPXaLgiBg88UKm89VcbcNitkiui4BWozO/pSw53P2zSvc/edX0Qx50JglEgBnSN9p8LmNX+al+idImPMkzGLYJYkpMxgOqO/0iTrHWCs+QDYx27t871R9q8nFp3YYrAdko0VpHC1GrlMd0K0MOPI1czz0F49hpWQ2aFZIAJwRtf55Pn3559nqPEk+epyIngi7JDFl2u02napGSjnBsfmHZb3fiPTbQ849cZn2RZcEsi5QjN6w61C/2GHuTIaH33WC7LLcH2aBBMApt3uqx2f47MYv0h5uUYydRlNl55cYHd8PqNdruM0sucgpjsw9hKHJe2yUHMflwpc2qJ/roXfiZHNZWRcoRspzfCrnWiSLUR58xzGW7y+EXZIYMwmAU8wPfJ4tf4wvbn8EP3DJR4+jyE1DjJDnuZR3auj9FeaTd7OUPyPr/cbE9302Xyiz/XwDZ1unlCuiybpAMUKBH1C72EbRFO596xqn/uyy9AucYhIAp5TnOzyx9Rs8U/4tLD1DKrIQdkliyjiOTWWrQ9Q9xmr+AXLJpbBLmgnVjSaXn95hcBkK6ZIcISdGrr3TY9B2OPN1y9z71iOyOWRKSQCcQrbX47Prv8gLtd8jFVkkZsiRbmK0bHtIdatH3DvF8fnHiFuZsEuaKd1mj5e+sEHvvEcuUSISkc0hYrR69SHtnT7HX7/Ag+84JucITyEJgFOm59T51OX/wMXmn5CzjhLRZcG4GK3+oE9j2yUZnOT4wmNETXmPhaHfGfLS5y/RueCRiRSJyskhYsSGHYf65Q6rDxV55FtPEE3Lg8Y0kQA4RVrDTT556d+z2XmCQvQUhiZnrYrR6va6tHYgo5zm+MJjcp5vyOyBw4ufv0TrnE1KKxCPx8IuSUwZZ+BSPddm/myWR//KSVIleY9NCwmAU6LSe4FPXvoQ1f4LlGJn5GQPMXLtdotuxSCrn+XYwmOy03dCOLbLS1+4TOOlAYkgSzIpI7JitDzHp/xik9xaksf+yik5Pm5KSACcAtudp/mTSz9Lx96iGDuDqmhhlySmTLPZYFBNkIuc5dj8Q3Ks24TxXJ9zX7xM7cUeUTtNOp0OuyQxZXzPp/JSi0QxyuPvPkXpZCbsksQdkgB4yG22v8SfXvp/6TlVijE5ckuMXq1ew61nKUbvZq10P6oqOwInke/7XHhyk8rzbcxugmwuF3ZJYsoEQUDlpRaxbITH/sop5s9kwy5J3AEJgIfYRusL/Onlf0/frVOMSvgToxUEAdVqlaBVYj5xL8uFu+U9NuGCIODyl7fZfraB2oiRz+flfzMxUnshMJo2efTbT7F4tzxoHFYSAA+py63P8snLH2LotilET8p
FXoyU7weUKxW0zhKL6ftZzJ0KuyRxCzZfKLPxTJWgEqFQKMqpIWKkgiCger5NJGHw6LefZOkeaTV2GEkAPIQuNT/FJy//HI7XIx89IeFPjFQQBFQqFZT2MqvZhylljoZdkrgNO+drXHpqB8oWhXxRrhNipIIgoHqhjRnVefTbTsrRcYeQPBYeMhcaf8KfXvr3uF5fwp8Yi2qtitJeZDnzoIS/Q6x0JMfSXQXID6hWq2GXI6aMoijk15I4A49P/epzXPxcOeySxC2SAHiIXGx8kk9d/jn8wCEXPS7hT4xctVolaM6xmHqA+ezxsMsRd2j+aIGFs3n8TI9arRZ2OWLKKIpCbjWB5/h8+j8+z6XPSwg8TCQAHhIbrS/wqfX/gOc7ZK2jEv7EyNXrdbxmgYXkfSzmZc3ftFg8UWThbBY32aFeq4ddjpgyiqKQXdkNgZ/59RfYfFoeNA4LCYCHwHbnGT55+UPYXpdc9JiEPzFyjWYdp55hIX4fS/mzYZcjRmzxVInSmTR2vE2z0Qi7HDFl9kYCh12HT//H5ym/0Ay7JHETJABOuGrvJT55ebfPn+z2FePQbDUZVlOUYvewXJRWL9NIURRWzsxTPJ2ib7VoNeUGLUZLURQKR1N0a0M++avPUbvYDrsk8SokAE6wxuASf3rpg7SGG9LkWYxFu91iUIlRtO5htXifvMemmKIorN41T+Fkgp7ZpN2SG7QYLUVRKB5L0dru8clffo7mZjfsksQNSACcUO3hNn9y8YPU+ucoxs6gSB8vMWKdbodexSJv3c2RuQfkhI8ZoKoqR+5dJHciTker0+l0wi5JTBlFVSgeS1O/1OZPf/k5OuV+2CWJ65Ar/gTqOXX+9NLPUu49SzF2Vs72FSPX63fp7GjkjLs4NveQhL8Zoqoqx+5bInciRpsavV4v7JLElFE1hcLxNJUXm3zyV56j3xyGXZK4BrnqTxjHG/CZ9V9gs/MEpdgZNFUPuyQxZWzbprnjk9HPcGzhYVRVHjBmjaqpHLt/mczRCE27im07YZckpoymqxSPp9l8psZnf+NFnIEXdkniFSQAThA/8PjC5q9xvvFH5KMn0VQz7JLElPE8l+p2m2RwgmPzj8gDxgzTdJWj9y8TX9WptnbwXLlBi9HSDJX8kRQXPrPDF3/rHL7nh12SuIIEwAkRBAHP7Pw3nq3+TzKRVUwtFnZJYsr4fkBlp07UPc7RuUcwdSvskkTITMvg6P2LWMtQru/gB3KDFqNlRnXSC3Ge+8Q6z/7+OnL67OSQADghLjT+L1/a/s/E9BxRIxN2OWIK1apV9P4qa4UHiVuZsMsREyKejrJ6Twl9zpEj48RYRNMm0UyEJz92gYufldNCJoUEwAmw3Xmaz278IoqikozMh12OmEKNZoOgXWQxfS/ZxGLY5YgJk51Ps3A6T5DuS6NoMRbJYhSAz/+Xl9iRRtETQQJgyJqDdT51+T8wcJtkrSNhlyOmULfbZViLUYidZT57IuxyxISaP1YgfyJJ32zR7crOYDF62ZUE/abNZ/7T8zS35D0WNgmAIRq6HT69/vPUBxcoSKNnMQa2PaRVDsjop1iTRs/iBhRFYe2uBdJHLFpOheFQWneI0do7LaR+qcNnf/15hl3ZfR4mCYAh8QOfJ7b+Exutz1OMnUKVRs9ixFzXpbrdJckJjs1Luxfx6nbbwyyRWNOpdcq4rht2SWLKqJpC4ViKzafqfPG3z+P7sikkLJI6QvJC9fd4vvq7ZKw1dDUSdjliyvh+QHWnTtQ5zrH5RzBkx6+4SUbE4Mj9S1jLiuwMFmOhmxrppTgv/tEmL/3xZtjlzCwJgCHY7jzNE1u/gaklZMevGItavYo+WGOt+CCxSDrscsQhE09FWbt3HnPeo1athV2OmELRlIkZ0/nib59n5/lG2OXMJAmAB6xrV/jM+oexvTbpyHLY5Ygp1Om28Zo55lNnySYWwi5HHFKZUpK5Ezm8ZE/ODBZjkZqPMey6fO4/v0S
3Ngi7nJkjAfAAub7NZzd+iUr/eQqxk7IgX4yc49i0K5A1TrCQPRV2OeKQmz+eJ70ape3WcRxZsC9GS1EUCkeSVM+1+Pz/9xKuLafRHCQJgAckCAKe2vmvnG/8MYXoSVRFjuASo+X7AdVyi7h/jCNzD6Cq8vEWd0ZVVY7cu0hsWaXSlPWAYvRUXSW3luTCZ3d4+ncuyUkhB0juEAdko/15nin/NklzTo55E2NRq9cwBqusFe/HNKJhlyOmhGkZrN69gFHyqdXqYZcjppAZ00kUojz7vy+z8ZSsOT0oEgAPQNeu8vnNX8MPXBJmKexyxBTqdjt4zSzzqTOk43NhlyOmTLqYYO5EFi/epduRBr5i9BJ5C8/1+eJvnqdXlx6UB0EC4Jj5gccTW79Orf8i+aicwiBGz3VdWlWPjHGMhezpsMsRU2rhRIHUqkXLqUp/QDEW+bUktQvt3f6Aniw3GDcJgGP2Uu0POFf/A7LWUVRFGvGK0QqCgGqlTsw7ylpJ1v2J8VFVlbW7F4guqlTqFVmrJUZO1VQyy3HOf2qb85/cCbucqSd3izGq9y/yxe2PYKgxLD0VdjliCjVbTbTeAku5e7DMeNjliClnxSMsnSmi5W1azWbY5YgpZCVN9IjGlz52gcZ6N+xyppoEwDFxvAGf3/xVuvYOGWs17HLEFBoMBwzqEXLRkxRSK2GXI2ZEfilD7kiSntZmOJC1WmL0MktxOuU+X/ivL+EMpDXMuEgAHJNnyv+N9dZnyEdPSL8/MXK+H9CodEhwlNXSvWGXI2bM8tk5EssatU5VWsOIkVMUhfyRJOtfqvHsxy+HXc7UkgA4BjvdL/PlyseIG0UMTdpxiNFrNhvowyVWCveiq0bY5YgZYxg6y2fm0QsurUYr7HLEFDIsnXguwrO/v075RVluMA4SAEfM8QZ8ces3sL02CVPacYjRcxybYTNCPnqcdFzaColwpIsJcqtJ+lobx5ZTQsToJQoWw67Dk//9Au5QpoJHTQLgiL1Q/T022k+Qjx6XqV8xckEQUKs0iQWrLBfuCrscMeOWT88RW9CoNCphlyKmkKIo5FaTbD5d54U/2gi7nKkjAXCE6v2LPF3+LWJ6Fl21wi5HTKFOp43an2MxdxeGHgm7HDHjjIjO4qkCWs6h3WqHXY6YQkZEw0qbPPN7l2VX8IhJABwRz3f50vZv0HWqpCJLYZcjppDrunRqkDGPkU8uh12OEADkFtOkV2J0vIY0iBZjkZqL0qsN+dJ/P4/nyqajUZEAOCLnG3/IxeanyEePydSvGItavY7lrbJSvEfeY2JiKIrCyuk5rHmFWkPOcRWjtzcVfPkLFS58ShpEj4oEwBFoD7f50vZ/wVCjmJo04xWj1+t3Cdp55jOnpOGzmDhWIkLpWBY/MaDXlbOCxeiZMR3D0nnqdy7SqfTDLmcqSAC8Q0EQ8OTOR2kNLpOx1sIuR0whz/dpVmxS+hpz6eNhlyPENc0dzZNajtAc1PB8maYTo5deitPa6PHU/7wkRxGOgATAO7TZ+SLn639ExlpFVeSfU4xeo17HdJZZKd4rZ/2KiaWqKitn5jBLAY16PexyxBRSVYX0YpwLn9lh6xl5j90puZvcAdcf8tT2R/EDh6iRDbscMYWGwwFuK0EpcYKEJe+xV/Pud7+b973vfWGXMbPimRiFo2lsqyvHxImxiKZNPNfn6f91SXoD3iEJgHfgpdofsNV5klz0aNiliCnVqLWJK6ss5s+EXcqh87/+1//iu77ru3jsscc4deoUTz/99B293nA45Ed/9Ed529vextmzZ/m+7/u+EVU6XRZPlIjP69TbsiFEjEduOcH2cw3Of2o77FIONQmAt6lrV3mm/N+IaEnp+SfGotfvovQLzGVOoKl62OUcOv1+n4cffpi/+3f/7khez/M8LMvir/7Vv8prX/vakbzmNNJ0lbmjeZS0Q68nG0LE6OkRDTOm88z/vkyvLiPNt0vuKrfpuer/ojm4xFzinrB
LEVMoCAJatT4p7TT55GrY5UykXq/Hj//4j/O7v/u7xONx3vOe91z1+29/+9sBuHx5NIfJx2Ix/sk/+ScAfO5zn6PVkjNwrye/nKZyuU7zuQbRaFTaFomRSy/E2X6uznN/sM4Dbz8WdjmHkgTA21DrneOF6u+RjCygKlrY5Ygp1Om00YbzLBRPycaP63j/+9/Ppz/9aT74wQ+Sz+f5qZ/6KZ566inOnj1706/x3d/93Xz2s5+97u8vLi7ysY99bBTlzhRVVVk4UaRXWafT7pBMJcMuSUwZVVNIFqO8+H+3WH24SG5F3mO3SgLgLfIDn6fLv8XAbTIXXwm7HDGFfN+n0/DIG6tkEwthlzORut0uH/nIR/jABz6wPx37Ez/xE7zxjW+8pdd53/vex3B4/SkkXZdL5O3KlJIkF6LUn2kS9+PyICNGLp632H62wZd/9zJf851nUFUZab4VcnW7RdudJ7nU/BQZa02mNcRYNFtNTGeBxQXZ+HE9Fy9exHEc7r///v1fy2QyHD16axuy5ufnR12auMLiiSKdnUu0ai0y2UzY5YgpoygKmaU4l56ocOzZBvNnpVPCrZAAeAv8wOPL5Y/h+jaWngq7HDGFPM9l0NSZs45I25cDIFPA45XIxsgsxynX2yTdJJouS2bEaFlJk+ZWj+c+sU7pVBpVk5HmmyUB8BZstL/ARvsLZOXEDzEm9UYTyzvCwvzpsEuZaKurqxiGwRNPPMHi4iIAzWaT8+fP89hjj93068gU8PgtHi/S2urS2GqQz+fDLkdMocxinI2na2w+U2fpHnmP3Sy5ut0kz3f5cvljQEBET4RdjphCjmPjtmPMxY8RNeU9diPxeJx3vvOdvP/97yeTyZDP5/npn/7pq9aZNRoNNjY22NnZPTz+3LlzABSLRYrFInDrU8AvvPACtm3TbDbpdrv7vQXvuuuuUfy1ppKViJBbTbFZa+E4KQzDCLskMWUicYPAh+c+vs78mSyaLqOAN0MC4E1ab32W7c6Tct6vGJt6vUk0OM187mTYpRwKP/IjP0K32+W9733vfhuYdru9//u///u/z9/7e39v/+c/9EM/BMAP/MAP8IM/+IO39T3/+l//66yvr+//fK/VzHPPPXdbrzcrFo4VqW92qF+oUyqWwi5HTKHMUpztZxtsfKnKyoPFsMs5FJRATlR+Va5v8/GX/jk7vWcpxWRqTozecDigsa6xkn4ti7lTYZcjxMhtvlDm0uerZJU5IlYk7HLEFCq/2KR4PMWbfuA+NENGAV+N/AvdhEvNT7HdfZpsREb/xHg0m22iygJzGWloKqZT6UieaEGn2W6EXYqYUpmlODsvtLj8RCXsUg4FCYCvwvWHPFv5H2iKjqHJkW9i9BzHJuimKCTX5Mg3MbU0XSW/nMaLDXEcJ+xyxBQyLB1VV3j24+u4Qy/sciaeBMBXcbn1WSrd52XtnxibZrNFhDmK6VvrYSfEYVNczWLlNJrNZtiliCmVWYhTPd9m48la2KVMPAmAN+AHHi9U/zeKqqGrsmZFjJ7ruridGPnYGoYu7zEx3YyITnY5gWP2cF037HLEFNIjGooCL/7xJr7nh13ORJMAeANb7S+x03maTGQ57FLElGq1WkT8kqz9EzNj/kgeM6/QarXCLkVMqfRCnO3nG2w/2wi7lIkmAfA6giDgxdrH8fEwtXjY5Ygp5Pk+w7ZOOrqKacbCLkeIA2FGTTILCYZqF8+XERoxemZMJ/ACXvy/W0ijk+uTAHgdld7zrLc/TzqyFHYpYkq1201Mr8R8Vkb/xGwpHcmh5wI6MgooxiQ5F2Pz6RrV8+1X/+IZJQHwOl6qfwLH6xLR5MxfMXq+H9BvQSqyTCySDrscIQ5UPBUlPR+j53fwAxkFFKNnJQ3srsu5T26HXcrEkgB4DY3BZS42PknSXEBRlLDLEVOo021jOCXmM8fDLkWIUMyt5dCzPt1ON+xSxBRSFIVkKcqlz5Vpbsp77FokAF7D+fof0XN
qxAw5VFqMXhAE9JouSWOJZKwQdjlChCKZT5CYs+gMWrJOS4xFLBuh17Q5/+mdsEuZSBIAX2Hgtjjf+CPiRkFG/8RY9Ho9NLtAKSN9/8RsK61m0bIevV4/7FLEFFIUhXgmwoXPlBl2pPn4K0kAfIWN1udpD7dIRubCLkVMqU67R0ybJxNfCLsUIUKVmUsRzRu0u7IZRIxHvGDR2elLY+hrkAB4BT/wean+f9AUA1WRI7nE6LmuS9BPkY0vygizmHmKopBdSBHEbWkMLcZC01VUQ+H8p7bxfVlqcCUJgFeo9p6n3H2WVGQx7FLElGq32xhBjkJqNexShJgIheUMZlqh3ZZ2HWI8UqUY5ZdaVM/JSPOVJABe4WLjk7h+n4ieDLsUMYWCIGDQDUhFFjGNaNjlCDERTMsgWYoy8HuyGUSMRSRh4Aw8Ln2hEnYpE0UC4Mt6Tp0LzT8hZsiuTDEe/UEfzc5TSMrRgkJcKbeQRk15DPqDsEsRUyqeNbn0+Qr9ph12KRNDAuDLNlqfo2PvkDBLYZciplSn3cVSCqTj82GXIsREycwlsXI67Y5MA4vxiOctOpUBG09Wwy5lYkgABPzA41z9DzFUC1XRwi5HTCHP9/B7MbLxRVRVPnZCXElVVTJzCTxrgOfJySBi9FRNRTfV3c0g8h4DJAACUOu9RLX3IklT2nKI8ei02+henkJ6LexShJhIheUsRlqhI6OAYkySpSjVCx1qFzthlzIRJAACG+0nsP0uphYPuxQxpfpdl6Q5T9SUDUZCXEs0ESFejNBz5OYsxsOM6TgDl60v18MuZSLMfAB0fZuLzT/F0tLSl02MxXA4QBlkycnmDyFuKDefRkl4DIfDsEsRU0hRFCIJg0ufK+PaXtjlhG7mA2C5+yytwbps/hBj0253sJQCueRS2KUIMdFyi2kiWVV6AoqxSeQtmls9Ki9JT8CZD4Ab7S/gBTaGJn3ZxOj5foDTM0lFF9FUOV1GiBvRdJX0XAJH6+MHslBfjJ5h6Xi2z+YzcjTcTAdA2+txqfkponou7FLElBoM++huhmxCNhgJcTOypRRaMmDYl2lgMR6xTITLX6hi92f7+MGZDoDbnafo2NskzGLYpYgp1ev2MJUMqai8x4S4GclCDDOp0e11wy5FTKndnoB9dp5rhF1KqGY6AK63PgcEaKoZdiliSjl9jaQ1J73/hLhJqqqSKERxVDkVRIyHZqgEPmw8OdvTwDN7Vxq6HTbaXyCm58MuRUypwXCA5mRIx2WDkRC3Il1IosZ9BgOZBhbjEctG2HymzrDrhF1KaGZ2VXql9xw9p0ohejLsUsSU6nV7GMyRkaPfDozruniet//D9318f3czQRAE+18XBMH+zxVFuaoF1N5/q6qKqqpomrb/Q9dn9pJ5oDJzSYzkNr2tLpYVCbscMYVimQjVC22q59os3jOb+wBm9mq23XmGIPDQVCPsUsSUsnsB+UhRdv/eAd/3sW37qmDn+/41Q95e0NujKMpVoe/KX7+W633tK399Lxi+Mhzu/VzXdTRNwzRNmfq/TZqukihYVDf6YZcippRmqARewM4LDQmAs8TzHTban8PS02GXIqaU49godppMbi7sUg4F13WxbZvhcIht2ziOsx/6XumVAWwvaL1ypE5V1f0Ru1sNYnth0nVdfN+/5sji3n87jnPNAArs12IYBqZpEolEME1TRhJvQrqQoBbv4TgOhiEP6mL0IgmDjSdr3PvWI2jG7D2szeRVqNY/R3u4RSoijXnFeHS6XXSWZfr3FWzb3v/hOM7+6N5eeFIUZT80RaNRTNPcD0wHOQW7FxhN89Y2iO0Fxb1Au/f3GwwGdLvd/dHEvXBqmuZ+ONz7IXbtTgOX6Va6ZDKZsMsRUyiWjdDe6VG/1KFwLBV2OQduJgNgufssrj/AUKX5sxgPu+eSMUoYuhV2KaFxXZd+v0+/32c4HOI4u4ut90LQ3siYZVlXjY4d5mlTXdfRdZ1IJEI8fvXZ4nv
T2VeOcg4GAzqd3bNv96amDcMgEokQjUaJRqMzO1poRAzieYv6Zo8MmbDLEVPIiGo4A4/KuZYEwFkQBAHrrc9iqFE5+1eMheu6MEyRSs9W7z/Xdel2uwwGg6sCn6ZpGIZBKpXaD3mGYRzqoHc7VFXFsiws6+qHAt/390dDh8Mhw+GQXq+3fxzalYEwFovNVCBM5uPUYxVc152pv7c4GIqiYER11r9U5fSfW5q5TDBzn6j2cJP64AIxoxB2KWJK9XpddH+ObHwx7FLGyrbtq0b4XHe3q/7eurxkMkksFiMSkV2cN6KqKpFIhEgkQjKZ3P/1vSC492+8N1K4N8K4N0I4zdPG2VKSjUSFXqNLKi1rtsXoxbIRGpc7tHf6pOZiYZdzoGYuAFZ6zzN022Qiq2GXIqZUvz8kqeexzPirf/EhYtv2VSN8exs0NE3DsiwymczUB5KDtBcKs9kscHXg3ltTCLv//pFIBMuyiMfjU/XvbyUixLImne0BKSQAitGzkgbNrR7V820JgNOu0nsBUGZuqFccnGBoEbemo8G4bdt0Oh263S62bQO7U5J7o0+zNiUZpr1NIumXR8Jc190fIdwbLazVapimSTweJ5FITEUYjGeitMx22GWIKbXbBxSq51ocfXy2ujbM1JXb8122O08SlfYvYkxs20ZxEyQy2bBLuW3D4ZB2u02v18O27f0pynw+TyKRkMA3IXRdJ5VKkUrtLl53XXc/rDcaDer1OoZhEIvFSCaTh3YqPp6OoUab2LaDaUo7GDF6VsJg+/kmnuuj6bOzNnmmruTN4WW6TpWEIUdzifHo93vowSKp6OFaY7q3G7XX6+1v3ohGo6TTaeLxuIS+Q0DXdTKZDJlMZn9DTrfbpdls0mw2D20YTOZjaFHo13uYpjy8i9GzUibd6oDWZo/sSiLscg7MTF3V6/1zOF4X05qutVlicgwGNgk9cyjav+yFvm63i+u6KIpCJBIhnU6TSCTQNC3sEsVt0nWddDpNOp3G8zw6nQ6dTodWq0Wz2UTX9f1p4lfuSp40pmVgpUx62wOQdYBiDMyYTv2yS/1yRwLgtKr0nkdRNFn/J8YmGEaIRyZ3+td1XRqNxlWhz7IsstksiURi5lqzzAJN0/bDoO/71w2DmUxmYkd6Y1mLtt4JuwwxpRRFQVUVqudaHHvN7DTvn8xP+xh4vsN25yksTZ4gxXg4jo3ixomnJ+9cyV6vR71eZzgcEgQB0WiUXC5HPB6X0DdDVFXdXzfo+z7dbpd2u02z2aTVau3vOo7FJms3ZCIdYyfakmPhxNhEkubuOkDHn5lj4WYmADYGl+jZNZKR2Un34mD1B330YI60NRkNoH3f37+xu6571UjQpI70iIOjqirJZJJkMonrujSbTdrtNpubm/sbTNLp9EQ8IKT21gE2+hhpCYBi9KIpk3a5T3OzS241+ep/YArMzF2g3j+P4/cw1Ml6shXTYziwieoZTDPcIwaHw+H+NG8QBFiWJaN94oZ0XSefz5PNZul2u7RaLWq1GvV6fX96OMyNI2bUxEoZ9HcGpJi9I7vE+O0eC7e7DlAC4JRpDC4Cqqz/E2PjDXTiZjjTv3tru5rN5n7rlkQiQSaTmYpecOJgXDkqaNv2/oNEp9PZ70EY1lrRWNqio3UP/PuK2aAoCoqq0FifnffYTATAIAgo954josnuXzEeruugOEkSyYPdALK3qaPdbuP7PqZpks/nSaVSMton7ohpmpRKJXzfp9Vq0W63KZfLVKtVksnkgW8aSWSilKMtXMdFN2bi1iUOmBnVqZ5rEQTBTAwWzcSnqO/W6Q53iOgydSDGo9/vo1EkcUD9/2zbpl6v70/zxuNx0uk00Wi4089i+qiqut9fsN/v7/cVbLVaxONxstnsgYwyJ3NxtJhCv9knaczGFJ04WJG4Qac6ZNCyiaYPT6/M2zUTAbA12GDotUmYs3XMizg4w6FNREkRNcfbQ+rK4AeQSqUmun2HmC57RwDujTy
3Wi06nQ6JRIJ8Pj/W96GViGDGdIblIUkkAIrRiyQMOtUBra2eBMBp0Ryu4wcemiq7x8R4uLZPyhhfiyHXdanVarTbbRRFIZlMks1mJfiJUOi6TqFQIJPJUK/XabfbdDodkskkuVxubO9LK2XS8+2xvLYQmqESeAGt7T5zpye3n+uozMTdozG4gKLIeigxRk6U6BhOmHFdd/8GC7sjfhL8xKTQdZ1isUg2m/2qIDiO96kVM8Hsj/Q1hbiKCo3Ls9F0fOrvIn7gU+k+T0SbneNdxMFyXRfFi2JFRjctdeUUG0AikRjryIoQd+LKIFir1fY3jYx6iYIVj6BEfFzXlc+CGItI3KByro3vB6jqdG8EmfpPUM+u0HOqRDTZACLGY2gPUUkTjdz5e8z3/f0bKEA8HieXy8npB+JQ0HWdUqm0PzXcarVotVqkUilyudwd70yPpSJoEYVh35YAKMYiEjfoNYf0agMSheneVDf1n6C2vc3Q65A0F8MuRUwp27bRA4uYefsB0Pd9Go0GzWYT3/dJJBIHtrtSiFEzTZO5ubn9TUt7p4yk02kymcxtB8Fo0kKNqDhNG+LS1F+MnhnXaZf7dCoSAA+9nlMhCHw0der/qiIkju0S15OoqnZbf77dblOtVvE8b3/ET4KfmAZ7QXBvanhvVDCfz5NM3vqSCVVTicQ1eq5sBBHjoem7G0F6tWHYpYzd1KeiznAHCMIuQ0wx31GxjFsf/bNtm3K5zHA4xDAM5ufnsSxrDBUKES7TNJmfn2cwGFAulymXy7RaLYrF4i0/7EQSJm1l+m/OIkRKQLc2CLuKsZv6ANgYXkJXp7+fjwhHEATgRLESN78DeG+6t9FooCgK+XyedHp8LWSEmBSWZbGyskKz2aRWq3H58uX9JtM3Oy0cjUUg0puZ0xrEwdMMjebG9B8JN9UB0A98moPLGKocASfGw3UdFN8iepPr/3q9HpVKBcdxDqR5rhCTKJ1OE4/HqVar1Ot1Op0OhUKBWOzV1/VFEhFUM8B1XAxTNkeJ0TOiOs2t3tTvBJ7q5nh9p87Q7WBoslhYjMdgOETDetUdwK7rsr29zebmJkEQsLCwwNzcnIQ/MbN0XWdubo6FhQWCIGBjY4Pt7W1c173hn4ulLFQThkOZBhbjYUY1hh2HQWu615pO9d2n51RwvC4Jsxh2KWJKObaDTgzLuH6fyb3priAIyGazd7QLUohpE4vFWFlZ2V8W0ev1yOVy110WYcVNdEvFrk/3zVmEx7B0urUh3eqAWGZ6l5BNdQDs2hW8wEFTZEelGA/HcUnpyWuuRRoOh+zs7GDbNtFolEKhILt7hbgGVVXJ5XIkEgkqlQqVSoVWq0WpVCISufoGrCgKkaRB+5ITUrVi2mmmimf79OrTPco83QHQqQDIQmExNoGrYepXLzG4spmzqqqUSqXbankhxKwxTZPFxcX91kjr6+vXbCJtRHV8Zfp3aYpw7GWG7pS3gpnqANizqyhI+BNj5JqYV7RuGQwGbG9v43keyWSSfD4v071C3KJkMrm/SaTVatHr9SiVSvttkkzTAEPOBBZjpEC/Od0BcKrvTB17R1rAiLHx/QA8E8PY7RZfq9VYX18HYHFxkWKxKOFPiNukqirFYvGqTSK1Wg3YXaOF7uEHfshVimmlmxqd6nSPMk/tCGAQBHSdCpoEQDEmnuegYKARYX19ncFgQCKRkOAnxAhFo1FWVlYol8vU63X6/T6mGUU1FDzHQzXlsyZGT4+odKuDqe43ObUB0PUH2G4HXZWTFcR4OK6D4iVpVLvomilr/YQYE1VVmZubIxaLUalU6Dl9lIiP03WkF6AYC83UsLsu7tDbHXGeQlP76DRwm7j+UKaAxVj4vk+75kA/gxWJsrS0JOFPiDFLJpMsLS1hWRH8uE0rqOL7Mg0sRk83VVzbY9ie3t3mUxwAW7j+QAKgGDnbttneqKF310iYBVZWVqW9ixAHxDRNlleWiecstLzHdnUT257em7Q
Ih25qeLbPYIoD4HSOa7I7Aig9AMWotdttOlWFBHeRih4HTwu7JCFmjqIoJDJREkmVdmRI9fImiUGWZEpG4cVoaKaK5/gMOtMbAKd2BHDotkBRpnbxpjhYvu9TLlfolxMUjAc4s/RGDNWSzR5ChERVVQxL4/Tja+TOxOhZdcrlskwJi5HYyw7D9vSeODO1I4BDr4MSBGGXIaaA67pUdhqYgzUW0ncznz2Boih4noemyQigEGHQNA3P8zAiBiceXGErXWHr+RrbW5sU03PoxtTe3sQBGnZvfDb1YTa1wxeO10fin7hTg+GA8kab6PA0x0uvZSF3cv/J0Pd9CYBChETTtP3RPkVRWDhe5NgjS8RWodzaYtCf7ia+YvyCANyhF3YZYzO1AdD22ijT+9cTB6DT7VDf9EgFd3Nq8XWk46Wrfl9GAIUIz5UBcE+6kODEY6skj2vUnW06nU5I1YlpoKoKw66sATx0Bm4bTZX+UOLWBUFAvV6nu22R1+/j1NLriJpXLy73fV9GAIUI0V4AfGUIjCYinHxsjeypKB2tRr1eJ5DlQOI2qLqCLZtADh/ba6MqsgZE3Brf9ylXKrj1AvPxhzix8BiG9tU7yX3fR1EUCYBChGTvs3etTR+GqXPioRXm707jpNqUq7I5RNw6VVenegRwahPS0O1IABS3ZG+zhzE4wnLmPuayx677tVeuPRJCHLwr1+Jei6qqrN61QCRmsvFMhe2tLYrZErou9wVxczRNwZ7iTSBT+UnwAw/b66EpMgUsbs5wOKC20yfmnmKt9BCZ+NwNv35vSknawAgRjr3P3qtN784dyWPFIpx/cpPypS1y8SIRSw4IEK9O1VXsvovv+aja9F3rp+9vBLjeAD9wZQRQ3JRut0NtyyXl38Wpxde9aviDr9x0ZARQiHDdzPq+dCnBqUdXSRzXqA236XZ6B1CZOOxUXcF3g6ndCTyVAdDxJQCKm9Nut+jsGOS1ezm1+DqiZuqm/pxMAQsRrpsdAdwTTUY49eju5pCWUqHdao+zPDEFVF3F93zc4XSuH53KAOgHLkHgoyhT+dcTI9JsNemVo+Qj93Ji4XEM/eanhYIgQJGTZoQIzautAbwWI7K7OSR/Kk7XqNNqNsdVnpgCigK+H+C7EgAPjSDwCPClD6C4rkazzqCSoGjdx7G5h1HVW9vNGwTBfggUQhy82/3sqZrKsfuXKJxK0Is0aTYaoy1MTA1FVSDYDYHTaCoTko9HgNycxbXV63Xsaoa52H0cmXvgtjZyyBpAIcJ1OyOAe1RV5ci9i5TOpBhEW9Rr9VGXJ6aAoii7D/tTGgCncpGcH3i7Z7ggN2dxtWq1itcssBC/j+Xi3bcd4PZuOrILWIhw3OoawGv9+bW7F1A1hZ0vN6nVAnK53ChLFIecokDgg+9NZwCcyrtXEPgyBSyuEgQBlWoFv1liMfnAHYW/K8kIoBDh2Pvs3ckpH4qisHJmnoW7srjpDpVKRU4NEV+hMNUjgFOZkHbXAMoUsNjl+wGVSgWaCyylHmK5cNcdvzdkE4gQ4dr7/N1pYFMUhaXTcyzenSfI96hUyxICBfDyQ0aABMDDxMfb3QUsU8Azz/cDypUKSnuZ5czDLOZPjeR19zaByBSwEOFQVXX/czgKiyeKLN1TgOKAnco2fjCdOz/FzVPU3QcMmQI+RILABwKm9K8nbpLvB5R3KuidVVZzjzCfPT6y15YdwEKEbxQjgFeaP1pg9d4S2pxDubIjIXDWvbwGUEYAD5HdNYABcn+eXUEQUK1W0HsrrOYfoZQ+MtLXl4PlhZgMo/4sFldzrN43hzpnU6nKmsBZthchpvUtMJUBUFE0FFT54M6wWq2G0l5kOfMAxdTqWL6HjAAKEa5xfQYLyxmW7yqi5AfUarWxfA8x+YJgdyewqk7ntX4qA6CqaCgoBEgAnEX1Rn231UvqXuZGOO0rhJgdc0fyLJzO4aW6NOqNsMsRIQiCABRQNAmAh4aqaCiK+vJaQDF
LWu0mdi3NXPweFnNnxvZ99hagCyHCM+6NWAsnixRPpRlGW7Tk7OCZEwS7G0FUCYCHh6JogEKABMBZ0um26VeiFKyzrBTuGesU7d7ic1kLKEQ4fN8f+2YsRVFYOTtH7kScnl6n0+mM7XuJCeS/3O5LpoAPj70RQGQKeGb0+j06ZZ2scZajcw+OvT3L3utLABQiHAd1Go+qqhy9d4ns8SjtoE6v2xvr9xOTY3cEEBkBPExUXl4DKFPAM2EwHNDc9sloZzi+8Aiqqo39e0oAFCJcB3kco6qpHLt/hfQxk6ZbYTAYjv17ivDtN/yXAHh47O8ClhHAqWfbNvXtIWnlNMfnH0VTD+Z4a1VVURRFAqAQIfF9H0VRDqwZu6arHH9gmeRRg3p/B8d2DuT7ivAEfiC7gA8b2QQyG1zXpbrdIemf4sT8Yxh65MC+994mEAmAQoRjbw3gQZ7GY0QMTjy4QnxNp9LexnXdA/ve4uAFAaAqqNpURqXpDICaaqIqGn7ghV2KGBPfD6ju1Im5Jzi+8BimGTvQ7y9TwEKE6yCngK9kRk2OP7hMdEWhXJfTQqZZ4AWoqoJmTGVUms4AaKhRVEXHD2SIflrVqlX0wRprxQeJmqkD//6atrvOUAKgEOHY++ztfRYPUjQZYfWeecw5j1pVGkVPK8/1UXUVPXLw77GDMJUBUFMNdNXCD2R4fhq1Wk38doGF9N1k4vOh1CAjgEKEK6wRwD2ZUpL50zm8dI9WsxlKDWK8fNfHsDQZATxsInoSTwLg1On3e/SqJnnrNAvZk6HVIQFQiHCFHQAB5o8VyB+L09Wa9Pv90OoQ4+G7AWb8YDYWhmGqA6BMAU8X13Vo7Nik1ZMcmXsg9LN4ZRewEOHZ2wUcJkVRWLt7idSaSaNfwXVk0GGaeK6PlTDDLmNspjYAWnpKpoCniO/7VHaaxPzjHJl/+MDavdyIHAcnRHgOegfw9Wj6bqPo2IpKubEjD4VTxHcDIgkj7DLGJvxPz5hEtJTsAp4i1WoVY7DKkeKDRM1E2OUAMgIoRJgmYQRwj5WIsHrPAua8R7VWDbscMSK+LwHwUDK0KJNxaRB3qtFsQLvEYuZe0vG5sMvZJwFQiPB4njcxARAgXUywcCZPkO7TlE0hU0FRwLCmcwcwTHUAjMk5IFOg1+8xqFnko2eYzxwPu5yrqKoqAVCIkPi+PxFTwFeaO5InfzxBT2/S78mmkENPAuDhFNGSQCBrtA4x13Vp7jiktZOsle6bqKd9AMMw5CQAIULieR6GMVnTc4qisHrXIum1CI1hRa4Ph1gQBCBTwIeTpadRFUN2Ah9SQRBQrdSJekc4MvfQRGz6eCXDMPA8WWcqxEHzfX8iAyDsbgo5cs8i1qJKpVaRQYhDyncDVF3FSsou4EMnqqfRVQvXH4ZdirgNrVYLtTfHUu7uidn08Uq6rhMEAbZth12KEDPFdV2CIJjIAAi7m0KWThVQczbtZivscsRtcIceekQjkpzM99goTG0AtPQ0uhrB9QdhlyJukW3b9Oo6OesExfRa2OVcl2nuPhk6jowyC3GQ9j5zkxoAAQorWbJH4nTVJrYt14jDxrU9dFPDkgB4+BhaDFOLywjgIeP7AbVyiwRHWS3eF3Y5N6Tru9PSss5HiIO1FwD3PoOTavXsAvElnWpzBz+QDWOHiTv0MGM6RnSy32N3YmoDoKIoJMyiBMBDptGoYwyXWS3ch6FHwi7nhnRdR1VVmQIW4oA5joOqqhMfAI2IzsrZeYyST6PeCLsccQtc2yeetyZu8+EoTW0ABEiYJZkCPkT6/R5OM0Epfnqi+v3diKZpMgIoxAFzHAdNOxztOdKlBMVjaWyrI+cFHyLu0CNRtMIuY6ymOgDGzDyBdAM8FDzfo1EZkFSPsVQ4G3Y5N03XdQmAQhwwz/MmfvTvSkunSiSXTRq9Kp4nU8GHRTQ92bNQd2qqA6ClZ0AC4KFQq9awnDXWSvejqofjyR52A6C0ghHiYB22AKh
qKqt3LxCZD6jVK2GXI25SNDW9LWBgygNg3CyiKgaeL2u0Jlmn2yFoF5jPnCVuZcIu55bs9QKUE0GEOBiT3APwRuLpKHMncvipAZ1OJ+xyxA14jo+qKcTzMgJ4aCWMIqYWw/Z6YZcirsN1XdoVn4xxnPnMibDLuWV7oxDSCkaIg3EYWsBcz9zRPOmVKG2nLktHJpjddzGiOvGcrAE8tGJmHlNL4PgSACdVvV7H8lZZncCj3m6G9AIU4mDt7bo/jAFQVVVW75rHmleo1WthlyOuw+m7ROIG0YxMAR9aqqKRtpawvW7YpYhr6Pd7+J0cc+mTWGY87HJuy95NSAKgEAdjb+TsMAZAACseoXQ0i58YyK7gCeX0XVLzUVRtqiPSdAdAgIy1Jr0AJ5DvBzSrA5LK6qGc+t2jqiqapkkAFOKA7LWAUdXDe/uaO5YnsWTQ6NSkQfQEcmyfzNLhHJS4FYf3E3STEmYp7BLENbTaDXR7gaXiXYf6Qg676wCHQ3nIEOIgDIfDQ7UD+FpUVWXp1Bx60aMlZwVPHAWI56d7/R/MQACMm0UURcXzZcHtpHBdh37DIBc9Rjp2+AO6ZVkyAijEAXEcB8s6/DfndCFBbjVBX2nhOnJ/mhSe66OoTP0GEJiBAJgwi5hqHMeXdYCTolptEPNXWS7cFXYpIxGJRAiCQEYBhRiz4XBIEAREItPRnmPp1BzReY1qQ3oDTgpnRnYAwwwEwJhRwDJSDF3puzQJur0OdPPMp09j6tPxAYtGowAMBnLsoBDjtPcZ2/vMHXamZTB3PAcpm15XulVMArvrYiVNYrnpeMi4kakPgJqqk4seZ+i1wy5l5vm+T6tmk9LXKKaPhF3OyOi6jqZpEgCFGLPBYICmaYd+DeCViqtZkksRmv26NJSfAMOuQ241gaZPfTya/gAIkIsexQ9kjUXYGs0Gpr3ESvGeQ7/x45VM09zvTyaEGA/btvd7b04LVVVZOj2HUfJoNhthlzPzPNcnt5oMu4wDMV134etIWYsA+IGc2RoWx7Gxm1HyseMkrFzY5YycZVnYti1P8CF797vfzfve976wyxBj4Ps+tm1PxQaQV0pmY+TW0gy0Do4tG8rC4ns+iqKQnJuOJQavZiYCYDqyRERPYnuyDjAstVqTWLDCUv5M2KWMxd5NSTaCTKYf+7Ef49SpU3z4wx++7dfY2dnh7/ydv8PXf/3Xc/r0aQmaB2zvszWNARBg6USR2IJGrVkNu5SZNey6ROI6qVIs7FIOxEwEwLhZImbkGbjSbykMg+EAulmK6WMY+nQurLUsC0VRpLP/BPqd3/kdvvCFL1Aq3VnLIdu2yeVyfN/3fR9nzkzng8wk6/f7KIoytQHQiOgU1rIECZtBXx4kwzDsOMSyEeKF6XyPvdJMBEBVUSnFzzD0JACGoVlvE1cXmUsfC7uUsVFVVRpCH7Ber8cP//AP88ADD/C6172On//5n/+qr9na2uKf/tN/yr/6V//qjo8OW15e5h/9o3/Et3zLt5BMzsYaoUkyGAzQdX3q1g9fqbSaI1bSabTrYZcyk+yuQ+F4GlU9fOfS347p2Ur1KrLRI/iBTxAEKMps/I87Cfr9HvTylLInUFUt7HLGKhKJyE7gA/T+97+fT3/603zwgx8kn8/zUz/1Uzz11FOcPXsW2F0z9iM/8iN8z/d8DydPnrzma7zlLW9hY2Pjut/j4YcfvmawFAdvWhpA34imq5SO5Dhf3qHf709Nu5vDIAgCfD8gu5wIu5QDMzMBMG0to6sRXH+AocmH6qA0Gx2S2nEKqbWwSxk70zTpdDp4noemTXfYDVu32+UjH/kIH/jAB3jta18LwE/8xE/wxje+cf9rPvShD6FpGn/tr/21677Oz/3cz+G61+8QMC0Nhw8713VxXXcm/vcorGQoX6zTeKEuAfAAuUMP3dRIL8zG+j+YoQCYtdaIGln6bkMC4AHp9rqo/RJz+ZNTPW2z58qG0PH49B8kHqaLFy/iOA7
333///q9lMhmOHj0KwJNPPskv/dIv8dGPfvSGI/5LS0tjr1XcuWnfAHIlVVWZO5qnV9mi2+0Rj89OIAnToGUTTZtklmQEcOoYWpRS/Czn6n9IKrIQdjlTLwgC2vU+Ke00ucRs3GRN00RRFAmAE+Azn/kM1WqVN73pTfu/5nke//Jf/kt+8Rd/kY9//OOATAEfFnsbQKatB+D15BZTlOdqtJ6rE4tFZdnSAei3HI48VsKwZmf2ZmYCIEAxfpoXa78v6wAPQLfXRRuWWCjOxugf7D65G4Yh6wAPwOrqKoZh8MQTT7C4uNvns9lscv78eR577DG++Zu/eX9qeM973vMevvmbv5l3vOMd+78mU8CHw3A4xDCMmbqWzB/L093ZpNvtkkjMzqhUGIIgwPcCisfTYZdyoGYqAOajxzDUGI7fw9RkhGZcfD+gXR+SNVbIxGdrtDUajdJsNvF9f2ZuVmGIx+O8853v5P3vfz+ZTIZ8Ps9P//RP7/+bZ7NZstnsVX/GMAyKxSLHjn1lN/qtTgE//fTTwO4O5FqtxtNPP41pmpw4ceIO/0bienzfZzAYkMlkwi7lQGXmUiQWajSfaRKPx2XQYoycvodhaWRXZitoz1QATFsrxM08fachAXCMOt02hj3HQunkzF20EokEzWaTbrcrrULG7Ed+5Efodru8973vJR6P8573vId2e7xnfr/97W/f/+8nn3yS3/7t32ZpaWl/SlmMXrfbBZi5ZRWKorBwrEh3+zKddodkSq4n49Jv2cSyETKLM/YeC4IgCLuIg/TJyx/i+ervMRe/K+xSppLvB2yv18krD3Nq6bWv/gem0Pnz54lGo8zNzYVdihCH3vb2Nv1+nyNHjoRdSiie+/QF6k8PmcstoioyqzAO2883OPn6BR799lNhl3KgZu7dVIidIgg8Ziz3HphOt4XhzLGQna0P0pWi0aicCCLEiMx6P7yF4wX0nE+nJUeZjkMQBAR+QOHYbK3/gxkMgLnoUUwtwdAb71TRLAqCgG7TJWksk4wVwi4nNLFYbH/dkhDi9g0GA3zfJxab3VYoyVycxLxF12nLwMUY2F0XM6bP3Po/mMEAmLaWSUUW6Dly4Pao9XpddLtIKXM07FJCtbdWqdORJ3Yh7sTeZ2jW1v+9Umk1i5bx6PVkZmHUevUhqbkYqfnZG2WeuQCoKhpLqUcYenJzHrVOq09cWyATnw+7lFCpqoppmjINLMQd6vf7mKY58zvqM6UUsYJBu9cMu5SpM+w6LN2XR9Vm7z02e39joJQ4i66YOJ7coEdlMBxAP0chvTZzO3+vJR6PY9v2DXvMCSGuz3VdbNue+dE/2N0RXFjJQtJhMBiGXc7UcAYumqnOXP+/PTMZAAuxEyQiJZkGHqFms01UmaeQXA27lImwd9Paa2EhhLg1s9r+5XoKyxmiBY1WuxF2KVOjVx+SKETJr81mi52ZDIC6GmEx+RB9txF2KVPBdR2CbpJ8chVVnZ1jdG7ENE0Mw5AAKMRt6na7GIYxM8e/vRpVU8kvZfBiQ1xHZhZGYdC2Wbwnhx6ZzfvWTAZAgPnE3SiKiuc7YZdy6DWbLSJBkWL6SNilTJRoNLq/i1EIcfP2dtHPcvuXaymsZIhkFFrtVtilHHqe46OoCqWTmbBLCc3MBsBi/DQxIyfTwHfI933sjkE6uoypW2GXM1Hi8ThBEMhmECFuUb/fJwgCmf59BdMySC8kGCpdebC8Q73GkGg6QvFYKuxSQjOzATCiJ5lP3EvPrYVdyqHW6bYxvMLMt365FsuyUFVVpoGFuEXdbhdVVbEseah8peJKFj0T0OnIdeVO9BtD5s9kiCSMsEsJzcwGQICF5P0EgY8fyHqK29VrOSSNRRJWNuxSJs7eDUxGAIW4Nb1eb/8BSlwtkY2RKFl0bZkGvl2+6xMEMH82F3YpoZrpT9d84l7iZoGuXQm7lEOpP+ijDvPkU7Lz93oSiQSu69Lr9cIuRYhDodfr4XkeicTsncx
wswrLGdSUR78vpw3djm59SDwXYf50JuxSQjXTATBqpFlOPULXkQB4OzrtLhGlQC6xFHYpEysej6OqKq2WPK0LcTNarRaqqsr6vxvIzqewMhqdrhxpejt69SFL9+axUrO9w3ymAyDAUuohVEXD9aW55q3w/QC3FyETW5RpmhtQVZVEIkGv15NF20K8Ct/36fV6JBIJua7cgKqppOYSuPoAP5Dryq1whx6KqrB4Tz7sUkI385+wUvwuUpFFOvZO2KUcKr1+B8PLkUvJ6N+rSaVSBEEgo4BCvIpWq0UQBKTTs3kyw63ILaTRUwG9jiwvuRWd6oDUXJTiCXmPzXwANDSL1czX0HfqYZdyqHTbA6JagURkthfR3oxIJEIkEtk/2F4IcW3tdhvTNKX5801IZKNEcwadvlxXbsWgZbPyUBHDms3mz1ea+QAIsJh8AEOLMnTlg3QzPM8l6CfIxBfl3N+blEwmGQ6HDIey1ECIaxkOh9i2TSo1u33ZboWiKGTmUgQxG0/OHL8pdtfBsDQWzkrXCpAACEA+doJs9AgdezvsUg6FdqeDEeTIJ1fCLuXQSCaTKIoi08BCXEer1UJRFJLJ2TyX9XbkFlIYKaQn4E3qVAZkVxLkj8h7DCQAAqAqGkcyr8P2ugSyoPZVDbouCWMey5RdejdLVVVisRjdrnTwF+KVfN+n2+0Si8Vk88ctsBIR4gWLniMB8NUEfoDdd1l9uISqyXsMJADuW049QszMS0uYV2HbNsogQza5EHYph04qlcLzPDkZRIhX6Ha7eJ4n07+3ITuXQkm4OLaca38j3fqQWDbC0r2ybn2PBMCXJSIlVtKPym7gV9FutzHJkU0sh13KoROLxdB1nXZbencJcaV2u42u68RisbBLOXSyiynMtEq7I9eVG+lW+yzflydRiIZdysSQAHiFtfRr0VQT25MRmmsJggC7p5KyFjC02T0/8U4kEgkGgwGuLNoWAgDXdRkMBnLyx20yDJ3UXJRh0CMIgrDLmUh2z0UzNFYfLoZdykSRAHiFUuIshdgJWsONsEuZSP1BH83JkUtK77/blU6npSegEFeQ3n93LjufQkv5DORouGtqbfcoHElK779XkAB4BVXROJZ9E64/wA+8sMuZOL1Oj4iSJx2bC7uUQ0vXdekJKMQV2u02kUgEXdfDLuXQSheTRDIanZ5cV17J93xc2+fI18zJ5o9XkH+NV1hKPUzCnJOWMNfgDDRSVkl26d2hdDqN67r0etLBX8y2Xq+H53ky+neHVFUlkY/iKtJn9JU6lQGJgsWSHP32VeRO/gpRI81a5nV07Yqsp7iCbQ9RnTTJWCHsUg69eDyOqqo0Go2wSxEiVI1GA1VVicelpdSdSuWTqAlfms1fIQgCeo0ha48UsVJyuswrSQC8hrXM1xDRkwzcRtilTIxut4dJinR8PuxSDj1VVUmn0/T7fQYDWbMjZtNgMKDf75NOp2VWYQRSpTh6QqHf64ddysQYtBwiMYOVB2Xzx7XIp+4actFjLKUepmXLZpA9w4FLzCjK7t8R2bvp1Wq1sEsRIhS1Wm3/YUjcOcPQiecs+o4sLdnT2u6xeG+O3KrsML8WCYDXoCgKJ3J/Dk0xGbrSW8nzPZRBUqZ/R+jKUUDbtsMuR4gDNRwOZfRvDJLZGMRcPE82MQ67Dpqpcvy183Jm/XXIJ+86Som7mE/cS3N4OexSQtfrdtGCFJm47P4dpUwmg6qq1Ov1sEsR4kDtrf3LZDJhlzJVMnMp9DiywQxobvaYP52leDITdikTSwLgdaiKyon81wIKjjfbayr6/QGWliMWkamaUVJVlWQySbfblVFAMTNs26bb7ZJMJmX0b8SiyQiRtE5/MNv3LGfgoihw/PULqKqM/l2PfPpuYDH5AMX4KRqDS2GXEpogCPAGFglLFtGOw94IiOwIFrNib8RbRv/GI5GP4+nDme5i0dzsUTyeYuGubNilTDQJgDegqQYn82/Gx8H1Z3OEZjAcoLl
p0jEJgOOg6zqJRIJOpyPHw4mp57ounU6HZDIpjZ/HJF2IoyYChoPZbAfj2h6e63P8dQtoukScG5F/nVexnHqUXPQozcFsrgXsdXsYpEhJABybXC5HEASyFlBMvb33eDYrIzPjksonMBMqvf5srgNsbfbIrSRYuk82Lb4aCYCvwtAsTua/HsfvzeQooDMISESKaKo8rY/L3ihgu92W3XtianmeR7vdJpFIyOjfGGm6SixvMfRnr8eoa3s4A48Tb1jEsLSwy5l4EgBvwpHM68jHjtMYXAy7lAPleR7KMEkyKk9S45bNZgmCgGazGXYpQoxFo9EgCAIZ/TsAyWwMJTp77WCaG13yR5OsPSIzVjdDAuBNMLQoZwpvxfWHuDP0VDUY9NGIk4zKGYrjZpom0WiUVquF7/thlyPESPm+T7vdJhaLYZpyJNe4JbMxtCgMZmgdoDP0cB2fU29awrBkhPlmSAC8SauZx5lLnKU+uBB2KQdmMBiiKwmiZirsUmZCLpfD8zwZBRRTp9ls4nmejP4dkGjKQo+qDIezM2DRWO9QOpFm5UGZsbpZEgBvkq5GOF14C37gY3uzsbjWGfrE9Kz06joglmVhWRbNZlNGAcXU8H2fZrO5//4W46eqKtFMhKE3GyOAdt8l8OHUm5bQTVn7d7Pkzn4LllOPsJC8j8YMjAIGQQBOlJglzZ8PUrFYxPd9qtVq2KUIMRLVahXf9ykWZV3WQYolI2A6M9EPsLHeZf5MhqX7ZLnSrZAAeAs0VedM4RtQUKf+jGDbHqJ6ceKWTNkcJNM0icfjtNttOR1EHHq2be/v/JW1fwcrnomiRsEeTvd1ZNh1UFQ4/WeXpe/fLZJ/rVu0kHyAxdRDNAYXpvrJajAYoAVxElYu7FJmTj6fR1EUarVa2KUIcUeq1SqKopDLyXXkoCWyMTRrt5n/tAqCgMZ6l8W7c8zLqR+3TALgLVIVlbtK34ihxek50ztNNxw6RPQ0hi5rdg6aruuk02m63S6DwfRevMV0GwwG9Ho90um09P0LgRExiCQNhsPpXQfYawwxYzpnv25Fzvy9DRIAb0MxdprjuTfRstfxg+lcrO/bOjEzE3YZMyuTyaDrOuVyOexShLgt5XIZTdPkzN8QxdIRPM0Ju4yx8P2A1laPo18zR+G4dKq4HRIAb4OiKJwpvo1UZInWcPqOiPM8F8WJE49kwi5lZqmqSi6X219DJcRhsreGNZ/PSxeBEMVSUbA8PHf6GkK3Nnuk5uOc+XPLKIqM/t0O+WTepoRZ5GzxbQzcFq4/XUPs/b31f1FZtxOmZDKJaZr7uyiFOAz2drGbpkkymQy7nJmWyETRY9PXENodegw7Dme/dpl4XpYp3S4JgHfgWPZNzCXuotY/F3YpIzUc2BhKXBpAT4BisYjnedIWRhwa1WoVz/Ok7csEiKYsdEtjYPfDLmWk6pc7lE6lOfL4XNilHGoSAO+AoVncXXo7iqIycKfn9AbHdrCkAfREsCyLZDJJq9WStjBi4tm2TavVIplMStPnCbDbENrEdqfn2jFo2yiqwtmvX8GwpOnznZA7/B1aSD7AWvo11KepLYxrETHiYVchXpbL5dA0jUqlEnYpQtxQpVLZX78qJoMZM8GYjjWAe21fVh8usni3vMfulATAO6QqKnfPvZ24WaQ1XA+7nDvm+z64ESKmBMBJoes62WyWfr8vG0LExGq32/T7fXK5nLR9mSCRqAGGPxUdK1pbPeJ5i7NvXpGNHyMgAXAEMtYKdxW+ib5bx/EOd982x7FRgwhRMxF2KeIK6XQa0zSp1WqyIURMHN/3qdVqmKZJOi3HR06SaCKCaoIzPNztYJyBy6DlcPbNK2QWZYBiFCQAjsjJwtexmHqIWv+FQz0VbNs2KhYxQzaATJq9DSEyFSwmTaVSkY0fE8qKm6gm2M7hXQcYBAHVix0W7s5y/LXzYZczNSQAjoiuRrh/7l1E9BQdezvscm6b47poRDGMaNiliFewLIt
UKkW73abb7YZdjhAAdLtd2u02qVRKNn5MoEjMRI+oOM7hHQHsVAZYSYP7vvEIekQ2foyKBMARKsRPcrrwFjrODq5/OJ+2HNsloiVkfcWEyuVymKZJuVzG86ZjYbc4vDzPo1wuY5qmbPyYUIqiYMZ1XO9wBkDX9uhUB5z+s0vkj8jM1ChJAByxM8W3MBe/m1r/xbBLuS2BqxMxZP3fpFJVlVKphO/7ckycCF25XMb3fUqlkrSNmmCRuImnHM4HxtrFDvOnM5x601LYpUwd+cSOmKnFuH/+L6OrFl37EK7VciNEjFjYVRyId7/73bzvfe8Lu4xbFolEyGaz+1NvQoSh3W7T6/XIZrNEIpGxfZ/D+jmdJJGoAebhC4Dd2gA9onLvW9cwo7KzfNQkAI7BXOJuTubfTGu4jneIpoJd10XxIlgztgPYcRx+8id/kre97W3cf//9vP71r+eHf/iH2d6+s7Wcn/zkJ3n729/O3Xffzdd93dfxX/7LfxlRxZDNZrEsi0qlguu6I3tdIW6G67pUKpX9h5GD8jM/8zP8+T//57n//vt55JFH+I7v+A6eeOKJO3rNcX5OJ0UkFkExA1zn8FwrXNujtdPnxBsWKZ3KhF3OVJIAOAaKonB36e3MJ++l0n/+0OwKtp0hKhZRc7bO7xwMBjz11FN8//d/Px/96Ef5t//233Lu3Dm+7/u+77Zf89KlS3zv934vjz/+OL/5m7/Jd3zHd/AP/+E/5A//8A9HVnexWCQIAnZ2dkb2mkLcjL333EHv+j169Cg/9mM/xm//9m/zH//jf2RpaYnv+q7volar3dbrHcTndBJEEyaayaE5TSgIAqoX2syfznDX10vPv3GRMdUxiegJHlx4N584935aww3S1uSvX7BtB5XpHAHs9Xr8+I//OL/7u79LPB7nPe95z/7vJZNJPvzhD1/19T/2Yz/GO9/5TjY2NlhcXLzl7/ef/tN/Ynl5mb//9/8+ACdOnOCzn/0sH/7wh3nDG95wR3+XPaZpks/nqVQqNJtN6b8mDkSj0aDf71MoFDBNc6SvfaPPKcA3fuM3XvXzf/AP/gH/+T//Z7785S/z2te+9pa/30F8TieBlbBQTQWnZQOTv8Snvd3HSho88C3HiMSNsMuZWhIAx6gYP8U9pW/hMxu/QNTLYGqT3bzSdVwiahxNnb63xfvf/34+/elP88EPfpB8Ps9P/dRP8dRTT3H27Nlrfn273UZRFFKpr+w6e8tb3sLGxsZ1v8fDDz/Mz//8zwPw+c9//qtuSG94wxtGvpYpnU7T6XSoVqtEo9GR35CFuJJt29RqNSzLGssDx618Tm3b5td//ddJJpOcOXNm/9cn8XMaNk1XMaIa9iFYLmL3XPptm4f/0gkKR2XX7zhN351+wpwqfD3l3rOcr/8Rc4l7UJXJnXX3PB9Dm74+Xt1ul4985CN84AMf2L/Y/8RP/ARvfOMbr/n1w+GQD3zgA7ztbW8jkfjKaOjP/dzP3XC93ZUL4SuVCvl8/qrfz+fzdDodBoPBSPulzc3NcenSJba3t1laWpLdmGIsfN9ne3sbRVGYm5sb+evf7Of04x//OD/0Qz9Ev9+nWCzyC7/wC1e1oJnUz2nYdEujH0z2RhDfD6hdbLP2aIkTr18Iu5ypJwFwzDTV4MGFv0JjcIF6/yXysRNhl3Rdgaegq9Nzwdtz8eJFHMfh/vvv3/+1TCbD0aNHv+prHcfhb/2tv0UQBPyTf/JPrvq9paXJnMbXdZ1CocDOzg6NRkP6sYmxaDQa2LZNqVQay1m/N/s53VuvV6/X+Y3f+A3+9t/+23zkIx/ZD3KT+jkNm25q+MFkrwGsX+yQWYzzwDcfRTPkQXbcJAAegGRkjvvnv40/vvgz9JwaMWNCb9C+gTbDh7jvhb/19XV+6Zd+6arRP7i1qaVCoUC1Wr3q96vVKolEYiyjCslkkl6vR71eJxaLTdXIhQjfYDCgXq+TSCRIJsPdJBaLxVhbW2NtbY0
HHniAN7/5zXzkIx/hve99LzDZn9Mw6YYG+uRuSOw1hgQE3PuNR0gU5SSqgzC7d/sDtpp+nHL+63m6/FuYWmwiR9oU38DQpm8N2erqKoZh8MQTT+xv6Gg2m5w/f57HHnsM+Er4u3DhAr/8y798zdYWtzK19OCDD/IHf/AHV/3+H//xH/Pggw+O4q90TcVikX6/z/b2NisrKzIVLEbC8zy2t7fRNG2su35v5nN6Lb7vX7W7ddI/p2HRdQ00P+wyrskdejS3etz15hVWHiyEXc7MkAB4QBRF4b65v0RzuM5663PMxe+eqPWAvh8QuDqaNr6GrmGJx+O8853v5P3vfz+ZTIZ8Ps9P//RP7wckx3H4wR/8QZ566in+/b//9/vHW8HuJou9jRW3MrX0rd/6rfzKr/wK73//+3nHO97Bn/7pn/I//sf/4EMf+tDo/4IvU1WV+fl5NjY22Nrauq3dy0K80vb2Np7nsbi4ONaHilf7nPZ6PX72Z3+Wr/3ar6VYLFKv1/nVX/1Vtre3+YZv+Ib915n0z2lY9IgOmo8f+BN376mca7F8b4573romLV8OkATAA2TqcR5Z/E669g7V/gsUY6fCLmmf73soaBhTGAABfuRHfoRut8t73/ve/fYSe6dobG9v87//9/8G4Ju/+Zuv+nO//Mu/zOOPP37L329lZYUPfehD/PN//s/5xV/8Rebn53nf+9439tYSlmVRKBQol8tUKhUKBXmaFrevXC4zGAwoFAoHMiV6o8+ppmm89NJLfPSjH6Ver5PNZrn33nv5tV/7NU6ePHlb3y+sz2kYDFNH0RV810edoPV1tQtt0otxHnznCTnt44ApwWHpUjxFLrc+yx9f+BkMNUoyMh92OQAMhgNa61FOl76OdLwUdjniDu31BiyVSqGv2RKHU7vdplwuk0ql5EFiCjQrHZ7/w8uk3Dms6GQ86Ld3+rhDj9e85wxL9+Rf/Q+IkZqcx4AZspx6mHtKf5GuU2HoTsZZrp7roqJjGpO3NlHculwuRyQS2R/BEeJWDAYDyuUypmnKrvIpYUYMFB08fzJawQy7Dr3mkLv+woqEv5BIAAzJmeJbOZ57E7X+S3i+E3Y5eL6Hgo4+hX0AZ9HeekBN09je3pbzgsVNc12Xra0tNE1jfn5eNhNNCd3UULTdh/2weY5P/WKHo4/PcfrPLYddzsyST3ZINFXnwYV3U0rcRaX/XOjnBXuuj4qOrsqxO9NC13Xm5+fxPI/NzU18fzJ3AIrJ4fv+/ntlfn5+LP3+RDh0U0PV1dBHAIMgoHK+RelUhgfefgxNlxgSFvmXD1HUyPDo0ntIGCWq/RdDrcXzPXTVkh1YUyYSiVAqlXAch0qlEnY5YsKVy2Ucx6FUKl3VLkUcfoqioJsqnhdeAAyCgOqFNom8xcPvOk40PX1txw4TCYAhy8eO88jSd6EpBs3B5dDq8L0ATZEL/jRKJBJkMhk6nQ7NZjPscsSEajQadLtdMpnMVzVBF9Nh9zSQ8AJga6uHZqg8/K4T5FZlc1rYJABOgOX0Izyw8G3YfpeuHc4oTeAHaKpM90yrXC6HZVlUKhV6vV7Y5YgJ0+v1qFarWJYlmz6mmKIrBCGtBOnWBth9jwe++ShL98qmj0kgAXBCnMy/mbuK30Tb3gxlZ3BAgDJBzUHF6M3Pz2MYBjs7O1ednCBmm23b7OzsYJom8/OT0ZZKjIeiKgQc/HrzYcehUx5w9s3LHH/9woF/f3FtcsefEIqicO/cOzme+1pq/Zdw/QNu3RGAIm+Hqba3Mxh2m1/LphDh+z7b29sAzM3NyY7fKaeGEADdoUf9Uodjr53nnm+Qkz4miXzaJ4imGjy8+NdYTj9KufscfnBw2/WDQJERwBlgmialUgnbttnY2JAQOMN832djY2N/08fekYdieimKAsrBBUDf9amca7F0f54H33EMbYJOIBESACdORE/w2PJfpxg/zU73y/gHtGBDQZUnsxkRi8X2Q+DW1paEwBn
k+z5bW1vYtk2pVCIWi4VdkjgIKgd21/f9gJ2XWhSOpXj0W08SiUuLsUkjAXACJcwij6/8/8hYK5S7Xz6YHoGBKiOAMySZTFIsFun3+zIdPGP2wl+/36dYLMqO3xmicjAP+YEfUH6xSWYhxmPffop4Xg4YmERyx59QuegRXrPy/SQiJSq9A2gUHSgoB3RxEJMhmUxSKBTo9XqUy+WwyxEHpFwu0+/3KRQKck70rFEVUMd7LwmCgMq5FslilMf/6mmyK/KAMakkAE6wQvwkX7P8XiwjQ7X/wnhDYKDIFPAMSqfT5HI5Op0OOzs7YZcjxmx7e5tOp0MulyOdToddjjhgijrea/xeo2crbfLYXzlF4WhqrN9P3BkJgBNuPnkPjy//dQw1Sq1/bmzfR0E2gcyqbDZLNpul3W7LSOAUK5fLdDqd/f+9xexRx7wJpH6pg2FpPPptJ5k7lRnb9xGjIXf8Q2Ap9RCPLn83qqLSGFwcy/cIAgVV3g4za29EqNVqUavVwi5HjFi1WqXVau2P+IrZtLsLeDyv3VjvomgKj/zlkyzdI42eDwO54x8Sa5nX8PDSd+L5zniOjAvUsU8PiMm2tyasXq9Tr9fDLkeMSL1ep9Fo7K/5FLNLURjLCGBzq4fn+jz0juOsPlQc+euL8ZAAeIgcy/4ZHlr8qzj+gMbg0hi+g7wdZl2pVCKRSFCr1eTc4CnQaDSo1WokEglKpVLY5YiQKeroRwCbG13cocdD7zjG0a+ZG+2Li7GSO/4hoigKJ/Nv5tGl78IPXOr9C6N89RG+ljjMisUi0WiUSqVCq9UKuxxxm1qtFtVqlVgsRrEoozICRn2dr6938Dyfh991nOOvX5CNhIeMBMBDRlEUTuS/lkeXvhuAWv/caHYHKwFB4N3564hDb+/IOMuyKJfLEgIPoVarRblcxrIsOeJN7PM9n1GcBBcEAbVLHQAe+csnOf5aCX+HkVwVDqljuTfy2PL3oCo6tf5Ldx4CFQ/vgE4dEZNPVVUWFhawLItKpSIbQw6RWq22H/4WFhYk/Il9nh+Af2fvhyAIqF3soOoKj37bSY4+LtO+h5VcGQ6xI9nX8fjy96Jr1p33CVR8Al9GAMVX7IXAeDxOvV6XFjGHQLlcpl6vk0gkJPyJrxJ4PtzBc/5enz89ovHYt59i7WFZV3qY6WEXIO7MauZxVFXnk5c/RKX/HIXoqdsbilc9fJkCFq+gqipzc3Nomkaz2cTzPEqlkgSLCeP7Ptvb2/R6PdLptOz2FdfkB7c/Bbx3woeVNHn0206ydK+0ejns5Co+BZZTD/Oale8nZuTZ6T19W0EuwNu9OAhxDYVCgXw+T7fbZXNzU84OniC+77O5uUmv16NQKEj4E9cVeAF4tz5A4Hs+Oy80iWctHn/3KQl/U0IC4JRYTN7P69f+NlnrKNvdp3B9+5b+vKoq+L47purENMhkMpRKJYbDIevr67iuvF/C5rou6+vrDIdDSqWSHO8mbsj3AtRbPPHJtT12nm+SW07w2u8+y8Jd0kh8WkgAnCKF2AnecOSHWEjcx07vGWyvd/N/WFEIArmhixtLJpPMz8/vBw/bvrUHDTE6tm3vB/H5+XmSyWTYJYkJ57s+yi20gnH6LuUXWyzcleV133MX+TV5j00TCYBTJhVZ4PVrf5ujmddT7b/AwL25Zr6qCp6sARQ3IRaLsbi4SBAEbGxsMBgMwi5p5gwGAzY2NgiCgMXFRWKxWNgliUPA94KXjwN5dYO2TfVCmyOPlnjNd54lWYqOuTpx0CQATqGokeE1K9/P6cJfoDG4RNeuvOqfUWQEUNyCSCTC4uIiiqKwsbFBt9sNu6SZ0e122djYQFEUlpeXiUQiYZckDombnQLu1gY0N3ucetMij//VU0TT5gFUJw6aBMApZWhRHll8D/fN/yV6bpXG4OIN28QoioKHjACKm2eaJktLSxiGwdbWlhwddwCazSZbW1sYhsHS0hK6Lo0cxM3zveCGXSKCIKC
x0aXXsLnnrWs89JdOYFjyHptWEgCnmKbq3Df3Lh5b+h5Apdx79ro7hBVVRgDFrdN1naWlJSzLolqtUi6XZYfwGPi+T7lcplqtYlmWhD9xWwLPR71OAPS9gPKLLRQFHv3WE9z71jU0XSLCNJMryJTbOzouEZnnM+u/wHbnKQqxUxiaddXXqYqK73v4vi893sQt2WsYXavVaDabDIdD5ufnJaCMiOu6bG1tMRwOSafT5HI5+YyKW+b7Pr4XoF3jveMMParnWuRWEzz8rhOUTmYOvkBx4OQqMiPmE3fzZ478MCvpx6j0n6Pv1K/6fU1T8XFvuX2MELAbAguFAqVSCcdxuHz5Mr3eLexCF9fU6/W4fPkyjuNQKpUoFAoS/sRtcW2PwANN1a769X7TpnKuxfL9Bd7wvXdL+JshciWZIcnIHK9b+0HOFt5G296iObi8vy5Q0zUCxcF2+yFXKQ6zZDLJ0tISmqaxubkpZwjfgVqtxubmJpqmsbS0JG1exB1xBg6+G6BpuwEwCAKam106lT5nv26Z177nDImi7PSdJRIAZ4ypxXh46Tt5dOm7CfCp9J7DDzx0TSfAxXGlpYe4M3ubQxKJBPV6nY2NDTxPNhjdLM/z2NjY2D/Td2lpCdOUXZjizthDh8AF3TDwvYDKuTYB8PC7TvDgO47LZo8ZJP+LzyBVUTlV+HqSkXk+u/GLbHefIhs5io8iAVCMxN4ZwnubQy5fvrz/c3F9g8GA7e1tPM+jUCjIyR5iZJyBh++CT8D2iw1yqwkeesdx5s9mwy5NhERGAGfYQvI+3nTkRzmaeQON4QUCZYjjDcMuS0yRdDrN0tISABsbGzQajXALmmCNRoONjQ0AlpaWJPyJkXKHLoENzfUeRx4r8We+7x4JfzNOCW7UHE7MBM93+HL5Yzz3zB+T7B5n+djr0XSZchKj4/s+W1tb9Pt94vE4pVJJNjO8zPd9dnZ26Ha7xGIx5ubm5N9GjJTn2mw+8yXa5xRO3PsaTv/ZZTRD3mOzTgKg2Ff7zE+yudWjo6+QmL8LM54PuyQxZer1OrVaDcMwmJubm/lTLIbDIdvb2ziOQy6XI5uVERkxWsNOhe72MyTcS5TmTlF89NvCLklMCHkEEPsyCw9xNLFFUTlHb/3TdHaek6a+YqSy2SyLi4v4vs/6+jr1ev3V/9CUqtfrrK+v4/s+i4uLEv7ESPm+T2fnOfobn6GonONocof8YinsssQEkU0g4iuMJIahs5wrEK9X2Gp0afTqpBbuRo8kwq5OTIloNMry8jI7OzvUajU6nQ6lUmlmRgOHwyE7OzvYtk00GqVUKknTbDFS7rBDa/MpLPsyq4k+2WwBpd0AQ1oJia+Qq474CmM35ClKQD6fJxbrslF5gcbFNpHsUaK5o7I2SYyErussLi7SbrepVCqsr6+TyWTIZDJT+x7zfZ9Go0Gj0UBRFEqlkvT2EyPl+z792jns+jkyyhaLJYtoNE+wdwSoIQ/y4iskAIp9ipkCLQLeEPQo0WicI4sWtdom29UmjXaFxNwpzJhMVYnRSCaTRKNRqtUq9XqdbrdLsVicunYxg8GAcrmMbdskEgny+byM+omRsnt1OtvPErHXWYl3yeVy+02f8WzQrN1rvBAvkyuQ+AozA1oUvAHoux3hNU2jWCyQSPTYrj5H/XKdYWqNePEEqmaEW6+YCrquMzc3RzKZZGdnh/X1dVKpFPl8/tCPBvq+T7VapdVqoWkaCwsLxGKxsMsSU8T3HLrlF/BaF8hrZebmYkSjxau/yOuDZoEpD+/iKyQAiq/Q4yhmimBQhcjVF4poNMbqokWqUWW71aLRrRAvnSKSnAupWDFtYrEYq6ur+4Gp1+tRLBYPbWDq9XqUy2Vc152aQCsmy7C1Tbf8HDFvk+WkQyaTu/Z7zB2gWHnQD+dnSYyHBECxT1EUlNgCQXf9mr+vqiq5XI5EfMh29SWqm036rRWSpdNoxnRN2YlwqKpKsVgkmUyyvb3N5uY
miUSCYrF4aMKT53lUKhU6nQ66rrO0tDR1U9oiXJ4zoL3zLEr3EnNGjVIxhRm5wXpSb4ASW0RRlIMrUkw8CYDiKkq0BP6Nz201IxGWF4qkW022mk/TvFDDyh8nmlmRC4wYCcuyWFlZoV6v02g06Pf7FAoFEonJXsTebrepVqv4vk8mkyGbzR6a4ComXxAE9BuXGFRfJBlsMZ+BZKrw6tdd39u9tgtxBQmA4mqRLCgBQRDc8KKiKAqpdIZo3KFaucROuUmjtU2idBIjmjm4esXUUlWVfD5PIpFgZ2eH7e1tWq0WhUIB05ysk2ps26ZSqdDv9zFNk4WFhZlpayMOhtNv0Nl5Hn24zlKkRb6QwdBffR12EASgAJHM2GsUh4sEQHEVJZIFRYfABeXVLy6GbjA/XyTZabNVf5bWpQrEl0gUT6Cbst5E3LlIJMLS0hLNZpNGo8GlS5cmZiet67pUq1U6nc7+Eglp6CxGybV7dMovQHedtFplPm8STxRf/Q/u8R1QtN1ruxBXkAAormbmdncCu30wb36XbzyR5GjMp9Wss9Nu0LqwjZFaIZY/JucKizumqirZbJZkMkmj0aDVatHtdkmlUuRy11n4Pka+71Or1Wi1WgCk02kymUzogVRMD8+16VVfwmldIkmFUioglb6NJQXeYPeabubGU6g4tORqJa5mplD0GIHXB26tZ5SqqmSyWRJJj0Zjm3KzTrO9RSSzRjS3iqpq46lZzAxd1ykUCmQyGarVKs1mk06nQzqdJp1Ojz0I+r5Ps9mk2Wzied7EjESK6eH7Hv3aRYaN88T8Mktxm3Qmg67f5vXT66PoMTDToy1UHHpKEARB2EWIyeJ++ecJ6k+iJI/e0es4jkO91qA8iNPXSsTyx7DSshNNjM5wOKRSqTAYDNB1nWw2Syo1nma3rVaLer2O67pYlkWhUJB1fmJkgiBg0FinVztH1NuhaHXJ5tIYxp3NoATtcyi5+9BPf9eIKhXTQh5bxVdRE6t41S/c8esYhkFprkh62KdSPUd1p0q9sUC8cIzIraxhEeI69tYH9vt9KpUK5XKZZrNJLpcjHo+P5Ht0u11qtRq2bWOaJouLi0Sj0ZG8thAAw06ZbuUlDHuTBaNFoZggEhnRNdJ3URMro3ktMVUkAIqv9nK7gCDwUZQ7n1KLRKIsLUbJdruUG89S3yjTsxaJ59cw4/k7fn0hotEoKysrtNtt6vU6W1tbWJZFPp+/7R58g8GAarXKYDDAMAzm5uYmvg2NOFzsbpVu9TzqYJOCXqdYiBKLj+7hOAh8QAFLWsCIryYBUHwVJTr38kaQ3kgPD4/F46xEY+Q6bSrNp2iub9A1S0RzK5iJOemXJu5YMpkkHo/vB8GNjQ2i0SjZbPamg+BgMKBer9Pv91FVlUKhQDKZlPenGAnf97E72/Rrl1DtHbJqnULWJJHMj355jNvbPQNYegCKa5AAKL6alUcxkgRud6QBEEBVFZKpFPFEQK/bodZ+lsbWJj2jiJVZxkovyWYRcUdUVSWdTl+1Y3h9fR3LsshkMtedGu52uzQaDQaDAZqmkclkyGQyEvzESPi+x6C5zqBxGcMpUzBa5HIRYvE8qjqmddFuF8VIglUYz+uLQ002gYhrcp/9/7d3ZzGSXfd9x7/n3KX2qt6nZ984EilRJCWSDq3dEmMtpmRYlChZecm7JMgwHECAAcPwg/3gwA8WbMDwS2zIVhyFDmQZThxHS2InUhzBpiXLlESKwxnOPtNb7XW3k4fbMxxKHM7W3ber6vcBCjXdM8P5D3l576/O8j//AbfyT5jm3W0EuRnnHINBn7X1PqtRg8ifp9Q6QHnmIN4tNDkVuZksy2i322xsbJAkCWEY0mw2aTTyo7M6nQ4bGxvEcYzv+7RaLZrNpoKfbIk0iRmuv8Ro4wxhcoW5sMvsTJVKpbrtG+Jc+wXM/EP4r/+32/rnyHhSAJRXlZ39KunJ/4KZed2O/ZnD4YD19S4roypDO0fQ2E9
17rDOGZYtczXsjYZDTDYAwNkKpXL52qihyFZI4yH91VPEnbOUsxUWSgNaM3XK5Z3bQOTWf4h39Bew+9+7Y3+mjA9NAcurqyyBMTiXYszOTMmWyxWWlyvMRREbG2dZ2bhMu30ar7aPyux+HTEnd60SgvPaDNPzxNEQA/hhmbK/l4r6pMkWiAfrDNbOkvTOUXNr7K0OabVahOHObiByLgVjrm3qE/lxCoDyqkxlD/g1iHsQbk9ftRsJw5DFxQVmk5iNjYus9K7Q775IGi5Qbuyh1Nqn00XklmVZRty9TH/9DGZ4iZpZZ389odVq4Zyj3T7HaucS3fYpXHmJ6swBgvqipoDllqVJxGjjHMPORbzoClXTZr6W0mq18P2CRpXjHvi1/F4u8io0BSyvyrmM9Du/gxtextT2F1pLlmX0el3anRHrcY2RaWGqS1Ra+whr27BzTiZCPGwz3DhH1L1EmK7T9DaYbfjUGw28H9tolGYp3U6HtU5CO20RebOE9SXKrb0E5Z39ACTjwTlH1FthsHEO179EyW0wE/RoNUpUa/XCP0C43llMeRHvgV/eknZeMnkUAOWG0hf/guzsf8e0dm4d4M3EcUyn3WatD92sRezPUWrsodzajx+qOe+0S+Mhw43zjLqXsNEKVdNhpjyiUb+1RfdXNyV1un3WhyX6rkEWzlPaDINajypJNGC4cZZR5yJBskrdbjBbhUazSRDsno1rbuOH2AM/i3f4w0WXIruUpoDlhkz9IGC2rCH0VgiCgLn5eWZmHcNBn3bnedbWztJZ/xGUFqm0lgkby4V/+padk2UJUecig42LMLxMiTZ7wwGN2RLVeh3P3voInjGGarVGtVpjIcvod9t0eldYWz1Ne7WJqSxSbu4hbOzBWt0+p0WWZUSdCww2LsDoMhW3wWJpQHOmRrkyt31tXO7Q1QbQpqYTQOTGNAIoN+QGl0i+8zsQ1DE7vA7wdiRJQrfTYaOXsJE0ib0WXnWBUm2esLGkB/UEujr9NmpfJOldJHQbNL0uzZpHo1HH3+IWQkkS0+l0afdS2mmdyLbwq3sot5YJqnNahjCB8g8Wlxh1r5AOVgjSDVp+m1bdp15v4Pu7977iojbEXfwHfllNoOWGFADlhnbTOsBbNRwM6PZ6tAeGblojtg1seZ5SfZ6wvgcvKBVdotyhLEuIeyuMuisk/RX8dJ2a7TBTSWk06pRKOzM9OxoN6XS6rA88elmDxJvBr+bXWFCb1weOMZbGI6LuRUbdFbLhCqHrULM9mhVHvVajPCZnQGv9n9wK3ankhoyxmNbrcN0Xiy7llpUrFcqVCgvkD+pe7yKdwXk6l6q0L9ehNEdYW6DU3IMfVosuV24iifpE3UtEvVWy4SqB61IzfZrlhHqtSrkys+PTb6VSmVKpzFyWL0Po9k7S7r5Ev1OlZ+rY8hxhbY6wvqRrbAwkox6jziWi3hUYrRK6LnNBn0bdUqtVKZXG8LzypIeZebvCn7wmBUB5TbtxHeCtuvagJt880uut0O1foL1apbtaw4VzBNV5wto8fkVHfu0GWZaRDNYYdS8T91ex8Tqh6zMf9KnXParVCqXSzK6YcrXWUK3VqNZqLDrHaDSk379Id3COzuUq3StVsmCWoDpHqb6AX5nVNbYL5NfYOlFvhbi/golWKdFjKehTbwXUajWCYHyPTtP6P7lVCoDymkxt/2Y/wO6O9wPcSkEQMDMzy8xMvmaw32/T619hY73MYL1CaquYcIag0iKszm4GQp1JvN2cc6RRj7i/xqi3SjZcwc+6lE2fPaURlXqJarWG77/6+b27hTGGcrlCuVxhjpevscHgMuvtkOFGjcTW8+UItTmC6ixeWNsVQXbSZVmaB77+KvGgjYvW8bI+ZfoslEbUZsPNa2xnGzVvm7ib9/8bk2U7UhytAZTX5Jwj/d7ncd3TmPqhosvZcmmWMRoOGA5G9EcJ3bjMyOWB0IUtwkqLoDpLUJ3V2q4tkGUJcX89fyAPO7j
RBjbrETCk7g2olx21WoVSqbrrdlbeqSxzjEZ9er0B3aGhm1aIKZPZGqbUIiw38CszBNUZXWNbIL/GVon760SDDUy0gZf1KZkB9WBItexTLpcolSt4Ezgi67qnMPXDeG/8jD5gyGtSAJSbys8F/nPMzOuLLmXb5Q/rAcPhkMEwpROHjFyFmCombOFXmgTlBn65iQ1qmtJ7DdeP7sXDDumoDVEbnwEhI2rBgEpoKZdLlMvlLd+5u1slScxwOGQ4HDGIMnpxhYgyiSlD0MQr5deYRglvLssysrhHMmwTDzskgzYu2iAgD3yNIKJS9iiXy5RKlYn5UPFa3PoP8I5+ROf/yk3p46bclGkcAa+ESwYYfzx2wd0paw2VSt40eJY8EEbRkMHgMsPROTrtkNF6iZEpkZoSJmhgwyp+WMMv1fHLDbxgsv8dvZo0iUhHHZKoRxr1SaLB5uhen8ANqHsjamFMuRVQLpUJS3WsLeiIrIL5fkC9HlCv53//LHNEoyHD0RWGo/P0egGDdomuqZDZKqbUwg8reJvXmVdqTOVRiGk8IBl2SEZdkqhHFvVxcQfPjfDciLId0Qgjys2ASqVMGDanIvBdzyUD8EqYxtGiS5ExoBFAuSmXRqTf+fe4uIOp7i26nEI550iSmGg0IopjoihlkHgMkoDYhaQmJPMq4Nc3Q2ENL6zhhVWsXx7rEcN8tKVPMuqSRj3SaEga93FxD5MN8FyMT0ToxVRsRLlkN6faygRTMrq3VeIkZrQ5SjgcZQyykCgNSAhJTYCzFQhq+EEV72o4LNWxQXX8r7FkSBr1SaMeyahHEvUg6WLTAZ6LCExExY+p+Clh6BEGAWGphO8HUz9a6vrnMUED78F/h7H6f05em0YA5aaMF2Jm7sOd+yow3QHQGEMQhARByPXbErLMEccjolGHKF5lFKUM+gHDdkhkAlJ8nAlxNsT4Vaxfwvoh1ivhBWEeDv0SNijv+DqwLEvJkogsGeKSiDQd4ZKILI3zVxLjsiEkfayL8MlfNS+m7KeEVY8g8AmDkCAs43m7e8PGOAj8gOC6UUKANE2Jo4go7hPHK8RxymDgMezmwXBISGZC8KsYW8b6AdbLX8YP8bwSxr96rYU7vskpyxKyeEiWjPKQF0dk6Wjz2htB0odshHExHgmei6n7ERUvolTxN4NeSBDUp25k75bFHczSYwp/cksUAOWWmNY9cO7ruCzBaKH6T7DWXGs7c700yx/aSTIiSTZIkpQkTYkiy2gQEGceqfOIjU+GT4oHNgA8jPFwNn/HeGA9rLFgvbxHo7UY42GuPshdhnMubwPhMhwOMpe/uwycI3MOnMO5BJONcGkEWYwl3XwlWJfim5TApgReQmAzPN8SVHzCwCcIS/h+Qw/hHeZ5Ht5mn8vrZVk+Kh1HPaJ4gzhOSLOMOLLEqX/dNeaR4ZPhkW1eZ8YLcbaEMT4YgzUGjAFjMRiwJn83Nr/mNn8OwGUpzqW4LMuvuSwl23zH5T9nNn8NpJDFeNeusQRrUko2peTFhF6GX/bwfQ/f9/F9P/8wYfVh4la5LAEspnms6FJkTOhJLrfENI5C2IJoA8pj2Bi1IJ718Mo3XhPonCNNE5IkIU0j0jQlTVOyzOGc23yHzDmyxJHhkTpD5uzme/5jA2Ac1jgMDovLn+M4zE9832GNwQssXsnieRbP9/KA4fl4foCnFjhjw1pDGIaE4StHpa+XZilpkr58jSUpaZqRZhlpmm1eZ4Ysv2LInMG5/MfO8Yrv4wwOsCbDGodnHNZk+Tsp1hqsNRjD5nv+dX595SHP8wI8rzL1U7ZbKlqHsKX1f3LLFADllpigno8CXvkHBcAtZIzB94Op2QErxfCshxd6wPRtHpkWJtqAhYcxwYT0M5RtN76rhWXH2Zn7cC7bnNIREZHdwLkU5zLszH1FlyJjRAFQbplpvS6fBh6tF12KiIhcNVrPp39bJ4quRMaIAqDcMlOawc7cC6O1oksREZGrRmvYmfs
wpZmiK5ExogAot8XM3g9c3XEmIiJFunovNnP3F1yJjBsFQLktpnUPpjQHo9WiSxERkdEqpjSHad5TdCUyZhQA5baYoI6Ze2PeckBERIoVrWPm7scE6pkot0cBUG6bmX1j3qQ4jYouRURkark0ypvBz76h6FJkDCkAym0zzeNQXoTRStGliIhMr9EKVJbye7LIbVIAlNtmvBJm4c0QtXHOFV2OiMjUcc5B1MbMP4TxSkWXI2NIAVDuiJ17E/hViLtFlyIiMn3iLvi1/F4scgcUAOXO1A5imvdghpeLrkREZOqY4aX8eM7awaJLkTGlACh3xBiDXXw4P4JIPQFFRHaMy2Kcc9iFhzHGFF2OjCkFQLljZvYNmMoSDK8UXYqIyPQYXsFUFjGzOvtX7pwCoNwx41cxCw9DtK7NICIiO+Da5o+FhzF+tehyZIwpAMpdyTeD1CHuFF2KiMjkizva/CFbQgFQ7k7tAKZ1QptBRER2gBlewbROQO1A0aXImFMAlLuSbwZ5BOecTgYREdlGLh3hXIZdfFSbP+SuKQDKXTOzb8TUD8HgQtGliIhMrsFFTP2Qjn6TLaEAKHfNeCF2z09DMlRLGBGRbeCyBJIhds9PY7yw6HJkAigAypYw8w9iqntAawFFRLbe8DKmugcz/2DRlciEUACULWGCOmbpsc3zgbOiyxERmRjOZXnrl6XHMEG96HJkQigAypaxC2+B0iyMVosuRURkcgxXoDSLXXi46EpkgigAypYx5fn8BjVcUWNoEZEt4JyD0Sp28RFMea7ocmSCKADKlrJLj0LQgGij6FJERMZftAFBA7v4SNGVyIRRAJStVd2PnX8QBpc0CigichecczC4mN9Tq/uLLkcmjAKgbCljDHb5bRDUIVovuhwRkfEVreejf8tvV+Nn2XIKgLL1agfzDSEaBRQRuSP56N8l7MJbMPWDRZcjE0gBULbctVHAsAmjtaLLEREZP6M1CJvY5bcXXYlMKAVA2Ramtj9ftDy8rFFAEZHb4JyD4eV8529tX9HlyIRSAJRtY/e8DcIZGK0UXYqIyPgYrUA4k99DRbaJAqBsG1NdztvCqC+giMgtyUf/VrBLj2Kqy0WXIxNMAVC2ld3z1vx0EJ0RLCJyc8PL+akfe95adCUy4RQAZVuZylK+iHm0hsvSossREdm1XJbAaA27/A5MZanocmTCKQDKtrPLb8fUDkD/bNGliIjsXv1zmNrBvIuCyDZTAJRtZ4I6dv97II1wybDockREdh2XDCGLsPvfgwnqRZcjU0ABUHaEWXgLZub1GgUUEXk1/bOY1r2YhTcXXYlMCQVA2RHGBtj9j4MJcFG76HJERHYNF7XBBtj978XYoOhyZEooAMqOMa3XYRffAv3zagsjIsJm25f++fzIt9brii5HpogCoOwYYwx233vUFkZE5KqrbV/2vQdjTNHVyBRRAJQdZarL2L3vzNvCpFHR5YiIFMalUd72Ze+71PRZdpwCoOw4u/wOTPMEpnem6FJERApjei9hmifyXqkiO0wBUHac8SvYg+/D2QA3Wi+6HBGRHedG6zgbYg+9H+NXii5HppACoBTCzNyLXfpXMLioE0JEZKq4LIXBReyexzCt1xddjkwpBUAphDEGu/9xTG2/egOKyHTpn8HUDuT3QG38kIIoAEphTGkGe+BnMVmMi3tFlyMisu1c3MVkSX7vC1tFlyNTTAFQCmXm3wzzb4beGfUGFJGJ5pyD3llYeAtm/qGiy5EppwAohTLWwzv4fkx5Afrnii5HRGT79M9hyot4B96HsV7R1ciUUwCUwpnqMvbg+zHpEBd3iy5HRGTLubiLSYf5vU49/2QXUACUXcEs/hRm4dF8Kli7gkVkgrgshd4ZzOKjmMVHiy5HBFAAlF3CWA97+IOY2kFM76WiyxER2TKm9xKmdhB76IOa+pVdQwFQdg1TmsMe+jkcRg2iRWQiuNEaDoM99HOY0lzR5YhcowAou4qZe1N+LNLgAi6
Liy5HROSOuSyGwSXs8tsxc28quhyRV1AAlF3FGIM98K8xzRPQPa3WMCIylpxz0DmVn/V74GfV8Fl2HQVA2XVMUMc78iHwqzC4VHQ5IiK3b3AJghrekQ9hglrR1Yj8BAVA2ZVM8x68g++HpKPWMCIyVlzchaSLd/ADmObxossReVUKgLJrmT1vxy79NPTOaj2giIwFl8XQO4tdegyz521FlyNyQwqAsmvlrWE+lK8H7Lyo9YAisqvl6/5ezNf9Hf6QWr7IrqYAKLuaCep4xz6CKc1C/2zR5YiI3Fj/LKY0m9+zgnrR1Yi8JgVA2fVM/RD20BOYLFZ/QBHZldxoHZPF2EMfwtQPFV2OyE0pAMpYMIuPYpbfCYOLuHRUdDkiIte4dASDi5jld2IWHym6HJFbogAoY8EYkx+iPnc/dE7qvGAR2RVclkLnJGbu/vwepX5/MiYUAGVsGL+Cd+wpTONoHgK1KURECpRv+jiJaRzN701+peiSRG6ZAqCMFVOexzv2sXxTSO+lossRkWnWO7256eMpTHm+6GpEbosCoIwd0ziCPfoRwOB0UoiIFCC/93jYox/BNA4XXY7IbVMAlLFk5h7AHvwgRG1ctFF0OSIyRVy0gYk72EMfxM4/WHQ5IndEAVDGkjEGu+9d2H3vgv4FXDIouiQRmQIuGUD/AmbvO7F731l0OSJ3TAFQxpYxNu8PuPAwdE/h0qjokkRkgrk0gu4pzMLD+b3H6BEq40tXr4w145Xwjj2Jmblvsz1MUnRJIjKBXJbkO35n7svvOV6p6JJE7ooCoIw9E7bw7vnFvD1M+wWcU49AEdk6zqXQfgHTPI53zycxYavokkTumgKgTARTXsA78W8wtQPQPolzWdElicgEcC6D9klM7UD+QVPtXmRCKADKxDDVvfkNurKA6byoRtEiclecc5jOi5jKIt6JT2Kqe4suSWTLKADKRDGNw9jjvwhhE9M9rRAoInfEOYfpnoawiT3+CUz9UNEliWwpBUCZOLZ1AnvsKZwXQv9s0eWIyDjqn8V5Jeyxp7CtE0VXI7LlFABlItm5+/GOPgkYXO9M0eWIyJhwzm3eMwze0Y9g5+4vuiSRbaEAKBPLLj6Cd+yjgMX1zmg6WERek3MOemcBi3fsY9jFR4ouSWTbKADKRLOLj+Id+xhgQSFQRG4gD39nwHh4x59S+JOJZ5yeiDIFsiv/SPrCf4IshtpBjDFFlyQiu0Qe/l4CG+Idewq78FDRJYlsO40AylSwC2/GO/4JsCH0tDtYRHLOOeieBlvCO/5xhT+ZGgqAMjXs/IN4xz8OtqwWMSLycqsXr5yHv/kHiy5JZMdoClimTrb6XbIffQmSDq5+WAe6i0wh5zJM9xT4Dezxp7TbV6aOAqBMpWz9h2Qv/Efc4Ao0j2KMV3RJIrJDXJZC5ySmsog99nHszOuKLklkxykAytRy3dOkz38R1z0NzeMY6xddkohsM5cl0HkBUzuYHx2pEz5kSikAylRz/Qukz/9HXPuH0DiK8UpFlyQi28SlQ+i8iGm+Du+eT2Cqy0WXJFIYBUCZem60RvrCl3Arz+QtYoJa0SWJyBZzcRd6ZzDzD+EdewpTmim6JJFCKQCKAC7pk538c7KL34LKkh4OIhPEjdZheBm79Bj26C9g/GrRJYkUTgFQZJNLI7KX/ivu3NdxfgUqy2oYLTLGnHMwuIBJBph9P4M9+AGMFxZdlsiuoAAoch3nMtylb5Ge+ktM0sc1DmuHsMgYci7FdE7h/Cre4ScwS4+p5ZPIdRQARV5Ftv4DspNP43rnoHFEowYiY8SlUd7mpXYAe/RJtXkReRUKgCI34AYXSV/4z7i170HtACaoF12SiNxEvtnjLGb2DXjHPoapLBVdksiupAAo8hpc3CM79RdkF78JpRlMeaHokkTkBtzwCozWsXveij38Ie3oF3kNCoAiN+GyhOzcN8jO/DXGpbj6Ia0lEtlF8mPdTuOMhz3wPuy+d6uxu8hNKAC
K3ALnHG7tn8lO/UW+LrB+CONXii5LZOq5ZADd05jaPuzhD2Nm79fufZFboAAochvc4DLpqS/jrjwD5TlNCYsUyA2vwHAVs/AQ3uGfx1QWiy5JZGwoAIrcJpdGZOe+Rnb2a5gsxjUOqVWMyA7KW7ycxtkAu/892H3v0U59kdukAChyB5xzuPVnyV78Mq53RlPCIjvk5SnfA9gjP4+ZuU9TviJ3QAFQ5C644Qrpi1/GrfwjhE0oL+lhJLINnHMwvARRGzP/ZrwjP48pzxddlsjYUgAUuUsujcgu/B3Z2a9C3M5HA71S0WWJTAyXjqB7GoImdv97sXvfgbFB0WWJjDUFQJEt4rqnSU99Bbf2LJRmoLyo0UCRu5CP+l2G0Xre2PnwE5j6oaLLEpkICoAiW8ilI7Lz/4vs3Ncg7m2OBmpxusjtcmkE3VMQNLD7fga7950aWRfZQgqAItvAdU6SnvpL3Pr3oTwPpXmNBorcAuccjFZguIKZuTcf9WscLboskYmjACiyTVwyJDv/DbJz34CkB7WDGL9cdFkiu5ZLhtB7Cfwadt+7sXvfrf9nRLaJAqDINnPtF0hf+m+49WfBr0J1r46SE7mOcxn0z0EyxMzeh3fgfZjmsaLLEploCoAiO8ClEe7y/yM7+1Xc4EIeAsNW0WWJFM5FG9A/j6ksY/e/F7P4qNbNiuwABUCRHeSGq2Rn/wfZ5b/PTxGpHdTDTqaSSyNM76X8NI/Fn8LufxxTniu6LJGpoQAossOcc7iNH5C99Ne4jecgbEBlj6aFZSo4l8HgIkQdTOsE9uD7MK3Xa5OUyA5TABQpiEtHZBf+D+78/8QNL0F5EUpzehDKRMp3967C8DKmvITZ+y7s8lvV2kWkIAqAIgVzw1WyC39LdulbEHWguowJm0WXJbJlXNSG/gUIG9ilx7DL79B0r0jBFABFdgnXO0N29htkq89AGkFtP8avFF2WyB1zyQB6Z8ELsfMPYfe9G1M7UHRZIoICoMiukq8PfI7s3Ndh/VkwFlfdp40iMlZcGmH658ClMPMG7L6fwbROaHmDyC6iACiyC7kswa1+l+zc13GdF8H6eesYrZeSXcylI+ifhyzBNI7kwW/uTRjrF12aiPwYBUCRXcylEW71n8jO/x2uexJjfFx1GePpdATZPVw6xPQv4FyKqR/B7n07Zu5BjVyL7GIKgCJjIA+C3yW78HfQeQGwuOpeHZMlhXLJANM/DzhoHMPufQdm9n4FP5ExoAAoMkZcFuNWv0d24W+h/Xz+vcoyJqgVXJlMExf3MIOLgIPmPfmu3rk3YmxQdGkicosUAEXGkMti3Nq/kF3437j2jyAbQXkBwlkttJdt4ZyDaA0GV8ArYZrHsctvw8y+UWv8RMaQAqDIGHMug/YLZJe/Tbb6HYg2IGxCeVEPZdkSLktgeBmiNoQt7NwD2MVHoHlMp9eIjDEFQJEJ4QYXya48g7v897jBJbAhVJbUS1DuiEsGMLgEWYSpLGEWfwq78BCmsqfo0kRkCygAikwYF3fzDSMX/y+uewpcDKU5KM1ijFd0ebKLOZfCaC0/ss0EmPph7J7HMHP3Y4J60eWJyBZSABSZUC5LcO3ncSvfzaeHR2vgBfmZw35NawUF2Fzbl/Tyad40htIsdu4BzPybMM17tJRAZEIpAIpMARdt4NaeJbvyD7jOSUj6m2sFF7Rzc0q5LIbhlXxtn1/FNI5iF96Cmb0PE7aKLk9EtpkCoMgUcS6D3hmy1X/GXfkHGF7Ovxe28ilihcGJ5rI4HwmONvINHOVFzMJbsHP3Q+2ANnWITBEFQJEp5ZIhrv0cbu37uPV/gdHqZhhsQmlOYXBC5KFvFaJ2HvBKc5jZN2Bm7sU0T6iZuMiUUgAUEVzSx7V/hFv/AW7tezBaeXlkMJzRGcRjxqUjiNZfHukrLVwX+o5h/GrRJYpIwRQAReQV8jD4Am7jB7jV7+FGa5BF4JchnIGgoanCXca5DOJOHvqSIdgQU5r
NT+dovV6hT0R+ggKgiNyQS4a43kvQPUW29iyufw7ibv6TQRPClqYQC+KSYd74O27n3wjqmOo+7Ox9mPqRfE2f/tuIyA0oAIrILXHO5VPDnVMvjxCOViGNMNbD+XUI6vmOUrWY2VJ5q5Y+xF1M3M379XkhpjT38ghf4zCU5vXvXkRuiQKgiNwRl0bQO4Prn8tDYecFXNSGZADGgF+FoAFBTQ2ob5NzKcS9fFo36YNz4FcwYQvTOIppHMZU9+WjfF5YdLkiMoYUAEVkS7gshsFFXO88rncmb0I9XMmbDDsHxuahcPOl4JJzaZSHvKsvNm/Jfg1Tns+bMdcPYKp7obJHu7NFZEsoAIrItnAuy1vL9C/A8AqufwHXO40bbWyGwgScAb8EXgW8EnhlsMHETWM65yCLIR1COoJ0AMkIjAPj52Gv1MLUDmGqy3mD7uqypnRFZNsoAIrIjnHO5RsXRldwgyu44SVc93QeEJNRHpBc8vJv8MLNUFgCG1x77bZQdC3gXXtt/l3S6OVfZHzwyhi/lAe8+iFMeQlTWciP5wuau+7vJSKTSwFQRArnsiQ/kixax0UbEG3gRmu4wQUYruCSwcvhysXAZlByDqyXB0Pjg/HyqWZjr/vx9e9XA9aPBy338j/PpeCyV3m/+uNks5b0un+eA3NdQPUrUJ7HVJYxpdm8l2LYzNvohE2drysihVMAFJFdLe9x1722Rs5dWy/Xy4NhtIEbrUPaz6dXswiXXRfcuD7EOa6FvWvv14VCY14Oi7wcJI21YMPNaeoqpjSz2QKnAn4tX9N43fpGgrp6JYrIrqYAKCITxbnsuqnYaPN9c9TOZeTBz/1Y/jP5y9jNUTx/8z28btpZgU5EJocCoIiIiMiU0UdaERERkSmjACgiIiIyZRQARURERKaMAqCIiIjIlFEAFBEREZkyCoAiIiIiU0YBUERERGTKKACKiIiITBkFQBEREZEpowAoIiIiMmUUAEVERESmjAKgiIiIyJRRABQRERGZMgqAIiIiIlNGAVBERERkyigAioiIiEwZBUARERGRKaMAKCIiIjJlFABFZKr95m/+Jo899hiLi4uUy2VOnDjBL/3SL3H58uWiSxMR2TbGOeeKLkJEpChPPvkki4uL3HvvvTQaDZ599ln+8A//kKWlJZ555hlqtVrRJYqIbDkFQBGRH/P000/z0Y9+lC9+8Yt84hOfKLocEZEtpylgEZlIv/7rv44xhu9///s89dRTNJtN5ufn+exnP8twOHzN33vkyBEA1tfXt79QEZEC+EUXICKynZ566imOHDnCb/3Wb/Gtb32L3/3d32VtbY0//uM/vvZrnHOsrKyQJAnPPfccn/vc5/A8j3e/+93FFS4iso0UAEVkoh09epQvf/nLAHzqU5+i2Wzy+7//+/zKr/wKDzzwAAAXL15k7969137PgQMH+NM//VPuvffeQmoWEdlumgIWkYn2qU996hVff+YznwHgr/7qr659b25ujr/5m7/hK1/5Cr/xG7/BwsIC3W53R+sUEdlJGgEUkYl24sSJV3x9/PhxrLW8+OKL174XhiGPP/44AE888QTvfe97edvb3sbS0hJPPPHETpYrIrIjNAIoIlPFGHPTX/PWt76VvXv38id/8ic7UJGIyM5TABSRifbcc8+94uvnn3+eLMuu7fS9keFwyMbGxjZWJiJSHAVAEZlov/d7v/eKrz//+c8D8IEPfIBer0e/3/+J3/P000+ztrbGI488siM1iojsNK0BFJGJdvLkST784Q/z/ve/n29+85t84Qtf4JOf/CQPPvggzzzzDI8//jgf//jHuffee7HW8u1vf5svfOELHDlyhM9+9rNFly8isi0UAEVkov3Zn/0Zv/Zrv8bnPvc5fN/n05/+NL/9278N5O1ennzySb72ta/xR3/0R8RxzOHDh/n0pz/Nr/7qrzI/P19w9SIi20MBUEQm2uLiIl/60pde9ecWFhb4gz/4gx2uSESkeFoDKCI
iIjJlFABFREREpowCoIiIiMiUMc45V3QRIiIiIrJzNAIoIiIiMmUUAEVERESmjAKgiIiIyJRRABQRERGZMgqAIiIiIlNGAVBERERkyigAioiIiEwZBUARERGRKaMAKCIiIjJlFABFREREpowCoIiIiMiUUQAUERERmTIKgCIiIiJTRgFQREREZMooAIqIiIhMGQVAERERkSnz/wGExMdqgvV3gQAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "from matplotlib_venn import venn3\n", - "import ipywidgets as widgets\n", - "from IPython.display import display, HTML, clear_output\n", - "from ipywidgets import VBox, HBox\n", - "\n", - "# Function to calculate parity bits\n", - "def calculate_parity_bits(data_bits):\n", - " d1, d2, d3, d4 = data_bits\n", - " p1 = d1 ^ d2 ^ d4\n", - " p2 = d1 ^ d3 ^ d4\n", - " p3 = d2 ^ d3 ^ d4\n", - " return [p1, p2, p3]\n", - "\n", - "# Function to update the Venn diagram labels based on data bits\n", - "def update_venn_labels(data_bits):\n", - " # Clear the previous output\n", - " clear_output(wait=True)\n", - "\n", - " # Clear the computed parity bit outputs\n", - " output_p1.clear_output()\n", - " output_p2.clear_output()\n", - " output_p3.clear_output()\n", - "\n", - " # Display the widgets again\n", - " display(VBox([title, data_bits_widget, HBox([button_p1, button_p2, button_p3], layout=widgets.Layout(justify_content='space-between')), HBox([output_p1, output_p2, output_p3])]))\n", - "\n", - " # Create the Venn diagram\n", - " plt.figure(figsize=(8, 8))\n", - " venn = venn3(subsets=(1, 1, 1, 1, 1, 1, 1), set_labels=('p1', 'p2', 'p3'))\n", - "\n", - " # Set colors for the circles using NVIDIA color palette\n", - " venn.get_patch_by_id('100').set_color('#76B900') # Green\n", - " venn.get_patch_by_id('010').set_color('#7A1FA2') # Purple\n", - " venn.get_patch_by_id('001').set_color('#F9A825') # Yellow\n", - "\n", - " # Set colors for the intersections\n", - " venn.get_patch_by_id('110').set_color('#A3A3A3') # Light Gray\n", - " venn.get_patch_by_id('101').set_color('#A3A3A3') # Light Gray\n", - " venn.get_patch_by_id('011').set_color('#A3A3A3') # Light Gray\n", - " venn.get_patch_by_id('111').set_color('#A3A3A3') # Light Gray\n", - "\n", - " # Set transparency for the circles\n", - " venn.get_patch_by_id('100').set_alpha(0.5)\n", - " 
venn.get_patch_by_id('010').set_alpha(0.5)\n", - " venn.get_patch_by_id('001').set_alpha(0.5)\n", - "\n", - " # Label the intersections with data bits\n", - " venn.get_label_by_id('100').set_text(f'')\n", - " venn.get_label_by_id('010').set_text(f'')\n", - " venn.get_label_by_id('001').set_text(f'')\n", - " venn.get_label_by_id('110').set_text(f'd1={data_bits[0]}')\n", - " venn.get_label_by_id('101').set_text(f'd2={data_bits[1]}')\n", - " venn.get_label_by_id('011').set_text(f'd3={data_bits[2]}')\n", - " venn.get_label_by_id('111').set_text(f'd4={data_bits[3]}')\n", - "\n", - " plt.show()\n", - "\n", - "# Function to update the Venn diagram and display the messages\n", - "def update_venn(data_bits, parity_bit):\n", - " parity_bits = calculate_parity_bits(data_bits)\n", - "\n", - " # Clear the previous output\n", - " clear_output(wait=True)\n", - "\n", - " # Display the widgets again\n", - " display(VBox([title, data_bits_widget, HBox([button_p1, button_p2, button_p3], layout=widgets.Layout(justify_content='space-between')), HBox([output_p1, output_p2, output_p3])]))\n", - "\n", - " # Create the Venn diagram\n", - " plt.figure(figsize=(8, 8))\n", - " venn = venn3(subsets=(1, 1, 1, 1, 1, 1, 1), set_labels=('p1', 'p2', 'p3'))\n", - "\n", - " # Set colors for the circles using NVIDIA color palette\n", - " venn.get_patch_by_id('100').set_color('#76B900') # Green\n", - " venn.get_patch_by_id('010').set_color('#7A1FA2') # Purple\n", - " venn.get_patch_by_id('001').set_color('#F9A825') # Yellow\n", - "\n", - " # Set colors for the intersections\n", - " venn.get_patch_by_id('110').set_color('#A3A3A3') # Light Gray\n", - " venn.get_patch_by_id('101').set_color('#A3A3A3') # Light Gray\n", - " venn.get_patch_by_id('011').set_color('#A3A3A3') # Light Gray\n", - " venn.get_patch_by_id('111').set_color('#A3A3A3') # Light Gray\n", - "\n", - "\n", - " # Set transparency for the circles\n", - " venn.get_patch_by_id('100').set_alpha(0.5)\n", - " 
venn.get_patch_by_id('010').set_alpha(0.5)\n", - " venn.get_patch_by_id('001').set_alpha(0.5)\n", - "\n", - " # Label the intersections with data bits\n", - " venn.get_label_by_id('100').set_text(f'')\n", - " venn.get_label_by_id('010').set_text(f'')\n", - " venn.get_label_by_id('001').set_text(f'')\n", - " venn.get_label_by_id('110').set_text(f'd1={data_bits[0]}')\n", - " venn.get_label_by_id('101').set_text(f'd2={data_bits[1]}')\n", - " venn.get_label_by_id('011').set_text(f'd3={data_bits[2]}')\n", - " venn.get_label_by_id('111').set_text(f'd4={data_bits[3]}')\n", - "\n", - " # Highlight the selected parity bit and relevant data bits\n", - " if parity_bit == 'p1':\n", - " venn.get_patch_by_id('100').set_edgecolor('black')\n", - " venn.get_patch_by_id('100').set_linewidth(5)\n", - " venn.get_patch_by_id('110').set_color('#76B900') # Green\n", - " venn.get_patch_by_id('101').set_color('#76B900') # Green\n", - " venn.get_patch_by_id('111').set_color('#76B900') # Green\n", - " venn.get_patch_by_id('110').set_edgecolor('black')\n", - " venn.get_patch_by_id('110').set_linewidth(5)\n", - " venn.get_patch_by_id('101').set_edgecolor('black')\n", - " venn.get_patch_by_id('101').set_linewidth(5)\n", - " venn.get_patch_by_id('111').set_edgecolor('black')\n", - " venn.get_patch_by_id('111').set_linewidth(5)\n", - " output_p1.clear_output()\n", - " with output_p1:\n", - " display(HTML(f\"p1 = d1 + d2 + d4 (mod 2)= {data_bits[0]} + {data_bits[1]} + {data_bits[3]} (mod 2) = {parity_bits[0]}\"))\n", - " elif parity_bit == 'p2':\n", - " venn.get_patch_by_id('010').set_edgecolor('black')\n", - " venn.get_patch_by_id('010').set_linewidth(5)\n", - " venn.get_patch_by_id('110').set_color('#7A1FA2') # Purple\n", - " venn.get_patch_by_id('011').set_color('#7A1FA2') # Purple\n", - " venn.get_patch_by_id('111').set_color('#7A1FA2') # Purple\n", - " venn.get_patch_by_id('110').set_edgecolor('black')\n", - " venn.get_patch_by_id('110').set_linewidth(5)\n", - " 
venn.get_patch_by_id('011').set_edgecolor('black')\n", - " venn.get_patch_by_id('011').set_linewidth(5)\n", - " venn.get_patch_by_id('111').set_edgecolor('black')\n", - " venn.get_patch_by_id('111').set_linewidth(5)\n", - " output_p2.clear_output()\n", - " with output_p2:\n", - " display(HTML(f\"p2 = d1 + d3 + d4 (mod 2)= {data_bits[0]} + {data_bits[2]} + {data_bits[3]} (mod 2) = {parity_bits[1]}\"))\n", - " elif parity_bit == 'p3':\n", - " venn.get_patch_by_id('001').set_edgecolor('black')\n", - " venn.get_patch_by_id('001').set_linewidth(5)\n", - " venn.get_patch_by_id('101').set_color('#F9A825') # Yellow\n", - " venn.get_patch_by_id('011').set_color('#F9A825') # Yellow\n", - " venn.get_patch_by_id('111').set_color('#F9A825') # Yellow\n", - " venn.get_patch_by_id('101').set_edgecolor('black')\n", - " venn.get_patch_by_id('101').set_linewidth(5)\n", - " venn.get_patch_by_id('011').set_edgecolor('black')\n", - " venn.get_patch_by_id('011').set_linewidth(5)\n", - " venn.get_patch_by_id('111').set_edgecolor('black')\n", - " venn.get_patch_by_id('111').set_linewidth(5)\n", - " output_p3.clear_output()\n", - " with output_p3:\n", - " display(HTML(f\"p3 = d2 + d3 + d4 (mod 2)= {data_bits[1]} + {data_bits[2]} + {data_bits[3]} (mod 2) = {parity_bits[2]}\"))\n", - "\n", - " plt.show()\n", - "\n", - "# Create a title widget\n", - "title = widgets.Label(value=\"Hamming Code Visualization: Computing parity bits (p1, p2, p3)\")\n", - "\n", - "\n", - "# Create widgets for user input\n", - "data_bits_widget = widgets.Dropdown(\n", - " options=['0000', '0001', '0010', '0011', '0100', '0101', '0110', '0111', '1000', '1001', '1010', '1011', '1100', '1101', '1110', '1111'],\n", - " value='1001',\n", - " description='Data Bits (d1, d2, d3, d4):', style={'description_width': 'initial'}\n", - ")\n", - "\n", - "# Create buttons for parity bits\n", - "button_p1 = widgets.Button(description='Compute p1', layout=widgets.Layout(width='150px'), 
style=widgets.ButtonStyle(button_color='#BBE07F')) # Green\n", - "button_p2 = widgets.Button(description='Compute p2', layout=widgets.Layout(width='150px'), style=widgets.ButtonStyle(button_color='#BD8FD1')) # Purple\n", - "button_p3 = widgets.Button(description='Compute p3', layout=widgets.Layout(width='150px'), style=widgets.ButtonStyle(button_color='#FCD492')) # Yellow\n", - "\n", - "# Create output areas for parity bit results\n", - "output_p1 = widgets.Output(layout=widgets.Layout(width='300px'))\n", - "output_p2 = widgets.Output(layout=widgets.Layout(width='300px'))\n", - "output_p3 = widgets.Output(layout=widgets.Layout(width='300px'))\n", - "\n", - "# Define the button click events\n", - "def on_button_p1_click(b):\n", - " data_bits_list = [int(bit) for bit in data_bits_widget.value]\n", - " update_venn(data_bits_list, 'p1')\n", - "\n", - "def on_button_p2_click(b):\n", - " data_bits_list = [int(bit) for bit in data_bits_widget.value]\n", - " update_venn(data_bits_list, 'p2')\n", - "\n", - "def on_button_p3_click(b):\n", - " data_bits_list = [int(bit) for bit in data_bits_widget.value]\n", - " update_venn(data_bits_list, 'p3')\n", - "\n", - "button_p1.on_click(on_button_p1_click)\n", - "button_p2.on_click(on_button_p2_click)\n", - "button_p3.on_click(on_button_p3_click)\n", - "\n", - "# Define the dropdown change event\n", - "def on_data_bits_change(change):\n", - " data_bits_list = [int(bit) for bit in change['new']]\n", - " update_venn_labels(data_bits_list)\n", - "\n", - "data_bits_widget.observe(on_data_bits_change, names='value')\n", "\n", - "# Display the widgets\n", - "display(VBox([title, data_bits_widget, HBox([button_p1, button_p2, button_p3], layout=widgets.Layout(justify_content='space-between')), HBox([output_p1, output_p2, output_p3])]))\n", "\n", - "# Initial update of the Venn diagram labels\n", - "update_venn_labels([int(bit) for bit in data_bits_widget.value])\n" + "" ] }, { @@ -687,11 +407,12 @@ "id": 
"e9129bd4-a100-4061-86dd-e69fde915617", "metadata": {}, "source": [ - "
\n", - "

Exercise 2 - The matrix form of the Hamming code:

\n", - "

\n", - "The Hamming code is commonly constructed with special matrices so a few simple linear algebra operations can encode and decode messages. The next two cells will have you define these matrices and see if you can reproduce the example above. \n", - "

\n", + "
\n", + "\n", + "**Exercise 2:** The Matrix Form of the Hamming Code\n", + "\n", + "The Hamming code is commonly constructed with special matrices so a few simple linear algebra operations can encode and decode messages. The next two cells will have you define these matrices and see if you can reproduce the example above.\n", + "\n", "
\n", "\n", "\n", @@ -701,19 +422,13 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "id": "f8d5c69d-b07c-4d5c-8511-02ee20fedb11", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[0 1 1 0 1 1 0]\n" - ] - } - ], + "outputs": [], "source": [ + "# EXERCISE 2\n", + "\n", "message = np.array([0, 1, 1, 0])\n", "\n", "# The G matrix should properly encode the message when the following calculation is performed\n", @@ -728,7 +443,8 @@ "\n", "encoded = np.dot(message, G) % 2\n", "\n", - "print(encoded)\n" + "print(encoded)\n", + "" ] }, { @@ -741,19 +457,10 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "id": "7d68795f-5b03-442d-acc5-10859c13e66c", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[0 0 1 0 1 1 0]\n", - "[1 0 1]\n" - ] - } - ], + "outputs": [], "source": [ "received = np.array([0, 0, 1, 0,1,1,0]) # error applied on second data qubit\n", "print(received)\n", @@ -767,7 +474,8 @@ "])\n", "\n", "decoded = np.dot(H,received) % 2\n", - "print(decoded)\n" + "print(decoded)\n", + "" ] }, { @@ -775,6 +483,8 @@ "id": "354b601a-b343-46d7-9aa5-5d2f5725637c", "metadata": {}, "source": [ + "---\n", + "\n", "## 1.4 What Makes QEC so Hard?" ] }, @@ -787,13 +497,14 @@ " \n", "1. Continuous Errors - Classical errors are always discrete bit flips. Quantum errors are continuous and can manifest in an infinite number of ways, potentially shifting a qubit's state to any point on the Bloch sphere. For instance, the figure below illustrates many possible errors that affect a qubit starting in the $\\ket{0}$ state. Errors can perturb states incoherently (from environmental effects) or coherently from slight hardware imperfections. This invites the question, \"Does QEC require an infinite amount of resources to correct errors?\"\n", " \n", - "\"Drawing\"\n", + "\"Bloch\n", "\n", "2. 
No Cloning - Quantum states cannot be copied. That is to say that the following expression holds:$~\\nexists U \\text{ such that } U(\\ket{\\psi} \\otimes \\ket{\\rho}) = \\ket{\\psi} \\otimes\\ket{\\psi}$. This means we cannot just send multiple copies of the quantum state through the noisy channel like the classical repetition code. \n", "\n", "3. Destructive Measurement - In classical EC, the state can be accessed at any time, making decoding much easier. Measuring a quantum state collapses it, making the EC moot if the state is destroyed. Therefore, more clever ways to extract syndromes are required. A secondary consequence of this fact is sampling error. Even if an algorithm could perform perfectly ensuring no sources of error, many applications require statistical sampling of the resulting state. If we sampled $\\ket{\\psi} = \\alpha\\ket{0} + \\beta\\ket{1}$ the frequency of 0's would be close to $\\alpha^2$ but deviate based on the number of samples per the Central Limit Theorem.\n", "\n", - "4. Scalability - Though scalability is an issue for classical EC, it is far more severe for QEC. Today's noisy intermediate scale quantum devices are very difficult to control, so each additional qubit required for QEC comes at great cost. Qubits also have short coherence times, so QEC procedures must complete within strict time constraints which gets harder at scale. Finally, the threshold theorem is in play. In classical EC, adding more bits always reduces the logical error rate. This is not true for quantum - physical qubits must have noise below a specific threshold in order for scaling the code to improve the error rates, otherwise, the results just get worse.\n" + "4. Scalability - Though scalability is an issue for classical EC, it is far more severe for QEC. Today's noisy intermediate scale quantum devices are very difficult to control, so each additional qubit required for QEC comes at great cost. 
Qubits also have short coherence times, so QEC procedures must complete within strict time constraints which gets harder at scale. Finally, the threshold theorem is in play. In classical EC, adding more bits always reduces the logical error rate. This is not true for quantum - physical qubits must have noise below a specific threshold in order for scaling the code to improve the error rates, otherwise, the results just get worse.\n", + "" ] }, { @@ -801,7 +512,9 @@ "id": "30bbbc72-eec4-475c-be5f-53f42fb47963", "metadata": {}, "source": [ - "## 1.5 There is still hope for QEC!" + "---\n", + "\n", + "## 1.5 There is Still Hope for QEC!" ] }, { @@ -811,76 +524,19 @@ "source": [ "The challenges discussed above are daunting but there are many ingenious techniques developed to help circumvent them. That said, practical QEC remains difficult to realize and is an extremely active research field - viewed as one of the most important prerequisites for useful quantum computing. This section will begin to bridge the gap between classical EC and QEC.\n", "\n", + "### Syndrome Extraction\n", "\n", - "### Digitization of errors\n", - "\n", - "Errors can perturb states incoherently from environmental effects or coherently from slight hardware imperfections. While both types of errors can be addressed, we’ll focus on coherent errors first because they’re often easier to isolate and analyze.\n", + "The no cloning principle means quantum states cannot be copied for QEC. We'll need a clever way to extract syndromes from the logical state that does not rely on repetition. But, how is this done without destroying the information that is being protected?\n", "\n", - "For instance, a rotation gate that should be at an angle of $\\frac{\\pi}{16} \\approx 0.196 $ ends up being more like 0.17. This may seem inconsequential, but imperfections like this accumulate and quickly ruin the outcome of a quantum algorithm. 
Execute the code block below and use the slider to change the number of rotation gates executed to see how the error can become substantial. Feel free to experiment with different values for the `angle`, `noisy_angle`, and the rotation axis in the `rotation_kernel`. \n", + "The solution involves **stabilizers** which are specially designed operators that act on a logical state without changing it, but still enable us to learn about errors by performing projective measurement of ancilla qubits. The next notebook in this series will introduce stabilizers with more mathematical rigor, and the following section of this lab will provide a more concrete example of a simple stabilizer in action. Essentially, stabilizers perform parity checks and project the quantum state into the $1\\ket{\\psi}$ state if the parity check passes and $-1\\ket{\\psi}$ if the parity check is violated. So, you return the same state either way, and with enough atabilizers, can identify which errors occured and fix them.\n", "\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e4384ef7", - "metadata": {}, - "outputs": [], - "source": [ - "# Angles of rotation of a qubit\n", - "angle = np.pi / 16 \n", - "noisy_angle = 0.17 \n", "\n", - "# Kernel to initialize a qubit in the zero ket state and rotate it about the x axis by given angle n times\n", - "@cudaq.kernel\n", - "def rotation_kernel(n: int, angle: float):\n", - " qubit = cudaq.qubit()\n", - " for _ in range(n):\n", - " rx(angle, qubit) # CHANGE THE ROTATION AXIS\n", - "\n", - "# Function to plot sample results\n", - "def plot_results(results1, results2):\n", - " # Convert the sample results to a dictionary\n", - " result_dictionary1 = {k: v for k, v in results1.items()}\n", - " result_dictionary2 = {k: v for k, v in results2.items()}\n", - " \n", - " # Get all unique x-values from both dictionaries\n", - " all_keys = set(result_dictionary1.keys()).union(set(result_dictionary2.keys()))\n", - " all_keys = 
sorted(all_keys)\n", - "\n", - " # Convert the dictionary to lists for x and y values\n", - " x1 = list(all_keys)\n", - " y1 = list(result_dictionary1.values())\n", - " y2 = list(result_dictionary2.values())\n", - "\n", - " # Create the combined histogram\n", - " bar_width = 0.35\n", - " x_indices = range(len(x1))\n", - "\n", - " plt.bar(x_indices, y1, width=bar_width, color='#76B900', label='Noise-Free Results')\n", - " plt.bar([i + bar_width for i in x_indices], y2, width=bar_width, color='#484848', label='Noisy Results')\n", - "\n", - " # Add title and labels\n", - " plt.title('Comparing sampling results of n applications of a noise-free gate with a noisy version')\n", - " plt.xlabel(\"Basis States\")\n", - " plt.ylabel(\"Frequency\")\n", - " plt.xticks([i + bar_width / 2 for i in x_indices], x1)\n", - " plt.legend()\n", - "\n", - " # Show the plot\n", - " plt.tight_layout()\n", - " plt.show()\n", + "### Digitization of errors\n", "\n", - "# Function to update the plot based on the slider value\n", - "def update_plot(num_rotations):\n", - " expected_result = cudaq.sample(rotation_kernel, num_rotations, angle)\n", - " noisy_result = cudaq.sample(rotation_kernel, num_rotations, noisy_angle)\n", - " plot_results(expected_result, noisy_result)\n", + "Errors can perturb states incoherently from environmental effects or coherently from slight hardware imperfections. While both types of errors can be addressed, we’ll focus on coherent errors first because they’re often easier to isolate and analyze.\n", "\n", - "# Create an interactive slider\n", - "slider = widgets.IntSlider(min=1, max=20, step=1, value=1, description='n:', continuous_update=False)\n", - "interact(update_plot, num_rotations=slider)" + "For instance, a rotation gate that should be at an angle of $\\frac{\\pi}{16} \\approx 0.196 $ ends up being more like 0.17. 
This may seem inconsequential, but imperfections like this accumulate and quickly ruin the outcome of a quantum algorithm.\n", + "\n" ] }, { @@ -888,76 +544,11 @@ "id": "959da618", "metadata": {}, "source": [ - "Among the various coherent errors that can occur on a qubit storing the quantum state $\\ket{\\psi} = \\alpha \\ket{0}+\\beta\\ket{1}$, we will focus on three specific types:\n", - "* **Bit flip errors** swap a qubit's amplitudes, transforming $\\ket{\\psi}$ to $\\beta\\ket{0}+\\alpha\\ket{1}$.\n", - "* **Phase flip errors** introduce a sign change in one of the amplitudes, transforming $\\ket{\\psi}$ to $\\alpha\\ket{0}-\\beta\\ket{1}$.\n", - "* **Combining a bit flip with a phase flip error** swaps amplitudes and applies a sign change, transforming $\\ket{\\psi}$ to $\\beta\\ket{0}-\\alpha\\ket{1}$.\n", + "We will focus on three different coherent errors that can occur on a qubit storing the quantum state $\\ket{\\psi} = \\alpha \\ket{0}+\\beta\\ket{1}$:\n", "\n", - "Run the cell below to open an interactive tool that allows you to visualize the impact of different error types on various quantum states. Observe how some error types may not alter the state. Why do you think that happens? What patterns can you identify?" 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1ba829f0", - "metadata": {}, - "outputs": [], - "source": [ - "# Execute this cell to see the interactive widget\n", - "# Don't concern yourself with the code below this line\n", - "# Function to update and display the Bloch sphere\n", - "def update_bloch_sphere(theta, phi, error_type):\n", - " alpha = np.cos(theta / 2)\n", - " beta = np.sin(theta / 2) * np.exp(1j * phi)\n", - " coefficients = [complex(alpha, 0), complex(0, beta)]\n", - " error_types = ['Bit Flip', 'Phase Flip', 'Bit & Phase Flip']\n", - " @cudaq.kernel\n", - " def initial_state_kernel(coefficients: list[complex]):\n", - " qubit = cudaq.qvector(coefficients)\n", - "\n", - " @cudaq.kernel\n", - " def initial_state_error(coefficients: list[complex], error: int):\n", - " qubit = cudaq.qvector(coefficients)\n", - " if error == 0 or error == 2:\n", - " # bit flip error\n", - " x(qubit)\n", - " if error == 1 or error == 2:\n", - " # phase flip error\n", - " z(qubit)\n", - "\n", - " state_no_error = cudaq.get_state(initial_state_kernel, coefficients)\n", - " state_with_error = cudaq.get_state(initial_state_error, coefficients, error_type)\n", - "\n", - " blochSphereList = []\n", - " # Define a sphere object representing the state of the single qubit\n", - " sphere = cudaq.add_to_bloch_sphere(state_no_error)\n", - " blochSphereList.append(sphere)\n", - " sphere = cudaq.add_to_bloch_sphere(state_with_error)\n", - " blochSphereList.append(sphere)\n", - "\n", - " # Create output widgets for the Bloch spheres and text\n", - " out1 = Output()\n", - " out2 = Output()\n", - " text1 = Output()\n", - " text2 = Output()\n", - "\n", - " with out1:\n", - " cudaq.show([blochSphereList[0]], nrows=1, ncols=1)\n", - " with out2:\n", - " cudaq.show([blochSphereList[1]], nrows=1, ncols=1)\n", - " with text1:\n", - " print(f\"|ψ> = cos(θ/2)|0⟩ + e^(iφ)sin(θ/2)|1⟩\")\n", - " with text2:\n", - " print(\"|ψ⟩ with a \", error_types[error_type], \" 
error\")\n", - "\n", - " display(VBox([HBox([VBox([text1, out1]), VBox([text2, out2])])]))\n", - "\n", - "# Create the interactive widget\n", - "theta_slider = widgets.FloatSlider(value=np.pi/2, min=0, max=2*np.pi, step=0.01, description='θ (radians):')\n", - "phi_slider = widgets.FloatSlider(value=0, min=0, max=np.pi, step=0.01, description='φ (radians):')\n", - "error_selector = widgets.Dropdown(options=[('None', -1), ('Bit Flip', 0), ('Phase Flip', 1), ('Bit & Phase Flip', 2)], value=-1, description='Error Type:')\n", - "\n", - "interact(update_bloch_sphere, theta=theta_slider, phi=phi_slider, error_type=error_selector)\n" + "* **Bit flip errors (X)** swap a qubit's amplitudes, transforming $\\ket{\\psi}$ to $\\beta\\ket{0}+\\alpha\\ket{1}$.\n", + "* **Phase flip errors (Z)** introduce a sign change in one of the amplitudes, transforming $\\ket{\\psi}$ to $\\alpha\\ket{0}-\\beta\\ket{1}$.\n", + "* **Combining a bit flip with a phase flip error (X and Z)** swaps amplitudes and applies a sign change, transforming $\\ket{\\psi}$ to $\\beta\\ket{0}-\\alpha\\ket{1}$." ] }, { @@ -967,18 +558,20 @@ "source": [ "Once we have identified one of these errors, we can correct it. For instance, if a qubit has undergone a bit flip error, we can correct it by applying an $X$ gate. Similarly, to correct a qubit that has experienced a phase flip error, we simply apply a $Z$ gate. How would you correct a qubit that has been identified as having undergone a bit flip error followed by a phase flip error? \n", "\n", - "We can address all coherent errors with a key insight: although the Bloch sphere suggests errors can occur through infinitely many possible rotations, all such errors can be broken down into three basic forms — bit flips, phase flips, or a combination of both bit flips and phase flips. 
\n", + "We can address all coherent errors with a key insight: although the Bloch sphere suggests errors can occur through infinitely many possible rotations, all such errors can be broken down into three basic forms — bit flips, phase flips, or a combination of both bit flips and phase flips. In other words, any error state can be reached by some combination of $X$, $Y$, and $Z$ rotations. So, why are there not an infinite number of corrections?\n", + "\n", + "The math below explains how an arbitrary error results in a finite set of possible error states. \n", "\n", - "If you'd like an explanation of why this decomposition works, consult the optional section below. For now, remember that by detecting and correcting these three core error types, we can effectively handle any coherent noise. \n", + " Consider a qubit in the following normalized state.\n", + " $$ \\ket{\\psi} = \\cos\\frac{\\theta}{2}\\ket{0} + e^{i\\phi}\\sin\\frac{\\theta}{2}\\ket{1} $$\n", + " \n", + " Coherent errors can be represented by the application of a Unitary $U(\\delta\\theta,\\delta\\phi)$ which acts on the ideal state and perturbs it.\n", + "$$ U(\\delta\\theta,\\delta\\phi)\\ket{\\psi} = \\cos\\frac{\\theta +\\delta\\theta}{2}\\ket{0} + e^{i\\phi+\\delta\\phi}\\sin\\frac{\\theta+\\delta\\theta}{2}\\ket{1} $$\n", + " Using the fact that the Pauli matrices form a basis for any 2x2 unitary matrix and taking advantage of the identity $Y=iXZ$, the operation can be rewritten as\n", + " $$ U(\\delta\\theta,\\delta\\phi) \\ket{\\psi} = \\alpha_II\\ket{\\psi} +\\alpha_X X\\ket{\\psi}+\\alpha_Z Z\\ket{\\psi}+\\alpha_{XZ}XZ\\ket{\\psi} $$\n", + " This means that any coherent error can be **digitized** into X-type bit flip errors ($X \\ket{\\psi} = \\alpha X\\ket{0} + \\beta X\\ket{1} = \\alpha\\ket{1} + \\beta\\ket{0}$), Z-type phase flip errors ($Z\\ket{\\psi} = \\alpha Z\\ket{0} + \\beta Z\\ket{1} = \\alpha\\ket{0} - \\beta\\ket{1}$), or a combination of the two (XZ). 
This makes correction much more tractable, as there are only three types of errors to consider.\n", "\n", - "> **Optional:** Consider a qubit in the following normalized state.\n", - "> $$ \\ket{\\psi} = \\cos\\frac{\\theta}{2}\\ket{0} + e^{i\\phi}\\sin\\frac{\\theta}{2}\\ket{1} $$\n", - "> \n", - "> Coherent errors can be represented by the application of a Unitary $U(\\delta\\theta,\\delta\\phi)$ which acts on the ideal state and perturbs it.\n", - ">$$ U(\\delta\\theta,\\delta\\phi)\\ket{\\psi} = \\cos\\frac{\\theta +\\delta\\theta}{2}\\ket{0} + e^{i\\phi+\\delta\\phi}\\sin\\frac{\\theta+\\delta\\theta}{2}\\ket{1} $$\n", - "> Using the fact that the Pauli matrices form a basis for any 2x2 unitary matrix and taking advantage of the identity $Y=iXZ$, the operation can be rewritten as\n", - "> $$ U(\\delta\\theta,\\delta\\phi) \\ket{\\psi} = \\alpha_II\\ket{\\psi} +\\alpha_X X\\ket{\\psi}+\\alpha_Z Z\\ket{\\psi}+\\alpha_{XZ}XZ\\ket{\\psi} $$\n", - "> This means that any coherent error can be **digitized** into X-type bit flip errors ($X \\ket{\\psi} = \\alpha X\\ket{0} + \\beta X\\ket{1} = \\alpha\\ket{1} + \\beta\\ket{0}$), Z-type phase flip errors ($Z\\ket{\\psi} = \\alpha Z\\ket{0} + \\beta Z\\ket{1} = \\alpha\\ket{0} - \\beta\\ket{1}$), or a combination of the two (XZ). This makes correction much more tractable, as there are only three types of errors to consider." + " Try the widget [linked here](https://nvidia.github.io/cuda-q-academic/interactive_widgets/error_digitization.html) to see a concrete example of this for an $X$ rotation (bitflip error) impacting the state $\\ket{000}$ and how stabilizers are they key to correcting the error." ] }, { @@ -986,15 +579,9 @@ "id": "b8ddc3f4-6d1e-4ed2-b34c-5018220ef617", "metadata": {}, "source": [ - "### Syndrome Extraction\n", - "\n", - "The no cloning principle means quantum states cannot be copied for QEC. We'll need a clever way to extract syndromes from the logical state that does not rely on repetition. 
But, how is this done without destroying the information that is being protected?\n", - "\n", - "The solution involves **stabilizers** which are specially designed operators that act on a logical state without changing it, but still enable us to learn about errors by performing projective measurement of ancilla qubits. The next notebook in this series will introduce stabilizers with more mathematical rigor, and the example in section 1.6 of this lab will provide a more concrete example of a simple stabilizer in action.\n", + "### Better QEC Codes and AI Solutions\n", "\n", - "### Better QEC codes and AI solutions\n", - "\n", - "Finally, overcoming the QEC scaling challenges will require breakthroughs on many fronts. Significant research efforts are targeting discovery of more efficient QEC codes that require fewer qubits. AI is already showing great promise as a tool to help find new QEC codes, and accelerate decoding. Later notebooks will explore AI for QEC applications.\n" + "Finally, overcoming the QEC scaling challenges will require breakthroughs on many fronts. Significant research efforts are targeting discovery of more efficient QEC codes that require fewer qubits. AI is already showing great promise as a tool to help find new QEC codes, and accelerate decoding. Later notebooks will explore AI for QEC applications." ] }, { @@ -1002,6 +589,8 @@ "id": "d82e4b87-2d90-499e-91bc-c4f2af3312eb", "metadata": {}, "source": [ + "---\n", + "\n", "## 1.6 The Quantum Repetition Code" ] }, @@ -1012,11 +601,11 @@ "source": [ "A quantum state cannot be cloned, but it can be redundantly encoded across additional entangled qubits. 
Let's start with a generic normalized qubit state $\\ket{\\psi}$:\n", "\n", - "$$\\ket{\\psi} = \\alpha\\ket{0} +\\beta\\ket{1}.$$ \n", + "$$\\ket{\\psi} = \\alpha\\ket{0} +\\beta\\ket{1}.$$\n", "\n", "The 0 and 1 states can be encoded into a logical state making use of the larger 8-dimensional Hilbert space of three qubits: \n", "\n", - "$$\\ket{\\psi}_L = \\alpha\\ket{000} +\\beta\\ket{111} = \\alpha\\ket{0}_L +\\beta\\ket{1}_L.$$ \n", + "$$\\ket{\\psi}_L = \\alpha\\ket{000} +\\beta\\ket{111} = \\alpha\\ket{0}_L +\\beta\\ket{1}_L.$$\n", "\n", "Note that this is *not* equivalent to $\\ket{\\psi} \\otimes \\ket{\\psi} \\otimes \\ket{\\psi}$.\n", "\n", @@ -1061,7 +650,7 @@ "\n", "\n", "\n", - "\"Drawing\"\n", + "\"Quantum\n", "\n", "\n", "\n", @@ -1084,7 +673,8 @@ "$$ \\ket{0}(\\frac{1+Z_1Z_2}{2})\\ket{011} + \\ket{1}(\\frac{1-Z_1Z_2}{2})\\ket{011} = \\ket{0}(\\frac{1+ -1}{2})\\ket{011} + \\ket{1}(\\frac{1--1}{2})\\ket{011} = \\ket{1}\\ket{011}. $$\n", "\n", "A similar analysis will show that the second ancilla qubit will be measured as 0 with certainty without distubring the data qubits. Accordoing to the syndrome table, \n", - "this will trigger an application of the $X$ gate on the first qubit to correct the error.\n" + "this will trigger an application of the $X$ gate on the first qubit to correct the error.\n", + "" ] }, { @@ -1092,134 +682,30 @@ "id": "f5cbe2ef-044a-46ae-abf3-26f87ba30a12", "metadata": {}, "source": [ - "## 1.7 Exercise 3: Coding the Quantum Repetition Code\n", - "\n", - "
\n", - "

Exercise 3 - The matrix form of the Hamming code:

\n", - "

\n", - " Now that you understand the quantum repetition code, try to code it using CUDA-Q. Fill in each of the steps below marked \"#TODO\". CUDA-Q contains a couple of features particularly helpful for building QEC workflows. First, and already completed for you, is the definition of a custom noise model which produces custom identity operations that can randomly perform bit flips on specific qubits. Second, you can measure the ancilla qubits within the kernel and use the result to perform a correction operation. The documentation example on building kernels and mid-circuit measurement may be helpful for this exercise.\n", - "

\n", - " Try to code all the steps and then sample the kernel to determine the logical error rate.\n", - "

\n", - "
\n", - "\n" + "---\n", + "\n", + "## 1.7 Coding the Quantum Repetition Code\n", + "\n", + "
\n", + "\n", + "**Exercise 3:** The Quantum Repetition Code\n", + "\n", + "Now that you understand the quantum repetition code, try to code it using CUDA-Q. Fill in each of the steps below marked `##TODO##`. CUDA-Q contains a couple of features particularly helpful for building QEC workflows. First, and already completed for you, is the definition of a custom noise model which produces custom identity operations that can randomly perform bit flips on specific qubits. Second, you can measure the ancilla qubits within the kernel and use the result to perform a correction operation. The documentation example on [building kernels](https://nvidia.github.io/cuda-quantum/latest/using/examples/building_kernels.html) and [mid-circuit measurement](https://nvidia.github.io/cuda-quantum/latest/examples/python/measuring_kernels.html) may be helpful for this exercise.\n", + "\n", + "Try to code all the steps and then sample the kernel to determine the logical error rate.\n", + "\n", + "
" ] }, { "cell_type": "code", - "execution_count": 19, + "execution_count": null, "id": "8d662290-b51c-4009-8e7c-d9d5f7df40ad", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 1, 1]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[0, 0, 0, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[0, 0, 0, 1, 0]\n", - "[1, 1, 1, 0, 1]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 1, 1]\n", - "[1, 1, 1, 1, 1]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 1, 1]\n", - "[1, 1, 1, 0, 1]\n", - "[1, 1, 1, 0, 1]\n", - "[1, 1, 1, 1, 1]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 1, 1]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 1]\n", - "[1, 1, 1, 1, 1]\n", - "[1, 1, 1, 0, 1]\n", - "[0, 0, 0, 0, 1]\n", - "[0, 0, 0, 1, 0]\n", - "[1, 1, 1, 1, 1]\n", - "[0, 0, 0, 1, 1]\n", - "[1, 1, 1, 1, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 1, 1]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 1, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 1, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 1, 1]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 1, 0]\n", - "[1, 1, 1, 1, 1]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 1]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 1, 1]\n", - "[1, 1, 1, 0, 1]\n", - "[1, 1, 1, 1, 1]\n", - "[1, 1, 1, 1, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[0, 0, 0, 0, 1]\n", - "[1, 1, 1, 0, 0]\n", - "[0, 0, 0, 1, 0]\n", - "[1, 1, 1, 1, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 1, 0]\n", - "[1, 1, 1, 1, 0]\n", - "[1, 1, 1, 1, 0]\n", - "[1, 1, 1, 1, 1]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[0, 0, 0, 1, 1]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 1, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 1]\n", - "[1, 1, 1, 1, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 1, 0]\n", - "[1, 1, 1, 1, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[0, 
0, 0, 1, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 1, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 1, 0]\n", - "[1, 1, 1, 0, 1]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 1, 1]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 1, 0]\n", - "[0, 0, 0, 0, 1]\n", - "[1, 1, 1, 1, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 1]\n", - "[1, 1, 1, 0, 0]\n", - "[1, 1, 1, 0, 0]\n", - "Logical Error Rate: 0.09999999999999998\n" - ] - } - ], + "outputs": [], "source": [ + "# EXERCISE 3\n", + "\n", "cudaq.set_target('density-matrix-cpu')\n", "\n", "\n", @@ -1299,15 +785,22 @@ "source": [ "## Conclusion\n", "\n", - "You now have a basic understanding of EC and QEC. The next lab will explore stabilizers in more detail and equip you to code two of the most famous and fundamental QEC codes: the Shor code and the Steane code." + "You now have a basic understanding of error correction (EC) and quantum error correction (QEC). You explored the five aspects common to all EC procedures, implemented the classical repetition code and Hamming code, identified the unique challenges of QEC, and built the three-qubit quantum repetition code in CUDA-Q. The next lab will explore **stabilizers** in more detail and equip you to code two of the most famous and fundamental QEC codes: the Shor code and the Steane code. Future labs will cover more advanced topics like decoding and other specific QEC codes." 
+ ] + }, + { + "cell_type": "markdown", + "id": "ca54ebb72627406f", + "metadata": {}, + "source": [ + "**Related Notebooks:**\n", + "* [QEC 101 Lab 2 — Stabilizers](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) — introduces the stabilizer formalism for quantum error correction\n", + "* [Quick Start to Quantum — Notebook 1](https://github.com/NVIDIA/cuda-q-academic/blob/main/quick-start-to-quantum/01_quick_start_to_quantum.ipynb) — prerequisite notebook covering qubits, gates, and measurement in CUDA-Q\n", + "* [QEC 101 Lab 3 — Noisy Simulation](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/03_QEC_Noisy_Simulation.ipynb) — applies QEC codes with realistic noise models in CUDA-Q" ] } ], "metadata": { - "colab": { - "include_colab_link": true, - "provenance": [] - }, "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", @@ -1324,8 +817,23 @@ "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.12.3" + }, + "learning_goals": { + "cfqt_domain": "QCS", + "cfqt_subdomain": [ + "QCS.ERR", + "QCS.SW", + "QCS.ALG" + ], + "cfqt_proficiency": "A2", + "qist_competency_areas": [ + "quantum_error_correction", + "quantum_software", + "quantum_algorithms" + ], + "application_domain": "error_correction" } }, "nbformat": 4, "nbformat_minor": 5 -} +} \ No newline at end of file diff --git a/qec101/Solutions/02_QEC_Stabilizers_Solution.ipynb b/qec101/Solutions/02_QEC_Stabilizers_Solution.ipynb index 186bc1b..6cf45c7 100644 --- a/qec101/Solutions/02_QEC_Stabilizers_Solution.ipynb +++ b/qec101/Solutions/02_QEC_Stabilizers_Solution.ipynb @@ -2,11 +2,9 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "id": "eead95ce", - "metadata": { - "id": "eead95ce" - }, + "metadata": {}, "outputs": [], "source": [ "# SPDX-License-Identifier: Apache-2.0 AND CC-BY-NC-4.0\n", @@ -27,116 +25,110 @@ { "cell_type": "markdown", "id": "135f6e3e-e55f-4b5c-80cb-f8979bba94e0", 
- "metadata": { - "id": "135f6e3e-e55f-4b5c-80cb-f8979bba94e0" - }, + "metadata": {}, "source": [ - "# QEC 101\n", - "## Lab 2 - Stabilizers, the Shor code, and the Steane code\n", - "\n", - "$\n", - "\\renewcommand{\\ket}[1]{|{#1}\\rangle}\n", - "\\renewcommand{\\bra}[1]{\\langle{#1}|}\n", - "$\n", - "This lab introduces the stabilizer formalism, a powerful tool for working with more sophisticated quantum error correction (QEC) codes. After a brief introduction to the theory, the lab will walk through the Shor and Steane codes with interactive coding exercises. \n", + "# QEC 101 — Lab 2: Stabilizers, the Shor Code, and the Steane Code — Solutions\n", + "$\\renewcommand{\\ket}[1]{|{#1}\\rangle}\\renewcommand{\\bra}[1]{\\langle{#1}|}$\n", + "\n", + "---\n", + "\n", + "**What You Will Do:**\n", + "* Define stabilizers and explain their role in quantum error correction\n", + "* Implement the Steane code encoding and syndrome measurement in CUDA-Q\n", + "* Perform a code capacity analysis on the Steane code using CUDA-Q QEC\n", + "* Implement the Shor code encoding, error detection, and correction in CUDA-Q\n", + "\n", + "**Prerequisites:**\n", + "* Python and Jupyter familiarity\n", + "* Basic knowledge of quantum computing (qubits, gates, measurement)\n", + "* Completion of QEC 101 Lab 1 (classical and quantum repetition codes, Hamming code)\n", + "* Familiarity with the Pauli matrices and tensor products\n", + "\n", + "**Key Terminology:**\n", + "* Stabilizer\n", + "* Codespace\n", + "* Logical Operator\n", + "* Syndrome\n", + "* CSS Code (Calderbank-Shor-Steane)\n", + "* Steane Code\n", + "* Shor Code\n", + "* Pauli Group\n", + "* Code Capacity\n", + "* Color Code\n", + "\n", + "**CUDA-Q Syntax:**\n", + "* [`@cudaq.kernel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.kernel) — defines a quantum kernel function\n", + "* [`cudaq.qvector`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.qvector) — 
allocates a register of qubits\n", + "* [`cudaq.sample`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.sample) — samples measurement outcomes from a kernel\n", + "* [`cudaq.set_target`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.set_target) — selects simulation or hardware backend\n", + "* [`cudaq_qec`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — CUDA-Q Quantum Error Correction library\n", + "\n", + "This lab introduces the stabilizer formalism, a powerful tool for working with more sophisticated quantum error correction (QEC) codes. After a brief introduction to the theory, the lab will walk through the Shor and Steane codes with interactive coding exercises.\n", "\n", "This lab was motivated by content from \"[Quantum Error Correction: an Introductory Guide](https://arxiv.org/abs/1907.11157)\" and \"[Quantum Error Correction for Dummies](https://arxiv.org/abs/2304.08678)\", both excellent resources we refer readers to for additional detail. For a more technical introduction, see chapter 10 of \"[Quantum Computation and Quantum Information](https://books.google.com/books?hl=en&lr=&id=-s4DEy7o-a0C&oi=fnd&pg=PR17&dq=quantum+computation+and+quantum+information&ots=NJ4KdqnzZt&sig=uKTETo5LLjWB9F_PV_zf0Sw3bvk#v=onepage&q=quantum%20computation%20and%20quantum%20information&f=false)\" or the [PhD thesis](https://arxiv.org/abs/quant-ph/9705052) where the concept of stabilizer codes was introduced.\n", "\n", - "This is the second lab in the QEC series. 
If you are not familiar with the basics of classical or quantum error correction (EC), please complete the first lab in this series.\n", - "\n", - "The list below outlines what you'll be doing in each section of this lab:\n", - "\n", - "* **2.1** Define stabilizers and why they are important\n", - "* **2.2** Interactively Learn and Code the Steane Code in CUDA-Q.\n", - "* **2.3** Perform Steane Code Capacity Analysis with CUDA-QX\n", - "* **2.4** Interactively Learn and Code the Shor Code in CUDA-Q.\n", - "\n", - "\n", - "\n", - "Lab 2 Learning Objectives:\n", - "* Understand what a stabilizer is, how it works, and why it is important\n", - "* Understand the approach of the Shor and Steane codes\n", - "* Understand logical operators\n", - "* Code the Shor and Steane codes in CUDA-Q" + "This is the second lab in the QEC series. If you are not familiar with the basics of classical or quantum error correction (EC), please complete the first lab in this series." ] }, { "cell_type": "markdown", - "id": "b5046650", - "metadata": { - "id": "b5046650" - }, + "id": "59346183e90b45b2", + "metadata": {}, "source": [ - "Execute the cells below to load all the necessary packages for this lab." + "
\n", + "\n", + "**⚡ GPU Required:** Parts of this notebook require a GPU.\n", + "\n", + "
" ] }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "id": "910c540e", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Defaulting to user installation because normal site-packages is not writeable\n", - "Requirement already satisfied: cudaq_qec in /home/cudaq/.local/lib/python3.12/site-packages (0.5.0)\n", - "Requirement already satisfied: cudaq-qec-cu12==0.5.0 in /home/cudaq/.local/lib/python3.12/site-packages (from cudaq_qec) (0.5.0)\n", - "Requirement already satisfied: cuda-quantum-cu12>=0.13 in /home/cudaq/.local/lib/python3.12/site-packages (from cudaq-qec-cu12==0.5.0->cudaq_qec) (0.13.0)\n", - "Requirement already satisfied: astpretty~=3.0 in /home/cudaq/.local/lib/python3.12/site-packages (from cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (3.0.0)\n", - "Requirement already satisfied: custatevec-cu12~=1.10 in /usr/local/lib/python3.12/dist-packages (from cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (1.10.1)\n", - "Requirement already satisfied: cutensornet-cu12~=2.9 in /usr/local/lib/python3.12/dist-packages (from cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (2.9.1)\n", - "Requirement already satisfied: cudensitymat-cu12~=0.3 in /usr/local/lib/python3.12/dist-packages (from cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (0.3.1)\n", - "Requirement already satisfied: numpy>=1.24 in /usr/local/lib/python3.12/dist-packages (from cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (1.26.4)\n", - "Requirement already satisfied: scipy>=1.10.1 in /usr/local/lib/python3.12/dist-packages (from cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (1.12.0)\n", - "Requirement already satisfied: requests>=2.31 in /usr/local/lib/python3.12/dist-packages (from cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (2.32.5)\n", - "Requirement already satisfied: nvidia-cublas-cu12~=12.0 in 
/home/cudaq/.local/lib/python3.12/site-packages (from cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (12.9.1.4)\n", - "Requirement already satisfied: nvidia-curand-cu12~=10.3 in /usr/local/lib/python3.12/dist-packages (from cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (10.3.10.19)\n", - "Requirement already satisfied: nvidia-cusparse-cu12~=12.5 in /home/cudaq/.local/lib/python3.12/site-packages (from cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (12.5.10.65)\n", - "Requirement already satisfied: nvidia-cuda-runtime-cu12~=12.0 in /usr/local/lib/python3.12/dist-packages (from cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (12.9.79)\n", - "Requirement already satisfied: nvidia-cusolver-cu12~=11.4 in /home/cudaq/.local/lib/python3.12/site-packages (from cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (11.7.5.82)\n", - "Requirement already satisfied: nvidia-cuda-nvrtc-cu12~=12.0 in /usr/local/lib/python3.12/dist-packages (from cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (12.9.86)\n", - "Requirement already satisfied: cupy-cuda12x~=13.6.0 in /home/cudaq/.local/lib/python3.12/site-packages (from cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (13.6.0)\n", - "Requirement already satisfied: cutensor-cu12<3,>=2.3.1 in /usr/local/lib/python3.12/dist-packages (from cudensitymat-cu12~=0.3->cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (2.3.1)\n", - "Requirement already satisfied: fastrlock>=0.5 in /usr/local/lib/python3.12/dist-packages (from cupy-cuda12x~=13.6.0->cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (0.8.3)\n", - "Requirement already satisfied: nvidia-nvjitlink-cu12 in /home/cudaq/.local/lib/python3.12/site-packages (from nvidia-cusolver-cu12~=11.4->cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (12.9.86)\n", - "Requirement already satisfied: charset_normalizer<4,>=2 in /usr/local/lib/python3.12/dist-packages (from 
requests>=2.31->cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (3.4.4)\n", - "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.12/dist-packages (from requests>=2.31->cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (3.11)\n", - "Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.12/dist-packages (from requests>=2.31->cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (2.5.0)\n", - "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.12/dist-packages (from requests>=2.31->cuda-quantum-cu12>=0.13->cudaq-qec-cu12==0.5.0->cudaq_qec) (2025.11.12)\n" - ] - } - ], + "outputs": [], "source": [ - "## Instructions for Google Colab. You can ignore this cell if you have cuda-q set up and have \n", - "# all the dependent files on your system\n", - "# Uncomment the lines below and execute the cell to install cuda-q\n", - "\n", - "#!pip install cudaq\n", - "!pip install cudaq_qec\n", + "## Instructions for Google Colab. You can ignore this cell if you have CUDA-Q\n", + "## set up locally with all required files on your system.\n", + "## Uncomment the lines below and execute this cell to install CUDA-Q.\n", "\n", + "#!pip install cudaq -q\n", + "#\n", "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", "#!unzip -q main.zip\n", - "#!mv cuda-q-academic-main/qec101/Images ./Images\n" + "#!mv cuda-q-academic-main/qec101/Images ./Images" + ] + }, + { + "cell_type": "markdown", + "id": "b5046650", + "metadata": {}, + "source": [ + "> **Note:** Run the cell below to import all required packages.\n", + "> If you installed packages above, restart the kernel first\n", + "> (**Runtime → Restart session** in Colab, or **Kernel → Restart** in Jupyter)." 
] }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "id": "644b9c82", - "metadata": { - "id": "644b9c82" - }, + "metadata": {}, "outputs": [], "source": [ - "import cudaq\n", - "from cudaq import spin\n", - "from cudaq.qis import *\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", "from typing import List\n", "\n", + "import cudaq\n", + "from cudaq import spin\n", + "from cudaq.qis import *\n", + "\n", + "## To install cudaq-qec (if not already installed), uncomment and run:\n", + "## !pip install cudaq-qec -q\n", + "import cudaq_qec as qec\n", + "\n", "cudaq.set_target('qpp-cpu')" ] }, @@ -145,6 +137,8 @@ "id": "b3e18790-87ab-4180-b349-a4210abca136", "metadata": {}, "source": [ + "---\n", + "\n", "## 2.1 Stabilizers and Logical Operators" ] }, @@ -160,7 +154,7 @@ "An operation $s$ acting on a state $\\ket{\\psi}$ is said to be a stabilizer of the state if the state is a +1 eigenstate of the operation $s \\ket{\\psi} = +1 \\ket{\\psi}$. The high-level intuiton here is that if small errors have accumulated in a logically encoded state, the action of applying this stabilizer is to project the state back to a perfectly error-free state, and we measure $+1$. Sometimes larger errors occur, and we do not measure $+1$, which informs us something has gone wrong.\n", "\n", "\n", - "In lab 1, the codespace was defined by the set of basis codewords, such as $\\ket{000}$ and $\\ket{111}$ for the 3-qubit quantum repetition code. In that lab the codewords were provided to you for each code, but in a stabilizer code, we can equivalently define the codespace by providing the stabilizers which stabilize each basis codeword. In practice, this process of defining a code by the stabilizers is much more efficient and scalable as the codes grow larger.\n", + "In lab 1, the **codespace** was defined by the set of basis codewords, such as $\\ket{000}$ and $\\ket{111}$ for the 3-qubit quantum repetition code. 
In that lab the codewords were provided to you for each code, but in a stabilizer code, we can equivalently define the codespace by providing the stabilizers which stabilize each basis codeword. In practice, this process of defining a code by the stabilizers is much more efficient and scalable as the codes grow larger.\n", "\n", "The codespace $C$ can be defined as formed by all $\\ket{\\psi}$ such that $s_i\\ket{\\psi} = +1 \\ket{\\psi}$ for each $s_i\\in S$, where these $s_i$ are stabilizers which form a group $S$ (note: in some texts this group $S$ is called the stabilizer, not the elements). That is, the codespace is the joint +1 eigenspace fixed by the stabilizers. \n", "\n", @@ -192,7 +186,7 @@ "\n", "Three key properties for $[[n,k,d]]$ stabilizers:\n", "\n", - "1. Here we consider only to Pauli product stabilizers, that is, $s_i$ needs to be a Pauli-group element. The n-qubit Pauli group $G_n$ is a special group constructed from the Pauli matrices:\n", + "1. Here we consider only Pauli product stabilizers, that is, $s_i$ needs to be a Pauli-group element. The n-qubit **Pauli group** $G_n$ is a special group constructed from the Pauli matrices:\n", "\n", " $$ I = \\begin{pmatrix} 1 & 0 \\\\ 0 & 1 \\end{pmatrix}, \\quad X = \\begin{pmatrix} 0 & 1 \\\\ 1 & 0 \\end{pmatrix}, \\quad Y = \\begin{pmatrix} 0 & -i \\\\ i & 0 \\end{pmatrix}, \\quad Z = \\begin{pmatrix} 1 & 0 \\\\ 0 & -1 \\end{pmatrix}$$\n", "\n", @@ -226,9 +220,11 @@ "id": "458cbf66-7cdf-4aaf-83ec-8597d941ab3e", "metadata": {}, "source": [ + "---\n", + "\n", "## 2.2 The Steane Code\n", "\n", - "The Steane code is a famous QEC code that is the quantum version of the [7,4,3] Hamming code introduced in the first QEC lab. One immediate difference is that the Steane code encodes a single logical qubit making it a [[7,1,3]] code.\n", + "The **Steane code** is a famous QEC code that is the quantum version of the [7,4,3] Hamming code introduced in the first QEC lab. 
One immediate difference is that the Steane code encodes a single logical qubit making it a [[7,1,3]] code.\n", "\n", "Remember, that the Hamming code adds additional parity bits that help \"triangulate\" where an error occurred. In the lab 1 exercises you constructed the generator matrix $G$ and used it to produce the logical codewords in the classical Hamming code. For example, $b=0110$ was encoded as\n", "\n", @@ -249,7 +245,7 @@ "\\end{bmatrix}\n", "$$\n", "\n", - "Any logically encoded state, $c$, could then be multiplied by the parity check matrix ($H$) to determine if any syndromes were triggered or not. \n", + "Any logically encoded state, $c$, could then be multiplied by the parity check matrix ($H$) to determine if any **syndromes** were triggered or not. \n", "\n", "\n", "$$\n", @@ -310,43 +306,34 @@ "\n", "The encoding circuit to produce the logical codewords is shown below, and is based off the constraints imposed by the parity check matrix. \n", "\n", - "\"Drawing\"\n", + "\"Quantum\n", + "\n", "\n", "\n", "\n", + "
\n", "\n", - "
\n", - "

Exercise 1 - The Steane Code:

\n", - "

\n", - "In the cell below, build a CUDA-Q kernel to encode the logical 0 state using the Steane code. Sample the circuit to prove that you indeed created the appropriate superposition. In the cells following, complete the entire Steane code by adding stabilizer checks and code to measure the logical state. Complete the numbered tasks as well to confirm your code works as expected. \n", - "

\n", - "
\n" + "**Exercise 1:**\n", + "\n", + "In the cell below, build a CUDA-Q kernel to encode the logical 0 state using the Steane code. Sample the circuit to prove that you indeed created the appropriate superposition. In the cells following, complete the entire Steane code by adding stabilizer checks and code to measure the logical state. Complete the numbered tasks as well to confirm your code works as expected.\n", + "\n", + "
\n", + "" ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "id": "42d711f6-f83b-46a7-8f8a-e1cb596e7f6b", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{ 0000000000:122 0001111000:121 0110110000:116 0111001000:120 1010101000:147 1011010000:121 1100011000:119 1101100000:134 }\n", - "\n" - ] - } - ], + "outputs": [], "source": [ + "# EXERCISE 1\n", "@cudaq.kernel\n", "def steane_code():\n", - " \"\"\"Prepares a kernel for the Steane Code\n", - " Returns\n", - " -------\n", - " \"\"\" \n", + " \"\"\"Prepares a kernel for the Steane Code\"\"\"\n", "\n", - " #Initialize Registers\n", + " # Initialize Registers\n", " data_qubits = cudaq.qvector(7)\n", " ancilla_qubits = cudaq.qvector(3)\n", "\n", @@ -355,27 +342,25 @@ " h(data_qubits[5])\n", " h(data_qubits[6])\n", "\n", - " #Entangle states to enforce constraints of parity check matrix\n", - "\n", - " x.ctrl(data_qubits[0],data_qubits[1])\n", - " x.ctrl(data_qubits[0],data_qubits[2])\n", - "\n", - " x.ctrl(data_qubits[4],data_qubits[0])\n", - " x.ctrl(data_qubits[4],data_qubits[1])\n", - " x.ctrl(data_qubits[4],data_qubits[3])\n", + " # Entangle states to enforce constraints of parity check matrix\n", + " x.ctrl(data_qubits[0], data_qubits[1])\n", + " x.ctrl(data_qubits[0], data_qubits[2])\n", "\n", - " x.ctrl(data_qubits[5],data_qubits[0])\n", - " x.ctrl(data_qubits[5],data_qubits[2])\n", - " x.ctrl(data_qubits[5],data_qubits[3])\n", + " x.ctrl(data_qubits[4], data_qubits[0])\n", + " x.ctrl(data_qubits[4], data_qubits[1])\n", + " x.ctrl(data_qubits[4], data_qubits[3])\n", "\n", - " x.ctrl(data_qubits[6],data_qubits[1])\n", - " x.ctrl(data_qubits[6],data_qubits[2])\n", - " x.ctrl(data_qubits[6],data_qubits[3])\n", + " x.ctrl(data_qubits[5], data_qubits[0])\n", + " x.ctrl(data_qubits[5], data_qubits[2])\n", + " x.ctrl(data_qubits[5], data_qubits[3])\n", "\n", + " x.ctrl(data_qubits[6], data_qubits[1])\n", + " x.ctrl(data_qubits[6], 
data_qubits[2])\n", + " x.ctrl(data_qubits[6], data_qubits[3])\n", "\n", "\n", "results = cudaq.sample(steane_code, shots_count=1000)\n", - "print(results) " + "print(results)" ] }, { @@ -392,7 +377,7 @@ "The syndromes can be visually interpreted by putting a colored X on the syndromes that are flagged. Each coloring of this graph uniquely corresponds to an error on a specific qubit which is why the Steane code is often referred to as a **color code**.\n", "\n", "\n", - "\"Drawing\"\n", + "\"Diagram\n", "\n", "You are now ready to code the rest of the Steane code. After encoding, introduce an $X$ error and $Z$ error on the qubits of your choice. Try performing the $X$ and $Z$ syndrome measurements using the same three ancilla qubits and resetting them in between. Make your code such that you can measure the data qubits and confirm the state of the logical qubit. \n", "\n", @@ -401,31 +386,12 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "id": "c1194da2-bcdc-4c37-9b4a-21f5f24a4790", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[[0, 0, 0, 1, 1, 1, 1], [1, 1, 0, 0, 0, 1, 1], [0, 1, 1, 1, 0, 0, 1], [1, 0, 1, 0, 1, 0, 1], [1, 1, 0, 0, 0, 1, 1], [1, 1, 0, 0, 0, 1, 1], [0, 1, 1, 1, 0, 0, 1], [1, 1, 0, 1, 1, 0, 0], [1, 1, 0, 0, 0, 1, 1], [1, 0, 1, 1, 0, 1, 0], [1, 0, 1, 0, 1, 0, 1], [0, 0, 0, 1, 1, 1, 1], [0, 1, 1, 0, 1, 1, 0], [0, 0, 0, 0, 0, 0, 0], [0, 1, 1, 0, 1, 1, 0], [0, 1, 1, 0, 1, 1, 0], [1, 0, 1, 0, 1, 0, 1], [1, 1, 0, 0, 0, 1, 1], [1, 0, 1, 0, 1, 0, 1], [0, 1, 1, 0, 1, 1, 0], [0, 1, 1, 0, 1, 1, 0], [1, 1, 0, 0, 0, 1, 1], [0, 1, 1, 1, 0, 0, 1], [1, 0, 1, 0, 1, 0, 1], [1, 1, 0, 0, 0, 1, 1], [1, 0, 1, 0, 1, 0, 1], [1, 1, 0, 0, 0, 1, 1], [1, 1, 0, 1, 1, 0, 0], [1, 1, 0, 1, 1, 0, 0], [0, 0, 0, 1, 1, 1, 1], [1, 1, 0, 0, 0, 1, 1], [0, 0, 0, 0, 0, 0, 0], [1, 1, 0, 1, 1, 0, 0], [1, 1, 0, 0, 0, 1, 1], [0, 0, 0, 1, 1, 1, 1], [1, 0, 1, 1, 0, 1, 0], [0, 0, 0, 1, 1, 1, 1], [1, 0, 1, 1, 0, 1, 
0], [1, 1, 0, 1, 1, 0, 0], [1, 0, 1, 0, 1, 0, 1], [0, 0, 0, 1, 1, 1, 1], [0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 1, 1, 1, 1], [0, 0, 0, 0, 0, 0, 0], [1, 0, 1, 0, 1, 0, 1], [0, 0, 0, 1, 1, 1, 1], [1, 1, 0, 0, 0, 1, 1], [0, 1, 1, 0, 1, 1, 0], [1, 1, 0, 1, 1, 0, 0], [0, 1, 1, 0, 1, 1, 0], [0, 1, 1, 0, 1, 1, 0], [0, 1, 1, 1, 0, 0, 1], [0, 0, 0, 0, 0, 0, 0], [1, 0, 1, 1, 0, 1, 0], [0, 0, 0, 1, 1, 1, 1], [0, 0, 0, 0, 0, 0, 0], [1, 0, 1, 1, 0, 1, 0], [1, 1, 0, 0, 0, 1, 1], [0, 0, 0, 0, 0, 0, 0], [1, 1, 0, 0, 0, 1, 1], [0, 1, 1, 0, 1, 1, 0], [1, 1, 0, 0, 0, 1, 1], [1, 0, 1, 1, 0, 1, 0], [0, 1, 1, 0, 1, 1, 0], [1, 1, 0, 1, 1, 0, 0], [1, 1, 0, 0, 0, 1, 1], [1, 1, 0, 1, 1, 0, 0], [1, 0, 1, 1, 0, 1, 0], [1, 1, 0, 0, 0, 1, 1], [1, 1, 0, 0, 0, 1, 1], [1, 0, 1, 0, 1, 0, 1], [1, 0, 1, 0, 1, 0, 1], [0, 1, 1, 0, 1, 1, 0], [1, 1, 0, 1, 1, 0, 0], [0, 0, 0, 1, 1, 1, 1], [0, 0, 0, 0, 0, 0, 0], [1, 1, 0, 0, 0, 1, 1], [0, 1, 1, 0, 1, 1, 0], [0, 0, 0, 1, 1, 1, 1], [0, 0, 0, 0, 0, 0, 0], [1, 1, 0, 1, 1, 0, 0], [1, 0, 1, 1, 0, 1, 0], [1, 0, 1, 0, 1, 0, 1], [1, 0, 1, 1, 0, 1, 0], [0, 1, 1, 0, 1, 1, 0], [1, 0, 1, 0, 1, 0, 1], [1, 1, 0, 1, 1, 0, 0], [1, 0, 1, 0, 1, 0, 1], [0, 0, 0, 1, 1, 1, 1], [1, 1, 0, 0, 0, 1, 1], [0, 1, 1, 1, 0, 0, 1], [0, 1, 1, 1, 0, 0, 1], [0, 0, 0, 1, 1, 1, 1], [0, 0, 0, 0, 0, 0, 0], [1, 0, 1, 1, 0, 1, 0], [0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 1, 1, 1, 1], [0, 1, 1, 1, 0, 0, 1], [1, 0, 1, 0, 1, 0, 1], [0, 0, 0, 0, 0, 0, 0]]\n", - "Confirming Logical Zero State\n", - "0: 100\n", - "1: 0\n", - "Testing if X0X1X4 is a logical operator\n", - "0: 58\n", - "1: 42\n", - "It is not a valid logical operator\n", - "Testing if X0X4X5 is a logical operator\n", - "0: 100\n", - "1: 0\n", - "It is a valid logical operator\n" - ] - } - ], + "outputs": [], "source": [ - "import cudaq\n", + "# EXERCISE 1\n", "@cudaq.kernel\n", "def steane_code() -> list[int]:\n", " \"\"\"Prepares a kernel for the Steane Code\n", @@ -616,7 +582,7 @@ "source": [ "Now, test your code! 
Just measure in the $Z$ basis as the same procedure could be performed with the $X$ basis. \n", "\n", - "1. Try adding single $X$ errors, guess which stabilizers should flag and confrm they do.\n", + "1. Try adding single $X$ errors, guess which stabilizers should flag and confirm they do.\n", "2. Add two errors. Confirm the code cannot correct the errors and a logical bitflip occurs.\n", "3. It turns out there are alternate choices for $\\bar{X}$. Modify your counting code above and test if $X_0X_1X_4$ or $X_0X_4X_5$ are valid choices for $\\bar{X}$. " ] @@ -626,38 +592,31 @@ "id": "c9245417-c131-4d5a-9b05-cfd5940b0464", "metadata": {}, "source": [ + "---\n", + "\n", "## 2.3 Steane Code Capacity Analysis with CUDA-Q QEC\n", "\n", "\n", "[CUDA-QX](https://developer.nvidia.com/cuda-qx) is set of libraries that enable easy acceleration of quantum application development. One of the libraries, [CUDA-Q QEC](https://nvidia.github.io/cudaqx/components/qec/introduction.html), is focused on error correction and can help expedite much of the work done above. This final section will demonstrate how to run a code capacity memory experiment with the Steane code.\n", "\n", - "A memory experiment is a procedure to test how well a protocol can preserve quantum information. Such an experiment can help assess the quality of a QEC code but is often limited by assumptions that deviate from a realistic noise model. One such example is a code capacity experiment. A code capacity procedure determines the logical error rate of a QEC code under strict assumptions such as perfect gates or measurement. Code capacity experiments can help put an upper bound on a procedure's threshold and is therefore a good starting place to compare new codes.\n", + "A memory experiment is a procedure to test how well a protocol can preserve quantum information. Such an experiment can help assess the quality of a QEC code but is often limited by assumptions that deviate from a realistic noise model. 
One such example is a code capacity experiment. A **code capacity** procedure determines the logical error rate of a QEC code under strict assumptions such as perfect gates or measurement. Code capacity experiments can help put an upper bound on a procedure's threshold and is therefore a good starting place to compare new codes.\n", "\n", "The process is outlined in the diagram below. Assume the 0000000 bitstring is the baseline (no error). Bitflips are then randomly introduced and produce errors in the data vector to produce results like 0100010. If this were a real test on a physical quantum device, the data vector would not be known and a user could only proceed through the bottom path in the figure - performing syndrome extraction and then decoding the result to see if a logical flip occurred. In a code capacity experiment, the data vector with errors is known, so it can be used to directly compute if a logical state flip occurred or not. Dividing the number of times the actual (top path) and predicted (bottom path) results agree by the total number of rounds provides an estimate of the logical error rate for the code being tested. \n", "\n", "\n", - "\"Drawing\"\n", + "\"Flowchart\n", + "\n", + "\n", + "
\n", "\n", + "**Exercise 2:**\n", + "\n", + "CUDA-Q QEC allows researchers to streamline experiments like this with just a few lines of code. Try running the cells below to compute the logical error rate of the Steane code under code capacity assumptions given probability of error $p$.\n", "\n", - "
\n", - "

Exercise 2 - CUDA-Q QEC Code Capacity Experiment:

\n", - "

\n", - "CUDA-Q QEC allows researchers to streamline experiments like this with just a few lines of code. Try running the cells below to compute the logical error rate of the Steane code under code capacity assumptions given probability of error $p$.\n", - "

\n", "
\n", "\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "60073994-dd9c-496f-93a0-feb1ec6fcb39", - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np \n", - "import cudaq_qec as qec " + "\n", + "" ] }, { @@ -670,7 +629,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "id": "928c1080-ebcd-463c-93e3-423c0fde59fc", "metadata": {}, "outputs": [], @@ -688,7 +647,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "id": "3ede6c69-47d6-4ce4-9810-b92693492ead", "metadata": {}, "outputs": [], @@ -709,7 +668,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "id": "18b826e5-8306-4e55-a3ba-c7f4fbc534c1", "metadata": {}, "outputs": [], @@ -727,117 +686,10 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "id": "63688b17-420f-4275-abc4-0adad2df0edf", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Data Qubits: [1 0 0 0 0 0 0] Syndromes: [1 0 0]\n", - "Data Qubits: [1 0 0 0 0 0 0] Syndromes: [1 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 1 0 0] Syndromes: [0 1 0]\n", - "Data Qubits: [0 0 1 0 0 0 0] Syndromes: [1 1 1]\n", - "Data Qubits: [0 0 0 0 1 1 0] Syndromes: [0 0 1]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 1 0 0 0 0] Syndromes: [1 1 1]\n", - "Data Qubits: [0 0 0 0 1 0 0] Syndromes: [0 1 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 1] Syndromes: [0 0 1]\n", - "Data Qubits: [1 0 0 0 0 0 0] Syndromes: [1 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 1 0 0 0 0 0] Syndromes: [1 1 0]\n", - "Data Qubits: [0 0 1 0 0 0 0] Syndromes: [1 1 1]\n", - "Data Qubits: [0 0 0 0 0 1 0] Syndromes: [0 1 1]\n", - "Data Qubits: [0 0 0 0 1 0 0] Syndromes: [0 1 
0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 1 0 0 0 0] Syndromes: [1 1 1]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 1 1 0 0 1] Syndromes: [0 1 1]\n", - "Data Qubits: [0 0 1 0 0 0 0] Syndromes: [1 1 1]\n", - "Data Qubits: [1 0 0 0 0 0 0] Syndromes: [1 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 1] Syndromes: [0 0 1]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 1 0 0] Syndromes: [0 1 0]\n", - "Data Qubits: [0 0 0 1 0 0 0] Syndromes: [1 0 1]\n", - "Data Qubits: [0 0 1 0 0 0 1] Syndromes: [1 1 0]\n", - "Data Qubits: [0 0 0 0 0 0 1] Syndromes: [0 0 1]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [1 0 0 0 1 1 0] Syndromes: [1 0 1]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 1 0 0 0 0 0] Syndromes: [1 1 0]\n", - "Data Qubits: [0 0 1 0 0 0 1] Syndromes: [1 1 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 1 0] Syndromes: [0 1 1]\n", - "Data Qubits: [0 0 0 0 0 0 1] Syndromes: [0 0 1]\n", - "Data Qubits: [0 1 0 1 1 0 0] Syndromes: [0 0 1]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 1 0 1 0 0 0] Syndromes: [0 1 1]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 1 0 1 0 0] Syndromes: [1 0 1]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 1 0 0 0 0 1] Syndromes: [1 1 1]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 1 0] Syndromes: [0 1 1]\n", - "Data Qubits: [0 0 1 0 0 0 0] Syndromes: [1 1 1]\n", - "Data Qubits: [0 0 0 0 0 1 0] Syndromes: [0 1 1]\n", - "Data 
Qubits: [0 0 0 0 0 1 1] Syndromes: [0 1 0]\n", - "Data Qubits: [1 0 0 0 0 0 0] Syndromes: [1 0 0]\n", - "Data Qubits: [1 0 0 0 0 0 0] Syndromes: [1 0 0]\n", - "Data Qubits: [0 1 0 0 0 0 0] Syndromes: [1 1 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 1 1 0 1 0] Syndromes: [0 0 1]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 1 0] Syndromes: [0 1 1]\n", - "Data Qubits: [0 0 0 0 1 0 0] Syndromes: [0 1 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 1 0 0 0 0 1] Syndromes: [1 1 1]\n", - "Data Qubits: [0 0 0 0 1 1 0] Syndromes: [0 0 1]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [1 1 1 0 0 0 0] Syndromes: [1 0 1]\n", - "Data Qubits: [0 0 0 1 0 0 0] Syndromes: [1 0 1]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 1 0 0 0 0] Syndromes: [1 1 1]\n", - "Data Qubits: [0 0 1 0 0 0 1] Syndromes: [1 1 0]\n", - "Data Qubits: [0 0 1 0 0 0 0] Syndromes: [1 1 1]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 1 0 0] Syndromes: [0 1 0]\n", - "Data Qubits: [0 1 0 1 0 0 0] Syndromes: [0 1 1]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 1 0 0 0 0] Syndromes: [1 1 1]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 1 0 1 0 0] Syndromes: [1 0 1]\n", - "Data Qubits: [0 0 0 0 0 1 0] Syndromes: [0 1 1]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 1 1 0 
0 1] Syndromes: [0 1 1]\n", - "Data Qubits: [1 0 0 0 0 0 0] Syndromes: [1 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 1 0 1 0] Syndromes: [1 1 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n", - "Data Qubits: [1 0 0 0 0 0 0] Syndromes: [1 0 0]\n", - "Data Qubits: [0 0 0 0 0 0 0] Syndromes: [0 0 0]\n" - ] - } - ], + "outputs": [], "source": [ "p = 0.1 # set a probability of a bit flip error occuring\n", "nShots = 100 # specify the number of shots\n", @@ -856,7 +708,7 @@ "\n", "If the experiment is repeated many times with different $p$ values, a plot can be generated like the one shown below. The purple line is the $y=x$ and corresponds to the case that the logical error rate is identical to the physical error rate. Anywhere the green line is below the purple line indicates that the Steane code was able to produce a logical error rate that is less than the physical error rate of the data qubits. When the green line is above the purple, the Steane code produced a worse logical error rate indicating that it would have been better to just use the data qubits and avoid the QEC procedure. The crossover point is an estimate for the code's threshold. Refining this estimate would require more sophisticated circuit level noise models that more accurately represent the performance of the Steane code under realistic conditions. \n", "\n", - "\"Drawing\"\n", + "\"Plot\n", "\n", "Though code capacity has much room to improve, it is a great example of the utility of CUDA-Q QEC and how simple procedures can be streamlined so users can focus on testing codes rather than coding up the details of each test." 
] @@ -866,9 +718,11 @@ "id": "29eefa9f-dc3b-4e68-b54e-aee51f15cc5f", "metadata": {}, "source": [ + "---\n", + "\n", "## 2.4 The Shor Code\n", "\n", - "The first QEC code was proposed by Peter Shor in 1995, known as the [Shor code]((https://journals.aps.org/pra/abstract/10.1103/PhysRevA.52.R2493)). The Shor code is a [[9,1,3]] code which uses 9 qubits to encode a single qubit, but can correct single $X$ or $Z$-type errors.\n", + "The first QEC code was proposed by Peter Shor in 1995, known as the **[Shor code](https://journals.aps.org/pra/abstract/10.1103/PhysRevA.52.R2493)**. The Shor code is a [[9,1,3]] code which uses 9 qubits to encode a single qubit, but can correct single $X$ or $Z$-type errors.\n", "\n", "\n", "The motivation for the code, is that the 3-qubit repetition code can correct bit flip errors but not phase flip errors. We can consider why this is by examining the encoded $\\ket{+}_L$ state, which looks like the following:\n", @@ -892,7 +746,8 @@ "\n", "This encoding of $\\psi = \\alpha \\ket{0} + \\beta \\ket{1}$ can be implemented with the following quantum circuit:\n", "\n", - "\"Drawing\"\n" + "\"Quantum\n", + "" ] }, { @@ -916,7 +771,8 @@ "Can you see what the logical operators need to be? \n", "\n", "\n", - "For a logical bit flip to occur ($\\bar{X}$) the phase of each block needs to change. This is accomplished by performing a $Z $ operation on one of the qubits in each block, thus $\\bar{X} = Z_1Z_4Z_7$ is a valid choice, though not the only choice as others like $\\bar{X} = Z_2Z_5Z_8$ or even $\\bar{X} = Z_1Z_2Z_3Z_4Z_5Z_6Z_7Z_8Z_9$ also work. Similarly, for $\\bar{Z}$ to take $\\ket{1}_L$ to $-\\ket{1}_L$ (and $\\ket{0}_L$ to itself) all of the bits need to flip, thus $\\bar{Z} = X_1X_2X_3X_4X_5X_6X_7X_8X_9$. 
The curious reader can confirm that the anticommutativity holds between these logical operators and that they commute with each stabilizer discussed below.\n" + "For a logical bit flip to occur ($\\bar{X}$) the phase of each block needs to change. This is accomplished by performing a $Z $ operation on one of the qubits in each block, thus $\\bar{X} = Z_1Z_4Z_7$ is a valid choice, though not the only choice as others like $\\bar{X} = Z_2Z_5Z_8$ or even $\\bar{X} = Z_1Z_2Z_3Z_4Z_5Z_6Z_7Z_8Z_9$ also work. Similarly, for $\\bar{Z}$ to take $\\ket{1}_L$ to $-\\ket{1}_L$ (and $\\ket{0}_L$ to itself) all of the bits need to flip, thus $\\bar{Z} = X_1X_2X_3X_4X_5X_6X_7X_8X_9$. The curious reader can confirm that the anticommutativity holds between these logical operators and that they commute with each stabilizer discussed below.\n", + "" ] }, { @@ -969,63 +825,28 @@ "\n", "\n", "\n", - "
\n", - "

Exercise 3 - The Shor Code:

\n", - "

\n", - "Now you have all of the backgound necessary to code the Shor code in CUDA-Q. Fill in the sections below to build up a kernel that performs Shor code encoding and syndrome checks. The kernel should be constructed such that you can apply errors and select mesurement in the $Z$ or $X$ basis. Complete the tasks listed below to ensure your code works. \n", - "

\n", + "
\n", + "\n", + "**Exercise 3:**\n", + "\n", + "Now you have all of the backgound necessary to code the Shor code in CUDA-Q. Fill in the sections below to build up a kernel that performs Shor code encoding and syndrome checks. The kernel should be constructed such that you can apply errors and select mesurement in the $Z$ or $X$ basis. Complete the tasks listed below to ensure your code works.\n", + "\n", "
\n", - "\n" + "\n", + "" ] }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "id": "4cb342c0", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{ \n", - " __global__ : { 000000101:1 }\n", - " s0 : { 0:1 }\n", - " s1 : { 0:1 }\n", - " s2 : { 0:1 }\n", - " s3 : { 0:1 }\n", - " s4 : { 0:1 }\n", - " s5 : { 0:1 }\n", - " s6 : { 0:1 }\n", - " s7 : { 0:1 }\n", - "}\n", - "\n", - "No Errors\n", - "Zeros: 100\n", - "Ones: 0\n", - "Zeros: 48\n", - "Ones: 52\n", - "X Errors\n", - "Zeros: 100\n", - "Ones: 0\n", - "Zeros: 50\n", - "Ones: 50\n", - "Z Errors\n", - "Zeros: 100\n", - "Ones: 0\n", - "Zeros: 50\n", - "Ones: 50\n" - ] - } - ], + "outputs": [], "source": [ - "import cudaq\n", - "import numpy as np\n", - "\n", + "# EXERCISE 3\n", "cudaq.set_target('nvidia')\n", "\n", "\n", - "\n", "@cudaq.kernel\n", "def shor_code(error_type: list[int], error_location: list[int], measure: int):\n", " \"\"\"Prepares a kernel for the Shor Code\n", @@ -1033,47 +854,41 @@ " Parameters\n", " -----------\n", " error_type: list[int]\n", - " a list where each element is an applied error designated as 1 = z or 2 = x\n", + " a list where each element is an applied error designated as 1 = x or 2 = z\n", " error_location: list[int]\n", " each element corresponds to the index of the qubit which the error occurs on\n", " measure: int\n", " Option to measure in the z basis (1) or the x basis (2)\n", + " \"\"\"\n", "\n", - " Returns\n", - " -------\n", - " cudaq.kernel\n", - " Kernel for running the Shor code\n", - " \"\"\" \n", + " data_qubits = cudaq.qvector(9)\n", + " ancilla_qubits = cudaq.qvector(8)\n", "\n", + " # Start Psi in the 0 state\n", "\n", - " data_qubits = cudaq.qvector(9) \n", - " ancilla_qubits = cudaq.qvector(8)\n", "\n", - " #Start Psi in the 0 state\n", - " \n", - " \n", - " #Start Psi in the plus state\n", - " #h(data_qubits[0])\n", - " \n", - " #Start with Psi in a state which will make a 75/25 
distribution in the Z and X basis.\n", - " #ry(np.pi/8,data_qubits[0])\n", - " #Encoding circuit\n", - " \n", - " cx(data_qubits[0],data_qubits[3])\n", - " cx(data_qubits[0],data_qubits[6])\n", + " # Start Psi in the plus state\n", + " # h(data_qubits[0])\n", + "\n", + " # Start with Psi in a state which will make a 75/25 distribution in the Z and X basis.\n", + " # ry(np.pi/8, data_qubits[0])\n", + "\n", + " # Encoding circuit\n", + " cx(data_qubits[0], data_qubits[3])\n", + " cx(data_qubits[0], data_qubits[6])\n", "\n", " h(data_qubits[0])\n", " h(data_qubits[3])\n", " h(data_qubits[6])\n", "\n", - " x.ctrl(data_qubits[0],data_qubits[1])\n", - " x.ctrl(data_qubits[0],data_qubits[2])\n", + " x.ctrl(data_qubits[0], data_qubits[1])\n", + " x.ctrl(data_qubits[0], data_qubits[2])\n", "\n", - " x.ctrl(data_qubits[3],data_qubits[4])\n", - " x.ctrl(data_qubits[3],data_qubits[5])\n", + " x.ctrl(data_qubits[3], data_qubits[4])\n", + " x.ctrl(data_qubits[3], data_qubits[5])\n", "\n", - " x.ctrl(data_qubits[6],data_qubits[7])\n", - " x.ctrl(data_qubits[6],data_qubits[8])\n", + " x.ctrl(data_qubits[6], data_qubits[7])\n", + " x.ctrl(data_qubits[6], data_qubits[8])\n", "\n", " # Apply optional errors\n", " for i in range(len(error_type)):\n", @@ -1081,10 +896,10 @@ " x(data_qubits[error_location[i]])\n", " if error_type[i] == 2:\n", " z(data_qubits[error_location[i]])\n", - " \n", + "\n", " # Prepare ancilla qubits\n", " h(ancilla_qubits)\n", - " \n", + "\n", " # Bit Flip Syndromes\n", " z.ctrl(ancilla_qubits[0], data_qubits[0])\n", " z.ctrl(ancilla_qubits[0], data_qubits[1])\n", @@ -1118,12 +933,11 @@ " x.ctrl(ancilla_qubits[7], data_qubits[6])\n", " x.ctrl(ancilla_qubits[7], data_qubits[7])\n", " x.ctrl(ancilla_qubits[7], data_qubits[8])\n", - " \n", - " \n", - " # Apply Hadamard gate to ancilla qubits \n", + "\n", + " # Apply Hadamard gate to ancilla qubits\n", " h(ancilla_qubits)\n", "\n", - " # Perform mid-circuit measurements to determine syndromes \n", + " # Perform 
mid-circuit measurements to determine syndromes\n", " s0 = mz(ancilla_qubits[0])\n", " s1 = mz(ancilla_qubits[1])\n", " s2 = mz(ancilla_qubits[2])\n", @@ -1168,11 +982,7 @@ " if measure == 2:\n", " h(data_qubits)\n", " h(data_qubits)\n", - " mz(data_qubits)\n", - " \n", - "\n", - "\n", - " \n" + " mz(data_qubits)" ] }, { @@ -1191,37 +1001,34 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "id": "72abcc5b-0879-428e-a3a1-d5e8a1fd3c0e", "metadata": {}, "outputs": [], "source": [ - "def post_process(results):\n", - " \"\"\"takes results from a CUDA-Q sample and prints the number of 0's and 1's by computing the parity of the bitstrings.\n", + "# EXERCISE 3\n", + "def post_process(results: cudaq.SampleResult) -> None:\n", + " \"\"\"Takes results from a CUDA-Q sample and prints the number of 0's and 1's by computing the parity of the bitstrings.\n", "\n", " Parameters\n", " -----------\n", " results: cudaq.SampleResult\n", - " A dictionary of the results from sampling the quantum state\n", + " A dictionary of the results from sampling the quantum state\n", " \"\"\"\n", " ones = 0\n", " zeros = 0\n", " for result in results:\n", - " \n", " count = results.count(result)\n", " bits = [int(bit) for bit in result]\n", - " \n", - " parity = sum(bits[0:9]) % 2 \n", - " \n", + " parity = sum(bits[0:9]) % 2\n", "\n", " if parity == 0:\n", - " zeros += 1*count\n", - " else: \n", - " ones += 1*count\n", - " \n", - " #print(results)\n", + " zeros += 1 * count\n", + " else:\n", + " ones += 1 * count\n", + "\n", " print(\"Zeros:\", zeros)\n", - " print(\"Ones:\",ones) " + " print(\"Ones:\", ones)" ] }, { @@ -1244,32 +1051,52 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "id": "8cb4d238-129b-4b47-8770-3db73147f976", "metadata": {}, "outputs": [], "source": [ - "print(cudaq.sample(shor_code,[], [],1 ,shots_count = 1))\n", + "# EXERCISE 3\n", + "print(cudaq.sample(shor_code, [], [], 1, shots_count=1))\n", "\n", 
"print(\"No Errors\")\n", - "post_process(cudaq.sample(shor_code,[], [],1 ,shots_count = 100))\n", - "post_process(cudaq.sample(shor_code,[], [],2 ,shots_count = 100))\n", + "post_process(cudaq.sample(shor_code, [], [], 1, shots_count=100))\n", + "post_process(cudaq.sample(shor_code, [], [], 2, shots_count=100))\n", "\n", "print(\"X Errors\")\n", - "post_process(cudaq.sample(shor_code,[1], [0],1 ,shots_count = 100))\n", - "post_process(cudaq.sample(shor_code,[1], [0],2 ,shots_count = 100))\n", + "post_process(cudaq.sample(shor_code, [1], [0], 1, shots_count=100))\n", + "post_process(cudaq.sample(shor_code, [1], [0], 2, shots_count=100))\n", "\n", "print(\"Z Errors\")\n", - "post_process(cudaq.sample(shor_code,[2], [0],1 ,shots_count = 100))\n", - "post_process(cudaq.sample(shor_code,[2], [0],2 ,shots_count = 100))" + "post_process(cudaq.sample(shor_code, [2], [0], 1, shots_count=100))\n", + "post_process(cudaq.sample(shor_code, [2], [0], 2, shots_count=100))" + ] + }, + { + "cell_type": "markdown", + "id": "130329ac73b74863", + "metadata": {}, + "source": [ + "## Conclusion\n", + "\n", + "In this lab, you explored the stabilizer formalism and applied it to implement two foundational quantum error correction codes in CUDA-Q. You learned how stabilizers define a codespace through their joint +1 eigenspace, eliminating the need to explicitly enumerate basis codewords. You implemented the Steane code — a [[7,1,3]] CSS code — and verified its syndrome extraction for both $X$ and $Z$ errors using ancilla qubits with mid-circuit measurement. Using CUDA-Q QEC, you performed a code capacity analysis to estimate the Steane code's error correction threshold. Finally, you built the Shor code — a [[9,1,3]] concatenated code — and explored how it independently corrects bit flip and phase flip errors by leveraging two layers of repetition codes.\n", + "\n", + "In the next lab, you will explore noisy simulation of QEC codes using CUDA-Q's noise modeling capabilities." 
+ ] + }, + { + "cell_type": "markdown", + "id": "436620bc88954196", + "metadata": {}, + "source": [ + "**Related Notebooks:**\n", + "* [QEC 101 — Lab 3: Noisy Simulation](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/03_QEC_Noisy_Simulation.ipynb) — continues the QEC series with circuit-level noise simulation\n", + "* [QEC 101 — Lab 1: Introduction to QEC](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/01_QEC_Intro.ipynb) — covers the prerequisite classical and quantum repetition codes\n", + "* [QEC 101 — Lab 6: Topological Codes](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/06_QEC_Topological_Codes.ipynb) — extends stabilizer formalism to surface and toric codes" ] } ], "metadata": { - "colab": { - "include_colab_link": true, - "provenance": [] - }, "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", @@ -1283,9 +1110,22 @@ "file_extension": ".py", "mimetype": "text/x-python", "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.10.0" + }, + "learning_goals": { + "cfqt_domain": "QCS", + "cfqt_subdomain": [ + "QCS.ERR", + "QCS.SW", + "QCS.ALG" + ], + "cfqt_proficiency": "B1", + "qist_competency_areas": [ + "quantum_error_correction", + "quantum_software", + "quantum_algorithms" + ], + "application_domain": "error_correction" } }, "nbformat": 4, diff --git a/qec101/Solutions/03_QEC_Noisy_Simulation_Solution.ipynb b/qec101/Solutions/03_QEC_Noisy_Simulation_Solution.ipynb index 2b78e3d..77d9f1e 100644 --- a/qec101/Solutions/03_QEC_Noisy_Simulation_Solution.ipynb +++ b/qec101/Solutions/03_QEC_Noisy_Simulation_Solution.ipynb @@ -1,99 +1,146 @@ { "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "e182f30007b64a15", + "metadata": {}, + "outputs": [], + "source": [ + "# SPDX-License-Identifier: Apache-2.0 AND CC-BY-NC-4.0\n", + "#\n", + "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you 
may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# http://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." + ] + }, { "cell_type": "markdown", "id": "29c92634-d993-4c37-a7c7-ab9ff1384e2d", "metadata": {}, "source": [ - "# Lab 3 - Simulating Quantum Noise #\n", - "$\n", - "\\renewcommand{\\ket}[1]{|{#1}\\rangle}\n", - "\\renewcommand{\\bra}[1]{\\langle{#1}|}\n", - "$\n", - "---\n", - "## Overview\n", - "Noise is the enemy of useful quantum computing and the reason quantum error correction (QEC) is necessary in the first place. Noise from either the environment or the unavoidable imperfections of device controls produce errors that corrupt the quantum information stored on the qubits and ruin algorithm results. \n", + "# Simulating Quantum Noise — QEC101: Lab 3 — Solutions\n", + "$\\renewcommand{\\ket}[1]{|#1\\rangle}\\renewcommand{\\bra}[1]{\\langle#1|}$\n", "\n", - "Though all QPUs share this reality, each QPU exhibits a unique noise profile depending on its qubit modality and a variety of other design factors. Understanding a device's noise is critical for guiding algorithm development, discovery of new QEC techniques, and improvements to the hardware itself. \n", - "\n", - "\n", - "Simulating noisy QPUs can be extremely helpful for this task and complements the valuable but limited experimental data. For example, researchers can train QEC methods that rely on AI with massive simulated data sets. Similarly, insights from simulation can inform design improvements. 
Simulation can also be used to model the physics of the individual qubits and identify specific sources of noise, much like NVIDIA uses GPUs to simulate digital twins for next generation GPUs in the design process.\n", + "---\n", "\n", - "This lab will provide an overview of simulating noisy quantum circuits. You will learn how to use CUDA-Q to perform a number of different simulations and apply the results to different uses cases.\n", + "**What You Will Do:**\n", + "* Define quantum noise channels using density matrices and Kraus operators\n", + "* Simulate noisy quantum circuits with both density matrix and trajectory-based methods in CUDA-Q\n", + "* Analyze the impact of different noise patterns on a quantum chemistry algorithm (VQE for H₂)\n", + "* Implement zero noise extrapolation as a quantum error mitigation technique\n", + "* Run noisy QEC experiments on the Steane code using the Stim simulator\n", + "* Build a noise model from dynamical simulation of a superconducting transmon qubit\n", "\n", "**Prerequisites:**\n", - "Learners should have familiarity with Jupyter notebooks and programming in Python and CUDA-Q. It is assumed the reader has some familiarity already with quantum computation and is comfortable with braket notation and the concepts of qubits, quantum circuits, measurement, and circuit sampling. The CUDA-Q Academic course entitled \"[Quick Start to Quantum Computing with CUDA-Q](https://github.com/NVIDIA/cuda-q-academic/tree/main/quick-start-to-quantum)\" provides a walkthrough of this prerequisite knowledge if the reader is new to quantum computing and CUDA-Q or needs refreshing. 
Learners would also benefit from completing the first two notebooks in this series [The Basics of Classical and Quantum Error Correction](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/01_QEC_Intro.ipynb) and [Stabilizers, the Shor code, and the Steane code](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb).\n", - "\n", - "The list below outlines what you'll be doing in each section of this lab:\n", - "\n", - "* **3.1** Define a quantum noise channel, the density matrix, and Kraus operators\n", - "* **3.2** Learn two ways to simulate noise: density matrix and trajectory simulation\n", - "* **3.3** Lean how to use noisy simulation for three different uses cases\n", - " * **3.3a** Study the impact of noise on a standard quantum chemistry algorithm \n", - " * **3.3b** Use noise modeling to implement a noise mitigation technique\n", - " * **3.3c** Run QEC experiments with noise models\n", - "* **3.4** Perform dynamical simulation of a qubit to build a noise model for quantum circuit simulation\n", + "* Python and Jupyter familiarity\n", + "* Basic quantum computing concepts (qubits, gates, measurement, braket notation) — see [Quick Start to Quantum Computing with CUDA-Q](https://github.com/NVIDIA/cuda-q-academic/tree/main/quick-start-to-quantum)\n", + "* Familiarity with CUDA-Q kernel syntax and `cudaq.sample`\n", + "* Completion of [01 — The Basics of Classical and Quantum Error Correction](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/01_QEC_Intro.ipynb)\n", + "* Completion of [02 — Stabilizers, the Shor Code, and the Steane Code](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb)\n", + "\n", + "**Key Terminology:**\n", + "* noise channel\n", + "* density matrix\n", + "* trajectory simulation\n", + "* density matrix simulation\n", + "* Kraus operator\n", + "* quantum error mitigation\n", + "* zero noise extrapolation\n", + "* circuit-level noise\n", + "* dynamical 
simulation\n", + "* amplitude damping\n", + "* pure state\n", + "* mixed state\n", + "\n", + "**CUDA-Q Syntax:**\n", + "* [`@cudaq.kernel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.kernel) — defines a quantum kernel function\n", + "* [`cudaq.qvector`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.qvector) — allocates a register of qubits\n", + "* [`cudaq.sample`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.sample) — samples measurement outcomes\n", + "* [`cudaq.observe`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.observe) — computes expectation value of a spin operator\n", + "* [`cudaq.get_state`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.get_state) — returns the statevector or density matrix\n", + "* [`cudaq.set_target`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.set_target) — selects simulation backend\n", + "* [`cudaq.NoiseModel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.NoiseModel) — defines a quantum noise model\n", + "* [`cudaq.SpinOperator`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.SpinOperator) — Pauli spin operator (Hamiltonian)\n", + "* [`cudaq_solvers.create_molecule`](https://nvidia.github.io/cuda-quantum/latest/api/solvers/python_api.html#cudaq_solvers.create_molecule) — builds molecular Hamiltonian from geometry\n", + "* [`cudaq.evolve`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.evolve) — runs dynamical time evolution of a quantum system" + ] + }, + { + "cell_type": "markdown", + "id": "5ed8e305", + "metadata": {}, + "source": [ + "
\n", "\n", - "Terminology and notation you'll use\n", - "* noise channel, density matrix, trajectory simulation, density matrix simulation, Kraus operator\n", - "* quantum error mitigation, zero noise extrapolation\n", - "* circuit-level noise experiments\n", - "* dynamical simulation, amplitude damping\n", + "**⚡ GPU Required:** Parts of this notebook require a GPU.\n", "\n", - "Before we get started, excecute the cells below to load the necessary packages." + "
" ] }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "id": "d87855db", "metadata": {}, "outputs": [], "source": [ - "### Instructions for Google Colab. You can ignore this cell if you have cuda-q set up and have \n", - "# all the dependent files on your system\n", - "# Uncomment the lines below and execute the cell to install cuda-q\n", - "\n", - "#!pip install cudaq\n", + "## Instructions for Google Colab. You can ignore this cell if you have CUDA-Q\n", + "## set up locally with all required files on your system.\n", + "## Uncomment the lines below and execute this cell to install CUDA-Q.\n", "\n", + "#!pip install cudaq -q\n", + "#\n", "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", "#!unzip -q main.zip\n", "#!mv cuda-q-academic-main/qec101/Images ./Images" ] }, { - "cell_type": "code", - "execution_count": 2, - "id": "29a1857d-7eb2-4e66-b760-f9f4f994d761", + "cell_type": "markdown", + "id": "fc00a28e75e448f2", "metadata": {}, - "outputs": [], "source": [ - "# If you are working in an environment that does not have cudaqx installed, \n", - "# uncomment the code below to install cudaq-solvers and the required dependencies. \n", - "# Then restart the kernel before executing the next cell.\n", - "#!sudo apt-get update && sudo apt-get install -y gfortran\n", - "#!pip install cudaq-solvers -q\n", - "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", - "#!unzip -q main.zip\n", - "#!mkdir Images\n", - "#!mv cuda-q-academic-main/qec101/Images/noisy ./Images/noisy" + "> **Note:** Run the cell below to import all required packages.\n", + "> If you installed packages above, restart the kernel first\n", + "> (**Runtime → Restart session** in Colab, or **Kernel → Restart** in Jupyter)." 
] }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "id": "bdcdac7a", "metadata": {}, "outputs": [], "source": [ - "import cudaq\n", - "import numpy as np\n", - "import cudaq_solvers as solvers\n", + "import os\n", + "import sys\n", + "sys.path.append(os.path.join(os.getcwd(), '..'))\n", + "\n", "from typing import List, Optional\n", + "\n", + "import numpy as np\n", + "\n", "import matplotlib.pyplot as plt\n", + "\n", + "import cudaq\n", "from cudaq import spin, operators, ScalarOperator, Schedule, ScipyZvodeIntegrator\n", - "import cupy as cp\n", - "import os" + "\n", + "## To install cudaq-solvers (if not already installed), uncomment and run:\n", + "## !pip install cudaq-solvers -q\n", + "## Note: cudaq-solvers requires libgfortran. If you see an ImportError, run:\n", + "## !apt-get install -y libgfortran5\n", + "import cudaq_solvers as solvers\n", + "\n", + "import cupy as cp" ] }, { @@ -101,22 +148,24 @@ "id": "c43db24e-0cc1-41bc-8a00-5922541a1bdd", "metadata": {}, "source": [ - "## 3.1 Quantum Noise Channels ## \n", + "---\n", + "\n", + "## 3.1 Quantum Noise Channels\n", "\n", "In the first lab of this series, the concept of a **noise channel** was introduced. A noise channel is a mathematical model used to describe how a quantum state is impacted by the presence of noise. \n", "\n", - "\"Drawing\"\n", + "\"Diagram\n", "\n", "A noise channel can correspond to application of a gate to physical qubits, a qubit's interaction with another nearby qubit, or simply the passage of time and the resulting decay of the quantum state as it interacts with anything else from the environment. 
QEC is a promising solution to this problem as a logically encoded quantum state can go through the noise channel, impacting each data qubit, while providing a means for the original state to be restored.\n", "\n", - "\"Drawing\"\n", + "\"Diagram\n", "\n", - "However, as previous labs have emphasized, QEC is hard to implement, and the development of new QEC protocols is still an active research field. In practice, experimental data obtained from the QPU can help measure quantities like gate fidelity and inform a **noise model** which captures all of the noise channels present in the device. This noise model can then be used to simulate data that emulates the performance of the QPU. \n", + "However, as previous labs have emphasized, QEC is hard to implement, and the development of new QEC protocols is still an active research field. In practice, experimental data obtained from the QPU can help measure quantities like gate fidelity and inform a noise model which captures all of the noise channels present in the device. This noise model can then be used to simulate data that emulates the performance of the QPU. \n", "\n", "There are many practical benefits to this that will be explored in this lab. A recent example of this is [NVIDIA's work with QuEra](https://developer.nvidia.com/blog/nvidia-and-quera-decode-quantum-errors-with-ai/) to build an AI decoder. Training this model required a massive amount of data which could be obtained efficiently via simulation. Noisy circuit simulation allowed for millions of syndromes to be obtained with their associated errors, something not possible to do with experimental data. \n", "\n", "\n", - "### The Density Matrix ###\n", + "### The Density Matrix\n", "\n", "Before discussing some of the ways to simulate noise, it is necessary to take a step back and consider representation of a quantum state using the **density matrix**. 
The density matrix ($\\rho$) is a mathematical object that completely describes a quantum state and has the following properties. \n", "\n", @@ -124,49 +173,29 @@ "2. It is Hermitian: $\\rho = \\rho ^{\\dagger}$\n", "3. It is positive semi-definite. (All eigenvalues are positive.)\n", "\n", - "If a quantum system is in one of a any quantum states $\\ket{\\psi_i}$ with probability $p_i$, then the density matrix is defined as a linear combination of outer products of those states with probability coefficients:\n", + "If a quantum system is in one of any quantum states $\\ket{\\psi_i}$ with probability $p_i$, then the density matrix is defined as a linear combination of outer products of those states with probability coefficients:\n", "\n", "$$\\rho = \\sum_i p_i \\ket{\\psi_i}\\bra{\\psi_i} $$\n", "\n", "\n", "\n", + "
\n", "\n", - "
\n", - "

Exercise 1:

\n", - "

\n", - "use CUDA-Q's $\\texttt{get\\_state}$ function and the density matrix simulator (more on that later) to produce any three qubit density matrix. Write code to check that the three properties listed above are met. Make sure to set tolerances on these checks so that, for example, an eigenvalue of zero is not wrongfully flagged as `-1.2e-20`. \n", - "

\n", - "
\n", - "\n" + "**Exercise 1:**\n", + "\n", + "Use CUDA-Q’s `get_state` function and the density matrix simulator (more on that later) to produce any three qubit density matrix. Write code to check that the three properties listed above are met. Make sure to set tolerances on these checks so that, for example, an eigenvalue of zero is not wrongfully flagged as `-1.2e-20`.\n", + "\n", + "
" ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "id": "f90689e1-c853-42ac-9c44-9dc961b5a2a4", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "State vector:\n", - "(0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0)\n", - "(0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0)\n", - "(0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0)\n", - "(0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0)\n", - "(0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0)\n", - "(0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0)\n", - "(0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0)\n", - "(0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0) (0.125,0)\n", - "\n", - "\\nTrace(rho) = (0.9999999999999996+0j)\n", - "Hermitian? True\n", - "Eigenvalues: [0.+0.j 1.+0.j 0.+0.j 0.+0.j 0.+0.j 0.+0.j 0.+0.j 0.+0.j]\n" - ] - } - ], + "outputs": [], "source": [ + "# EXERCISE 1\n", "cudaq.set_target(\"density-matrix-cpu\")\n", "\n", "@cudaq.kernel\n", @@ -189,7 +218,8 @@ "# Check positive semi-definite\n", "eigs, _ = np.linalg.eig(rho)\n", "eigs[np.abs(eigs) < 1e-12] = 0\n", - "print(\"Eigenvalues:\", eigs)\n" + "print(\"Eigenvalues:\", eigs)\n", + "" ] }, { @@ -197,53 +227,32 @@ "id": "1c7b3175-0d51-4d49-b36a-7211389730dc", "metadata": {}, "source": [ - "
\n", - "
\n", - "\n", - "Statevectors correspond to **pure states**, while the density matrix can describe **mixed states**, that is an overall state composed of a combination of pure states.\n", + "Statevectors correspond to **pure states**, while the **density matrix** can describe **mixed states**, that is an overall state composed of a combination of pure states.\n", "\n", "A state is considered pure if the trace of $\\rho^2$ is equal to 1.\n", "\n", "This can be a bit confusing because a pure state can actually be a superposition state and a mixed state can be a combination of two states that do not describe superpositions. The following exercise will make this more clear.\n", "\n", "\n", - "
\n", - "

Exercise 2 :

\n", - "

\n", - "Consider the density matrix $\\rho = \\frac{1}{2}\\ket{00}\\bra{00} + \\frac{1}{2}\\ket{11}\\bra{11}$. \n", + "

\n", "\n", - "Using CUDA-Q build kernels for the $\\ket{00}$ state and the $\\ket{11}$ state, using these kernels and the $\\texttt{get\\_state}$ command define the density matrix $\\rho$, and compute trace($\\rho^2$). Is the state pure?\n", - "

\n", - "
\n", - "\n" + "**Exercise 2:**\n", + "\n", + "Consider the density matrix $\\rho = \\frac{1}{2}\\ket{00}\\bra{00} + \\frac{1}{2}\\ket{11}\\bra{11}$.\n", + "\n", + "Using CUDA-Q build kernels for the $\\ket{00}$ state and the $\\ket{11}$ state, using these kernels and the `get_state` command define the density matrix $\\rho$, and compute trace($\\rho^2$). Is the state pure?\n", + "\n", + "
" ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "id": "a30019ac-17e9-4569-862f-050b540c8cd4", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Density Matrix\n", - "[[0.5+0.j 0. +0.j 0. +0.j 0. +0.j]\n", - " [0. +0.j 0. +0.j 0. +0.j 0. +0.j]\n", - " [0. +0.j 0. +0.j 0. +0.j 0. +0.j]\n", - " [0. +0.j 0. +0.j 0. +0.j 0.5+0.j]]\n", - "Density Matrix Squared\n", - "[[0.25+0.j 0. +0.j 0. +0.j 0. +0.j]\n", - " [0. +0.j 0. +0.j 0. +0.j 0. +0.j]\n", - " [0. +0.j 0. +0.j 0. +0.j 0. +0.j]\n", - " [0. +0.j 0. +0.j 0. +0.j 0.25+0.j]]\n", - "Trace of Density Matrix Squared\n", - "(0.5+0j)\n" - ] - } - ], + "outputs": [], "source": [ + "# EXERCISE 2\n", "@cudaq.kernel\n", "def zeros():\n", " reg = cudaq.qvector(2)\n", @@ -273,29 +282,10 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "id": "16678f45-6878-4a64-96b0-ddbc33efdd9c", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Density Matrix\n", - "[[0.5+0.j 0. +0.j 0. +0.j 0.5+0.j]\n", - " [0. +0.j 0. +0.j 0. +0.j 0. +0.j]\n", - " [0. +0.j 0. +0.j 0. +0.j 0. +0.j]\n", - " [0.5+0.j 0. +0.j 0. +0.j 0.5+0.j]]\n", - "Density Matrix Squared\n", - "[[0.5+0.j 0. +0.j 0. +0.j 0.5+0.j]\n", - " [0. +0.j 0. +0.j 0. +0.j 0. +0.j]\n", - " [0. +0.j 0. +0.j 0. +0.j 0. +0.j]\n", - " [0.5+0.j 0. +0.j 0. +0.j 0.5+0.j]]\n", - "Trace of Density Matrix Squared\n", - "(0.9999999999999998+0j)\n" - ] - } - ], + "outputs": [], "source": [ "@cudaq.kernel\n", "def bell():\n", @@ -317,58 +307,46 @@ "id": "ecdeafb0-ae88-4197-bab3-7bb68c3e9b1c", "metadata": {}, "source": [ - "
\n", - "
\n", - "\n", - "A mixed state means that there is classical uncertainly about which quantum state defines the system, even if both quantum states are deterministic like $\\ket{00}$ and $\\ket{11}$. However, a bell state is pure, meaning that the overall quantum state is known with certainly, even if the state describes a superposition with inherent uncertainty. Another key term is **completely mixed state**, which refers to a density matrix where all of the eigenvalues are the same, meaning the density matrix describes the state with the theoretical maximum of uncertainty. \n", + "A mixed state means that there is classical uncertainty about which quantum state defines the system, even if both quantum states are deterministic like $\\ket{00}$ and $\\ket{11}$. However, a Bell state is pure, meaning that the overall quantum state is known with certainty, even if the state describes a superposition with inherent uncertainty. Another key term is completely mixed state, which refers to a density matrix where all of the eigenvalues are the same, meaning the density matrix describes the state with the theoretical maximum of uncertainty. \n", "\n", - "### Kraus Operators ###\n", + "### Kraus Operators\n", "\n", - "Now, why the business about density matrices? The answer is that a noise channel needs to be an effective model that can generalize to impact mixed states. In fact, many noise channels will produce a mixed state from a pure state.\n", + "Now, why the business about density matrices? The answer is that a noise channel needs to be an effective model that can generalize to impact mixed states. 
In fact, many noise channels will produce a mixed state from a pure state.\n", "\n", "Mathematically this is done with **Kraus operators** ($K_i$) that evolve the density matrix as the state proceeds through a noisy channel $\\epsilon$.\n", "\n", "$$ \\epsilon(\\rho) = \\sum_i K_i\\rho K_i^{\\dagger} $$\n", "\n", - "Kraus operators have the condition that $ \\sum_i K_i K_i^{\\dagger} =1 $ so the trace of the density matrix is preserved.\n", + "**Kraus operators** have the condition that $ \\sum_i K_i K_i^{\\dagger} =1 $ so the trace of the density matrix is preserved.\n", "\n", "For example, a valid set of operators is $K_0 = \\sqrt{1-p} I $ and $K_1 = \\sqrt{p}X$ which will perform a bitflip error with probability $p$ and apply the identity (no change) with probability $1-p$. Let's apply this to the density matrix, $\\rho_0$, for the $\\ket{0}$ state. The result becomes $ \\epsilon(\\rho_0) = (1-p)I\\rho_0 I + pX\\rho_0 X $. Notice the result is now mixed state. \n", "\n", "The table below summarizes some of the channels included in CUDA-Q which you will use in later exercises. Notice too, that each noise channel can be geometrically represented as a deformation of the Bloch sphere.\n", "\n", "\n", - "\"Drawing\"\n", + "\"Table\n", "\n", " \n", "\n", "By applying any number of Kraus operators to the density matrix, it is possible to evolve it and sample the resulting state to determine how noise impacts the output. This is easily accomplished in CUDA-Q with the `density-matrix-cpu` backend. You can then build a noise model consisting of noisy channels applied to specific gate operations with select probabilities. The exercise below will get you started with the syntax.\n", "\n", - "
\n", - "

Exercise 3 :

\n", - "

\n", - "You will be using CUDA-Q's built in noise channel tools throughout this lab. Get a sense for how it works by building a two qubit kernel and perform an $X$ operation on each qubit. Edit the code block below to build a noise model consisting of two bitflip channels with probabilities of .10 and .25 on the $X$ gate for qubit 0 and 1, respectively. Does the sample distribution agree with what you would expect? \n", - "

\n", - "
\n", + "
\n", + "\n", + "**Exercise 3:**\n", + "\n", + "You will be using CUDA-Q’s built in noise channel tools throughout this lab. Get a sense for how it works by building a two qubit kernel and perform an $X$ operation on each qubit. Edit the code block below to build a noise model consisting of two bitflip channels with probabilities of .10 and .25 on the $X$ gate for qubit 0 and 1, respectively. Does the sample distribution agree with what you would expect?\n", "\n", - "\n" + "
" ] }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "id": "af3cef3b-a370-4373-8d6b-d625c1fd974f", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{ 00:33 01:85 10:218 11:664 }\n", - "\n" - ] - } - ], + "outputs": [], "source": [ + "# EXERCISE 3\n", "cudaq.set_target(\"density-matrix-cpu\")\n", "\n", "noise = cudaq.NoiseModel()\n", @@ -390,53 +368,22 @@ "id": "7a1bcb92-ea4d-4e00-868d-11158003d18f", "metadata": {}, "source": [ - "## 3.2 Two Ways to Simulate Noise ##\n", - "\n", - "Density matrix simulation can produce exact results with the quality of simulation limited only by the accuracy of the underlying noise model. Unfortunately, density matrix simulation is expensive and requires storage of the entire $2^N \\times 2^N $ matrix, limiting it to a smaller number of qubits. \n", + "---\n", "\n", - "This scalability problem can be circumvented with a method called trajectory based simulation which allows for approximate noise simulation at much larger scales. Unlike density matrix simulation that applies Kraus operators to every state, trajectory based simulation assumes the Kraus operators occur as a Markov process. \n", + "## 3.2 Two Ways to Simulate Noise\n", "\n", - "The assumption of a Markov process is that the application of each Kraus operator is independent from the others. This is usually a reasonable assumption as a physical QPU might, for example, only apply gates in an isolated gate zone. \n", + "**Density matrix simulation** can produce exact results with the quality of simulation limited only by the accuracy of the underlying noise model. Unfortunately, **density matrix simulation** is expensive and requires storage of the entire $2^N \\times 2^N $ matrix, limiting it to a smaller number of qubits. \n", "\n", - "The code blocks below will make it clear how the two approaches differ. 
Consider a very basic circuit that prepares the $\\ket{111}$ state with bitflip errors on each qubit corrupting the result. First, run the cell below. Notice that `get_state` returns the same density matrix each time you run the code. This density matrix describes the mixture of all possible pure states and returns the sample distribution below." + "This scalability problem can be circumvented with a method called **trajectory simulation** which allows for approximate noise simulation at much larger scales. In trajectory simulation, a statevector is evolved through the circuit and at each noisy gate, a Kraus operator is selected at random based on its probability. This is sometimes called Monte Carlo trajectory simulation. This produces one trajectory per shot, and in the limit of many trajectories, the results converge to those of the density matrix simulation. The CUDA-Q density matrix simulator and GPU-accelerated statevector simulator can each produce equivalent results in noise simulations. Run the density matrix version below and look at the output." 
] }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "id": "0131ab05-87c0-40fb-a327-b87f273eef98", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(0.008,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0.032,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0.032,0) (0,0) (0,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0.128,0) (0,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0,0) (0.032,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0,0) (0,0) (0.128,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0.128,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0.512,0)\n", - "\n", - "(0.008,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0.032,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0.032,0) (0,0) (0,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0.128,0) (0,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0,0) (0.032,0) (0,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0,0) (0,0) (0.128,0) (0,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0.128,0) (0,0)\n", - " (0,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0,0) (0.512,0)\n", - "\n", - "{ 000:4 001:30 010:32 011:120 100:36 101:133 110:123 111:522 }\n", - "\n" - ] - } - ], + "outputs": [], "source": [ - "import cudaq\n", - "\n", "cudaq.set_target(\"density-matrix-cpu\")\n", "\n", "noise = cudaq.NoiseModel()\n", @@ -462,9 +409,9 @@ "id": "771b889c-3ddc-4bab-8294-e376b4964a55", "metadata": {}, "source": [ - "Trajectory based simulation can run in CUDA-Q by simply changing the target to `nvidia`. If the kernel below had no noise, the statevector (output from `get_state`) should be [0,0,0,0,0,0,0,1] corresponding to the $\\ket{111}$ state. When sampling is performed with the trajectory based simulator, the Kraus operators are applied based on their probabilities to produce a new state vector for each shot. 
The widget below allows you to explore the possible outcomes and their associated probabilities. \n", + "**Trajectory simulation** can run in CUDA-Q by simply changing the target to `nvidia`. If the kernel below had no noise, the statevector (output from `get_state`) should be [0,0,0,0,0,0,0,1] corresponding to the $\\ket{111}$ state. When sampling is performed with the trajectory based simulator, the Kraus operators are applied based on their probabilities to produce a new state vector for each shot. The widget below allows you to explore the possible outcomes and their associated probabilities. \n", "\n", - "Try running the CUDA-Q simulation above with two or three different bitflip error probabilities and set the slider below to match. Confirm that the results from the density matrix simulations above match the expected distribution from the trajectory-based approach. You will need to move the `Images > noisy > trajectory_widget.py` file from the [CUDA-Q Academic github repository](https://github.com/NVIDIA/cuda-q-academic) into your working directory to execute this optional cell." + "Try running the CUDA-Q simulation above with two or three different bitflip error probabilities and use the [interactive trajectory noise widget](https://nvidia.github.io/cuda-q-academic/interactive_widgets/trajectory_noise_demo.html) to confirm that the results from the density matrix simulations above match the expected distribution from the trajectory-based approach." ] }, { @@ -475,76 +422,13 @@ "Running the code below, notice `get_state` produces a different state vector each time. Because the number of possible trajectories is small, trajectory based sampling can reproduce the same distribution that would be obtained from density matrix simulation." ] }, - { - "cell_type": "markdown", - "id": "5ed8e305", - "metadata": {}, - "source": [ - "⚠️\n", - "\n", - "Just a heads-up: The rest of this notebook is designed to be run on an environment with a GPU. 
If you don't have access to a GPU, feel free to read through the cells and explore the content without executing them. Enjoy learning!\n", - "\n", - "⚠️" - ] - }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "id": "28a9093d-db01-495c-a672-4a31b8247456", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "SV: [(0,0), (0,0), (0,0), (1,0), (0,0), (0,0), (0,0), (0,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (1,0), (0,0), (0,0), (0,0), (0,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (1,0), (0,0), (0,0), (0,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (1,0), (0,0), (0,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (1,0), (0,0), (0,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0), (0,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0), (0,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0), (0,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (1,0), (0,0), (0,0), (0,0), (0,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (1,0), (0,0), (0,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (1,0), (0,0), (0,0), (0,0), (0,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (1,0), (0,0), (0,0), (0,0), (0,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (0,0), (0,0), (0,0), (1,0), (0,0)]\n", - "\n", - "SV: [(0,0), (0,0), (0,0), (1,0), (0,0), (0,0), (0,0), (0,0)]\n", - "\n", - "{ 000:9 001:33 010:48 011:120 100:31 101:118 110:132 111:509 }\n", - "\n" - ] - } - ], + "outputs": 
[], "source": [ - "import cudaq\n", - "\n", "cudaq.set_target(\"nvidia\")\n", "\n", "noise = cudaq.NoiseModel()\n", @@ -570,7 +454,7 @@ "source": [ "Another benefit of trajectory based simulation is that it can be used with tensor network based simulators to simulate circuits that would be far too large for density matrix or statevector simulation. CUDA-Q can run exact tensor network or approximate Matrix Product State (MPS) simulations with trajectory based simulation to simulate systems of hundreds to thousands of qubits.\n", "\n", - "Clever sampling algorithms can also be used to filter trajectories and exclude certain types of errors or focus on sampling only a subset of the most likely errors. A [recent paper published by NVIDIA research]()https://arxiv.org/pdf/2504.16297 explains this in greater detail and described how methods like this can sample trillions of noisy samples in just a few hours using an AI supercomputer. This is extremely helpful for training AI QEC decoders where experimental data cannot be obtained in sufficient volume." + "Clever sampling algorithms can also be used to filter trajectories and exclude certain types of errors or focus on sampling a particular type of error. This technique is called importance sampling and is another active area of research for making noisy simulation more practical and beneficial to QEC researchers." ] }, { @@ -578,9 +462,11 @@ "id": "abbc20c4-ee4d-4749-85f6-9580e11cc127", "metadata": {}, "source": [ - " ## 3.3 Use cases for Noisy Simulations ##\n", + "---\n", "\n", - "This section will explore three use cases of noisy simulation used to model the impact of noise patterns on algorithms, perform quantum error mitigation, and run QEC experiments." + "## 3.3 Use Cases for Noisy Simulations\n", + "\n", + "This section will explore three use cases of noisy simulation used to model the impact of noise patterns on algorithms, perform **quantum error mitigation**, and run QEC experiments." 
] }, { @@ -588,7 +474,7 @@ "id": "c46a3fbb-c6bf-4075-9047-505abe3f891b", "metadata": {}, "source": [ - "### 3.3a: Understanding How Noise Impacts Algorithm Results ##3\n", + "### 3.3a: Understanding How Noise Impacts Algorithm Results\n", "\n", "A natural application of noisy simulation is to explore how different noise patterns might impact the results of an algorithm. Such simulations can be beneficial for a number of reasons. This section along with the following two will explore three use cases for noisy circuit simulation. \n", "\n", @@ -599,150 +485,10 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "id": "73ebc8a8-2867-4b8d-b445-9628a9f02559", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{ 000:14 001:33 010:30 011:126 100:34 101:129 110:121 111:513 }\n", - "\n", - "-1.0404318468267384\n", - " ╭───╮╭───────────╮ ╭────────────╮╭───╮»\n", - "q0 : ┤ x ├┤ rx(1.571) ├──●──────────────────────────●──┤ rx(-1.571) ├┤ h ├»\n", - " ├───┤╰───────────╯╭─┴─╮ ╭─┴─╮╰────────────╯╰───╯»\n", - "q1 : ┤ x ├─────────────┤ x ├──●────────────────●──┤ x ├───────────────────»\n", - " ├───┤ ╰───╯╭─┴─╮╭──────────╮╭─┴─╮├───┤╭───────────╮ »\n", - "q2 : ┤ h ├──────────────────┤ x ├┤ rz(-0.1) ├┤ x ├┤ h ├┤ rx(1.571) ├──────»\n", - " ├───┤ ╰───╯╰──────────╯╰───╯╰───╯╰───────────╯ »\n", - "q3 : ┤ h ├────────────────────────────────────────────────────────────────»\n", - " ╰───╯ »\n", - "\n", - "################################################################################\n", - "\n", - " ╭───╮ ╭───╮ »\n", - "──●─────────────────────────────●───────────┤ h ├────┤ h ├─────────────────»\n", - "╭─┴─╮ ╭─┴─╮ ╭───┴───┴───╮╰───╯ »\n", - "┤ x ├──●───────────────●──────┤ x ├─────┤ rx(1.571) ├──●───────────────────»\n", - "╰───╯╭─┴─╮╭─────────╮╭─┴─╮╭───┴───┴────╮╰───────────╯╭─┴─╮ »\n", - "─────┤ x ├┤ rz(0.1) ├┤ x ├┤ rx(-1.571) ├─────────────┤ x ├──●──────────────»\n", - " ╰───╯╰─────────╯╰───╯╰────────────╯ 
╰───╯╭─┴─╮╭──────────╮»\n", - "──────────────────────────────────────────────────────────┤ x ├┤ rz(-0.1) ├»\n", - " ╰───╯╰──────────╯»\n", - "\n", - "################################################################################\n", - "\n", - " »\n", - "─────────────────────────────────────────────────────────────────────»\n", - " ╭────────────╮╭───╮ »\n", - "───────●──┤ rx(-1.571) ├┤ h ├──●─────────────────────────────●───────»\n", - " ╭─┴─╮╰────────────╯╰───╯╭─┴─╮ ╭─┴─╮ »\n", - "──●──┤ x ├───────────────────┤ x ├──●───────────────●──────┤ x ├─────»\n", - "╭─┴─╮├───┤╭───────────╮ ╰───╯╭─┴─╮╭─────────╮╭─┴─╮╭───┴───┴────╮»\n", - "┤ x ├┤ h ├┤ rx(1.571) ├───────────┤ x ├┤ rz(0.1) ├┤ x ├┤ rx(-1.571) ├»\n", - "╰───╯╰───╯╰───────────╯ ╰───╯╰─────────╯╰───╯╰────────────╯»\n", - "\n", - "################################################################################\n", - "\n", - " »\n", - "────────────────────●─────────────────────────────────────────────────»\n", - " ╭───╮ ╭───╮╭─┴─╮ »\n", - "────┤ h ├────┤ h ├┤ x ├──●───────────────────────────────●────────────»\n", - " ├───┤ ╰───╯╰───╯╭─┴─╮ ╭─┴─╮ ╭───╮»\n", - "────┤ h ├──────────────┤ x ├──●─────────────────●──────┤ x ├─────┤ h ├»\n", - "╭───┴───┴───╮ ╰───╯╭─┴─╮╭───────────╮╭─┴─╮╭───┴───┴────╮├───┤»\n", - "┤ rx(1.571) ├───────────────┤ x ├┤ rz(0.025) ├┤ x ├┤ rx(-1.571) ├┤ h ├»\n", - "╰───────────╯ ╰───╯╰───────────╯╰───╯╰────────────╯╰───╯»\n", - "\n", - "################################################################################\n", - "\n", - " »\n", - "────────────────────────────────────────────────────●─────────────────────────»\n", - " ╭─┴─╮ ╭───╮╭───────────╮»\n", - "───────────────●───────────────────────────●──────┤ x ├─────┤ h ├┤ rx(1.571) ├»\n", - "╭───────────╮╭─┴─╮ ╭─┴─╮╭───┴───┴────╮├───┤╰───────────╯»\n", - "┤ rx(1.571) ├┤ x ├──●─────────────────●──┤ x ├┤ rx(-1.571) ├┤ h ├─────────────»\n", - "╰───────────╯╰───╯╭─┴─╮╭───────────╮╭─┴─╮╰───╯╰────────────╯╰───╯ »\n", - "──────────────────┤ x ├┤ 
rz(0.025) ├┤ x ├─────────────────────────────────────»\n", - " ╰───╯╰───────────╯╰───╯ »\n", - "\n", - "################################################################################\n", - "\n", - " »\n", - "──●────────────────────────────────────────────────────────────────────────»\n", - "╭─┴─╮ »\n", - "┤ x ├──●────────────────────────────●──────────────────────────────●───────»\n", - "╰───╯╭─┴─╮ ╭─┴─╮ ╭───╮ ╭───────────╮╭─┴─╮ »\n", - "─────┤ x ├──●──────────────────●──┤ x ├────┤ h ├────┤ rx(1.571) ├┤ x ├──●──»\n", - " ╰───╯╭─┴─╮╭────────────╮╭─┴─╮├───┤╭───┴───┴───╮╰───────────╯╰───╯╭─┴─╮»\n", - "──────────┤ x ├┤ rz(-0.025) ├┤ x ├┤ h ├┤ rx(1.571) ├──────────────────┤ x ├»\n", - " ╰───╯╰────────────╯╰───╯╰───╯╰───────────╯ ╰───╯»\n", - "\n", - "################################################################################\n", - "\n", - " ╭───╮ ╭───────────╮ »\n", - "─────────────────────────●──────┤ h ├─────┤ rx(1.571) ├──●────────────»\n", - " ╭─┴─╮╭───┴───┴────╮╰───┬───┬───╯╭─┴─╮ »\n", - "────────────────────●──┤ x ├┤ rx(-1.571) ├────┤ h ├────┤ x ├──●───────»\n", - " ╭─┴─╮╰───╯╰────────────╯ ╰───╯ ╰───╯╭─┴─╮ »\n", - "───────────────●──┤ x ├─────────────────────────────────────┤ x ├──●──»\n", - "╭───────────╮╭─┴─╮╰───╯ ╰───╯╭─┴─╮»\n", - "┤ rz(0.025) ├┤ x ├───────────────────────────────────────────────┤ x ├»\n", - "╰───────────╯╰───╯ ╰───╯»\n", - "\n", - "################################################################################\n", - "\n", - " »\n", - "───────────────────────────────────────────────────────────────────────────»\n", - " »\n", - "────────────────────────●────────────────────────────●─────────────────────»\n", - " ╭─┴─╮ ╭────────────╮╭───╮╭─┴─╮ »\n", - "───────────────●──────┤ x ├─────┤ rx(-1.571) ├┤ h ├┤ x ├──●────────────────»\n", - "╭───────────╮╭─┴─╮╭───┴───┴────╮╰───┬───┬────╯╰───╯╰───╯╭─┴─╮╭────────────╮»\n", - "┤ rz(0.025) ├┤ x ├┤ rx(-1.571) ├────┤ h ├───────────────┤ x ├┤ rz(-0.025) ├»\n", - "╰───────────╯╰───╯╰────────────╯ ╰───╯ 
╰───╯╰────────────╯»\n", - "\n", - "################################################################################\n", - "\n", - " »\n", - "────────────────●──────────────────────────●───────────────────────────────»\n", - " ╭─┴─╮ ╭───╮╭───────────╮╭─┴─╮ »\n", - "───────●──────┤ x ├────┤ h ├┤ rx(1.571) ├┤ x ├──●──────────────────────────»\n", - " ╭─┴─╮ ╰───╯ ╰───╯╰───────────╯╰───╯╭─┴─╮ »\n", - "──●──┤ x ├────────────────────────────────────┤ x ├──●──────────────────●──»\n", - "╭─┴─╮├───┤╭───────────╮ ╰───╯╭─┴─╮╭────────────╮╭─┴─╮»\n", - "┤ x ├┤ h ├┤ rx(1.571) ├────────────────────────────┤ x ├┤ rz(-0.025) ├┤ x ├»\n", - "╰───╯╰───╯╰───────────╯ ╰───╯╰────────────╯╰───╯»\n", - "\n", - "################################################################################\n", - "\n", - " »\n", - "──────────────────────────────────────────────────────────────────»\n", - " »\n", - "──────●───────────────────────────●────────────────────────────●──»\n", - " ╭─┴─╮ ╭───╮╭───────────╮╭─┴─╮ ╭─┴─╮»\n", - "────┤ x ├─────┤ h ├┤ rx(1.571) ├┤ x ├──●──────────────────●──┤ x ├»\n", - "╭───┴───┴────╮├───┤╰───────────╯╰───╯╭─┴─╮╭────────────╮╭─┴─╮├───┤»\n", - "┤ rx(-1.571) ├┤ h ├──────────────────┤ x ├┤ rz(-0.025) ├┤ x ├┤ h ├»\n", - "╰────────────╯╰───╯ ╰───╯╰────────────╯╰───╯╰───╯»\n", - "\n", - "################################################################################\n", - "\n", - " ╭────────────╮\n", - "──────●───────┤ rx(-1.571) ├\n", - " ╭─┴─╮ ├────────────┤\n", - "────┤ x ├─────┤ rx(-1.571) ├\n", - "╭───┴───┴────╮╰────────────╯\n", - "┤ rx(-1.571) ├──────────────\n", - "╰────────────╯ \n", - "────────────────────────────\n", - " \n", - "\n" - ] - } - ], + "outputs": [], "source": [ "cudaq.set_target(\"nvidia\")\n", "print(cudaq.sample(test, noise_model = noise))\n", @@ -786,21 +532,23 @@ "id": "56c3fa72-0c52-4f47-afc3-2cb91b8c29ed", "metadata": {}, "source": [ - "
\n", - "

Exercise 4 :

\n", - "

\n", + "

\n", + "\n", + "**Exercise 4:**\n", + "\n", "Now, write a function that computes the expectation values for various configurations of errors. The function comments will guide you on the inputs and what the function should return.\n", - "

\n", - "
\n" + "\n", + "
" ] }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "id": "4eaab9d0-3014-43b2-8f46-323decf4b2e9", "metadata": {}, "outputs": [], "source": [ + "# EXERCISE 4\n", "def get_noisy_data(e_type =[], gate=[] , qubit=[], prob=[], shots=-1, trajectories=None):\n", " \"\"\"The function takes in various configurations of noise channels, builds a noise model, uses the noise model to obtain 40 expectation values,\n", " and the returns a list of the difference between the noisy expectation values and the noiseless.\n", @@ -864,14 +612,11 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "id": "08cc39a7-ac88-4a4f-860c-e953541929ae", "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "\n", "def plot_data(datasets, categories, labels=None):\n", " \"\"\"\n", " Plots the mean and ±1 SD error bars for one or more datasets,\n", @@ -930,7 +675,8 @@ " if len(datasets) > 1:\n", " plt.legend()\n", "\n", - " plt.show()\n" + " plt.show()\n", + "" ] }, { @@ -947,21 +693,10 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "id": "0bf20375-77ef-41f1-a316-38480d93677e", "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAksAAAGzCAYAAAA/lFPrAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjcsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvTLEjVAAAAAlwSFlzAAAPYQAAD2EBqD+naQAAU0ZJREFUeJzt3XdYk1f/BvA7rISNqIAoMhQHah1YFfcAsa7XVUcdiFatu+KuA9f7at3WWe2vWq2jVTu1Dhx1FEQruEdpi6MqoCJbZs7vDy9SY0JIMCFB78915aqc5zxPvoc8TW6ecSIRQggQERERkVpmxi6AiIiIyJQxLBERERFpwLBEREREpAHDEhEREZEGDEtEREREGjAsEREREWnAsERERESkAcMSERERkQYMS0REREQaMCwR0Vtn6dKlqFWrFuRyubFLKRO2bdsGiUSCO3fuGLsU0pNff/0VEokEv/76q0G2379/f/Tt29cg2zYGhqU3WOEbnEQiwdmzZ1WWCyHg4eEBiUSCrl27GqFCw/vmm28waNAg+Pr6QiKRoG3btjqt//jxY0ycOBG1atWCtbU1XFxc0KRJE0yfPh0ZGRmKfkOHDlX8riUSCezs7ODj44M+ffpg//79Wn8oz5s3T2k7rz4SEhJ0qr+0tG3bVqlOKysreHt7Y+TIkbh//76xy1OSlpaGTz/9FNOnT4eZmfJbYHZ2NlatWoWmTZvC0dERMpkMNWrUwLhx4/DHH38YtK4NGzZg27ZtBn0OUxEZGYl58+YhJSVF63UePHiAvn37wsnJCQ4ODvjPf/6Dv//+23BFFqFwX+/WrZvKsjt37kAikWD58uV6ez4vLy9IJBKMHz9eZVlh4Nm3b5/enk9fpk+fjv379+Py5cvGLkUvLIxdABmeTCbDrl270LJlS6X2U6dO4Z9//oFUKjVSZYa3ceNGXLx4Ee+++y6ePn2q07rJyclo3Lgx0tLSMGzYMNSqVQtPnz7FlStXsHHjRowePRp2dnaK/lKpFF988QUA4Pnz57h79y5+/vln9OnTB23btsWPP/4IBwcHret+eduFnJycdBpDaapSpQoWL14MAMjNzcWNGzewadMmHDlyBDdv3oSNjY2RK3zhyy+/RH5+PgYMGKDU/uTJE3Tq1AkXL15E165d8cEHH8DOzg63b9/Gnj17sHnzZuTm5hqsrg0bNqBChQoYOnSowZ7DVERGRmL+/PkYOnSoVvt0RkYG2rVrh9TUVHzyySewtLTEqlWr0KZNG1y6dAnly5c3fNGvOHDgAC5evAh/f/9Seb4tW7Zg5syZcHd318v2WrdujefPn8PKykov23tVw4YN0bhxY6xYsQLbt283yHOUKkFvrK1btwoAolevXqJChQoiLy9PafmIESOEv7+/8PT0FF26dDFSlSVz8uRJAUDEx8dr7Hfv3j1RUFAghBCiTp06ok2bNlo/x9KlSwUA8dtvv6ksS01NFc+fP1f8HBISImxtbdVuZ/HixQKA6Nu3b7HPGR4eLgCIx48fa11noefPnyvG+qqMjAydt/eygoICpfG+qk2bNqJOnToq7evWrRMAxNGjR1/r+Qu97jiEEOKdd94RgwYNUmnv0qWLMDMzE/v27VNZlp2dLSZPnvzaz62JrvunNvLy8kROTs5rb6fwvaS4/9+0tWzZMp229+mnnwoA4vz584q2mzdvCnNzczFz5ky91KStNm3aiKpVq4py5cqJbt26KS2Lj48XAMSyZcv09nyenp6iTp06wsLCQowfP15pWeH74N69e/X2fPq0fPlyYWtrK9LT041dymvjabi3wIABA/D06VNEREQo2nJzc7Fv3z588MEHateRy+VYvXo16tSpA5lMBldXV4waNQrPnj1T6vfjjz+iS5cucHd3h1QqRbVq1bBw4UIUFBQo9Wvbti3q1q2LGzduoF27drCxsUHlypWxdOlS/Q/4JR4eHiqnWrT1119/wdzcHM2aNVNZ5uDgAJlMptV2ZsyYgY4
dO2Lv3r16O5VTePh9z549mD17NipXrgwbGxukpaVh6NChsLOzw19//YXOnTvD3t4eAwcOBABkZmZi8uTJ8PDwgFQqRc2aNbF8+XIIIZS2L5FIMG7cOOzcuRN16tSBVCrF4cOHda7Tzc0NAGBh8e9B7Lt372LMmDGoWbMmrK2tUb58ebz//vsq18MUnkY+deoUxowZAxcXF1SpUgUAkJ6ejo8//hheXl6QSqVwcXFBUFAQYmJiNNYTHx+PK1euIDAwUKk9OjoaBw8exPDhw9G7d2+V9aRSqcqplVu3bqFPnz5wdnaGTCZD48aN8dNPP6kdw2+//YawsDBUrFgRtra26NmzJx4/fqzo5+XlhevXr+PUqVOKU5kvnzJOSUnBxx9/rHjdqlevjk8//VTp9O7Lp4BWr16NatWqQSqV4saNG1rXCwDXr19H+/btYW1tjSpVqmDRokVan0a+cuUKhg4dCh8fH8hkMri5uWHYsGFKR3XnzZuHqVOnAgC8vb0V49V0PdS+ffvw7rvv4t1331W01apVCx06dMC3336rVW36ZG9vj0mTJuHnn38udp8DgL///hvvv/8+nJ2dYWNjg2bNmuHgwYNaP5+XlxeGDBmCLVu24OHDh8X2j42NxXvvvQcHBwfY2dmhQ4cOOHfunFIfddcsxcXFoXfv3nBzc4NMJkOVKlXQv39/pKamKq379ddfw9/fH9bW1nB2dkb//v3Vnm4PCgpCZmam0mdPWcXTcG8BLy8vBAQEYPfu3XjvvfcAAIcOHUJqair69++Pzz77TGWdUaNGYdu2bQgNDcWECRMQHx+PdevWITY2Fr/99hssLS0BvPgwsLOzQ1hYGOzs7HDixAnMnTsXaWlpWLZsmdI2nz17hk6dOqFXr17o27cv9u3bh+nTp6NevXqKuoqSmpqKvLw8pZ8Lt/ny6SqZTKb29FVJeHp6oqCgADt27EBISMhrbWvw4ME4evQoIiIiUKNGjWL7Jycnq7RZWFionLJYuHAhrKysMGXKFOTk5CgOqefn5yM4OBgtW7bE8uXLYWNjAyEEunfvjpMnT2L48OFo0KABjhw5gqlTp+LBgwdYtWqV0rZPnDiBb7/9FuPGjUOFChXg5eWlseaCggI8efIEAJCXl4ebN28iPDwc1atXR4sWLRT9Lly4gMjISPTv3x9VqlTBnTt3sHHjRrRt2xY3btxQOV03ZswYVKxYEXPnzkVmZiYA4KOPPsK+ffswbtw4+Pn54enTpzh79ixu3ryJRo0aFVljZGQkAKj0KQwNgwcP1jjGQtevX0eLFi1QuXJlzJgxA7a2tvj222/Ro0cP7N+/Hz179lTqP378eJQrVw7h4eG4c+cOVq9ejXHjxuGbb74BAKxevRrjx4+HnZ0dZs2aBQBwdXUFAGRlZaFNmzZ48OABRo0ahapVqyIyMhIzZ87Eo0ePsHr1aqXn2rp1K7KzszFy5EhIpVI4OztrXW9CQgLatWuH/Px8Rb/NmzfD2tpaq99LREQE/v77b4SGhsLNzQ3Xr1/H5s2bcf36dZw7dw4SiQS9evXCH3/8gd27d2PVqlWoUKECAKBixYpqtymXy3HlyhUMGzZMZVmTJk1w9OhRpKenw97evsi6Xn3/KIou7x8TJ07EqlWrMG/ePLWhs1BiYiKaN2+OrKwsTJgwAeXLl8dXX32F7t27Y9++fSr7SlFmzZqF7du3Y8mSJWrfswtdv34drVq1goODA6ZNmwZLS0t8/vnnaNu2LU6dOoWmTZuqXS83NxfBwcHIycnB+PHj4ebmhgcPHuDAgQNISUmBo6MjAOC///0v5syZg759++LDDz/E48ePsXbtWrRu3RqxsbFK71F+fn6wtrbGb7/9pvU4TZaxD22R4RQeOr9w4YJYt26dsLe3F1lZWUIIId5//33Rrl07IYRQOQ135swZAUDs3LlTaXuHDx9WaS/c3stGjRolbGxsRHZ2tqKtTZs2AoDYvn27oi0nJ0e4ubmJ3r17Fzu
WwvWLe4SEhBS5DV1PcyQkJIiKFSsKAKJWrVrio48+Ert27RIpKSkqfTWdhhNCiNjYWAFATJo0SeNzFp6GU/eoWbOmol/h4XcfHx+V1yAkJEQAEDNmzFBq/+GHHwQAsWjRIqX2Pn36CIlEIv78809FGwBhZmYmrl+/rrHeQkW9PrVr1xZ///23Ul91+0xUVJTK/lG4/7Zs2VLk5+cr9Xd0dBRjx47VqraXzZ49WwBQOS3Qs2dPAUA8e/ZMq+106NBB1KtXT2kfl8vlonnz5sLX11dlDIGBgUIulyvaJ02aJMzNzZX2paL2z4ULFwpbW1vxxx9/KLXPmDFDmJubi3v37gkh/j0F5ODgIJKSkkpU78cffywAiOjoaEVbUlKScHR01Oq0mbrXdvfu3QKAOH36tKJNl9Nwjx8/FgDEggULVJatX79eABC3bt3SuA19vH+8vK3CU87z588XAMTFixeFEOpPwxX+Ts+cOaNoS09PF97e3sLLy6vIU+eFXn5/Dg0NFTKZTDx8+FAIof40XI8ePYSVlZX466+/FG0PHz4U9vb2onXr1oq2wnVPnjwphPj3PUrTKb07d+4Ic3Nz8d///lep/erVq8LCwkKlXQghatSoId577z2NYywLeBruLdG3b188f/4cBw4cQHp6Og4cOFDkKbi9e/fC0dERQUFBePLkieLh7+8POzs7nDx5UtH35b8409PT8eTJE7Rq1QpZWVm4deuW0nbt7OwwaNAgxc9WVlZo0qSJVne0rFixAhEREYpH4SmRr7/+Wql92rRpOv1eNHF1dcXly5fx0Ucf4dmzZ9i0aRM++OADuLi4YOHChSqnrjQp/Gs1PT1dq/779+9XGldERAS2bt2q0i8kJKTIv/pHjx6t9PMvv/wCc3NzTJgwQal98uTJEELg0KFDSu1t2rSBn5+fVvUCL45gFtZ66NAhrF69GqmpqXjvvfeUTjm9XG9eXh6ePn2K6tWrw8nJSe0pjREjRsDc3FypzcnJCdHR0VqdknjZ06dPYWFhoXL0IC0tDQA0Hp0olJycjBMnTqBv376Kff7Jkyd4+vQpgoODERcXhwcPHiitM3LkSEgkEsXPrVq1QkFBAe7evVvs8+3duxetWrVCuXLllP5/DAwMREFBAU6fPq3Uv3fv3kpHaXSp95dffkGzZs3QpEkTxfoVK1ZUnMYtzsuvbXZ2Np48eaI4ja3N6Sp1nj9/DgBqb0QpPBVe2Kcor75/FPXQ9f1j4sSJKFeuHObPn19kn19++QVNmjRRusHGzs4OI0eOxJ07dxSnSbUxe/Zs5OfnY8mSJWqXFxQU4OjRo+jRowd8fHwU7ZUqVcIHH3yAs2fPKvb1VxUeOTpy5AiysrLU9vnuu+8gl8vRt29fpX3Rzc0Nvr6+Sp8NhQr327KOp+HeEhUrVkRgYCB27dqFrKwsFBQUoE+fPmr7xsXFITU1FS4uLmqXJyUlKf59/fp1zJ49GydOnFD5n/DV89xVqlRR+sAAXvyPdOXKlWLrf/WOk8JrYFq0aFHs6aHXUalSJWzcuBEbNmxAXFwcjhw5gk8//RRz585FpUqV8OGHH2q1ncJpBrT5MAZe3KlSeHpCE29vb7XtFhYWiut7Ct29exfu7u4qNdSuXVuxXJttF8XW1lbpWqBOnTqhZcuWaNy4MZYsWYIVK1YAePHBtnjxYmzduhUPHjxQCp2v7jNF1bF06VKEhITAw8MD/v7+6Ny5M4YMGaL0AaGLwrsU09PTi707688//4QQAnPmzMGcOXPU9klKSkLlypUVP1etWlVpebly5QBA5RpAdeLi4nDlypUiT1O9/P8joPr70qXeu3fvqj1NU7NmzWLrBF4Es/nz52PPnj0qdal7bbVRGMBycnJUlmVnZyv1KYqh7lhzdHTExx9/jPDwcMTGxipe15cV9Tt9+f+7unXravV8Pj4+GDx4MDZv3owZM2aoLH/8+DGysrLUvl61a9eGXC7H/fv3UadOHZXl3t7eCAs
Lw8qVK7Fz5060atUK3bt3x6BBgxRBKi4uDkII+Pr6qq2v8PKMlwkhVN73yyKGpbfIBx98gBEjRiAhIQHvvfdekR8KcrkcLi4u2Llzp9rlhW/aKSkpaNOmDRwcHLBgwQJUq1YNMpkMMTExmD59uspFoa8eHSikyxEaY5FIJKhRowZq1KiBLl26wNfXFzt37tQ6LF27dg0AUL16db3WVdSHhFQqLfGF7cVtWxf+/v5wdHRUOvoxfvx4bN26FR9//DECAgLg6OgIiUSC/v37q72QWF0dffv2RatWrfD999/j6NGjWLZsGT799FN89913Gq9/K1++PPLz81WucalVqxYA4OrVq2jVqpXGMRXWOGXKFAQHB6vt8+rr/Dr7vlwuR1BQUJFHPV69Bu7V31dJ6i2pvn37IjIyElOnTkWDBg1gZ2cHuVyOTp06lXgCUGdnZ0ilUjx69EhlWWFbcbfTJycnazXtg7W1tSIYaKvw2qX58+erXD9mCLNmzcKOHTvw6aefokePHnrd9ooVKzB06FD8+OOPOHr0KCZMmIDFixfj3LlzqFKlCuRyOSQSCQ4dOqR2n1Z3vdezZ8+KDFdlCcPSW6Rnz54YNWoUzp07p7iwVJ1q1arh2LFjaNGihcYPzF9//RVPnz7Fd999h9atWyva4+Pj9Vq3qfHx8UG5cuXUvnkXZceOHZBIJAgKCjJgZZp5enri2LFjKkGh8HSpp6enQZ63oKBAaQLPffv2ISQkRHGkCXhxhECXCQqBF0f9xowZgzFjxiApKQmNGjXCf//7X41hqTAUxcfH45133lG0d+vWDYsXL8bXX39dbFgqPHplaWmpclfd6yjqr+9q1aohIyOjxM+lS72enp6Ii4tTab99+3axz/Ps2TMcP34c8+fPx9y5cxXt6rany5EGMzMz1KtXD7///rvKsujoaPj4+BR7xLZXr144depUsc8VEhKi88SghUeX5s2bp/ZGEE9PT7W/v5L+f1etWjUMGjQIn3/+ucoRq4oVK8LGxqbI5zMzM4OHh4fG7derVw/16tXD7NmzERkZiRYtWmDTpk1YtGgRqlWrBiEEvL29tbpRJT8/H/fv30f37t11GqMp4jVLbxE7Ozts3LgR8+bNUzv7bKG+ffuioKAACxcuVFmWn5+v+FAr/Mvi5b+Oc3NzsWHDBv0Wrkbbtm0hhDDoKbjo6GjF3VcvO3/+PJ4+far1qYklS5bg6NGj6Nevn1H/wurcuTMKCgqwbt06pfZVq1ZBIpEUe0diSZw8eRIZGRmoX7++os3c3FzliMratWtVppsoSkFBgcopHRcXF7i7u6s9VfOygIAAAFD54A0ICECnTp3wxRdf4IcfflBZLzc3F1OmTFE8V9u2bfH555+rDcwvX5+lC1tbW7WBsW/fvoiKisKRI0dUlqWkpCA/P1/jdnWpt3Pnzjh37hzOnz+vtLyoo8wvU/d+AEDt0RZbW1tF/dro06cPLly4oPS63b59GydOnMD7779f7PqGumap0McffwwnJycsWLBAZVnnzp1x/vx5REVFKdoyMzOxefNmeHl56XRdYKHZs2cjLy9PZeoVc3NzdOzYET/++KPSVAyJiYmKiYmLmhg3LS1NZV+qV68ezMzMFP9f9erVC+bm5pg/f77K6yyEUJn498aNG8jOzkbz5s11HqOp4ZGlt4w2t8C3adMGo0aNwuLFi3Hp0iV07NgRlpaWiIuLw969e7FmzRr06dMHzZs3R7ly5RASEoIJEyZAIpFgx44dBjmtFhERgcTExGL7VatWTfGBCACnT59WnAJ6/PgxMjMzsWjRIgAvrgt6+YjYq3bs2IGdO3eiZ8+e8Pf3h5WVFW7evIkvv/wSMpkMn3zyiVL//Px8fP311wBeHCm5e/cufvrpJ1y5cgXt2rXD5s2btR7vvn371B7SDgoKUtxSrqtu3bqhXbt2mDVrFu7cuYP69evj6NGj+PHHH/Hxxx+jWrVqJdpuodTUVMX48/Pzcfv2bWz
cuBHW1tZK11d07doVO3bsgKOjI/z8/BAVFYVjx45pPQtzeno6qlSpgj59+qB+/fqws7PDsWPHcOHCBaWjVer4+Pigbt26OHbsmMqt6Nu3b0fHjh3Rq1cvdOvWDR06dICtrS3i4uKwZ88ePHr0SHFjwfr169GyZUvUq1cPI0aMgI+PDxITExEVFYV//vmnRF/x4O/vj40bN2LRokWoXr06XFxc0L59e0ydOhU//fQTunbtiqFDh8Lf3x+ZmZm4evUq9u3bhzt37hR7fZu29U6bNg07duxAp06dMHHiRMXUAZ6ensVeW+jg4IDWrVtj6dKlyMvLQ+XKlXH06FG1R5oLryGaNWsW+vfvD0tLS3Tr1k0Rol41ZswYbNmyBV26dMGUKVNgaWmJlStXwtXVFZMnT9bqd2tIjo6OmDhxotoLvWfMmKGYtmXChAlwdnbGV199hfj4eOzfv79Ep8sLjy599dVXKssWLVqEiIgItGzZEmPGjIGFhQU+//xz5OTkaJzX7sSJExg3bhzef/991KhRA/n5+dixYwfMzc0Vc49Vq1YNixYtwsyZM3Hnzh306NED9vb2iI+Px/fff4+RI0cq/qgAXrxv29jYGPWIut6U+v13VGpenjpAk6Jm8N68ebPw9/cX1tbWwt7eXtSrV09MmzZNcduqEEL89ttvolmzZsLa2lq4u7uLadOmiSNHjijdkipE0TM8h4SECE9Pz2LHUtJbfzXdih8eHq7xOa9cuSKmTp0qGjVqJJydnYWFhYWoVKmSeP/990VMTIzKOF7eto2NjfDy8hK9e/cW+/btK/b2YG3qffl3qmnmXk3TGKSnp4tJkyYJd3d3YWlpKXx9fcWyZcuUbmsX4sXUAbrcmv/q6yORSISzs7Po3r274rbqQs+ePROhoaGiQoUKws7OTgQHB4tbt24JT09PpdevqP03JydHTJ06VdSvX1/Y29sLW1tbUb9+fbFhwwatal25cqWws7NTe5t7VlaWWL58uXj33XeFnZ2dsLKyEr6+vmL8+PFKUysIIcRff/0lhgwZItzc3ISlpaWoXLmy6Nq1q9IM4EWN4dXbtoV4MVVFly5dhL29vQCgNI1Aenq6mDlzpqhevbqwsrISFSpUEM2bNxfLly8Xubm5QojiZ4/Wpl4hXuz3bdq0ETKZTFSuXFksXLhQ/N///Z9Wt/r/888/omfPnsLJyUk4OjqK999/Xzx8+FDt/28LFy4UlStXFmZmZlpt+/79+6JPnz7CwcFB2NnZia5du4q4uDiN6xhCUe9lz549U0yx8Opr8Ndff4k+ffoIJycnIZPJRJMmTcSBAwe0er6i3p/j4uKEubm52veBmJgYERwcLOzs7ISNjY1o166diIyMVOrz6j74999/i2HDholq1aoJmUwmnJ2dRbt27cSxY8dUnnv//v2iZcuWwtbWVtja2opatWqJsWPHitu3byv1a9q0qdrZ8ssiiRBl4OpaIiI9SU1NhY+PD5YuXYrhw4cbuxyiN9KlS5fQqFEjxMTEoEGDBsYu57UxLBHRW+fTTz/F1q1bcePGjde+a5CIVBXe3WqMr6MxBIYlIiIiIg34JxURERGRBgxLRERERBowLBERERFpwLBEREREpAEnpdQDuVyOhw8fwt7e/o34wkAiIqK3gRAC6enpcHd313hnLMOSHjx8+LDY79shIiIi03T//n1UqVKlyOUMS3pQ+CWO9+/fL/J7d4iIiMi0pKWlwcPDo9gvY2ZY0oPCU28ODg4MS0RERGVMcZfQ8AJvIiIiIg0YloiIiIg0YFgiIiIi0oBhiYiIiEgDhiUiIiIiDRiWiIiIiDRgWCIiIiLSgGGJiIiISAOGJSIiIiINGJaIiIiINGBYIiIiItKAYYmIiIhIA4YlIiIiIg0YloiIiIg0YFgiIiIi0oBhiYiIiEgDhiUiIiIiDRiWiIiIiDRgWCIiIiLSgGGJiIiISAOGJSIiIiINGJaIiIiINGBYIiIiItKAYYmIiIh
IA4YlIiIiIg0YloiIiIg0YFgiIiIi0oBhiYiIiEgDhiUiIiIiDRiWiIiIiDRgWCIiIiLSgGGJiIiISAOGJSIiIiINGJaIiIiINChzYWn9+vXw8vKCTCZD06ZNcf78eY399+7di1q1akEmk6FevXr45Zdfiuz70UcfQSKRYPXq1XqumoiIiMqqMhWWvvnmG4SFhSE8PBwxMTGoX78+goODkZSUpLZ/ZGQkBgwYgOHDhyM2NhY9evRAjx49cO3aNZW+33//Pc6dOwd3d3dDD4OIiIjKkDIVllauXIkRI0YgNDQUfn5+2LRpE2xsbPDll1+q7b9mzRp06tQJU6dORe3atbFw4UI0atQI69atU+r34MEDjB8/Hjt37oSlpWVpDIWIiIjKiDITlnJzc3Hx4kUEBgYq2szMzBAYGIioqCi160RFRSn1B4Dg4GCl/nK5HIMHD8bUqVNRp04drWrJyclBWlqa0oOIiIjeTGUmLD158gQFBQVwdXVVand1dUVCQoLadRISEort/+mnn8LCwgITJkzQupbFixfD0dFR8fDw8NBhJERERFSWlJmwZAgXL17EmjVrsG3bNkgkEq3XmzlzJlJTUxWP+/fvG7BKIiIiMqYyE5YqVKgAc3NzJCYmKrUnJibCzc1N7Tpubm4a+585cwZJSUmoWrUqLCwsYGFhgbt372Ly5Mnw8vIqshapVAoHBwelBxEREb2ZykxYsrKygr+/P44fP65ok8vlOH78OAICAtSuExAQoNQfACIiIhT9Bw8ejCtXruDSpUuKh7u7O6ZOnYojR44YbjBERERUZlgYuwBdhIWFISQkBI0bN0aTJk2wevVqZGZmIjQ0FAAwZMgQVK5cGYsXLwYATJw4EW3atMGKFSvQpUsX7NmzB7///js2b94MAChfvjzKly+v9ByWlpZwc3NDzZo1S3dwREREZJLKVFjq168fHj9+jLlz5yIhIQENGjTA4cOHFRdx37t3D2Zm/x4sa968OXbt2oXZs2fjk08+ga+vL3744QfUrVvXWEMgIiKiMkYihBDGLqKsS0tLg6OjI1JTU3n9EhERURmh7ed3mblmiYiIiMgYGJaIiIiINGBYIiIiItKAYYmIiIhIA4YlIiIiIg0YloiIiIg0YFgiIiIi0oBhiYiIiEgDhiUiIiIiDRiWiIiIiDRgWCIiIiLSgGGJiIiISAOGJSIiIiINGJaIiIiINGBYIiIiItKAYYmIiIhIA4YlIiIiIg0YloiIiIg0YFgiIiIi0sDC2AUQkJSWjaT0HJ3Xc7GXwsVBZoCKiIiIqBDDkgnYGX0Pa47H6bzexA6+mBRUwwAVERERUSGGJRMwsGlVBPm5KrVl5xWgz6YoAMC+jwIgszRXWc/FXloq9REREb3NGJZMgIuDTOV0WlZuvuLffu4OsLHiS0VERGQMvMCbiIiISAOGJSIiIiINGJaIiIiINGBYIiIiItKAYYmIiIhIA4YlIiIiIg10uh9dLpfj1KlTOHPmDO7evYusrCxUrFgRDRs2RGBgIDw8PAxVJxEREZFRaHVk6fnz51i0aBE8PDzQuXNnHDp0CCkpKTA3N8eff/6J8PBweHt7o3Pnzjh37pyhayYiIiIqNVodWapRowYCAgKwZcsWBAUFwdLSUqXP3bt3sWvXLvTv3x+zZs3CiBEj9F4sERERUWnTKiwdPXoUtWvX1tjH09MTM2fOxJQpU3Dv3j29FEdERERkbFqdhisuKL3M0tIS1apVK3FBRERERKakRHfDnTlzBoMGDUJAQAAePHgAANixYwfOnj2r1+KIiIiIjE3nsLR//34EBwfD2toasbGxyMnJAQCkpqbif//7n94LJCIiIjImncPSokWLsGnTJmzZskXpQu8WLVogJiZGr8URERERGZvOYen27dto3bq1SrujoyNSUlL0URMRERGRydA5LLm5ueHPP/9UaT979ix8fHz0UhQRERGRqdA5LI0YMQITJ05EdHQ0JBIJHj58iJ07d2L
KlCkYPXq0IWokIiIiMhqdvu4EAGbMmAG5XI4OHTogKysLrVu3hlQqxZQpUzB+/HhD1EhERERkNDqHJYlEglmzZmHq1Kn4888/kZGRAT8/P9jZ2RmiPiIiIiKj0jksFbKysoKfn58+ayEiIiIyOTqHpXbt2kEikRS5/MSJE69VEBEREZEp0TksNWjQQOnnvLw8XLp0CdeuXUNISIi+6iIiIiIyCTqHpVWrVqltnzdvHjIyMl67ICIiIiJTUqLvhlNn0KBB+PLLL/W1OSIiIiKToLewFBUVBZlMpq/NEREREZkEnU/D9erVS+lnIQQePXqE33//HXPmzNFbYURERESmQOew5OjoqPSzmZkZatasiQULFqBjx456K4yIiIjIFOgclrZu3WqIOoiIiIhMkt6uWSIiIiJ6E2l1ZKlcuXIaJ6J8WXJy8msVRERERGRKtApLq1evNnAZRERERKZJq7DEmbmJiIjobVXiL9IFgOzsbOTm5iq1OTg4vFZBRERERKZE5wu8MzMzMW7cOLi4uMDW1hblypVTehARERG9SXQOS9OmTcOJEyewceNGSKVSfPHFF5g/fz7c3d2xfft2Q9RIREREZDQ6n4b7+eefsX37drRt2xahoaFo1aoVqlevDk9PT+zcuRMDBw40RJ1ERERERqHzkaXk5GT4+PgAeHF9UuFUAS1btsTp06f1Wx0RERGRkekclnx8fBAfHw8AqFWrFr799lsAL444OTk56bU4IiIiImPTOSyFhobi8uXLAIAZM2Zg/fr1kMlkmDRpEqZOnar3Al+1fv16eHl5QSaToWnTpjh//rzG/nv37kWtWrUgk8lQr149/PLLL4pleXl5mD59OurVqwdbW1u4u7tjyJAhePjwoaGHQURERGWE1mFpypQpuHXrFiZNmoQJEyYAAAIDA3Hr1i3s2rULsbGxmDhxosEKBYBvvvkGYWFhCA8PR0xMDOrXr4/g4GAkJSWp7R8ZGYkBAwZg+PDhiI2NRY8ePdCjRw9cu3YNAJCVlYWYmBjMmTMHMTEx+O6773D79m10797doOMgIiKiskMihBDadPT19cXff/+Npk2b4sMPP0S/fv1ga2tr6PqUNG3aFO+++y7WrVsHAJDL5fDw8MD48eMxY8YMlf79+vVDZmYmDhw4oGhr1qwZGjRogE2bNql9jgsXLqBJkya4e/cuqlatqlVdaWlpcHR0RGpqqt7mmcrKzYff3CMAgBsLgmFj9VpTYhEREdErtP381vrIUlxcHE6ePIkaNWpg4sSJcHNzw7BhwxAZGamXgouTm5uLixcvIjAwUNFmZmaGwMBAREVFqV0nKipKqT8ABAcHF9kfAFJTUyGRSDRef5WTk4O0tDSlBxEREb2ZdLpmqXXr1ti2bRsSEhKwZs0axMXFoWXLlqhduzaWL1+OxMREQ9WJJ0+eoKCgAK6urkrtrq6uSEhIULtOQkKCTv2zs7Mxffp0DBgwQGPCXLx4MRwdHRUPDw8PHUdDREREZYXOF3gDgK2tLYYNG4YzZ87gjz/+QK9evbB48WKtT1uZory8PPTt2xdCCGzcuFFj35kzZyI1NVXxuH//filVSURERKXttS6EyczMxJkzZ3Dq1Ck8e/YMNWvW1FddKipUqABzc3OVo1eJiYlwc3NTu46bm5tW/QuD0t27d3HixIlirzuSSqWQSqUlGAURERGVNSU6snT27FkMGzYMlSpVwoQJE1CjRg2cOXMGN2/e1Hd9ClZWVvD398fx48cVbXK5HMePH0dAQIDadQICApT6A0BERIRS/8KgFBcXh2PHjqF8+fKGGQARERGVSVofWXr06BG++uorbNu2DX/88QeaNWuGlStXon///rCzszNkjQphYWEICQlB48aN0aRJE6xevRqZmZkIDQ0FAAwZMgSVK1fG4sWLAQATJ05EmzZtsGLFCnTp0gV79uzB77//js2bNwN4EZT69OmDmJgYHDhwAAUFBYrrmZydnWFlZVUq4yIiIiL
TpXVY8vDwQPny5TF48GAMHz4ctWvXNmRdavXr1w+PHz/G3LlzkZCQgAYNGuDw4cOKi7jv3bsHM7N/D5Y1b94cu3btwuzZs/HJJ5/A19cXP/zwA+rWrQsAePDgAX766ScAQIMGDZSe6+TJk2jbtm2pjIuIiIhMl9bzLH333Xfo3r07LCw438+rOM8SERFR2aPt57fWn8C9evXSS2FEREREZUmJLvAmIiIielswLBERERFpwLBEREREpAHDEhEREZEGOt9ilZmZiSVLluD48eNISkqCXC5XWv7333/rrTgiIiIiY9M5LH344Yc4deoUBg8ejEqVKkEikRiiLiJ6TUlp2UhKz9F5PRd7KVwcZAaoiIiobNI5LB06dAgHDx5EixYtDFEPEenJzuh7WHM8Tuf1JnbwxaSgGgaoiIiobNI5LJUrVw7Ozs6GqIWI9Ghg06oI8nNVasvOK0CfTVEAgH0fBUBmaa6ynos9vySaiOhlOoelhQsXYu7cufjqq69gY2NjiJqISA9cHGQqp9OycvMV//Zzd+DM8EREWtD5nXLFihX466+/4OrqCi8vL1haWiotj4mJ0VtxRERERMamc1jq0aOHAcogIiIiMk06h6Xw8HBD1EFERERkkkp8wcLFixdx8+ZNAECdOnXQsGFDvRVFREREZCp0DktJSUno378/fv31Vzg5OQEAUlJS0K5dO+zZswcVK1bUd41ERERERqPz152MHz8e6enpuH79OpKTk5GcnIxr164hLS0NEyZMMESNREREREaj85Glw4cP49ixY6hdu7aizc/PD+vXr0fHjh31WhwRERGRsel8ZEkul6tMFwAAlpaWKt8TR0RERFTW6RyW2rdvj4kTJ+Lhw4eKtgcPHmDSpEno0KGDXosjIiIiMjadw9K6deuQlpYGLy8vVKtWDdWqVYO3tzfS0tKwdu1aQ9RIREREZDQ6X7Pk4eGBmJgYHDt2DLdu3QIA1K5dG4GBgXovjoiIiMjYSjTPkkQiQVBQEIKCgvRdDxEREZFJ0SosffbZZxg5ciRkMhk+++wzjX05fQARERG9SbQKS6tWrcLAgQMhk8mwatWqIvtJJBKGJSIiInqjaBWW4uPj1f6biIiI6E2n891wCxYsQFZWlkr78+fPsWDBAr0URURERGQqdA5L8+fPR0ZGhkp7VlYW5s+fr5eiiIiIiEyFzmFJCAGJRKLSfvnyZTg7O+ulKCIiIiJTofXUAeXKlYNEIoFEIkGNGjWUAlNBQQEyMjLw0UcfGaRIIiIiImPROiytXr0aQggMGzYM8+fPh6Ojo2KZlZUVvLy8EBAQYJAiiYiIiIxF67AUEhICAPD29kbz5s3VfpkuERER0ZtG5xm827Rpo/h3dnY2cnNzlZY7ODi8flVEREREJkLnC7yzsrIwbtw4uLi4wNbWFuXKlVN6EBEREb1JdA5LU6dOxYkTJ7Bx40ZIpVJ88cUXmD9/Ptzd3bF9+3ZD1EhERERkNDqfhvv555+xfft2tG3bFqGhoWjVqhWqV68OT09P7Ny5EwMHDjREnURERERGofORpeTkZPj4+AB4cX1ScnIyAKBly5Y4ffq0fqsjIiIiMjKdw5KPj4/i++Fq1aqFb7/9FsCLI05OTk56LY6IiIjI2HQOS6Ghobh8+TIAYMaMGVi/fj1kMhkmTZqEqVOn6r1AIiIiImPS+ZqlSZMmKf4dGBiIW7du4eLFi6hevTreeecdvRZHRESmJyktG0npOTqv52IvhYuDzAAVERmWzmFp+/bt6NevH6RSKQDA09MTnp6eyM3Nxfbt2zFkyBC9F0lERKZjZ/Q9rDkep/N6Ezv4YlJQDQNURGRYOoel0NBQdOrUCS4uLkrt6enpCA0NZVgiInrDDWxaFUF+rkpt2XkF6LMpCgCw76MAyCzNVdZzsZeWSn1E+qZzWBJCKH2JbqF//vlH6fviiIjozeTiIFM5nZaVm6/4t5+7A2ysdP54ITJZWu/NDRs2hEQigUQiQYc
OHWBh8e+qBQUFiI+PR6dOnQxSJBEREZGxaB2WevToAQC4dOkSgoODYWdnp1hmZWUFLy8v9O7dW+8FEhERERmT1mEpPDwcAODl5YX+/fsrLvAmIiIiepPpPM+Sn58fLl26pNIeHR2N33//XR81EREREZkMncPS2LFjcf/+fZX2Bw8eYOzYsXopioiIiMhU6ByWbty4gUaNGqm0N2zYEDdu3NBLUURERESmQuewJJVKkZiYqNL+6NEjpTvkiIiIiN4EOoeljh07YubMmUhNTVW0paSk4JNPPkFQUJBeiyMiIiIyNp0PBS1fvhytW7eGp6cnGjZsCODFdAKurq7YsWOH3gskIiIiMiadw1LlypVx5coV7Ny5E5cvX4a1tTVCQ0MxYMAAWFpaGqJGIiIiIqMp0UVGtra2GDlypL5rISIiIjI5Ol+zBAA7duxAy5Yt4e7ujrt37wIAVq1ahR9//FGvxREREREZm85haePGjQgLC8N7772HZ8+eoaCgAABQrlw5rF69Wt/1ERERERmVzmFp7dq12LJlC2bNmqU0VUDjxo1x9epVvRZHREREZGw6h6X4+HjFXXAvk0qlyMzM1EtRRERERKZC57Dk7e2t9rvhDh8+jNq1a+ujJiIiIiKTofPdcGFhYRg7diyys7MhhMD58+exe/duLF68GF988YUhaiQiIiIyGp3D0ocffghra2vMnj0bWVlZ+OCDD+Du7o41a9agf//+hqiRiIhMXIFcKP59Pj4ZrXwrwtxMYsSKqKxJSstGUnqOzuu52Evh4iAzQEX/KtE8SwMHDsTAgQORlZWFjIwMuLi46LsuIiIqIw5fe4Twn64rfh669QIqOcoQ3s0PnepWMmJlVJbsjL6HNcfjdF5vYgdfTAqqYYCK/lWieZYAICkpCRcvXsTt27fx+PFjfdak0fr16+Hl5QWZTIamTZvi/PnzGvvv3bsXtWrVgkwmQ7169fDLL78oLRdCYO7cuahUqRKsra0RGBiIuDjdXywiorfR4WuPMPrrGCSmKR8RSEjNxuivY3D42iMjVUZlzcCmVXFgfEvF45P3aqG8rZVSnwp2VvjkvVpK/QY2rWrw2nQOS+np6Rg8eDDc3d3Rpk0btGnTBu7u7hg0aJDSl+sawjfffIOwsDCEh4cjJiYG9evXR3BwMJKSktT2j4yMxIABAzB8+HDExsaiR48e6NGjB65du6bos3TpUnz22WfYtGkToqOjYWtri+DgYGRnZxt0LEREZV2BXGD+zzcg1CwrbJv/8w2lU3RERXFxkKFuZUfUreyIf55lYfGhW3iamavU52lGLhYfuoV/nmUp+hr6FBwASIQQOu3F/fr1Q2xsLNauXYuAgAAAQFRUFCZOnIgGDRpgz549BikUAJo2bYp3330X69atAwDI5XJ4eHhg/PjxmDFjhtpaMzMzceDAAUVbs2bN0KBBA2zatAlCCLi7u2Py5MmYMmUKACA1NRWurq7Ytm1bkddg5eTkICfn37+i0tLS4OHhgfv378PBwQEAYGlpCWtrazx//hx5eXmKvlKpVDHNQuGEngAgk8lgZWWFjIwMyOVyZOXmo8l/j0NiKcXNRZ2Rn52lVIOtrS3MzMyQnp6u1G5vbw+5XK4yjYODgwPy8/ORlfXvdszMzGBnZ4fc3FylcGhubg5bW1uVcb7umArZ2NjAwsICaWlpHFMpj6lAYo5aM34EhBznZ3WAjZVFmR/Tm/g6lZUxnb71CEO2xaA424c2QutalcrEmN7E16msjalALtBp/XkkpisHpUISAK4OUhwa867iuriSjunhw4eoXLkyUlNTFZ/fagkd2djYiDNnzqi0nz59WtjY2Oi6Oa3l5OQIc3Nz8f333yu1DxkyRHTv3l3tOh4eHmLVqlVKbXPnzhXvvPOOEEKIv/76SwAQsbGxSn1at24tJkyYUGQt4eHhAi/+cCryMXz4cCGEEMOHD1dqDw8PF0II0bFjR6X2LVu2CCGE8PPzU2p3eX++yMz
JE/b29krt165dE6mpqSrPm5qaKq5du6bUZm9vL4QQ4vDhw0rtfn5+QgghtmzZotTesWNHtePU15gOHz4shBAckxHGlJmTJ2ReDd+oMb2Jr1NZGdPg2WuE5/QDxT4Gz15TZsb0Jr5OZW1MUo96Wu1XUo96rz2mmjVrKmrTROcjS1WrVsXBgwdRr149pfYrV66gc+fO+Oeff3TZnNYK019kZKTiiBYATJs2DadOnUJ0dLTKOlZWVvjqq68wYMAARduGDRswf/58JCYmIjIyEi1atMDDhw9RqdK/FyH27dsXEokE33zzjdpaeGTJNP8a4ZiKH1OuMEOdWQcACGwc2BDNfZxha2Ndpsf0Jr5OZWVMPLJUNl6nsjamX64nYcaPt1GcJf+pic51XF5rTNoeWdL5brjZs2cjLCwMO3bsgJubGwAgISEBU6dOxZw5c3TdXJlU+Mt/lYODg8ov29raGtbW1ip9bW1t1W7bzs4OAGCRmw8zqY3SttVR125ubq623cLCQm27lZUVrKysVNqLGmdJx6RN7UW1c0yvP6bCO5bMLF9sY+y3N5TuWCqLYyr0Jr1OhcrCmFrUcEMlRxkSUrOh7q9uCQA3Rxla1HjxWVEWxvQmvk5lbUyeLnkqy9XxdCmnt8/c4pToi3TPnTuHqlWronr16qhevTqqVq2KyMhIfP7552jUqJHioU8VKlSAubk5EhMTldoTExMVoe1Vbm5uGvsX/leXbRKVRbxjiQzB3EyC8G5+AF4Eo5cV/hzezY/zLZFOmng7o5KjTGWfKiQBUMlRhibezqVWk85Hlnr06GGAMopnZWUFf39/HD9+XFGDXC7H8ePHMW7cOLXrBAQE4Pjx4/j4448VbREREYrTeN7e3nBzc8Px48fRoEEDAC9OqUVHR2P06NGGHA5RqSnujiUJXtyxFOTnxg810lmnupWwcVAjhP90XSmMu3GeJSqhwhA++usYSACl9y5jhXCdw1J4eLgh6tBKWFgYQkJC0LhxYzRp0gSrV69GZmYmQkNDAQBDhgxB5cqVsXjxYgDAxIkT0aZNG6xYsQJdunTBnj178Pvvv2Pz5s0AAIlEgo8//hiLFi2Cr68vvL29MWfOHLi7uxstFBLp2/n4ZDxKLXoqDAHgUWo2zscnI6Ba+dIrjN4YnepWQovqFVBv3lEAwLbQdzmDN+ns5Rm8q5Szwcz3auHz038rTR9Q3s4KI1v5oEo5G1x78GK6IpOcwfvkyZNo166d2mWff/45Ro0a9dpFFaVfv354/Pgx5s6di4SEBDRo0ACHDx+Gq6srAODevXswM/v3zGLz5s2xa9cuzJ49G5988gl8fX3xww8/oG7duoo+06ZNQ2ZmJkaOHImUlBS0bNkShw8fhkxm+HkbiEpDUrp2c4Zp249InZeDURNvZwYl0pk2M3g/ycjF/w7dUmorjRm8db4bTiqVYsKECfjf//4HS0tLAMCTJ08QGhqKs2fP4tmzZwYp1JSlpaXB0dGx+HkadJCVmw+/uUcAADcWBMPGqkTfTEOEqL+eYsCWc8X22z2iGY8sUYnxPYtelzG+G07bz+8SHVkaMmQIIiIisGvXLsTHx2P48OGoWbMmLl26VKJiichwCi+WLO6OpdK8WJKI6FUuDrJSmY27JHS+G6558+a4dOkS6tati0aNGqFnz56YNGkSfv31V3h6ehqiRiJ6DbxjiYjo9ZToi3T/+OMP/P7776hSpQosLCxw+/ZtpQmyiMi0FN6x5OKgPNeKm6MMGwc14h1LREQa6ByWlixZgoCAAAQFBeHatWs4f/48YmNj8c477yAqKsoQNRKRHnSqWwnHwtooft4W+i7OTm/PoEREVAydw9KaNWvwww8/YO3atZDJZKhbty7Onz+PXr16oW3btgYokYj0hXcsERHpTucLvK9evYoKFSootVlaWmLZsmXo2rWr3gojIiIiMgU6H1l6NSi9rHbt2q9VDBEREZGp0Tos2djY4PHjx4qfu3TpgkeP/v0
+qcTERFSqxGsfiIiI6M2idVjKzs7Gy/NXnj59Gs+fP1fqo+P8lkREREQmr0RTBxRFIuHFokRERPRm0WtYIiIiInrTaB2WJBKJ0pGjV38mIiIiehNpPXWAEAI1atRQBKSMjAw0bNgQZmZmiuVEREREbxqtw9LWrVsNWQcRERGRSdI6LIWEhBiyDiIiIiKTpFVYEkLw+iQiIgIAJKVlIyk9R6ktO69A8e8bD9MgszRXWc/FXgoXB5nB6yPSN63CUp06dTB37lz06tULVlZWRfaLi4vDypUr4enpiRkzZuitSCIiMh07o+9hzfG4Ipf32aT+S9UndvDFpKAahiqLyGC0Cktr167F9OnTMWbMGAQFBaFx48Zwd3eHTCbDs2fPcOPGDZw9exbXr1/HuHHjMHr0aEPXTURERjKwaVUE+bnqvJ6LvdQA1RAZnlZhqUOHDvj9999x9uxZfPPNN9i5cyfu3r2L58+fo0KFCmjYsCGGDBmCgQMHoly5coaumYiIjMjFQcbTafRW0foCbwBo2bIlWrZsaahaiIiIiEwOZ/AmIiIi0oBhyUQVyP+d5PN8fLLSz0RERFR6GJZM0OFrjxC48pTi56FbL6Dlpydw+NojI1ZFRET0dmJYMjGHrz3C6K9jkJimPIdJQmo2Rn8dw8BERERUyhiWTEiBXGD+zzeg7oRbYdv8n2/wlBwREVEpKlFY+uuvvzB79mwMGDAASUlJAIBDhw7h+vXrei3ubXM+PhmPUrOLXC4APErNxvn45NIrioiI6C2nc1g6deoU6tWrh+joaHz33XfIyMgAAFy+fBnh4eF6L/BtkpRedFAqST8iIiJ6fTqHpRkzZmDRokWIiIhQ+uqT9u3b49y5c3ot7m3jYq/dJG/a9iMiIqLXp3NYunr1Knr27KnS7uLigidPnuilqLdVE29nVHKUoaivLJYAqOQoQxNv59Isi4iI6K2mc1hycnLCo0eqd2TFxsaicuXKeinqbWVuJkF4Nz8AUAlMhT+Hd/ODuVlRcYqIiIj0Teew1L9/f0yfPh0JCQmQSCSQy+X47bffMGXKFAwZMsQQNb5VOtWthI2DGsHFQfkLJ90cZdg4qBE61a1kpMqIiIjeTjp9NxwA/O9//8PYsWPh4eGBgoIC+Pn5oaCgAB988AFmz55tiBrfOp3qVkKL6hVQb95RAMC20HfRyrcijygREREZgc5hycrKClu2bMGcOXNw7do1ZGRkoGHDhvD19TVEfW+tl4NRE29nBiUiIiIj0TksFapatSqqVq2qz1qIiIiITI7OYWnYsGEal3/55ZclLoaIiIjI1Ogclp49e6b0c15eHq5du4aUlBS0b99eb4URERERmQKdw9L333+v0iaXyzF69GhUq1ZNL0URERERmQq9fJGumZkZwsLCsGrVKn1sjoiIiMhk6CUsAS++XDc/P19fmyMiIiIyCTqfhgsLC1P6WQiBR48e4eDBgwgJCdFbYURERESmQOewFBsbq/SzmZkZKlasiBUrVhR7pxwRERFRWaNzWDp58qQh6iAiIiIySXq7ZomIiIjoTaTVkaWGDRtCItHu6zZiYmJeqyAiIiIiU6JVWOrRo4eByyAiIiIyTVqFpfDwcEPXQURERGSSeM0SERERkQY63w1XUFCAVatW4dtvv8W9e/eQm5urtDw5OVlvxREREREZm85HlubPn4+VK1eiX79+SE1NRVhYGHr16gUzMzPMmzfPACUSERERGY/OYWnnzp3YsmULJk+eDAsLCwwYMABffPEF5s6di3PnzhmiRiIiIiKj0TksJSQkoF69egAAOzs7pKamAgC6du2KgwcP6rc6IiIiIiPTOSxVqVIFjx49AgBUq1YNR48eBQBcuHABUqlUv9URERERGZnOYalnz544fvw4AGD8+PGYM2cOfH19MWTIEH43HBEREb1xtL4bbt26dRg0aBCWLFmiaOvXrx+qVq2KqKgo+Pr6olu3bgYpkoiIiMhYtD6yNGvWLLi7u2P
gwIE4ceKEoj0gIABhYWEMSkRERPRG0josJSQkYNOmTXj48CGCgoLg7e2NhQsX4v79+4asj4iIiMiotA5L1tbWGDJkCE6ePIm4uDgMHjwY//d//wdvb2906tQJe/fuRV5eniFrJSIiIip1Jfq6Ex8fHyxYsADx8fE4dOgQypcvj6FDh6Jy5cr6ro+IiIjIqF7ru+EkEgksLCwgkUgghOCRJSIiInrjlCgs3b9/HwsWLICPjw+CgoLw8OFDbNmyRTH/EhEREdGbQuuwlJubiz179qBjx47w9vbGli1b8MEHH+CPP/7AiRMnMHDgQMhkMoMVmpycjIEDB8LBwQFOTk4YPnw4MjIyNK6TnZ2NsWPHonz58rCzs0Pv3r2RmJioWH758mUMGDAAHh4esLa2Ru3atbFmzRqDjYGIiIjKHq3nWXJzc0NWVha6du2Kn3/+GcHBwTAze62zeDoZOHAgHj16hIiICOTl5SE0NBQjR47Erl27ilxn0qRJOHjwIPbu3QtHR0eMGzcOvXr1wm+//QYAuHjxIlxcXPD111/Dw8MDkZGRGDlyJMzNzTFu3LjSGhqRQSSlZSMpPUepLTuvQPHvGw/TILM0V1nPxV4KFwfD/eFDRFTWSIQQQpuOK1euxODBg1GxYkVD16Ti5s2b8PPzw4ULF9C4cWMAwOHDh9G5c2f8888/cHd3V1knNTUVFStWxK5du9CnTx8AwK1bt1C7dm1ERUWhWbNmap9r7NixuHnzptJcUsVJS0uDo6MjUlNT4eDgUIIRqsrKzYff3CMAgBsLgmFjpXWuJQIArIr4A2uOx+m83sQOvpgUVMMAFRERmRZtP7+1/gQOCwvTS2ElERUVBScnJ0VQAoDAwECYmZkhOjoaPXv2VFnn4sWLyMvLQ2BgoKKtVq1aihnHiwpLqampcHZ21lhPTk4OcnL+/Ys9LS1N1yERGdzAplUR5Oeq83ou9vyORyKil5WJwxUJCQlwcXFRarOwsICzszMSEhKKXMfKygpOTk5K7a6urkWuExkZiW+++QYHDx7UWM/ixYsxf/587QdAZAQuDjKeTiMi0oPSu+hIjRkzZkAikWh83Lp1q1RquXbtGv7zn/8gPDwcHTt21Nh35syZSE1NVTw4izkREdGby6hHliZPnoyhQ4dq7OPj4wM3NzckJSUptefn5yM5ORlubm5q13Nzc0Nubi5SUlKUji4lJiaqrHPjxg106NABI0eOxOzZs4utWyqVQirlqQoiIqK3gVHDUsWKFbW6YDwgIAApKSm4ePEi/P39AQAnTpyAXC5H06ZN1a7j7+8PS0tLHD9+HL179wYA3L59G/fu3UNAQICi3/Xr19G+fXuEhITgv//9rx5GRURERG8SncNSQUEBtm3bhuPHjyMpKQlyuVxpuS53kWmrdu3a6NSpE0aMGIFNmzYhLy8P48aNQ//+/RV3wj148AAdOnTA9u3b0aRJEzg6OmL48OEICwuDs7MzHBwcMH78eAQEBCgu7r527Rrat2+P4OBghIWFKa5lMjc3N8pdf0RERGR6dA5LEydOxLZt29ClSxfUrVsXEonEEHWp2LlzJ8aNG4cOHTrAzMwMvXv3xmeffaZYnpeXh9u3byMrK0vRtmrVKkXfnJwcBAcHY8OGDYrl+/btw+PHj/H111/j66+/VrR7enrizp07pTIuIiIiMm1az7NUqEKFCti+fTs6d+5sqJrKHM6zREREVPZo+/mt891wVlZWqF69+msVR0RERFRW6ByWJk+ejDVr1kDHA1JEREREZZLO53bOnj2LkydP4tChQ6hTpw4sLS2Vln/33Xd6K46IiIjI2HQOS05OTmq/XoSIiIjoTaRzWNq6dash6iAiIiIySUb9uhMiIiIiU1ei+9H37duHb7/9Fvfu3UNubq7SspiYGL0URkRERGQKdD6y9NlnnyE0NBSurq6IjY1FkyZNUL58efz999947733DFEjERERkdHoHJY2bNiAzZs3Y+3atbCyssK
0adMQERGBCRMmIDU11RA1EhERERmNzmHp3r17aN68OQDA2toa6enpAIDBgwdj9+7d+q2OiIiIyMh0Dktubm5ITk4GAFStWhXnzp0DAMTHx3OiSiIiInrj6ByW2rdvj59++gkAEBoaikmTJiEoKAj9+vXj/EtERET0xtH5brjNmzdDLpcDAMaOHYvy5csjMjIS3bt3x6hRo/ReIBEREZEx6RyWzMzMYGb27wGp/v37o3///notioiIiMhUlGhSyjNnzmDQoEEICAjAgwcPAAA7duzA2bNn9VocERERkbHpHJb279+P4OBgWFtbIzY2Fjk5OQCA1NRU/O9//9N7gURERETGpHNYWrRoETZt2oQtW7bA0tJS0d6iRQvO3k1ERERvHJ3D0u3bt9G6dWuVdkdHR6SkpOijJiIiIiKTUaJ5lv7880+V9rNnz8LHx0cvRRERERGZCp3D0ogRIzBx4kRER0dDIpHg4cOH2LlzJ6ZMmYLRo0cbokYiIiIio9F56oAZM2ZALpejQ4cOyMrKQuvWrSGVSjFlyhSMHz/eEDUSERERGY3OYUkikWDWrFmYOnUq/vzzT2RkZMDPzw92dnaGqI+IiIjIqHQOS4WsrKzg5+enz1qIiIiITI7WYWnYsGFa9fvyyy9LXAwRERGRqdE6LG3btg2enp5o2LAhhBCGrImIiIjIZGgdlkaPHo3du3cjPj4eoaGhGDRoEJydnQ1ZGxEREZHRaT11wPr16/Ho0SNMmzYNP//8Mzw8PNC3b18cOXKER5qIiIjojaXTPEtSqRQDBgxAREQEbty4gTp16mDMmDHw8vJCRkaGoWokIiIiMhqdJ6VUrGhmBolEAiEECgoK9FkTERERkcnQKSzl5ORg9+7dCAoKQo0aNXD16lWsW7cO9+7d4zxLRERE9EbS+gLvMWPGYM+ePfDw8MCwYcOwe/duVKhQwZC1ERERERmd1mFp06ZNqFq1Knx8fHDq1CmcOnVKbb/vvvtOb8URERERGZvWYWnIkCGQSCSGrIWIiIjI5Og0KSURERHR26bEd8MRERERvQ0YloiIiIg0YFgiIiIi0oBhiYiIiEgDhiUiIiIiDRiWiIiIiDRgWCIiIiLSgGGJiIiISAOGJSIiIiINGJaIiIiINGBYIiIiItKAYYmIiIhIA4YlIiIiIg0YloiIiIg0YFgiIiIi0oBhiYiIiEgDhiUiIiIiDRiWiIiIiDRgWCIiIiLSgGGJiIiISAOGJSIiIiINGJaIiIiINGBYIiIiItKAYYmIiIhIA4YlIiIiIg0YloiIiIg0YFgiIiIi0qDMhKXk5GQMHDgQDg4OcHJywvDhw5GRkaFxnezsbIwdOxbly5eHnZ0devfujcTERLV9nz59iipVqkAikSAlJcUAIyAiIqKyqMyEpYEDB+L69euIiIjAgQMHcPr0aYwcOVLjOpMmTcLPP/+MvXv34tSpU3j48CF69eqltu/w4cPxzjvvGKJ0IiIiKsPKRFi6efMmDh8+jC+++AJNmzZFy5YtsXbtWuzZswcPHz5Uu05qair+7//+DytXrkT79u3h7++PrVu3IjIyEufOnVPqu3HjRqSkpGDKlCmlMRwiIiIqQ8pEWIqKioKTkxMaN26saAsMDISZmRmio6PVrnPx4kXk5eUhMDBQ0VarVi1UrVoVUVFRirYbN25gwYIF2L59O8zMtPt15OTkIC0tTelBREREb6YyEZYSEhLg4uKi1GZhYQFnZ2ckJCQUuY6VlRWcnJyU2l1dXRXr5OTkYMCAAVi2bBmqVq2qdT2LFy+Go6Oj4uHh4aHbgIiIiKjMMGpYmjFjBiQSicbHrVu3DPb8M2fORO3atTFo0CCd10tNTVU87t+/b6AKiYiIyNgsjPnkkydPxtChQzX28fHxgZubG5KSkpTa8/PzkZycDDc3N7Xrubm5ITc3FykpKUpHlxITExXrnDhxAlevXsW+ffsAAEIIAECFChUwa9YszJ8/X+22pVIppFKpNkM
kIiKiMs6oYalixYqoWLFisf0CAgKQkpKCixcvwt/fH8CLoCOXy9G0aVO16/j7+8PS0hLHjx9H7969AQC3b9/GvXv3EBAQAADYv38/nj9/rljnwoULGDZsGM6cOYNq1aq97vCIiIjoDWDUsKSt2rVro1OnThgxYgQ2bdqEvLw8jBs3Dv3794e7uzsA4MGDB+jQoQO2b9+OJk2awNHREcOHD0dYWBicnZ3h4OCA8ePHIyAgAM2aNQMAlUD05MkTxfO9eq0TERERvZ3KRFgCgJ07d2LcuHHo0KEDzMzM0Lt3b3z22WeK5Xl5ebh9+zaysrIUbatWrVL0zcnJQXBwMDZs2GCM8omIiKiMkojCC3WoxNLS0uDo6IjU1FQ4ODjoZZtZufnwm3sEAHBjQTBsrMpMriUiIioTtP38LhNTBxAREREZC8MSERERkQYMS0REREQaMCwRERERacCwRERERKQBwxIRERGRBgxLRERERBowLBERERFpwLBEREREpAHDEhEREZEGDEtEREREGjAsEREREWnAsERERESkAcMSERERkQYMS0REREQaMCwRERERacCwRERERKQBwxIRERGRBgxLRERERBowLBERERFpwLBEREREpAHDEhEREZEGDEtEREREGjAsEREREWnAsERERESkAcMSERERkQYMS0REREQaMCwRERERacCwRERERKQBwxIRERGRBgxLRERERBowLBERERFpwLBEREREpIGFsQt4EwghAABpaWklWv9xWjYeZ+QotWXnFUCekwUAOH/7H8gszVXWq2gnRUUHWYmek4iI6G1X+Lld+DleFIkorgcV659//oGHh4exyyAiIqISuH//PqpUqVLkcoYlPZDL5Xj48CHs7e0hkUj0tt20tDR4eHjg/v37cHBw0Nt26e3G/YoMhfsWGYIh9yshBNLT0+Hu7g4zs6KvTOJpOD0wMzPTmEhfl4ODA994SO+4X5GhcN8iQzDUfuXo6FhsH17gTURERKQBwxIRERGRBgxLJkwqlSI8PBxSqdTYpdAbhPsVGQr3LTIEU9iveIE3ERERkQY8skRERESkAcMSERERkQYMS0REREQaMCwRERERacCwRERERKQBw5IJOH36NLp16wZ3d3dIJBL88MMPSsuFEJg7dy4qVaoEa2trBAYGIi4uzjjFksnSx36UnJyMgQMHwsHBAU5OThg+fDgyMjJKcRRkbKW1H125cgWtWrWCTCaDh4cHli5dauihUSkypf1o7969qFWrFmQyGerVq4dffvlF5/EwLJmAzMxM1K9fH+vXr1e7fOnSpfjss8+wadMmREdHw9bWFsHBwcjOzi7lSsmU6WM/GjhwIK5fv46IiAgcOHAAp0+fxsiRI0trCGQCSmM/SktLQ8eOHeHp6YmLFy9i2bJlmDdvHjZv3mzw8VHpMJX9KDIyEgMGDMDw4cMRGxuLHj16oEePHrh27ZpuAxJkUgCI77//XvGzXC4Xbm5uYtmyZYq2lJQUIZVKxe7du41QIZUFJdmPbty4IQCICxcuKPocOnRISCQS8eDBg1KrnUyHofajDRs2iHLlyomcnBxFn+nTp4uaNWsaeERkDMbcj/r27Su6dOmiVE/Tpk3FqFGjdBoDjyyZuPj4eCQkJCAwMFDR5ujoiKZNmyIqKsqIlVFZos1+FBUVBScnJzRu3FjRJzAwEGZmZoiOji71msn06Gs/ioqKQuvWrWFlZaXoExwcjNu3b+PZs2elNBoyltLcj6KiopSep7CPrp+fDEsmLiEhAQDg6uqq1O7q6qpYRlQcbfajhIQEuLi4KC23sLCAs7Mz9zUCoL/9KCEhQe02Xn4OenOV5n5UVB9d9zOGJSIiIiINGJZMnJubGwAgMTFRqT0xMVGxjKg42uxHbm5uSEpKUlqen5+P5ORk7msEQH/7kZubm9ptvPwc9OYqzf2oqD667mcMSybO29sbbm5uOH78uKItLS0N0dHRCAgIMGJlVJZosx8FBAQgJSU
FFy9eVPQ5ceIE5HI5mjZtWuo1k+nR134UEBCA06dPIy8vT9EnIiICNWvWRLly5UppNGQspbkfBQQEKD1PYR+dPz91uhycDCI9PV3ExsaK2NhYAUCsXLlSxMbGirt37wohhFiyZIlwcnISP/74o7hy5Yr4z3/+I7y9vcXz58+NXDmZEn3sR506dRINGzYU0dHR4uzZs8LX11cMGDDAWEMiIyiN/SglJUW4urqKwYMHi2vXrok9e/YIGxsb8fnnn5f6eMkwTGU/+u2334SFhYVYvny5uHnzpggPDxeWlpbi6tWrOo2HYckEnDx5UgBQeYSEhAghXtxmOWfOHOHq6iqkUqno0KGDuH37tnGLJpOjj/3o6dOnYsCAAcLOzk44ODiI0NBQkZ6eboTRkLGU1n50+fJl0bJlSyGVSkXlypXFkiVLSmuIVApMaT/69ttvRY0aNYSVlZWoU6eOOHjwoM7jkQghhG7HooiIiIjeHrxmiYiIiEgDhiUiIiIiDRiWiIiIiDRgWCIiIiLSgGGJiIiISAOGJSIiIiINGJaIiIiINGBYIiIiItKAYYmIiIhIA4YlIiIiIg0YloiIiIg0+H/HcFmx60FF0QAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "data = [get_noisy_data(shots=10),\n", " get_noisy_data(shots=100),\n", @@ -973,7 +708,8 @@ "labels =['series 1']\n", "\n", "plot_data([data], categories, labels)\n", - "\n" + "\n", + "" ] }, { @@ -992,21 +728,10 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": null, "id": "e4a556de-4864-48ee-bbf2-e09d1a991079", "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlQAAAGzCAYAAADpMYmOAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjcsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvTLEjVAAAAAlwSFlzAAAPYQAAD2EBqD+naQAAY49JREFUeJzt3Xl8TNf/P/DXZJuJRBIhyQgRYougliCidiGK9htLicYWKa3aYy8VyqeWVqlaUvoppZRS1daSilCKCI2gtnzQWCqSIJLJIvv5/eGXqTGTmMnMJBlez8djHu2ce+6575PczLydc++5EiGEABERERGVmVlFB0BERERk6phQEREREemJCRURERGRnphQEREREemJCRURERGRnphQEREREemJCRURERGRnphQEREREemJCRURERGRnphQERFpsHz5cnh6eqKoqKiiQzEJmzdvhkQiwa1btyo6FDKQ33//HRKJBL///rtR2g8MDMTgwYON0nZFYEL1iiv+EJRIJDhx4oTadiEE3NzcIJFI0K9fvwqI0Ph27tyJYcOGoWHDhpBIJOjatatO+z948ACTJ0+Gp6cnrK2t4ezsjHbt2mHWrFnIzMxU1hs1apTyZy2RSGBrawsPDw8MGjQIP/74o9Zf3AsWLFBp5/lXUlKSTvGXl65du6rEaWVlhXr16mHs2LG4e/duRYenQqFQYNmyZZg1axbMzFQ/JnNycrBy5Ur4+PjA3t4eMpkMjRo1woQJE/C///3PqHGtW7cOmzdvNuoxKotTp05hwYIFSEtL03qfe/fuYfDgwXBwcICdnR3+7//+D3///bfxgixB8bn+5ptvqm27desWJBIJPvvsM4Mdr27dupBIJJg4caLatuKkaPfu3QY7nqHMmjULP/74Iy5cuFDRoRiERUUHQJWDTCbD9u3b0bFjR5XyY8eO4Z9//oFUKq2gyIxv/fr1iI2NRdu2bfHo0SOd9k1NTUWbNm2gUCgwevRoeHp64tGjR7h48SLWr1+PcePGwdbWVllfKpXi66+/BgA8efIEt2/fxq+//opBgwaha9eu+Pnnn2FnZ6d13M+2XczBwUGnPpSn2rVrY8mSJQCAvLw8XLlyBeHh4fjtt99w9epVVKlSpYIjfOqbb75BQUEBhg4dqlL+8OFD9O7dG7GxsejXrx/eeecd2NraIj4+Hjt27MCGDRuQl5dntLjWrVuHGjVqYNSoUUY7RmVx6tQpLFy4EKNGjdLqnM7MzES3bt2Qnp6ODz/8EJaWlli5ciW6dOmC8+fPo3r16sYP+jn79u1DbGwsvL29y+V4GzduxJw5c+Dq6mqQ9jp37ownT57AysrKIO09r1WrVmjTpg1WrFiBLVu2GOUY5UrQK23Tpk0CgBgwYICoUaOGyM/PV9k+ZswY4e3tLdzd3UXfvn0rKMqyOXr0qAAgEhISSq13584dUVhYKIQQomnTpqJLly5aH2P58uUCgDh58qTatvT0dPHkyRPl+5EjRw
obGxuN7SxZskQAEIMHD37hMcPCwgQA8eDBA63jLPbkyRNlX5+XmZmpc3vPKiwsVOnv87p06SKaNm2qVr5mzRoBQBw6dEiv4xfTtx9CCPHaa6+JYcOGqZX37dtXmJmZid27d6tty8nJEdOmTdP72KXR9fzURn5+vsjNzdW7neLPkhf9vWnr008/1am9ZcuWCQDizJkzyrKrV68Kc3NzMWfOHIPEpK0uXbqIOnXqiGrVqok333xTZVtCQoIAID799FODHc/d3V00bdpUWFhYiIkTJ6psK/4c3LVrl8GOZ0ifffaZsLGxERkZGRUdit445UcAgKFDh+LRo0eIjIxUluXl5WH37t145513NO5TVFSEVatWoWnTppDJZHBxccF7772Hx48fq9T7+eef0bdvX7i6ukIqlaJ+/fpYtGgRCgsLVep17doVzZo1w5UrV9CtWzdUqVIFtWrVwvLlyw3f4We4ubmpTeto6+bNmzA3N0f79u3VttnZ2UEmk2nVzuzZs9GrVy/s2rXLYNNGxUP9O3bswLx581CrVi1UqVIFCoUCo0aNgq2tLW7evIk+ffqgatWqCAoKAgBkZWVh2rRpcHNzg1QqRePGjfHZZ59BCKHSvkQiwYQJE7Bt2zY0bdoUUqkUEREROscpl8sBABYW/w6Y3759Gx988AEaN24Ma2trVK9eHW+//bba9TnFU9bHjh3DBx98AGdnZ9SuXRsAkJGRgSlTpqBu3bqQSqVwdnZGz549ce7cuVLjSUhIwMWLF+Hn56dSHhMTg/379yMkJAQDBw5U208qlapN41y7dg2DBg2Co6MjZDIZ2rRpg19++UVjH06ePInQ0FA4OTnBxsYG/fv3x4MHD5T16tati8uXL+PYsWPKadNnp6fT0tIwZcoU5e+tQYMGWLZsmcpU8rPTTatWrUL9+vUhlUpx5coVreMFgMuXL6N79+6wtrZG7dq1sXjxYq2nrC9evIhRo0bBw8MDMpkMcrkco0ePVhkdXrBgAWbMmAEAqFevnrK/pV2ftXv3brRt2xZt27ZVlnl6eqJHjx744YcftIrNkKpWrYqpU6fi119/feE5BwB///033n77bTg6OqJKlSpo37499u/fr/Xx6tatixEjRmDjxo1ITEx8Yf24uDi88cYbsLOzg62tLXr06IHTp0+r1NF0DdX169cxcOBAyOVyyGQy1K5dG4GBgUhPT1fZ97vvvoO3tzesra3h6OiIwMBAjVP7PXv2RFZWlsp3j6nilB8BePrH6Ovri++//x5vvPEGAODgwYNIT09HYGAgVq9erbbPe++9h82bNyM4OBiTJk1CQkIC1qxZg7i4OJw8eRKWlpYAnn5h2NraIjQ0FLa2tjhy5Ajmz58PhUKBTz/9VKXNx48fo3fv3hgwYAAGDx6M3bt3Y9asWWjevLkyrpKkp6cjPz9f5X1xm89OjclkMo1TZWXh7u6OwsJCbN26FSNHjtSrreHDh+PQoUOIjIxEo0aNXlg/NTVVrczCwkJtemTRokWwsrLC9OnTkZubqxy+LygogL+/Pzp27IjPPvsMVapUgRACb731Fo4ePYqQkBC0bNkSv/32G2bMmIF79+5h5cqVKm0fOXIEP/zwAyZMmIAaNWqgbt26pcZcWFiIhw8fAgDy8/Nx9epVhIWFoUGDBnj99deV9c6ePYtTp04hMDAQtWvXxq1bt7B+/Xp07doVV65cUZsa/OCDD+Dk5IT58+cjKysLAPD+++9j9+7dmDBhAry8vPDo0SOcOHECV69eRevWrUuM8dSpUwCgVqc4sRg+fHipfSx2+fJlvP7666hVqxZmz54NGxsb/PDDDwgICMCPP/6I/v37q9SfOHEiqlWrhrCwMNy6dQurVq3ChAkTsHPnTgDAqlWrMHHiRNja2mLu3LkAABcXFwBAdnY2unTpgnv37uG9995DnTp1cOrUKcyZMwf379/HqlWrVI61adMm5OTkYOzYsZBKpXB0dNQ63qSkJHTr1g0FBQ
XKehs2bIC1tbVWP5fIyEj8/fffCA4Ohlwux+XLl7FhwwZcvnwZp0+fhkQiwYABA/C///0P33//PVauXIkaNWoAAJycnDS2WVRUhIsXL2L06NFq29q1a4dDhw4hIyMDVatWLTGu5z8/SqLL58fkyZOxcuVKLFiwQGNiWiw5ORkdOnRAdnY2Jk2ahOrVq+Pbb7/FW2+9hd27d6udKyWZO3cutmzZgqVLl2r8zC52+fJldOrUCXZ2dpg5cyYsLS3x1VdfoWvXrjh27Bh8fHw07peXlwd/f3/k5uZi4sSJkMvluHfvHvbt24e0tDTY29sDAP7zn//go48+wuDBg/Huu+/iwYMH+PLLL9G5c2fExcWpfEZ5eXnB2toaJ0+e1LqflVZFD5FRxSoepj979qxYs2aNqFq1qsjOzhZCCPH222+Lbt26CSGE2pTfH3/8IQCIbdu2qbQXERGhVl7c3rPee+89UaVKFZGTk6Ms69KliwAgtmzZoizLzc0VcrlcDBw48IV9Kd7/Ra+RI0eW2IauUypJSUnCyclJABCenp7i/fffF9u3bxdpaWlqdUub8hNCiLi4OAFATJ06tdRjFk/5aXo1btxYWa94qN/Dw0PtdzBy5EgBQMyePVulfO/evQKAWLx4sUr5oEGDhEQiETdu3FCWARBmZmbi8uXLpcZbrKTfT5MmTcTff/+tUlfTORMdHa12fhSfvx07dhQFBQUq9e3t7cX48eO1iu1Z8+bNEwDUpiD69+8vAIjHjx9r1U6PHj1E8+bNVc7xoqIi0aFDB9GwYUO1Pvj5+YmioiJl+dSpU4W5ubnKuVTS+blo0SJhY2Mj/ve//6mUz549W5ibm4s7d+4IIf6dbrKzsxMpKSllinfKlCkCgIiJiVGWpaSkCHt7e62m6DT9br///nsBQBw/flxZpsuU34MHDwQA8fHHH6ttW7t2rQAgrl27Vmobhvj8eLat4unthQsXCgAiNjZWCKF5yq/4Z/rHH38oyzIyMkS9evVE3bp1S5ymL/bs53NwcLCQyWQiMTFRCKF5yi8gIEBYWVmJmzdvKssSExNF1apVRefOnZVlxfsePXpUCPHvZ1Rp04e3bt0S5ubm4j//+Y9K+V9//SUsLCzUyoUQolGjRuKNN94otY+mgFN+pDR48GA8efIE+/btQ0ZGBvbt21fidN+uXbtgb2+Pnj174uHDh8qXt7c3bG1tcfToUWXdZ//lmpGRgYcPH6JTp07Izs7GtWvXVNq1tbXFsGHDlO+trKzQrl07re7UWbFiBSIjI5Wv4umX7777TqV85syZOv1cSuPi4oILFy7g/fffx+PHjxEeHo533nkHzs7OWLRokdo0WWmK/9WbkZGhVf0ff/xRpV+RkZHYtGmTWr2RI0eWOHowbtw4lfcHDhyAubk5Jk2apFI+bdo0CCFw8OBBlfIuXbrAy8tLq3iBpyOhxbEePHgQq1atQnp6Ot544w2V6a1n483Pz8ejR4/QoEEDODg4aJw+GTNmDMzNzVXKHBwcEBMTo9X0x7MePXoECwsLtVEIhUIBAKWOchRLTU3FkSNHMHjwYOU5//DhQzx69Aj+/v64fv067t27p7LP2LFjIZFIlO87deqEwsJC3L59+4XH27VrFzp16oRq1aqp/D36+fmhsLAQx48fV6k/cOBAldEeXeI9cOAA2rdvj3bt2in3d3JyUk4Zv8izv9ucnBw8fPhQOWWuzdSYJk+ePAEAjTfPFE+7F9cpyfOfHyW9dP38mDx5MqpVq4aFCxeWWOfAgQNo166dyk1Btra2GDt2LG7duqWcktXGvHnzUFBQgKVLl2rcXlhYiEOHDiEgIAAeHh7K8po1a+Kdd97BiRMnlOf684pHoH777TdkZ2drrLNnzx4UFRVh8ODBKueiXC5Hw4YNVb4bihWft6aOU36k5OTkBD8/P2zfvh3Z2dkoLCzEoEGDNNa9fv060tPT4ezsrHF7SkqK8v8vX76MefPm4ciRI2p/qM/Pu9
euXVvlSwV4+sd28eLFF8b//J00xdfkvP766y+citJHzZo1sX79eqxbtw7Xr1/Hb7/9hmXLlmH+/PmoWbMm3n33Xa3aKV5iQZsvbODpHTjFUyGlqVevnsZyCwsL5fVGxW7fvg1XV1e1GJo0aaLcrk3bJbGxsVG5Nql3797o2LEj2rRpg6VLl2LFihUAnn75LVmyBJs2bcK9e/dUEtPnz5mS4li+fDlGjhwJNzc3eHt7o0+fPhgxYoTKl4guiu++zMjIeOFdZzdu3IAQAh999BE++ugjjXVSUlJQq1Yt5fs6deqobK9WrRoAqF2TqMn169dx8eLFEqfEnv17BNR/XrrEe/v2bY1TQo0bN35hnMDT5G3hwoXYsWOHWlyafrfaKE7ScnNz1bbl5OSo1CmJse7Es7e3x5QpUxAWFoa4uDjl7/VZJf1Mn/27a9asmVbH8/DwwPDhw7FhwwbMnj1bbfuDBw+QnZ2t8ffVpEkTFBUV4e7du2jatKna9nr16iE0NBSff/45tm3bhk6dOuGtt97CsGHDlMnW9evXIYRAw4YNNcZXfCnIs4QQap/7pogJFal45513MGbMGCQlJeGNN94o8YujqKgIzs7O2LZtm8btxR/saWlp6NKlC+zs7PDxxx+jfv36kMlkOHfuHGbNmqV2IevzowzFdBnpqSgSiQSNGjVCo0aN0LdvXzRs2BDbtm3TOqG6dOkSAKBBgwYGjaukLxKpVFrmi/Ff1LYuvL29YW9vrzKKMnHiRGzatAlTpkyBr68v7O3tIZFIEBgYqPHiZ01xDB48GJ06dcJPP/2EQ4cO4dNPP8WyZcuwZ8+eUq/Hq169OgoKCtSuufH09AQA/PXXX+jUqVOpfSqOcfr06fD399dY5/nfsz7nflFREXr27Fni6Mnz1+Q9//MqS7xlNXjwYJw6dQozZsxAy5YtYWtri6KiIvTu3bvMi6g6OjpCKpXi/v37atuKy160lEBqaqpWS15YW1srkwdtFV9LtXDhQrXr2Yxh7ty52Lp1K5YtW4aAgACDtr1ixQqMGjUKP//8Mw4dOoRJkyZhyZIlOH36NGrXro2ioiJIJBIcPHhQ4zmt6fqzx48fl5iAmRImVKSif//+eO+993D69GnlxbCa1K9fH4cPH8brr79e6pfq77//jkePHmHPnj3o3LmzsjwhIcGgcVc2Hh4eqFatmsYP+JJs3boVEokEPXv2NGJkpXN3d8fhw4fVkoniqVl3d3ejHLewsFBlEdTdu3dj5MiRyhEr4OlIgy6LPAJPRw8/+OADfPDBB0hJSUHr1q3xn//8p9SEqjhxSkhIwGuvvaYsf/PNN7FkyRJ89913L0yoikfBLC0t1e4W1EdJ/4qvX78+MjMzy3wsXeJ1d3fH9evX1crj4+NfeJzHjx8jKioKCxcuxPz585XlmtrTZcTCzMwMzZs3x59//qm2LSYmBh4eHi8c+R0wYACOHTv2wmONHDlS58VVi0epFixYoPHmFXd3d40/v7L+3dWvXx/Dhg3DV199pTby5eTkhCpVqpR4PDMzM7i5uZXafvPmzdG8eXPMmzcPp06dwuuvv47w8HAsXrwY9evXhxAC9erV0+rmmoKCAty9exdvvfWWTn2sjHgNFamwtbXF+vXrsWDBAo2r/BYbPHgwCgsLsWjRIrVtBQUFyi++4n+hPPuv7Ly8PKxbt86wgWvQtWtXCCGMOt0XExOjvKvsWWfOnMGjR4+0ngZZunQpDh06hCFDhlTov9T69OmDwsJCrFmzRqV85cqVkEgkL7zTsiyOHj2KzMxMtGjRQllmbm6uNjLz5Zdfqi21UZLCwkK16SNnZ2e4urpqnBZ6lq+vLwCofTn7+vqid+/e+Prrr7F37161/fLy8jB9+nTlsbp27YqvvvpKY1L97PViurCxsdGYVA4ePBjR0dH47bff1LalpaWhoKCg1HZ1ibdPnz44ffo0zpw5o7K9pNHqZ2n6PACgcdTGxsZGGb82Bg0ahLNnz6
r83uLj43HkyBG8/fbbL9zfWNdQFZsyZQocHBzw8ccfq23r06cPzpw5g+joaGVZVlYWNmzYgLp16+p0nWKxefPmIT8/X23ZGXNzc/Tq1Qs///yzyjIUycnJysWdS1pcWKFQqJ1LzZs3h5mZmfLvasCAATA3N8fChQvVfs9CCLXFk69cuYKcnBx06NBB5z5WNhyhIjXa3P7fpUsXvPfee1iyZAnOnz+PXr16wdLSEtevX8euXbvwxRdfYNCgQejQoQOqVauGkSNHYtKkSZBIJNi6datRpvAiIyORnJz8wnr169dXfmkCwPHjx5XTTQ8ePEBWVhYWL14M4Ol1Ss+OrD1v69at2LZtG/r37w9vb29YWVnh6tWr+OabbyCTyfDhhx+q1C8oKMB3330H4OmIy+3bt/HLL7/g4sWL6NatGzZs2KB1f3fv3q1x+Lxnz57K2+l19eabb6Jbt26YO3cubt26hRYtWuDQoUP4+eefMWXKFNSvX79M7RZLT09X9r+goADx8fFYv349rK2tVa736NevH7Zu3Qp7e3t4eXkhOjoahw8f1nq164yMDNSuXRuDBg1CixYtYGtri8OHD+Ps2bMqo16aeHh4oFmzZjh8+LDabfhbtmxBr169MGDAALz55pvo0aMHbGxscP36dezYsQP3799X3gyxdu1adOzYEc2bN8eYMWPg4eGB5ORkREdH459//inT4za8vb2xfv16LF68GA0aNICzszO6d++OGTNm4JdffkG/fv0watQoeHt7IysrC3/99Rd2796NW7duvfB6O23jnTlzJrZu3YrevXtj8uTJymUT3N3dX3ito52dHTp37ozly5cjPz8ftWrVwqFDhzSOWBdf0zR37lwEBgbC0tISb775pjLRet4HH3yAjRs3om/fvpg+fTosLS3x+eefw8XFBdOmTdPqZ2tM9vb2mDx5ssaL02fPnq1csmbSpElwdHTEt99+i4SEBPz4449lmpovHqX69ttv1bYtXrwYkZGR6NixIz744ANYWFjgq6++Qm5ubqnr/h05cgQTJkzA22+/jUaNGqGgoABbt26Fubm5cm22+vXrY/HixZgzZw5u3bqFgIAAVK1aFQkJCfjpp58wduxY5T88gKef21WqVKnQkXmDKff7CqlSeXbZhNKUtFL6hg0bhLe3t7C2thZVq1YVzZs3FzNnzlTesiuEECdPnhTt27cX1tbWwtXVVcycOVP89ttvKrfjClHyStojR44U7u7uL+xLWW97Lm0ZgrCwsFKPefHiRTFjxgzRunVr4ejoKCwsLETNmjXF22+/Lc6dO6fWj2fbrlKliqhbt64YOHCg2L179wtvjdYm3md/pqWtkFzaEg4ZGRli6tSpwtXVVVhaWoqGDRuKTz/9VOWWfiGeLpugy7IEz/9+JBKJcHR0FG+99ZbylvJijx8/FsHBwaJGjRrC1tZW+Pv7i2vXrgl3d3eV319J529ubq6YMWOGaNGihahataqwsbERLVq0EOvWrdMq1s8//1zY2tpqvMU/OztbfPbZZ6Jt27bC1tZWWFlZiYYNG4qJEyeqLCshhBA3b94UI0aMEHK5XFhaWopatWqJfv36qay0XlIfnr9lXYiny3T07dtXVK1aVQBQWUIhIyNDzJkzRzRo0EBYWVmJGjVqiA4dOojPPvtM5OXlCSFevEq3NvEK8fS879Kli5DJZKJWrVpi0aJF4r///a9Wyxz8888/on///sLBwUHY29uLt99+WyQmJmr8e1u0aJGoVauWMDMz06rtu3fvikGDBgk7Oztha2sr+vXrJ65fv17qPsZQ0mfZ48ePlctLPP87uHnzphg0aJBwcHAQMplMtGvXTuzbt0+r45X0+Xz9+nVhbm6u8XPg3Llzwt/fX9ja2ooqVaqIbt26iVOnTqnUef4c/Pvvv8Xo0aNF/fr1hUwmE46OjqJbt27i8OHDasf+8ccfRceOHYWNjY2wsbERnp6eYvz48SI+Pl6lno+Pj8anEpgiiR
AmcLUvEVE5Sk9Ph4eHB5YvX46QkJCKDofopXT+/Hm0bt0a586dQ8uWLSs6HL0xoSIi0mDZsmXYtGkTrly5ovfdkESkrviu3Yp4NJAxMKEiIiIi0hP/2UVERESkJyZURERERHpiQkVERESkJyZURERERHriwp7loKioCImJiahatepL8QBIIiKiV4EQAhkZGXB1dX3h3b5MqMpBYmLiC5+NRERERJXT3bt3Ubt27VLrMKEqB8UP5bx7926Jz0h6kQeKHDzIVH0GWU5+IUZ8cxYAsGV0W8gs1Z/s7WQrhZOdrEzHJCIiepUpFAq4ubm98OHaABOqclE8zWdnZ1fmhMrOzg7PP0UtO68AZtLLAIB2jWujihV/nURERIamzeU6vCidiIiISE9MqIiIiIj0xISKiIiISE9MqIiIiIj0xISKiIiISE9MqIiIiIj0xISKiIiISE8ml1CtXbsWdevWhUwmg4+PD86cOVNq/V27dsHT0xMymQzNmzfHgQMHlNvy8/Mxa9YsNG/eHDY2NnB1dcWIESOQmJio0kZqaiqCgoJgZ2cHBwcHhISEIDMz0yj9IyIiItNjUgnVzp07ERoairCwMJw7dw4tWrSAv78/UlJSNNY/deoUhg4dipCQEMTFxSEgIAABAQG4dOkSACA7Oxvnzp3DRx99hHPnzmHPnj2Ij4/HW2+9pdJOUFAQLl++jMjISOzbtw/Hjx/H2LFjjd5fIiIiMg0SIYSo6CC05ePjg7Zt22LNmjUAnj502M3NDRMnTsTs2bPV6g8ZMgRZWVnYt2+fsqx9+/Zo2bIlwsPDNR7j7NmzaNeuHW7fvo06derg6tWr8PLywtmzZ9GmTRsAQEREBPr06YN//vkHrq6uL4xboVDA3t4e6enpZV4pXZPsvAJ4zf8NAHDlY3+ulE5ERGRAunx/m8wIVV5eHmJjY+Hn56csMzMzg5+fH6KjozXuEx0drVIfAPz9/UusDwDp6emQSCRwcHBQtuHg4KBMpgDAz88PZmZmiImJ0dhGbm4uFAqFyouIiIheXiaTUD18+BCFhYVwcXFRKXdxcUFSUpLGfZKSknSqn5OTg1mzZmHo0KHKTDQpKQnOzs4q9SwsLODo6FhiO0uWLIG9vb3y5ebmplUfiYiIyDSZTEJlbPn5+Rg8eDCEEFi/fr1ebc2ZMwfp6enK1927dw0UJREREVVGJnPRTY0aNWBubo7k5GSV8uTkZMjlco37yOVyreoXJ1O3b9/GkSNHVOZJ5XK52kXvBQUFSE1NLfG4UqkUUqlU674RERGRaTOZESorKyt4e3sjKipKWVZUVISoqCj4+vpq3MfX11elPgBERkaq1C9Opq5fv47Dhw+jevXqam2kpaUhNjZWWXbkyBEUFRXBx8fHEF0jIiIiE2cyI1QAEBoaipEjR6JNmzZo164dVq1ahaysLAQHBwMARowYgVq1amHJkiUAgMmTJ6NLly5YsWIF+vbtix07duDPP//Ehg0bADxNpgYNGoRz585h3759KCwsVF4X5ejoCCsrKzRp0gS9e/fGmDFjEB4ejvz8fEyYMAGBgYFa3eFHRERELz+TSqiGDBmCBw8eYP78+UhKSkLLli0RERGhvPD8zp07MDP7d9CtQ4cO2L59O+bNm4cPP/wQDRs2xN69e9GsWTMAwL179/DLL78AAFq2bKlyrKNHj6Jr164AgG3btmHChAno0aMHzMzMMHDgQKxevdr4HSYiIiKTYFLrUJkqrkNFRERkel7KdaiIiIiIKismVERERER6YkJFREREpCcmVERERER6YkJFREREpCcmVERERER6YkJFREREpCcmVERERER6YkJFREREpCcurU30CktR5CAlI1fn/ZyrSuFsJzNCREREpokJFdErbFvMHXwRdV3n/Sb3aIipPRsZISIiItPEhIroFRbkUwc9vVxUynLyCzEoPBoAsPt9X8gszdX2c64qLZf4iIhMBRMqoleYs51MbeouO69A+f9ernZ86D
YRkRZ4UToRERGRnphQEREREemJCRURERGRnphQEREREemJCRURERGRnphQEREREemJCRURERGRnphQEREREemJCRURERGRnphQEREREemJCRURERGRnphQEREREemJCRURERGRnphQEREREemJCRURERGRnphQEREREemJCRURERGRnphQEREREemJCRURERGRnkwuoVq7di3q1q0LmUwGHx8fnDlzptT6u3btgqenJ2QyGZo3b44DBw6obN+zZw969eqF6tWrQyKR4Pz582ptdO3aFRKJROX1/vvvG7JbREREZMJMKqHauXMnQkNDERYWhnPnzqFFixbw9/dHSkqKxvqnTp3C0KFDERISgri4OAQEBCAgIACXLl1S1snKykLHjh2xbNmyUo89ZswY3L9/X/lavny5QftGREREpsuiogPQxeeff44xY8YgODgYABAeHo79+/fjm2++wezZs9Xqf/HFF+jduzdmzJgBAFi0aBEiIyOxZs0ahIeHAwCGDx8OALh161apx65SpQrkcrlWcebm5iI3N1f5XqFQaLUfERERmSaTGaHKy8tDbGws/Pz8lGVmZmbw8/NDdHS0xn2io6NV6gOAv79/ifVLs23bNtSoUQPNmjXDnDlzkJ2dXWLdJUuWwN7eXvlyc3PT+XhERERkOkxmhOrhw4coLCyEi4uLSrmLiwuuXbumcZ+kpCSN9ZOSknQ69jvvvAN3d3e4urri4sWLmDVrFuLj47Fnzx6N9efMmYPQ0FDle4VCwaSKiIjoJWYyCVVFGjt2rPL/mzdvjpo1a6JHjx64efMm6tevr1ZfKpVCKpWWZ4hERERUgUxmyq9GjRowNzdHcnKySnlycnKJ1zbJ5XKd6mvLx8cHAHDjxg292iEiIqKXg8kkVFZWVvD29kZUVJSyrKioCFFRUfD19dW4j6+vr0p9AIiMjCyxvraKl1aoWbOmXu0QERHRy8GkpvxCQ0MxcuRItGnTBu3atcOqVauQlZWlvOtvxIgRqFWrFpYsWQIAmDx5Mrp06YIVK1agb9++2LFjB/78809s2LBB2WZqairu3LmDxMREAEB8fDyAp6NbcrkcN2/exPbt29GnTx9Ur14dFy9exNSpU9G5c2e89tpr5fwTICIiosrIpBKqIUOG4MGDB5g/fz6SkpLQsmVLREREKC88v3PnDszM/h1069ChA7Zv34558+bhww8/RMOGDbF37140a9ZMWeeXX35RJmQAEBgYCAAICwvDggULYGVlhcOHDyuTNzc3NwwcOBDz5s0rp14TERFRZScRQoiKDuJlp1AoYG9vj/T0dNjZ2Rms3ey8AnjN/w0AcOVjf1SxMqn8mCopnldERE/p8v1tMtdQEREREVVWTKiIiIiI9MSEioiIiEhPTKiIiIiI9MSEioiIiEhPTKiIiIiI9MSEioiIiEhPTKiIiIiI9MQV+4iIyOBSFDlIycjVeT/nqlI428mMEBGRcTGhIiIig9sWcwdfRF3Xeb/JPRpias9GRoiIyLiYUBERkcEF+dRBTy8XlbKc/EIMCo8GAOx+3xcyS3O1/ZyrSsslPiJDY0JFREQG52wnU5u6y84rUP6/l6sdnxNJLxVelE5ERESkJ53+eVBUVIRjx47hjz/+wO3bt5GdnQ0nJye0atUKfn5+cHNzM1acRERERJWWViNUT548weLFi+Hm5oY+ffrg4MGDSEtLg7m5OW7cuIGwsDDUq1cPffr0wenTp40dMxEREVGlotUIVaNGjeDr64uNGzeiZ8+esLS0VKtz+/ZtbN++HYGBgZg7dy7GjBlj8GCJiIiIKiOtEqpDhw6hSZMmpdZxd3fHnDlzMH36dNy5c8cgwRERERGZAq2m/F6UTD3L0tIS9evXL3NARFSxCouE8v/PJKSqvCciIs3KdJffH3/8gWHDhsHX1xf37t0DAGzduhUnTpwwaHBEVL4iLt2H3+fHlO9HbTqLjsuOIOLS/QqMioio8tM5ofrxxx/h7+8Pa2
trxMXFITf36aMF0tPT8cknnxg8QCIqHxGX7mPcd+eQrFB9XEhSeg7GfXeOSRURUSl0TqgWL16M8PBwbNy4UeXi9Ndffx3nzp0zaHBEVD4KiwQW/noFmib3issW/nqF039ERCXQOaGKj49H586d1crt7e2RlpZmiJiIqJydSUjF/fScErcLAPfTc3AmIbX8giIiMiE6J1RyuRw3btxQKz9x4gQ8PDwMEhQRla+UjJKTqbLUIyJ61eicUI0ZMwaTJ09GTEwMJBIJEhMTsW3bNkyfPh3jxo0zRoxEZGTOVWUvrqRDPSKiV43OT6acPXs2ioqK0KNHD2RnZ6Nz586QSqWYPn06Jk6caIwYicjI2tVzRE17GZLSczReRyUBILeXoV09x/IOjV4izy/J0amhE8zNJBUYEZHh6DxCJZFIMHfuXKSmpuLSpUs4ffo0Hjx4gEWLFhkjPiIqB+ZmEoS96QXgafL0rOL3YW968cuPyoxLctDLTucRqmJWVlbw8vIyZCxEVIF6N6uJ9cNaI+yXyypLJ8jtZQh70wu9m9WswOjIlBUvyfH86Gfxkhzrh7Xm+UVaSVHkICUj98UVn+NcVQpnO+NesqBzQtWtWzdIJCX/K/XIkSN6BUREFad3s5p4vUENNF9wCACwObgtp2VILy9akkOCp0ty9PSS8zyjF9oWcwdfRF3Xeb/JPRpias9GRojoXzonVC1btlR5n5+fj/Pnz+PSpUsYOXKkoeIiogry7Jdau3qO/JIjveiyJIdv/erlFxiZpCCfOujp5aJ8f+rGQ3x1/G88yspTltWwtcLYTh7o0KCGssy5qtTosemcUK1cuVJj+YIFC5CZmal3QERE9PLgkhxkSM52MuXUXcSl+1hy8Jra6OejzDwsOXit3KeSy/QsP02GDRuGb775xlDNERHRS4BLcpAxVManOxgsoYqOjoZMZvw/iLVr16Ju3bqQyWTw8fHBmTNnSq2/a9cueHp6QiaToXnz5jhw4IDK9j179qBXr16oXr06JBIJzp8/r9ZGTk4Oxo8fj+rVq8PW1hYDBw5EcnKyIbtFRPRSKl6So6SJYwmAmlySg3RUGZ/uoHNCNWDAAJVX//790b59ewQHB+O9994zRoxKO3fuRGhoKMLCwnDu3Dm0aNEC/v7+SElJ0Vj/1KlTGDp0KEJCQhAXF4eAgAAEBATg0qVLyjpZWVno2LEjli1bVuJxp06dil9//RW7du3CsWPHkJiYiAEDBhi8f0RELxsuyUHGUBmnkiVCCJ3Gw4KDg1Xem5mZwcnJCd27d0evXr0MGtzzfHx80LZtW6xZswYAUFRUBDc3N0ycOBGzZ89Wqz9kyBBkZWVh3759yrL27dujZcuWCA8PV6l769Yt1KtXD3FxcSoX3qenp8PJyQnbt2/HoEGDAADXrl1DkyZNEB0djfbt278wboVCAXt7e6Snp8POzq4sXdcoO68AXvN/AwBc+dgfVazKvAoGkRLPKzKGiEv31ZbkqMklOaiMom8+wtCNp19Y7/sx7fW62UGX72+dPyk3bdpU5sD0kZeXh9jYWMyZM0dZZmZmBj8/P0RHR2vcJzo6GqGhoSpl/v7+2Lt3r9bHjY2NRX5+Pvz8/JRlnp6eqFOnTokJVW5uLnJz//3QUCgUWh+PiOhlxCU5yJAq49MdDHYNlbE9fPgQhYWFcHFxUSl3cXFBUlKSxn2SkpJ0ql9SG1ZWVnBwcNC6nSVLlsDe3l75cnNz0/p4REQvKy7JQYZSGaeStRqhqlatWqmLeT4rNbX8LgCrrObMmaMyMqZQKJhUERER6enZldJrV6uCOW94qq1DVf3/r0NVu1oVXLqXDqASrZS+atUqowahjRo1asDc3Fzt7rrk5GTI5XKN+8jlcp3ql9RGXl4e0tLSVEapSmtHKpVCKjX+ImJERESvEm1WSn+YmYdPDl5TKas0K6VXhhXQrays4O3tjaioKAQEBAB4elF6VFQUJkyYoHEfX19fREVFYcqUKc
qyyMhI+Pr6an1cb29vWFpaIioqCgMHDgQAxMfH486dOzq1Q0RERPp5fqV0bVXKldKflZOTg7y8PJUyQ97F9rzQ0FCMHDkSbdq0Qbt27bBq1SpkZWUp7zwcMWIEatWqhSVLlgAAJk+ejC5dumDFihXo27cvduzYgT///BMbNmxQtpmamoo7d+4gMTERwNNkCXg6MiWXy2Fvb4+QkBCEhobC0dERdnZ2mDhxInx9fbW6w4+IiIgM49mV0isbnROqrKwszJo1Cz/88AMePXqktr2wsNAggWkyZMgQPHjwAPPnz0dSUhJatmyJiIgI5YXnd+7cgZnZv9fZd+jQAdu3b8e8efPw4YcfomHDhti7dy+aNWumrPPLL7+oLAURGBgIAAgLC8OCBQsAPH3cjpmZGQYOHIjc3Fz4+/tj3bp1RusnERERmRad16EaP348jh49ikWLFmH48OFYu3Yt7t27h6+++gpLly5FUFCQsWI1WVyHikwJzysyFp5bZGqMug7Vr7/+ii1btqBr164IDg5Gp06d0KBBA7i7u2Pbtm1MqIiIiOiVo/M6VKmpqfDw8ADw9Hqp4mUSOnbsiOPHjxs2OiIiIiIToHNC5eHhgYSEBABPVwz/4YcfADwduXp+8UsiIiKiV4HOCVVwcDAuXLgAAJg9ezbWrl0LmUyGqVOnYsaMGQYPkIiIiKiy0/oaqunTp+Pdd9/F1KlTlWV+fn64du0aYmNj0aBBA7z22mtGCZKIiIioMtM6ofr555+xcuVK+Pj44N1338WQIUNgY2MDd3d3uLu7GzNGIiIyMc8+IqRYTv6/y+pcSVRAZmmutl95PCKEyBi0TqiuX7+O48eP45tvvsHkyZMxefJkvP3223j33XfRoUMHY8ZIJSgs+nfFizMJqXxyOxFVGi96RMig8GiN5eXxiBAiY9B5HSrg6eKeO3fuxKZNm3Dy5Ek0btwYISEhGD58uHKRTfqXMdahirh0H2G/XEay4t9/Ada0lyHsTS/0blbTIMegVxPXCiJD0DRCpQ2OUFFlosv3d5kSqmfduHEDmzZtQnh4ODIzM5Gbq/sf0MvO0AlVxKX7GPfdOTz/iysem1o/rDWTKiozJlRERE/p8v2t811+z8rKysIff/yBY8eO4fHjx8r1qch4CosEFv56RS2ZAqAsW/jrFZXpQCIiIjKuMiVUJ06cwOjRo1GzZk1MmjQJjRo1wh9//IGrV68aOj56zpmEVNxPzylxuwBwPz0HZxJSyy8oIiKiV5zWY/n379/Ht99+i82bN+N///sf2rdvj88//xyBgYGwtbU1Zoz0jJSMkpOpstQjIiIi/WmdULm5uaF69eoYPnw4QkJC0KRJE2PGRSVwrqrdxZra1iMiIiL9aZ1Q/fDDD3jrrbdgYcELVCtSu3qOqGkvQ1J6jsbrqCQA5PYytKvnWN6hERERvbK0voZqwIABTKYqAXMzCcLe9ALw7119xYrfh73pxfWoiIiIypFed/lRxejdrCbWD2sNZzupSrncXsYlE4iIiCoAh5xMVO9mNfF6gxpovuAQAGBzcFuulE5ERFRBOEJlwp5NntrVc2QyRUREVEGYUBERERHpSecpv6ysLCxduhRRUVFISUlBUVGRyva///7bYMERERERmQKdE6p3330Xx44dw/Dhw1GzZk1IJJxmIiIiolebzgnVwYMHsX//frz++uvGiIeIiIjI5Oh8DVW1atXg6MhFI4mIiIiK6ZxQLVq0CPPnz0d2drYx4iEiIiIyOTpP+a1YsQI3b96Ei4sL6tatC0tLS5Xt586dM1hwRERERKZA54QqICDACGEQERERmS6dE6qwsDBjxEFERERkssr86JnY2FhcvXoVANC0aVO0atXKYEERERERmRKdE6qUlBQEBgbi999/h4ODAwAgLS0N3bp1w44dO+Dk5GToGImIiIgqNZ3v8ps4cSIyMjJw+fJlpKamIjU1FZcuXYJCocCkSZOMESMRERFRpabzCFVERAQOHz
6MJk2aKMu8vLywdu1a9OrVy6DBEREREZkCnUeoioqK1JZKAABLS0u15/oRERERvQp0Tqi6d++OyZMnIzExUVl27949TJ06FT169DBocJqsXbsWdevWhUwmg4+PD86cOVNq/V27dsHT0xMymQzNmzfHgQMHVLYLITB//nzUrFkT1tbW8PPzw/Xr11Xq1K1bFxKJROW1dOlSg/eNiIiITJPOCdWaNWugUChQt25d1K9fH/Xr10e9evWgUCjw5ZdfGiNGpZ07dyI0NBRhYWE4d+4cWrRoAX9/f6SkpGisf+rUKQwdOhQhISGIi4tDQEAAAgICcOnSJWWd5cuXY/Xq1QgPD0dMTAxsbGzg7++PnJwclbY+/vhj3L9/X/maOHGiUftKREREpkMihBC67iSEwOHDh3Ht2jUAQJMmTeDn52fw4J7n4+ODtm3bYs2aNQCeTj+6ublh4sSJmD17tlr9IUOGICsrC/v27VOWtW/fHi1btkR4eDiEEHB1dcW0adMwffp0AEB6ejpcXFywefNmBAYGAng6QjVlyhRMmTKlTHErFArY29sjPT0ddnZ2ZWpDk+y8AnjN/w0AcOVjf1SxKvMqGPSKSlHkICUjV6UsJ78Qg8KjAQC73/eFzNJcbT/nqlI428nKJUYiooqiy/d3mb6BJRIJevbsiZ49e5YpwLLIy8tDbGws5syZoywzMzODn58foqOjNe4THR2N0NBQlTJ/f3/s3bsXAJCQkICkpCSVZNDe3h4+Pj6Ijo5WJlQAsHTpUixatAh16tTBO++8g6lTp8LCQvOPLzc3F7m5/35JKRQKnftLVB62xdzBF1HXS9xenFg9b3KPhpjas5GxwiIiMjlaJVSrV6/G2LFjIZPJsHr16lLrGmvphIcPH6KwsBAuLi4q5S4uLsqRsuclJSVprJ+UlKTcXlxWUh3gaZ9at24NR0dHnDp1CnPmzMH9+/fx+eefazzukiVLsHDhQt06SFQBgnzqoKeXy4srPse5qtQI0RARmS6tEqqVK1ciKCgIMpkMK1euLLGeRCJ5KdeienaU67XXXoOVlRXee+89LFmyBFKp+hfLnDlzVPZRKBRwc3Mrl1iJdOFsJ+PUHRGRAWiVUCUkJGj8//JUo0YNmJubIzk5WaU8OTkZcrlc4z5yubzU+sX/TU5ORs2aNVXqtGzZssRYfHx8UFBQgFu3bqFx48Zq26VSqcZEi4iIiF5OOt/l9/HHHyM7O1ut/MmTJ/j4448NEpQmVlZW8Pb2RlRUlLKsqKgIUVFR8PX11biPr6+vSn0AiIyMVNavV68e5HK5Sh2FQoGYmJgS2wSA8+fPw8zMDM7Ozvp0iYiIiF4SOidUCxcuRGZmplp5dna20a8bCg0NxcaNG/Htt9/i6tWrGDduHLKyshAcHAwAGDFihMpF65MnT0ZERARWrFiBa9euYcGCBfjzzz8xYcIEAE+nKKdMmYLFixfjl19+wV9//YURI0bA1dUVAQEBAJ5e2L5q1SpcuHABf//9N7Zt24apU6di2LBhqFatmlH7S0RERKZB57v8hBCQSCRq5RcuXICjo6NBgirJkCFD8ODBA8yfPx9JSUlo2bIlIiIilBeV37lzB2Zm/+aIHTp0wPbt2zFv3jx8+OGHaNiwIfbu3YtmzZop68ycORNZWVkYO3Ys0tLS0LFjR0REREAme3pdiVQqxY4dO7BgwQLk5uaiXr16mDp1qtrdg0RERPTq0nodqmrVqkEikSjXYng2qSosLERmZibef/99rF271mjBmiquQ0VERGR6jLIO1apVqyCEwOjRo7Fw4ULY29srt1lZWaFu3bqlXndERERE9LLSOqEaOXIkgKcXcnfo0EHjA5KJiIiIXkU6zxF16dJF+f85OTnIy8tT2W7IKS0iIiIiU6DzXX7Z2dmYMGECnJ2dYWNjg2rVqqm8iIiIiF41OidUM2bMwJEjR7B+/XpIpVJ8/fXXWLhwIVxdXbFlyxZjxEhERERUqek85ffrr7
9iy5Yt6Nq1K4KDg9GpUyc0aNAA7u7u2LZtG4KCgowRJxEREVGlpfMIVWpqKjw8PAA8vV4qNTUVANCxY0ccP37csNERERERmQCdEyoPDw/l8/w8PT3xww8/AHg6cuXg4GDQ4IiIiIhMgc4JVXBwMC5cuAAAmD17NtauXQuZTIapU6dixowZBg+QiIiIqLLT+RqqqVOnKv/fz88P165dQ2xsLBo0aIDXXnvNoMERERERmQKdR6i2bNmC3Nxc5Xt3d3cMGDAAnp6evMuPiIiIXkllmvJLT09XK8/IyEBwcLBBgiIiIiIyJTonVEIIlQcjF/vnn39Unu9HRERE9KrQ+hqqVq1aQSKRQCKRoEePHrCw+HfXwsJCJCQkoHfv3kYJkoiIiKgy0zqhCggIAACcP38e/v7+sLW1VW6zsrJC3bp1MXDgQIMHSERERFTZaZ1QhYWFAQDq1q2LwMBASKVSowVFREREZEp0vobKy8sL58+fVyuPiYnBn3/+aYiYiIiIiEyKzgnV+PHjcffuXbXye/fuYfz48QYJioiIiMiU6JxQXblyBa1bt1Yrb9WqFa5cuWKQoIiIiIhMic4JlVQqRXJyslr5/fv3Ve78IyIiInpV6JxQ9erVC3PmzFFZ3DMtLQ0ffvghevbsadDgiIiIiEyBzkNKn332GTp37gx3d3e0atUKwNOlFFxcXLB161aDB0hERERU2emcUNWqVQsXL17Etm3bcOHCBVhbWyM4OBhDhw6FpaWlMWIkIiIiqtTKdNGTjY0Nxo4da+hYiIiIiEySztdQAcDWrVvRsWNHuLq64vbt2wCAlStX4ueffzZocERERESmQOeEav369QgNDcUbb7yBx48fo7CwEABQrVo1rFq1ytDxEREREVV6OidUX375JTZu3Ii5c+eqLJPQpk0b/PXXXwYNjoiIiMgU6JxQJSQkKO/ue5ZUKkVWVpZBgiIiIiIyJTonVPXq1dP4LL+IiAg0adLEEDERERERmRSd7/ILDQ3F+PHjkZOTAyEEzpw5g++//x5LlizB119/bYwYiYiIiCo1nROqd999F9bW1pg3bx6ys7PxzjvvwNXVFV988QUCAwONESMRERFRpVamdaiCgoIQFBSE7OxsZGZmwtnZ2dBxEREREZmMMq1DBQApKSmIjY1FfHw8Hjx4YMiYSrV27VrUrVsXMpkMPj4+OHPmTKn1d+3aBU9PT8hkMjRv3hwHDhxQ2S6EwPz581GzZk1YW1vDz88P169fV6mTmpqKoKAg2NnZwcHBASEhIcjMzDR434iIiMg06ZxQZWRkYPjw4XB1dUWXLl3QpUsXuLq6YtiwYSoPTDaGnTt3IjQ0FGFhYTh37hxatGgBf39/pKSkaKx/6tQpDB06FCEhIYiLi0NAQAACAgJw6dIlZZ3ly5dj9erVCA8PR0xMDGxsbODv74+cnBxlnaCgIFy+fBmRkZHYt28fjh8/zpXiiYiISEkihBC67DBkyBDExcXhyy+/hK+vLwAgOjoakydPRsuWLbFjxw6jBAoAPj4+aNu2LdasWQMAKCoqgpubGyZOnIjZs2drjDUrKwv79u1TlrVv3x4tW7ZEeHg4hBBwdXXFtGnTMH36dABAeno6XFxcsHnzZgQGBuLq1avw8vLC2bNn0aZNGwBP72js06cP/vnnH7i6uqodNzc3F7m5ucr3CoUCbm5uuHv3Luzs7AAAlpaWsLa2xpMnT5Cfn6+sK5VKlUtQFC+aCgAymQxWVlbIzMxEUVERACA7rwA+y09AYmaO09N9UcXq3xlcGxsbmJmZISMjQyW2qlWroqioSG2JCzs7OxQUFCA7O1tZZmZmBltbW+Tl5akkmObm5rCxsVHrpyH6BABVqlSBhYUFFAqFSozsE/vEPrFP7BP7VJ59SkxMRK1atZCenq78/i6R0FGVKlXEH3/8oVZ+/PhxUaVKFV2b01pubq4wNzcXP/30k0r5iBEjxFtvvaVxHzc3N7Fy5UqVsvnz54vXXn
tNCCHEzZs3BQARFxenUqdz585i0qRJQggh/vvf/woHBweV7fn5+cLc3Fzs2bNH43HDwsIEgFJfISEhQgghQkJCVMrDwsKEEEL06tVLpXzjxo1CCCG8vLxUyp3fXijcZ+0TVatWVSm/dOmSSE9PVztuenq6uHTpkkpZ1apVhRBCREREqJR7eXkJIYTYuHGjSnmvXr009tNQfYqIiBBCCPaJfWKf2Cf2iX2q0D41btxYGduL6DxCVadOHezfvx/NmzdXKb948aJy1MYYirPEU6dOKUfGAGDmzJk4duwYYmJi1PaxsrLCt99+i6FDhyrL1q1bh4ULFyI5ORmnTp3C66+/jsTERNSsWVNZZ/DgwZBIJNi5cyc++eQTfPvtt4iPj1dp29nZGQsXLsS4cePUjssRKv6rhn1in9gn9ol9Mv0+6TJCpfNdfvPmzUNoaCi2bt0KuVwOAEhKSsKMGTPw0Ucf6drcS6n4F/Q8Ozs7tV+ItbU1rK2t1era2NhobNvW1lb5/xZ5BZCYmSvbfjahevaYzzM3N9dYbmFhobHcysoKVlZWauUl9VOfPr0o9pLK2Sf2CWCfAPappHL2iX0CDNcnTXROqNavX48bN26gTp06qFOnDgDgzp07kEqlePDgAb766itl3XPnzunafIlq1KgBc3NzJCcnq5QnJycrE7vnyeXyUusX/zc5OVllhCo5ORktW7ZU1nn+oveCggKkpqaWeFwiIiJ6teicUAUEBBghjBezsrKCt7c3oqKilDEUFRUhKioKEyZM0LiPr68voqKiMGXKFGVZZGSkcsqwXr16kMvliIqKUiZQCoUCMTExyqk8X19fpKWlITY2Ft7e3gCAI0eOoKioCD4+PsbpLBEREZkUnROqsLAwY8ShldDQUIwcORJt2rRBu3btsGrVKmRlZSE4OBgAMGLECNSqVQtLliwBAEyePBldunTBihUr0LdvX+zYsQN//vknNmzYAACQSCSYMmUKFi9ejIYNG6JevXr46KOP4OrqqkzamjRpgt69e2PMmDEIDw9Hfn4+JkyYgMDAQI13+BEREdGrR+d1qI4ePVritmen+4xhyJAh+OyzzzB//ny0bNkS58+fR0REBFxcXAA8nXq8f/++sn6HDh2wfft2bNiwAS1atMDu3buxd+9eNGvWTFln5syZmDhxIsaOHYu2bdsiMzMTERERkMlkyjrbtm2Dp6cnevTogT59+qBjx47KpIyIiIhI57v8pFIpJk2ahE8++QSWlpYAgIcPHyI4OBgnTpzA48ePjRKoKVMoFLC3t9duHQsdZOcVwGv+bwCAKx/7a7wonYiIiMpGl+/vMo1Q/fTTT2jbti2uXLmC/fv3o1mzZlAoFDh//nxZYyYiIiIyWTonVB06dMD58+fRrFkztG7dGv3798fUqVPx+++/w93d3RgxEhEREVVqZXo48v/+9z/8+eefqF27NiwsLBAfH6+y4BYRERHRq0TnhGrp0qXw9fVFz549cenSJZw5cwZxcXF47bXXEB0dbYwYiYiIiCo1nROqL774Anv37sWXX34JmUyGZs2a4cyZMxgwYAC6du1qhBCJiIiIKjedbwv766+/UKNGDZUyS0tLfPrpp+jXr5/BAiMiIiIyFTqPUD2fTD2rSZMmegVDREREZIq0TqiqVKmCBw8eKN/37dtXZRHN55+HR0RERPSq0DqhysnJwbNrgB4/fhxPnjxRqaPjGqFEREREL4UyLZtQEolEYsjmiIiIiEyCQRMqIiIioleR1gmVRCJRGYF6/j0RERHRq0rrZROEEGjUqJEyicrMzESrVq1gZmam3E5ERET0KtI6odq0aZMx4yAiIiIyWVonVCNHjjRmHEREREQmS6trqDidR0RERFQyrRKqpk2bYseOHcjLyyu13vXr1zFu3DgsXbrUIMERERERmQKtpvy+/PJLzJo1Cx988AF69uyJNm3awNXVFTKZDI8fP8aVK1dw4sQJXL58GRMmTMC4ceOMHTcRER
FRpaFVQtWjRw/8+eefOHHiBHbu3Ilt27bh9u3bePLkCWrUqIFWrVphxIgRCAoKQrVq1YwdMxEREVGlovVF6QDQsWNHdOzY0VixEBEREZkkrpROREREpCcmVERERER6YkJFREREpCcmVERERER6YkJFREREpKcyJVQ3b97EvHnzMHToUKSkpAAADh48iMuXLxs0OCIiIiJToHNCdezYMTRv3hwxMTHYs2cPMjMzAQAXLlxAWFiYwQMkIiIiqux0Tqhmz56NxYsXIzIyElZWVsry7t274/Tp0wYNjoiIiMgU6JxQ/fXXX+jfv79aubOzMx4+fGiQoIiIiIhMic4JlYODA+7fv69WHhcXh1q1ahkkKCIiIiJTonNCFRgYiFmzZiEpKQkSiQRFRUU4efIkpk+fjhEjRhgjRiIiIqJKTeeE6pNPPoGnpyfc3NyQmZkJLy8vdO7cGR06dMC8efOMESMAIDU1FUFBQbCzs4ODgwNCQkKUF8SXJCcnB+PHj0f16tVha2uLgQMHIjk5WaXOnTt30LdvX1SpUgXOzs6YMWMGCgoKlNt///13SCQStVdSUpJR+klERESmR6eHIwOAlZUVNm7ciI8++giXLl1CZmYmWrVqhYYNGxojPqWgoCDcv38fkZGRyM/PR3BwMMaOHYvt27eXuM/UqVOxf/9+7Nq1C/b29pgwYQIGDBiAkydPAgAKCwvRt29fyOVynDp1Cvfv38eIESNgaWmJTz75RKWt+Ph42NnZKd87Ozsbp6NERERkciRCCFHRQbzI1atX4eXlhbNnz6JNmzYAgIiICPTp0wf//PMPXF1d1fZJT0+Hk5MTtm/fjkGDBgEArl27hiZNmiA6Ohrt27fHwYMH0a9fPyQmJsLFxQUAEB4ejlmzZuHBgwewsrLC77//jm7duuHx48dwcHAoU/wKhQL29vZIT09XScr0lZ1XAK/5vwEArnzsjypWOufHREREVAJdvr91/gYePXp0qdu/+eYbXZt8oejoaDg4OCiTKQDw8/ODmZkZYmJiNN51GBsbi/z8fPj5+SnLPD09UadOHWVCFR0djebNmyuTKQDw9/fHuHHjcPnyZbRq1UpZ3rJlS+Tm5qJZs2ZYsGABXn/99RLjzc3NRW5urvK9QqEoc9+JiIio8tM5oXr8+LHK+/z8fFy6dAlpaWno3r27wQJ7VlJSktoUm4WFBRwdHUu8likpKQlWVlZqo0ouLi7KfZKSklSSqeLtxdsAoGbNmggPD0ebNm2Qm5uLr7/+Gl27dkVMTAxat26t8dhLlizBwoULde4nERERmSadE6qffvpJrayoqAjjxo1D/fr1dWpr9uzZWLZsWal1rl69qlObhta4cWM0btxY+b5Dhw64efMmVq5cia1bt2rcZ86cOQgNDVW+VygUcHNzM3qsREREVDEMctGNmZkZQkND0bVrV8ycOVPr/aZNm4ZRo0aVWsfDwwNyuVz5zMBiBQUFSE1NhVwu17ifXC5HXl4e0tLSVEapkpOTlfvI5XKcOXNGZb/iuwBLahcA2rVrhxMnTpS4XSqVQiqVltovIiIienkY7Crmmzdvqiw3oA0nJyc4OTm9sJ6vry/S0tIQGxsLb29vAMCRI0dQVFQEHx8fjft4e3vD0tISUVFRGDhwIICnd+rduXMHvr6+ynb/85//ICUlRTmlGBkZCTs7O3h5eZUYz/nz51GzZk2d+kpEREQvL50TqmensgBACIH79+9j//79GDlypMECe1aTJk3Qu3dvjBkzBuHh4cjPz8eECRMQGBiovMPv3r176NGjB7Zs2YJ27drB3t4eISEhCA0NhaOjI+zs7DBx4kT4+vqiffv2AIBevXrBy8sLw4cPx/Lly5GUlIR58+Zh/PjxyhGmVatWoV69emjatClycnLw9ddf48iRIzh06JBR+kpERESmR+eEKi4uTuW9mZkZnJycsGLFihfeAaiPbdu2YcKECejRowfMzMwwcOBArF69Wrk9Pz8f8fHxyM
7OVpatXLlSWTc3Nxf+/v5Yt26dcru5uTn27duHcePGwdfXFzY2Nhg5ciQ+/vhjZZ28vDxMmzYN9+7dQ5UqVfDaa6/h8OHD6Natm9H6SkRERKbFJNahMnWGWIcqRZGDlIxclbKc/EIMCo8GAOx+3xcyS3O1/ZyrSuFsJyvTMYmIiF5lRl2HiirGtpg7+CLqeonbixOr503u0RBTezYyVlhEREQELROqVq1aQSKRaNXguXPn9AqINAvyqYOeXi4vrvgc56q825CIiMjYtEqoAgICjBwGvYiznYxTd0RERJUUr6EqB8Z6lh8REREZjy7f32blFBMRERHRS0vni9ILCwuxcuVK/PDDD7hz5w7y8vJUtqemphosOCIiIiJToPMI1cKFC/H5559jyJAhSE9PR2hoKAYMGAAzMzMsWLDACCESERERVW46J1Tbtm3Dxo0bMW3aNFhYWGDo0KH4+uuvMX/+fJw+fdoYMRIRERFVajonVElJSWjevDkAwNbWFunp6QCAfv36Yf/+/YaNjoiIiMgE6JxQ1a5dG/fv3wcA1K9fX/lMu7Nnzyqff0dERET0KtE5oerfvz+ioqIAABMnTsRHH32Ehg0bYsSIEUZ9lh8RERFRZaX1OlRr1qzBsGHD4ODgoFIeHR2N6OhoNGzYEG+++aYxYjR5XIeKiIjI9Ojy/a11QmVvb4/8/Hz0798fISEh6N69u0GCfRUwoSIiIjI9RlnYMykpCeHh4UhMTETPnj1Rr149LFq0CHfv3tU7YCIiIiJTpnVCZW1tjREjRuDo0aO4fv06hg8fjv/+97+oV68eevfujV27diE/P9+YsRIRERFVSno9y08IgcOHD2Pz5s3Yu3cvbGxskJKSYsj4Xgqc8iMiIjI95fYsP4lEAgsLC0gkEgghOEJFREREr6QyJVR3797Fxx9/DA8PD/Ts2ROJiYnYuHGjcn0qIiIioleJ1g9HzsvLw549e/DNN9/gyJEjqFmzJkaOHInRo0fDw8PDmDESERERVWpaJ1RyuRzZ2dno168ffv31V/j7+8PMTK8ZQyIiIqKXgtYJ1bx58zB8+HA4OTkZMx4iIiIik6N1QhUaGmrMOIiIiIhMFufsiIiIiPTEhIqIiIhIT0yoiIiIiPTEhIqIiIhIT1pflF6ssLAQmzdvRlRUFFJSUlBUVKSy/ciRIwYLjoiIiMgU6JxQTZ48GZs3b0bfvn3RrFkzSCQSY8RFREREZDJ0Tqh27NiBH374AX369DFGPEREREQmR+drqKysrNCgQQNjxEJERERkknROqKZNm4YvvvgCQghjxENERERkcnSe8jtx4gSOHj2KgwcPomnTprC0tFTZvmfPHoMFR0RERGQKdB6hcnBwQP/+/dGlSxfUqFED9vb2Ki9jSU1NRVBQEOzs7ODg4ICQkBBkZmaWuk9OTg7Gjx+P6tWrw9bWFgMHDkRycrJKnUmTJsHb2xtSqRQtW7bU2M7FixfRqVMnyGQyuLm5Yfny5YbqFhEREb0EdB6h2rRpkzHieKGgoCDcv38fkZGRyM/PR3BwMMaOHYvt27eXuM/UqVOxf/9+7Nq1C/b29pgwYQIGDBiAkydPqtQbPXo0YmJicPHiRbU2FAoFevXqBT8/P4SHh+Ovv/7C6NGj4eDggLFjxxq8n0RERGR6JMIELoa6evUqvLy8cPbsWbRp0wYAEBERgT59+uCff/6Bq6ur2j7p6elwcnLC9u3bMWjQIADAtWvX0KRJE0RHR6N9+/Yq9RcsWIC9e/fi/PnzKuXr16/H3LlzkZSUBCsrKwDA7NmzsXfvXly7dk2r+BUKBezt7ZGeng47Oztdu09EREQVQJfv7zKtlL57924MHjwY7du3R+vWrVVexhAdHQ0HBwdlMgUAfn5+MDMzQ0xMjMZ9YmNjkZ+fDz8/P2WZp6cn6tSpg+joaJ2O3blzZ2UyBQD+/v6Ij4/H48ePNe6Tm5sLhUKh8iIiIqKXl84J1erVqxEcHAwXFxfExc
WhXbt2qF69Ov7++2+88cYbxogRSUlJcHZ2VimzsLCAo6MjkpKSStzHysoKDg4OKuUuLi4l7lNSOy4uLmptFG/TZMmSJSrXlbm5uWl9PCIiIjI9OidU69atw4YNG/Dll1/CysoKM2fORGRkJCZNmoT09HSd2po9ezYkEkmpL22n1SqTOXPmID09Xfm6e/duRYdERERERqTzRel37txBhw4dAADW1tbIyMgAAAwfPhzt27fHmjVrtG5r2rRpGDVqVKl1PDw8IJfLkZKSolJeUFCA1NRUyOVyjfvJ5XLk5eUhLS1NZZQqOTm5xH1Kauf5OwOL35fUjlQqhVQq1foYREREZNp0TqjkcjlSU1Ph7u6OOnXq4PTp02jRogUSEhJ0XuzTyckJTk5OL6zn6+uLtLQ0xMbGwtvbG8DThzAXFRXBx8dH4z7e3t6wtLREVFQUBg4cCACIj4/HnTt34Ovrq3WMvr6+mDt3LvLz85VrbkVGRqJx48aoVq2a1u0QERHRy0vnKb/u3bvjl19+AQAEBwdj6tSp6NmzJ4YMGYL+/fsbPEAAaNKkCXr37o0xY8bgzJkzOHnyJCZMmIDAwEDlHX737t2Dp6cnzpw5AwCwt7dHSEgIQkNDcfToUcTGxiI4OBi+vr4qd/jduHED58+fR1JSEp48eYLz58/j/PnzyMvLAwC88847sLKyQkhICC5fvoydO3fiiy++QGhoqFH6SkRERCZI6KiwsFDk5+cr33///fdi4sSJYvXq1SI3N1fX5rT26NEjMXToUGFrayvs7OxEcHCwyMjIUG5PSEgQAMTRo0eVZU+ePBEffPCBqFatmqhSpYro37+/uH//vkq7Xbp0EQDUXgkJCco6Fy5cEB07dhRSqVTUqlVLLF26VKfY09PTBQCRnp5epr4TERFR+dPl+9sk1qEydVyHioiIyPQYfR2qP/74A8OGDYOvry/u3bsHANi6dStOnDhRluaIiIiITJrOCdWPP/4If39/WFtbIy4uDrm5uQCerkz+ySefGDxAIiIiospO54Rq8eLFCA8Px8aNG5V3vQHA66+/jnPnzhk0OCIiIiJToHNCFR8fj86dO6uV29vbIy0tzRAxEREREZkUnRMquVyOGzduqJWfOHECHh4eBgmKiIiIyJTonFCNGTMGkydPRkxMDCQSCRITE7Ft2zZMnz4d48aNM0aMRERERJWaziulz549G0VFRejRoweys7PRuXNnSKVSTJ8+HRMnTjRGjERERESVWpnXocrLy8ONGzeQmZkJLy8v2NraGjq2lwbXoSIiIjI9unx/6zxCVczKygpeXl5l3Z2IiIjopaF1QjV69Git6n3zzTdlDoaIiIjIFGmdUG3evBnu7u5o1aoV+LQaIiIion9pnVCNGzcO33//PRISEhAcHIxhw4bB0dHRmLERERERmQStl01Yu3Yt7t+/j5kzZ+LXX3+Fm5sbBg8ejN9++40jVkRERPRKK/Ndfrdv38bmzZuxZcsWFBQU4PLly7zTrwS8y4+IiMj06PL9rfPCnsodzcwgkUgghEBhYWFZmyEiIiIyeTolVLm5ufj+++/Rs2dPNGrUCH/99RfWrFmDO3fucHSKiIiIXllaX5T+wQcfYMeOHXBzc8Po0aPx/fffo0aNGsaMjYiIiMgkaH0NlZmZGerUqYNWrVpBIpGUWG/Pnj0GC+5lwWuoiIiITI9RVkofMWJEqYkUERER0atKp4U9iYiIiEhdme/yIyIiIqKnmFARERER6YkJFREREZGemFARERER6YkJFREREZGemFARERER6YkJFREREZGemFARERER6YkJFREREZGemFARERER6YkJFREREZGemFARERER6clkEqrU1FQEBQXBzs4ODg4OCAkJQWZmZqn75OTkYPz48ahevTpsbW0xcOBAJCcnq9SZNGkSvL29IZVK0bJlS7U2bt26BYlEovY6ffq0IbtHREREJsxkEqqgoCBcvnwZkZGR2LdvH44fP46xY8
eWus/UqVPx66+/YteuXTh27BgSExMxYMAAtXqjR4/GkCFDSm3r8OHDuH//vvLl7e2tV3+IiIjo5WFR0QFo4+rVq4iIiMDZs2fRpk0bAMCXX36JPn364LPPPoOrq6vaPunp6fjvf/+L7du3o3v37gCATZs2oUmTJjh9+jTat28PAFi9ejUA4MGDB7h48WKJMVSvXh1yuVyreHNzc5Gbm6t8r1AotOsoERERmSSTGKGKjo6Gg4ODMpkCAD8/P5iZmSEmJkbjPrGxscjPz4efn5+yzNPTE3Xq1EF0dLTOMbz11ltwdnZGx44d8csvv5Rad8mSJbC3t1e+3NzcdD4eERERmQ6TSKiSkpLg7OysUmZhYQFHR0ckJSWVuI+VlRUcHBxUyl1cXErcRxNbW1usWLECu3btwv79+9GxY0cEBASUmlTNmTMH6enpytfdu3e1Ph4RERGZngqd8ps9ezaWLVtWap2rV6+WUzSa1ahRA6Ghocr3bdu2RWJiIj799FO89dZbGveRSqWQSqXlFSIRERFVsApNqKZNm4ZRo0aVWsfDwwNyuRwpKSkq5QUFBUhNTS3xuia5XI68vDykpaWpjFIlJydrfS1USXx8fBAZGalXG0RERPTyqNCEysnJCU5OTi+s5+vri7S0NMTGxirvrjty5AiKiorg4+OjcR9vb29YWloiKioKAwcOBADEx8fjzp078PX11Svu8+fPo2bNmnq1QURERC8Pk7jLr0mTJujduzfGjBmD8PBw5OfnY8KECQgMDFTe4Xfv3j306NEDW7ZsQbt27WBvb4+QkBCEhobC0dERdnZ2mDhxInx9fZV3+AHAjRs3kJmZiaSkJDx58gTnz58HAHh5ecHKygrffvstrKys0KpVKwDAnj178M033+Drr78u958DERERVU4mkVABwLZt2zBhwgT06NEDZmZmGDhwoHLJAwDIz89HfHw8srOzlWUrV65U1s3NzYW/vz/WrVun0u67776LY8eOKd8XJ04JCQmoW7cuAGDRokW4ffs2LCws4OnpiZ07d2LQoEFG7C0RERGZEokQQlR0EC87hUIBe3t7pKenw87OrqLDISIiIi3o8v1tEssmEBEREVVmTKiIiIiI9MSEioiIiEhPTKiIiIiI9MSEioiIiEhPTKiIiIiI9MSEioiIiEhPTKiIiIiI9MSEioiIiEhPTKiIiIiI9MSEioiIiEhPTKiIiIiI9MSEioiIiEhPTKiIiIiI9MSEioiIiEhPTKiIiIiI9MSEioiIiEhPTKiIiIiI9MSEioiIiEhPTKiIiIiI9MSEioiIiEhPTKiIiIiI9MSEioiIiEhPTKiIiIiI9MSEioiIiEhPTKiIiIiI9MSEioiIiEhPTKiIiIiI9MSEioiIiEhPTKiIiIiI9MSEioiIiEhPJpNQpaamIigoCHZ2dnBwcEBISAgyMzNL3ScnJwfjx49H9erVYWtri4EDByI5OVm5/cKFCxg6dCjc3NxgbW2NJk2a4IsvvlBr5/fff0fr1q0hlUrRoEEDbN682dDdIyIiIhNmMglVUFAQLl++jMjISOzbtw/Hjx/H2LFjS91n6tSp+PXXX7Fr1y4cO3YMiYmJGDBggHJ7bGwsnJ2d8d133+Hy5cuYO3cu5syZgzVr1ijrJCQkoG/fvujWrRvOnz+PKVOm4N1338Vvv/1mtL4SERGRaZEIIURFB/EiV69ehZeXF86ePYs2bdoAACIiItCnTx/8888/cHV1VdsnPT0dTk5O2L59OwYNGgQAuHbtGpo0aYLo6Gi0b99e47HGjx+Pq1ev4siRIwCAWbNmYf/+/bh06ZKyTmBgINLS0hAREaFV/AqFAvb29khPT4ednZ1OfSciIqKKocv3t0mMUEVHR8PBwUGZTAGAn58fzMzMEBMTo3Gf2NhY5Ofnw8/PT1nm6emJOnXqIDo6usRjpaenw9HRUeXYz7YBAP7+/qW2kZubC4VCofIiIiKil5dJJFRJSUlwdnZWKbOwsI
CjoyOSkpJK3MfKygoODg4q5S4uLiXuc+rUKezcuVNlKjEpKQkuLi5qbSgUCjx58kRjO0uWLIG9vb3y5ebm9qIuEhERkQmr0IRq9uzZkEgkpb6uXbtWLrFcunQJ//d//4ewsDD06tVLr7bmzJmD9PR05evu3bsGipKIiIgqI4uKPPi0adMwatSoUut4eHhALpcjJSVFpbygoACpqamQy+Ua95PL5cjLy0NaWprKKFVycrLaPleuXEGPHj0wduxYzJs3T62dZ+8MLG7Dzs4O1tbWGo8tlUohlUpL7RcRERG9PCo0oXJycoKTk9ML6/n6+iItLQ2xsbHw9vYGABw5cgRFRUXw8fHRuI+3tzcsLS0RFRWFgQMHAgDi4+Nx584d+Pr6KutdvnwZ3bt3x8iRI/Gf//xH47EPHDigUhYZGanSBhEREb3aTOIuPwB44403kJycjPDwcOTn5yM4OBht2rTB9u3bAQD37t1Djx49sGXLFrRr1w4AMG7cOBw4cACbN2+GnZ0dJk6cCODptVLA02m+7t27w9/fH59++qnyWObm5spELyEhAc2aNcP48eMxevRoHDlyBJMmTcL+/fvh7++vVey8y4+IiMj0vHR3+QHAtm3b4OnpiR49eqBPnz7o2LEjNmzYoNyen5+P+Ph4ZGdnK8tWrlyJfv36YeDAgejcuTPkcjn27Nmj3L579248ePAA3333HWrWrKl8tW3bVlmnXr162L9/PyIjI9GiRQusWLECX3/9tdbJFBEREb38TGaEypRxhIqIiMj0vJQjVERERESVFRMqIiIiIj0xoSIiIiLSExMqIiIiIj0xoSIiIiLSExMqIiIiIj1V6Erpr4rilSkUCkUFR0JERETaKv7e1maFKSZU5SAjIwMA4ObmVsGREBERka4yMjJgb29fah0u7FkOioqKkJiYiKpVq0IikRi0bYVCATc3N9y9e5eLhpLB8LwiY+G5RcZgrPNKCIGMjAy4urrCzKz0q6Q4QlUOzMzMULt2baMew87Ojh9OZHA8r8hYeG6RMRjjvHrRyFQxXpROREREpCcmVERERER6YkJl4qRSKcLCwiCVSis6FHqJ8LwiY+G5RcZQGc4rXpROREREpCeOUBERERHpiQkVERERkZ6YUBERERHpiQkVERERkZ6YUBERERHpiQmViTh+/DjefPNNuLq6QiKRYO/evSrbhRCYP38+atasCWtra/j5+eH69esVEyxVWoY4j1JTUxEUFAQ7Ozs4ODggJCQEmZmZ5dgLqmjldR5dvHgRnTp1gkwmg5ubG5YvX27srlE5qUzn0K5du+Dp6QmZTIbmzZvjwIEDZeoTEyoTkZWVhRYtWmDt2rUaty9fvhyrV69GeHg4YmJiYGNjA39/f+Tk5JRzpFSZGeI8CgoKwuXLlxEZGYl9+/bh+PHjGDt2bHl1gSqB8jiPFAoFevXqBXd3d8TGxuLTTz/FggULsGHDBqP3j4yvspxDp06dwtChQxESEoK4uDgEBAQgICAAly5d0r1TgkwOAPHTTz8p3xcVFQm5XC4+/fRTZVlaWpqQSqXi+++/r4AIyRSU5Ty6cuWKACDOnj2rrHPw4EEhkUjEvXv3yi12qjyMdR6tW7dOVKtWTeTm5irrzJo1SzRu3NjIPaLyVpHn0ODBg0Xfvn1V4vHx8RHvvfeezv3gCNVLICEhAUlJSfDz81OW2dvbw8fHB9HR0RUYGZkSbc6j6OhoODg4oE2bNso6fn5+MDMzQ0xMTLnHTJWPoc6j6OhodO7cGVZWVso6/v7+iI+Px+PHj8upN1QRyvMcio6OVjlOcZ2yfHcyoXoJJCUlAQBcXFxUyl1cXJTbiF5Em/MoKSkJzs7OKtstLCzg6OjIc40AGO48SkpK0tjGs8egl1N5nkMl1SnLOcaEioiIiEhPTKheAnK5HACQnJysUp6cnKzcRvQi2pxHcrkcKSkpKtsLCgqQmprKc40AGO48ksvlGtt49hj0cirPc6ikOmU5x5
hQvQTq1asHuVyOqKgoZZlCoUBMTAx8fX0rMDIyJdqcR76+vkhLS0NsbKyyzpEjR1BUVAQfH59yj5kqH0OdR76+vjh+/Djy8/OVdSIjI9G4cWNUq1atnHpDFaE8zyFfX1+V4xTXKdN3p86XsVOFyMjIEHFxcSIuLk4AEJ9//rmIi4sTt2/fFkIIsXTpUuHg4CB+/vlncfHiRfF///d/ol69euLJkycVHDlVJoY4j3r37i1atWolYmJixIkTJ0TDhg3F0KFDK6pLVAHK4zxKS0sTLi4uYvjw4eLSpUtix44dokqVKuKrr74q9/6S4VWWc+jkyZPCwsJCfPbZZ+Lq1asiLCxMWFpair/++kvnPjGhMhFHjx4VANReI0eOFEI8vc30o48+Ei4uLkIqlYoePXqI+Pj4ig2aKh1DnEePHj0SQ4cOFba2tsLOzk4EBweLjIyMCugNVZTyOo8uXLggOnbsKKRSqahVq5ZYunRpeXWRjKwynUM//PCDaNSokbCyshJNmzYV+/fvL1OfJEIIofu4FhEREREV4zVURERERHpiQkVERESkJyZURERERHpiQkVERESkJyZURERERHpiQkVERESkJyZURERERHpiQkVERESkJyZURERERHpiQkVERESkJyZURERERHr6f0zi6u2fw/a1AAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "data = [\n", " get_noisy_data([0]*4, ['x']*4, [0,1,2,3], [0.01]*4, trajectories=10),\n", @@ -1040,21 +765,10 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": null, "id": "5ea57749-0a31-46d1-a5ab-746ff4cc747c", "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkIAAAGzCAYAAADDgXghAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjcsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvTLEjVAAAAAlwSFlzAAAPYQAAD2EBqD+naQAAYmBJREFUeJzt3XdYU+fbB/BvWGGDCxFkKoqorRZHcaF14Gzd1Tpw1Fn3xGpVqr+qdWHVutq6amvds1r3nhUcoFJUHFUQFdmykuf9g5fUGEYCCcN8P9eVq81znnPOfeCY3DzrSIQQAkRERER6yKC4AyAiIiIqLkyEiIiISG8xESIiIiK9xUSIiIiI9BYTISIiItJbTISIiIhIbzERIiIiIr3FRIiIiIj0FhMhIiIi0ltMhIjovfP999/D09MTcrm8uEMpFTZs2ACJRIKHDx8WdyikJadOnYJEIsGpU6d0cvxevXqhZ8+eOjl2UWMiVIplf3hJJBKcO3dOZbsQAk5OTpBIJOjYsWMxRKh7f/zxB/r27QsPDw9IJBI0b95co/1fvHiBsWPHwtPTE2ZmZrCzs0ODBg0wdepUJCUlKeoNGDBA8bOWSCSwtLSEu7s7unfvjp07d6r9hTt79myl47z7io6O1ij+otK8eXOlOE1MTODm5oahQ4fiyZMnxR2ekoSEBCxYsABTp06FgYHyR1xqaiqWLl2Khg0bwsbGBqampqhWrRpGjRqFf/75R6dx/fjjj9iwYYNOz1FSXLhwAbNnz0ZcXJza+zx9+hQ9e/aEra0trK2t8dlnn+HBgwe6CzIX2fd6p06dVLY9fPgQEokEixYt0tr5XF1dIZFIMHr0aJVt2cnMjh07tHY+bZk6dSp27tyJGzduFHcohWZU3AFQ4ZmamuK3335DkyZNlMpPnz6Nf//9F1KptJgi071Vq1bh2rVrqF+/Pl69eqXRvrGxsahXrx4SEhIwaNAgeHp64tWrV7h58yZWrVqFESNGwNLSUlFfKpXip59+AgC8efMGjx49wv79+9G9e3c0b94ce/fuhbW1tdpxv33sbLa2thpdQ1GqXLky5s2bBwBIT0/H7du3sXr1avz111+4c+cOzM3NiznCLL/88gsyMzPRu3dvpfKXL1+ibdu2uHbtGjp27IgvvvgClpaWCA8Px9atW7F27Vqkp6frLK4ff/wR5cuXx4ABA3R2jpLiwoULCAwMxIABA9S6p5OSktCiRQvEx8fj66+/hrGxMZYuXQpfX19cv34d5cqV033Q7zhw4ACuXbsGb2/vIjnfunXrMG3aNDg4OGjleM2aNcObN29gYmKileO9q27duqhXrx4WL16MTZs26eQcRUZQqbV+/XoBQHTt2lWUL19eZGRkKG0fMmSI8Pb2Fi4uLqJDhw7FFGXBnDx5UgAQkZGRedZ7/PixkMlkQgghatasKXx9fdU+x/fffy8AiPPnz6tsi4+PF2/evFG89/f3FxYWFjkeZ968eQKA6NmzZ77nnDVrlgAgXrx4oXac2d68eaO41nclJSVpfLy3yWQypet9l6+vr6hZs6ZK+YoVKwQAceTIkUKdP1thr0MIIT744APRt29flfIOHToIA
wMDsWPHDpVtqampYuLEiYU+d140vT/VkZGRIdLS0gp9nOzPkvz+valr4cKFGh1vwYIFAoC4cuWKouzOnTvC0NBQTJs2TSsxqcvX11c4OzuLMmXKiE6dOilti4yMFADEwoULtXY+FxcXUbNmTWFkZCRGjx6ttC37c3D79u1aO582LVq0SFhYWIjExMTiDqVQ2DX2HujduzdevXqFo0ePKsrS09OxY8cOfPHFFznuI5fLERQUhJo1a8LU1BQVK1bEsGHD8Pr1a6V6e/fuRYcOHeDg4ACpVIoqVapgzpw5kMlkSvWaN2+OWrVq4fbt22jRogXMzc3h6OiI77//XvsX/BYnJyeV7g913b9/H4aGhvj4449VtllbW8PU1FSt4wQEBKBNmzbYvn271rpXspvEt27dihkzZsDR0RHm5uZISEjAgAEDYGlpifv376N9+/awsrJCnz59AADJycmYOHEinJycIJVKUb16dSxatAhCCKXjSyQSjBo1Clu2bEHNmjUhlUpx+PBhjeO0t7cHABgZ/de4/OjRI4wcORLVq1eHmZkZypUrhx49eqiMP8nu2j19+jRGjhwJOzs7VK5cGQCQmJiIcePGwdXVFVKpFHZ2dmjdujWCg4PzjCcyMhI3b95Eq1atlMovX76MgwcPYvDgwejWrZvKflKpVKW74+7du+jevTvKli0LU1NT1KtXD/v27cvxGs6fP48JEyagQoUKsLCwQJcuXfDixQtFPVdXV4SFheH06dOK7sW3u3Hj4uIwbtw4xe+tatWqWLBggVKX69vdMkFBQahSpQqkUilu376tdrwAEBYWhk8++QRmZmaoXLky5s6dq3bX7s2bNzFgwAC4u7vD1NQU9vb2GDRokFJr7OzZszF58mQAgJubm+J68xp/tGPHDtSvXx/169dXlHl6eqJly5bYtm2bWrFpk5WVFcaPH4/9+/fne88BwIMHD9CjRw+ULVsW5ubm+Pjjj3Hw4EG1z+fq6or+/ftj3bp1ePbsWb71Q0JC0K5dO1hbW8PS0hItW7bEpUuXlOrkNEYoIiIC3bp1g729PUxNTVG5cmX06tUL8fHxSvv++uuv8Pb2hpmZGcqWLYtevXrl2AXeunVrJCcnK333lEbsGnsPuLq6wsfHB7///jvatWsHADh06BDi4+PRq1cv/PDDDyr7DBs2DBs2bMDAgQMxZswYREZGYsWKFQgJCcH58+dhbGwMIOuD3tLSEhMmTIClpSVOnDiBmTNnIiEhAQsXLlQ65uvXr9G2bVt07doVPXv2xI4dOzB16lTUrl1bEVdu4uPjkZGRofQ++5hvdyGZmprm2KVUEC4uLpDJZNi8eTP8/f0Ldax+/frhyJEjOHr0KKpVq5Zv/djYWJUyIyMjlW6EOXPmwMTEBJMmTUJaWpqimTszMxN+fn5o0qQJFi1aBHNzcwgh8Omnn+LkyZMYPHgw6tSpg7/++guTJ0/G06dPsXTpUqVjnzhxAtu2bcOoUaNQvnx5uLq65hmzTCbDy5cvAQAZGRm4c+cOZs2ahapVq6Jx48aKelevXsWFCxfQq1cvVK5cGQ8fPsSqVavQvHlz3L59W6ULbeTIkahQoQJmzpyJ5ORkAMDw4cOxY8cOjBo1Cl5eXnj16hXOnTuHO3fu4KOPPso1xgsXLgCASp3shKBfv355XmO2sLAwNG7cGI6OjggICICFhQW2bduGzp07Y+fOnejSpYtS/dGjR6NMmTKYNWsWHj58iKCgIIwaNQp//PEHACAoKAijR4+GpaUlpk+fDgCoWLEiACAlJQW+vr54+vQphg0bBmdnZ1y4cAHTpk1DVFQUgoKClM61fv16pKamYujQoZBKpShbtqza8UZHR6NFixbIzMxU1Fu7di3MzMzU+rkcPXoUDx48wMCBA2Fvb4+wsDCsXbsWYWFhuHTpEiQSCbp27Yp//vkHv//+O5YuXYry5csDACpUqJDjMeVyOW7evIlBgwapbGvQoAGOHDmCxMREWFlZ5
RrXu58fudHk82Ps2LFYunQpZs+enWNCme358+do1KgRUlJSMGbMGJQrVw4bN27Ep59+ih07dqjcK7mZPn06Nm3ahPnz5+f4mZ0tLCwMTZs2hbW1NaZMmQJjY2OsWbMGzZs3x+nTp9GwYcMc90tPT4efnx/S0tIwevRo2Nvb4+nTpzhw4ADi4uJgY2MDAPjf//6Hb775Bj179sSXX36JFy9eYPny5WjWrBlCQkKUPqO8vLxgZmaG8+fPq32dJVJxN0lRwWU3Z1+9elWsWLFCWFlZiZSUFCGEED169BAtWrQQQgiVrrGzZ88KAGLLli1Kxzt8+LBKefbx3jZs2DBhbm4uUlNTFWW+vr4CgNi0aZOiLC0tTdjb24tu3brley3Z++f38vf3z/UYmnY9REdHiwoVKggAwtPTUwwfPlz89ttvIi4uTqVuXl1jQggREhIiAIjx48fnec7srrGcXtWrV1fUy24Sd3d3V/kd+Pv7CwAiICBAqXzPnj0CgJg7d65Seffu3YVEIhH37t1TlAEQBgYGIiwsLM94s+X2+6lRo4Z48OCBUt2c7pmLFy+q3B/Z92+TJk1EZmamUn0bGxvx1VdfqRXb22bMmCEAqDTVd+nSRQAQr1+/Vus4LVu2FLVr11a6x+VyuWjUqJHw8PBQuYZWrVoJuVyuKB8/frwwNDRUupdyuz/nzJkjLCwsxD///KNUHhAQIAwNDcXjx4+FEP91y1hbW4uYmJgCxTtu3DgBQFy+fFlRFhMTI2xsbNTqysrpd/v7778LAOLMmTOKMk26xl68eCEAiG+//VZl28qVKwUAcffu3TyPoY3Pj7ePld0NHBgYKACIa9euCSFy7hrL/pmePXtWUZaYmCjc3NyEq6trrt3Z2d7+fB44cKAwNTUVz549E0Lk3DXWuXNnYWJiIu7fv68oe/bsmbCyshLNmjVTlGXve/LkSSHEf59ReXWzPXz4UBgaGor//e9/SuW3bt0SRkZGKuVCCFGtWjXRrl27PK+xpGPX2HuiZ8+eePPmDQ4cOIDExEQcOHAg126x7du3w8bGBq1bt8bLly8VL29vb1haWuLkyZOKum//pZiYmIiXL1+iadOmSElJwd27d5WOa2lpib59+yrem5iYoEGDBmrN/Fi8eDGOHj2qeGV3U/z6669K5VOmTNHo55KXihUr4saNGxg+fDhev36N1atX44svvoCdnR3mzJmj0p2Ul+y/MhMTE9Wqv3PnTqXrOnr0KNavX69Sz9/fP9e/1keMGKH0/s8//4ShoSHGjBmjVD5x4kQIIXDo0CGlcl9fX3h5eakVL5DV8pgd66FDhxAUFIT4+Hi0a9dOqRvo7XgzMjLw6tUrVK1aFba2tjl2MwwZMgSGhoZKZba2trh8+bJa3QRve/XqFYyMjFT+6k9ISACAPFsVssXGxuLEiRPo2bOn4p5/+fIlXr16BT8/P0RERODp06dK+wwdOhQSiUTxvmnTppDJZHj06FG+59u+fTuaNm2KMmXKKP17bNWqFWQyGc6cOaNUv1u3bkqtK5rE++eff+Ljjz9GgwYNFPtXqFBB0bWan7d/t6mpqXj58qWia1mdLqScvHnzBgBynNSR3T2dXSc3735+5PbS9PNj7NixKFOmDAIDA3Ot8+eff6JBgwZKk1UsLS0xdOhQPHz4UNF1qY4ZM2YgMzMT8+fPz3G7TCbDkSNH0LlzZ7i7uyvKK1WqhC+++ALnzp1T3Ovvym7x+euvv5CSkpJjnV27dkEul6Nnz55K96K9vT08PDyUvhuyZd+3pRm7xt4TFSpUQKtWrfDbb78hJSUFMpkM3bt3z7FuREQE4uPjYWdnl+P2mJgYxf+HhYVhxowZOHHihMo/sHf7lStXrqz0ZQBk/SO5efNmvvG/OzMje8xJ48aN8+2yKYxKlSph1apV+PHHHxEREYG//voLCxYswMyZM1GpUiV8+eWXah0ne6q9Ol+0QNaMjuwug7y4ubnlWG5kZKQYT5Pt0aNHc
HBwUImhRo0aiu3qHDs3FhYWSmNv2rZtiyZNmqBevXqYP38+Fi9eDCDrS2vevHlYv349nj59qpRQvnvP5BbH999/D39/fzg5OcHb2xvt27dH//79lT78NZE9my8xMTHfWUz37t2DEALffPMNvvnmmxzrxMTEwNHRUfHe2dlZaXuZMmUAQGXMXU4iIiJw8+bNXLuO3v73CKj+vDSJ99GjRzl2nVSvXj3fOIGspCswMBBbt25ViSun3606spOrtLQ0lW2pqalKdXKjq5ldNjY2GDduHGbNmoWQkBDF7/Vtuf1M3/53V6tWLbXO5+7ujn79+mHt2rUICAhQ2f7ixQukpKTk+PuqUaMG5HI5njx5gpo1a6psd3Nzw4QJE7BkyRJs2bIFTZs2xaeffoq+ffsqkqSIiAgIIeDh4ZFjfNlDJt4mhFD53C9tmAi9R7744gsMGTIE0dHRaNeuXa4f+HK5HHZ2dtiyZUuO27M/kOPi4uDr6wtra2t8++23qFKlCkxNTREcHIypU6eqDLB896/6bJq0rBQXiUSCatWqoVq1aujQoQM8PDywZcsWtROh0NBQAEDVqlW1GlduXwBSqbTAg8TzO7YmvL29YWNjo9RqMXr0aKxfvx7jxo2Dj48PbGxsIJFI0KtXrxwH5eYUR8+ePdG0aVPs3r0bR44cwcKFC7FgwQLs2rUrz/Fm5cqVQ2ZmpsqYEk9PTwDArVu30LRp0zyvKTvGSZMmwc/PL8c67/6eC3Pvy+VytG7dOtfWinfHnL378ypIvAXVs2dPXLhwAZMnT0adOnVgaWkJuVyOtm3bFnjxyrJly0IqlSIqKkplW3ZZflPKY2Nj1Vr6wMzMTPGlr67ssUKBgYEq47V0Yfr06di8eTMWLFiAzp07a/XYixcvxoABA7B3714cOXIEY8aMwbx583Dp0iVUrlwZcrkcEokEhw4dyvGezml81evXr3NNnEoLJkLvkS5dumDYsGG4dOmSYpBmTqpUqYJjx46hcePGeX4Znjp1Cq9evcKuXbvQrFkzRXlkZKRW4y5p3N3dUaZMmRw/mHOzefNmSCQStG7dWoeR5c3FxQXHjh1TSQKyuzBdXFx0cl6ZTKa0+OSOHTvg7++vaCECsv6y12RxPSCrtW7kyJEYOXIkYmJi8NFHH+F///tfnolQdsITGRmJDz74QFHeqVMnzJs3D7/++mu+iVB2q5OxsbHK7LPCyO2v5ipVqiApKanA59IkXhcXF0RERKiUh4eH53ue169f4/jx4wgMDMTMmTMV5TkdT5MWAgMDA9SuXRt///23yrbLly/D3d0935bWrl274vTp0/mey9/fX+NFLbNbhWbPnp3jpAoXF5ccf34F/XdXpUoV9O3bF2vWrFFpaapQoQLMzc1zPZ+BgQGcnJzyPH7t2rVRu3ZtzJgxAxcuXEDjxo2xevVqzJ07F1WqVIEQAm5ubmpN+sjMzMSTJ0/w6aefanSNJQ3HCL1HLC0tsWrVKsyePTvHVVGz9ezZEzKZDHPmzFHZlpmZqfjCyv6L4O2/atPT0/Hjjz9qN/AcNG/eHEIInXaLXb58WTFL6W1XrlzBq1ev1O4umD9/Po4cOYLPP/+8WP8yat++PWQyGVasWKFUvnTpUkgkknxn7hXEyZMnkZSUhA8//FBRZmhoqNISsnz5cpUlF3Ijk8lUulns7Ozg4OCQY/fJ23x8fABA5UvVx8cHbdu2xU8//YQ9e/ao7Jeeno5JkyYpztW8eXOsWbMmx2T47fFQmrCwsMgxGezZsycuXryIv/76S2VbXFwcMjMz8zyuJvG2b98ely5dwpUrV5S259Y6/LacPg8A5NhKYmFhoYhfHd27d8fVq1eVfm/h4eE4ceIEevToke/+uhojlG3cuHGwtbXFt99+q7Ktffv2uHLlCi5evKgoS05Oxtq1a+Hq6qrROLxsM2bMQEZGhsryI4aGhmjTpg327t2rtBzB8+fPFYvq5raoa0JCgsq9VLt2bRgYGCj+X
XXt2hWGhoYIDAxU+T0LIVQWrb19+zZSU1PRqFEjja+xJGGL0HtGnWngvr6+GDZsGObNm4fr16+jTZs2MDY2RkREBLZv345ly5ahe/fuaNSoEcqUKQN/f3+MGTMGEokEmzdv1klX19GjR/H8+fN861WpUkXxZQcAZ86cUXTLvHjxAsnJyZg7dy6ArHE4b7dkvWvz5s3YsmULunTpAm9vb5iYmODOnTv45ZdfYGpqiq+//lqpfmZmJn799VcAWS0cjx49wr59+3Dz5k20aNECa9euVft6d+zYkWMzc+vWrRXTqjXVqVMntGjRAtOnT8fDhw/x4Ycf4siRI9i7dy/GjRuHKlWqFOi42eLj4xXXn5mZifDwcKxatQpmZmZK4xk6duyIzZs3w8bGBl5eXrh48SKOHTum9urAiYmJqFy5Mrp3744PP/wQlpaWOHbsGK5evarUypQTd3d31KpVC8eOHVOZjr1p0ya0adMGXbt2RadOndCyZUtYWFggIiICW7duRVRUlGKQ/sqVK9GkSRPUrl0bQ4YMgbu7O54/f46LFy/i33//LdBjBby9vbFq1SrMnTsXVatWhZ2dHT755BNMnjwZ+/btQ8eOHTFgwAB4e3sjOTkZt27dwo4dO/Dw4cN8x5OpG++UKVOwefNmtG3bFmPHjlVMn3dxccl3LJ+1tTWaNWuG77//HhkZGXB0dMSRI0dybCHOHrMzffp09OrVC8bGxujUqZMiQXrXyJEjsW7dOnTo0AGTJk2CsbExlixZgooVK2LixIlq/Wx1ycbGBmPHjs1x0HRAQIBi6ZIxY8agbNmy2LhxIyIjI7Fz584CdWFntwpt3LhRZdvcuXNx9OhRNGnSBCNHjoSRkRHWrFmDtLS0PNdtO3HiBEaNGoUePXqgWrVqyMzMxObNm2FoaKhYW6tKlSqYO3cupk2bhocPH6Jz586wsrJCZGQkdu/ejaFDhyr+YACyPrfNzc2LtSVcK4p8nhppzdvT5/OS28rSa9euFd7e3sLMzExYWVmJ2rVriylTpiimbgohxPnz58XHH38szMzMhIODg5gyZYr466+/lKZlCpH7ysP+/v7CxcUl32sp6PTXvKajz5o1K89z3rx5U0yePFl89NFHomzZssLIyEhUqlRJ9OjRQwQHB6tcx9vHNjc3F66urqJbt25ix44d+U6RVSfet3+mea0om9dU/sTERDF+/Hjh4OAgjI2NhYeHh1i4cKHS1G4hsqbPazI9/d3fj0QiEWXLlhWffvqpYmpxttevX4uBAweK8uXLC0tLS+Hn5yfu3r0rXFxclH5/ud2/aWlpYvLkyeLDDz8UVlZWwsLCQnz44Yfixx9/VCvWJUuWCEtLyxyneqekpIhFixaJ+vXrC0tLS2FiYiI8PDzE6NGjlZYXEEKI+/fvi/79+wt7e3thbGwsHB0dRceOHZVWps7tGt6duixE1nINHTp0EFZWVgKA0lT6xMREMW3aNFG1alVhYmIiypcvLxo1aiQWLVok0tPThRD5r2qsTrxCZN33vr6+wtTUVDg6Ooo5c+aIn3/+Wa3p7v/++6/o0qWLsLW1FTY2NqJHjx7i2bNnOf57mzNnjnB0dBQGBgZqHfvJkyeie/fuwtraWlhaWoqOHTuKiIiIPPfRhdw+y16/fq1YZuDd38H9+/dF9+7dha2trTA1NRUNGjQQBw4cUOt8uX0+R0RECENDwxw/B4KDg4Wfn5+wtLQU5ubmokWLFuLChQtKdd69Bx88eCAGDRokqlSpIkxNTUXZsmVFixYtxLFjx1TOvXPnTtGkSRNhYWEhLCwshKenp/jqq69EeHi4Ur2GDRvmuIp7aSMRohSMZCUiUlN8fDzc3d3x/fffY/DgwcUdDtF76fr16/joo48QHByMOnXqFHc4hcJEiIjeOwsWLMD69etx+/btQs+uIyJV2bNAi+MRKNrGRIiIiIj0Fv9UIiIiIr3FRIiIiIj0FhMhIiIi0ltMhIiIiEhvcUHFfMjlc
jx79gxWVlal/sFyRERE+kIIgcTERDg4OOQ5e5SJUD6ePXuW77NbiIiIqGR68uQJKleunOt2JkL5yH7Y35MnT3J9hovOJD4Hkt557ER6MvDb/z97p3UgUOkjwOCdpwRbVgSsCvaYBiIiovdBQkICnJyc8n1oLxOhfGR3h1lbWxd9ImRtDeCth3je3gccmQJI/7+L7sxswNoBaLsA8CrdT/8lIiLShfyGtXCwdGlxex+wrT+Q+M7TpROisspv7yueuIiIiEoxJkKlgVwGHJ6KrOddvuv/yw4HZNUjIiIitTERKg0eXQASnuVRQQAJT7PqERERkdo4Rqg0eHfAdGHrERFpiRACmZmZkMnYIk1Fy9DQEEZGRoVe2oaJUGlgqeYMMHXrERFpQXp6OqKiopCSklLcoZCeMjc3R6VKlWBiYlLgYzARKg1cGmXNDkuIQs7jhCRZ210aFXVkRKSn5HI5IiMjYWhoCAcHB5iYmHDRWSoyQgikp6fjxYsXiIyMhIeHR56LJuaFiVBpYGCYNUV+W38AEignQ///wdN2vup6QkREOpKeng65XA4nJyeYm5sXdzikh8zMzGBsbIxHjx4hPT0dpqamBToOE6HSwutToOcm4NAU5Sn01g5ZSRDXESKiYqDpX+ExCamISUzT+Dx2VlLYWRfsi47eXwVtBXobE6GSLDE665XN1hno9hOwoUPW+3YLAMf6WS1Bz67/V8/KPutFRFTCbLn8GMuOR2i839iWHhjfupoOIiJ9x0SoJPt7PXB6fu7bD03Nudw3AGgxTTcxEREVQp+GzmjtpTyxIzVDhu6rLwIAdgz3gamxaje/nZW0SOIj/cNEqCSrNxCo3k7z/dgaREQllJ21qUoXV2JqhuL/k9IyUde5DAwNOPCaigYToZKMXVxE9J47HBqFWfvCFO8HrL+KSjammNXJC21rVSrGyEiXmjdvjjp16iAoKKi4Q+HK0kREVDwOh0ZhxK/BeJ6gPHg6Oj4VI34NxuHQqFz2LFrZi0a+Kz09vUDHK+h+pBtMhIiIqMjJ5AKB+2/n9QRFBO6/DZk8pxqFI5fLMW/ePLi5ucHMzAwffvghduzYodh+6tQpSCQSHDp0CN7e3pBKpTh37hyaN2+OUaNGYdy4cShfvjz8/PwAAKdPn0aDBg0glUpRqVIlBAQEKCVOOe0nhMDs2bPh7OwMqVQKBwcHjBkzJs+4V61ahSpVqsDExATVq1fH5s2blbZLJBL89NNP6NKlC8zNzeHh4YF9+3J/IPfdu3dhbm6O3377TVG2bds2mJmZ4fbt27nut2/fPnh4eMDU1BQtWrTAxo0bIZFIEBcXBwB49eoVevfuDUdHR5ibm6N27dr4/fffFfsPGDAAp0+fxrJlyyCRSCCRSPDw4UMAQGhoKNq1awdLS0tUrFgR/fr1w8uXL/P8uRQWEyEiIipyVyJjERWfmut2ASAqPhVXImO1fu558+Zh06ZNWL16NcLCwjB+/Hj07dsXp0+fVqoXEBCA+fPn486dO/jggw8AABs3boSJiQnOnz+P1atX4+nTp2jfvj3q16+PGzduYNWqVfj5558xd+5cpWO9u9/OnTuxdOlSrFmzBhEREdizZw9q166da8y7d+/G2LFjMXHiRISGhmLYsGEYOHAgTp48qVQvMDAQPXv2xM2bN9G+fXv06dMHsbE5/ww9PT2xaNEijBw5Eo8fP8a///6L4cOHY8GCBfDy8spxn8jISHTv3h2dO3fGjRs3MGzYMEyfPl2pTmpqKry9vXHw4EGEhoZi6NCh6NevH65cuQIAWLZsGXx8fDBkyBBERUUhKioKTk5OiIuLwyeffIK6devi77//xuHDh/H8+XP07Nkz15+LVgjKU3x8vAAg4uPjizsUIqIS482bN+L27dvizZs3Bdp/T8i/wmXqgXxfe0L+1WrcqampwtzcXFy4cEGpfPDgwaJ3795CCCFOnjwpAIg9e/Yo1fH19RV169ZVKvv6669F9erVhVwuV
5StXLlSWFpaCplMlut+ixcvFtWqVRPp6elqxd2oUSMxZMgQpbIePXqI9u3bK94DEDNmzFC8T0pKEgDEoUOH8jx2hw4dRNOmTUXLli1FmzZtlK7lXVOnThW1atVSKps+fboAIF6/fp3nOSZOnKh47+vrK8aOHatUZ86cOaJNmzZKZU+ePBEARHh4eI7Hzes+VPf7m4OliYioyNlZqbc4orr11HXv3j2kpKSgdevWSuXp6emoW7euUlm9evVU9vf29lZ6f+fOHfj4+Cg9XqRx48ZISkrCv//+C2dn5xz369GjB4KCguDu7o62bduiffv26NSpE4yMcv5avnPnDoYOHapU1rhxYyxbtkypLLvlCgAsLCxgbW2NmJiYHI+Z7ZdffkG1atVgYGCAsLCwPB+VEh4ejvr16yuVNWjQQOm9TCbDd999h23btuHp06dIT09HWlpaviuQ37hxAydPnoSlpaXKtvv376NaNd2sI8VEiIiIilwDt7KoZGOK6PjU3J6gCHsbUzRwK6vV8yYlJQEADh48CEdHR6VtUqnyWkUWFhYq++dUpo5393NyckJ4eDiOHTuGo0ePYuTIkVi4cCFOnz4NY2PjAp0DgMq+EokEcrk8z31u3LiB5ORkGBgYICoqCpUqFW623sKFC7Fs2TIEBQWhdu3asLCwwLhx4/IdJJ6UlIROnTphwYIFKtsKG1NemAgREVGRMzSQYFYnL4z4NTi3JyhiVicvra8n5OXlBalUisePH8PX17fQx6tRowZ27twJIYSiJeX8+fOwsrJC5cqV89zXzMwMnTp1QqdOnfDVV1/B09MTt27dwkcffZTjec6fPw9/f39F2fnz53Mdy6Ou2NhYDBgwANOnT0dUVBT69OmD4OBgmJmZ5Vi/evXq+PPPP5XKrl69qvT+/Pnz+Oyzz9C3b18AWYPT//nnH6VYTUxMIJPJlPb76KOPsHPnTri6uubaMqYLHCxNRETFom2tSljV9yPYWSu3xNjbmGJV3490so6QlZUVJk2ahPHjx2Pjxo24f/8+goODsXz5cmzcuFHj440cORJPnjzB6NGjcffuXezduxezZs3ChAkT8nwO1oYNG/Dzzz8jNDQUDx48wK+//gozMzO4uLjkWH/y5MnYsGEDVq1ahYiICCxZsgS7du3CpEmTNI75bcOHD4eTkxNmzJiBJUuWQCaT5XnMYcOG4e7du5g6dSr++ecfbNu2DRs2bAAARSLo4eGBo0eP4sKFC7hz5w6GDRuG58+fKx3H1dUVly9fxsOHD/Hy5UvI5XJ89dVXiI2NRe/evXH16lXcv38ff/31FwYOHKiSNGlVniOIiIOliYhyUNjB0m9LeJOuGBx98u5zkSnLfbCuNsjlchEUFCSqV68ujI2NRYUKFYSfn584ffq0EOK/wdLvDv7NaYCvEEKcOnVK1K9fX5iYmAh7e3sxdepUkZGRked+u3fvFg0bNhTW1tbCwsJCfPzxx+LYsWN5xv3jjz8Kd3d3YWxsLKpVqyY2bdqktB2A2L17t1KZjY2NWL9+fY7H27hxo7CwsBD//POPouzy5cvC2NhY/Pnnn7nGsXfvXlG1alUhlUpF8+bNxapVqwQAxb3w6tUr8dlnnwlLS0thZ2cnZsyYIfr37y8+++wzxTHCw8PFxx9/LMzMzAQAERkZKYQQ4p9//hFdunQRtra2wszMTHh6eopx48blOoBbG4OlJUII7S/S8B5JSEiAjY0N4uPjYW1tXdzhEBGVCKmpqYiMjISbmxtMTdUf0JzT0+fVfdYYnz5fMv3vf//D6tWr8eTJkyI/d173obrf3xwjRERERSa/p89nJ0Tv4tPnS44ff/wR9evXR7ly5XD+/HksXLgQo0aNKu6wCoyJEBERFZmcnj6vDj59vuSIiIjA3LlzERsbC2dnZ0ycOBHTpk0r7rAKjIkQEREVmZyePk+ly9KlS7F06dLiDkNrOGuMiIiI9BYTISIiItJbTISIiIhIb3GMEBERFZ3E6
KyXpqzss15EWsZEiIiIis7f64HT8zXfzzcAaFF6ZyZRycVEiIiIik69gUD1dsplmW+AX9pm/f+gw4BRDs+5YmsQ6QgTISIiKjo5dXGlJvz3/2lJQOUGgIHq6tKkfa6urhg3bhzGjRtX3KEUGw6WJiKi4nN7H7CywX/vt3QHgmplldN769SpU5BIJIiLiyvuUJgIERFRMbm9D9jWH0iMUi5PiMoqLyHJkBACmZmZKuXp6ekFOl5B9yPdYCJERERFTy4DDk8FkNNzv/+/7HBAVj1tn1oux7x58+Dm5gYzMzN8+OGH2LFjh2J7dmvFoUOH4O3tDalUinPnzqF58+YYNWoUxo0bh/Lly8PPzw8AcPr0aTRo0ABSqRSVKlVCQECAUuKU035CCMyePRvOzs6QSqVwcHDAmDFj8ox71apVqFKlCkxMTFC9enVs3rxZabtEIsFPP/2ELl26wNzcHB4eHti3L/9kMiUlBYMGDYKVlRWcnZ2xdu3aPOsnJiaiT58+sLCwQKVKlbB06VI0b95cqXtt8+bNqFevHqysrGBvb48vvvgCMTExAICHDx+iRYsWAIAyZcpAIpFgwIABAPL/3egCEyEiIip6jy4ACc/yqCCAhKdZ9bRs3rx52LRpE1avXo2wsDCMHz8effv2xenTp5XqBQQEYP78+bhz5w4++OADAMDGjRthYmKC8+fPY/Xq1Xj69Cnat2+P+vXr48aNG1i1ahV+/vlnzJ07V+lY7+63c+dOLF26FGvWrEFERAT27NmD2rVr5xrz7t27MXbsWEycOBGhoaEYNmwYBg4ciJMnTyrVCwwMRM+ePXHz5k20b98effr0QWxsbJ4/j8WLF6NevXoICQnByJEjMWLECISHh+daf8KECTh//jz27duHo0eP4uzZswgODlaqk5GRgTlz5uDGjRvYs2cPHj58qEh2nJycsHPnTgBAeHg4oqKisGzZMgDq/260SlCe4uPjBQARHx9f3KEQEZUYb968Ebdv3xZv3rwp2AFubhdilnX+r5vbtRp3amqqMDc3FxcuXFAqHzx4sOjdu7cQQoiTJ08KAGLPnj1KdXx9fUXdunWVyr7++mtRvXp1IZfLFWUrV64UlpaWQiaT5brf4sWLRbVq1UR6erpacTdq1EgMGTJEqaxHjx6iffv2ivcAxIwZMxTvk5KSBABx6NChXI/r4uIi+vbtq3gvl8uFnZ2dWLVqVY71ExIShLGxsdi+/b/fS1xcnDA3Nxdjx47N9TxXr14VAERiYqIQ4r+f8evXrxV11PndvCuv+1Dd72+2CBERUdGzVPMJ9OrWU9O9e/eQkpKC1q1bw9LSUvHatGkT7t+/r1S3Xr16Kvt7e3srvb9z5w58fHwgkUgUZY0bN0ZSUhL+/fffXPfr0aMH3rx5A3d3dwwZMgS7d+/OcRzS2+dp3LixUlnjxo1x584dpbLslisAsLCwgLW1taJLKjdv7yORSGBvb5/rPg8ePEBGRgYaNPhvgLuNjQ2qV6+uVO/atWvo1KkTnJ2dYWVlBV9fXwDA48ePc41Dk9+NNnH6PBERFT2XRoC1Q9bA6BzHCUmytrs00uppk5KSAAAHDx6Eo6Oj0japVKr03sLCQmX/nMrU8e5+Tk5OCA8Px7Fjx3D06FGMHDkSCxcuxOnTp2FsbFygcwBQ2VcikUAul2t9n7wkJyfDz88Pfn5+2LJlCypUqIDHjx/Dz88vz4HimvxutIktQkREVPQMDIG2C/7/jeSdjf//vu18ra8n5OXlBalUisePH6Nq1apKLycnJ42PV6NGDVy8eBFC/JfMnT9/HlZWVqhcuXKe+5qZmaFTp0744YcfcOrUKVy8eBG3bt3K9Tznz59XKjt//jy8vLw0jrkw3N3dYWxsjKtXryrK4uPj8c8//yje3717F69evcL8+fPRtGlTeHp6qrQwmZiYAABksv8Gw2v7d6MutggREVHx8PoU6LkJODRFeQq9tUNWEuT1qdZPaWVlhUmTJmH8+
PGQy+Vo0qQJ4uPjcf78eVhbW8Pf31+j440cORJBQUEYPXo0Ro0ahfDwcMyaNQsTJkyAgUHubQ0bNmyATCZDw4YNYW5ujl9//RVmZmZwcXHJsf7kyZPRs2dP1K1bF61atcL+/fuxa9cuHDt2TKN4C8vKygr+/v6YPHkyypYtCzs7O8yaNQsGBgaK7kFnZ2eYmJhg+fLlGD58OEJDQzFnzhyl47i4uEAikeDAgQNo3749zMzMtP67UVepaxFauXIlXF1dYWpqioYNG+LKlStq7bd161ZIJBJ07txZtwESEZH6vD4Fvnrrc7zPDmDcLZ0kQdnmzJmDb775BvPmzUONGjXQtm1bHDx4EG5ubhofy9HREX/++SeuXLmCDz/8EMOHD8fgwYMxY8aMPPeztbXFunXr0LhxY3zwwQc4duwY9u/fj3LlyuVYv3Pnzli2bBkWLVqEmjVrYs2aNVi/fj2aN2+uccyFtWTJEvj4+KBjx45o1aoVGjdujBo1asDU1BQAUKFCBWzYsAHbt2+Hl5cX5s+fj0WLFikdw9HREYGBgQgICEDFihUxatQoANr93ahLIt5uzyvh/vjjD/Tv3x+rV69Gw4YNERQUhO3btyM8PBx2dna57vfw4UM0adIE7u7uKFu2LPbs2aP2ORMSEmBjY4P4+HhYW1tr4SqIiEq/1NRUREZGws3NTfEFqJacnj6v7rPG+LyxEik5ORmOjo5YvHgxBg8eXKTnzus+VPf7u1R1jS1ZsgRDhgzBwIEDAQCrV6/GwYMH8csvvyAgICDHfWQyGfr06YPAwECcPXu2RCznTUSkt/J7+nx2QvQuPn2+xAgJCcHdu3fRoEEDxMfH49tvvwUAfPbZZ8UcWcGUmkQoPT0d165dw7Rp//1DMDAwQKtWrXDx4sVc9/v2229hZ2eHwYMH4+zZs/meJy0tDWlpaYr3CQkJedQmIiKN5PT0eXWwNahEWbRoEcLDw2FiYgJvb2+cPXsW5cuXL+6wCqTUJEIvX76ETCZDxYrKa0pUrFgRd+/ezXGfc+fO4eeff8b169fVPs+8efMQGBhYmFCJiCg37OIq9erWrYtr164VdxhaU+oGS6srMTER/fr1w7p16zTKUqdNm4b4+HjF68mTJzqMkoiIiIpTqWkRKl++PAwNDfH8+XOl8ufPn8PeXvWvi/v37+Phw4fo1KmToix7gSgjIyOEh4ejSpUqKvtJpVKdLtxERPQ+KUXzbeg9pI37r9S0CGX3Qx4/flxRJpfLcfz4cfj4+KjU9/T0xK1bt3D9+nXF69NPP0WLFi1w/fp1nS7ORET0vstejTglJaWYIyF9ln3/FWY17lLTIgRkPfHW398f9erVQ4MGDRAUFITk5GTFLLL+/fvD0dER8+bNg6mpKWrVqqW0v62tLQColBMRkWYMDQ1ha2urWDHY3Nxc6XlbRLokhEBKSgpiYmJga2sLQ8OCr0BeqhKhzz//HC9evMDMmTMRHR2NOnXq4PDhw4oB1I8fP85zJU8iItKe7GEJ+T3Uk0hXbG1tcxweo4lStaBiceCCikREeZPJZMjIyCjuMEjPGBsb59kS9F4uqEhERCWPoaFhobomiIoT+5GIiIhIbzERIiIiIr3FRIiIiIj0FhMhIiIi0ltMhIiIiEhvMREiIiIivcVEiIiIiPQWEyEiIiLSW0yEiIiISG8xESIiIiK9xUSIiIiI9BYTISIiItJbTISIiIhIbzERIiIiIr3FRIiIiIj0FhMhIiIi0ltMhIiIiEhvMREiIiIivcVEiIiIiPQWEyEiIiLSW0yEiIiISG8ZaVJZLpfj9OnTOHv2LB49eoSUlBRUqFABdevWRatWreDk5KSrOImIiIi0Tq0WoTdv3mDu3LlwcnJC+/btcejQIcTFxcHQ0BD37t3DrFmz4Obmhvbt2+PSpUu6jpmIiIhIK9RqEapWrRp8fHywbt06tG7dGsbGxip1Hj16hN9++w29evXC9OnTMWTIEK0HS0RERKRNE
iGEyK/SnTt3UKNGDbUOmJGRgcePH6NKlSqFDq4kSEhIgI2NDeLj42FtbV3c4RAREZEa1P3+VqtrTN0kCACMjY3fmySIiIiI3m8FmjV29uxZ9O3bFz4+Pnj69CkAYPPmzTh37pxWgyMiIiLSJY0ToZ07d8LPzw9mZmYICQlBWloaACA+Ph7fffed1gMkIiIi0hWNE6G5c+di9erVWLdundKg6caNGyM4OFirwRERERHpksaJUHh4OJo1a6ZSbmNjg7i4OG3ERERERFQkNE6E7O3tce/ePZXyc+fOwd3dXStBERERERUFjROhIUOGYOzYsbh8+TIkEgmePXuGLVu2YNKkSRgxYoQuYiQiIiLSCY0esQEAAQEBkMvlaNmyJVJSUtCsWTNIpVJMmjQJo0eP1kWMRERERDqh1oKKOUlPT8e9e/eQlJQELy8vWFpaaju2EoELKhIREZU+6n5/a9wilM3ExAReXl4F3Z2IiIio2GmcCLVo0QISiSTX7SdOnChUQERERERFReNEqE6dOkrvMzIycP36dYSGhsLf319bcRERERHpnMaJ0NKlS3Msnz17NpKSkgodEBEREVFRKdCzxnLSt29f/PLLL9o6HBEREZHOaS0RunjxIkxNTbV1OCIiIiKd07hrrGvXrkrvhRCIiorC33//jW+++UZrgRERERHpmsaJkI2NjdJ7AwMDVK9eHd9++y3atGmjtcCIiIiIdE3jRGj9+vW6iIOIiIioyGltjBARERFRaaNWi1CZMmXyXETxbbGxsYUKiIiIiKioqJUIBQUF6TgMIiIioqKnViLEFaOJiIjofVTgh64CQGpqKtLT05XK+IR2IiIiKi00HiydnJyMUaNGwc7ODhYWFihTpozSi4iIiKi00DgRmjJlCk6cOIFVq1ZBKpXip59+QmBgIBwcHLBp0yZdxEhERESkExp3je3fvx+bNm1C8+bNMXDgQDRt2hRVq1aFi4sLtmzZgj59+ugiTiIiIiKt07hFKDY2Fu7u7gCyxgNlT5dv0qQJzpw5o93oiIiIiHRI40TI3d0dkZGRAABPT09s27YNQFZLka2trVaDIyIiItIljROhgQMH4saNGwCAgIAArFy5Eqamphg/fjwmT56s9QCJiIiIdEUihBDqVJw0aRK+/PJLeHp6KpU/evQI165dQ9WqVfHBBx/oJMjilJCQABsbG8THx3NpACIiolJC3e9vtRMhDw8PPHjwAA0bNsSXX36Jzz//HBYWFloLuKRiIkRERFT6qPv9rXbXWEREBE6ePIlq1aph7NixsLe3x6BBg3DhwgWtBKyulStXwtXVFaampmjYsCGuXLmSa91169ahadOmijWOWrVqlWd9IiIi0i8ajRFq1qwZNmzYgOjoaCxbtgwRERFo0qQJatSogUWLFuH58+e6ihMA8Mcff2DChAmYNWsWgoOD8eGHH8LPzw8xMTE51j916hR69+6NkydP4uLFi3ByckKbNm3w9OlTncZJREREpYPaXWO5uXfvHtavX4/Vq1cjKSkJaWlp2opNRcOGDVG/fn2sWLECACCXy+Hk5ITRo0cjICAg3/1lMhnKlCmDFStWoH///mqdk11jREREpY/Wu8ZykpycjLNnz+L06dN4/fq1Yn0hXUhPT8e1a9fQqlUrRZmBgQFatWqFixcvqnWMlJQUZGRkoGzZsrnWSUtLQ0JCgtKLiIiI3k8FSoTOnTuHQYMGoVKlShgzZgyqVauGs2fP4s6dO9qOT+Hly5eQyWSoWLGiUnnFihURHR2t1jGmTp0KBwcHpWTqXfPmzYONjY3i5eTkVKi4iYiIqORSOxGKiorC/Pnz4enpiWbNmuHu3btYsmQJoqKi8Msvv6Bx48a6jLPQ5s+fj61bt2L37t0wNTXNtd60adMQHx+veD158qQIoyQiIqKipPazxpycnFCuXDn069cPgwcPRo0aNXQZl4ry5cvD0NBQZUD28+fPYW9vn+e+ixYtwvz583Hs2LF81zqSS
qWQSqWFjpeIiIhKPrVbhLZt24anT59i0aJFRZ4EAYCJiQm8vb1x/PhxRZlcLsfx48fh4+OT637ff/895syZg8OHD6NevXpFESoRERGVEmq3CHXt2lWXcahlwoQJ8Pf3R7169dCgQQMEBQUhOTkZAwcOBAD0798fjo6OmDdvHgBgwYIFmDlzJn777Te4uroqxhJZWlrC0tKy2K6DiIiISga1E6GS4PPPP8eLFy8wc+ZMREdHo06dOjh8+LBiAPXjx49hYPBfI9eqVauQnp6O7t27Kx1n1qxZmD17dlGGTkRERCVQodcRet9xHSEiIqLSp0jWESIiIiIqzZgIERERkd7SeIxQcnIy5s+fj+PHjyMmJgZyuVxp+4MHD7QWHBEREZEuaZwIffnllzh9+jT69euHSpUqQSKR6CIuIiIiIp3TOBE6dOgQDh48WOJXkiYiIiLKj8ZjhMqUKZPnQ0uJiIiISguNE6E5c+Zg5syZSElJ0UU8REREREVG466xxYsX4/79+6hYsSJcXV1hbGystD04OFhrwRERERHpksaJUOfOnXUQBhEREVHR48rS+eDK0kRERKWPut/fBX7W2LVr13Dnzh0AQM2aNVG3bt2CHoqIiIioWGicCMXExKBXr144deoUbG1tAQBxcXFo0aIFtm7digoVKmg7RiIiIiKd0HjW2OjRo5GYmIiwsDDExsYiNjYWoaGhSEhIwJgxY3QRIxEREZFOaDxGyMbGBseOHUP9+vWVyq9cuYI2bdogLi5Om/EVO44RIiIiKn109vR5uVyuMmUeAIyNjVWeO0ZERERUkmmcCH3yyScYO3Ysnj17pih7+vQpxo8fj5YtW2o1OCIiIiJd0jgRWrFiBRISEuDq6ooqVaqgSpUqcHNzQ0JCApYvX66LGImIiIh0QuNZY05OTggODsaxY8dw9+5dAECNGjXQqlUrrQdHREREpEtcUDEfHCxNRERU+mh1QcUffvgBQ4cOhampKX744Yc863IKff5kcoErkbGISUyFnZUpGriVhaGBpLjDIiIi0jtqtQi5ubnh77//Rrly5eDm5pb7wSQSPHjwQKsBFrfCtgjFJKQiJjFN8f7CvZdYe/YBXialK8rKW5pgaFN3NKpaXlFmZyWFnbVp4YInIiLSU+p+f7NrLB+FTYSWHv0Hy45HaLzf2JYeGN+6msb7ERERkQ7XEfr222+RkpKiUv7mzRt8++23mh7uvdenoTMOjG6CvV81RnlLkzzrlrc0wd6vGuPA6Cbo09C5iCIkIiLSXxonQoGBgUhKSlIpT0lJQWBgoFaCep/YWZuilqMNUtJlSt1hOXmZlI6UdBlqOdqwW4yIiKgIaJwICSEgkagO7L1x4wbKli2rlaDeRzGJqVqtR0RERIWn9jpCZcqUgUQigUQiQbVq1ZSSIZlMhqSkJAwfPlwnQb4P7KzUa+FRtx4REREVntqJUFBQEIQQGDRoEAIDA2FjY6PYZmJiAldXV/j4+OgkyNIse9aYuYkhylua5Nk9Vt7SBOYmhgh9Gs9ZY0REREVA41ljp0+fRqNGjXJ88Or7iLPGiIiISp8imT6fmpqK9HTlFo73bfVlriNERERU+ugsEUpJScGUKVOwbds2vHr1SmW7TCbTPNoSTBeP2ODK0kRERLqls3WEJk+ejBMnTmDVqlWQSqX46aefEBgYCAcHB2zatKlQQesLQwMJfKqUw2d1HOFTpRyTICIiomKi8dPn9+/fj02bNqF58+YYOHAgmjZtiqpVq8LFxQVbtmxBnz59dBEnERERkdZp3CIUGxsLd3d3AFnjgWJjYwEATZo0wZkzZ7QbHREREZEOaZwIubu7IzIyEgDg6emJbdu2AchqKbK1tdVqcESkA3IZEHkWuLUj67/y92tcHxGRJjTuGhs4cCBu3LgBX19fBAQEoFOnTlixYgUyMjKwZMkSXcRIRNpyex9weCqQ8Oy/MmsHoO0CwOvT4ouLiKiYFPrp848ePcK1a9dQtWpVf
PDBB9qKq8TQxawxoiKTGJ31AoDIM8DRb3Kv23oO4NYs6/+t7LNeRESllM6mz2/atAmff/45pFKpUnl6ejq2bt2K/v37FyziEoqJEJVqJ+cBp+drvp9vANBimvbjISIqIjpLhAwNDREVFQU7Ozul8levXsHOzo7rCBGVJNktQs9CgAPj8q/fMQhwqMsWISIq9dT9/tZ4jFBuT5//999/lZ4/RkQlQHZC8+qeevWlVoBDHZ2GRERUkqidCNWtW1fx9PmWLVvCyOi/XWUyGSIjI9G2bVudBElEhWRZUbv1iIjeE2onQp07dwYAXL9+HX5+frC0tFRsy376fLdu3bQeIBEVQnbXmLE5YFEBSH6Re10Lu6x6z66za4yI9IbGY4Q2btyIXr16qQyWfl9xjBCVahwsTUR6SmdjhLy8vHD9+nU0bNhQqfzy5cswNDREvXr1NI+WiHSj3kCgerv/3keeAS78oNwyZGEHNBr939R5gK1BRKQ3NE6EvvrqK0yZMkUlEXr69CkWLFiAy5cvay04Iiqkd7u4HOoAPl8Bjy4ASc+zxgS5NAIMDIstRCKi4qRxInT79m189NFHKuV169bF7du3tRIUEemQgSHg1rS4oyAiKhE0ftaYVCrF8+fPVcqjoqKUZpIRERERlXQaJ0Jt2rTBtGnTEB8fryiLi4vD119/jdatW2s1OCIiIiJd0rgJZ9GiRWjWrBlcXFxQt25dAFlT6itWrIjNmzdrPUAiIiIiXdE4EXJ0dMTNmzexZcsW3LhxA2ZmZhg4cCB69+4NY2NjXcRIREREpBMFGtRjYWGBoUOHajsWIiIioiKl8RghANi8eTOaNGkCBwcHPHr0CACwdOlS7N27V6vBEREREemSxonQqlWrMGHCBLRr1w6vX79WPG2+TJkyCAoK0nZ8RERERDqjcSK0fPlyrFu3DtOnT1eaLl+vXj3cunVLq8ERERER6ZLGY4QiIyMVs8XeJpVKkZycrJWgiCh/MrnAlchYxCSmws7KFA3cysLQQFLcYRERlSoaJ0Jubm64fv06XFxclMoPHz6MGjVqaC0wIsrd4dAoBO6/jaj4VEVZJRtTzOrkhba1KhVjZKQ35DI+qoXeCxonQhMmTMBXX32F1NRUCCFw5coV/P7775g3bx5++uknXcRIRG85HBqFEb8GQ7xTHh2fihG/BmNV34+YDJFu3d4HHJ4KJDz7r8zaAWi7APD6tPjiIioAjROhL7/8EmZmZpgxYwZSUlLwxRdfwMHBAcuWLUOvXr10ESOR3otJSEVMYhpkcoEZe0JVkiAAirIZe0JRycYMhgYS2FlJYWdtWpShUilSoO7V2/uAbf2Bd+/ChKis8p6bmAzpmQJ305eQVkWJECKnz1S1pKSkICkpCXZ2dtqMqURJSEiAjY0N4uPjYW1tXdzhkJ5aevQfLDseofF+Y1t6YHzrajqIiEq7AnWvymVAUC3lliAlkqyWoXG32E2mJwrcTV8ErYrqfn8XaB0hAIiJicG1a9cQHh6OFy9eFPQwGlu5ciVcXV1hamqKhg0b4sqVK3nW3759Ozw9PWFqaoratWvjzz//LKJIibTHr2ZFBH1eB/0+dsm/MoB+H7sg6PM68KtZUceRUWmU3b369pcX8F/36uHQKOUdEqOBZ9eB4E15JEEAIICEp1n1nl3P2o/eWxrfR9myWxXfvZeyWxVv79NRxDnTOBFKTExEv3794ODgAF9fX/j6+sLBwQF9+/ZVehCrLvzxxx+YMGECZs2aheDgYHz44Yfw8/NDTExMjvUvXLiA3r17Y/DgwQgJCUHnzp3RuXNnhIaG6jROIm37K+w5xv1xHZsvPVKr/uZLjzDuj+v4K+y5jiOj0kYmFwjcfzvP7tXA/bchk79V48JyYK0vcGCceic5MC6r/oXlhQuWSqwC3UdAVqvi4alQ6Vp9e8/DAVn1iojGXWOff/45QkJCsHz5cvj4+AAALl68iLFjx6JOnTrYunWrTgIFgIYNG6J+/
fpYsWIFAEAul8PJyQmjR49GQEBAjrEmJyfjwIEDirKPP/4YderUwerVq3M8R1paGtLS0hTvExIS4OTkhCdPniia1oyNjWFmZoY3b94gIyNDUVcqlSqWEcheaBIATE1NYWJigqSkJMjlckW5ubk5jIyMkJCQoBSDhYUFDAwMkJiYqFRuZWUFuVyuskyBtbU1MjMzkZKSoigzMDCApaUl0tPTkZr6X7ZuaGgICwsLlevkNZXsa3qRkIoXSWmQGBjhq22heJmUjtyUMzfG8u6eMDCQoHJ5G1Qub10irynb+/R7KsnXdPPxK4Q+eoEHr97gp0t5tepkGd7EGc42xqhqZwHP0GUwDl4LM2P1lmdIThcwNgRMGo9GUpNp/D29R9eULDPEy+QMXImIwpy/HiA/33WpBS97S9jG3kLZNw9hGBsBi79X5Ltfcr1RkJX1AMpXg7FT3QJd07Nnz+Do6Jj/0BahIXNzc3H27FmV8jNnzghzc3NND6e2tLQ0YWhoKHbv3q1U3r9/f/Hpp5/muI+Tk5NYunSpUtnMmTPFBx98kOt5Zs2aJZCVlub6Gjx4sBBCiMGDByuVz5o1SwghRJs2bZTK161bJ4QQwsvLS6n88OHDQgghrKyslMpDQ0NFfHy8ynnj4+NFaGioUpmVlZUQQojDhw8rlXt5eQkhhFi3bp1SeZs2bXK8Tl5T6bmmQ7eeCZcp+4XzlH3CZeoBxct16gHhMmW/MKvmU+qu6X38PZW0a2o2eokAIMxrNFO6b3J7lf8sQPmaencW4snf4tXXFYRsppUQs6xVXrKZVuLROEthIIFYt/AbIRKi+Ht6z65p1Iq9wmncNrXvI5epB4Rdj0DF/r1qGeV477z76lXLqNDXVL16dcXPOy8atwg5Ozvj4MGDqF27tlL5zZs30b59e/z777+aHE5t2ZndhQsXFC1RADBlyhScPn0aly9fVtnHxMQEGzduRO/evRVlP/74IwIDA/H8ec5dBmwR4jWVhmvaG/wI3x36B88T/2sZqmRjiqmtq+CTamVL5TW9j7+nknRNhWoRqmSjuKa06ztgsudLAIAE/319CGS1Fr3ptAaZHu34e3pPryno5CP8fOERREYaDEzUm5Eq5DJMqJGEgdXTS2SLkMaJ0Nq1a7F9+3Zs3rwZ9vb2AIDo6Gj4+/uja9euGDZsmCaHU1tRJULv4qwxKqm4sjQVhEwu0GTBCUTHpyKnD38JAHsbU5yb+knu91OOM34cgbbzOXX+Pff2Uh6DN17Ns5u+vKUJfvavr7yUh2LmYRSQ2x2opZmH6n5/a7yO0KpVq3Dv3j04OzvD2dkZAPD48WNIpVK8ePECa9asUdQNDg4uQOg5K1++PAwNDVUSmOfPnysSsnfZ29trVJ+oNDE0kMCnSrniDoNKGUMDCWZ18sKIX4MhgfJXUXbaM6uTV95JtdengGeHErEGDBUtO2tTxdpkczvXwohfs77nc7qP5nauhQ+dbJUPYGCYNUV+W///r5nDnm3nF+m9pHEi1LlzZx2EkT8TExN4e3vj+PHjihjkcjmOHz+OUaNG5biPj48Pjh8/jnHjxinKjh49qtSiRESkb9rWqoRVfT9SWf/FXpPHtBgYAm5NdRgllXQFvo+8Ps1aeDPHdYSKvlWxUAsqFrU//vgD/v7+WLNmDRo0aICgoCBs27YNd+/eRcWKFdG/f384Ojpi3rx5ALKmz/v6+mL+/Pno0KEDtm7diu+++w7BwcGoVauWWudk1xgRva/YvUraUFJXltZZ19jJkyfRokWLHLetWbNGZ2OEgKzp8C9evMDMmTMRHR2NOnXq4PDhw6hYMWvRuMePH8PA4L+lkRo1aoTffvsNM2bMwNdffw0PDw/s2bNH7SSIiOh9xu5V0oYC30clpFVR4xYhqVSKMWPG4LvvvoOxsTEA4OXLlxg4cCDOnTuH169f6yTQ4sIWISIiotJHZ4/YOHnyJHbv3o369evj9u3bOHjwIGrVqoWEhARcv369M
DETERERFSmNE6FGjRrh+vXrqFWrFj766CN06dIF48ePx6lTp+Di4qKLGImIiIh0okAPXf3nn3/w999/o3LlyjAyMkJ4eLjSAlBEREREpYHGidD8+fPh4+OD1q1bIzQ0FFeuXEFISAg++OADXLx4URcxEhEREemExonQsmXLsGfPHixfvhympqaoVasWrly5gq5du6J58+Y6CJGIiIhINzSePn/r1i2UL19eqczY2BgLFy5Ex44dtRYYERERka5p3CL0bhL0tho1ahQqGCIiIqKipHYiZG5ujhcvXijed+jQAVFRUYr3z58/R6VKaizLTkRERFRCqJ0Ipaam4u21F8+cOYM3b94o1SlFT+sgIiIiKtj0+dxIJHxGDREREZUeWk2EiIiIiEoTtRMhiUSi1OLz7nsiIiKi0kbt6fNCCFSrVk2R/CQlJaFu3bqKp71zfBARERGVNmonQuvXr9dlHERERERFTu1EyN/fX5dxEBERERU5tcYIsduLiIiI3kdqJUI1a9bE1q1bkZ6enme9iIgIjBgxAvPnz9dKcERERES6pFbX2PLlyzF16lSMHDkSrVu3Rr169eDg4ABTU1O8fv0at2/fxrlz5xAWFoZRo0ZhxIgRuo6biIiIqNAkQoN+r3PnzuGPP/7A2bNn8ejRI7x58wbly5dH3bp14efnhz59+qBMmTK6jLfIJSQkwMbGBvHx8bC2ti7ucIiIiEgN6n5/a5QI6SMmQkRERKWPut/fXFmaiIiI9BYTISIiItJbTISIiIhIbzERIiIiIr3FRIiIiIj0VoESofv372PGjBno3bs3YmJiAACHDh1CWFiYVoMjIiIi0iWNE6HTp0+jdu3auHz5Mnbt2oWkpCQAwI0bNzBr1iytB0hERESkKxonQgEBAZg7dy6OHj0KExMTRfknn3yCS5cuaTU4IiIiIl3SOBG6desWunTpolJuZ2eHly9faiUoIiIioqKgcSJka2uLqKgolfKQkBA4OjpqJSgiIiKioqBxItSrVy9MnToV0dHRkEgkkMvlOH/+PCZNmoT+/fvrIkYiIiIindA4Efruu+/g6ekJJycnJCUlwcvLC82aNUOjRo0wY8YMXcRIREREpBMFfujq48ePERoaiqSkJNStWxceHh7ajq1E4ENXiYiISh91v7+NCnoCZ2dnODs7F3R3IiIiomKncSI0aNCgPLf/8ssvBQ6GiIiIqChpnAi9fv1a6X1GRgZCQ0MRFxeHTz75RGuBEREREemaxonQ7t27VcrkcjlGjBiBKlWqaCUoIiIioqKglYeuGhgYYMKECVi6dKk2DkdERERUJLT29Pn79+8jMzNTW4cjIiIi0jmNu8YmTJig9F4IgaioKBw8eBD+/v5aC4yIiIhI1zROhEJCQpTeGxgYoEKFCli8eHG+M8qIiIiIShKNE6GTJ0/qIg4iIiKiIqe1MUJEREREpY1aLUJ169aFRCJR64DBwcGFCoiIiIioqKiVCHXu3FnHYRAREREVvQI/dFVf8KGrREREpY+6398cI0RERER6S+NZYzKZDEuXLsW2bdvw+PFjpKenK22PjY3VWnBEREREuqRxi1BgYCCWLFmCzz//HPHx8ZgwYQK6du0KAwMDzJ49WwchEhEREemGxonQli1bsG7dOkycOBFGRkbo3bs3fvrpJ8ycOROXLl3SRYxEREREOqFxIhQdHY3atWsDACwtLREfHw8A6NixIw4ePKjd6IiIiIh0SONEqHLlyoiKigIAVKlSBUeOHAEAXL16FVKpVLvREREREemQxolQly5dcPz4cQDA6NGj8c0338DDwwP9+/fns8aIiIioVFF7HaEVK1agb9++sLW1VSq/ePEiLl68CA8PD3Tq1EkXMRYrriNERERU+qj7/a12ImRjY4OMjAx06dIFgwcPxieffKK1YEsyJkJERESlj9YXVIyOjsbq1avx7NkztG7dGm5ubpgzZw6ePHmilYCJiIiIipraiZCZmRn69++PkydPI
iIiAv369cPPP/8MNzc3tG3bFtu3b0dGRoYuYyUiIiLSqgI9YsPd3R3ffvstIiMjcejQIZQrVw4DBgyAo6OjtuNTiI2NRZ8+fWBtbQ1bW1sMHjwYSUlJedYfPXo0qlevDjMzMzg7O2PMmDGK6f5EREREhXrWmEQigZGRESQSCYQQOm0R6tOnD8LCwnD06FEcOHAAZ86cwdChQ3Ot/+zZMzx79gyLFi1CaGgoNmzYgMOHD2Pw4ME6i5GIiIhKlwI9ff7JkydYv349NmzYgMePH6NZs2YYPHgwunXrBlNTU60HeefOHXh5eeHq1auoV68eAODw4cNo3749/v33Xzg4OKh1nO3bt6Nv375ITk6GkZF6j1njYGkiIqLSR93vb7Ufupqeno5du3bhl19+wYkTJ1CpUiX4+/tj0KBBcHd310rQubl48SJsbW0VSRAAtGrVCgYGBrh8+TK6dOmi1nGyfxh5JUFpaWlIS0tTvE9ISCh44ERERFSiqZ0I2dvbIyUlBR07dsT+/fvh5+cHA4NC9aypLTo6GnZ2dkplRkZGKFu2LKKjo9U6xsuXLzFnzpw8u9MAYN68eQgMDCxwrERERFR6qJ3JzJgxA0+ePMGOHTvQrl07rSRBAQEBkEgkeb7u3r1b6PMkJCSgQ4cO8PLywuzZs/OsO23aNMTHxyteXB6AiIjo/aV2i9CECRO0fvKJEydiwIABedZxd3eHvb09YmJilMozMzMRGxsLe3v7PPdPTExE27ZtYWVlhd27d8PY2DjP+lKplM9MIyIi0hNqJ0K6UKFCBVSoUCHfej4+PoiLi8O1a9fg7e0NADhx4gTkcjkaNmyY634JCQnw8/ODVCrFvn37dDKQm4iIiEqvohnkU0g1atRA27ZtMWTIEFy5cgXnz5/HqFGj0KtXL8WMsadPn8LT0xNXrlwBkJUEtWnTBsnJyfj555+RkJCA6OhoREdHQyaTFeflEBERUQlRrC1CmtiyZQtGjRqFli1bwsDAAN26dcMPP/yg2J6RkYHw8HCkpKQAAIKDg3H58mUAQNWqVZWOFRkZCVdX1yKLnYiIiEqmAq0jpE+4jhAREVHpo/V1hLLJZDJs2LABx48fR0xMDORyudL2EydOaB4tERERUTHQOBEaO3YsNmzYgA4dOqBWrVqQSCS6iIuIiIhI5zROhLZu3Ypt27ahffv2uoiHiIiIqMhoPGvMxMREZfAxERERUWmkcSI0ceJELFu2DBxjTURERKWdxl1j586dw8mTJ3Ho0CHUrFlTZaXmXbt2aS04IiIiIl3SOBGytbVV+2nvRERERCWZxonQ+vXrdREHERERUZErFY/YICIiItKFAj1iY8eOHdi2bRseP36M9PR0pW3BwcFaCYyIiIhI1zRuEfrhhx8wcOBAVKxYESEhIWjQoAHKlSuHBw8eoF27drqIkYiIiEgnNE6EfvzxR6xduxbLly+HiYkJpkyZgqNHj2LMmDGIj4/XRYxEREREOqFxIvT48WM0atQIAGBmZobExEQAQL9+/fD7779rNzoiIiIiHdI4EbK3t0dsbCwAwNnZGZcuXQIAREZGcpFFIiIiKlU0ToQ++eQT7Nu3DwAwcOBAjB8/Hq1bt8bnn3/O9YWIiIioVJEIDZtx5HI55HI5jIyyJpxt3boVFy5cgIeHB4YNGwYTExOdBFpcEhISYGNjg/j4eFhbWxd3OERERKQGdb+/NU6E9A0TISIiotJH3e/vAi2oePbsWfTt2xc+Pj54+vQpAGDz5s04d+5cwaIlIiIiKgYaJ0I7d+6En58fzMzMEBISgrS0NABAfHw8vvvuO60HSERERKQrGidCc+fOxerVq7Fu3TqlJ883btyYq0oTERFRqaJxIhQeHo5mzZqplNvY2CAuLk4bMREREREViQKtI3Tv3j2V8nPnzsHd3V0rQREREREVBY0ToSFDhmDs2LG4fPkyJBIJnj17hi1btmDSpEkYMWKELmIkIiIi0gmNnz4fEBAAuVyOl
i1bIiUlBc2aNYNUKsWkSZMwevRoXcRIREREpBMFXkcoPT0d9+7dQ1JSEry8vGBpaant2EoEriNERERU+qj7/a1xi1A2ExMTeHl5FXR3IiIiomKndiI0aNAgter98ssvBQ6GiIiIqCipnQht2LABLi4uqFu3Lp8yT0RERO8FtROhESNG4Pfff0dkZCQGDhyIvn37omzZsrqMjYiIiEin1J4+v3LlSkRFRWHKlCnYv38/nJyc0LNnT/z1119sISIiIqJSqcCzxh49eoQNGzZg06ZNyMzMRFhY2Hs5c4yzxoiIiEofnT59HgAMDAwgkUgghIBMJivoYYiIiIiKjUaJUFpaGn7//Xe0bt0a1apVw61bt7BixQo8fvz4vWwNIiIioveb2oOlR44cia1bt8LJyQmDBg3C77//jvLly+syNiIiIiKdUnuMkIGBAZydnVG3bl1IJJJc6+3atUtrwZUEHCNERERU+mh9Zen+/fvnmQARERERlTYaLahIRERE9D4p8KwxIiIiotKOiRARERHpLSZCREREpLeYCBEREZHeYiJEREREeouJEBEREektJkJERESkt5gIERERkd5iIkRERER6i4kQERER6S0mQkRERKS3mAgRERGR3mIiRERERHqLiRARERHpLSZCREREpLeYCBEREZHeYiJEREREeouJEBEREektJkJERESkt5gIERERkd4qNYlQbGws+vTpA2tra9ja2mLw4MFISkpSa18hBNq1aweJRII9e/boNlAiIiIqNUpNItSnTx+EhYXh6NGjOHDgAM6cOYOhQ4eqtW9QUBAkEomOIyQiIqLSxqi4A1DHnTt3cPjwYVy9ehX16tUDACxfvhzt27fHokWL4ODgkOu+169fx+LFi/H333+jUqVKRRUyERERlQKlokXo4sWLsLW1VSRBANCqVSsYGBjg8uXLue6XkpKCL774AitXroS9vb1a50pLS0NCQoLSi4iIiN5PpSIRio6Ohp2dnVKZkZERypYti+jo6Fz3Gz9+PBo1aoTPPvtM7XPNmzcPNjY2ipeTk1OB4yYiIqKSrVgToYCAAEgkkjxfd+/eLdCx9+3bhxMnTiAoKEij/aZNm4b4+HjF68mTJwU6PxEREZV8xTpGaOLEiRgwYECeddzd3WFvb4+YmBil8szMTMTGxuba5XXixAncv38ftra2SuXdunVD06ZNcerUqRz3k0qlkEql6l4CERERlWLFmghVqFABFSpUyLeej48P4uLicO3aNXh7ewPISnTkcjkaNmyY4z4BAQH48ssvlcpq166NpUuXolOnToUPnoiIiEq9UjFrrEaNGmjbti2GDBmC1atXIyMjA6NGjUKvXr0UM8aePn2Kli1bYtOmTWjQoAHs7e1zbC1ydnaGm5tbUV8CERERlUClYrA0AGzZsgWenp5o2bIl2rdvjyZNmmDt2rWK7RkZGQgPD0dKSkoxRklERESliUQIIYo7iJIsISEBNjY2iI+Ph7W1dXGHQ0RERGpQ9/u71LQIEREREWkbEyEiIiLSW0yEiIiISG8xESIiIiK9xUSIiIiI9BYTISIiItJbTISIiIhIbzERIiIiIr3FRIiIiIj0FhMhIiIi0ltMhIiIiEhvMREiIiIivcVEiIiIiPQWEyEiIiLSW0yEiIiISG8xESIiIiK9xUSIiIiI9BYTISIiItJbTISIiIhIbzERIiIiIr3FRIiIiIj0FhMhIiIi0ltMhIiIiEhvMREiIiIivcVEiIiIiPQWEyEiIiLSW0yEiIiISG8xESIiIiK9xUSIiIiI9BYTISIiItJbTISIiIhIbzERIiIiIr3FRIiIiIj0FhMhIiIi0ltMhIiIiEhvMREiIiIivcVEiIiIiPQWEyEiIiLSW0yEiIiISG8xESIiIiK9ZVTcAZR0QggAQEJCQjFHQkREROrK/t7O/h7PDROhfCQmJgIAnJycijkSIiIi0lRiYiJsbGxy3S4R+aVKek4ul+PZs2ewsrKCRCIps
vMmJCTAyckJT548gbW1dZGdl94fvIdIG3gfUWEV1z0khEBiYiIcHBxgYJD7SCC2COXDwMAAlStXLrbzW1tb88OHCoX3EGkD7yMqrOK4h/JqCcrGwdJERESkt5gIERERkd5iIlRCSaVSzJo1C1KptLhDoVKK9xBpA+8jKqySfg9xsDQRERHpLbYIERERkd5iIkRERER6i4kQERER6S0mQkRERKS3mAgRERGR3mIiVIrt2rULbdq0Qbly5SCRSHD9+vXiDolKOCEEZs6ciUqVKsHMzAytWrVCRESEUp3//e9/aNSoEczNzWFra1s8gVKJoc49Exsbiz59+sDa2hq2trYYPHgwkpKSFNtTU1MxYMAA1K5dG0ZGRujcuXMRXwUVJW3cMwBw8+ZNNG3aFKampnBycsL333+vtD0sLAzdunWDq6srJBIJgoKCChQvE6FSLDk5GU2aNMGCBQuKOxQqJb7//nv88MMPWL16NS5fvgwLCwv4+fkhNTVVUSc9PR09evTAiBEjijFSKinUuWf69OmDsLAwHD16FAcOHMCZM2cwdOhQxXaZTAYzMzOMGTMGrVq1Ko7LoCKkjXsmISEBbdq0gYuLC65du4aFCxdi9uzZWLt2raJOSkoK3N3dMX/+fNjb2xc8YEE64+vrK7766ivx1VdfCWtra1GuXDkxY8YMIZfLtXqeyMhIAUCEhIRo9bhU9HR5z8jlcmFvby8WLlyoKIuLixNSqVT8/vvvKvXXr18vbGxsCn1e0q3ivmdu374tAIirV68q6hw6dEhIJBLx9OlTlWP6+/uLzz77rNCxUcGVhnvmxx9/FGXKlBFpaWmKOlOnThXVq1fP8bwuLi5i6dKlBYqZLUI6tnHjRhgZGeHKlStYtmwZlixZgp9++gkAMHz4cFhaWub5Iv2jq3smMjIS0dHRSn+R29jYoGHDhrh48aLOr4t0pzjvmYsXL8LW1hb16tVT1GnVqhUMDAxw+fJlHV0xFVZJv2cuXryIZs2awcTERFHHz88P4eHheP36tVZ/Fnz6vI45OTlh6dKlkEgkqF69Om7duoWlS5diyJAh+PbbbzFp0qTiDpFKGF3dM9HR0QCAihUrKpVXrFhRsY1Kp+K8Z6Kjo2FnZ6e03cjICGXLluV9VYKV9HsmOjoabm5uKsfI3lamTJkCxZcTJkI69vHHH0MikSje+/j4YPHixZDJZLCzs1O5GYh4z5CmeM+QpnjP/IddY8WIXWOkqcLcM9mDCZ8/f65U/vz588INNKQSTdf3jL29PWJiYpS2Z2ZmIjY2lvdVKVUS7hl7e/scj/H2ObSFLUI69m4f+aVLl+Dh4QFDQ0N2jVGOdHXPuLm5wd7eHsePH0edOnUAZM3MuHz5MmeIlXLFec/4+PggLi4O165dg7e3NwDgxIkTkMvlaNiwYcEvinSqpN8zPj4+mD59OjIyMmBsbAwAOHr0KKpXr67VbjEAnDWmS76+vsLS0lKMHz9e3L17V/z222/CwsJCrF69ukDHq169uti1a5fi/atXr0RISIg4ePCgACC2bt0qQkJCRFRUlLYugYqYru+Z+fPnC1tbW7F3715x8+ZN8dlnnwk3Nzfx5s0bRZ1Hjx6JkJAQERgYKCwtLUVISIgICQkRiYmJhb4+0r6ScM+0bdtW1K1bV1y+fFmcO3dOeHh4iN69eysdNywsTISEhIhOnTqJ5s2bK+4rKnql4Z6Ji4sTFStWFP369ROhoaFi69atwtzcXKxZs0ZRJy0tTXEfVapUSUyaNEmEhISIiIgIjeJnIqRDvr6+YuTIkWL48OHC2tpalClTRnz99dcFnqIIQKxfv17xfv369QKAymvWrFnauQAqcrq+Z+Ryufjmm29ExYoVhVQqFS1bthTh4eFK+/j7++d4X508ebIQV0a6UhLumVevXonevXsLS0tLYW1tLQYOHKiSOLu4uOR4X1HRKy33zI0bN0STJk2EVCoVjo6OYv78+Urbs5eOeffl6+urUfyS/
78I0oHmzZujTp06BV7tkvQP7xnSFO8Z0hTvGWUcLE1ERER6i4kQERER6S12jREREZHeYosQERER6S0mQkRERKS3mAgRERGR3mIiRERERHqLiRARERHpLSZCREREpLeYCBEREZHeYiJEREREeuv/AFRwF2Vv/fzlAAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "datax = [get_noisy_data([0]*4,['x']*4,[0,1,2,3],[.1,.1,.1,.1]),\n", " get_noisy_data([0]*4,['x']*4,[0,1,2,3],[.01,.01,.01,.01]),\n", @@ -1090,21 +804,10 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": null, "id": "98aa7b73-7139-4a8c-a8e4-dc00a3e1e276", "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlYAAAGzCAYAAADtxFmzAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjcsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvTLEjVAAAAAlwSFlzAAAPYQAAD2EBqD+naQAAckNJREFUeJzt3XdYFFfbBvB7aUsHC0iRqqhgF9RgNxYsMbGXaOw92Bu+FmwJGnuMwaixxmisicYSe6+xF0SsWEBUpEvbPd8ffGxYWWBXl7Jy/65rr2TPnJl5ZmfcfTjnzBmJEEKAiIiIiD6aXmEHQERERPSpYGJFREREpCVMrIiIiIi0hIkVERERkZYwsSIiIiLSEiZWRERERFrCxIqIiIhIS5hYEREREWkJEysiIiIiLWFiRUSfnB9++AGVKlWCXC4v7FB0wrp16yCRSPD48ePCDoW05Pjx45BIJDh+/Hi+bL979+7o2rVrvmxb1zGx0mGZX4YSiQSnT5/OtlwIAScnJ0gkEnzxxReFEGH+++OPP9CrVy94eHhAIpGgSZMmGq3/6tUrjBo1CpUqVYKJiQlsbW1Rp04dTJo0CQkJCYp6ffv2VXzWEokE5ubmcHd3R+fOnbFjxw61f8BnzJihtJ33X5GRkRrFX1CaNGmiFKeRkRHc3NwwePBgPH36tLDDUxIXF4d58+Zh0qRJ0NNT/opLTk7G4sWLUbduXVhZWcHY2BgVKlSAv78/7t27l69x/fzzz1i3bl2+7qOoOHv2LGbMmIGYmBi113n+/Dm6du0Ka2trWFpa4quvvsLDhw/zL8gcZF7r7dq1y7bs8ePHkEgkWLBggdb25+rqColEghEjRmRblpkcbd++XWv705ZJkyZhx44duH79emGHUuQYFHYA9PGMjY3x+++/o0GDBkrlJ06cwLNnzyCVSgspsvwXHByMy5cvo3bt2njz5o1G60ZHR8PHxwdxcXHo378/KlWqhDdv3uDGjRsIDg7GsGHDYG5urqgvlUqxevVqAMC7d+/w5MkT7NmzB507d0aTJk3w119/wdLSUu24s247k7W1tUbHUJDKli2LoKAgAEBqairu3LmDFStW4J9//kFISAhMTU0LOcIMa9asQXp6Onr06KFU/vr1a7Rq1QqXL1/GF198ga+//hrm5uYIDQ3Fli1bsHLlSqSmpuZbXD///DNKly6Nvn375ts+ioqzZ89i5syZ6Nu3r1rXdEJCApo2bYrY2Fj873//g6GhIRYvXozGjRvj2rVrKFWqVP4H/Z6///4bly9fhre3d4Hsb9WqVZg8eTIcHBy0sr1GjRrh3bt3MDIy0sr23lezZk34+Phg4cKF2LBhQ77sQ2cJ0llr164VAETHjh1F6dKlRVpamtLyQYMGCW9vb+Hi4iLatm1bSFF+mGPHjgkA4tGjR7nWCw8PFzKZTAghROXKlUXjxo3V3scPP/wgAIgzZ85kWxYbGyvevXuneN+nTx9hZmamcjtBQUECgOjatWue+wwMDBQAxKtXr9SOM9O7d+8Ux/q+hIQEjbeXlUwmUzre9zVu3FhUr
lw5W/lPP/0kAIiDBw9+1P4zfexxCCFEtWrVRK9evbKVt23bVujp6Ynt27dnW5acnCzGjRv30fvOjabXpzrS0tJESkrKR28n87skr39v6po/f75G25s3b54AIC5evKgoCwkJEfr6+mLy5MlaiUldjRs3Fs7OzqJEiRKiXbt2SssePXokAIj58+drbX8uLi6icuXKwsDAQIwYMUJpWeb34LZt27S2P21asGCBMDMzE/Hx8YUdSpHCrsBPQI8ePfDmzRscOnRIUZaamort27fj66+/VrmOXC7HkiVLULlyZRgbG6NMmTIYMmQI3r59q1Tvr7/+Qtu2beHg4ACpVIpy5cph9uzZkMlkSvWaNGmCKlWq4M6dO2jatClMTU3h6OiIH374QfsHnIWTk1O27h51PXjwAPr6+vjss8+yLbO0tISxsbFa2wkICEDLli2xbds2rXUnZXYBbNmyBVOnToWjoyNMTU0RFxeHvn37wtzcHA8ePECbNm1gYWGBnj17AgASExMxbtw4ODk5QSqVomLFiliwYAGEEErbl0gk8Pf3x6ZNm1C5cmVIpVIcOHBA4zjt7OwAAAYG/zV+P3nyBMOHD0fFihVhYmKCUqVKoUuXLtnG72R2ZZ84cQLDhw+Hra0typYtCwCIj4/H6NGj4erqCqlUCltbW7Ro0QJXrlzJNZ5Hjx7hxo0baN68uVL5hQsXsHfvXgwYMACdOnXKtp5UKs3WvXP37l107twZJUuWhLGxMXx8fLB7926Vx3DmzBmMHTsWNjY2MDMzQ4cOHfDq1StFPVdXV9y+fRsnTpxQdKdm7baOiYnB6NGjFeetfPnymDdvnlIXc9ZuqCVLlqBcuXKQSqW4c+eO2vECwO3bt/H555/DxMQEZcuWxZw5c9Tuyr5x4wb69u0Ld3d3GBsbw87ODv3791dqLZ4xYwYmTJgAAHBzc1Mcb27jt7Zv347atWujdu3airJKlSqhWbNm2Lp1q1qxaZOFhQXGjBmDPXv25HnNAcDDhw/RpUsXlCxZEqampvjss8+wd+9etffn6uqK3r17Y9WqVXjx4kWe9a9evYrWrVvD0tIS5ubmaNasGc6fP69UR9UYq7CwMHTq1Al2dnYwNjZG2bJl0b17d8TGxiqt+9tvv8Hb2xsmJiYoWbIkunfvrrLLv0WLFkhMTFT67SF2BX4SXF1d4evri82bN6N169YAgP379yM2Nhbdu3fHjz/+mG2dIUOGYN26dejXrx9GjhyJR48e4aeffsLVq1dx5swZGBoaAsj44TA3N8fYsWNhbm6Oo0ePYvr06YiLi8P8+fOVtvn27Vu0atUKHTt2RNeuXbF9+3ZMmjQJVatWVcSVk9jYWKSlpSm9z9xm1i4zY2NjlV1oH8LFxQUymQwbN25Enz59Pmpb33zzDQ4ePIhDhw6hQoUKedaPjo7OVmZgYJCt22T27NkwMjLC+PHjkZKSomjWT09Ph5+fHxo0aIAFCxbA1NQUQgh8+eWXOHbsGAYMGIAaNWrgn3/+wYQJE/D8+XMsXrxYadtHjx7F1q1b4e/vj9KlS8PV1TXXmGUyGV6/fg0ASEtLQ0hICAIDA1G+fHnUr19fUe/SpUs4e/YsunfvjrJly+Lx48cIDg5GkyZNcOfOnWxdhsOHD4eNjQ2mT5+OxMREAMDQoUOxfft2+Pv7w8vLC2/evMHp06cREhKCWrVq5Rjj2bNnASBbncwE45tvvsn1GDPdvn0b9evXh6OjIwICAmBmZoatW7eiffv22LFjBzp06KBUf8SIEShRogQCAwPx+PFjLFmyBP7+/vjjjz8AAEuWLMGIESNgbm6OKVOmAADKlCkDAEhKSkLjxo3x/PlzDBkyBM7Ozjh79iwmT56MiIgILFmyRGlfa9euRXJyMgYPHgypVIqSJUuqHW9kZCSaNm2K9PR0Rb2VK1fCxMRErc/l0KFDePjwIfr16wc7Ozvcvn0bK1euxO3bt3H+/HlIJBJ07NgR9+7dw+bNm7F48
WKULl0aAGBjY6Nym3K5HDdu3ED//v2zLatTpw4OHjyI+Ph4WFhY5BjX+98fOdHk+2PUqFFYvHgxZsyYoTJBzfTy5UvUq1cPSUlJGDlyJEqVKoX169fjyy+/xPbt27NdKzmZMmUKNmzYgLlz56r8zs50+/ZtNGzYEJaWlpg4cSIMDQ3xyy+/oEmTJjhx4gTq1q2rcr3U1FT4+fkhJSUFI0aMgJ2dHZ4/f46///4bMTExsLKyAgB89913mDZtGrp27YqBAwfi1atXWLZsGRo1aoSrV68qfUd5eXnBxMQEZ86cUfs4i4XCbjKjD5fZfH/p0iXx008/CQsLC5GUlCSEEKJLly6iadOmQgiRrSvw1KlTAoDYtGmT0vYOHDiQrTxze1kNGTJEmJqaiuTkZEVZ48aNBQCxYcMGRVlKSoqws7MTnTp1yvNYMtfP69WnT58ct6FpV0tkZKSwsbERAESlSpXE0KFDxe+//y5iYmKy1c2tK1AIIa5evSoAiDFjxuS6z8yuQFWvihUrKupldgG4u7tnOwd9+vQRAERAQIBS+Z9//ikAiDlz5iiVd+7cWUgkEnH//n1FGQChp6cnbt++nWu8mXI6P56enuLhw4dKdVVdM+fOnct2fWRevw0aNBDp6elK9a2srMS3336rVmxZTZ06VQDI1jXRoUMHAUC8fftWre00a9ZMVK1aVekal8vlol69esLDwyPbMTRv3lzI5XJF+ZgxY4S+vr7StZTT9Tl79mxhZmYm7t27p1QeEBAg9PX1RXh4uBDiv24oS0tLERUV9UHxjh49WgAQFy5cUJRFRUUJKysrtbruVJ3bzZs3CwDi5MmTijJNugJfvXolAIhZs2ZlW7Z8+XIBQNy9ezfXbWjj+yPrtjK7vWfOnCkAiMuXLwshVHcFZn6mp06dUpTFx8cLNzc34erqmmP3faas38/9+vUTxsbG4sWLF0II1V2B7du3F0ZGRuLBgweKshcvXggLCwvRqFEjRVnmuseOHRNC/PcdlVu34uPHj4W+vr747rvvlMpv3rwpDAwMspULIUSFChVE69atcz3G4oZdgZ+Irl274t27d/j7778RHx+Pv//+O8duwG3btsHKygotWrTA69evFS9vb2+Ym5vj2LFjirpZ/5KNj4/H69ev0bBhQyQlJeHu3btK2zU3N0evXr0U742MjFCnTh217uxZuHAhDh06pHhldsv89ttvSuUTJ07U6HPJTZkyZXD9+nUMHToUb9++xYoVK/D111/D1tYWs2fPztZ9lpvMv4Lj4+PVqr9jxw6l4zp06BDWrl2brV6fPn1ybE0YNmyY0vt9+/ZBX18fI0eOVCofN24chBDYv3+/Unnjxo3h5eWlVrxARstoZqz79+/HkiVLEBsbi9atWyt1e2WNNy0tDW/evEH58uVhbW2tsltl0KBB0NfXVyqztrbGhQsX1OoWyerNmzcwMDDI1ioRFxcHALm2emSKjo7G0aNH0bVrV8U1//r1a7x58wZ+fn4ICwvD8+fPldYZPHgwJBKJ4n3Dhg0hk8nw5MmTPPe3bds2NGzYECVKlFD699i8eXPIZDKcPHlSqX6nTp2UWn80iXffvn347LPPUKdOHcX6NjY2iq7kvGQ9t8nJyXj9+rWiK12dLjNV3r17BwAqb7LJ7I7PrJOT978/cnpp+v0xatQolChRAjNnzsyxzr59+1CnTh2lm4fMzc0xePBgPH78WNFVq46pU6ciPT0dc+fOVblcJpPh4MGDaN++Pdzd3RXl9vb2+Prrr3H69GnFtf6+zBapf/75B0lJSSrr7Ny5E3K5HF27dlW6Fu3s7ODh4aH025Ap87ql/7Ar8BNhY2OD5s2b4/fff0dSUhJkMhk6d+6ssm5YWBhiY2Nha2urcnlUVJTi/2/fvo2pU6fi6NGj2f7Bvt8vX7ZsWaUfFyDjH92NGzfyjP/9O28yx+zUr18/zy6qj2Fvb4/g4GD8/PPPCAsLwz///IN58+Zh+vTpsLe3x
8CBA9XaTubUDOr8cAMZd+xkdpHkxs3NTWW5gYGBYjxSpidPnsDBwSFbDJ6enorl6mw7J2ZmZkpjl1q1aoUGDRrAx8cHc+fOxcKFCwFk/AgGBQVh7dq1eP78uVKC+v41k1McP/zwA/r06QMnJyd4e3ujTZs26N27t9KPiSYy79aMj4/P8y61+/fvQwiBadOmYdq0aSrrREVFwdHRUfHe2dlZaXmJEiUAINuYRVXCwsJw48aNHLvKsv57BLJ/XprE++TJE5VdRRUrVswzTiAjiZs5cya2bNmSLS5V51YdmclaSkpKtmXJyclKdXKSX3fuWVlZYfTo0QgMDMTVq1cV5zWrnD7TrP/uqlSpotb+3N3d8c0332DlypUICAjItvzVq1dISkpSeb48PT0hl8vx9OlTVK5cOdtyNzc3jB07FosWLcKmTZvQsGFDfPnll+jVq5ci6QoLC4MQAh4eHirjyxwikpUQItv3fnHHxOoT8vXXX2PQoEGIjIxE69atc/wBkcvlsLW1xaZNm1Quz/yCj4mJQePGjWFpaYlZs2ahXLlyMDY2xpUrVzBp0qRsA17fb3XIpEnLT2GRSCSoUKECKlSogLZt28LDwwObNm1SO7G6desWAKB8+fJajSunHxSpVPrBg/bz2rYmvL29YWVlpdSqMmLECKxduxajR4+Gr68vrKysIJFI0L17d5WDpFXF0bVrVzRs2BC7du3CwYMHMX/+fMybNw87d+7MdbxeqVKlkJ6enm1MTqVKlQAAN2/eRMOGDXM9pswYx48fDz8/P5V13j/PH3Pty+VytGjRIsfWlPfH7L3/eX1IvB+qa9euOHv2LCZMmIAaNWrA3NwccrkcrVq1+uDJWEuWLAmpVIqIiIhsyzLL8pqCIDo6Wq2pMkxMTBRJhLoyx1rNnDkz23i3/DBlyhRs3LgR8+bNQ/v27bW67YULF6Jv377466+/cPDgQYwcORJBQUE4f/48ypYtC7lcDolEgv3796u8plWNT3v79m2OiVhxxcTqE9KhQwcMGTIE58+fVwyaVaVcuXI4fPgw6tevn+uP6/Hjx/HmzRvs3LkTjRo1UpQ/evRIq3EXNe7u7ihRooTKL/qcbNy4ERKJBC1atMjHyHLn4uKCw4cPZ0sqMrtsXVxc8mW/MplMaTLV7du3o0+fPooWLCCj5UGTySKBjNbE4cOHY/jw4YiKikKtWrXw3Xff5ZpYZSZQjx49QrVq1RTl7dq1Q1BQEH777bc8E6vMVjFDQ8Nsdxd+jJz+qi9XrhwSEhI+eF+axOvi4oKwsLBs5aGhoXnu5+3btzhy5AhmzpyJ6dOnK8pVbU+TFgw9PT1UrVoV//77b7ZlFy5cgLu7e54twR07dsSJEyfy3FefPn00nqQ1s9VqxowZKm9ycXFxUfn5fei/u3LlyqFXr1745ZdfsrWE2djYwNTUNMf96enpwcnJKdftV61aFVWrVsXUqVNx9uxZ1K9fHytWrMCcOXNQrlw5CCHg5uam1k046enpePr0Kb788kuNjvFTxzFWnxBzc3MEBwdjxowZKmcNztS1a1fIZDLMnj0727L09HTFD2DmXyxZ/+pOTU3Fzz//rN3AVWjSpAmEEPnaDXjhwgXFXWhZXbx4EW/evFG7e2Tu3Lk4ePAgunXrVqh/ubVp0wYymQw//fSTUvnixYshkUjyvDPzQxw7dgwJCQmoXr26okxfXz9bS82yZcuyTdGRE5lMlq1bydbWFg4ODiq7i7Ly9fUFgGw/0r6+vmjVqhVWr16NP//8M9t6qampGD9+vGJfTZo0wS+//KIyuc46nkwTZmZmKpPLrl274ty5c/jnn3+yLYuJiUF6enqu29Uk3jZt2uD8+fO4ePGi0vKcWq+zUvV9AEBlK46ZmZkifnV07twZly5dUjpvoaGhOHr0KLp06ZLn+vk1xirT6NGjYW1tjVmzZmVb1qZNG1y8eBHnzp1TlCUmJmLlypVwdXXVaBxjpqlTpyItLS3bdDX6+
vpo2bIl/vrrL6XpK16+fKmYJDqnSYrj4uKyXUtVq1aFnp6e4t9Vx44doa+vj5kzZ2Y7z0KIbJMw37lzB8nJyahXr57Gx/gpY4vVJ0adaQMaN26MIUOGICgoCNeuXUPLli1haGiIsLAwbNu2DUuXLkXnzp1Rr149lChRAn369MHIkSMhkUiwcePGfOnaO3ToEF6+fJlnvXLlyil+PAHg5MmTim6oV69eITExEXPmzAGQMY4pa0vb+zZu3IhNmzahQ4cO8Pb2hpGREUJCQrBmzRoYGxvjf//7n1L99PR0/PbbbwAyWmCePHmC3bt348aNG2jatClWrlyp9vFu375dZbN6ixYtFLfha6pdu3Zo2rQppkyZgsePH6N69eo4ePAg/vrrL4wePRrlypX7oO1mio2NVRx/eno6QkNDERwcDBMTE6XxIF988QU2btwIKysreHl54dy5czh8+LDas2fHx8ejbNmy6Ny5M6pXrw5zc3McPnwYly5dUmoFU8Xd3R1VqlTB4cOHs92+v2HDBrRs2RIdO3ZEu3bt0KxZM5iZmSEsLAxbtmxBRESE4qaJ5cuXo0GDBqhatSoGDRoEd3d3vHz5EufOncOzZ88+6DEe3t7eCA4Oxpw5c1C+fHnY2tri888/x4QJE7B792588cUX6Nu3L7y9vZGYmIibN29i+/btePz4cZ7j8dSNd+LEidi4cSNatWqFUaNGKaZbcHFxyXMspKWlJRo1aoQffvgBaWlpcHR0xMGDB1W2YGeOeZoyZQq6d+8OQ0NDtGvXTpFwvW/48OFYtWoV2rZti/Hjx8PQ0BCLFi1CmTJlMG7cOLU+2/xkZWWFUaNGqRzEHhAQoJjqZuTIkShZsiTWr1+PR48eYceOHR/UZZ/ZarV+/fpsy+bMmYNDhw6hQYMGGD58OAwMDPDLL78gJSUl13kDjx49Cn9/f3Tp0gUVKlRAeno6Nm7cCH19fcXcbuXKlcOcOXMwefJkPH78GO3bt4eFhQUePXqEXbt2YfDgwYo/QICM721TU9NCbakvkgr8PkTSmqzTLeQmp5nXV65cKby9vYWJiYmwsLAQVatWFRMnTlTc6iuEEGfOnBGfffaZMDExEQ4ODmLixInin3/+UbqNV4icZ+bu06ePcHFxyfNYPvR26dymLwgMDMx1nzdu3BATJkwQtWrVEiVLlhQGBgbC3t5edOnSRVy5ciXbcWTdtqmpqXB1dRWdOnUS27dvz/OWanXizfqZ5jbjcm5TP8THx4sxY8YIBwcHYWhoKDw8PMT8+fOVpgIQImO6BU2mM3j//EgkElGyZEnx5ZdfKm5Fz/T27VvRr18/Ubp0aWFubi78/PzE3bt3hYuLi9L5y+n6TUlJERMmTBDVq1cXFhYWwszMTFSvXl38/PPPasW6aNEiYW5urnJqgKSkJLFgwQJRu3ZtYW5uLoyMjISHh4cYMWKE0nQUQgjx4MED0bt3b2FnZycMDQ2Fo6Oj+OKLL5Rmbs/pGN6/1V2IjOk92rZtKywsLAQApakX4uPjxeTJk0X58uWFkZGRKF26tKhXr55YsGCBSE1NFULkPeu3OvEKkXHdN27cWBgbGwtHR0cxe/Zs8euvv6o1PcKzZ89Ehw4dhLW1tbCyshJdunQRL168UPnvbfbs2cLR0VHo6empte2nT5+Kzp07C0tLS2Fubi6++OILERYWlus6+SGn77K3b98qpqV4/xw8ePBAdO7cWVhbWwtjY2NRp04d8ffff6u1v5y+n8PCwoS+vr7K74ErV64IPz8/YW5uLkxNTUXTpk3F2bNnleq8fw0+fPhQ9O/fX5QrV04YGxuLkiVLiqZNm4rDhw9n2/eOHTtEgwYNhJmZmTAzMxOVKlUS3377rQgNDVWqV7duXZVPOSjuJELowMhiIiI1xcbGwt3dHT/88AMGDBhQ2OEQfZKuXbuGWrVq4cqVK6hRo0Zhh1OkMLEiok/OvHnzsHbtWty5c+ej754kouwy7/ItjEcOFXVMr
IiIiIi0hH/KEREREWkJEysiIiIiLWFiRURERKQlTKyIiIiItIQThOZBLpfjxYsXsLCw4IMmiYiIdIQQAvHx8XBwcCjQu4OZWOXhxYsXeT57iYiIiIqmp0+fomzZsgW2PyZWech8+OfTp09zfAYTERER5SL+JZCQ5bFlT84C54OBd1meP2haGqg7FHDJ8uxB8zKAxYc95isuLg5OTk55PsRb25hY5SGz+8/S0pKJFRER0YewtATw/w+pv7MbOPs9AAFIswyxkUVnlJfdAHh9qbVdF/QwHp0bvL58+XK4urrC2NgYdevWVXpKuyoxMTH49ttvYW9vD6lUigoVKmDfvn0FFC0REREpyGXAgUnIeOzo+/6/7EBARj0dpVOJ1R9//IGxY8ciMDAQV65cQfXq1eHn54eoqCiV9VNTU9GiRQs8fvwY27dvR2hoKFatWgVHR8cCjpyIiIjw5CwQ9yKXCgKIe55RT0fpVFfgokWLMGjQIPTr1w8AsGLFCuzduxdr1qxBQEBAtvpr1qxBdHQ0zp49C0NDQwCAq6trQYZMREREmbKOs9JGvSJIZ1qsUlNTcfnyZTRv3lxRpqenh+bNm+PcuXMq19m9ezd8fX3x7bffokyZMqhSpQq+//57yGQ5NzGmpKQgLi5O6UVERERaYK7mQHR16xVBOpNYvX79GjKZDGXKKH/YZcqUQWRkpMp1Hj58iO3bt0Mmk2Hfvn2YNm0aFi5ciDlz5uS4n6CgIFhZWSlenGqBiIhIS1zqAZYOAHIaUC4BLB2V7wzUMTqTWH0IuVwOW1tbrFy5Et7e3ujWrRumTJmCFStW5LjO5MmTERsbq3g9ffq0ACMmIiL6BMVHAi+uAZE3gbrDoHrwOjLK6w7NqPfiWsZ6OkZnxliVLl0a+vr6ePlSud/15cuXsLOzU7mOvb09DA0Noa+vryjz9PREZGQkUlNTYWRklG0dqVQKqVSq3eCJiIiKs3/XAifmqlf30LT//r9xANB0cv7ElE90JrEyMjKCt7c3jhw5gvbt2wPIaJE6cuQI/P39Va5Tv359/P7775DL5Yrp7O/duwd7e3uVSRURERHlA59+QMXWymVyGRB5A0h6A5iWAuyqAXr6ynUsVDecFGU6k1gBwNixY9GnTx/4+PigTp06WLJkCRITExV3Cfbu3RuOjo4ICgoCAAwbNgw//fQTRo0ahREjRiAsLAzff/89Ro4cWZiHQUREVLxY2KlOksp6F3ws+UynEqtu3brh1atXmD59OiIjI1GjRg0cOHBAMaA9PDxc6UGLTk5O+OeffzBmzBhUq1YNjo6OGDVqFCZNmlRYh0BERESfMIkQIqcRZISMZw1ZWVkhNjaWj7QhIiLSEYX1+/1J3xVIREREVJCYWBERERFpCRMrIiIiIi1hYkVERESkJUysiIiIiLSEiRURERGRljCxIiIiItISJlZEREREWsLEioiIiEhLdOqRNsVOfGTGS1M5PZOJiIiI8hUTq6Ls37XAibmar9c4AGg6WfvxEBERUa6YWBVlPv2Aiq3/e//oJHBmKZD0+r8yMxug3kjArdF/ZWytokxs9SQiKlBMrIqyrD9ud3YDh6YDeO+Z2YmvM8q7bgC8vizwEKmIY6snEVGBYmKlC+Qy4MAkZEuqgP8vkwAHAoBKbQE9/QIOjoo0tnoSERUo3hWoC56cBeJe5FJBAHHPM+oRZWVhBzjUyHjFhGe0bmZNqoD/Wj1jwv+ry8SKiOiDMLHSBQkvtVuPip88Wz2R0eoplxVkVEREnxwmVrrAvIx261Hxw1ZPIqICwcRKF7jUAywdAEhyqCABLB0z6hGpwlZPIqICwcRKF+jpA63m/f+b95Or/3/fai4HrlPO2OpJRFQgmFgVZfGRwItrGS9rZ6DFLMC0lHIdM5uMcmvn/+p+yLxF9GljqycRUYHgdAtFmTpzECVGAYemKZdxDiJ6X2ar59beyEiusg5iZ6snEZG2MLEqyt6fg0hdvFWeVPH6MmMi2
f0TgfiI/8otHTKSKk4wS0T00SRCCFX3X9P/i4uLg5WVFWJjY2FpaVnY4RBpRtUjbVITgHVtM/6/9TzAsXb2lio+0oaIdFxh/X6zxYroU5ZXd/L+SarL2Z1MRPRBmFgRfcrYnUxEVKCYWBF9ytilR0RUoDjdAhEREZGWMLEiIiIi0hImVkRERERawsSKiIiISEuYWBERERFpCRMrIiIiIi1hYkVERESkJUysiIiIiLSEiRURERGRljCxIiIiItISJlZEREREWsLEioiIiEhLmFgRERERaQkTKyIiIiItYWJFREREpCVMrIiIiIi0hIkVERERkZYwsSIiIiLSEiZWRERERFrCxIqIiIhIS5hYEREREWkJEysiIiIiLdG5xGr58uVwdXWFsbEx6tati4sXL6q13pYtWyCRSNC+ffv8DZCIiIiKLZ1KrP744w+MHTsWgYGBuHLlCqpXrw4/Pz9ERUXlut7jx48xfvx4NGzYsIAiJSIiouJIpxKrRYsWYdCgQejXrx+8vLywYsUKmJqaYs2aNTmuI5PJ0LNnT8ycORPu7u4FGC0REREVNzqTWKWmpuLy5cto3ry5okxPTw/NmzfHuXPnclxv1qxZsLW1xYABA9TaT0pKCuLi4pReREREROrQmcTq9evXkMlkKFOmjFJ5mTJlEBkZqXKd06dP49dff8WqVavU3k9QUBCsrKwULycnp4+Km4iIiIoPnUmsNBUfH49vvvkGq1atQunSpdVeb/LkyYiNjVW8nj59mo9REhER0afEoLADUFfp0qWhr6+Ply9fKpW/fPkSdnZ22eo/ePAAjx8/Rrt27RRlcrkcAGBgYIDQ0FCUK1cu23pSqRRSqVTL0RMREVFxoDMtVkZGRvD29saRI0cUZXK5HEeOHIGvr2+2+pUqVcLNmzdx7do1xevLL79E06ZNce3aNXbxERF9DLkMeHQKuLk9479yWWFHRFQk6EyLFQCMHTsWffr0gY+PD+rUqYMlS5YgMTER/fr1AwD07t0bjo6OCAoKgrGxMapUqaK0vrW1NQBkKyciolzER2a8Mj06CZz9EUh89V+ZmQ1QbyTg1ui/Mgu7jBdRMaJTiVW3bt3w6tUrTJ8+HZGRkahRowYOHDigGNAeHh4OPT2daYQjItIN/64FTszNvU7iK+DQNOWyxgFA08n5FxdRESQRQojCDqIoi4uLg5WVFWJjY2FpaVnY4RARFbzMFiu5DNjcTbml6n1mtkCPLYCePlusqFAV1u83m3eIiCh3FnaAQw0gLSn3pAoAEqMy6jnUYFJFxRITKyIiUk/Cy7zraFKP6BPExIqIiNRjXibvOprUI/oEMbEiIqLcxUcCL64BhqYZd//lxsw2o96La8p3EhIVExrdFSiXy3HixAmcOnUKT548QVJSEmxsbFCzZk00b96cc0MREX2K1LkrMFNiFLD684z/512BVAypdVfgu3fvsHDhQgQHByM6Oho1atSAg4MDTExMEB0djVu3buHFixdo2bIlpk+fjs8++6wgYi8QvCuQiIo9teaxsgXqjeA8VlRkFNbvt1otVhUqVICvry9WrVqFFi1awNDQMFudJ0+e4Pfff0f37t0xZcoUDBo0SOvBEhFRIXg/QXKoAfh+Czw5mzFQ3bwM4FIvY4oFomJOrRarkJAQeHp6qrXBtLQ0hIeHq3wOny5iixUREZHuKdLzWKmbVAGAoaHhJ5NUEREREWnig+4KPHXqFHr16gVfX188f/4cALBx40acPn1aq8ERERER6RKNE6sdO3bAz88PJiYmuHr1KlJSUgAAsbGx+P7777UeIBEREZGu0DixmjNnDlasWIFVq1YpDWKvX78+rly5otXgiIiIiHSJxolVaGgoGjVqlK3cysoKMTEx2oiJiIiISCdpnFjZ2dnh/v372cpPnz4Nd3d3rQRFREREpIs0TqwGDRqEUaNG4cKFC5BIJHjx4gU2bdqE8ePHY9iwYfkRIxEREZFO0OiRNgAQEBAAuVyOZ
s2aISkpCY0aNYJUKsX48eMxYsSI/IiRiIiISCeoNUGoKqmpqbh//z4SEhLg5eUFc3NzbcdWJHCCUCIiIt1TpB9po4qRkRG8vLy0GQsRERGRTtM4sWratCkkEkmOy48ePfpRARERERHpKo0Tqxo1aii9T0tLw7Vr13Dr1i306dNHW3ERERER6RyNE6vFixerLJ8xYwYSEhI+OiAiIiIiXfVBzwpUpVevXlizZo22NkdERESkc7SWWJ07dw7Gxsba2hwRERGRztG4K7Bjx45K74UQiIiIwL///otp06ZpLTAiIiIiXaNxYmVlZaX0Xk9PDxUrVsSsWbPQsmVLrQVGREREpGs0TqzWrl2bH3EQERER6TytjbEiIiIiKu7UarEqUaJErpOCZhUdHf1RARERERHpKrUSqyVLluRzGERERES6T63EijOqExEREeXtgx/CDADJyclITU1VKivIJ0gTERERFSUaD15PTEyEv78/bG1tYWZmhhIlSii9iIiIiIorjROriRMn4ujRowgODoZUKsXq1asxc+ZMODg4YMOGDfkRIxEREZFO0LgrcM+ePdiwYQOaNGmCfv36oWHDhihfvjxcXFywadMm9OzZMz/iJCIiIiryNG6xio6Ohru7O4CM8VSZ0ys0aNAAJ0+e1G50RERERDpE48TK3d0djx49AgBUqlQJW7duBZDRkmVtba3V4IiIiIh0icaJVb9+/XD9+nUAQEBAAJYvXw5jY2OMGTMGEyZM0HqARERERLpCIoQQ6lQcP348Bg4ciEqVKimVP3nyBJcvX0b58uVRrVq1fAmyMMXFxcHKygqxsbGcSoKIiEhHFNbvt9qJlYeHBx4+fIi6deti4MCB6NatG8zMzPI7vkLHxIqIiEj3FNbvt9pdgWFhYTh27BgqVKiAUaNGwc7ODv3798fZs2fzMz4iIiIinaHRGKtGjRph3bp1iIyMxNKlSxEWFoYGDRrA09MTCxYswMuXL/MrTiIiIqIiT+2uwJzcv38fa9euxYoVK5CQkICUlBRtxVYksCuQiIhI9xT5rkBVEhMTcerUKZw4cQJv375VzG9FREREVBx9UGJ1+vRp9O/fH/b29hg5ciQqVKiAU6dOISQkRNvxEREREekMtR9pExERgfXr12PdunW4d+8ePvvsMyxatAjdu3eHubl5fsZIREREpBPUTqycnJxQqlQpfPPNNxgwYAA8PT3zMy4iIiIinaN2YrV161Z8+eWXMDDQ+LnNRERERMWC2llSx44d8zMOIiIiIp33UXcFFobly5fD1dUVxsbGqFu3Li5evJhj3VWrVqFhw4YoUaIESpQogebNm+dan4iIiOhj6FRi9ccff2Ds2LEIDAzElStXUL16dfj5+SEqKkpl/ePHj6NHjx44duwYzp07BycnJ7Rs2RLPnz8v4MiJiIioOPjoCUILUt26dVG7dm389NNPAAC5XA4nJyeMGDECAQEBea4vk8lQokQJ/PTTT+jdu7da++QEoURERLpHJycILUipqam4fPkymjdvrijT09ND8+bNce7cObW2kZSUhLS0NJQsWTLHOikpKYiLi1N6EREREalD41v8EhMTMXfuXBw5cgRRUVGQy+VKyx8+fKi14LJ6/fo1ZDIZypQpo1RepkwZ3L17V61tTJo0CQ4ODkrJ2fuCgoIwc+bMj4qViIiIiieNE6uBAwfixIkT+Oabb2Bvbw+JRJIfcWnd3LlzsWXLFhw/fhzGxsY51ps8eTLGjh2reB8XFwcnJ6eCCJGIiIh0nMaJ1f79+7F3717Ur18/P+LJUenSpaGvr4+XL18qlb98+RJ2dna5rrtgwQLMnTsXhw8fRrVq1XKtK5VKIZVKPzpeIiIiKn40HmNVokSJXMco5RcjIyN4e3vjyJEjijK5XI4jR47A19c3x/V++OEHzJ49GwcOHICPj09BhEpERETFlMaJ1ezZszF9+nQkJSXlRzy5Gjt2LFatWoX169cjJCQEw4YNQ2JiIvr16
wcA6N27NyZPnqyoP2/ePEybNg1r1qyBq6srIiMjERkZiYSEhAKPnYiIiD59GncFLly4EA8ePECZMmXg6uoKQ0NDpeVXrlzRWnDv69atG169eoXp06cjMjISNWrUwIEDBxQD2sPDw6Gn91+uGBwcjNTUVHTu3FlpO4GBgZgxY0a+xUlERETFk8bzWOV1x1xgYOBHBVTUcB4rIiIi3VNYv986NUFoYWBiRUREpHsK6/db467ATJcvX0ZISAgAoHLlyqhZs6bWgiIiIiLSRRonVlFRUejevTuOHz8Oa2trAEBMTAyaNm2KLVu2wMbGRtsxEhEREekEje8KHDFiBOLj43H79m1ER0cjOjoat27dQlxcHEaOHJkfMRIRERHpBI3HWFlZWeHw4cOoXbu2UvnFixfRsmVLxMTEaDO+QscxVkRERLpHZx7CLJfLs02xAACGhobZnhtIREREVJxonFh9/vnnGDVqFF68eKEoe/78OcaMGYNmzZppNTgiIiIiXaJxYvXTTz8hLi4Orq6uKFeuHMqVKwc3NzfExcVh2bJl+REjERERkU7Q+K5AJycnXLlyBYcPH8bdu3cBAJ6enmjevLnWgyMiIiLSJZwgNA8cvE5ERKR7ivQEoT/++CMGDx4MY2Nj/Pjjj7nW5ZQLREREVFyp1WLl5uaGf//9F6VKlYKbm1vOG5NI8PDhQ60GWNjYYkVERKR7inSL1aNHj1T+PxERERH9R+O7AmfNmoWkpKRs5e/evcOsWbO0EhQRERGRLtJ48Lq+vj4iIiJga2urVP7mzRvY2tpCJpNpNcDCxq5AIiIi3VOkuwKzEkJAIpFkK79+/TpKliyplaA+dTK5wMVH0YiKT4athTHquJWEvl72z5SIKD/xu4hI+9ROrEqUKAGJRAKJRIIKFSooJVcymQwJCQkYOnRovgSpy6LikhEVn6J4f/b+a6w89RCvE1IVZaXNjTC4oTvqlS+tKLO1kMLW0rhAYyWi4uPArQjM3HMHEbHJijJ7K2MEtvNCqyr2hRgZkW5Tuytw/fr1EEKgf//+WLJkCaysrBTLjIyM4OrqCl9f33wLtLB8bFPi4kP3sPRImMbrjWrmgTEtKmi8HhGRKln/yDt7/zW+3383x7r/a11J8Yce/8gjXVVYXYEaj7E6ceIE6tWrp/JBzJ+ijz0xmV9mMrnAgPWXlFqq3lfa3Ai/9qkNfT0Jv8yISKv4Rx4VNzozxqpx48aK/09OTkZqqnKiwAHeymwtjWFraYxzD97kmlQBwOuEVCSlyuBbrlQBRUdExUXPus5o4VUGN57F4H+7buVZ//sOVVCtrDVsLaQFEB3Rp0Pj6RaSkpLg7+8PW1tbmJmZoUSJEkovUi0qPjnvShrUIyLShK2lMao4WsFMqt7f02ZSA1RxtGLLOZGGNE6sJkyYgKNHjyI4OBhSqRSrV6/GzJkz4eDggA0bNuRHjJ8EWwv1vpzUrUdEpImouGTceh6LxJR0teonpqTj1vNYRMXxjz0iTWjcFbhnzx5s2LABTZo0Qb9+/dCwYUOUL18eLi4u2LRpE3r27JkfceqszDFWpkb6KG1ulOcYK1Mjfdx6HssxVkSkVZsuhGs0xiqzu5BjrIg0o3FiFR0dDXd3dwAZ46mio6MBAA0aNMCwYcO0G90nQJMvs9cJqfhq+RkA/DIjIu3KHGMFaH5XIBGpT+PEyt3dHY8ePYKzszMqVaqErVu3ok6dOtizZw+sra3zIUTdlvXLDNBsHisiIm3JvJEGAKo4WsG5lCnnsSLKBxpPt7B48WLo6+tj5MiROHz4MNq1awchBNLS0rBo0SKMGjUqv2ItFPlxuyZnOyaiooDfRfQp05l5rN735MkTXL58GeXLl0e1atW0FVeRwWcFEhER5ayoJug6M4/Vhg0b0K1bN0ilGV1VLi4ucHFxQWpqKjZs2IDevXtrPUgiIiIqevhopOw0brHS19dHREQEbG1tlcrfvHkDW1tbyGQyrQZY2NhiRUREl
EGXHo2kMy1WQgilBzBnevbsmdLzA4mIiOjTosmd7lmTruJ0p7vaiVXNmjUhkUggkUjQrFkzGBj8t6pMJsOjR4/QqlWrfAmSiIiICh8fjZQ3tROr9u3bAwCuXbsGPz8/mJubK5YZGRnB1dUVnTp10nqAREREVDRkTtvx4FWCWvUzH41UnKidWAUGBgIAXF1d0b17d8XgdSIiIioeMsdYafpopOL0NBGNx1h5eXnh2rVrqFu3rlL5hQsXoK+vDx8fH60FR0REREUHH42UN40Tq2+//RYTJ07Mllg9f/4c8+bNw4ULF7QWHBERERUdfDRS3jROrO7cuYNatWplK69Zsybu3LmjlaCIiIio6OGjkfKmcWIllUrx8uVLxYOYM0VERCjdKUhERESftlZV7NHCy65IzrxeWDSeILRHjx6IiIjAX3/9pZi3KiYmBu3bt4etrS22bt2aL4EWFk4QSkREpHt0ZoLQBQsWoFGjRnBxcUHNmjUBZEzBUKZMGWzcuFHrARIRERHpCo0TK0dHR9y4cQObNm3C9evXYWJign79+qFHjx4wNDTMjxiJiIiIdMIHDYoyMzPD4MGDtR0LERERkU7T+5CVNm7ciAYNGsDBwQFPnjwBACxevBh//fWXVoMjIiIi0iUaJ1bBwcEYO3YsWrdujbdv30ImkwEASpQogSVLlmg7PiIiIiKdoXFitWzZMqxatQpTpkxRml7Bx8cHN2/e1GpwRERERLpE48Tq0aNHirsBs5JKpUhMTNRKUERERES6SOPEys3NDdeuXctWfuDAAXh6emojJiIiIiKdpPFdgWPHjsW3336L5ORkCCFw8eJFbN68GUFBQVi9enV+xEhERESkEzROrAYOHAgTExNMnToVSUlJ+Prrr+Hg4IClS5eie/fu+REjEakgkws+RoKIqIjR+JE2WSUlJSEhIQG2trbajKlI4SNtqCg6cCuCDz4lIspFYf1+f9A8VgAQFRWFy5cvIzQ0FK9evdJmTLlavnw5XF1dYWxsjLp16+LixYu51t+2bRsqVaoEY2NjVK1aFfv27SugSInyx4FbERj22xWlpAoAImOTMey3KzhwK6KQIiMiIo0Tq/j4eHzzzTdwcHBA48aN0bhxYzg4OKBXr16IjY3NjxgV/vjjD4wdOxaBgYG4cuUKqlevDj8/P0RFRamsf/bsWfTo0QMDBgzA1atX0b59e7Rv3x63bt3K1ziJ8otMLjBzzx2oambOLJu55w5k8g9uiCYioo+gcVdgt27dcPXqVSxbtgy+vr4AgHPnzmHUqFGoUaMGtmzZki+BAkDdunVRu3Zt/PTTTwAAuVwOJycnjBgxAgEBASpjTUxMxN9//60o++yzz1CjRg2sWLFC5T5SUlKQkpKieB8XFwcnJyc8ffpU0ZRoaGgIExMTvHv3DmlpaYq6UqlUMe1E5sSpAGBsbAwjIyMkJCRALpcryk1NTWFgYIC4uDilGMzMzKCnp4f4+HilcgsLC8jl8mzTWlhaWiI9PR1JSUmKMj09PZibmyM1NRXJyf+1bOjr68PMzCzbcfKYivYx3Y2Ixf2oRDyOScXKM0+Rl4GfOcC9lAm8nEqhhqtNkTymTJ/SeeIx8Zh4TEXnmF68eAFHR8eCH8ojNGRqaipOnTqVrfzkyZPC1NRU082pLSUlRejr64tdu3Yplffu3Vt8+eWXKtdxcnISixcvViqbPn26qFatWo77CQwMFMj44z/H14ABA4QQQgwYMECpPDAwUAghRMuWLZXKV61aJYQQwsvLS6n8wIEDQgghLCwslMpv3bolYmNjs+03NjZW3Lp1S6nMwsJCCCHEgQMHlMq9vLyEEEKsWrVKqbxly5Yqj5PHpBvHVNLPX7hM+jvPl6lnIwFA+PQKKPLH9CmeJx4Tj4nHVPjHVLFiRUVsBUnjFitnZ2fs3bsXVatWVSq/ceMG2rRpg2fPnmmyObVlZp5nz55VtJQBwMSJE3HixAlcuHAh2zpGRkZYv349evToo
Sj7+eefMXPmTLx8+VLlfthixWMqisfEFivdOE88Jh4Tj6noHFNhtVhpnFitXLkS27Ztw8aNG2FnZwcAiIyMRJ8+fdCxY0cMGTIkXwItqMTqfbwrkIoSmVygwbyjiIxNhqp/uBIAdlbGOD3pc069QETFWmH9fms8j1VwcDDu378PZ2dnODs7AwDCw8MhlUrx6tUr/PLLL4q6V65c0VqgpUuXhr6+fraE6OXLl4oE7312dnYa1Scq6vT1JAhs54Vhv12BBFBKrjLTqMB2XkyqiIgKicaJVfv27fMhjLwZGRnB29sbR44cUcQgl8tx5MgR+Pv7q1zH19cXR44cwejRoxVlhw4dUmrxItI1rarYI7hXrWzzWNlxHisiokL3UROEFrQ//vgDffr0wS+//II6depgyZIl2Lp1K+7evYsyZcqgd+/ecHR0RFBQEICM6RYaN26MuXPnom3bttiyZQu+//57XLlyBVWqVFFrn+wKpKKKM68TEeVMZ7oCjx07hqZNm6pc9ssvv+TbGCsgY/qEV69eYfr06YiMjESNGjVw4MABlClTBkBGl6Se3n9Tc9WrVw+///47pk6div/973/w8PDAn3/+qXZSRVSU6etJ4FuuVGGHQUREWWjcYiWVSjFy5Eh8//33MDQ0BAC8fv0a/fr1w+nTp/H27dt8CbSwsMWKiIhI9+jMI22OHTuGXbt2oXbt2rhz5w727t2LKlWqIC4uDteuXcuHEImIiIh0g8aJVb169XDt2jVUqVIFtWrVQocOHTBmzBgcP34cLi4u+REjERERkU74oIcw37t3D//++y/Kli0LAwMDhIaGKk3+RURERFQcaZxYzZ07F76+vmjRogVu3bqFixcv4urVq6hWrRrOnTuXHzESERER6QSNE6ulS5fizz//xLJly2BsbIwqVarg4sWL6NixI5o0aZIPIRIRERHpBo2nW7h58yZKly6tVGZoaIj58+fjiy++0FpgRERERLpG4xar95OqrDw9PT8qGCIiIiJdpnZiZWpqilevXinet23bFhEREYr3L1++hL09H6VBRERExZfaiVVycjKyziV68uRJvHv3TqmODj0dh4iIiEjrPmi6hZxIJHxOGRERERVfWk2siIiIiIoztRMriUSi1CL1/nsiIiKi4k7t6RaEEKhQoYIimUpISEDNmjWhp6enWE5ERERUnKmdWK1duzY/4yAiIiLSeWonVn369MnPOIiIiIh0nlpjrNjNR0RERJQ3tRKrypUrY8uWLUhNTc21XlhYGIYNG4a5c+dqJTgiIiIiXaJWV+CyZcswadIkDB8+HC1atICPjw8cHBxgbGyMt2/f4s6dOzh9+jRu374Nf39/DBs2LL/jJiIiIipyJEKDfr7Tp0/jjz/+wKlTp/DkyRO8e/cOpUuXRs2aNeHn54eePXuiRIkS+RlvgYuLi4OVlRViY2NhaWlZ2OEQERGRGgrr91ujxKo4YmJFRESkewrr95szrxMRERFpCRMrIiIiIi1hYkVERESkJUysiIiIiLSEiRURERGRlnxQYvXgwQNMnToVPXr0QFRUFABg//79uH37tlaDIyIiItIlGidWJ06cQNWqVXHhwgXs3LkTCQkJAIDr168jMDBQ6wESERER6QqNE6uAgADMmTMHhw4dgpGRkaL8888/x/nz57UaHBEREZEu0TixunnzJjp06JCt3NbWFq9fv9ZKUERERES6SOPEytraGhEREdnKr169CkdHR60ERURERKSLNE6sunfvjkmTJiEyMhISiQRyuRxnzpzB+PHj0bt37/yIkYiIiEgnaJxYff/996hUqRKcnJyQkJAALy8vNGrUCPXq1cPUqVPzI0YiIiIinfDBD2EODw/HrVu3kJCQgJo1a8LDw0PbsRUJfAgzERGR7ims32+DD13R2dkZzs7O2oyFiIiISKdpnFj1798/1+Vr1qz54GCIiIiIdJnGidXbt2+V3qelpeHWrVuIiYnB559/rrXAiIiIiHSNx
onVrl27spXJ5XIMGzYM5cqV00pQRERERLpIKw9h1tPTw9ixY7F48WJtbI6IiIhIJ2klsQIyHsycnp6urc0RERER6RyNuwLHjh2r9F4IgYiICOzduxd9+vTRWmBEREREukbjxOrq1atK7/X09GBjY4OFCxfmeccgERER0adM48Tq2LFj+REHERERkc7T2hgrIiIiouJOrRarmjVrQiKRqLXBK1eufFRARERERLpKrcSqffv2+RwGERERke774IcwFxd8CDMREZHuKazfb46xIiIiItISje8KlMlkWLx4MbZu3Yrw8HCkpqYqLY+OjtZacERERES6ROMWq5kzZ2LRokXo1q0bYmNjMXbsWHTs2BF6enqYMWNGPoSYITo6Gj179oSlpSWsra0xYMAAJCQk5Fp/xIgRqFixIkxMTODs7IyRI0ciNjY232IkIiKi4k3jxGrTpk1YtWoVxo0bBwMDA/To0QOrV6/G9OnTcf78+fyIEQDQs2dP3L59G4cOHcLff/+NkydPYvDgwTnWf/HiBV68eIEFCxbg1q1bWLduHQ4cOIABAwbkW4xERERUvGk8eN3MzAwhISFwdnaGvb099u7di1q1auHhw4eoWbNmvrQIhYSEwMvLC5cuXYKPjw8A4MCBA2jTpg2ePXsGBwcHtbazbds29OrVC4mJiTAwUK8XlIPXiYiIdI/ODF4vW7YsIiIiAADlypXDwYMHAQCXLl2CVCrVbnT/79y5c7C2tlYkVQDQvHlz6Onp4cKFC2pvJ/PDzS2pSklJQVxcnNKLiIiISB0aJ1YdOnTAkSNHAAAjRozAtGnT4OHhgd69e+fbswIjIyNha2urVGZgYICSJUsiMjJSrW28fv0as2fPzrX7EACCgoJgZWWleDk5OX1w3ERERFS8qH1X4E8//YRevXph7ty5irJu3brB2dkZ586dg4eHB9q1a6fRzgMCAjBv3rxc64SEhGi0TVXi4uLQtm1beHl55TnAfvLkyRg7dqzSukyuiIiISB1qj7GysrJCWloaOnTogAEDBuDzzz//6J2/evUKb968ybWOu7s7fvvtN4wbNw5v375VlKenp8PY2Bjbtm1Dhw4dclw/Pj4efn5+MDU1xd9//w1jY2ONYuQYKyIiIt1TWL/fardYRUZGYtu2bVi7di1atGgBZ2dn9O/fH3379v3gFh0bGxvY2NjkWc/X1xcxMTG4fPkyvL29AQBHjx6FXC5H3bp1c1wvLi4Ofn5+kEql2L17t8ZJFREREZEmPuiRNg8fPsS6deuwYcMGPHv2DM2bN8eAAQPQvn17GBoa5kecaN26NV6+fIkVK1YgLS0N/fr1g4+PD37//XcAwPPnz9GsWTNs2LABderUQVxcHFq2bImkpCTs2rULZmZmim3Z2NhAX19frf2yxYqIiEj36MxdgUBG99ysWbPw6NEj7N+/H6VKlULfvn3h6Oio7fgUNm3ahEqVKqFZs2Zo06YNGjRogJUrVyqWp6WlITQ0FElJSQCAK1eu4MKFC7h58ybKly8Pe3t7xevp06f5FicREREVXxo/0iYriUQCAwMDSCQSCCGQlpamrbiyKVmypKJ1ShVXV1dkbXxr0qQJ+HxpIiIiKkgf1GL19OlTzJo1C+7u7mjRogVevHiBVatWKea3IiIiIiqO1G6xSk1Nxc6dO7FmzRocPXoU9vb26NOnD/r37w93d/f8jJGIiIhIJ6idWNnZ2SEpKQlffPEF9uzZAz8/P+jpfVCDFxEREdEnSe3EaurUqfjmm2/Umh6BiIiIqDhSO7HKOhs5EREREWXHvjwiIiIiLWFiRURERKQlTKyIiIiItISJFREREZGWaDzzukwmw7p163DkyBFERUVBLpcrLT969KjWgiMiIiLSJRonVqNGjcK6devQtm1bVKlSBRKJJD/iIiIiItI5GidWW7ZswdatW9GmTZv8iIeIiIhIZ2k8xsrIyAjly5fPj1iIiIiIdJrGidW4ceOwdOlSCCHyIx4iI
iIinaVxV+Dp06dx7Ngx7N+/H5UrV4ahoaHS8p07d2otOCIiIiJdonFiZW1tjQ4dOuRHLEREREQ6TePEau3atfkRBxEREZHO4wShRERERFqicYsVAGzfvh1bt25FeHg4UlNTlZZduXJFK4ERERER6RqNW6x+/PFH9OvXD2XKlMHVq1dRp04dlCpVCg8fPkTr1q3zI0YiIiIinaBxYvXzzz9j5cqVWLZsGYyMjDBx4kQcOnQII0eORGxsbH7ESERERKQTNE6swsPDUa9ePQCAiYkJ4uPjAQDffPMNNm/erN3oiIiIiHSIxomVnZ0doqOjAQDOzs44f/48AODRo0ecNJSIiIiKNY0Tq88//xy7d+8GAPTr1w9jxoxBixYt0K1bN85vRURERMWaRGjYzCSXyyGXy2FgkHFD4ZYtW3D27Fl4eHhgyJAhMDIyypdAC0tcXBysrKwQGxsLS0vLwg6HiIiI1FBYv98aJ1bFDRMrIiIi3VNYv98fNEHoqVOn0KtXL/j6+uL58+cAgI0bN+L06dNaDY6IiIhIl2icWO3YsQN+fn4wMTHB1atXkZKSAgCIjY3F999/r/UAiYiIiHSFxonVnDlzsGLFCqxatQqGhoaK8vr163PWdSIiIirWNE6sQkND0ahRo2zlVlZWiImJ0UZMRERERDrpg+axun//frby06dPw93dXStBEREREekijROrQYMGYdSoUbhw4QIkEglevHiBTZs2Yfz48Rg2bFh+xEhERESkEww0XSEgIAByuRzNmjVDUlISGjVqBKlUivHjx2PEiBH5ESMRERGRTvjgeaxSU1Nx//59JCQkwMvLC+bm5tqOrUjgPFZERES6p7B+vzVuscpkZGQELy8vbcZCREREpNPUTqz69++vVr01a9Z8cDBEREREukztxGrdunVwcXFBzZo1wafgEBEREWWndmI1bNgwbN68GY8ePUK/fv3Qq1cvlCxZMj9jIyIiItIpak+3sHz5ckRERGDixInYs2cPnJyc0LVrV/zzzz9swSIiIiLCR9wV+OTJE6xbtw4bNmxAeno6bt++/UneGci7AomIiHRPYf1+azxBqGJFPT1IJBIIISCTybQZExEREZFO0iixSklJwebNm9GiRQtUqFABN2/exE8//YTw8PBPsrWKiIiISBNqD14fPnw4tmzZAicnJ/Tv3x+bN29G6dKl8zM2IiIiIp2i9hgrPT09ODs7o2bNmpBIJDnW27lzp9aCKwo4xoqIiEj3FPmZ13v37p1rQkVERERU3Gk0QSgRERER5eyD7wokIiIiImVMrIiIiIi0RGcSq+joaPTs2ROWlpawtrbGgAEDkJCQoNa6Qgi0bt0aEokEf/75Z/4GSkRERMWWziRWPXv2xO3bt3Ho0CH8/fffOHnyJAYPHqzWukuWLOHAeyIiIsp3ag9eL0whISE4cOAALl26BB8fHwDAsmXL0KZNGyxYsAAODg45rnvt2jUsXLgQ//77L+zt7QsqZCIiIiqGdKLF6ty5c7C2tlYkVQDQvHlz6Onp4cKFCzmul5SUhK+//hrLly+HnZ2dWvtKSUlBXFyc0ouIiIhIHTqRWEVGRsLW1lapzMDAACVLlkRkZGSO640ZMwb16tXDV199pfa+goKCYGVlpXg5OTl9cNxERERUvBRqYhUQEACJRJLr6+7dux+07d27d+Po0aNYsmSJRutNnjwZsbGxitfTp08/aP9ERERU/BTqGKtx48ahb9++udZxd3eHnZ0doqKilMrT09MRHR2dYxff0aNH8eDBA1hbWyuVd+rUCQ0bNsTx48dVrieVSiGVStU9BCIiIiKFQk2sbGxsYGNjk2c9X19fxMTE4PLly/D29gaQkTjJ5XLUrVtX5ToBAQEYOHCgUlnVqlWxePFitGvX7uODJyIiInqPTtwV6OnpiVatWmHQoEFYsWIF0tLS4O/vj+7duyvuCHz+/DmaNWuGDRs2oE6dOrCzs1PZmuXs7Aw3N7eCPgQiIiIqB
nRi8DoAbNq0CZUqVUKzZs3Qpk0bNGjQACtXrlQsT0tLQ2hoKJKSkgoxSiIiIirOJEIIUdhBFGVxcXGwsrJCbGwsLC0tCzscIiIiUkNh/X7rTIsVERERUVHHxIqIiIhIS5hYEREREWkJEysiIiIiLWFiRURERKQlOjGPlS6QyWRIS0sr7DComDEyMoKeHv8+IiIqKphYfSQhBCIjIxETE1PYoVAxpKenBzc3NxgZGRV2KEREBCZWHy0zqbK1tYWpqSkkEklhh0TFhFwux4sXLxAREQFnZ2dee0RERQATq48gk8kUSVWpUqUKOxwqhmxsbPDixQukp6fD0NCwsMMhIir2ODjjI2SOqTI1NS3kSKi4yuwClMlkhRwJEREBbLHSCk26YKLikhEVn6LxPmwtpLC1NNZ4Pfq0sfuPiKhoYWJVwDZdCMfSI2EarzeqmQfGtKiQDxERERGRtjCxKmA96zqjhVcZpbLkNBk6rzgHANg+1BfGhvrZ1rO1kBZIfERERPThmFgVMFtL42xdevHJ/81/lZCSjprOJaCvxy6eT1mTJk1Qo0YNLFmypLBDISIiLeLg9UJ24FYEmi86oXjfd+0lNJh3FAduRRRiVMqEEEhPT89Wnpqa+kHb+9D1iIiIijomVoXowK0IDPvtCl7GKQ9mj4xNxrDfruRbciWXyxEUFAQ3NzeYmJigevXq2L59u2L58ePHIZFIsH//fnh7e0MqleL06dNo0qQJ/P39MXr0aJQuXRp+fn4AgBMnTqBOnTqQSqWwt7dHQECAUiKmaj0hBGbMmAFnZ2dIpVI4ODhg5MiRucYdHByMcuXKwcjICBUrVsTGjRuVlkskEqxevRodOnSAqakpPDw8sHv37hy3d/fuXZiamuL3339XlG3duhUmJia4c+dOjuvt3r0bHh4eMDY2RtOmTbF+/XpIJBLFJLFv3rxBjx494OjoCFNTU1StWhWbN29WrN+3b1+cOHECS5cuhUQigUQiwePHjwEAt27dQuvWrWFubo4yZcrgm2++wevXr3P9XIiIqOhgYlVIZHKBmXvuQKhYllk2c88dyOSqanycoKAgbNiwAStWrMDt27cxZswY9OrVCydOnFCqFxAQgLlz5yIkJATVqlUDAKxfvx5GRkY4c+YMVqxYgefPn6NNmzaoXbs2rl+/juDgYPz666+YM2eO0rbeX2/Hjh1YvHgxfvnlF4SFheHPP/9E1apVc4x5165dGDVqFMaNG4dbt25hyJAh6NevH44dO6ZUb+bMmejatStu3LiBNm3aoGfPnoiOjla5zUqVKmHBggUYPnw4wsPD8ezZMwwdOhTz5s2Dl5eXynUePXqEzp07o3379rh+/TqGDBmCKVOmKNVJTk6Gt7c39u7di1u3bmHw4MH45ptvcPHiRQDA0qVL4evri0GDBiEiIgIRERFwcnJCTEwMPv/8c9SsWRP//vsvDhw4gJcvX6Jr1645fi5ERFTECMpVbGysACBiY2OzLXv37p24c+eOePfuncbbPXv/tXCZ9Heer7P3X2vjMBSSk5OFqampOHv2rFL5gAEDRI8ePYQQQhw7dkwAEH/++adSncaNG4uaNWsqlf3vf/8TFStWFHK5XFG2fPlyYW5uLmQyWY7rLVy4UFSoUEGkpqaqFXe9evXEoEGDlMq6dOki2rRpo3gPQEydOlXxPiEhQQAQ+/fvz3Xbbdu2FQ0bNhTNmjUTLVu2VDqW902aNElUqVJFqWzKlCkCgHj79m2u+xg3bpzifePGjcWoUaOU6syePVu0bNlSqezp06cCgAgNDVW53Y+5BomIPmW5/X7nJw5eLyRR8claraeu+/fvIykpCS1atFAqT01NRc2aNZXKfHx8sq3v7e2t9D4kJAS+vr5K8ynVr18fCQkJePbsGZydnVWu16VLFyxZsgTu7u5o1aoV2rRpg3bt2sHAQPUlGRISgsGDByuV1a9fH0uXLlUqy2xZAwAzMzNYWloiKipK5TYzrVmzBhUqV
ICenh5u376d69xQoaGhqF27tlJZnTp1lN7LZDJ8//332Lp1K54/f47U1FSkpKTkOZHs9evXcezYMZibm2db9uDBA1SowOk2iIiKOiZWhcTWQr3JPtWtp66EhAQAwN69e+Ho6Ki0TCpVntLBzMws2/qqytTx/npOTk4IDQ3F4cOHcejQIQwfPhzz58/HiRMnPurRLO+vK5FIIJfLc13n+vXrSExMhJ6eHiIiImBvb//B+weA+fPnY+nSpViyZAmqVq0KMzMzjB49Os9B+wkJCWjXrh3mzZuXbdnHxkRERAWDiVUhqeNWEvZWxoiMTVY5zkoCwM7KGHXcSmp1v15eXpBKpQgPD0fjxo0/enuenp7YsWMHhBCKlp4zZ87AwsICZcuWzXVdExMTtGvXDu3atcO3336LSpUq4ebNm6hVq5bK/Zw5cwZ9+vRRlJ05cybHsVDqio6ORt++fTFlyhRERESgZ8+euHLlCkxMTFTWr1ixIvbt26dUdunSJaX3Z86cwVdffYVevXoByLhZ4N69e0qxGhkZZXsMTa1atbBjxw64urrm2HJHRERFGwevFxJ9PQkC22X80L7f8ZT5PrCdl9bns7KwsMD48eMxZswYrF+/Hg8ePMCVK1ewbNkyrF+/XuPtDR8+HE+fPsWIESNw9+5d/PXXXwgMDMTYsWOhp5fz5bVu3Tr8+uuvuHXrFh4+fIjffvsNJiYmcHFxUVl/woQJWLduHYKDgxEWFoZFixZh586dGD9+vMYxZzV06FA4OTlh6tSpWLRoEWQyWa7bHDJkCO7evYtJkybh3r172Lp1K9atWwfgv8fLeHh44NChQzh79ixCQkIwZMgQvHz5Umk7rq6uuHDhAh4/fozXr19DLpfj22+/RXR0NHr06IFLly7hwYMH+Oeff9CvXz8+C5CISEcwsSpErarYI7hXLdhaKnfB2VkZI7hXLbSqkj/dP7Nnz8a0adMQFBQET09PtGrVCnv37oWbm5vG23J0dMS+fftw8eJFVK9eHUOHDsWAAQMwderUXNeztrbGqlWrUL9+fVSrVg2HDx/Gnj17UKpUKZX127dvj6VLl2LBggWoXLkyfvnlF6xduxZNmjTROOZMGzZswL59+7Bx40YYGBjAzMwMv/32G1atWoX9+/erXMfNzQ3bt2/Hzp07Ua1aNQQHByvuCszsSp06dSpq1aoFPz8/NGnSBHZ2dmjfvr3SdsaPHw99fX14eXnBxsYG4eHhcHBwwJkzZyCTydCyZUtUrVoVo0ePhrW1da5JKhERFR0SIYT27+f/hMTFxcHKygqxsbGwtLRUWpacnIxHjx7Bzc0NxsYfPhYqPjkNVWccBACs61cbDT1sOPO6Dvnuu++wYsUKPH36tMD3ra1rkIjoU5Pb73d+4kCOAhYVl4yoeOUJQZPT/uvmMZcaICQiLtt6thbSbI/CocLx888/o3bt2ihVqhTOnDmD+fPnw9/fv7DDIiKiIoCJVQHbdCEcS4+E5bg882HM7xvVzANjWvB2+6IgLCwMc+bMQXR0NJydnTFu3DhMnjy5sMMiIqIigF2BedB2V6CqFit1sMWKVGFXIBGRauwKLCZsLY2ZIBEREX2ieKsRERERkZYwsSIiIiLSEiZWRERERFrCMVYFLT4y46UpC7uMFxERERVZTKwK2r9rgRNzNV+vcQDQlLf0ExERFWVMrAqaTz+gYmvlsvR3wJpWGf/f/wBgoOIBwGytyheurq4YPXo0Ro8eXdihEBHRJ4CJVUFT1aWXnGWm9ZQEoGwdQE+/YOOiAnX8+HE0bdoUb9++hbW1dWGHQ0REWsLB64Xtzm5geZ3/3m/qDCypklFeRAghkJ6enq08NTX1g7b3oesREREVdUysCtOd3cDW3kB8hHJ5XERGeT4lV3K5HEFBQXBzc4OJiQmqV6+O7du3K5YfP34cEokE+/fvh7e3N6RSKU6fPo0mTZrA398fo0ePRunSpeHn5wcAOHHiBOrUqQOpVAp7e3sEB
AQoJWKq1hNCYMaMGXB2doZUKoWDgwNGjhyZa9zBwcEoV64cjIyMULFiRWzcuFFpuUQiwerVq9GhQweYmprCw8MDu3fn/RkmJSWhf//+sLCwgLOzM1auXJlr/fj4ePTs2RNmZmawt7fH4sWL0aRJE6XuxI0bN8LHxwcWFhaws7PD119/jaioKADA48eP0bRpUwBAiRIlIJFI0LdvXwB5nxsiIiriBOUqNjZWABCxsbHZlr17907cuXNHvHv3TvMNy9KFWFhJiEDLHF5WQiz0zKinZXPmzBGVKlUSBw4cEA8ePBBr164VUqlUHD9+XAghxLFjxwQAUa1aNXHw4EFx//598ebNG9G4cWNhbm4uJkyYIO7evSvu3r0rnj17JkxNTcXw4cNFSEiI2LVrlyhdurQIDAxU7E/Vetu2bROWlpZi37594smTJ+LChQti5cqVOca8c+dOYWhoKJYvXy5CQ0PFwoULhb6+vjh69KiiDgBRtmxZ8fvvv4uwsDAxcuRIYW5uLt68eZPjdl1cXETJkiXF8uXLRVhYmAgKChJ6enri7t27Oa4zcOBA4eLiIg4fPixu3rwpOnToICwsLMSoUaMUdX799Vexb98+8eDBA3Hu3Dnh6+srWrduLYQQIj09XezYsUMAEKGhoSIiIkLExMSodW7e91HXIBHRJyy33+/8xMQqD/mWWD08mUtSleX18KQWjuI/ycnJwtTUVJw9e1apfMCAAaJHjx5CiP8Sqz///FOpTuPGjUXNmjWVyv73v/+JihUrCrlcrihbvny5MDc3FzKZLMf1Fi5cKCpUqCBSU1PVirtevXpi0KBBSmVdunQRbdq0UbwHIKZOnap4n5CQIACI/fv357hdFxcX0atXL8V7uVwubG1tRXBwsMr6cXFxwtDQUGzbtk1RFhMTI0xNTZUSq/ddunRJABDx8fFCiP8+47dv3yrqqHNu3sfEiohItcJKrNgVWFgSXmq3npru37+PpKQktGjRAubm5orXhg0b8ODBA6W6Pj4+2db39vZWeh8SEgJfX19IJBJFWf369ZGQkIBnz57luF6XLl3w7t07uLu7Y9CgQdi1a5fKcVxZ91O/fn2lsvr16yMkJESprFq1aor/NzMzg6WlpaILLidZ15FIJLCzs8txnYcPHyItLQ116vw3Ls7KygoVK1ZUqnf58mW0a9cOzs7OsLCwQOPGjQEA4eHhOcahybkhIqKiiXcFFhbzMtqtp6aEhAQAwN69e+Ho6Ki0TCqVKr03MzPLtr6qMnW8v56TkxNCQ0Nx+PBhHDp0CMOHD8f8+fNx4sQJGBoaftA+AGRbVyKRQC6Xa32d3CQmJsLPzw9+fn7YtGkTbGxsEB4eDj8/v1wH7mtyboiIqGhii1VhcakHWDoAkORQQQJYOmbU0yIvLy9IpVKEh4ejfPnySi8nJyeNt+fp6Ylz585BCKEoO3PmDCwsLFC2bNlc1zUxMUG7du3w448/4vjx4zh37hxu3ryZ437OnDmjVHbmzBl4eXlpHPPHcHd3h6GhIS5duqQoi42Nxb179xTv7969izdv3mDu3Llo2LAhKlWqlK0FzMjICAAgk8kUZdo+N0REVPDYYlVY9PSBVvMy7v6DBIDIsvD/k61Wc7U+n5WFhQXGjx+PMWPGQC6Xo0GDBoiNjcWZM2dgaWmJPn36aLS94cOHY8mSJRgxYgT8/f0RGhqKwMBAjB07Fnp6Oeft69atg0wmQ926dWFqaorffvsNJiYmcHFxUVl/woQJ6Nq1K2rWrInmzZtjz5492LlzJw4fPqxRvB/LwsICffr0wYQJE1CyZEnY2toiMDAQenp6iu5QZ2dnGBkZYdmyZRg6dChu3bqF2bNnK23HxcUFEokEf//9N9q0aQMTExOtnxsiIip4bLEqTF5fAl03ZJ8w1NIho9zry3zZ7ezZszFt2jQEBQXB09MTrVq1wt69e+Hm5qbxthwdHbFv3z5cvHgR1atXx9ChQzFgwABMnTo11/Wsr
a2xatUq1K9fH9WqVcPhw4exZ88elCpVSmX99u3bY+nSpViwYAEqV66MX375BWvXrkWTJk00jvljLVq0CL6+vvjiiy/QvHlz1K9fH56enjA2NgYA2NjYYN26ddi2bRu8vLwwd+5cLFiwQGkbjo6OmDlzJgICAlCmTBn4+/sD0O65ISKigicRWftwKJu4uDhYWVkhNjYWlpaWSsuSk5Px6NEjuLm5KX5UP0hyHDD3/7t6em4Hyn3Omdd1SGJiIhwdHbFw4UIMGDCgQPettWuQiOgTk9vvd35iV2BBi4/MeGWV/u6//5eaA5EqxhmpehQOFYqrV6/i7t27qFOnDmJjYzFr1iwAwFdffVXIkRERUWFjYlXQ/l0LnJib8/LMhzG/r3EA0HRy/sREGluwYAFCQ0NhZGQEb29vnDp1CqVLly7ssIiIqJAxsSpoPv2Aiq01X4+tVUVGzZo1cfny5cIOg4iIiiAmVgWNXXpERESfLJ25KzA6Oho9e/aEpaUlrK2tMWDAAMWEirk5d+4cPv/8c8Us3I0aNcK7d+/yXI+IiIhIUzqTWPXs2RO3b9/GoUOH8Pfff+PkyZMYPHhwruucO3cOrVq1QsuWLXHx4kVcunQJ/v7+uc6v9CE+ZpZuoo/Bm3qJiIoWnZhuISQkBF5eXrh06ZLi+XUHDhxAmzZt8OzZMzg4OKhc77PPPkOLFi2yTc6Ym5SUFKSkpCjex8XFwcnJSeXtmnK5HGFhYdDX14eNjQ2MjIyUnplHlJ+EEHj16hWSkpLg4eEBfX1O0UFElInTLeTi3LlzsLa2VnoocPPmzaGnp4cLFy6gQ4cO2daJiorChQsX0LNnT9SrVw8PHjxApUqV8N1336FBgwY57isoKAgzZ85UKy49PT24ubkhIiICL1680PzAiD6SRCJB2bJlmVQRERUROpFYRUZGwtbWVqnMwMAAJUuWRGRkpMp1Hj58CACYMWMGFixYgBo1amDDhg1o1qwZbt26BQ8PD5XrTZ48GWPHjlW8z2yxyomRkRGcnZ2Rnp6u9Nw3ooJgaGjIpIqIqAgp1MQqICAA8+bNy7VOSEjIB207c9zTkCFD0K9fPwAZt8kfOXIEa9asQVBQkMr1pFIppFKpRvuSSCQwNDSEoaHhB8VKREREn4ZCTazGjRuHvn375lrH3d0ddnZ2iIqKUipPT09HdHQ07OxUT11gb28PAPDy8lIq9/T0RHh4+IcHTURERJSDQk2sbGxsYGNjk2c9X19fxMTE4PLly/D29gYAHD16FHK5HHXr1lW5jqurKxwcHBAaGqpUfu/ePbRu/QETdBIRERHlQSemW/D09ESrVq0waNAgXLx4EWfOnIG/vz+6d++uuCPw+fPnqFSpEi5evAggo3tuwoQJ+PHHH7F9+3bcv38f06ZNw927dwv8QblERERUPOjE4HUA2LRpE/z9/dGsWTPo6emhU6dO+PHHHxXL09LSEBoaiqSkJEXZ6NGjkZycjDFjxiA6OhrVq1fHoUOHUK5cObX3mzkbRVxcnPYOhoiIiPJV5u92Qc8qpRPzWBWmZ8+e5XpXIBERERVdT58+RdmyZQtsf0ys8iCXy/HixQtYWFgU6OSfmdM8PH36tEAnNqNPB68h0gZeR/SxCusaEkIgPj4eDg4OWn/iSm50piuwsOjp6RVopvs+S0tLfpnRR+E1RNrA64g+VmFcQ1ZWVgW6P0BHBq8TERER6QImVkRERERawsSqiJJKpQgMDNR4FniiTLyGSBt4HdHHKm7XEAevExEREWkJW6yIiIiItISJFREREZGWMLEiIiIi0hImVkRERERawsRKy5o0aYLRo0fnuNzV1RVLlizJczsrV66Ek5MT9PT0sGTJEsyYMQM1atRQLO/bty/at2//0fFS0aDudZGfkpKS0KlTJ1haWkIikSAmJiZbXBKJBH/++WehxVhcvP89kp/XR17fWR+qqFwrjx8/hkQiwbVr1wo7FK3Lr3OXG22d14KIfd26dbC2t
s7XfajCmdcL2KVLl2BmZqZ4L5FIsGvXLqUkKS4uDv7+/li0aBE6deoEKysr/PDDD0rbWbp0aYE/WJI+bevXr8epU6dw9uxZlC5dWuWMxREREShRokQhRFe8qfO9Qao5OTkhIiICpUuXLuxQKIudO3fC0NBQa9tzdXXF6NGjlZK1bt26oU2bNlrbh7qYWBUwGxubPOuEh4cjLS0Nbdu2hb29vco6hTFNP33aHjx4AE9PT1SpUiXHOnZ2dgUYEWVS53uDVNPX1+d1W4SkpqbCyMgIJUuWzPd9mZiYwMTEJN/38z52BeaD9PR0+Pv7w8rKCqVLl8a0adMUrUtZm/RdXV0BAB06dIBEIoGrqyvWrVuHqlWrAgDc3d0hkUjw+PHjbPt4vyuwSZMm8Pf3z3G/VLjUOT9JSUno378/LCws4OzsjJUrVyptY9KkSahQoQJMTU3h7u6OadOmIS0tTbH8+vXraNq0KSwsLGBpaQlvb2/8+++/iuWnT59Gw4YNYWJiAicnJ4wcORKJiYmK+BYuXIiTJ09CIpGgSZMmKo8jazdAZhfLli1bUK9ePRgbG6NKlSo4ceKElj61wnXgwAE0aNAA1tbWKFWqFL744gs8ePBAsTzz+Ldu3ar4XGvXro179+7h0qVL8PHxgbm5OVq3bo1Xr14p1sv8tztz5kzY2NjA0tISQ4cORWpqao6x5PW9kXW7WY0ePVrpXCYmJqJ3794wNzeHvb09Fi5cmG1fKSkpGD9+PBwdHWFmZoa6devi+PHjuX5WYWFhaNSoEYyNjeHl5YVDhw5lq5PX9Zs53GHNmjVwdnaGubk5hg8fDplMhh9++AF2dnawtbXFd999p7RdiUSC4OBgtG7dGiYmJnB3d8f27dsVy9/vCjx+/DgkEgmOHDkCHx8fmJqaol69eggNDVXa7pw5c2BrawsLCwsMHDgQAQEBSsMxClpe5y6v85bZLfbnn3/Cw8MDxsbG8PPzw9OnT5W2ExwcjHLlysHIyAgVK1bExo0bc41L3fO6evVquLm5wdjYGIByV2DmOXn/1bdvXwAZf/R99dVXKFOmDMzNzVG7dm0cPnxYsY8mTZrgyZMnGDNmjGLdrMesyfFJJBKsXr0aHTp0gKmpKTw8PLB79+5cP4P3MbHKB+vXr4eBgQEuXryIpUuXYtGiRVi9enW2epcuXQIArF27FhEREbh06RK6deumuGAuXryIiIgIODk5aXW/VDjyOj8LFy6Ej48Prl69iuHDh2PYsGFKX/YWFhZYt24d7ty5g6VLl2LVqlVYvHixYnnPnj1RtmxZXLp0CZcvX0ZAQICiqf3Bgwdo1aoVOnXqhBs3buCPP/7A6dOn4e/vDyCjWX7QoEHw9fVFREQEdu7cqfZxTZgwAePGjcPVq1fh6+uLdu3a4c2bNx/7cRW6xMREjB07Fv/++y+OHDkCPT09dOjQAXK5XKleYGAgpk6diitXrsDAwABff/01Jk6ciKVLl+LUqVO4f/8+pk+frrTOkSNHEBISguPHj2Pz5s3YuXMnZs6cqVZcqr431DVhwgScOHECf/31Fw4ePIjjx4/jypUrSnX8/f1x7tw5bNmyBTdu3ECXLl3QqlUrhIWFqdymXC5Hx44dYWRkhAsXLmDFihWYNGlStnp5Xb9AxnW6f/9+HDhwAJs3b8avv/6Ktm3b4tmzZzhx4gTmzZuHqVOn4sKFC0rrTZs2DZ06dcL169fRs2dPdO/eHSEhIbl+FlOmTMHChQvx77//wsDAAP3791cs27RpE7777jvMmzcPly9fhrOzM4KDg3PdXn7L69ypc96SkpLw3XffYcOGDThz5gxiYmLQvXt3xfJdu3Zh1KhRGDduHG7duoUhQ4agX79+OHbsWI5xqXNe79+/jx07dmDnzp0qx7rVq1cPERERitfRo0dhbGyMRo0aAQASEhLQpk0bHDlyBFevXkWrVq3Qrl07hIeHA8j4/ipbtixmzZql2IYq6h7fzJkz0bVrV9y4c
QNt2rRBz549ER0dneNnkI0grWrcuLHw9PQUcrlcUTZp0iTh6ekphBDCxcVFLF68WLEMgNi1a5fSNq5evSoAiEePHinKAgMDRfXq1RXv+/TpI7766iu190uFS53rolevXoplcrlc2NraiuDg4By3OX/+fOHt7a14b2FhIdatW6ey7oABA8TgwYOVyk6dOiX09PTEu3fvhBBCjBo1SjRu3FipTm7X66NHjwQAMXfuXMXytLQ0UbZsWTFv3rwc49ZVr169EgDEzZs3hRD/Hf/q1asVdTZv3iwAiCNHjijKgoKCRMWKFRXv+/TpI0qWLCkSExMVZcHBwcLc3FzIZDIhRMb1MmrUKMVydb433v9OEEL5nMbHxwsjIyOxdetWxfI3b94IExMTxb6ePHki9PX1xfPnz5W206xZMzF58mSVn8s///wjDAwMlNbZv3+/yhizev/6DQwMFKampiIuLk5R5ufnJ1xdXRWfixBCVKxYUQQFBSl9FkOHDlXadt26dcWwYcOEEP+dp6tXrwohhDh27JgAIA4fPqyov3fvXgFA8W+hbt264ttvv1XaZv369ZW+gwtSXudOnfO2du1aAUCcP39esTwkJEQAEBcuXBBCCFGvXj0xaNAgpW106dJFtGnTRvH+Q86roaGhiIqKUqr3/jWe6fXr18Ld3V0MHz48x30IIUTlypXFsmXLFO/f/zciRMYxW1lZKd6re3xTp05VvE9ISBAAxP79+3ONJyu2WOWDzz77TNEUCQC+vr4ICwuDTCb7JPdL6snr/FSrVk2xTCKRwM7ODlFRUYqyP/74A/Xr14ednR3Mzc0xdepUxV9sADB27FgMHDgQzZs3x9y5c5W6ra5fv45169bB3Nxc8fLz84NcLsejR48+6rh8fX0V/29gYAAfH588Wwt0QVhYGHr06AF3d3dYWloqutyyfuaA8nkrU6YMACi68zPLsp5HAKhevTpMTU0V7319fZGQkJCtW0abHjx4gNTUVNStW1dRVrJkSVSsWFHx/ubNm5DJZKhQoYLStXLixAml6ymrkJAQODk5wcHBQel43pfX9QtkdHNaWFgo3pcpUwZeXl7Q09NTKnv/83x/f76+vnleg1nPW+ZY1szthoaGok6dOkr1339fkPI6d+qeNwMDA9SuXVvxvlKlSrC2tlZ8ViEhIahfv77SvuvXr5/rZ6nOeXVxcVFrnGBaWho6deoEFxcXLF26VFGekJCA8ePHw9PTE9bW1jA3N0dISEi2/eRF3ePLem2YmZnB0tIy2zWXGw5eJyoi3r9DRiKRKLqdzp07h549e2LmzJnw8/ODlZUVtmzZojTOYsaMGfj666+xd+9e7N+/H4GBgdiyZQs6dOiAhIQEDBkyBCNHjsy2X2dn5/w9MB3Vrl07uLi4YNWqVXBwcIBcLkeVKlWyjYXKet4yE+f3y97vPswPenp62cZUZh3roo6EhATo6+vj8uXL0NfXV1pmbm7+wbGpc/0Cqv8N5Pbv4mOoOm8FcZ7yQ36dt7yoe16z3tGam2HDhuHp06e4ePEiDAz+S0/Gjx+PQ4cOYcGCBShfvjxMTEzQuXPnXMclfoyPvebYYpUP3u//P3/+PDw8PLJd8EDGCdRWi5Im+6WC9zHn5+zZs3BxccGUKVPg4+MDDw8PPHnyJFu9ChUqYMyYMTh48CA6duyItWvXAgBq1aqFO3fuoHz58tleRkZGH3Vc58+fV/x/eno6Ll++DE9Pz4/aZmF78+YNQkNDMXXqVDRr1gyenp54+/at1rZ//fp1vHv3TvH+/PnzMDc3V3s8parvDRsbm2xjS7KOZylXrhwMDQ2VrsO3b9/i3r17ivc1a9aETCZDVFRUtuskpzvrPD098fTpU6V9Z70mAPWv3w/1/v7Onz//UddgxYoVs41d02Qsm7blde7UPW/p6elKN7SEhoYiJiZG8Vl5enrizJkzSvs+c+YMvLy8VMalzfO6aNEibN26FX/99RdKlSqVLYa+ffuiQ4cOq
Fq1Kuzs7LLd1GVkZJTnb6mmx/eh2GKVD8LDwzF27FgMGTIEV65cwbJly1TefQNkNH0fOXIE9evXh1Qq/ag5gjTZLxW8jzk/Hh4eCA8Px5YtW1C7dm3s3bsXu3btUix/9+4dJkyYgM6dO8PNzQ3Pnj3DpUuX0KlTJwAZd+589tln8Pf3x8CBA2FmZoY7d+7g0KFD+Omnnz7quJYvXw4PDw94enpi8eLFePv2rdJAYF1UokQJlCpVCitXroS9vT3Cw8MREBCgte2npqZiwIABmDp1Kh4/fozAwED4+/srdXnlRtX3xueff4758+djw4YN8PX1xW+//YZbt26hZs2aADJaLgYMGIAJEyagVKlSsLW1xZQpU5T2WaFCBfTs2RO9e/fGwoULUbNmTbx69QpHjhxBtWrV0LZt22yxNG/eHBUqVECfPn0wf/58xMXFYcqUKUp18rp+P9a2bdvg4+ODBg0aYNOmTbh48SJ+/fXXD97eiBEjMGjQIPj4+KBevXr4448/cOPGDbi7u2stZk3kde7UPW+GhoYYMWIEfvzxRxgYGMDf3x+fffaZoptzwoQJ6Nq1K2rWrInmzZtjz5492Llzp9IdeFlp67wePnwYEydOxPLly1G6dGlERkYCyJguwcrKCh4eHti5cyfatWsHiUSCadOmZWtBcnV1xcmTJ9G9e3dIpVKV85Zpenwfii1W+aB379549+4d6tSpg2+//RajRo3C4MGDVdZduHAhDh06BCcnJ8UXYEHslwrex5yfL7/8EmPGjIG/vz9q1KiBs2fPYtq0aYrl+vr6ePPmDXr37o0KFSqga9euaN26teJOs2rVquHEiRO4d+8eGjZsiJo1a2L69OlK42I+1Ny5czF37lxUr14dp0+fxu7du3V+MkY9PT1s2bIFly9fRpUqVTBmzBjMnz9fa9tv1qwZPDw80KhRI3Tr1g1ffvklZsyYofb6qr43/Pz8MG3aNEycOBG1a9dGfHw8evfurbTe/Pnz0bBhQ7Rr1w7NmzdHgwYN4O3trVRn7dq16N27N8aNG4eKFSuiffv2uHTpUo5dxnp6eti1a5fi2h44cGC2KRHyun4/1syZM7FlyxZUq1YNGzZswObNmz+qFaJnz56YPHkyxo8fj1q1auHRo0fo27evYqqAwpDXuVPnvJmammLSpEn4+uuvUb9+fZibm+OPP/5QLG/fvj2WLl2KBQsWoHLlyvjll1+wdu3aHKdf0dZ5PX36NGQyGYYOHQp7e3vFa9SoUQAyWrNKlCiBevXqoV27dvDz80OtWrWUtjFr1iw8fvwY5cqVy3E8l6bH96Ek4v1OedJJTZo0QY0aNQr9sSik2qd4fh4/fgw3NzdcvXq1UOf30TV9+/ZFTExMkXjcy6egoGahb9GiBezs7PKc16moWrduHUaPHo2YmJjCDuWTx65AIiKiLJKSkrBixQr4+flBX18fmzdvxuHDh1VOfEr0PiZWREREWUgkEuzbtw/fffcdkpOTUbFiRezYsQPNmzcv7NBIB7ArkIiIiEhLOHidiIiISEuYWBERERFpCRMrIiIiIi1hYkVERESkJUysiIiIiLSEiRURERGRljCxIiIiItISJlZEREREWvJ/QjrOCRPHCqAAAAAASUVORK5CYII=", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "datax = [get_noisy_data([0]*4,['x']*4,[0,1,2,3],[.1]*4),\n", " get_noisy_data([1]*4,['x']*4,[0,1,2,3],[.1]*4),\n", @@ -1139,21 +842,10 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": null, "id": "48aa0162-7eb0-446a-ad67-9f949364e803", "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAksAAAGzCAYAAAA/lFPrAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjcsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvTLEjVAAAAAlwSFlzAAAPYQAAD2EBqD+naQAAaP9JREFUeJzt3XdYFFfbBvB7aUsHUbpUG2JvGGxgLGD91KjRWInRRGOvmKhoNEFjjzG2xG40akyiJhq7UcES0dgJKnawIQuI1D3fH7xsXHdZdnGRBe/fde0V58yZmWfYCfNwzpkzEiGEABERERGpZVTSARAREREZMiZLRERERBowWSIiIiLSgMkSERERkQZMloiIiIg0YLJEREREpAGTJSIiIiINmCwRERERacBkiYiIiEgDJktE9Fb6+uuv4efnB7lcXtKhlApr166FRCLBrVu3SjoU0pMjR45AIpHgyJEjxbL/Xr16oWfPnsWy7zeNyVIZl/8LTiKR4Pjx4yrrhRDw8PCARCJBx44dSyDC4vfTTz+hb9++qFKlCiQSCYKDg3Xa/vHjxxg1ahT8/PxgYWEBJycnBAQEYNKkSUhLS1PUGzhwoOJnLZFIYG1tDV9fX3Tv3h0///yz1jfl6dOnK+3n1U9iYqJO8b8pwcHBSnGamZnBx8cHQ4YMwd27d0s6PCUpKSmYM2cOJk2aBCMj5V+DGRkZWLhwIRo3bgw7OzuYm5ujatWqGD58OP79999ijeu7777D2rVri/UYhiIqKgrTp09HcnKy1tvcv38fPXv2hL29PWxtbfF///d/uHnzZvEFWYD8a71Tp04q627dugWJRIJ58+bp7Xje3t6QSCQYMWKEyrr8hGf79u16O56+TJo0CT///DP++eefkg7ltZmUdAD0Zpibm+PHH39Es2bNlMqPHj2Ke/fuQSqVllBkxW/ZsmU4e/YsGjVqhKdPn+q0bVJSEho2bIiUlBR8+OGH8PPzw9OnT3HhwgUsW7YMQ4cOhbW1taK+VCrF999/DwB48eIFbt++jV27dqF79+4IDg7Gb7/9BltbW63jfnnf+ezt7XU6hzepYsWKiIyMBABkZWXhypUrWL58Of78809cvXoVlpaWJRxhntWrVyMnJwe9e/dWKn/y5AlCQ0Nx9uxZdOzYER988AGsra0RGxuLLVu2YOXKlcjKyiq2uL777jtUqFABAwcOLLZjGIqoqCjMmDEDAwcO1OqaTktLQ8uWLSGTyfDZZ5/B1NQUCxcuRFBQEM6fP4/y5csXf9Cv2L17N86ePYsGDRq8keOtWrUKkydPhpubm17216JFC7x48QJmZmZ62d+r6tWrh4YNG2L+/PlYv359sRzjjRFUpq1Zs0YAEN26dRMVKlQQ2dnZSusHDx4sGjRoILy8vESHDh1KKMqiOXz4sAAg4uPjNda7c+eOyM3NFUIIUaNGDREUFKT1Mb7++msBQJw4cUJlnUwmEy9evFAsDxgwQFhZWandT2RkpAAgevbsWegxIyIiBADx+PFjrePM9+LFC8W5viotLU3n/b0sNzdX6XxfFRQUJGrUqKFS/u233woAYt+
+fa91/Hyvex5CCFG7dm3Rt29flfIOHToIIyMjsX37dpV1GRkZYty4ca99bE10vT61kZ2dLTIzM197P/m/Swr7/01bc+fO1Wl/c+bMEQDE6dOnFWVXr14VxsbGYvLkyXqJSVtBQUHC09NTlCtXTnTq1ElpXXx8vAAg5s6dq7fjeXl5iRo1aggTExMxYsQIpXX5vwe3bdumt+Pp07x584SVlZVITU0t6VBeC7vh3hK9e/fG06dPsX//fkVZVlYWtm/fjg8++EDtNnK5HIsWLUKNGjVgbm4OZ2dnfPzxx3j27JlSvd9++w0dOnSAm5sbpFIpKlWqhJkzZyI3N1epXnBwMGrWrIkrV66gZcuWsLS0hLu7O77++mv9n/BLPDw8VLpatHXjxg0YGxvjnXfeUVlna2sLc3NzrfYTHh6Otm3bYtu2bXrryslvft+yZQumTJkCd3d3WFpaIiUlBQMHDoS1tTVu3LiB9u3bw8bGBn369AEAPH/+HOPGjYOHhwekUimqVauGefPmQQihtH+JRILhw4dj06ZNqFGjBqRSKfbu3atznC4uLgAAE5P/GrJv376NYcOGoVq1arCwsED58uXRo0cPlfEw+d3IR48exbBhw+Dk5ISKFSsCAFJTUzF69Gh4e3tDKpXCyckJbdq0QUxMjMZ44uPjceHCBbRu3Vqp/NSpU/j9998xaNAgvPfeeyrbSaVSla6Va9euoXv37nBwcIC5uTkaNmyInTt3qj2HEydOYOzYsXB0dISVlRW6du2Kx48fK+p5e3vj8uXLOHr0qKIr8+Uu4+TkZIwePVrxvVWuXBlz5sxR6t59uQto0aJFqFSpEqRSKa5cuaJ1vABw+fJlvPvuu7CwsEDFihUxa9YsrbuRL1y4gIEDB8LX1xfm5uZwcXHBhx9+qNSqO336dEyYMAEA4OPjozhfTeOhtm/fjkaNGqFRo0aKMj8/P7Rq1Qpbt27VKjZ9srGxwZgxY7Br165CrzkAuHnzJnr06AEHBwdYWlrinXfewe+//6718by9vdG/f3+sWrUKDx48KLT+uXPn0K5dO9ja2sLa2hqtWrXCyZMnleqoG7MUFxeH9957Dy4uLjA3N0fFihXRq1cvyGQypW03btyIBg0awMLCAg4ODujVq5fa7vY2bdrg+fPnSvee0ojdcG8Jb29vBAYGYvPmzWjXrh0AYM+ePZDJZOjVqxe++eYblW0+/vhjrF27FmFhYRg5ciTi4+Px7bff4ty5czhx4gRMTU0B5N0MrK2tMXbsWFhbW+PQoUOYNm0aUlJSMHfuXKV9Pnv2DKGhoejWrRt69uyJ7du3Y9KkSahVq5YiroLIZDJkZ2crLefv8+XuKnNzc7XdV0Xh5eWF3NxcbNiwAQMGDHitffXr1w/79u3D/v37UbVq1ULrJyUlqZSZmJiodFnMnDkTZmZmGD9+PDIzMxVN6jk5OQgJCUGzZs0wb948WFpaQgiBzp074/Dhwxg0aBDq1q2LP//8ExMmTMD9+/excOFCpX0fOnQIW7duxfDhw1GhQgV4e3trjDk3NxdPnjwBAGRnZ+Pq1auIiIhA5cqV0bRpU0W9M2fOICoqCr169ULFihVx69YtLFu2DMHBwbhy5YpKd92wYcPg6OiIadOm4fnz5wCATz75BNu3b8fw4cPh7++Pp0+f4vjx47h69Srq169fYIxRUVEAoFInP2no16+fxnPMd/nyZTRt2hTu7u4IDw+HlZUVtm7dii5duuDnn39G165dleqPGDEC5cqVQ0REBG7duoVFixZh+PDh+OmnnwAAixYtwogRI2BtbY3PP/8cAODs7AwASE9PR1BQEO7fv4+PP/4Ynp6eiIqKwuTJk5GQkIBFixYpHWvNmjXIyMjAkCFDIJVK4eDgoHW8iYmJaNmyJXJychT1Vq5cCQsLC61+Lvv378fNmzcRFhYGFxcXXL58GStXrsTly5dx8uRJSCQSdOvWDf/++y82b96MhQsXokKFCgAAR0dHtfu
Uy+W4cOECPvzwQ5V1AQEB2LdvH1JTU2FjY1NgXK/+/iiILr8/Ro0ahYULF2L69Olqk858Dx8+RJMmTZCeno6RI0eifPnyWLduHTp37ozt27erXCsF+fzzz7F+/XrMnj1b7e/sfJcvX0bz5s1ha2uLiRMnwtTUFCtWrEBwcDCOHj2Kxo0bq90uKysLISEhyMzMxIgRI+Di4oL79+9j9+7dSE5Ohp2dHQDgyy+/xNSpU9GzZ0989NFHePz4MZYsWYIWLVrg3LlzSr+j/P39YWFhgRMnTmh9ngappJu2qHjlN52fOXNGfPvtt8LGxkakp6cLIYTo0aOHaNmypRBCqHTDHTt2TAAQmzZtUtrf3r17Vcrz9/eyjz/+WFhaWoqMjAxFWVBQkAAg1q9fryjLzMwULi4u4r333iv0XPK3L+wzYMCAAvehazdHYmKicHR0FACEn5+f+OSTT8SPP/4okpOTVepq6oYTQohz584JAGLMmDEaj5nfDafuU61aNUW9/OZ3X19fle9gwIABAoAIDw9XKv/1118FADFr1iyl8u7duwuJRCKuX7+uKAMgjIyMxOXLlzXGm6+g76d69eri5s2bSnXVXTPR0dEq10f+9dusWTORk5OjVN/Ozk58+umnWsX2silTpggAKt0CXbt2FQDEs2fPtNpPq1atRK1atZSucblcLpo0aSKqVKmicg6tW7cWcrlcUT5mzBhhbGysdC0VdH3OnDlTWFlZiX///VepPDw8XBgbG4s7d+4IIf7rArK1tRWPHj0qUryjR48WAMSpU6cUZY8ePRJ2dnZadZup+243b94sAIi//vpLUaZLN9zjx48FAPHFF1+orFu6dKkAIK5du6ZxH/r4/fHyvvK7nGfMmCEAiLNnzwoh1HfD5f9Mjx07pihLTU0VPj4+wtvbu8Cu83wv/34OCwsT5ubm4sGDB0II9d1wXbp0EWZmZuLGjRuKsgcPHggbGxvRokULRVn+tocPHxZC/Pc7SlOX3q1bt4SxsbH48ssvlcovXrwoTExMVMqFEKJq1aqiXbt2Gs/R0LEb7i3Ss2dPvHjxArt370Zqaip2795dYBfctm3bYGdnhzZt2uDJkyeKT4MGDWBtbY3Dhw8r6r78F2dqaiqePHmC5s2bIz09HdeuXVPar7W1Nfr27atYNjMzQ0BAgFZPtMyfPx/79+9XfPK7RDZu3KhUPnHiRJ1+Lpo4Ozvjn3/+wSeffIJnz55h+fLl+OCDD+Dk5ISZM2eqdF1pkv/Xampqqlb1f/75Z6Xz2r9/P9asWaNSb8CAAQX+1T906FCl5T/++APGxsYYOXKkUvm4ceMghMCePXuUyoOCguDv769VvEBeC2Z+rHv27MGiRYsgk8nQrl07pS6nl+PNzs7G06dPUblyZdjb26vt0hg8eDCMjY2Vyuzt7XHq1CmtuiRe9vTpU5iYmKi0HqSkpACAxtaJfElJSTh06BB69uypuOafPHmCp0+fIiQkBHFxcbh//77SNkOGDIFEIlEsN2/eHLm5ubh9+3ahx9u2bRuaN2+OcuXKKf3/2Lp1a+Tm5uKvv/5Sqv/ee+8ptdLoEu8ff/yBd955BwEBAYrtHR0dFd24hXn5u83IyMCTJ08U3djadFep8+LFCwBQ+yBKfld4fp2CvPr7o6CPrr8/Ro0ahXLlymHGjBkF1vnjjz8QEBCg9ICNtbU1hgwZglu3bim6SbUxZcoU5OTkYPbs2WrX5+bmYt++fejSpQt8fX0V5a6urvjggw9w/PhxxbX+qvyWoz///BPp6elq6+zYsQNyuRw9e/ZUuhZdXFxQpUoVpXtDvvzrtjRjN9xbxNHREa1bt8aPP/6I9PR05Obmonv37mrrxsXFQSaTwcnJSe36R48eKf59+fJlTJkyBYcOHVL5n/DVfu6KFSsq3TCAvP+RLly4UGj8rz5xkj8GpmnTpoV2D70OV1dXLFu2DN999x3i4uLw559/Ys6cOZg2bRpcXV3x0UcfabWf/Gk
GtLkZA3lPquR3T2ji4+OjttzExEQxviff7du34ebmphJD9erVFeu12XdBrKyslMYChYaGolmzZmjYsCFmz56N+fPnA8i7sUVGRmLNmjW4f/++UtL56jVTUBxff/01BgwYAA8PDzRo0ADt27dH//79lW4Qush/SjE1NbXQp7OuX78OIQSmTp2KqVOnqq3z6NEjuLu7K5Y9PT2V1pcrVw4AVMYAqhMXF4cLFy4U2E318v+PgOrPS5d4b9++rbabplq1aoXGCeQlZjNmzMCWLVtU4lL33WojPwHLzMxUWZeRkaFUpyDF9cSanZ0dRo8ejYiICJw7d07xvb6soJ/py//f1axZU6vj+fr6ol+/fli5ciXCw8NV1j9+/Bjp6elqv6/q1atDLpfj7t27qFGjhsp6Hx8fjB07FgsWLMCmTZvQvHlzdO7cGX379lUkUnFxcRBCoEqVKmrjyx+e8TIhhMrv/dKGydJb5oMPPsDgwYORmJiIdu3aFXhTkMvlcHJywqZNm9Suz/+lnZycjKCgINja2uKLL75ApUqVYG5ujpiYGEyaNEllUOirrQP5dGmhKSkSiQRVq1ZF1apV0aFDB1SpUgWbNm3SOlm6dOkSAKBy5cp6jaugm4RUKi3ywPbC9q2LBg0awM7OTqn1Y8SIEVizZg1Gjx6NwMBA2NnZQSKRoFevXmoHEquLo2fPnmjevDl++eUX7Nu3D3PnzsWcOXOwY8cOjePfypcvj5ycHJUxLn5+fgCAixcvonnz5hrPKT/G8ePHIyQkRG2dV7/n17n25XI52rRpU2Crx6tj4F79eRUl3qLq2bMnoqKiMGHCBNStWxfW1taQy+UIDQ0t8gSgDg4OkEqlSEhIUFmXX1bY4/RJSUlaTftgYWGhSAy0lT92acaMGSrjx4rD559/jg0bNmDOnDno0qWLXvc9f/58DBw4EL/99hv27duHkSNHIjIyEidPnkTFihUhl8shkUiwZ88etde0uvFez549KzC5Ki2YLL1lunbtio8//hgnT55UDCxVp1KlSjhw4ACaNm2q8YZ55MgRPH36FDt27ECLFi0U5fHx8XqN29D4+vqiXLlyan95F2TDhg2QSCRo06ZNMUammZeXFw4cOKCSKOR3l3p5eRXLcXNzc5Um8Ny+fTsGDBigaGkC8loIdJmgEMhr9Rs2bBiGDRuGR48eoX79+vjyyy81Jkv5SVF8fDxq166tKO/UqRMiIyOxcePGQpOl/NYrU1NTlafqXkdBf31XqlQJaWlpRT6WLvF6eXkhLi5OpTw2NrbQ4zx79gwHDx7EjBkzMG3aNEW5uv3p0tJgZGSEWrVq4e+//1ZZd+rUKfj6+hbaYtutWzccPXq00GMNGDBA54lB81uXpk+frvZBEC8vL7U/v6L+f1epUiX07dsXK1asUGmxcnR0hKWlZYHHMzIygoeHh8b916pVC7Vq1cKUKVMQFRWFpk2bYvny5Zg1axYqVaoEIQR8fHy0elAlJycHd+/eRefOnXU6R0PDMUtvGWtrayxbtgzTp09XO/tsvp49eyI3NxczZ85UWZeTk6O4qeX/ZfHyX8dZWVn47rvv9Bu4GsHBwRBCFGsX3KlTpxRPX73s9OnTePr0qdZdE7Nnz8a+ffvw/vvvl+hfWO3bt0dubi6+/fZbpfKFCxdCIpEU+kRiURw+fBhpaWmoU6eOoszY2FilRWXJkiUq000UJDc3V6VLx8nJCW5ubmq7al4WGBgIACo33sDAQISGhuL777/Hr7/+qrJdVlYWxo8frzhWcHAwVqxYoTZhfnl8li6srKzUJow9e/ZEdHQ0/vzzT5V1ycnJyMnJ0bhfXeJt3749Tp48idOnTyutL6iV+WXqfh8AUNvaYmVlpYhfG927d8eZM2eUvrfY2FgcOnQIPXr0KHT74hqzlG/06NGwt7fHF198obKuffv2OH36NKKjoxVlz58/x8qVK+Ht7a3TuMB8U6ZMQXZ2tsrUK8bGxmjbti1+++03pak
YHj58qJiYuKCJcVNSUlSupVq1asHIyEjx/1W3bt1gbGyMGTNmqHzPQgiViX+vXLmCjIwMNGnSROdzNCRsWXoLafMIfFBQED7++GNERkbi/PnzaNu2LUxNTREXF4dt27Zh8eLF6N69O5o0aYJy5cphwIABGDlyJCQSCTZs2FAs3Wr79+/Hw4cPC61XqVIlxQ0RAP766y9FF9Djx4/x/PlzzJo1C0DeuKCXW8RetWHDBmzatAldu3ZFgwYNYGZmhqtXr2L16tUwNzfHZ599plQ/JycHGzduBJDXUnL79m3s3LkTFy5cQMuWLbFy5Uqtz3f79u1qm7TbtGmjeKRcV506dULLli3x+eef49atW6hTpw727duH3377DaNHj0alSpWKtN98MplMcf45OTmIjY3FsmXLYGFhoTS+omPHjtiwYQPs7Ozg7++P6OhoHDhwQOtZmFNTU1GxYkV0794dderUgbW1NQ4cOIAzZ84otVap4+vri5o1a+LAgQMqj6KvX78ebdu2Rbdu3dCpUye0atUKVlZWiIuLw5YtW5CQkKB4sGDp0qVo1qwZatWqhcGDB8PX1xcPHz5EdHQ07t27V6RXPDRo0ADLli3DrFmzULlyZTg5OeHdd9/FhAkTsHPnTnTs2BEDBw5EgwYN8Pz5c1y8eBHbt2/HrVu3Ch3fpm28EydOxIYNGxAaGopRo0Yppg7w8vIqdGyhra0tWrRoga+//hrZ2dlwd3fHvn371LY0548h+vzzz9GrVy+YmpqiU6dOiiTqVcOGDcOqVavQoUMHjB8/HqampliwYAGcnZ0xbtw4rX62xcnOzg6jRo1SO9A7PDxcMW3LyJEj4eDggHXr1iE+Ph4///xzkbrL81uX1q1bp7Ju1qxZ2L9/P5o1a4Zhw4bBxMQEK1asQGZmpsZ57Q4dOoThw4ejR48eqFq1KnJycrBhwwYYGxsr5h6rVKkSZs2ahcmTJ+PWrVvo0qULbGxsEB8fj19++QVDhgxR/FEB5P3etrS0LNEWdb1448/f0Rv18tQBmhQ0g/fKlStFgwYNhIWFhbCxsRG1atUSEydOVDy2KoQQJ06cEO+8846wsLAQbm5uYuLEieLPP/9UeiRViIJneB4wYIDw8vIq9FyK+uivpkfxIyIiNB7zwoULYsKECaJ+/frCwcFBmJiYCFdXV9GjRw8RExOjch4v79vS0lJ4e3uL9957T2zfvr3Qx4O1iffln6mmmXs1TWOQmpoqxowZI9zc3ISpqamoUqWKmDt3rtJj7ULkTR2gy6P5r34/EolEODg4iM6dOyseq8737NkzERYWJipUqCCsra1FSEiIuHbtmvDy8lL6/gq6fjMzM8WECRNEnTp1hI2NjbCyshJ16tQR3333nVaxLliwQFhbW6t9zD09PV3MmzdPNGrUSFhbWwszMzNRpUoVMWLECKWpFYQQ4saNG6J///7CxcVFmJqaCnd3d9GxY0elGcALOodXH9sWIm+qig4dOggbGxsBQGkagdTUVDF58mRRuXJlYWZmJipUqCCaNGki5s2bJ7KysoQQhc8erU28QuRd90FBQcLc3Fy4u7uLmTNnih9++EGrR/3v3bsnunbtKuzt7YWdnZ3o0aOHePDggdr/32bOnCnc3d2FkZGRVvu+e/eu6N69u7C1tRXW1taiY8eOIi4uTuM2xaGg32XPnj1TTLHw6ndw48YN0b17d2Fvby/Mzc1FQECA2L17t1bHK+j3c1xcnDA2Nlb7eyAmJkaEhIQIa2trYWlpKVq2bCmioqKU6rx6Dd68eVN8+OGHolKlSsLc3Fw4ODiIli1bigMHDqgc++effxbNmjUTVlZWwsrKSvj5+YlPP/1UxMbGKtVr3Lix2tnySxuJEKVgZC0RkR7JZDL4+vri66+/xqBBg0o6HKIy6fz586hfvz5iYmJQt27dkg7ntTBZIqK30pw5c7BmzRpcuXLltZ8aJCJV+U+3lsTraPSNyRIRERGRBvxzioiIiEgDJkt
EREREGjBZIiIiItKAyRIRERGRBpyUUg/kcjkePHgAGxubUv+yQCIioreFEAKpqalwc3PT+FQskyU9ePDgQaHv2iEiIiLDdPfuXVSsWLHA9UyW9CD/BY53794t8J07RAYr9SGQ9sprZHIygI3d8v7ddwdgYq66nbUzYFO0164QERmClJQUeHh4FPoiZiZLepDf9WZra8tkiUofW1sAr7zcNyMFkP6vS9lMAJUCASPjNx4aEdGbUNgQGg7wJiJlV3YCSwP+W97UHVhUM6+ciOgtxGSJiP5zZSewtT+QmqBcnpKQV86EiYjeQkyWiCiPPBfYOwmAujcg/a9sb3hePSKitwjHLBFRnttRQMoDDRUEkHI/r55P8zcWFpV+Qgjk5OQgN5eJNr1ZxsbGMDExee1pfZgsEVGeV5+Ie916RACysrKQkJCA9PT0kg6F3lKWlpZwdXWFmZlZkffBZImI8lhrOQ2AtvXorSeXyxEfHw9jY2O4ubnBzMyME/fSGyOEQFZWFh4/foz4+HhUqVJF48STmjBZIqI8Xk0AW7e8wdxqxy1J8tZ7NXnTkVEplZWVBblcDg8PD1haWpZ0OPQWsrCwgKmpKW7fvo2srCyYm6uZM04LTJaIKI+RMRA6J++pN0ignDD9rzUgdDbnWyKd6frX/KOUDDxKzdT5OE42UjjZFu1mSGVXUVuTXsZkiYj+498Z6Lke2DNRefoAW7e8RMm/c8nFRm+NTafuYPHBOJ23G9WqCsa0qVoMEdHbjskSESnz7wz4BgOz//e+wz7bgUrvskWJ3pg+jT3Rxl95bFxGdi66L48GAGz/JBDmpqrXo5ON9I3ER28fJktEpOrlxMirCRMleqOcbM1VutNSM7IV/07LzEE9z3IwNuJgcXozmCwRve1SE/M+L8t58d+/Ey8AJhaq29m45H2IitneSwmI2HlZsTxwzRm42pkjopM/Qmu6lmBkVJyCg4NRt25dLFq0qKRDYbJE9Nb7ew1wdHbB61eHqi8PCgdaTi6emIj+Z++lBAzdGKPyfGaiLANDN8ZgWd/6BpEwCSGQm5sLExPl22pWVlaR5vcp6nZUPPi6E6K3XcMwYMhR3T8Nw0o6cirjcuUCM3Zd0fQCHszYdQW5cnU1Xo9cLkdkZCR8fHxgYWGBOnXqYPv27Yr1R44cgUQiwZ49e9CgQQNIpVIcP34cwcHBGD58OEaPHo0KFSogJCQEAHD06FEEBARAKpXC1dUV4eHhyMnJUexP3XZCCEyfPh2enp6QSqVwc3PDyJEjNca9bNkyVKpUCWZmZqhWrRo2bNigtF4ikeD7779H165dYWlpiSpVqmDnzoLf+Xjt2jVYWlrixx9/VJRt3boVFhYWuHLlSoHb7dy5E1WqVIG5uTlatmyJdevWQSKRIDk5GQDw9OlT9O7dG+7u7rC0tEStWrWwefNmxfYDBw7E0aNHsXjxYkgkEkgkEty6dQsAcOnSJbRr1w7W1tZwdnZGv3798OTJE40/l9cmSplvv/1WeHl5CalUKgICAsSpU6cKrHvp0iXRrVs34eXlJQCIhQsXqtSJiIgQyPv/TvGpVq2aTjHJZDIBQMhkMl1Ph4iozHrx4oW4cuWKePHiRZG2j7r+RHhN2l3oJ+r6Ez1HLsSsWbOEn5+f2Lt3r7hx44ZYs2aNkEql4siRI0IIIQ4fPiwAiNq1a4t9+/aJ69evi6dPn4qgoCBhbW0tJkyYIK5duyauXbsm7t27JywtLcWwYcPE1atXxS+//CIqVKggIiIiFMdTt922bduEra2t+OOPP8Tt27fFqVOnxMqVKwuMeceOHcLU1FQsXbpUxMbGivnz5wtjY2Nx6NAhRR0AomLFiuLHH38UcXFxYuTIkcLa2lo8ffq0wP0uXbpU2NnZidu3b4u7d++KcuXKicWLFxdY/+bNm8LU1FSMHz9eXLt2TWzevFm4u7sLAOLZs2dCCCHu3bsn5s6dK86dOydu3LghvvnmG2FsbKy4pycnJ4v
AwEAxePBgkZCQIBISEkROTo549uyZcHR0FJMnTxZXr14VMTExok2bNqJly5YFxqPpOtT2/l2qkqUtW7YIMzMzsXr1anH58mUxePBgYW9vLx4+fKi2/unTp8X48ePF5s2bhYuLS4HJUo0aNRRfRkJCgnj8+LFOcTFZIiJS9brJ0q/n7mmVLP167p5e487IyBCWlpYiKipKqXzQoEGid+/eQoj/kqVff/1VqU5QUJCoV6+eUtlnn30mqlWrJuRyuaJs6dKlwtraWuTm5ha43fz580XVqlVFVlaWVnE3adJEDB48WKmsR48eon379oplAGLKlCmK5bS0NAFA7NmzR+O+O3ToIJo3by5atWol2rZtq3Qur5o0aZKoWbOmUtnnn3+ulCwVdIxx48YploOCgsSoUaOU6sycOVO0bdtWqezu3bsCgIiNjVW7X30kS6WqG27BggUYPHgwwsLC4O/vj+XLl8PS0hKrV69WW79Ro0aYO3cuevXqBam04EdKTUxM4OLiovhUqFChuE6BiIi05GSj3QST2tbT1vXr15Geno42bdrA2tpa8Vm/fj1u3LihVLdhw4Yq2zdo0EBp+erVqwgMDFR61UvTpk2RlpaGe/fuFbhdjx498OLFC/j6+mLw4MH45ZdflLruXnX16lU0bdpUqaxp06a4evWqUlnt2rUV/7aysoKtrS0ePXpU4H4BYPXq1bhw4QJiYmKwdu1aja+tiY2NRaNGjZTKAgIClJZzc3Mxc+ZM1KpVCw4ODrC2tsaff/6JO3fuaIzjn3/+weHDh5W+Fz8/PwBQ+W70qdQM8M7KysLZs2cxefJ/A0qNjIzQunVrREdHv9a+4+Li4ObmBnNzcwQGBiIyMhKenp4F1s/MzERm5n+zy6akpLzW8YmISFWAjwNc7cyRKMso6AU8cLEzR4CPg16Pm5aWBgD4/fff4e7urrTu1T+8raysVLZXV6aNV7fz8PBAbGwsDhw4gP3792PYsGGYO3cujh49ClNT0yIdA4DKthKJBHK5XOM2//zzD54/fw4jIyMkJCTA1fX1BtXPnTsXixcvxqJFi1CrVi1YWVlh9OjRyMrK0rhdWloaOnXqhDlz5qise92YNCk1LUtPnjxBbm4unJ2VJypzdnZGYmJiAVsVrnHjxli7di327t2LZcuWIT4+Hs2bN0dqamqB20RGRsLOzk7x8fDwKPLxiYhIPWMjCSI6+QNQvHBHIX85opO/3udb8vf3h1QqxZ07d1C5cmWlT1F+31evXh3R0dEQ4r+U78SJE7CxsUHFihU1bmthYYFOnTrhm2++wZEjRxAdHY2LFy8WeJwTJ04olZ04cQL+/v46x/yypKQkDBw4EJ9//jkGDhyIPn364MWLFwXWr1atGv7++2+lsjNnzqjE9X//93/o27cv6tSpA19fX/z7779KdczMzJCbm6tUVr9+fVy+fBne3t4q301Rk1RtlJpkqbi0a9cOPXr0QO3atRESEoI//vgDycnJ2Lp1a4HbTJ48GTKZTPG5e/fuG4yYiOjtEVrTFcv61oeTrXKLjoudebFNG2BjY4Px48djzJgxWLduHW7cuIGYmBgsWbIE69at03l/w4YNw927dzFixAhcu3YNv/32GyIiIjB27FiN7y1bu3YtfvjhB1y6dAk3b97Exo0bYWFhAS8vL7X1J0yYgLVr12LZsmWIi4vDggULsGPHDowfP17nmF/2ySefwMPDA1OmTMGCBQuQm5urcZ8ff/wxrl27hkmTJuHff//F1q1bsXbtWgBQdN9VqVIF+/fvR1RUFK5evYqPP/4YDx8+VNqPt7c3Tp06hVu3buHJkyeQy+X49NNPkZSUhN69e+PMmTO4ceMG/vzzT4SFhakkVvpUapKlChUqwNjYWOWH+fDhQ7i46G9iPHt7e1StWhXXr18vsI5UKoWtra3Sh4iIikdoTVccGBukWF4b1gjHJ71brPMrzZw5E1OnTkVkZCSqV6+O0NBQ/P777/Dx8dF5X+7
u7vjjjz9w+vRp1KlTB5988gkGDRqEKVOmaNzO3t4eq1atQtOmTVG7dm0cOHAAu3btQvny5dXW79KlCxYvXox58+ahRo0aWLFiBdasWYPg4GCdY863fv16/PHHH9iwYQNMTExgZWWFjRs3YtWqVdizZ4/abXx8fLB9+3bs2LEDtWvXxrJly/D5558D+K8bc8qUKahfvz5CQkIQHBwMFxcXdOnSRWk/48ePh7GxMfz9/eHo6Ig7d+7Azc0NJ06cQG5uLtq2bYtatWph9OjRsLe318sLcwsiES+3Cxq4xo0bIyAgAEuWLAGQNw+Gp6cnhg8fjvDwcI3bent7Y/To0Rg9erTGemlpafD09MT06dMLnc8iX0pKCuzs7CCTyZg4ERH9T0ZGBuLj4+Hj4wNzc+0HYT9KycCj1EylMm3fDffqa1LIMHz55ZdYvnx5ifTEaLoOtb1/l5oB3gAwduxYDBgwAA0bNkRAQAAWLVqE58+fIywsb3K8/v37w93dHZGRkQDyBoXnT5qVlZWF+/fv4/z587C2tkblypUB5GWunTp1gpeXFx48eICIiAgYGxujd+/eJXOSRERvuU2n7mDxwbgC1+cnTa8a1aoKxrSpWlxhkQ6+++47NGrUCOXLl8eJEycwd+5cDB8+vKTDKrJSlSy9//77ePz4MaZNm4bExETUrVsXe/fuVQz6vnPnjlIz3IMHD1CvXj3F8rx58zBv3jwEBQXhyJEjAIB79+6hd+/eePr0KRwdHdGsWTOcPHkSjo6Ob/TciIgoT5/Gnmjj71x4xVc42RQ8RQy9WXFxcZg1axaSkpLg6emJcePGKT3NXtqUqm44Q8VuOCIiVUXthiPSJ310w5WaAd5EREREJYHJEhEREZEGTJaIiIiINChVA7yJiOgtkJqY99GVjUveh0jPmCwREZFh+XsNcHS27tsFhQMtS+8TV2S4mCwREZFhaRgGVGunXJbzAlgdmvfvD/cCJhaq27FViYoJkyUiIjIs6rrTMlL++3dmGlAxADBSncWb9E/bN2CUZRzgTUREhu3KTmBpwH/Lm7oDi2rmlVOZdeTIEUgkEiQnJ5d0KEyWiIjIgF3ZCWztD6QmKJenJOSVG0jCJIRATk6OSnlWVlaR9lfU7ah4MFkiIiLDJM8F9k4CoO5FE/8r2xueV0/fh5bLERkZCR8fH1hYWKBOnTrYvn27Yn1+q8eePXvQoEEDSKVSHD9+HMHBwRg+fDhGjx6NChUqICQkBABw9OhRBAQEQCqVwtXVFeHh4UrJlbrthBCYPn06PD09IZVK4ebmVugL3pctW4ZKlSrBzMwM1apVw4YNG5TWSyQSfP/99+jatSssLS1RpUoV7NxZeMKZnp6ODz/8EDY2NvD09MTKlSs11k9NTUWfPn1gZWUFV1dXLFy4EMHBwUpdeRs2bEDDhg1hY2MDFxcXfPDBB3j06BEA4NatW2jZsiUAoFy5cpBIJBg4cCCAwr+b4sBkiYiIDNPtKCDlgYYKAki5n1dPzyIjI7F+/XosX74cly9fxpgxY9C3b18cPXpUqV54eDhmz56Nq1evonbt2gCAdevWwczMDCdOnMDy5ctx//59tG/fHo0aNcI///yDZcuW4YcffsCsWbOU9vXqdj///DMWLlyIFStWIC4uDr/++itq1apVYMy//PILRo0ahXHjxuHSpUv4+OOPERYWhsOHDyvVmzFjBnr27IkLFy6gffv26NOnD5KSkjT+PObPn4+GDRvi3LlzGDZsGIYOHYrY2NgC648dOxYnTpzAzp07sX//fhw7dgwxMTFKdbKzszFz5kz8888/+PXXX3Hr1i1FQuTh4YGff/4ZABAbG4uEhAQsXrwYgPbfjV4Jem0ymUwAEDKZrKRDISIyGC9evBBXrlwRL168KNoOLmwTIsK28M+FbXqNOyMjQ1haWoqoqCil8kGDBonevXsLIYQ4fPiwACB+/fVXpTpBQUGiXr16SmWfffaZqFatmpDL5YqypUuXCmt
ra5Gbm1vgdvPnzxdVq1YVWVlZWsXdpEkTMXjwYKWyHj16iPbt2yuWAYgpU6YoltPS0gQAsWfPngL36+XlJfr27atYlsvlwsnJSSxbtkxt/ZSUFGFqaiq2bfvve0lOThaWlpZi1KhRBR7nzJkzAoBITU0VQvz3M3727Jmijjbfzas0XYfa3r/ZskRERIbJ2lm/9bR0/fp1pKeno02bNrC2tlZ81q9fjxs3bijVbdiwocr2DRo0UFq+evUqAgMDIZFIFGVNmzZFWloa7t27V+B2PXr0wIsXL+Dr64vBgwfjl19+UTsu6uXjNG3aVKmsadOmuHr1qlJZfgsYAFhZWcHW1lbR/VWQl7eRSCRwcXEpcJubN28iOzsbAQH/Dcq3s7NDtWrVlOqdPXsWnTp1gqenJ2xsbBAUFAQAuHPnToFx6PLd6BOnDiAiIsPk1QSwdcsbzK123JIkb71XE70eNi0tDQDw+++/w93dXWmdVCpVWrayslLZXl2ZNl7dzsPDA7GxsThw4AD279+PYcOGYe7cuTh69ChMTU2LdAwAKttKJBLI5XK9b6PJ8+fPERISgpCQEGzatAmOjo64c+cOQkJCNA5u1+W70Se2LBERkWEyMgZC5/xvQfLKyv8th87W+3xL/v7+kEqluHPnDipXrqz08fDw0Hl/1atXR3R0NIT4L+E7ceIEbGxsULFiRY3bWlhYoFOnTvjmm29w5MgRREdH4+LFiwUe58SJE0plJ06cgL+/v84xvw5fX1+YmprizJkzijKZTIZ///1XsXzt2jU8ffoUs2fPRvPmzeHn56fSUmVmZgYAyM39bwC/vr8bbbFliYiIDJd/Z6DnemDPROXpA2zd8hIl/856P6SNjQ3Gjx+PMWPGQC6Xo1mzZpDJZDhx4gRsbW0xYMAAnfY3bNgwLFq0CCNGjMDw4cMRGxuLiIgIjB07FkZGBbdZrF27Frm5uWjcuDEsLS2xceNGWFhYwMvLS239CRMmoGfPnqhXrx5at26NXbt2YceOHThw4IBO8b4uGxsbDBgwABMmTICDgwOcnJwQEREBIyMjRVekp6cnzMzMsGTJEnzyySe4dOkSZs6cqbQfLy8vSCQS7N69G+3bt4eFhYXevxttsWWJiIgMm39n4NPT/y332Q6MvlgsiVK+mTNnYurUqYiMjET16tURGhqK33//HT4+Pjrvy93dHX/88QdOnz6NOnXq4JNPPsGgQYMwZcoUjdvZ29tj1apVaNq0KWrXro0DBw5g165dKF++vNr6Xbp0weLFizFv3jzUqFEDK1aswJo1axAcHKxzzK9rwYIFCAwMRMeOHdG6dWs0bdoU1atXh7m5OQDA0dERa9euxbZt2+Dv74/Zs2dj3rx5Svtwd3fHjBkzEB4eDmdnZwwfPhyAfr8bbUnEy+2CVCQpKSmws7ODTCaDra1tSYdDRGQQMjIyEB8fDx8fH8VNUiupiXmfl2n7bji+H84gPX/+HO7u7pg/fz4GDRr0Ro+t6TrU9v7NbjgiIjIsf68Bjs4ueH1+0vSqoHCg5eTiiYl0cu7cOVy7dg0BAQGQyWT44osvAAD/93//V8KRFQ2TJSIiMiwNw4Bq7XTfjq1KBmXevHmIjY2FmZkZGjRogGPHjqFChQolHVaRMFkq7dQ1V2uDzdVEZKj4+6nUq1evHs6ePVvSYegNk6XSrrDm6oKwuZqIiEgrTJZKO3XN1VlpwNoOef9uNwdwb6Q6Dwn/aiOiN4TPEVFJ0sf1x2SptHu1ufrKzrz5SPLtmfS/+UjmFOtjtkREr8qf9Tk9PR0WFmqeXiN6A9LT0wGozkKuCyZLZcmVncDW/lB5LUBKQl55z/VMmIjojTE2Noa9vb1iZmZLS0ul96MRFSchBNLT0/Ho0SPY29vD2LjoM70zWSor5LnA3klQ//4kAUAC7A0H/Dro/dUAREQFcXHJa/ku7EWtRMXF3t5ecR0WFZOlsuJ2FJDyQEMFAaTcz6vn0/yNhUVEbzeJRAJ
XV1c4OTkhOzu7pMOht4ypqelrtSjlY7JUVqQ91G89IiI9MjY21stNi6gk8N1wZYW1s37rEREREQAmS2WHV5O8p95Q0OBJCWDrnlePiIiItMZkqawwMs6bHgCAasL0v+XQ2RzcTUREpCMmS2WJf+e86QFenXDS1o3TBhARERURB3iXNf6dAd9gYLZH3nKf7UCld9miREREVERMlko7dS/SzXnx37+l1kDiRdXt+KJKIiIirTBZKu0Ke5Hu6lD15XyRLhERkVaYLJV26l6kqw22KhEREWmFyVJpx+40IiKiYsWn4YiIiIg0YLJEREREpAGTJSIiIiINmCwRERERacBkiYiIiEgDJktEREREGjBZIiIiItKg1CVLS5cuhbe3N8zNzdG4cWOcPn26wLqXL1/Ge++9B29vb0gkEixatOi190lERERvl1KVLP30008YO3YsIiIiEBMTgzp16iAkJASPHj1SWz89PR2+vr6YPXs2XFzUT9yo6z6JiIjo7SIRQoiSDkJbjRs3RqNGjfDtt98CAORyOTw8PDBixAiEh4dr3Nbb2xujR4/G6NGjX3ufmZmZyMzMVCynpKTAw8MDMpkMtra2r3GGRERE9KakpKTAzs6u0Pt3qWlZysrKwtmzZ9G6dWtFmZGREVq3bo3o6Og3us/IyEjY2dkpPh4eHkU6PhERERm+UpMsPXnyBLm5uXB2dlYqd3Z2RmJi4hvd5+TJkyGTyRSfu3fvFun4REREZPh0epGuXC7H0aNHcezYMdy+fRvp6elwdHREvXr10Lp167emhUUqlUIqlZZ0GERERPQGaNWy9OLFC8yaNQseHh5o37499uzZg+TkZBgbG+P69euIiIiAj48P2rdvj5MnTxZLoBUqVICxsTEePnyoVP7w4cMCB2+XxD6JiIiobNEqWapatSouXLiAVatWISUlBdHR0fj555+xceNG/PHHH7hz5w5u3LiB5s2bo1evXli1apXeAzUzM0ODBg1w8OBBRZlcLsfBgwcRGBhoMPskIiKiskWrbrh9+/ahevXqGut4eXlh8uTJGD9+PO7cuaOX4F41duxYDBgwAA0bNkRAQAAWLVqE58+fIywsDADQv39/uLu7IzIyEkDeAO4rV64o/n3//n2cP38e1tbWqFy5slb7JCIiorebVslSYYnSy0xNTVGpUqUiB6TJ+++/j8ePH2PatGlITExE3bp1sXfvXsUA7Tt37sDI6L/GsgcPHqBevXqK5Xnz5mHevHkICgrCkSNHtNonERERvd2KNM/SsWPHsGLFCty4cQPbt2+Hu7s7NmzYAB8fHzRr1qw44jRo2s7TQERERIaj2OZZ+vnnnxESEgILCwucO3dOMTmjTCbDV199VfSIiYiIiAyQzsnSrFmzsHz5cqxatQqmpqaK8qZNmyImJkavwRERERGVNJ2TpdjYWLRo0UKl3M7ODsnJyfqIiYiIiMhg6Jwsubi44Pr16yrlx48fh6+vr16CIiIiIjIUOidLgwcPxqhRo3Dq1ClIJBI8ePAAmzZtwvjx4zF06NDiiJGIiIioxOj0uhMACA8Ph1wuR6tWrZCeno4WLVpAKpVi/PjxGDFiRHHESERERFRiijR1AJA3yeP169eRlpYGf39/WFtb6zu2UoNTBxAREZU+2t6/dW5ZymdmZgZ/f/+ibk5ERERUKuicLLVs2RISiaTA9YcOHXqtgIiIiIgMic7JUt26dZWWs7Ozcf78eVy6dAkDBgzQV1xEREREBkHnZGnhwoVqy6dPn460tLTXDoiIiIjIkOg8dUBB+vbti9WrV+trd0REREQGQW/JUnR0NMzNzfW1OyIiIiKDoHM3XLdu3ZSWhRBISEjA33//jalTp+otMCIiIiJDoHOyZGdnp7RsZGSEatWq4YsvvkDbtm31FhgRERGRIdA5WVqzZk1xxEFERERkkPQ2ZomIiIioLNKqZalcuXIaJ6J8WVJS0msFRERERGRItEqWFi1aVMxhEBERERkmrZI
lzsxNREREb6siv0gXADIyMpCVlaVUpumtvURERESljc4DvJ8/f47hw4fDyckJVlZWKFeunNKHiIiIqCzROVmaOHEiDh06hGXLlkEqleL777/HjBkz4ObmhvXr1xdHjEREREQlRuduuF27dmH9+vUIDg5GWFgYmjdvjsqVK8PLywubNm1Cnz59iiNOIiIiohKhc8tSUlISfH19AeSNT8qfKqBZs2b466+/9BsdERERUQnTOVny9fVFfHw8AMDPzw9bt24FkNfiZG9vr9fgiIiIiEqazslSWFgY/vnnHwBAeHg4li5dCnNzc4wZMwYTJkzQe4BEREREJUkihBDaVBw/fjw++ugj+Pn5KZXfvn0bZ8+eReXKlVG7du1iCdLQpaSkwM7ODjKZjFMnEBERlRLa3r+1TpaqVKmCmzdvonHjxvjoo4/w/vvvw8rKSm8Bl2ZMloiIiEofbe/fWnfDxcXF4fDhw6hatSpGjRoFFxcXfPjhh4iKitJLwERERESGSKcxSy1atMDatWuRmJiIxYsXIy4uDs2aNUP16tUxb948PHz4sLjiJCIiIioRWnfDFeT69etYs2YNli9fjrS0NGRmZuortlKD3XBERESlj9674dR5/vw5jh07hqNHj+LZs2eK+ZeIiIiIyooiJUvHjx/Hhx9+CFdXV4wcORJVq1bFsWPHcPXqVX3HR0RERFSitH7dSUJCAtatW4e1a9fi33//xTvvvIMFCxagV69esLa2Ls4YiYiIiEqM1smSh4cHypcvj379+mHQoEGoXr16ccZFREREZBC0Tpa2bt2Kzp07w8RE53fvEhEREZVaWmc+3bp1K844iIiIiAzSaz0NR0RERFTWsU+NqIx4lJKBR6m6z3PmZCOFk615MURERFQ2MFkiKiM2nbqDxQfjdN5uVKsqGNOmajFERERUNjBZMgDqWgRy5QKXH8jwLD0b5SxNUcPNDsZGEqU6bBGgl/Vp7Ik2/s5KZc8zc/D+ypMAgOmd/FHPs5za64iIiAqmc7L0/PlzzJ49GwcPHsSjR48gl8uV1t+8eVNvwb0t2CJA+uBka66UPO+9lICInZcVy9N3XYGrnTkiOvkjtKZrSYRIRFQq6ZwsffTRRzh69Cj69esHV1dXSCSSwjfSo6VLl2Lu3LlITExEnTp1sGTJEgQEBBRYf9u2bZg6dSpu3bqFKlWqYM6cOWjfvr1i/cCBA7Fu3TqlbUJCQrB3795iO4dXvdwiEHX9Cb7ac63Aup+180OTyhUAsEWACrb3UgKGbozBqy9+TJRlYOjGGCzrW58JExGRlnROlvbs2YPff/8dTZs2LY54NPrpp58wduxYLF++HI0bN8aiRYsQEhKC2NhYODk5qdSPiopC7969ERkZiY4dO+LHH39Ely5dEBMTg5o1ayrqhYaGYs2aNYplqfTNJiH5LQK5coHB6/8usJ4EwJqoWxjU3FelK4UoX65cYMauKyqJEgAI5F1HM3ZdQRt/F15HRERa0HnqgHLlysHBwaE4YinUggULMHjwYISFhcHf3x/Lly+HpaUlVq9erbb+4sWLERoaigkTJqB69eqYOXMm6tevj2+//VapnlQqhYuLi+JTrly5N3E6Kk7HJyFBllHgegEgQZaB0/FJby4oKnV4HRER6ZfOydLMmTMxbdo0pKenF0c8BcrKysLZs2fRunVrRZmRkRFat26N6OhotdtER0cr1QfyutherX/kyBE4OTmhWrVqGDp0KJ4+faoxlszMTKSkpCh99OFRasE3uKLUo7cTryMiIv3SuRtu/vz5uHHjBpydneHt7Q1TU1Ol9TExMXoL7mVPnjxBbm4unJ2Vn/ZxdnbGtWvqx/gkJiaqrZ+YmKhYDg0NRbdu3eDj44MbN27gs88+Q7t27RAdHQ1jY2O1+42MjMSMGTNe84xUOdlo92SbtvXo7cTriIhIv3ROlrp06VIMYZScXr16Kf5dq1Yt1K5dG5UqVcKRI0fQqlUrtdtMnjwZY8eOVSy
npKTAw8PjtWMJ8HGAq505EmUZasebSAC42JkjwKdkukGpdOB1RESkXzonSxEREcURR6EqVKgAY2NjPHz4UKn84cOHcHFxUbuNi4uLTvUBwNfXFxUqVMD169cLTJakUmmxDAI3NpIgopM/hm6MgQRQutHlD8ON6OTPQbmkEa8jIiL9KvK74c6ePYuNGzdi48aNOHfunD5jUsvMzAwNGjTAwYMHFWVyuRwHDx5EYGCg2m0CAwOV6gPA/v37C6wPAPfu3cPTp0/h6vrmHqt+lJKBS/dluHRfhorlLDG5nR8crMyU6pS3NsPkdn6oWM5SUfdRCseckHqhNV2xrG99ONkqJ/UuduacNoCISEcSIYS6lvoCPXr0CL169cKRI0dgb28PAEhOTkbLli2xZcsWODo6FkecAPKmDhgwYABWrFiBgIAALFq0CFu3bsW1a9fg7OyM/v37w93dHZGRkQDypg4ICgrC7Nmz0aFDB2zZsgVfffWVYuqAtLQ0zJgxA++99x5cXFxw48YNTJw4Eampqbh48aLWrUcpKSmws7ODTCaDra2tzue1cP+/nJSSXpu6meC1ncGbM8ET0dtI2/u3zt1wI0aMQGpqKi5fvozq1asDAK5cuYIBAwZg5MiR2Lx5c9GjLsT777+Px48fY9q0aUhMTETdunWxd+9exSDuO3fuwMjov8ayJk2a4Mcff8SUKVPw2WefoUqVKvj1118VcywZGxvjwoULWLduHZKTk+Hm5oa2bdti5syZb3SuJXWvqdAGJ6WklxU2E/z0XVfUljPpJiLSTOeWJTs7Oxw4cACNGjVSKj99+jTatm2L5ORkfcZXKrxuyxKRPqhrWdIGW5aI6G1VbC1LcrlcZboAADA1NVV5TxwRvTmvvhuOiIj0Q+cB3u+++y5GjRqFBw8eKMru37+PMWPGFPj0GBEREVFppXOy9O233yIlJQXe3t6oVKkSKlWqBB8fH6SkpGDJkiXFESMRERFRidG5G87DwwMxMTE4cOCAYubs6tWrq7xWhIiIiKgs0HmAN6niAG8iIjXkucDtKCDtIWDtDHg1AYzUv0aKqCTodYD3N998gyFDhsDc3BzffPONxrojR47ULVIiIir9UhPzPvni/wKivgGeP/6vzMoRaDIS8GnxX5mNS96HyIBp1bLk4+ODv//+G+XLl4ePj0/BO5NIcPPmTb0GWBqwZYmI3nqHI4Gjs3XfLigcaDlZ//FQ2VDMrZN6bVmKj49X+28iIiIAQMMwoFq7vJvb5veVW5ReZeUE9N6Sd9NjqxLlM+DWSZ0HeH/xxRcYP348LC0tlcpfvHiBuXPnYtq0aXoLjoiISon8G1b8Mc2JEgA8fwRkpwM+zd9MbFQ6/L2m8NbJ54+B/VOVy95A66TOA7yNjY2RkJAAJycnpfKnT5/CyckJubm5eg2wNGA3HBHR/1zcDvw8qPB67/0A1Ope/PFQ6ZHfslSU1skitixpe//WeZ4lIQQkEolK+T///AMHBwddd0dERGWJtZbvudS2Hr09bFwAt7p5rY7atk661X0jXblad8OVK1cOEokEEokEVatWVUqYcnNzkZaWhk8++aRYgiQiolLCqwlg6wakJABQ13EhyVvv1eRNR0alRdpD/dbTA62TpUWLFkEIgQ8//BAzZsyAnZ2dYp2ZmRm8vb0RGBhYLEESEZGBe3lwbuOhquNKFATQ+BMg8WLeIqcOoHz511Bmqnb1M1OBB+ffyDWk85ilo0ePokmTJmpfpvu24pglInrrceoAel0lcA1pe/9+rRm8MzIykJWVpVT2NiYLTJaI6K336mPfQN5A3cQLQPpTwLI84FJbdY4ctixRvpevofi/NLROAmgz87/pA97AAG+dpw5IT0/HxIkTsXXrVjx9+lRl/dv4NBwR0VuvoBtWxQZvPhYqnV6+htzqAuW8gb2TgJQH/9WxdQdCZwP+nd9oaDonSxMmTMDhw4exbNky9OvXD0uXLsX9+/exYsUKzJ5dhOYzIiI
iolf5dwb8OhjE+wV17obz9PTE+vXrERwcDFtbW8TExKBy5crYsGEDNm/ejD/++KO4YjVY7IYjIiIqfYptnqWkpCT4+voCyBuflJSUBABo1qwZ/vrrryKGS0RERGSYdE6WfH19Fe+H8/Pzw9atWwEAu3btgr29vV6DIyIiIippOidLYWFh+OeffwAA4eHhWLp0KczNzTFmzBhMmDBB7wESERERlaTXmjoAAG7fvo2zZ8+icuXKqF27tr7iKlU4ZomIiKj0KbYxS+vXr0dmZqZi2cvLC926dYOfnx/Wr19ftGiJiIiIDJTOLUvGxsZISEiAk5OTUvnTp0/h5OT0Vs6zxJYlIiKi0qfYWpaEEEov0c137949pffFEREREZUFWk9KWa9ePUgkEkgkErRq1QomJv9tmpubi/j4eISGhhZLkEREREQlRetkqUuXLgCA8+fPIyQkBNbW1op1ZmZm8Pb2xnvvvaf3AImIiIhKktbJUkREBADA29sbvXr1glQqLbagiIiIiAyFzmOW/P39cf78eZXyU6dO4e+//9ZHTEREREQGQ+dk6dNPP8Xdu3dVyu/fv49PP/1UL0ERERERGQqdk6UrV66gfv36KuX16tXDlStX9BIUERERkaHQOVmSSqV4+PChSnlCQoLSE3JEREREZYHOyVLbtm0xefJkyGQyRVlycjI+++wztGnTRq/BEREREZU0nZuC5s2bhxYtWsDLywv16tUDkDedgLOzMzZs2KD3AImIiIhKks7Jkru7Oy5cuIBNmzbhn3/+gYWFBcLCwtC7d2+YmpoWR4xEREREJaZIg4ysrKwwZMgQfcdCREREZHB0HrMEABs2bECzZs3g5uaG27dvAwAWLlyI3377Ta/BEREREZU0nZOlZcuWYezYsWjXrh2ePXuG3NxcAEC5cuWwaNEifcdHREREVKJ0TpaWLFmCVatW4fPPP1eaKqBhw4a4ePGiXoMjIiIiKmk6J0vx8fGKp+BeJpVK8fz5c70ERURERGQodE6WfHx81L4bbu/evahevbo+YiIiIiIyGDo/DTd27Fh8+umnyMjIgBACp0+fxubNmxEZGYnvv/++OGIkIiIiKjE6J0sfffQRLCwsMGXKFKSnp+ODDz6Am5sbFi9ejF69ehVHjEREVEJy5QKn45PwKDUDTjbmCPBxgLGRpKTDolKmtF9HEiGEKOrG6enpSEtLg5OTkz5jKnVSUlJgZ2cHmUwGW1vbkg6HiEgv9l5KwIxdV5Agy1CUudqZI6KTP0JrupZgZFSaGPJ1pO39u0jzLAHAo0ePcPbsWcTGxuLx48dF3Y3Oli5dCm9vb5ibm6Nx48Y4ffq0xvrbtm2Dn58fzM3NUatWLfzxxx9K64UQmDZtGlxdXWFhYYHWrVsjLi6uOE+BiMjg7b2UgKEbY5RucACQKMvA0I0x2HspoYQio9KkrFxHOidLqamp6NevH9zc3BAUFISgoCC4ubmhb9++Si/XLQ4//fQTxo4di4iICMTExKBOnToICQnBo0eP1NaPiopC7969MWjQIJw7dw5dunRBly5dcOnSJUWdr7/+Gt988w2WL1+OU6dOwcrKCiEhIcjIyFC7TyKisi5XLjBj1xWo63bIL5ux6wpy5UXumKC3QFm6jnTuhnv//fdx7tw5LFmyBIGBgQCA6OhojBo1CnXr1sWWLVuKJVAAaNy4MRo1aoRvv/0WACCXy+Hh4YERI0YgPDxcbazPnz/H7t27FWXvvPMO6tati+XLl0MIATc3N4wbNw7jx48HAMhkMjg7O2Pt2rUFjsHKzMxEZmamYjklJQUeHh64e/euohnP1NQUFhYWePHiBbKzsxV1pVKpYpqF/Ak9AcDc3BxmZmZIS0uDXC5XlFtaWsLExAQpKSlKMVhZWcHIyAipqalK5TY2NpDL5SrTONja2iInJwfp6emKMiMjI1hbWyMrK0spOTQ2NoaVlZXKefKceE48p7J9ThfuPMWl249x8+kLfH/yAQrzSTN
PeNqZorKTFfxc7QzynMri92To53RbloPrj9Nx+c5jra6jT1tWgq+DBTxsjeDnavdGz+nBgwdwd3cvfBiN0JGlpaU4duyYSvlff/0lLC0tdd2d1jIzM4WxsbH45ZdflMr79+8vOnfurHYbDw8PsXDhQqWyadOmidq1awshhLhx44YAIM6dO6dUp0WLFmLkyJEFxhIRESGQlxgX+Bk0aJAQQohBgwYplUdERAghhGjbtq1S+apVq4QQQvj7+yuV7927VwghhI2NjVL5pUuXhEwmUzmuTCYTly5dUiqzsbERQgixd+9epXJ/f38hhBCrVq1SKm/btq3a8+Q58Zx4TmX7nFqMWCAACMvqLYTXpN2Ffir8X7jBn1NZ/J4M/ZxCpm0UHqO3an0deU3aLZx6zCiRc6pWrZri562Jzi1Lnp6e+P3331GrVi2l8gsXLqB9+/a4d++eLrvTWn72FxUVpWjRAoCJEyfi6NGjOHXqlMo2ZmZmWLduHXr37q0o++677zBjxgw8fPgQUVFRaNq0KR48eABX1/8GmfXs2RMSiQQ//fST2ljYssRz4jnxnMriObFlqXR8T4Z+TmWxZUnnqQOmTJmCsWPHYsOGDXBxcQEAJCYmYsKECZg6daquuyuV8n/4r7K1tVX5YVtYWMDCwkKlrpWVldp9W1tbqy0v6EtUV25sbKy23MTERG25mZkZzMzMVMoLOk+eE8+poHKeU+k+p9qe5VHbszxy5QK/X01CoiwD6v6algBwsTPHhPY11T7+bUjnVBa/J0M/p1q2QC0PB3Su667VdTS2TbUCpxEoqXN6VZFepHvy5El4enqicuXKqFy5Mjw9PREVFYUVK1agfv36io8+VahQAcbGxnj48KFS+cOHDxVJ26tcXFw01s//ry77JCIq64yNJIjo5A8g74b2svzliE7+pWqeHHrzytJ1pHPLUpcuXYohjMKZmZmhQYMGOHjwoCIGuVyOgwcPYvjw4Wq3CQwMxMGDBzF69GhF2f79+xXdeD4+PnBxccHBgwdRt25dAHldaqdOncLQoUOL83SIiAxaaE1XLOtbX2V+HBcDmR+HSoeych291qSUb9pPP/2EAQMGYMWKFQgICMCiRYuwdetWXLt2Dc7Ozujfvz/c3d0RGRkJIG/qgKCgIMyePRsdOnTAli1b8NVXXyEmJgY1a9YEAMyZMwezZ8/GunXr4OPjg6lTp+LChQu4cuUKzM3NtYqLk1ISUVlV2mdeJsNgqNeRtvdvnVuWDh8+jJYtW6pdt2LFCnz88ce67lJr77//Ph4/foxp06YhMTERdevWxd69e+Hs7AwAuHPnDoyM/utZbNKkCX788UdMmTIFn332GapUqYJff/1VkSgBeQPEnz9/jiFDhiA5ORnNmjXD3r17tU6UiIjKMmMjCQIrlS/pMKiUK+3Xkc4tS1KpFCNHjsRXX30FU1NTAMCTJ08QFhaG48eP49mzZ8USqCFjyxIREVHpU2yvOzl8+DB++eUXNGrUCFeuXMHvv/+OmjVrIiUlBefPn3+dmImIiIgMjs7JUpMmTXD+/HnUrFkT9evXR9euXTFmzBgcOXIEXl5exREjERERUYkp0ot0//33X/z999+oWLEiTExMEBsbqzRBFhEREVFZoXOyNHv2bAQGBqJNmza4dOkSTp8+jXPnzqF27dqIjo4ujhiJiIiISozOydLixYvx66+/YsmSJTA3N0fNmjVx+vRpdOvWDcHBwcUQIhEREVHJ0XnqgIsXL6JChQpKZaamppg7dy46duyot8CIiIiIDIHOLUuvJkovq169+msFQ0RERGRotE6WLC0t8fjxY8Vyhw4dkJCQoFh++PAhXF1Lx7TlRERERNrSOlnKyMjAy/NX/vXXX3jx4oVSnVL05hQiIiIirRRp6oCCSCQl/54XIiIiIn3Sa7JEREREVNZonSxJJBKllqNXl4mIiIjKIq2nDhBCoGrVqooEKS0tDfXq1YORkZFiPREREVFZo3WytGb
NmuKMg4iIiMggaZ0sDRgwoDjjICIiIjJIWo1ZYhcbERERva20SpZq1KiBLVu2ICsrS2O9uLg4DB06FLNnz9ZLcEREREQlTatuuCVLlmDSpEkYNmwY2rRpg4YNG8LNzQ3m5uZ49uwZrly5guPHj+Py5csYPnw4hg4dWtxxExEREb0REqFDH9vx48fx008/4dixY7h9+zZevHiBChUqoF69eggJCUGfPn1Qrly54ozXIKWkpMDOzg4ymQy2trYlHQ4RERFpQdv7t07JEqnHZImIiKj00fb+zRm8iYiIiDRgskRERESkAZMlIiIiIg2YLBERERFpwGSJiIiISIMiJUs3btzAlClT0Lt3bzx69AgAsGfPHly+fFmvwRERERGVNJ2TpaNHj6JWrVo4deoUduzYgbS0NADAP//8g4iICL0HSERERFSSdE6WwsPDMWvWLOzfvx9mZmaK8nfffRcnT57Ua3BEREREJU3nZOnixYvo2rWrSrmTkxOePHmil6CIiIiIDIXOyZK9vT0SEhJUys+dOwd3d3e9BEVERERkKHROlnr16oVJkyYhMTEREokEcrkcJ06cwPjx49G/f//iiJGIiIioxOicLH311Vfw8/ODh4cH0tLS4O/vjxYtWqBJkyaYMmVKccRIREREVGKK/CLdO3fu4NKlS0hLS0O9evVQpUoVfcdWavBFukRERKWPtvdvk6IewNPTE56enkXdnIiIiKhU0DlZ+vDDDzWuX716dZGDISIiIjI0OidLz549U1rOzs7GpUuXkJycjHfffVdvgREREREZAp2TpV9++UWlTC6XY+jQoahUqZJegiIiIiIyFHp5ka6RkRHGjh2LhQsX6mN3RERERAZDL8kSkPdy3ZycHH3tjoiIiMgg6NwNN3bsWKVlIQQSEhLw+++/Y8CAAXoLjIiIiMgQ6JwsnTt3TmnZyMgIjo6OmD9/fqFPyhERERGVNjonS4cPHy6OOIiIiIgMkt7GLBERERGVRVq1LNWrVw8SiUSrHcbExLxWQAVJSkrCiBEjsGvXLhgZGeG9997D4sWLYW1tXeA2GRkZGDduHLZs2YLMzEyEhITgu+++g7Ozs6KOuvPavHkzevXqVSznQURERKWLVslSly5dijmMwvXp0wcJCQnYv38/srOzERYWhiFDhuDHH38scJsxY8bg999/x7Zt22BnZ4fhw4ejW7duOHHihFK9NWvWIDQ0VLFsb29fXKdBREREpUyRX6T7Jl29ehX+/v44c+YMGjZsCADYu3cv2rdvj3v37sHNzU1lG5lMBkdHR/z444/o3r07AODatWuoXr06oqOj8c477wDIa1n65ZdfXish5It0iYiISh9t79+lYsxSdHQ07O3tFYkSALRu3RpGRkY4deqU2m3Onj2L7OxstG7dWlHm5+cHT09PREdHK9X99NNPUaFCBQQEBGD16tUoLH/MzMxESkqK0oeIiIjKJp2fhsvNzcXChQuxdetW3LlzB1lZWUrrk5KS9BZcvsTERDg5OSmVmZiYwMHBAYmJiQVuY2ZmptKl5uzsrLTNF198gXfffReWlpbYt28fhg0bhrS0NIwcObLAeCIjIzFjxoyinxARERGVGjq3LM2YMQMLFizA+++/D5lMhrFjx6Jbt24wMjLC9OnTddpXeHg4JBKJxs+1a9d0DVEnU6dORdOmTVGvXj1MmjQJEydOxNy5czVuM3nyZMhkMsXn7t27xRojERERlRydW5Y2bdqEVatWoUOHDpg+fTp69+6NSpUqoXbt2jh58qTGFplXjRs3DgMHDtRYx9fXFy4uLnj06JFSeU5ODpKSkuDi4qJ2OxcXF2RlZSE5OVmpdenhw4cFbgMAjRs3xsyZM5GZmQmpVKq2jlQqLXAdERERlS06J0uJiYmoVasWAMDa2hoymQwA0LFjR0ydOlWnfTk6OsLR0bHQeoGBgUhOTsbZs2fRoEEDAMChQ4cgl8vRuHFjtds0aNAApqamOHjwIN577z0AQGx
sLO7cuYPAwMACj3X+/HmUK1eOyRAREREBKEI3XMWKFZGQkAAAqFSpEvbt2wcAOHPmTLElGNWrV0doaCgGDx6M06dP48SJExg+fDh69eqleBLu/v378PPzw+nTpwEAdnZ2GDRoEMaOHYvDhw/j7NmzCAsLQ2BgoOJJuF27duH777/HpUuXcP36dSxbtgxfffUVRowYUSznQURERKWPzi1LXbt2xcGDB9G4cWOMGDECffv2xQ8//IA7d+5gzJgxxREjgLzuv+HDh6NVq1aKSSm/+eYbxfrs7GzExsYiPT1dUbZw4UJF3ZcnpcxnamqKpUuXYsyYMRBCoHLlyliwYAEGDx5cbOdBREREpYvW8yx9++236Nu3r8rTZdHR0YiOjkaVKlXQqVOn4ojR4HGeJSIiotJH2/u31smSnZ0dsrOz0bVrVwwaNAjvvvuu3oIt7ZgsERERlT56n5QyMTERy5cvx4MHD9CmTRv4+Phg5syZfGyeiIiIyjStkyULCwv0798fhw8fRlxcHPr164cffvgBPj4+CA0NxbZt25CdnV2csRIRERG9ca/1bjghBA4cOIC1a9fi119/hZWVlcp8SG8DdsMRERGVPm/k3XASiQQmJiaQSCQQQrBliYiIiMqcIiVLd+/exRdffAFfX1+0adMGDx48wKpVqxTzLxERERGVFVrPs5SVlYUdO3Zg9erVOHToEFxdXTFgwAB8+OGH8PX1Lc4YiYiIiEqM1smSi4sL0tPT0bFjR+zatQshISEwMnqtXjwiIiIig6d1sjRlyhT069dPq3e5EREREZUVWidLY8eOLc44iIiIiAwS+9GIiIiINGCyRERERKQBkyUiIiIiDZgsEREREWmg9QDvfLm5uVi7di0OHjyIR48eQS6XK60/dOiQ3oIjIiIiKmk6J0ujRo3C2rVr0aFDB9SsWRMSiaQ44iIiIiIyCDonS1u2bMHWrVvRvn374oiHiIiIyKDoPGbJzMwMlStXLo5YiIiIiAyOzsnSuHHjsHjxYgghiiMeIiIiIoOiczfc8ePHcfjwYezZswc1atSAqamp0vodO3boLTgiIiKikqZzsmRvb4+uXbsWRyxEREREBkfnZGnNmjXFEQcRERGRQeKklEREREQa6NyyBADbt2/H1q1bcefOHWRlZSmti4mJ0UtgRERERIZA55alb775BmFhYXB2dsa5c+cQEBCA8uXL4+bNm2jXrl1xxEhERERUYnROlr777jusXLkSS5YsgZmZGSZOnIj9+/dj5MiRkMlkxREjERERUYnROVm6c+cOmjRpAgCwsLBAamoqAKBfv37YvHmzfqMjIiIiKmE6J0suLi5ISkoCAHh6euLkyZMAgPj4eE5USURERGWOzsnSu+++i507dwIAwsLCMGbMGLRp0wbvv/8+518iIiKiMkcidGwOksvlkMvlMDHJe5Buy5YtiIqKQpUqVfDxxx/DzMysWAI1ZCkpKbCzs4NMJoOtrW1Jh0NERERa0Pb+rXOyRKqYLBEREZU+2t6/izQp5bFjx9C3b18EBgbi/v37AIANGzbg+PHjRYuWiIiIyEDpnCz9/PPPCAkJgYWFBc6dO4fMzEwAgEwmw1dffaX3AImIiIhKks7J0qxZs7B8+XKsWrUKpqamivKmTZty9m4iIiIqc3ROlmJjY9GiRQuVcjs7OyQnJ+sjJiIiIiKDUaR5lq5fv65Sfvz4cfj6+uolKCIiIiJDoXOyNHjwYIwaNQqnTp2CRCLBgwcPsGnTJowfPx5Dhw4tjhiJiIiISoyJrhuEh4dDLpejVatWSE9PR4sWLSCVSjF+/HiMGDGiOGIkIiIiKjFFnmcpKysL169fR1paGvz9/WFtba3v2EoNzrNERERU+mh7/9a5ZSmfmZkZ/P39i7o5ERERUamgdbL04YcfalVv9erVRQ6GiIiIyNBonSytXbsWXl5eqFevHviGFCIiInpbaJ0sDR06FJs3b0Z8fDzCwsLQt29fODg4FGdsRER
ERCVO66kDli5dioSEBEycOBG7du2Ch4cHevbsiT///JMtTURERFRm6TTPklQqRe/evbF//35cuXIFNWrUwLBhw+Dt7Y20tLTiihEAkJSUhD59+sDW1hb29vYYNGhQocdcuXIlgoODYWtrC4lEonaG8aLsl4iIiN4eOk9KqdjQyAgSiQRCCOTm5uozJrX69OmDy5cvY//+/di9ezf++usvDBkyROM26enpCA0NxWeffabX/RIREdHbQ6d5ljIzM7Fjxw6sXr0ax48fR8eOHREWFobQ0FAYGRU57yrU1atX4e/vjzNnzqBhw4YAgL1796J9+/a4d+8e3NzcNG5/5MgRtGzZEs+ePYO9vf1r7zczMxOZmZmK5ZSUFHh4eHCeJSIiolJE23mWtM5whg0bBldXV8yePRsdO3bE3bt3sW3bNrRv375YEyUAiI6Ohr29vSKhAYDWrVvDyMgIp06deuP7jYyMhJ2dneLj4eFR5BiIiIjIsGn9NNzy5cvh6ekJX19fHD16FEePHlVbb8eOHXoLLl9iYiKcnJyUykxMTODg4IDExMQ3vt/Jkydj7NixiuX8liUiIiIqe7ROlvr37w+JRKLXg4eHh2POnDka61y9elWvx9QHqVQKqVRa0mEQERHRG6DTpJT6Nm7cOAwcOFBjHV9fX7i4uODRo0dK5Tk5OUhKSoKLi0uRj19c+yUiIqKyo8jvhtMHR0dHODo6FlovMDAQycnJOHv2LBo0aAAAOHToEORyORo3blzk4xfXfomIiKjsKN6R2XpSvXp1hIaGYvDgwTh9+jROnDiB4cOHo1evXoon1u7fvw8/Pz+cPn1asV1iYiLOnz+P69evAwAuXryI8+fPIykpSev9EhER0dutVCRLALBp0yb4+fmhVatWaN++PZo1a4aVK1cq1mdnZyM2Nhbp6emKsuXLl6NevXoYPHgwAKBFixaoV68edu7cqfV+iYiI6O2m0zxLpJ628zQQERGR4dD7PEtEREREbyMmS0REREQaMFkiIiIi0oDJEhEREZEGTJaIiIiINGCyRERERKQBkyUiIiIiDZgsEREREWnAZImIiIhIAyZLRERERBowWSIiIiLSgMkSERERkQZMloiIiIg0YLJEREREpAGTJSIiIiINmCwRERERacBkiYiIiEgDJktEREREGjBZIiIiItKAyRIRERGRBkyWiIiIiDRgskRERESkAZMlIiIiIg2YLBERERFpwGSJiIiISAMmS0REREQaMFkiIiIi0oDJEhEREZEGTJaIiIiINGCyRERERKQBkyUiIiIiDZgsEREREWnAZImIiIhIAyZLRERERBowWSIiIiLSgMkSERERkQZMloiIiIg0YLJEREREpAGTJSIiIiINmCwRERERacBkiYiIiEgDJktEREREGjBZIiIiItKg1CRLSUlJ6NOnD2xtbWFvb49BgwYhLS1N4zYrV65EcHAwbG1tIZFIkJycrFLH29sbEolE6TN79uxiOgsiIiIqbUpNstSnTx9cvnwZ+/fvx+7du/HXX39hyJAhGrdJT09HaGgoPvvsM431vvjiCyQkJCg+I0aM0GfoREREVIqZlHQA2rh69Sr27t2LM2fOoGHDhgCAJUuWoH379pg3bx7c3NzUbjd69GgAwJEjRzTu38bGBi4uLvoMmYiIiMqIUtGyFB0dDXt7e0WiBACtW7eGkZERTp069dr7nz17NsqXL4969eph7ty5yMnJ0Vg/MzMTKSkpSh8iIiIqm0pFy1JiYiKcnJyUykxMTODg4IDExMTX2vfIkSNRv359ODg4ICoqCpMnT0ZCQgIWLFhQ4DaRkZGYMWPGax2XiIiISocSbVkKDw9XGVz96ufatWvFGsPYsWMRHByM2rVr45NPPsH8+fOxZMkSZGZmFrjN5MmTIZPJFJ+7d+8Wa4xERERUckq0ZWncuHEYOHCgxjq+vr5wcXHBo0ePlMpzcnKQlJSk97FGjRs3Rk5ODm7duoVq1aqprSOVSiG
VSvV6XCIiIjJMJZosOTo6wtHRsdB6gYGBSE5OxtmzZ9GgQQMAwKFDhyCXy9G4cWO9xnT+/HkYGRmpdPsRERHR26lUjFmqXr06QkNDMXjwYCxfvhzZ2dkYPnw4evXqpXgS7v79+2jVqhXWr1+PgIAAAHljnRITE3H9+nUAwMWLF2FjYwNPT084ODggOjoap06dQsuWLWFjY4Po6GiMGTMGffv2Rbly5UrsfImIiMhwlIqn4QBg06ZN8PPzQ6tWrdC+fXs0a9YMK1euVKzPzs5GbGws0tPTFWXLly9HvXr1MHjwYABAixYtUK9ePezcuRNAXnfali1bEBQUhBo1auDLL7/EmDFjlPZLREREbzeJEEKUdBClXUpKCuzs7CCTyWBra1vS4RAREZEWtL1/l5qWJSIiIqKSwGSJiIiISAMmS0REREQaMFkiIiIi0oDJEhEREZEGTJaIiIiINGCyRERERKQBkyUiIiIiDZgsEREREWnAZImIiIhIAyZLRERERBowWSIiIiLSgMkSERERkQYmJR1AWSCEAJD39mIiIiIqHfLv2/n38YIwWdKD1NRUAICHh0cJR0JERES6Sk1NhZ2dXYHrJaKwdIoKJZfL8eDBA9jY2EAikbyRY6akpMDDwwN3796Fra3tGzkmlT28juh18RoifSip60gIgdTUVLi5ucHIqOCRSWxZ0gMjIyNUrFixRI5ta2vLX1D02ngd0eviNUT6UBLXkaYWpXwc4E1ERESkAZMlIiIiIg2YLJVSUqkUERERkEqlJR0KlWK8juh18RoifTD064gDvImIiIg0YMsSERERkQZMloiIiIg0YLJEREREpAGTJSIiIiINmCwRERERacBkqZQTQmDatGlwdXWFhYUFWrdujbi4uJIOi0qZHTt2oG3btihfvjwkEgnOnz9f0iFRKZKdnY1JkyahVq1asLKygpubG/r3748HDx6UdGhUykyfPh1+fn6wsrJCuXLl0Lp1a5w6daqkw2KyVNp9/fXX+Oabb7B8+XKcOnUKVlZWCAkJQUZGRkmHRqXI8+fP0axZM8yZM6ekQ6FSKD09HTExMZg6dSpiYmKwY8cOxMbGonPnziUdGpUyVatWxbfffouLFy/i+PHj8Pb2Rtu2bfH48eOSDUyQQUtLSxP9+vUTVlZWwsXFRcybN08EBQWJUaNGCblcLlxcXMTcuXMV9ZOTk4VUKhWbN28uwajJ0Gi6jl4WHx8vAIhz586VSJxkuLS9hvKdPn1aABC3b99+s4GSQdP1OpLJZAKAOHDgwJsN9BVsWTJwEyZMwNGjR/Hbb79h3759OHLkCGJiYgAA8fHxSExMROvWrRX17ezs0LhxY0RHR5dUyGSANF1HRNrQ9RqSyWSQSCSwt7d/c0GSwdPlOsrKysLKlSthZ2eHOnXqvOFIlZmU6NFJo7S0NPzwww/YuHEjWrVqBQBYt24dKlasCABITEwEADg7Oytt5+zsrFhHVNh1RFQYXa+hjIwMTJo0Cb17937jb5Anw6XtdbR792706tUL6enpcHV1xf79+1GhQoWSCFmBLUsG7MaNG8jKykLjxo0VZQ4ODqhWrVoJRkWlDa8jel26XEPZ2dno2bMnhBBYtmzZmwyTDJy211HLli1x/vx5REVFITQ0FD179sSjR4/edLhKmCyVYi4uLgCAhw8fKpU/fPhQsY6I6E3JT5Ru376N/fv3s1WJisTKygqVK1fGO++8gx9++AEmJib44YcfSjQmJksGrFKlSjA1NVV6bPLZs2f4999/AQA+Pj5wcXHBwYMHFetTUlJw6tQpBAYGvvF4yTAVdh0RFUabayg/UYqLi8OBAwdQvnz5kgiVDFhRfxfJ5XJkZmYWd3gaccySAbO2tsagQYMwYcIElC9fHk5OTvj8889hZJSX40okEowePRqzZs1ClSpV4OPjg6lTp8LNzQ1dunQp2eDJYBR2HQFAUlIS7ty5o5gXJzY2FkBe6yVbKamwayg7Oxvdu3dHTEw
Mdu/ejdzcXMW4SQcHB5iZmZVk+GQgCruOnj9/ji+//BKdO3eGq6srnjx5gqVLl+L+/fvo0aNHicbOZMnAzZ07F2lpaejUqRNsbGwwbtw4yGQyxfqJEyfi+fPnGDJkCJKTk9GsWTPs3bsX5ubmJRg1GZrCrqOdO3ciLCxMsdyrVy8AQEREBKZPn/6mwyUDpOkaun//Pnbu3AkAqFu3rtJ2hw8fRnBw8BuOlgyVpuvI2NgY165dw7p16/DkyROUL18ejRo1wrFjx1CjRo0SjVsihBAlGgHpLDg4GHXr1sWiRYtKOhQqxXgd0eviNUT6UBquI45ZIiIiItKAyRIRERGRBuyGIyIiItKALUtEREREGjBZIiIiItKAyRIRERGRBkyWiIiIiDRgskRERESkAZMlIiIiIg2YLBERERFpwGSJiIiISIP/B7sNFDkGn/3OAAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "data = [get_noisy_data([2]*4,['x']*4,[0,1,2,3],[.1,0,0,0]),\n", " get_noisy_data([2]*4,['x']*4,[0,1,2,3],[0,.1,0,0]),\n", @@ -1181,9 +873,9 @@ "id": "7e2da1e1-78b2-4aca-9748-918c9a43dea2", "metadata": {}, "source": [ - "### 3.3b: Zero Noise Extrapolation ###\n", + "### 3.3b: Zero Noise Extrapolation\n", "\n", - "QPU results today are sometimes improved using **quantum error mitigation (QEM)** techniques. QEM techniques use classical postprocessing to improve results without the utilization of proper QEC protocols. One such QEM technique is **zero noise extrapolation (ZNE)**. The idea behind ZNE is that it is really hard to remove noise from an algorithm run on a physical QPU, but it is very easy to add noise. \n", + "QPU results today are sometimes improved using quantum error mitigation (QEM) techniques. QEM techniques use classical postprocessing to improve results without the utilization of proper QEC protocols. One such QEM technique is **zero noise extrapolation (ZNE)**. The idea behind ZNE is that it is really hard to remove noise from an algorithm run on a physical QPU, but it is very easy to add noise. \n", "\n", "The ZNE process works by applying increasing factors of error through clever application of the identity operator. For example, consider a circuit composed of a single layer of $R_X$ rotations of $\\pi$ radians. Applying the same gate three times is mathematically the same as applying it once and should have no impact on the result. \n", "\n", @@ -1191,75 +883,35 @@ "\n", "Experimentally, this is truly the identity operation as each gate is a noise channel and the total noise factor is increased from 1x to 3x. If this procedure is continued (5x, 7x, 9x, ...) the data can be fit to a curve and extrapolated back to estimate the experimentally inaccessible case of a 0x noise factor! So, paradoxically, adding noise can improve the result. 
\n", "\n", - "\"Drawing\"\n", + "\"Plot\n", "\n", "\n", "ZNE is a useful technique because it can be used experimentally. Noisy circuit simulation can demonstrate its effectiveness and help benchmark the effectiveness of ZNE when used on a physical QPU, help refine noise models, and test other QEM techniques before running experiments. \n", "\n", "\n", - "
\n", - "

Exercise 5 :

\n", - "

\n", - "You will now code an ZNE example by following the steps below:\n", + "

\n", + "\n", + "**Exercise 5:**\n", + "\n", + "You will now code a ZNE example by following the steps below:\n", "\n", "1. Create a Random Hamiltonian for a larger (20 qubit circuit)\n", "2. Define a quantum circuit with a layer of $R_x(\\pi/2)$ gates followed by a layer of $X$ gates.\n", - "3. Put a bitflip error on the $X$ gates and an Amplitudes Damping error on the $R_X$ gates.\n", - "4. Perform ZNE to obtain a correction for each. (Hint: use the $\\texttt{np.poly1d()}$ to fit a polynomial.)\n", + "3. Put a bitflip error on the $X$ gates and an **amplitude damping** error on the $R_X$ gates.\n", + "4. Perform ZNE to obtain a correction for each. (Hint: use the `np.poly1d()` to fit a polynomial.)\n", "5. Apply the correction to the original noisy circuit and calculate the percent error of the noisy circuit and the ZNE corrected result relative to the noiseless case.\n", - "

\n", - "
\n" + "\n", + "
" ] }, { "cell_type": "code", - "execution_count": 19, + "execution_count": null, "id": "30a716ff-58bb-4133-be8d-fae8e4dcea4e", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Percent Error of Uncorrected Noisy Circuit: -20.036193722076547 %\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjcAAAHHCAYAAABDUnkqAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjcsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvTLEjVAAAAAlwSFlzAAAPYQAAD2EBqD+naQAAhHxJREFUeJzt3XdYk1cbBvA7rLAEB7IEAa0DnDiLe4CoLXXVPRCte0LVqnXgXnUv1NZZ96xWq+LAiQMV654oDnDLVMDk/f54P1IjQwIJgXD/riuXzXlPTp6cWHg87xkSQRAEEBEREekIPW0HQERERKROTG6IiIhIpzC5ISIiIp3C5IaIiIh0CpMbIiIi0ilMboiIiEinMLkhIiIincLkhoiIiHQKkxsiIiLSKUxuiCjH1q5dC4lEgkePHmk7lALj0aNHkEgkWLt2rVrbdXZ2Rs+ePdXaJlFuY3JDlE0SieSrj8DAQG2HqSQ1rrlz56a5lpqghIWFaSEy1QUGBmba99HR0Sq1d/PmTQQGBhaIBO3s2bMIDAzE+/fvtR0KkUYYaDsAovxqw4YNGV4LDAzEgwcPULt27VyMKOvmzJmDAQMGwNTUVC3tde/eHZ06dYJUKlVLe6pYvnw5zM3N05QXLlxYpXZu3ryJSZMmoVGjRnB2dlZPcHnU2bNnMWnSJPTs2TNNP925cwd6evx3L+VvTG6Isqlbt27plv/+++948OABhgwZghYtWuT4fQRBwMePH2FiYpLjtgCgatWqCA8PR1BQEAICAtTSpr6+PvT19dXSlqp+/PFHWFlZ5ep7qvs7yUu0kaASqRvTcyI1unHjBoYOHQp3d3fMmTNH6ZpcLseCBQtQoUIFGBsbw8bGBv369cO7d++U6jk7O+P777/HoUOHUKNGDZiYmGDFihUAgIcPH6J9+/YoWrQoTE1N8e2332L//v0qxVi3bl00adIEs2fPxocPH75a/9ixY6hfvz7MzMxQuHBhtGrVCrdu3VKqk96cm7CwMHh7e8PKygomJiZwcXFBr169stUnOeHr6wtjY+M0MXt7e6NIkSJ4/vw51q5di/bt2wMAGjdurLi1FRISAiDz72TNmjVo0qQJrK2tIZVK4ebmhuXLl6eJI7WNw4cPo2rVqjA2Noabmxt27dqVpm52v+d///0XPXv2RKlSpWBsbAxbW1v06tULb968UdQJDAzEyJEjAQAuLi6Kz5r63aU35yYr8YSEhEAikWDbtm2YNm0aHBwcYGxsjKZNm+L+/ftfjZ1InThyQ6QmiYmJ6NChA/T19bFly5Y0/wLu168f1q5dCz8/PwwdOhQRERFYsmQJrly5gjNnzsDQ0FBR986dO+jcuTP69euHPn36oFy5cnjx4gXq1KmDxMREDB06FMWKFcO6devwww8/YMeOHWjTpk2WYw0MDESDBg2wfPnyTEdvjhw5ghYtWqBUqVIIDAzEhw8fsHjxYtStWxeXL1/O8PbNy5cv0axZMxQvXhyjR49G4cKF8ejRozS/yFXpk4y8ffs2TZmBgYHidsvChQtx7Ngx+Pr6IjQ0FPr6+lixYgUOHz6MDRs2wN7eHg0aNMDQoUOxaNEijB07Fq6urgCg+BNI/zsBxNtiFSpUwA8//AADAwPs27cPAwcOhFwux6BBg5TiunfvHjp27Ij+/
fvD19cXa9asQfv27XHw4EF4eXkBQI6+5+DgYDx8+BB+fn6wtbXFjRs3sHLlSty4cQPnzp2DRCJB27ZtcffuXWzevBnz589XjHoVL1483TZVjWfmzJnQ09PDiBEjEBMTg9mzZ6Nr1644f/58Zl8jkXoJRKQWvXr1EgAI69atS3Pt1KlTAgBh48aNSuUHDx5MU+7k5CQAEA4ePKhUd/jw4QIA4dSpU4qyuLg4wcXFRXB2dhZkMtlXYwQgDBo0SBAEQWjcuLFga2srJCYmCoIgCGvWrBEACBcvXlTUr1q1qmBtbS28efNGUXb16lVBT09P6NGjh6Is9bURERGCIAjC7t2707SVkz5Jz8SJEwUA6T7KlSunVPfQoUMCAGHq1KnCw4cPBXNzc6F169ZKdbZv3y4AEI4fP57mvTL6TgRBUPTf57y9vYVSpUql28bOnTsVZTExMYKdnZ3g7u6uKMvq9xwRESEAENasWZNpLJs3bxYACCdPnlSUzZkzR+n7+jJOX19fleM5fvy4AEBwdXUVkpKSFHUXLlwoABCuXbuW5r2INIW3pYjUYNOmTVi9ejW6d++OHj16pLm+fft2WFpawsvLC69fv1Y8qlevDnNzcxw/flypvouLC7y9vZXKDhw4gFq1aqFevXqKMnNzc/Tt2xePHj3CzZs3VYo5MDAQ0dHRCAoKSvd6VFQUwsPD0bNnTxQtWlRRXrlyZXh5eeHAgQMZtp06avL3338jJSUl3Tqq9klGdu7cieDgYKXHmjVrlOo0a9YM/fr1w+TJk9G2bVsYGxsrbitlVXrfCQCleTcxMTF4/fo1GjZsiIcPHyImJkaprr29vdJIh4WFBXr06IErV64oVnfl5Hv+PJaPHz/i9evX+PbbbwEAly9fVunzplI1Hj8/PxgZGSme169fH4B4a4sotzC5Icqhe/fuoX///ihbtiyWLVuWYZ2YmBhYW1ujePHiSo/4+Hi8fPlSqb6Li0uaNh4/fqy4FfK51Fsnjx8/BiDepomOjlY8vvwFm6pBgwZo3LhxhnNvUtvL6D1fv36NhISEdNtu2LAh2rVrh0mTJsHKygqtWrXCmjVrkJSUlO0+yUiDBg3g6emp9PDw8EhT77fffkPRokURHh6ORYsWwdraOkvtp0rvOwGAM2fOwNPTUzEnqXjx4hg7diwApOn7b775BhKJRKmsbNmyAKCY85LV7zk9b9++xbBhw2BjYwMTExMUL15cEXdGfw++RtV4SpYsqfS8SJEiAKDWeVREX8M5N0Q5kJSUhI4dOyI5ORlbtmxJd0kyIE6ctba2xsaNG9O9/uV8h5yswmnbti1OnDiheO7r65vhRm8TJ05Eo0aNsGLFCpWXTmdGIpFgx44dOHfuHPbt24dDhw6hV69emDt3Ls6dOwdzc3OV+ySnrly5okiYrl27hs6dO6v0+vS+kwcPHqBp06YoX7485s2bB0dHRxgZGeHAgQOYP38+5HK5WmLPqg4dOuDs2bMYOXIkqlatqujn5s2b51osGa2aEwQhV96fCGByQ5QjI0aMwJUrV7Bw4UK4u7tnWK906dI4cuQI6tatm+3ExcnJCXfu3ElTfvv2bcV1AJg7d67Sv5Lt7e0zbLNhw4Zo1KgRZs2ahQkTJqR5PwAZvqeVlRXMzMwyjfnbb7/Ft99+i2nTpmHTpk3o2rUrtmzZgp9++kktfZJVCQkJ8PPzg5ubG+rUqYPZs2ejTZs2qFmzpqLOlyMqWbFv3z4kJSVh7969SiMWGd1Su3//PgRBUHqvu3fvAoBicnZWv+cvvXv3DkePHsWkSZOUvst79+6lqavKZ81uPETaxNtSRNm0e/duLFmyBD/88AOGDh2aad0OHTpAJpNhypQpaa59+vQpSzvFtmzZEhcuXEBoaKiiLCEhAStXroSzszPc3NwAANWrV1e6RZNanpHUuTcrV65UKrezs0PVqlWxbt06pfiuX7+Ow4cPo2XLlhm2+e7duzT/Uq9atSoAKG5NqaNPs
uqXX35BZGQk1q1bh3nz5sHZ2Rm+vr5Kt8lSEzVV3jd1lOLzzxoTE5Nmzk+q58+fY/fu3YrnsbGxWL9+PapWrQpbW1sAWf+esxILACxYsCBNXVU+a3bjIdImjtwQZUNUVBR69+4NfX19NG3aFH/++We69UqXLg0PDw80bNgQ/fr1w4wZMxAeHo5mzZrB0NAQ9+7dw/bt27Fw4UL8+OOPmb7n6NGjsXnzZrRo0QJDhw5F0aJFsW7dOkRERGDnzp3Z3lW2YcOGaNiwodKtrFRz5sxBixYt4OHhgd69eyuWgltaWmZ6tMS6deuwbNkytGnTBqVLl0ZcXBxWrVoFCwsLRVKkjj4BgB07dqR7O9DLyws2NjY4duwYli1bhokTJ6JatWoAxL1pGjVqhPHjx2P27NkAxORLX18fs2bNQkxMDKRSqWL/mow0a9YMRkZG8PHxQb9+/RAfH49Vq1bB2toaUVFRaeqXLVsWvXv3xsWLF2FjY4PVq1fjxYsXSslQdr9nCwsLNGjQALNnz0ZKSgpKlCiBw4cPIyIiIk3d6tWrAwB+/fVXdOrUCYaGhvDx8Ul3JE5Tf++INEqra7WI8qnUZa9fe3y+pFYQBGHlypVC9erVBRMTE6FQoUJCpUqVhFGjRgnPnz9X1HFychK+++67dN/3wYMHwo8//igULlxYMDY2FmrVqiX8/fffWY4bny0Fz+jzfLl8+8iRI0LdunUFExMTwcLCQvDx8RFu3rypVOfLpeCXL18WOnfuLJQsWVKQSqWCtbW18P333wthYWFp3jsrfZKezJaC4/9LumNjYwUnJyehWrVqQkpKitLr/f39BT09PSE0NFRRtmrVKqFUqVKCvr6+0rLwzL6TvXv3CpUrVxaMjY0FZ2dnYdasWcLq1avTLLVObePQoUNC5cqVBalUKpQvX17Yvn17mjaz8j2ntxT86dOnQps2bYTChQsLlpaWQvv27YXnz58LAISJEycqvX7KlClCiRIlBD09PaVYv1wKntV4Uv8Offl50ouTSNMkgsBZXkREmubs7IyKFSvi77//1nYoRDqP44lERESkU5jcEBERkU5hckNEREQ6hXNuiIiISKdw5IaIiIh0CpMbIiIi0ikFbhM/uVyO58+fo1ChQtnabp2IiIhynyAIiIuLg729/Vc3jyxwyc3z58/h6Oio7TCIiIgoG548eQIHB4dM6xS45KZQoUIAxM6xsLBQa9spKSk4fPiwYht5yj3se+1h32sP+1572Pe5LzY2Fo6Ojorf45kpcMlN6q0oCwsLjSQ3pqamsLCw4F/2XMa+1x72vfaw77WHfa89WZlSwgnFREREpFOY3BAREZFOYXJDREREOqXAzbkhopyRyWRISUlRPE9JSYGBgQE+fvwImUymxcgKHva99rDvNcPIyOiry7yzgskNEWWJIAiIjo7G+/fv05Tb2triyZMn3Dsql7HvtYd9rxl6enpwcXGBkZFRjtphckNEWZKa2FhbW8PU1FTxA10ulyM+Ph7m5uZq+RcXZR37XnvY9+qXusluVFQUSpYsmaOkkckNEX2VTCZTJDbFihVTuiaXy5GcnAxjY2P+kM9l7HvtYd9rRvHixfH8+XN8+vQpR0vs+Y0Q0VelzrExNTXVciREpMtSb0fldB4TkxsiyjLOLSAiTVLXzxjeliLIZMCpU0BUFGBnB9SvD+jrazsqIiKi7OHITQG3axfg7Aw0bgx06SL+6ewslhNR/teoUSMMHz5c22EQ5SomNwXYrl3Ajz8CT58qlz97JpYzwSFd0LNnT0gkEvTv3z/NtUGDBkEikaBnz565H1gesXbtWkgkEkgkEujr66NIkSKoXbs2Jk+ejJiYGJXaevToESQSCcLDwzUTLFEWMbkpoGQyYNgwQBDSXkstGz5crEekTjIZEBICbN4s/pkbf8ccHR2xZcsWfPjwQVH28eNHbNq0CSVLltR8ADmUnJys0fYtLCwQFRWFp0+f4uzZs+jbty/Wr1+PqlWr4
vnz5xp9byJNYHKjJjK5DCcen8DJdydx4vEJyOR5Oys4dSrtiM3nBAF48kSsR6Qu2roNWq1aNTg6OmLXZ2+0a9culCxZEu7u7kp15XI5ZsyYARcXF5iYmKBKlSrYsWOH4rpMJkPv3r0V18uVK4eFCxcqtRESEoJatWrBzMwMhQsXRt26dfH48WMA4khS69atleoPHz4cjRo1Ujxv1KgRBg8ejOHDh8PKygre3t4AgOvXr6NFixYwNzeHjY0NevTogTdv3ihel5CQgB49esDc3Bx2dnaYO3dulvpHIpHA1tYWdnZ2cHV1Re/evXH27FnEx8dj1KhRinoHDx5EvXr1ULhwYRQrVgzff/89Hjx4oLju4uICAHB3d4dEIlF8posXL8LLywtWVlawtLREw4YNcfny5SzFRpQdTG7UYNetXXBe6AyvjV6Y93gevDZ6wXmhM3bdyrv3daKi1FuP6Gu0fRu0V69eWLNmjeL56tWr4efnl6bejBkzsH79egQFBeHGjRvw9/dHt27dcOLECQBi8uPg4IDt27fj5s2bmDBhAsaOHYtt27YBAD59+oTWrVujYcOG+PfffxEaGoq+ffuqvApk3bp1MDIywpkzZxAUFIT379+jSZMmcHd3R1hYGA4ePIgXL14ofYaRI0fixIkT+Ouvv3D48GGEhIRkO4mwtrZG165dsXfvXsWy3ISEBAQEBCAsLAxHjx6Fnp4e2rRpA7lcDgC4cOECAODIkSOIiopSJJNxcXHw9fXF6dOnce7cOZQpUwYtW7ZEXFxctmIj+hqulsqhXbd24cdtP0KA8v2dZ7HP8OO2H7Gjww60dW2rpegyZmen3npEmfnabVCJRLwN2qqV5lbqdevWDWPGjFGMoJw5cwZbtmxBSEiIok5SUhKmT5+OI0eOwMPDAwBQqlQpnD59GitWrEDDhg1haGiISZMmKV7j4uKC0NBQbNu2DR06dEBsbCxiYmLw/fffo3Tp0gAAV1dXleMtU6YMZs+erXg+depUuLu7Y/r06YqyP/74A05OTrh79y4cHBzwxx9/4M8//0TTpk0BiAmSg4ODyu+dqnz58oiLi8ObN29gbW2Ndu3aKV1fvXo1ihcvjps3b6JixYooXrw4AKBYsWKwtbVV1GvSpInS61auXInChQvjxIkT+P7777MdH1FGmNzkgEwuw7CDw9IkNgAgQIAEEgw/OBytyrWCvl7eWltdvz7g4CD+qzm9XzgSiXi9fv3cj410jyq3QT+7O6NWxYsXx3fffYe1a9dCEAR89913sLKyUqpz//59JCYmwsvLS6k8OTlZ6fbV0qVLsXr1akRGRuLDhw9ITk5G1apVAQBFixZFz5494e3tDS8vL3h6eqJDhw6wU/FfCtWrV1d6fvXqVRw/fhzm5uZp6j548ABJSUlITk5G7dq1FeVFixZFuXLlVHrfzwn//+GQOup07949TJgwAefPn8fr168VIzaRkZGoWLFihu28ePEC48aNQ0hICF6+fAmZTIbExERERkZmOzaizDC5yYFTkafwNDbjn9gCBDyJfYJTkafQyLlR7gWWBfr6wMKF4u0AiUQ5wUkdPV+wgPvdkHrkldugvXr1wuDBgwGICcqX4uPjAQD79+9HiRIllK5JpVIAwJYtWzBixAjMnTsXHh4eKFSoEObMmYPz588r6q5ZswZDhw7FwYMHsXXrVowbNw7BwcH49ttvoaenp0gaUn1+ynoqMzOzNLH5+Phg1qxZirLU843KlCmDhw8fqtIVWXLr1i1YWFgojtzw8fGBk5MTVq1aBXt7e8jlclSsWPGrE559fX3x5s0bLFy4EE5OTpBKpfDw8ND4RGkquJjc5EBUXNZ+Eke9iwScNRtLdrRtC+zYId4u+Pxf1Q4OYmLTNu/dTaN8Kq/cBm3evDmSk5MhkUgUk3Q/5+bmBqlUisjISDRs2DDdNs6cOYM6depg4MCBirLPJ9Wmcnd3h7u7O8aMGQMPDw9s2rQJ3377LYoXL47r1
68r1Q0PD//qOTrVqlXDzp074ezsDAMD8Ue3XC5HbGwszMzMULp0aRgaGuL8+fOKFWDv3r3D3bt3M/wsmXn58iU2bdqE1q1bQ09PD2/evMGdO3ewatUq1P//kO7p06eVXpPR1vlnzpzBsmXL0LJlSwDAkydP8Pr1a5VjIsoqTijOAbtCWftJbOc3FBg7Fvj/vf68pG1b4NEj4PhxYNMm8c+ICCY2pF6pt0EzmlMrkQCOjpq/Daqvr49bt27h5s2b0E9nWLJQoUIYMWIE/P39sW7dOjx48ACXL1/G4sWLsW7dOgDiXJiwsDAcOnQId+/exfjx43Hx4kVFGxERERgzZgxCQ0Px+PFjHD58GPfu3VPMu2nSpAnCwsKwfv163Lt3DxMnTkyT7KRn0KBBePv2LTp37oyLFy/iwYMHOHToEAYNGgSZTAZzc3P07t0bI0eOxLFjx3D9+nX07NkzS4c6CoKA6OhoREVF4datW1i9ejXq1KkDS0tLzJw5EwBQpEgRFCtWDCtXrsT9+/dx7NgxBAQEKLVjbW0NExMTxWTn1H1yypQpgw0bNuDWrVs4f/48unbtChMTk6/GRZRdTG5yoH7J+nCwcIAE6f/ElgBwjNdH/X9jgBkzgFKlgB9+AA4eBP5/rzov0NcX5zl07iz+yVtRpG6pt0GBtAlObt8GtbCwgIWFRYbXp0yZgvHjx2PGjBlwdXVF8+bNsX//fsUy5379+qFt27bo2LEjateujTdv3iiN4piamuL27dto164dypYti759+2LQoEHo168fAMDb2xvjx4/HqFGjULNmTcTFxaFHjx5fjdve3h5nzpyBTCZDs2bNUKlSJQQEBMDS0lKRwMyZMwf169eHj48PPD09Ua9evTRzd9ITGxsLOzs7lChRAh4eHlixYgV8fX1x5coVxVwhPT09bNmyBZcuXULFihXh7++POXPmKLVjYGCARYsWYcWKFbC3t0erVq0AiBOf3717h2rVqqF79+4YOnQorK2tvxoXUXZJhC9v/uq42NhYWFpaIiYmJtMfcFmVuloKgNLE4tSEZ0e7rWh7zwBYtgw4cuS/F5YqBQwYAPj5Af+/n03Zl5KSggMHDqBly5ZfHd4n1X38+BERERFwcXGBsbGx0rXUWyMWFhZfHSXYtSvtbVBHR94GzS5V+p7Ui32vGZn9rFHl9ze/kRxq69oWOzrsQAkL5cmHDhYO4jLwiu2BNm2A4GDg9m3xJ7ulJfDwITByJFCiBNCzJ3DhQvrLloh0CG+DElFu4IRiNWjr2hatyrXC8YfH8c/pf9CiXgs0LtU47fLvcuXEf6JOmwZs2QIsXQpcuQKsWyc+qlcXR3M6dwZMTbXyWYg0LfU2KBGRpnDkRk309fTR0KkhGhRpgIZODTPf18bMDOjdG7h0CTh3DvD1BaRS8flPP4mjOf7+wN27ufcBiIiIdASTG22SSIDatYG1a8Xd9ObMEefivH8vjvCUKwd4eQG7dwOfPmk5WCIiovyByU1eUawYMGIEcO8ecOAA4OMjJj9HjogTEpydgSlTeNgTERHRVzC5yWv09IAWLYC9e8VJx2PGAMWLiyM7EyYAJUsCHToAISGcgExERJQOJjd5mbMzMH26eOjOxo1A3bri7ant24HGjYEKFYAlS4D/b5RFRERETG7yB6kU6NIFOH0aCA8H+vUTJyXfugUMGSJOQO7fH7h6VduREhERaR2Tm/ymShUgKEi8TbV4MeDmBiQkACtWAFWrAvXqiRuIJCVpO1IiIiKtYHKTX1laAoMHA9evi/NvOnQADAyAM2eArl3FbV/HjBF3TCOidDVq1AjDhw/P1muPHj0KV1fXNIdEfi4wMBBVq1bNXnAFRFBQEHx8fLQdBukYJjf5nUQCNGwIbN0KREYCkyeLt6levQJmzhSXlvv4AP/8k6fOsyLKLT179oREIknzuH//Pnbt2oUpU6Yo6jo7O2PBggVZanfUqFEYN25cugdwkujjx4/o2bMnKlWqB
AMDA7Ru3TpNnV69euHy5cs4depU7gdIOovJjS6xswPGjxdHa3btAjw9xRVVf/8NtGwJlCkj7qXz+rW2IyXKVc2bN0dUVJTSw8XFBUWLFkWhQoVUbu/06dN48OAB2rVrp4FoVSMIAj7l0X2wZDIZTExMMHToUHh6eqZbx8jICF26dMGiRYtyOTrSZUxudJGBgfJ5VsOH/3ee1ahRgIODuCvyuXNcTk4FglQqha2trdJDX19f6bZUo0aN8PjxY/j7+ytGdzKyZcsWeHl5pTnYb+bMmbCxsUGhQoXQu3dvfPz4Mc1rf//9d7i6usLY2Bjly5fHsmXLlK6fPXsWVatWhbGxMWrUqIE9e/ZAIpEgPDwcABASEgKJRIJ//vkHNWvWhI2NDU6fPg25XI4ZM2bAxcUFJiYmqFKlCnbs2KHU9vXr19GiRQuYm5vDxsYG3bt3x2sN/mPHzMwMy5cvR58+fWBra5thPR8fH+zduxcfPnzQWCxUsDC50XXlygHz5wPPnwN//AFUqyZONl6/HvDwEM+z+v13cVIykSoEQfx7o42HBpLyXbt2wcHBAZMnT1aM7mTk1KlTqFGjhlLZtm3bEBgYiOnTpyMsLAx2dnZpEpeNGzdiwoQJmDZtGm7duoXp06dj/PjxWLduHQDx1GMfHx9UqlQJly9fxpQpU/DLL7+kG8Po0aMxffp0nD9/HpUrV8aMGTOwfv16BAUF4caNG/D390e3bt1w4sQJAMD79+/RpEkTuLu7IywsDAcPHsSLFy/QoUOHDD9nZGQkzM3NM31Mnz49S/2bmRo1auDTp084f/58jtsiAgAIWnTixAnh+++/F+zs7AQAwu7du7P82tOnTwv6+vpClSpVVHrPmJgYAYAQExOjWrBZkJycLOzZs0dITk5We9tqI5cLwvnzguDrKwhSqSCIvyYEwdJSEIYNE4Tbt7UcYPbki77Pxz58+CDcvHlT+PDhw3+F8fH//f3J7Ud8fJZj9/X1FfT19QUzMzPF48cffxQEQRAaNmwoDBs2TFHXyclJmD9//lfbtLS0FNavX69U5uHhIQwcOFCprHbt2ko/o0qXLi1s2rRJqc6UKVMEDw8PQRAEYfny5UKxYsWU+nnVqlUCAOHKlSuCIAjC8ePHBQDCnj17BJlMJrx7905ITEwUTE1NhbNnzyq13bt3b6Fz586K92nWrJnS9SdPnggAhDt37qT7OVNSUoR79+5l+njz5s1Xekvk6+srtGrVKsPrRYoUEdauXZultvKC1L6XyWTaDkWnpPuz5v9U+f2t1VPBExISUKVKFfTq1Qtt27bN8uvev3+PHj16oGnTpnjx4oUGI9RBEglQq5b4mDsXWLNGXFr+4AGwcKH4aNoUGDgQ+OEH8RYXUT7XuHFjLF++XPHczMwsR+19+PAhzS2pW7duoX///kplHh4eOH78OADx592DBw/Qu3dv9OnTR1Hn06dPsLS0BADcuXMHlStXVmq7Vq1a6cbw+cjR/fv3kZiYCC8vL6U6ycnJcHd3BwBcvXoVx48fh7m5eZq2Hjx4gLJly6YpNzAwwDfffJPu+6ubiYkJEhMTc+W9SPdp9TdXixYt0KJFC5Vf179/f3Tp0gX6+vrYs2eP+gMrKFLPswoIEOfnLFsmTj4+elR8lCgB9O0rnlRub6/taCmvMTUF4uMhl8sRGxsLCwsL6Onl0p1uU1OVqpuZman1l7SVlRXevXun0mvi4+MBAKtWrULt2rWVrmVnxdXnCVpq2/v370eJEiWU6kmlUkUdHx8fzJo1K01bdnZ26b5HZGQk3NzcMo1j7NixGDt2rEqxp+ft27coXrx4jtshArSc3GTHmjVr8PDhQ/z555+YOnWqtsPRDXp6gLe3+Hj8GFi5Eli1StwocOJE8cDONm3E0ZyGDcXRHyKJRNwpWy4HZDLxv3MrudEQIyOjTPetSeXu7o6bN28qlbm6uuL8+fPo0aOHouzcuXOK/7axsYG9vT0ePnyIrl27pttuu
XLl8OeffyIpKUmRlFy8ePGr8bi5uUEqlSIyMhINGzZMt061atWwc+dOODs7wyCLI7L29vaKicwZKVq0aJbaysyDBw/w8eNHxSgTUU7lq+Tm3r17GD16NE6dOpXl/zmTkpKQ9NluvbGxsQCAlJQUpKSkqDW+1PbU3W6usrcHAgOBMWMg2b0beitWQO/MGfE8q+3bIZQvD3m/fpB36yauwMojdKLv87CUlBQIggC5XA75F/slCf+f3Jt6Pa8RBCHT2D6/5uTkhBMnTqBDhw6QSqWwsrJK9zXNmjXD+vXrldocMmQIevXqhWrVqqFu3brYtGkTbty4gVKlSinqTZw4EcOHD4eFhQW8vb2RlJSEsLAwvH//Hv7+/ujUqRN+/fVX9OnTB7/88gsiIyPx22+/KcWZ2pZcLlf0vbm5OX7++Wf4+/vj06dPqFevHmJiYnD27FkUKlQIvr6+GDBgAFatWoVOnTph5MiRKFq0KO7fv4+tW7di1apV6Y4e6enpoVSpUl/t48y+95s3byI5ORlv3rxBfHw8Ll++DABKmxueOHECpUqVgouLS578O5SevP73Pr9K/XudkpKS5u+kKj/f801yI5PJ0KVLF0yaNCnde8MZmTFjBiZNmpSm/PDhwzBVcWg7q4KDgzXSbq6zsABGjoRF+/Zw/ucfOJ44AYPbt6Hv7w9hzBg8bdAAES1aINbFRduRKuhM3+cxBgYGsLW1RXx8PJKTk9OtExcXl8tRZU1KSgo+ffqk+IfN5z59+oTk5GTFtVGjRsHf3x9lypRBUlJShreefHx88Msvv+DSpUsoU6YMAPE2+4gRI/DLL78gKSkJPj4+8PPzw7FjxxTtd+jQARKJBIsXL8aoUaNgamoKNzc3DBgwQFFn06ZN+Pnnn1GtWjW4ubnh559/Rp8+fRSfIXVeSlxcnOI2YFxcHEaMGIFChQphxowZePToESwtLVGlShX4+/sjNjYW5ubm+OeffxAYGAhvb28kJyfD0dERTZs2RXx8fKZL33OiZcuWePLkieJ59erVAUCpbzdu3Ihu3bql+x3ldXn1731+lZycjA8fPuDkyZNp9m9SZU6WREhNP7VMIpFg9+7d6e5gCYiTiIsUKaKUyaVmePr6+jh8+DCaNGmS5nXpjdw4Ojri9evXsLCwUOtnSElJQXBwMLy8vGBoaKjWtvOE2FjobdwIvaAgSG7dUhTLPTwg79cPQrt24iGfWqDzfa9lHz9+xJMnT+Ds7JxmIq0gCIiLi0OhQoU09gsyLxo1ahRiY2MRFBSk0ffZuHEjevfujXfv3sHExETpmi70/Y0bN+Dp6Ynbt28rJlbnB7rQ93nRx48f8ejRIzg6Oqb5WRMbGwsrKyvExMR89fd3vhm5sbCwwLVr15TKli1bhmPHjmHHjh1wyWD0QCqVKu5df87Q0FBjvwQ12bZWFSsGDB0qnkR+8qQ4AXnXLuiFhkIvNBQYORLo3Vs8tdzZWSsh6mzfa5lMJoNEIoGenl6aScOpQ/Kp1wuKcePGKfaxUefnXr9+PUqVKoUSJUrg6tWrGDNmDDp06JDuCi9d6PsXL15g/fr1KFKkiLZDUYku9H1epKenB4lEku7PclV+tms1uYmPj8f9+/cVzyMiIhAeHo6iRYuiZMmSGDNmDJ49e4b169dDT08PFStWVHq9tbU1jI2N05SThqWeZ9WwIRAdLW4CuGIF8PSpeJ7VrFnAd9+JE5C9vfP9JFOi9BQuXFgtq4S+FB0djQkTJiA6Ohp2dnZo3749pk2bpvb3ySsyOpaBKCe0+lsnLCwM7u7uihnyAQEBcHd3x4QJEwAAUVFRiIyM1GaI9DW2tsC4cUBEBLB7N+DlpXye1TffALNn8zwroiwaNWoUHj16hI8fPyIiIgLz58/X2PxAIl2l1eSmUaNGipUMnz/Wrl0LAFi7di1CQkIyfH1gYOBXlylSLjEwAFq3Bg4fBu7eFffOKVxYTHp++UU8z6pHD55nRUREGsf7BaR+Z
cqIux8/ewasXi2eX5WUBGzYwPOsiIhI45jckOaYmgJ+fkBYGHDhAtCzJ2BsDFy5AvTpI+6APHw4cOeOtiMlIiIdwuSGckfNmuI5Vk+fAr/9BpQuDcTEiGdZlS8PeHoCu3YBX+xrQEREpComN5S7ihUDfv5ZnJfzzz/i4Zx6euJZVu3aiUvIJ08Gnj/XdqRERJRPMbkh7dDTA5o3B/76C3j4EBg7FrC2/u88KycnoH174PhxTkAmIiKVMLkh7XNyAqZNAyIjgU2bgHr1xNtTO3YATZoAFSoAixeLt7GISMn3338Pf39/bYeR7/Ts2TPDHfFV4efnp5Z2SL2Y3FDeIZUCnTsDp04B//4L9O8PmJsDt26JOyPb24u7H3P5P2VRSEgIJBJJho/GjRtrJa7AwEBIJBL0799fqTw8PBwSiQSPHj3KclsbNmzA5MmT1Rzhfxo1apRu330Ze2acnZ2xYMECjcWYGx49egSJRJJm+5EFCxYoti/Jy6ZNm4Y6derA1NQUhQsXztJrevbsmeZ7b968uVKdt2/fomvXrrCwsEDhwoXRu3dvxMfHK67fuXMHjRs3ho2NDYyNjVGqVCmMGzdO44ccM7mhvKlSJWD5cvE21dKl4uhNYiKwciXg7g7UqQP8+Sfw8aO2IyUVyeQyhDwKweZrmxHyKAQyuUxj71WnTh1ERUWleaxYsQISiQQDBw7MdtsZHSCaVcbGxvjjjz9w7969HLVTpEgRFCpUKEdtfE2fPn3S9OHs2bPV+h4ymSxfnq5taWmZ5WRBm5KTk9G+fXsMGDBApdc1b95c6XvfvHmz0vWuXbvixo0bCA4Oxt9//42TJ0+ib9++iuuGhobo0aMHDh8+jDt37mDBggVYtWoVJk6cqJbPlSGhgImJiREACDExMWpvOzk5WdizZ4+QnJys9rYLPLlcEE6cEISOHQXBwEAQxJk4gmBlJQi//CIk37nDvtegDx8+CDdv3hQ+fPiQ5ppMJhPevXsnyGSyr7az8+ZOwWGeg4BAKB4O8xyEnTd3aiLsdN28eVMoVKiQ8OuvvyqVX7t2TWjevLlgZmYmWFtbC926dRNevXqluN6wYUNh0KBBwrBhw4RixYoJjRo1EgRBEEJCQoSaNWsKRkZGgq2trfDLL78IKSkpmcYwceJEoUqVKoKXl5fQvn17RfmVK1cEAEJERISiLLP2ZTKZULduXWHo0KGK+kuXLhW++eYbQSqVCtbW1kK7du0U12QymTB9+nTB2dlZMDY2FipXrixs374901gbNmwoDBs2LMPr69atE8zMzIS7d+8qygYMGCCUK1dOSEhIEBo2bCgAUHoIgiCsWbNGsLS0FP766y/B1dVV0NfXFyIiIoQLFy4Inp6eQrFixQQLCwuhQYMGwqVLl5TeE4CwbNkyoXnz5oKxsbHg4uKS5nP8+++/QuPGjQVjY2OhaNGiQp8+fYS4uDjFdV9fX6FVq1aK5//8849Qt25dwdLSUihatKjw3XffCffv31d6z88fDRs2FN69eyf06NFDqZ2PHz8KQ4YMEYoXLy5IpVKhbt26woULFxTXjx8/LgAQjhw5IlSvXl0wMTERPDw8hNu3b2f6PahLar9nxZd99KWbN28KAISLFy8qyv755x9BIpEIz549y/B1/v7+Qr169dK9ltnPGlV+f3PkhvIHiQRo0ADYsgV48gSYMkXc9fj1a2DWLBiUL4/aU6dC8s8/gExzIwGUfbtu7cKP237E09inSuXPYp/hx20/YtetXRqP4f3792jVqhUaNWqEKVOmKJU3adIE7u7uCAsLw8GDB/HixQt06NBB6fXr1q2DkZERzpw5g6CgIDx79gwtW7ZEzZo1cfXqVSxfvhx//PEHpk6dmqV4Zs6ciZ07dyIsLCzd66q2HxYWhqFDh2Ly5Mm4c+cODh48iAYNGiiuz5gxA+vXr0dQUBBu3LgBf39/dOvWDSdOnMhSvOnp0aMHWrZsia5du+LTp0/Yv38/fv/9d2zcu
BGmpqbYtWsXHBwcMHnyZMW//lMlJiZi1qxZ+P3333Hjxg1YW1sjLi4Ovr6+OH36NM6dO4cyZcqgZcuWiIuLU3rf8ePHo127drh69Sq6du2KTp064datWwCAhIQEeHt7o0iRIrh48SK2b9+OI0eOYPDgwRl+joSEBAQEBCAsLAxHjx6Fnp4e2rRpoxhNunDhAgDgyJEjiIqKwo4dO9JtZ9SoUdi5cyfWrVuHy5cv45tvvoG3tzfevn2rVO/XX3/F3LlzERYWBgMDA/Tq1SvTfq5QoQLMzc0zfLRo0SLT12dXSEgIrK2tUa5cOQwYMABv3rxRXAsNDUXhwoVRo0YNRZmnpyf09PRw/vz5dNu7f/8+Dh48iIYNG2okXoWvpj86hiM3OiQlRRB27RIEL6//RnIAQXBxEYRZswThs391U87kdOTmk+xTmhGbzx+SQIngOM9R+CT7pLHPIJPJhBYtWgiurq5CbGys0rUpU6YIzZo1Uyp78uSJAEC4c+eOIAjiCIa7u7tSnbFjxwrlypUT5HK5omzp0qWCubl5pv2ROnIjCILQqVMnoUmTJoIgpB25+Vr7X47c7Ny5U7CwsEjz+QRBHFEwNTUVzp49q1Teu3dvoXPnzhnG2rBhQ8HQ0FAwMzNTevz555+KOm/fvhUcHByEAQMGCDY2NsK0adOU2nBychLmz5+vVLZmzRoBgBAeHp7hewuC+L0VKlRI2Ldvn6IMgNC/f3+lerVr1xYGDBggCIIgrFy5UihSpIgQHx+vuL5//35BT09PiI6OFgTh66MSr169EgAI165dEwRBECIiIgQAwpUrVxRxfTlyEx8fLxgaGgobN25UtJOcnCzY29sLs2fPFgRBeeTm89gApPv/V6pHjx4J9+7dy/Dx9OnTDF/7OVVGbjZv3iz89ddfwr///ivs3r1bcHV1FWrWrCl8+iT+fzpt2jShbNmyaV5XvHhxYdmyZUplHh4eglQqFQAIffv2zfD/D3WN3Gj1VHCiHDEwANq0Adq0QcrNm3g8ZgxKnzwJSep5VhMmAB06iKeT164tjv6QVpyKPJVmxOZzAgQ8iX2CU5Gn0Mi5kUZiGDt2LEJDQ3HhwoU0c1SuXr2K48ePw9zcPM3rHjx4gLJlywIAqlevrnTt1q1b8PDwgOSzv1t169ZFfHw8nj4VP6+bm5tSDF+eJD516lS4urri8OHDsLa2Vql9BwcHpfpeXl5wcnJCqVKl0Lx5czRv3hxt2rSBqakp7t+/j8TERHh5eSm9Jjk5WXF4cUa6du2KX3/9VanMxsZG8d9FihTBH3/8AW9vb9SpUwejR4/OtL1URkZGqFy5slLZixcvMG7cOISEhODly5eQyWRITExMc4iyh4dHmuepk31v3bqFKlWqwMzMTHG9bt26kMvluHPnjlLsqe7du4cJEybg/PnzeP36tWLEJjIyEhUrVszS53nw4AFSUlJQt25dRZmhoSFq1aqlGFVK9fnntrOzAwC8fPkSJUuWTLdtJyenLMWgTp06dVL8d6VKlVC5cmWULl0aISEhaNq0qUptbd26FXFxcbh69SpGjhyJ3377DaNGjVJ3yApMbkg3lCmDG716wWn9ehju2iVOQr50STzPasMGoGpVMcnp0gX47Ace5Y6ouKivV1Khnqq2bNmC3377Dfv370eZMmXSXI+Pj4ePjw9mzZqV5lrqLx4ASr8ss8Le3l5pdU3RokXT1CldujT69OmD0aNH448//lCp/S8VKlQIly9fRkhICA4fPowJEyYgMDAQFy9eVKxg2b9/P0qUKKH0OqlUmmm7lpaW+OabbzKtc/LkSejr6yMqKgoJCQlZmuRsYmKilLgBgK+vL968eYOFCxfCyckJUqkUHh4eOZ7A/TU+Pj5wcnLCqlWrYG9vD7lcjooVK2rsfQ0NDRX/ndoHmU2orlChAh4/fpzh9fr16+Off/5RX4DpKFWqFKysrHD//n00bdoUtra2ePnypVKdT58+4e3bt7C1tVUqd
3R0BCAm+zKZDH379sXPP/8MfX19jcTKOTekWzI6zyo8HOjbVzzPatgw4PZtbUdaoNgVsvt6JRXqqSI8PBy9e/fGzJkz4e3tnW6datWq4caNG3B2dsY333yj9MgsoXF1dUVoaCiEzzaaPHPmDAoVKgQHBwcYGBgotZVecgMAEyZMwN27d7FlyxaV2k+PgYEBPD09MXv2bPz777949OgRjh07Bjc3N0ilUkRGRqb5jKm/eLLr7NmzmDVrFvbt2wdzc/M0c1uMjIwgy+JcuDNnzmDo0KFo2bIlKlSoAKlUitevX6epd+7cuTTPXV1dAYj9dvXqVSR8djjvmTNnoKenh3LlyqVp682bN7hz5w7GjRuHpk2bwtXVFe/evUvzGQBk+jlKly6tmJOVKiUlBRcvXlQawcuOAwcOIDw8PMPH77//nqP2s+Lp06d48+aNIuH38PDA+/fvcenSJUWdY8eOQS6Xo3bt2hm2I5fLkZKSotHVcUxuSHelnmf17Jl4SnnqeVaLFgGurkDTpsDOnYCG91sgoH7J+nCwcIAE6d8alEACRwtH1C9ZX63v+/r1a7Ru3RqNGjVCt27dEB0drfR49eoVAGDQoEF4+/YtOnfujIsXL+LBgwc4dOgQ/Pz8Mv1lNnDgQDx58gRDhgzB7du38ddff2HixIkICAiAnl7Wf7za2NggICAAixYtylH7f//9NxYtWoTw8HA8fvwY69evh1wuR7ly5VCoUCGMGDEC/v7+WLduHR48eIDLly9j8eLFWLduXabxJSYmpum71F/+cXFx6N69O4YOHYoWLVpg48aN2Lp1q9KEW2dnZ5w8eRLPnj1LN1H5XJkyZbBhwwbcunUL58+fR9euXWFiYpKm3vbt27F69WrcvXsXEydOxIULFxRJVdeuXWFsbAxfX19cv34dx48fx5AhQ9C9e/d0b0kVKVIExYoVw8qVK3H//n0cO3YMAQEBSnWsra1hYmKimGwek86momZmZhgwYABGjhyJgwcP4ubNm+jTpw8SExPRu3fvTD/31zg5OaVJSj9/fDka96XIyEiEh4cjMjISMplMkRR9vidN+fLlsXv3bgDiaObIkSNx7tw5PHr0CEePHkWrVq0UE6QBMYls3rw5+vTpgwsXLuDMmTMYPHgwOnXqBHt7ewDAxo0bsW3bNty6dQsPHz7Etm3bMGbMGHTs2FFp9Ertvj6lSLdwQrFuylLfy2SCcOiQILRqJQh6ev9NQLa3F4TAQEHIZOliQaeOpeA7b+4UJIESQRIoSTOZWBIo0chy8LVr16ZZwvv5w8nJSVH37t27Qps2bYTChQsLJiYmQvny5YXhw4crJvNmtCQ6J0vBPxcTEyNYWVnlaCn4qVOnhIYNGwpFihQRTExMhMqVKwtbt25VtCWXy4UFCxYI5cqVEwwNDYXixYsL3t7ewokTJzKMNb2l3AAEb29vQRAEwc/PT6hUqZLw8eNHxWvmzp0rFC1aVDHJNTQ0VKhcubJiQqkgZDyx9fLly0KNGjUEY2NjoUyZMsL27dvTTEgGICxdulTw8vISpFKp4OzsrPQ5BUH1peDBwcGCq6urIJVKhcqVKwshISECAGH37t2KOqtWrRIcHR0FPT29DJeCf/jwQRgyZIhgZWWV6VLwd+/eKcrS2wJA3Xx9fdP9Ho8fP66oA0BYs2aNIAiCkJiYKDRr1kwoXry4YGhoKDg5OQl9+vRRTMhO9ebNG6Fz586Cubm5YGFhIfj5+Sn185YtW4Rq1aoJ5ubmgpmZmeDm5iZMnz49w8nT6ppQLPn/ByowYmNjYWlpiZiYGFhYWKi17ZSUFBw4cAAtW7bUbEZKaajc95GR4oaAq1YBqfeM9fXFCcoDBwKNGnEC8mc+fvyIiIgIuLi4wNjYWOmaXC5HbGwsLCwsvjpasevWLgw7OExpcrGjhSMWNF+Atq5tNRK7LlOl73WJRCLB7t27tXrsQUHte03L7GeNKr+/OaGYCqaSJYGpU8UVVbt2ibshn
zwpnme1YwdQvjwwYADQoweQD3YfzS/aurZFq3KtcCryFKLiomBXyA71S9aHvp5mJhUSUcHEdJMKNiMjoFMn4MQJ4No1cdTG3FyccDxsmDgBuW9f4MoVbUeqM/T19NHIuRE6V+qMRs6NmNgQkdoxuSFKVbGiuIT8+XNg2bL/zrNatQqoVo3nWRHlEYIg8CRuyhSTG6IvFSok3pK6dk28VdWpE2BoCISGAt27i8c+/PILEBGh7UiJiCgdTG6IMiKRAPXrA5s3ixOQp04VE5s3b4DZs8Wl5d99B+zfX2DOsypg6w+IKJep62cMkxuirLC1BX79VRyt2bMHaNZMXEh+4ADw/ffAN98As2YB/983RdekrkBLTEzUciREpMtSd4TO6c7FXC1FpAoDA6BVK/Fx7x4QFCRuFPjoETB6tLj6qn17cWKyh4fOLCfX19dH4cKFFVutm5qaKm0Zn5ycjI8fP3JJbC5j32sP+1795HI5Xr16BVNTUxgY5Cw9YXJDlF1lyog7H0+ZAmzdKk5CDgsDNm4UH1Wq/HeeVToHMuY3qWfFfHmWjCAI+PDhQ7rnBJFmse+1h32vGXp6eihZsmSO+5TJDVFOpZ5n5ecHXLwo7pmzeTNw9SrQrx8wciTg6ytOUv7/2Tf5kUQigZ2dHaytrZHy2ZEVKSkpOHnyJBo0aMDNK3MZ+1572PeaYWRkpJaRMCY3ROpUs6b4+O03YO1aMdG5fx9YvFh8NG4sjua0aiWuwMqH9PX1le6H6+vr49OnTzA2NuYP+VzGvtce9n3exhuFRJpQtCgQEADcuQMcOiQmM3p6wPHj4pwcJycgMFA81JOIiNSKyQ2RJunpiSur9uwRV1qNGwfY2ABRUcCkSWKS8+OPwLFj4uorIiLKMSY3RLmlZElx8nFkJLBlC9Cggbg/zs6dQNOm4nychQuB9++1HSkRUb7G5IYotxkZAR07pj3P6s4dYPhw8TyrPn14nhURUTYxuSHSpi/Ps6pYUTzP6vffxfOsPDyADRt4nhURkQqY3BDlBannWf37r3ieVceO4mqqc+eAHj3+O8/q4UNtR0pElOcxuSHKS1LPs9qy5b/zrBwd/zvP6ptvgJYtgb//LjDnWRERqUqryc3Jkyfh4+MDe3t7SCQS7NmzJ9P6u3btgpeXF4oXLw4LCwt4eHjg0KFDuRMsUW5LPc/q4UNxtZW3t7ii6p9/AB8f8eDOmTN19jwrIqLs0mpyk5CQgCpVqmDp0qVZqn/y5El4eXnhwIEDuHTpEho3bgwfHx9c4cRL0mWp51kdPAjcvQv8/DNQpAjw+DEwZox4y6pbN+DsWS4nJyKClncobtGiBVq0aJHl+gsWLFB6Pn36dPz111/Yt28f3N3d1RwdUR5Upoy4+/Hn51ldvKiz51kREWVHvj5+QS6XIy4uDkWLFs2wTlJSEpKSkhTPY2NjAYjngnx+Po46pLan7nbp6/Jr38tkQGgoEB0t3oXy8AA+O9kgYwYGQNeuQNeukISFQS8oCJJt2yD5/3lWwsiRkHfvDnnfvho/zyq/9r0uYN9rD/s+96nS1xJBUH0c+9SpU1ixYgUePHiAHTt2oESJEtiwYQNcXFxQr149VZsTA5FIsHv3brRu3TrLr5k9ezZmzpyJ27dvw9raOt06gYGBmDRpUpryTZs2wdTUNFuxEuVFhnFxKHnsGJwPHoR5VJSi/FWlSoho0QLRtWpBMMjX/54hogIsMTERXbp0QUxMDCwsLDKtq3Jys3PnTnTv3h1du3bFhg0bcPPmTZQqVQpLlizBgQMHcODAgWwFrWpys2nTJvTp0wd//fUXPD09M6yX3siNo6MjXr9+/dXOUVVKSgqCg4Ph5eXFg9RyWX7r+337gO7d006RkUjEPzdsEOcMZ4tcDsmxY+Jozt9/QyKXAwAEOzvIe/eGvHdvcaNANclvfa9L2Pfaw77PfbGxsbCysspScqPyP+OmTp2KoKAg9OjRA1u2bFGU161bF1OnTlU92
mzYsmULfvrpJ2zfvj3TxAYApFIppFJpmnJDQ0ON/YXUZNuUufzQ9zIZMGyYuFdfeiQScaPiVq2yeIsqPS1aiI8nT4CVK4FVqyCJioL+1KnQnzFDbHzgQKBJk/8yqhzKD32vq9j32sO+zz2q9LPKq6Xu3LmDBg0apCm3tLTE+1w4E2fz5s3w8/PD5s2b8d1332n8/YjU7dQp4OnTjK8LgpiTnDqlhjdzdPzvPKutW4GGDcXsatcuwNOT51kRkU5SObmxtbXF/fv305SfPn0apUqVUqmt+Ph4hIeHIzw8HAAQERGB8PBwREZGAgDGjBmDHj16KOpv2rQJPXr0wNy5c1G7dm1ER0cjOjoaMTExqn4MIq35bDqMWupliZER0KEDEBICXL8ODBok7orM86yISAepnNz06dMHw4YNw/nz5yGRSPD8+XNs3LgRI0aMwIABA1RqKywsDO7u7opl3AEBAXB3d8eECRMAAFFRUYpEBwBWrlyJT58+YdCgQbCzs1M8hg0bpurHINIaOzv11lNZhQrAkiXAs2fA8uVApUo8z4qIdIrKc25Gjx4NuVyOpk2bIjExEQ0aNIBUKsWIESMwZMgQldpq1KgRMpvPvHbtWqXnISEhqoZLlOfUry/uu/fsWfp77kkk4vX69TUcSKFCQP/+QL9+wJkz4p45O3aI51mdOwf4+wO9e4vXVRyVJSLSJpVHbiQSCX799Ve8ffsW169fx7lz5/Dq1StMmTJFE/ER6Rx9fXGaC5B2Lm/q8wULcjCZWFUSCVCvHrBpkzjZZ9o0oGRJnmdFRPlWto9fMDIygpubG2rVqgVz7oRKpJK2bcVBki9XZDs4iOVt22onLtjYAGPHiudZ7d0LNG+e9jyrGTOAly+1FCAR0depfFuqcePGkGSydPTYsWM5CoiooGjbVlyRfeqUOHnYzk68FZVrIzaZ0dcXkxkfH+D+fWDFCmD1avE8q7FjgYkTgfbtxeXkNWtqO1oiIiUqJzdVq1ZVep6SkoLw8HBcv34dvr6+6oqLqEDQ1wcaNdJ2FF/xzTfAnDnA5MnAtm3i3JwLF8TbWJs2waBSJTjVqwc0aCAe6ElEpGUqJzfz589PtzwwMBDx8fE5DoiI8igTE8DXV3yEhYkrrTZtguTaNVS9dg3Cn3+K1wYMANzctB0tERVg2Z5z86Vu3bph9erV6mqOiPKyGjWAP/4Anj+HbM4cxNvbQxIXJy4xr1ABaNwY2L4d4KGCRKQFaktuQkNDYWxsrK7miCg/KFIE8mHDcHTJEnw6cABo3RrQ0xM3C+zQAXByEufnZLYlMxGRmql8W6rtF8s4BEFAVFQUwsLCMH78eLUFRkT5iJ4eBE/P/86zWrVKPNMqKkqcqzNtmjh7esAAoGlTtZ1nRUSUHpVHbiwtLZUeRYsWRaNGjXDgwAFMnDhREzESUX7i6CgmNOmdZ+XlBZQvL27k8+6dtiMlIh2l8sjNmjVrNBEHEema1POsOnQAbtwAgoKAdeuAu3fF3Y/HjgW6dBGXk1erpu1oiUiHqG3ODRFRhipUABYvBp4//+88qw8fxEnJ1asD334LrF/P86yISC2ylNwUKVIERYsWzdKDiChD5ubieVZXrwKnT4sjN4aGwPnz4jJyBwdg1CjgwQNtR0pE+ViWbkstWLBAw2EQUYEikQB164qP+fPFEZygIHGezpw5wG+/Ad7e4i2rli3zyLbNRJRfZCm54c7DRKQx1tbAmDHiiM2BA+IOyAcP/vdwchJPJu/dW6xLRPQVOZpz8/HjR8TGxio9iIiyJfU8q3/+Ec+zGjECKFr0v/OsHByArl2BM2fEwzyJiDKgcnKTkJCAwYMHw9raGmZmZihSpIjSg4gox0qXFm9PPX0KrF0L1Kol7na8aRNQrx5Qtap4mCePfCGidKic3IwaNQrHjh3D8uXLIZVK8fvvv2PSpEmwt7fH+vXrNREjERVUqedZnT8vnmfVqxdgbAz8+684MdneHhgyBLh5U9uRE
lEeonJys2/fPixbtgzt2rWDgYEB6tevj3HjxmH69OnYuHGjJmIkIhKXjP//PCvMnw+UKQPwPCsiSofKyc3bt29RqlQpAICFhQXevn0LAKhXrx5Onjyp3uiIiL5UpAgwfDhw+zZw+DDQpo3yeVYlSwITJvA8K6ICTOXkplSpUoiIiAAAlC9fHtu2bQMgjugULlxYrcEREWVIT088zmHXLuDRI2D8eMDWFoiOBqZMAZydgbZtgSNHALlc29ESUS5SObnx8/PD1atXAQCjR4/G0qVLYWxsDH9/f4wcOVLtARIRfVXqeVaPHyufZ7V7t5gAubryPCuiAiTLyc2IESNw+/Zt+Pv7Y+jQoQAAT09P3L59G5s2bcKVK1cwbNgwjQVKRPRVqedZhYQA168DgwcDhQr9d55ViRLifjmXLmk7UiLSoCwnN3/99RcqVKiAOnXqYPXq1UhISAAAODk5oW3btqhcubLGgiQiUtnn51kFBQGVK4vnWa1eDdSoAdSuLR7k+eGDtiMlIjXLcnJz7949HD9+HGXLlsWwYcNga2uLXr164ezZs5qMj4goZ8zNxR2Ow8PFDQC7dhVHeC5cAHr2FDcHHDmS51kR6RCV5tw0aNAAa9euRXR0NBYuXIh79+6hXr16cHV1xW+//YYXL15oKk4iopyRSIA6dYA//wSePAGmTxePdnj7VjzL6ptvgBYtgL17xfk6RJRvZev4BTMzM/Tq1QunTp3C3bt30bZtW8yYMQMlS5ZUd3xEROqXep7VgwfAvn1iUiORiGdZtWoFlColJj/8BxtRvpSjs6USEhJw6tQpnDhxAu/evVPsf0NElC/o6wPffy8e2Hnvnnh7qmhR8XTyX38VV2F16QKcPs3zrIjykWwlN6dPn0avXr1gZ2eHoUOHomzZsjh16hRu3bql7viIiHJH6dLA7Nni5n/r1okTjlNSgM2bgfr1gSpVxInJcXHajpSIviLLyU1UVBRmzpyJ8uXLo0GDBrh9+zbmzZuHqKgorF69GnXr1tVknEREucPEBOjRAzh3TjzPqndvsezaNWDAAHE5+eDBwI0b2o6UiDKQ5eTG0dER8+fPx/fff48bN27g7Nmz+Omnn2Bubq7J+IiItKd6deD334Fnz8TzrMqWFUduli4FKlYUNwvcuhVITtZ2pET0mSwnN9u2bcOzZ8/w22+/wdXVVZMxERHlLZ+fZ3XkiHisg74+cPIk0KmTeJ7V+PHiKiwi0rosJzdt27aFgYGBJmMhIsrbJBKgaVNg507xPKsJE8TzrF68AKZOFc+zatMGCA7meVZEWpSj1VJERAWWgwMwaZK4smrbNqBRIzGh2bMHaNYMKF9evJXF86yIcp1Wk5uTJ0/Cx8cH9vb2kEgk2LNnz1dfExISgmrVqkEqleKbb77B2rVrNR4nEVGGDA2B9u2B48fFScap51nduwcEBPA8KyIt0Gpyk5CQgCpVqmDp0qVZqh8REYHvvvsOjRs3Rnh4OIYPH46ffvoJhw4d0nCkRERZ4ObG86yI8gCtTqJp0aIFWrRokeX6QUFBcHFxwdy5cwEArq6uOH36NObPnw9vb29NhUlEpJrU86z69gVCQ4Fly4Dt28XzrC5cEEd0/PyA/v3FYx+ISK1UTm4SEhIwc+ZMHD16FC9fvoT8i0lzDx8+VFtwXwoNDYWnp6dSmbe3N4YPH57ha5KSkpCUlKR4HhsbCwBISUlBSkqKWuNLbU/d7dLXse+1h33/FTVrAmvWALNmQW/tWuitWgXJ48fA3LnA3LmQN2sGeb9+EFq2FFdgqYB9rz3s+9ynSl+rnNz89NNPOHHiBLp37w47OztIJBJVm8i26Oho2NjYKJXZ2NggNjYWHz58gImJSZrXzJgxA5MmTUpTfvjwYZiammokzuDgYI20S1/Hvtce9n0WVKwIzJsHm0uX4HLwIKyvXIHe4cPQO3wYicWL41GzZoj08kJS4cIqNcu+1x72fe5JTEzMcl2Vk5t//vkH+/fvz
zc7Eo8ZMwYBAQGK57GxsXB0dESzZs1gYWGh1vdKSUlBcHAwvLy8YGhoqNa2KXPse+1h32eDjw8QGIhPDx5Ab9Uq6K1dC9NXr+C2cSNct22D0KYN5P37Q6hbV1x+ngH2vfaw73Nf6p2XrFA5uSlSpAiKFi2q6svUwtbWFi++OKX3xYsXsLCwSHfUBgCkUimkUmmackNDQ439hdRk25Q59r32sO+zoXx58fbUtGninJxlyyA5dw6Sbdugt22bONIzcCDQrZu4AisD7HvtYd/nHlX6WeXVUlOmTMGECRNUGh5SFw8PDxw9elSpLDg4GB4eHrkeCxGR2hgbA927i5OPL10CfvpJPM/q+nUxubG3BwYNEp8T0VepnNzMnTsXhw4dgo2NDSpVqoRq1aopPVQRHx+P8PBwhIeHAxCXeoeHhyMyMhKAeEupR48eivr9+/fHw4cPMWrUKNy+fRvLli3Dtm3b4O/vr+rHICLKm6pVA1atEpeTL1wIlCsHxMeLK64qVeJ5VkRZoPJtqdatW6vtzcPCwtC4cWPF89S5Mb6+vli7di2ioqIUiQ4AuLi4YP/+/fD398fChQvh4OCA33//ncvAiUj3FC4MDB0KDBkCHDsGLF8u7n588qT4sLGBnp8fjEuX1nakRHmOysnNxIkT1fbmjRo1giAIGV5Pb/fhRo0a4cqVK2qLgYgoT0s9z6ppU/F08lWrgJUrgago6M+ciWZ6ehD27hV3Rvb0BPR4qg5Rtv8vuHTpEv7880/8+eefTDaIiHJDiRJAYCDw+DGwfTvkDRtCIpdDb98+wNtbvIU1bx7w9q22IyXSKpWTm5cvX6JJkyaoWbMmhg4diqFDh6J69epo2rQpXr16pYkYiYjoc4aGwI8/QhYcjGOLF0M2aBBgYQHcvw/8/LOYBPXqxfOsqMBSObkZMmQI4uLicOPGDbx9+xZv377F9evXERsbi6FDh2oiRiIiykCcoyPk8+eLt6xWrACqVAE+fhR3Ra5RA6hVC1i7ludZUYGicnJz8OBBLFu2DK6urooyNzc3LF26FP/8849agyMioiwyNxfPsrpyBTh7Vtwbx8gIuHhRPMeqRAlgxAhxdIdIx6mc3Mjl8nQ30jE0NExzzhQREeUyiQTw8AA2bACePgVmzACcnYF378QNA8uUAZo3B/buBWQybUdLpBEqJzdNmjTBsGHD8Pz5c0XZs2fP4O/vj6ZNm6o1OCIiTZHJgJAQYPNm8U+d/D1fvDgwerQ4WvP330DLlmLyc+gQ0KoV4OIi7o78xc7vRPmdysnNkiVLEBsbC2dnZ5QuXRqlS5eGi4sLYmNjsXjxYk3ESESkVrt2iYMZjRsDXbqIfzo7i+U6SV8f+O47YP9+MdEZNQooVgx48gQYNw5wdAQ6dwZOnQIy2Z6DKL9QOblxdHTE5cuXsX//fgwfPhzDhw/HgQMHcPnyZTg4OGgiRiIitdm1C/jxR/GOzeeePRPLdTbBSVWqFDBrltgB69eLt7BSUoAtW4AGDYDKlcUNA+PitB0pUbZla58biUQCLy8vDBkyBEOGDIGnp6e64yIiUjuZDBg2LP3BidSy4cN19BbVl1LPszp7Frh8WTzPytSU51mRTsjSDsWLFi1C3759YWxsjEWLFmVal8vBiSivOnUq7YjN5wRBvFNz6hTQqFGuhaV97u7izsdz5oijOcuWAXfuiH8uWwbUry8mPG3biiuwiPK4LCU38+fPR9euXWFsbIz58+dnWE8ikTC5IaI8KypKvfV0zufnWR0/LiY2e/aI2d6pU4CNjTjC07cvULKktqMlylCWkpuIiIh0/5uIKD+xs1NvPZ0lkQBNmoiPL86zwrRp4vJyHx9xNCcb51nJZGKuFBUl9nX9+uKcZyJ1UXnOzeTJk5GYmJim/MOHD5g8ebJagiIi0oT69QEHB/F3d3okEnHhUP36uRtXnvb5eVbbtolLy+Ry4K+/xPOsypYV98/J4nlWBW6lGmmFysnNpEmTEB8fn
6Y8MTERkyZNUktQRESaoK8PLFwo/veXCU7q8wULOIqQLkNDoH174Ngx4OZN8faVhQXw4IG483GJEuJOyBcvZthEgV+pRrlG5eRGEARI0vlnz9WrV1G0aFG1BEVEpClt2wI7doi/iz/n4CCWt22rnbjyFVdXMUt8/ly8XVW1qnie1dq14llWNWuKZ1t9NsrPlWqUm7Kc3BQpUgRFixaFRCJB2bJlUbRoUcXD0tISXl5e6NChgyZjJSJSi7ZtgUePxDmzmzaJf0ZEMLFRmZkZ0KePuJT88/OswsLEU8kdHMRTyu/dU2mlGlFOZWlCMQAsWLAAgiCgV69emDRpEiwtLRXXjIyM4OzsDA8PD40ESUSkbvr6BWy5tyalnmfl4QHMmyeO2ixfLmaQ8+YB8+ahXKVm+AEDsR/fQZbJr54Cu1KN1CrLyY2vry8AwMXFBXXq1En38EwiIirgihcXj3f4+WfxDKtly4ADB2B37TD+wmFEwhEr0A9/oDdewDbNywv8SjVSC5Xn3DRs2FCR2Hz8+BGxsbFKDyIiIujriwd1/v038OAB5CN/wRs9K5TEE0zDOESiJDahM+rhFACBK9VIrVRObhITEzF48GBYW1vDzMwMRYoUUXoQEREpcXGB3uyZOLXxCbpjA0LxLYyQgs7YglNogH9RGQOEZVgyPZYr1UgtVE5uRo4ciWPHjmH58uWQSqX4/fffMWnSJNjb22P9+vWaiJGIiHRA607GaLOzGzo4hKIqrmAF+iIBpqiE61iKQfhhQAlxY0CeZ0U5pHJys2/fPixbtgzt2rWDgYEB6tevj3HjxmH69OnYuHGjJmIkIiIdkbpSbcHxqrDYtAKX9z2HfMEioHx5ID5enIhcqZJ4QvmWLUBysrZDpnxI5eTm7du3KFWqFADAwsICb/+/K2W9evVw8uRJ9UZHREQ6J3WlWufOQP3vLaE3bIi4MeCxY+Jufvr64prwzp3FiTjjxgGRkdoOm/IRlZObUqVKKc6XKl++PLZt2wZAHNEpXLiwWoMjIqICQiIRz2LYvl086iEwELC3B16+FM+zcnEBWrUSV2DJ5dqOlvI4lZMbPz8/XL16FQAwevRoLF26FMbGxvD398fIkSPVHiARERUwJUoAEyeK96927BAP8JTLgb17gebN/zvP6s0bbUdKeVSW97lJ5e/vr/hvT09P3L59G5cuXcI333yDypUrqzU4IiIqwAwNgXbtxMft20BQkHjEQ+p5VuPGAR07ipOQa9XSdrSUh6g8crN+/XokJSUpnjs5OaFt27YoX748V0sREZFmlC8vnmr67BmwatV/51mtWwfUrp3ueVZUcGXrtlRMTEya8ri4OPj5+aklKCIionSZmQE//SSeZxUamv55VgEBwN272o6UtEhtp4I/ffpU6bwpIiIijZFIgG+/BTZsEE/knDULcHYG3r0D5s8HypUDmjUD9uwBPn3SdrSUy7I858bd3R0SiQQSiQRNmzaFgcF/L5XJZIiIiEDz5s01EiQREVGGMjjPCsHB4sPBAejXTxzxsU17nhXpniwnN61btwYAhIeHw9vbG+bm5oprqaeCt2vXTu0BEhERZUnqeVYtWwIREcCKFcAff4gjO+PHA5MmiZOTBw4UD7FK5y4E6YYsJzcTJ04EADg7O6NTp06QSqUaC4qIiChHXFyAmTPFhGb7dnHn47Nnga1bxUeFCmKS060bYGGh7WhJzVSec+Pm5obw8PA05efPn0dYWJg6YiIiIlIPqVRMYM6cAa5cAfr2BUxNgRs3gEGDxD11BgwArl3TdqSkRionN4MGDcKTJ0/SlD979gyDBg1SOYClS5fC2dkZxsbGqF27Ni5cuJBp/QULFqBcuXIwMTGBo6Mj/P398fHjR5Xfl4iICpiqVcVbVc+fA4s+O88qKAioXFm8VbV5M/DZdieUP6mc3Ny8eRPVqlVLU+7u7o6bN2+q1NbWrVsREBCAiRMn4vLly6hSpQq8vb3x8uXLdOtv2rQJo
0ePxsSJE3Hr1i388ccf2Lp1K8aOHavqxyAiooLK0hIY8v/zrI4fB9q3BwwMgNOngS5dgJIlgV9/FY+BoHxJ5eRGKpXixYsXacqjoqKUVlBlxbx589CnTx/4+fnBzc0NQUFBMDU1xerVq9Otf/bsWdStWxddunSBs7MzmjVrhs6dO391tIeIiCgNiUQ8wXPbNjGRmTTpv/Ospk8HSpXieVb5lMrHLzRr1gxjxozBX3/9pdjX5v379xg7diy8vLyy3E5ycjIuXbqEMWPGKMr09PTg6emJ0NDQdF9Tp04d/Pnnn7hw4QJq1aqFhw8f4sCBA+jevXuG75OUlKS0o3JsbCwAICUlBSkpKVmONytS21N3u/R17HvtYd9rD/tejYoXB8aMAUaMgOTvv6G3YgX0jh0Tz7PauxdC6dKQ9+kDua8vUKwY+14LVOlriSAIgiqNP3v2DA0aNMCbN2/g7u4OQFwebmNjg+DgYDg6OmapnefPn6NEiRI4e/YsPDw8FOWjRo3CiRMncP78+XRft2jRIowYMQKCIODTp0/o378/li9fnuH7BAYGYtKkSWnKN23aBFNT0yzFSkREBY/506dwPngQJY8dg+H/j3WQGRriWb16iGjRAu/LltVyhAVLYmIiunTpgpiYGFh8ZYWbyskNACQkJGDjxo24evUqTExMULlyZXTu3BmGhoZZbiM7yU1ISAg6deqEqVOnonbt2rh//z6GDRuGPn36YPz48em+T3ojN46Ojnj9+vVXO0dVKSkpCA4OhpeXl0p9QTnHvtce9r32sO9zSUICJFu3Qn/5ckiuXlUUvy9dGiY//wy9Ll3EFVj5gEwmnloRHS3uZ+jhIW4PlB/ExsbCysoqS8mNyrelAMDMzAx9+/bNVnCprKysoK+vn2b+zosXL2CbwQ6S48ePR/fu3fHTTz8BACpVqoSEhAT07dsXv/76K/T00k4hkkql6e7JY2hoqLEfBppsmzLHvtce9r32sO81rHBhcYfjvn2B8+eBZcsgbN2Kwg8eiHvljB0L+PkB/fsDeXg0Z9cuYNgwcU/DVA4OwMKFQNu22osrq1T5O67yhGIA2LBhA+rVqwd7e3s8/v9s8vnz5+Ovv/7KchtGRkaoXr06jh49qiiTy+U4evSo0kjO5xITE9MkMPr/TzmzMQBFRESUdannWa1fj0+PHuGGry8EFxfg/fs8f57Vrl3Ajz8qJzaAeMj6jz+K13WJysnN8uXLERAQgBYtWuDdu3eQyWQAgCJFimDBggUqtRUQEIBVq1Zh3bp1uHXrFgYMGICEhATF6eI9evRQmnDs4+OD5cuXY8uWLYiIiEBwcDDGjx8PHx8fRZJDRESkcVZWuN+mDT7duiWeY/X992LyExwMtGkj7pA8dap4/0fLZDJxxCa9MYDUsuHDxXq6QuXkZvHixVi1ahV+/fVXpaXfNWrUwDUVd3js2LEjfvvtN0yYMAFVq1ZFeHg4Dh48CBsbGwBAZGQkoqKiFPXHjRuHn3/+GePGjYObmxt69+4Nb29vrFixQtWPQURElHN6ekCLFsC+fcDDh8Do0YCV1X/nWTk6Ah07AidOpJ9d5IJTp9KO2HxOEIAnT8R6ukLlOTcRERGKVVKfk0qlSEhIUDmAwYMHY/DgweleCwkJUXpuYGCAiRMnKs65IiIiyjOcnYEZM4DAQGDHDvF08rNnxX10tm0D3NzEOTrdu+fqeVafjRGopV5+oPLIjYuLS7pnSx08eBCurq7qiImIiCj/kkqBrl2Vz7MyMxN3RB48WNwosH9/4N9/cyUcOzv11ssPVE5uAgICMGjQIGzduhWCIODChQuYNm0axowZg1GjRmkiRiIiovwp9TyrZ8+AxYsBV1cgIUEsq1IFqFcP2LRJo+dZ1a8vroqSSNK/LpGId8/q19dYCLlO5eTmp59+wqxZszBu3DjFhjrLly/HwoUL0alTJ03ESERElL9ZWoqjNjduKJ9ndeaMOMrj6CguKdfAeVb6+uJyb
yBtgpP6fMGC/LPfTVZkayl4165dce/ePcTHxyM6OhpPnz5F79691R0bERGRbsnoPKtXr8T5Oi4uwA8/AP/8o9bzrNq2FacBlSihXO7gIJbnh31uVJGt5AYAXr58iUuXLuHOnTt49eqVOmMiIiLSffb2wIQJwKNHwM6dQJMm4tKlffuAli2BMmWAOXOAN2/U8nZt24pvdfy4eCfs+HEgIkL3EhsgG8lNXFwcunfvDnt7ezRs2BANGzaEvb09unXrhpiYGE3ESEREpLsMDcUM4+hR4NYtcVMaS0txafmoUeJwS8+ewIULOV5Orq8vDhx17iz+qUu3oj6XrTk358+fx/79+/H+/Xu8f/8ef//9N8LCwtCvXz9NxEhERFQwlC8vToB59gxYtQpwdxcnG69bB9SuDdSoAfzxB/D/gzwpfSonN3///TdWr14Nb29vWFhYwMLCAt7e3li1ahX27duniRiJiIgKFjMz4KefgEuXgHPngB49xCXmly+L5SVKAP7+wJ072o40T1I5uSlWrBgsLS3TlFtaWqJIkSJqCYqIiIggTkCuXVscuXn6FJg9W5x0/P69OMJTvjzg5QXs3p3nzrPSJpWTm3HjxiEgIADRn52XER0djZEjR2L8+PFqDY6IiIj+z8oKGDkSuH9fXE3l4yMmP0eOiHN2nJ2BKVN0a6vhbFL5+IXly5fj/v37KFmyJEqWLAlAPANKKpXi1atXSuc8Xb58WX2REhERkXieVfPm4uPRI2DlSuD338V5OhMmAJMni4d3DhoENGiQ8e59Okzl5KZ169YaCIOIiIhU5uwMTJ8OTJwoLidftkzcGHD7dvHh5gYMGCCeZ5XOlBJdpXJyw0MriYiI8hipFOjSRXxcvQosXw78+ad4ntWQIeJp5d26iYlOlSrajlbjVJ5zc/z48QyvfX5LioiIiLSgShUgKCj986yqVs2V86y0TeXkpnnz5hg5ciRSUlIUZa9fv4aPjw9Gjx6t1uCIiIgom7R4npW2ZWvkZvfu3ahZsyZu3ryJ/fv3o2LFioiNjUV4eLgGQiQiIqJs+/w8q8hIccJxiRIaP89Km1ROburUqYPw8HBUrFgR1apVQ5s2beDv74+QkBA4OTlpIkYiIiJSBzs7YPx4cZXVrl3iHjnpnWf1+rW2I82RbB2ceffuXYSFhcHBwQEGBga4c+cOErkVNBERUf5gYCAuFz98WNzlePhwoHDh/86zcnAAfH2B8+dzfJ6VNqic3MycORMeHh7w8vLC9evXceHCBVy5cgWVK1dGaGioJmIkIiIiTSlbFpg/X5yA/McfQLVq4mTj9euBb7/Nl+dZqZzcLFy4EHv27MHixYthbGyMihUr4sKFC2jbti0aNWqkgRCJiIhI40xNgV69gLAwccTG11f5PCt7e3GEJx+cZ6VycnPt2jW0aNFCqczQ0BBz5szB4cOH1RYYERERaYFEAtSqBaxdK47mzJkDlCoFxMQACxeK51l5eopzdvLoeVYqJzdWVlYZXnN1dc1RMERERJSHFCsGjBgB3LunfJ7V0aNAu3biDsmTJ+e586yynNyYmpri1atXiuffffcdoj77MC9evICdnZ16oyMiIiLtSz3Pau9ecdLxmDFA8eLiyM7EiUDJkkCHDkBISJ6YgJzl5Objx48QPgv45MmT+PDhg1IdIQ98ICIiItKg1POsnjwRdzquV0+8PbV9O9C4MVChArBkiVZvWWVrKXhGJAXw5FEiIqICSSoFOncGTp0Sz7Pq3x8wMwNu3RJPKdfX11poKh+cSURERKSkcmXxsM5Zs4ANG8TNArU44JHl5EYikSiNzHz5nIiIiAo4Cwtg0CBtR5H15EYQBJQtW1aR0MTHx8Pd3R16enqK60RERETaluXkZs2aNZqMg4iIiEgtspzc+Pr6ajIOIiIiIrVQ62opIiIiIm1jckNEREQ6hckNERER6RStJzdLly6Fs7MzjI2NUbt2bVy4cCHT+u/fv8egQYNgZ2cHqVSKs
mXL4sCBA7kULREREeV1Wt3Eb+vWrQgICEBQUBBq166NBQsWwNvbG3fu3IG1tXWa+snJyfDy8oK1tTV27NiBEiVK4PHjxyhcuHDuB09ERER5UpaSm4CAgCw3OG/ePJXq9unTB35+fgCAoKAg7N+/H6tXr8bo0aPT1F+9ejXevn2Ls2fPwtDQEADg7Oyc5fcjIiIi3Zel5ObKlStKzy9fvoxPnz6hXLlyAIC7d+9CX18f1atXz/IbJycn49KlSxgzZoyiTE9PD56enggNDU33NXv37oWHhwcGDRqEv/76C8WLF0eXLl3wyy+/QD+DMyySkpKQlJSkeB4bGwsASElJQUpKSpbjzYrU9tTdLn0d+1572Pfaw77XHvZ97lOlr7OU3Bw/flzx3/PmzUOhQoWwbt06FClSBADw7t07+Pn5oX79+ll+49evX0Mmk8HGxkap3MbGBrdv3073NQ8fPsSxY8fQtWtXHDhwAPfv38fAgQORkpKCiRMnpvuaGTNmYNKkSWnKDx8+DFNT0yzHq4rg4GCNtEtfx77XHva99rDvtYd9n3sSExOzXFciqHhuQokSJXD48GFUqFBBqfz69eto1qwZnj9/nqV2nj9/jhIlSuDs2bPw8PBQlI8aNQonTpzA+fPn07ymbNmy+PjxIyIiIhQjNfPmzcOcOXMQFRWV7vukN3Lj6OiI169fw8LCIkuxZlVKSgqCg4Ph5eWluG1GuYN9rz3se+1h32sP+z73xcbGwsrKCjExMV/9/a3yhOLY2Fi8evUqTfmrV68QFxeX5XasrKygr6+PFy9eKJW/ePECtra26b7Gzs4OhoaGSregXF1dER0djeTkZBgZGaV5jVQqhVQqTVNuaGiosb+QmmybMse+1x72vfaw77WHfZ97VOlnlZeCt2nTBn5+fti1axeePn2Kp0+fYufOnejduzfatm2b5XaMjIxQvXp1HD16VFEml8tx9OhRpZGcz9WtWxf379+HXC5XlN29exd2dnbpJjZERERU8Kic3AQFBaFFixbo0qULnJyc4OTkhC5duqB58+ZYtmyZSm0FBARg1apVWLduHW7duoUBAwYgISFBsXqqR48eShOOBwwYgLdv32LYsGG4e/cu9u/fj+nTp2NQHjhenYiIiPIGlW9LmZqaYtmyZZgzZw4ePHgAAChdujTMzMxUfvOOHTvi1atXmDBhAqKjo1G1alUcPHhQMck4MjISenr/5V+Ojo44dOgQ/P39UblyZZQoUQLDhg3DL7/8ovJ7ExERkW7K9iZ+UVFRiIqKQoMGDWBiYgJBECCRSFRuZ/DgwRg8eHC610JCQtKUeXh44Ny5cyq/DxERERUMKt+WevPmDZo2bYqyZcuiZcuWilVKvXv3xs8//6z2AImIiIhUoXJy4+/vD0NDQ0RGRirtE9OxY0ccPHhQrcERERERqUrl21KHDx/GoUOH4ODgoFRepkwZPH78WG2BEREREWWHyiM3CQkJ6e7s+/bt23T3kyEiIiLKTSonN/Xr18f69esVzyUSCeRyOWbPno3GjRurNTgiIiIiVal8W2r27Nlo2rQpwsLCkJycjFGjRuHGjRt4+/Ytzpw5o4kYiYiIiLJM5ZGbihUr4u7du6hXrx5atWqFhIQEtG3bFleuXEHp0qU1ESMRERFRlqk0cpOSkoLmzZsjKCgIv/76q6ZiIiIiIso2lUZuDA0N8e+//2oqFiIiIqIcU/m2VLdu3fDHH39oIhYiIiKiHFN5QvGnT5+wevVqHDlyBNWrV09zptS8efPUFhwRERGRqlRObq5fv45q1aoBAO7evat0LTtnSxERERGpk8rJzfHjxzURBxEREZFaqDznhoiIiCgvU3nkpnHjxpnefjp27FiOAiIiIiLKCZWTm6pVqyo9T0lJQXh4OK5fvw5fX191xUVERESULSonN/Pnz0+3PDAwEPHx8TkOiIiIiCgn1Dbnplu3bli9erW6miMiIiLKFrUlN6GhoTA2NlZXc0RERETZo
vJtqbZt2yo9FwQBUVFRCAsLw/jx49UWGBEREVF2qJzcWFpaKj3X09NDuXLlMHnyZDRr1kxtgRERERFlh8rJzZo1azQRBxEREZFaqDzn5smTJ3j69Kni+YULFzB8+HCsXLlSrYERERERZYfKyU2XLl0URzBER0fD09MTFy5cwK+//orJkyerPUAiIiIiVaic3Fy/fh21atUCAGzbtg2VKlXC2bNnsXHjRqxdu1bd8RERERGpROXkJiUlBVKpFABw5MgR/PDDDwCA8uXLIyoqSr3REREREalI5eSmQoUKCAoKwqlTpxAcHIzmzZsDAJ4/f45ixYqpPUAiIiIiVaic3MyaNQsrVqxAo0aN0LlzZ1SpUgUAsHfvXsXtKiIiIiJtUXkpeKNGjfD69WvExsaiSJEiivK+ffvC1NRUrcERERERqUrl5AYA9PX1lRIbAHB2dlZHPEREREQ5kq3kZseOHdi2bRsiIyORnJysdO3y5ctqCYyIiIgoO1Sec7No0SL4+fnBxsYGV65cQa1atVCsWDE8fPgQLVq00ESMRERERFmmcnKzbNkyrFy5EosXL4aRkRFGjRqF4OBgDB06FDExMZqIkYiIiCjLVE5uIiMjUadOHQCAiYkJ4uLiAADdu3fH5s2bsxXE0qVL4ezsDGNjY9SuXRsXLlzI0uu2bNkCiUSC1q1bZ+t9iYiISPeonNzY2tri7du3AICSJUvi3LlzAICIiAgIgqByAFu3bkVAQAAmTpyIy5cvo0qVKvD29sbLly8zfd2jR48wYsQI1K9fX+X3JCIiIt2lcnLTpEkT7N27FwDg5+cHf39/eHl5oWPHjmjTpo3KAcybNw99+vSBn58f3NzcEBQUBFNTU6xevTrD18hkMnTt2hWTJk1CqVKlVH5PIiIi0l0qr5ZauXIl5HI5AGDQoEEoVqwYzp49ix9++AH9+vVTqa3k5GRcunQJY8aMUZTp6enB09MToaGhGb5u8uTJsLa2Ru/evXHq1ClVPwIRERHpMJWTGz09Pejp/Tfg06lTJ3Tq1Clbb/769WvIZDLY2NgoldvY2OD27dvpvub06dP4448/EB4enqX3SEpKQlJSkuJ5bGwsAPGMrJSUlGzFnZHU9tTdLn0d+1572Pfaw77XHvZ97lOlr7O1z82pU6ewYsUKPHjwADt27ECJEiWwYcMGuLi4oF69etlpMkvi4uLQvXt3rFq1ClZWVll6zYwZMzBp0qQ05YcPH9bYjsrBwcEaaZe+jn2vPex77WHfaw/7PvckJiZmua7Kyc3OnTvRvXt3dO3aFVeuXFGMisTExGD69Ok4cOBAltuysrKCvr4+Xrx4oVT+4sUL2Nrapqn/4MEDPHr0CD4+Poqy1FtkBgYGuHPnDkqXLq30mjFjxiAgIEDxPDY2Fo6OjmjWrBksLCyyHGtWpKSkIDg4GF5eXjA0NFRr25Q59r32sO+1h32vPez73Jd65yUrVE5upk6diqCgIPTo0QNbtmxRlNetWxdTp05VqS0jIyNUr14dR48eVSznlsvlOHr0KAYPHpymfvny5XHt2jWlsnHjxiEuLg4LFy6Eo6NjmtdIpVJIpdI05YaGhhr7C6nJtilz7HvtYd9rD/tee9j3uUeVflY5ublz5w4aNGiQptzS0hLv379XtTkEBATA19cXNWrUQK1atbBgwQIkJCTAz88PANCjRw+UKFECM2bMgLGxMSpWrKj0+sKFCwNAmnIiIiIqmFRObmxtbXH//v00B2WePn06W8uyO3bsiFevXmHChAmIjo5G1apVcfDgQcUk48jISKUJzERERESZUTm56dOnD4YNG4bVq1dDIpHg+fPnCA0NxYgRIzB+/PhsBTF48OB0b0MBQEhISKavXbt2bbbek4iIiHSTysnN6NGjIZfL0bRpUyQmJqJBgwaQSqUYMWIEhgwZookYiYiIiLJM5eRGIpHg119/xciRI3H//n3Ex8fDzc0N5ubmmoiPiIiISCXZ2ucGEFc6ubm5q
TMWIiIiohzLcnLTq1evLNXL7EwoIiIiIk3LcnKzdu1aODk5wd3dPVunfxMRERHlhiwnNwMGDMDmzZsREREBPz8/dOvWDUWLFtVkbEREREQqy/IGMkuXLkVUVBRGjRqFffv2wdHRER06dMChQ4c4kkNERER5hkq740mlUnTu3BnBwcG4efMmKlSogIEDB8LZ2Rnx8fGaipGIiIgoy7K99a+enh4kEgkEQYBMJlNnTERERETZplJyk5SUhM2bN8PLywtly5bFtWvXsGTJEkRGRnKfGyIiIsoTsjyheODAgdiyZQscHR3Rq1cvbN68GVZWVpqMjYiIiEhlWU5ugoKCULJkSZQqVQonTpzAiRMn0q23a9cutQVHREREpKosJzc9evSARCLRZCxEREREOabSJn5EREREeV22V0sRERER5UVMboiIiEinMLkhIiIincLkhoiIiHQKkxsiIiLSKUxuiIiISKcwuSEiIiKdwuSGiIiIdAqTGyIiItIpTG6IiIhIpzC5ISIiIp3C5IaIiIh0CpMbIiIi0ilMboiIiEinMLkhIiIincLkhoiIiHQKkxsiIiLSKUxuiIiISKcwuSEiIiKdkieSm6VLl8LZ2RnGxsaoXbs2Lly4kGHdVatWoX79+ihSpAiKFCkCT0/PTOsTERFRwaL15Gbr1q0ICAjAxIkTcfnyZVSpUgXe3t54+fJluvVDQkLQuXNnHD9+HKGhoXB0dESzZs3w7NmzXI6ciIiI8iKtJzfz5s1Dnz594OfnBzc3NwQFBcHU1BSrV69Ot/7GjRsxcOBAVK1aFeXLl8fvv/8OuVyOo0eP5nLkRERElBdpNblJTk7GpUuX4OnpqSjT09ODp6cnQkNDs9RGYmIiUlJSULRoUU2FSURERPmIgTbf/PXr15DJZLCxsVEqt7Gxwe3bt7PUxi+//AJ7e3ulBOlzSUlJSEpKUjyPjY0FAKSkpCAlJSWbkacvtT11t0tfx77XHva99rDvtYd9n/tU6WutJjc5NXPmTGzZsgUhISEwNjZOt86MGTMwadKkNOWHDx+GqampRuIKDg7WSLv0dex77WHfaw/7XnvY97knMTExy3W1mtxYWVlBX18fL168UCp/8eIFbG1tM33tb7/9hpkzZ+LIkSOoXLlyhvXGjBmDgIAAxfPY2FjFJGQLC4ucfYAvpKSkIDg4GF5eXjA0NFRr25Q59r32sO+1h32vPez73Jd65yUrtJrcGBkZoXr16jh69Chat24NAIrJwYMHD87wdbNnz8a0adNw6NAh1KhRI9P3kEqlkEqlacoNDQ019hdSk21T5tj32sO+1x72vfaw73OPKv2s9dtSAQEB8PX1RY0aNVCrVi0sWLAACQkJ8PPzAwD06NEDJUqUwIwZMwAAs2bNwoQJE7Bp0yY4OzsjOjoaAGBubg5zc3OtfQ4iIiLKG7Se3HTs2BGvXr3ChAkTEB0djapVq+LgwYOKScaRkZHQ0/tvUdfy5cuRnJyMH3/8UamdiRMnIjAwMDdDJyIiojxI68kNAAwePDjD21AhISFKzx89eqT5gIiIiCjf0vomfkRERETqxOSGiIiIdAqTGyIiItIpTG6IiIhIpzC5ISIiIp3C5IaIiIh0CpMbIiIi0ilMboiIiEinMLkhIiIincLkhoiIiHQKkxsiIiLSKUxuiIiISKcwuSEiIiKdwuSGiIiIdAqTGyIiItIpTG6IiIhIpzC5ISIiIp3C5IaIiIh0CpMbIiIi0ilMboiIiEinMLkhIiIincLkhoiIiHQKkxsiIiLSKUxuiIiISKcwuSEiIiKdwuSGiIiIdAqTGyIiItIpTG6IiIhIpzC5ISIiIp3C5IaIiIh0CpMbIiIi0ilMboiIiEinMLkhIiIincLkhoiIiHRKnkhuli5dCmdnZxgbG6N27dq4cOFCpvW3b9+O8uXLw9jYGJUqVcKBAwdyKVIiIiLK67Se3GzduhUBAQGYOHEiLl++jCpVqsDb2xsvX75Mt/7Zs
2fRuXNn9O7dG1euXEHr1q3RunVrXL9+PZcjJyIiorxI68nNvHnz0KdPH/j5+cHNzQ1BQUEwNTXF6tWr062/cOFCNG/eHCNHjoSrqyumTJmCatWqYcmSJbkcOREREeVFBtp88+TkZFy6dAljxoxRlOnp6cHT0xOhoaHpviY0NBQBAQFKZd7e3tizZ0+69ZOSkpCUlKR4HhsbCwCIj4+Hnp6e4j0NDQ2RkpICuVyuqKuvrw8DAwMkJydDEARFuYGBAfT19dOUp4qPj4ehoaHiuaGhISQSCZKTk5XqGRkZQRAEpKSkKJVLpVLI5XKlcolEAiMjI8hkMnz69ClN+adPnyCTyRTl6vpMhoaG0NPTU+rDvPiZBEGATCZT6vv8/pnyy/f04cMHpb7Xhc+UX76nxMRERd8bGRnpxGfKL99TSkqKou/NzMx04jN9Li9+TwkJCcgqrSY3r1+/hkwmg42NjVK5jY0Nbt++ne5roqOj060fHR2dbv0ZM2Zg0qRJacoXL14MY2NjAEDRokVRsmRJREZG4u3bt0rt2tnZ4cGDB4iLi1OUOzo6olixYrh9+zY+fvyoKC9VqhQsLCywZMkSpS+sXLlyMDIywrVr15RiqFSpEpKTk3Hnzh1FmZ6eHipXrozY2Fg8fPhQUW5sbIzy5cvjzZs3ePLkiaK8UKFCKF26NKKiovDixQtFubo/07///pvnP9OjR4+waNEinfpM+el7Sn0PXfpMqfL6Z7p27ZrOfSYgf3xP165d07nPBOTN7yk8PBxZJRHSG3rIJc+fP0eJEiVw9uxZeHh4KMpHjRqFEydO4Pz582leY2RkhHXr1qFz586KsmXLlmHSpElKnZcqvZEbR0dHPHv2DBYWFgDUO3Jz9OhRNGjQgCM3Whi5OXjwIBo3bsyRm1z+TB8+fMCRI0cUfa8Lnym/fE+JiYk4fvw4GjduzJGbXP5MKSkpir7nyE3ufKbXr1/D3t4eMTExit/fGdHqyI2VlRX09fXTJCUvXryAra1tuq+xtbVVqb5UKoVUKk1Tbm5uDnNzc6WyzxOS7JSnftnm5ubpvsbIyCjDGLNantMY1V2eVz5TSkoK9PX10+37/PqZ1Fmuyc8EIN2+z8+fKb98T3p6emn6Pr9/pvzyPaX3Mye/f6b05KXPZGZmlm55erQ6odjIyAjVq1fH0aNHFWVyuRxHjx5VGsn5nIeHh1J9AAgODs6wPhERERUsWh25AYCAgAD4+vqiRo0aqFWrFhYsWICEhAT4+fkBAHr06IESJUpgxowZAIBhw4ahYcOGmDt3Lr777jts2bIFYWFhWLlypTY/BhEREeURWk9uOnbsiFevXmHChAmIjo5G1apVcfDgQcWk4cjISMWqJgCoU6cONm3ahHHjxmHs2LEoU6YM9uzZg4oVK2rrIxAREVEeovXkBgAGDx6MwYMHp3stJCQkTVn79u3Rvn17DUdFRERE+ZHWN/EjIiIiUicmN0RERKRTmNwQERGRTmFyQ0RERDqFyQ0RERHpFCY3REREpFOY3BAREZFOYXJDREREOoXJDREREemUPLFDcW5KPUY9NjZW7W2npKQgMTERsbGxmZ6gTOrHvtce9r32sO+1h32f+1J/b6f+Hs9MgUtu4uLiAACOjo5ajoSIiIhUFRcXB0tLy0zrSISspEA6RC6X4/nz5yhUqBAkEola246NjYWjoyOePHkCCwsLtbZNmWPfaw/7XnvY99rDvs99giAgLi4O9vb2Sgdqp6fAjdzo6enBwcFBo+9hYWHBv+xawr7XHva99rDvtYd9n7u+NmKTihOKiYiISKcwuSEiIiKdwuRGjaRSKSZOnAipVKrtUAoc9r32sO+1h32vPez7vK3ATSgmIiIi3caRGyIiItIpTG6IiIhIpzC5ISIiIp3C5IaIiIh0CpMbNVm6dCmcnZ1hbGyM2rVr48KFC9oOSefNmDEDNWvWRKFChWBtbY3WrVvjzp072g6rQJo5cyYkEgmGDx+u7VAKh
GfPnqFbt24oVqwYTExMUKlSJYSFhWk7LJ0nk8kwfvx4uLi4wMTEBKVLl8aUKVOydNYR5S4mN2qwdetWBAQEYOLEibh8+TKqVKkCb29vvHz5Utuh6bQTJ05g0KBBOHfuHIKDg5GSkoJmzZohISFB26EVKBcvXsSKFStQuXJlbYdSILx79w5169aFoaEh/vnnH9y8eRNz585FkSJFtB2azps1axaWL1+OJUuW4NatW5g1axZmz56NxYsXazs0+gKXgqtB7dq1UbNmTSxZsgSAeH6Vo6MjhgwZgtGjR2s5uoLj1atXsLa2xokTJ9CgQQNth1MgxMfHo1q1ali2bBmmTp2KqlWrYsGCBdoOS6eNHj0aZ86cwalTp7QdSoHz/fffw8bGBn/88YeirF27djAxMcGff/6pxcjoSxy5yaHk5GRcunQJnp6eijI9PT14enoiNDRUi5EVPDExMQCAokWLajmSgmPQoEH47rvvlP7+k2bt3bsXNWrUQPv27WFtbQ13d3esWrVK22EVCHXq1MHRo0dx9+5dAMDVq1dx+vRptGjRQsuR0ZcK3MGZ6vb69WvIZDLY2NgoldvY2OD27dtaiqrgkcvlGD58OOrWrYuKFStqO5wCYcuWLbh8+TIuXryo7VAKlIcPH2L58uUICAjA2LFjcfHiRQwdOhRGRkbw9fXVdng6bfTo0YiNjUX58uWhr68PmUyGadOmoWvXrtoOjb7A5IZ0wqBBg3D9+nWcPn1a26EUCE+ePMGwYcMQHBwMY2NjbYdToMjlctSoUQPTp08HALi7u+P69esICgpicqNh27Ztw8aNG7Fp0yZUqFAB4eHhGD58OOzt7dn3eQyTmxyysrKCvr4+Xrx4oVT+4sUL2NraaimqgmXw4MH4+++/cfLkSTg4OGg7nALh0qVLePnyJapVq6Yok8lkOHnyJJYsWYKkpCTo6+trMULdZWdnBzc3N6UyV1dX7Ny5U0sRFRwjR47E6NGj0alTJwBApUqV8PjxY8yYMYPJTR7DOTc5ZGRkhOrVq+Po0aOKMrlcjqNHj8LDw0OLkek+QRAwePBg7N69G8eOHYOLi4u2QyowmjZtimvXriE8PFzxqFGjBrp27Yrw8HAmNhpUt27dNFse3L17F05OTlqKqOBITEyEnp7yr019fX3I5XItRUQZ4ciNGgQEBMDX1xc1atRArVq1sGDBAiQkJMDPz0/boem0QYMGYdOmTfjrr79QqFAhREdHAwAsLS1hYmKi5eh0W6FChdLMbTIzM0OxYsU450nD/P39UadOHUyfPh0dOnTAhQsXsHLlSqxcuVLboek8Hx8fTJs2DSVLlkSFChVw5coVzJs3D7169dJ2aPQFLgVXkyVLlmDOnDmIjo5G1apVsWjRItSuXVvbYek0iUSSbvmaNWvQs2fP3A2G0KhRIy4FzyV///03xowZg3v37sHFxQUBAQHo06ePtsPSeXFxcRg/fjx2796Nly9fwt7eHp07d8aECRNgZGSk7fDoM0xuiIiISKdwzg0RERHpFCY3REREpFOY3BAREZFOYXJDREREOoXJDREREekUJjdERESkU5jcEBERkU5hckNEWuHs7MwN/4hII5jcEFGW9ezZExKJBDNnzlQq37NnT4Y7Rmfk4sWL6Nu3rzrDU+Ls7AyJRKL0UNfBqhKJBHv27FFLW0SkfkxuiEglxsbGmDVrFt69e5ejdooXLw5TU1M1RZW+yZMnIyoqSvG4cuWKRt9PVSkpKdoOgUgnMbkhIpV4enrC1tYWM2bMyLTezp07UaFCBUilUjg7O2Pu3LlK1z+/LSUIAgIDA1GyZElIpVLY29tj6NChirpJSUkYMWIESpQoATMzM9SuXRshISFfjbVQoUKwtbVVPIoXLw6ZTIbevXvDxcUFJiYmKFeuHBYuXJjmtatXr1bEb2dnh8GDByviBoA2bdpAIpEongPA8uXLUbp0aRgZGaFcuXLYsGGDUpsSiQTLly/HDz/8ADMzM0ybN
u2rn4GIVMfkhohUoq+vj+nTp2Px4sV4+vRpunUuXbqEDh06oFOnTrh27RoCAwMxfvx4rF27Nt36O3fuxPz587FixQrcu3cPe/bsQaVKlRTXBw8ejNDQUGzZsgX//vsv2rdvj+bNm+PevXsqxy+Xy+Hg4IDt27fj5s2bmDBhAsaOHYtt27Yp6ixfvhyDBg1C3759ce3aNezduxfffPMNAPF2GiAe0BoVFaV4vnv3bgwbNgw///wzrl+/jn79+sHPzw/Hjx9Xev/AwEC0adMG165d42nSRJoiEBFlka+vr9CqVStBEATh22+/FXr16iUIgiDs3r1b+PzHSZcuXQQvLy+l144cOVJwc3NTPHdychLmz58vCIIgzJ07VyhbtqyQnJyc5j0fP34s6OvrC8+ePVMqb9q0qTBmzJgMY3VychKMjIwEMzMzxWPhwoXp1h00aJDQrl07xXN7e3vh119/zbBtAMLu3buVyurUqSP06dNHqax9+/ZCy5YtlV43fPjwDNslIvXgyA0RZcusWbOwbt063Lp1K821W7duoW7dukpldevWxb179yCTydLUb9++PT58+IBSpUqhT58+2L17Nz59+gQAuHbtGmQyGcqWLQtzc3PF48SJE3jw4EGmMY4cORLh4eGKR48ePQAAS5cuRfXq1VG8eHGYm5tj5cqViIyMBAC8fPkSz58/R9OmTVXqj4w+85f9U6NGDZXaJSLVGWg7ACLKnxo0aABvb2+MGTMGPXv2zFFbjo6OuHPnDo4cOYLg4GAMHDgQc+bMwYkTJxAfHw99fX1cunQJ+vr6Sq8zNzfPtF0rKyvF7aRUW7ZswYgRIzB37lx4eHigUKFCmDNnDs6fPw8AMDExydFn+RozMzONtk9ETG6IKAdmzpyJqlWroly5ckrlrq6uOHPmjFLZmTNnULZs2TQJSioTExP4+PjAx8cHgwYNQvny5XHt2jW4u7tDJpPh5cuXqF+/fo5jPnPmDOrUqYOBAwcqyj4fASpUqBCcnZ1x9OhRNG7cON02DA0N04xApX5mX19fpfdyc3PLccxEpBomN0SUbZUqVULXrl2xaNEipfKff/4ZNWvWxJQpU9CxY0eEhoZiyZIlWLZsWbrtrF27FjKZDLVr14apqSn+/PNPmJiYwMnJCcWKFUPXrl3Ro0cPzJ07F+7u7nj16hWOHj2KypUr47vvvlMp5jJlymD9+vU4dOgQXFxcsGHDBly8eBEuLi6KOoGBgejfvz+sra3RokULxMXF4cyZMxgyZAgAKJKfunXrQiqVokiRIhg5ciQ6dOgAd3d3eHp6Yt++fdi1axeOHDmiYq8SUY5pe9IPEeUfn08oThURESEYGRkJX/442bFjh+Dm5iYYGhoKJUuWFObMmaN0/fMJxbt37xZq164tWFhYCGZmZsK3334rHDlyRFE3OTlZmDBhguDs7CwYGhoKdnZ2Qps2bYR///03w1g/b/9zHz9+FHr27ClYWloKhQsXFgYMGCCMHj1aqFKlilK9oKAgoVy5cor3GzJkiOLa3r17hW+++UYwMDAQnJycFOXLli0TSpUqJRgaGgply5YV1q9fr9Qm0pmITETqJxEEQdB2gkVERESkLlwtRURERDqFyQ0RERHpFCY3REREpFOY3BAREZFOYXJDREREOoXJDREREekUJjdERESkU5jcEBERkU5hckNEREQ6hckNERER6RQmN0RERKRTmNwQERGRTvkfbns/XUoCERsAAAAASUVORK5CYII=", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Percent Error of ZNE Estimate -20.96933151264632 %\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkAAAAHHCAYAAABXx+fLAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjcsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvTLEjVAAAAAlwSFlzAAAPYQAAD2EBqD+naQAAiSlJREFUeJzt3XdYU9cbB/BvCHvjYigCLsQJakWcqCDaah2t24po1Vo3VeuoOOuss+6NrbuOqm3dorgHUvdGcYBbpgKG8/vj/kiNDIkmRMj38zz3gZycnLz3BOH13jNkQggBIiIiIj1ioOsAiIiIiPIaEyAiIiLSO0yAiIiISO8wASIiIiK9wwSIiIiI9A4TICIiItI7TICIiIhI7zABIiIiIr3DBIiIiIj0DhMgIsoTq1atgkwmw507d3Qdit64c+cOZDIZVq1apdF2XV1d0a1bN422SZTXmAARaZFMJnvvMXbsWF2HqSIjrhkzZmR6LiOJOXPmjA4iU9/YsWNz7PvY2Fi12rt8+TLGjh2rF0ncsWPHMHbsWLx8+VLXoRBphaGuAyAqyH777bdsnxs7dixu3boFb2/vPIwo96ZPn44+ffrA3NxcI+1988036NChA0xMTDTSnjoWLlwIS0vLTOW2trZqtXP58mWMGzcOvr6+cHV11Uxwn6hjx45h3Lhx6NatW6Z+unbtGgwM+P9nyt+YABFpUZcuXbIsX7ZsGW7duoX+/fujWbNmH/0+Qgi8fv0aZmZmH90WAHh6eiIyMhKLFi1CcHCwRtqUy+WQy+UaaUtdX3/9NYoUKZKn76npz+RToosklkjTmMIT5bFLly5hwIAB8PLywvTp01WeS09Px+zZs1GxYkWYmprC3t4evXv3xosXL1Tqubq6onnz5ti9ezdq1KgBMzMzLF68GABw+/ZttG3bFoUKFYK5uTlq1aqFv/76S60Y69Spg0aNGmHatGl49erVe+sfOHAA9erVg4WFBWxtbdGyZUtcuXJFpU5WY4DOnDmDgIAAFClSBGZmZnBzc0P37t0/qE8+RmBgIExNTTPFHBAQADs7Ozx8+BCrVq1C27ZtAQANGzZU3kYLCwsDkPNnsnLlSjRq1AjFihWDiYkJKlSogIULF2aKI6ONPXv2wNPTE6ampqhQoQK2bNmSqe6Hfs7nz59Ht27dUKpUKZiamsLBwQHdu3fHs2fPlHXGjh2LoUOHAgDc3NyU55rx2WU1Big38YSFhUEmk2Hjxo34+eefUaJECZiamqJx48a4efPme2Mn0iReASLKQ8nJyWjXrh3kcjnWr1+f6X/SvXv3xqpVqxAUFIQBAwYgKioK8+bNw7lz53D06FEYGRkp6167dg0dO3ZE79690bNnT7i7u+PRo0eoXbs2kpOTMWDAABQuXBihoaH48ssv8ccff6B169a5jnXs2LGoX78+Fi5cmONVoH379qFZs2YoVaoUxo4di1evXuHXX39FnTp1EBERke2tosePH6NJkyYoWrQohg8fDltbW9y5cyfTH3t1+iQ7z58/z1RmaGiovLUzZ84cHDhwAIGBgTh+/DjkcjkWL16MPXv24LfffoOTkxPq16+PAQMGYO7cuRg5ciQ8PDwAQPkVyPozAaRbcBUrVsSXX34JQ0ND7NixA99//z3S09PRt29flbhu3LiB9u3b47vvvkNgYCBWrlyJtm3bYteuXfD39weAj/qc9+7di9u3byMoKAgODg64dOkSlixZgkuXLuHEiROQyWRo06YNrl+/jnXr1mHWrFnKq2dFi
xbNsk1145kyZQoMDAwwZMgQxMXFYdq0aejcuTNOnjyZ08dIpFmCiPJM9+7dBQARGhqa6bnw8HABQKxZs0alfNeuXZnKXVxcBACxa9culbqDBg0SAER4eLiyLCEhQbi5uQlXV1ehUCjeGyMA0bdvXyGEEA0bNhQODg4iOTlZCCHEypUrBQBx+vRpZX1PT09RrFgx8ezZM2XZv//+KwwMDETXrl2VZRmvjYqKEkIIsXXr1kxtfUyfZGXMmDECQJaHu7u7St3du3cLAGLixIni9u3bwtLSUrRq1UqlzqZNmwQAcfDgwUzvld1nIoRQ9t/bAgICRKlSpbJsY/PmzcqyuLg44ejoKLy8vJRluf2co6KiBACxcuXKHGNZt26dACAOHz6sLJs+fbrK5/VunIGBgWrHc/DgQQFAeHh4iJSUFGXdOXPmCADiwoULmd6LSFt4C4woj6xduxYrVqzAN998g65du2Z6ftOmTbCxsYG/vz+ePn2qPKpXrw5LS0scPHhQpb6bmxsCAgJUyv7++2/UrFkTdevWVZZZWlqiV69euHPnDi5fvqxWzGPHjkVsbCwWLVqU5fMxMTGIjIxEt27dUKhQIWV5lSpV4O/vj7///jvbtjOuvuzcuRNpaWlZ1lG3T7KzefNm7N27V+VYuXKlSp0mTZqgd+/eGD9+PNq0aQNTU1PlLazcyuozAaAyDiguLg5Pnz5FgwYNcPv2bcTFxanUdXJyUrliYm1tja5du+LcuXPKWWsf8zm/Hcvr16/x9OlT1KpVCwAQERGh1vlmUDeeoKAgGBsbKx/Xq1cPgHQbjSivMAEiygM3btzAd999h3LlymHBggXZ1omLi0OxYsVQtGhRlSMxMRGPHz9Wqe/m5papjbt37ypvu7wt4zbN3bt3AUi3hGJjY5XHu3+EM9SvXx8NGzbMdixQRnvZvefTp0+RlJSUZdsNGjTAV199hXHjxqFIkSJo2bIlVq5ciZSUlA/uk+zUr18ffn5+KoePj0+mer/88gsKFSqEyMhIzJ07F8WKFctV+xmy+kwA4OjRo/Dz81OOkSpatChGjhwJAJn6vkyZMpDJZCpl5cqVAwDlGJzcfs5Zef78OQYOHAh7e3uYmZmhaNGiyriz+zl4H3XjKVmypMpjOzs7ANDouC6i9+EYICItS0lJQfv27ZGamor169dnOR0bkAb7FitWDGvWrMny+XfHX3zM7KI2bdrg0KFDyseBgYHZLpY3ZswY+Pr6YvHixWpPG8+JTCbDH3/8gRMnTmDHjh3YvXs3unfvjhkzZuDEiROwtLRUu08+1rlz55RJ1YULF9CxY0e1Xp/VZ3Lr1i00btwY5cuXx8yZM+Hs7AxjY2P8/fffmDVrFtLT0zUSe261a9cOx44dw9ChQ+Hp6ans56ZNm+ZZLNnNBhRC5Mn7EwFMgIi0bsiQITh37hzmzJkDLy+vbOuVLl0a+/btQ506dT44uXFxccG1a9cylV+9elX5PADMmDFD5X/bTk5O2bbZoEED+Pr6YurUqQgJCcn0fgCyfc8iRYrAwsIix5hr1aqFWrVq4eeff8batWvRuXNnrF+/Ht9++61G+iS3kpKSEBQUhAoVKqB27dqYNm0aWrdujc8++0xZ590rM7mxY8cOpKSkYPv27SpXPrK7fXfz5k0IIVTe6/r16wCgHFCe28/5XS9evMD+/fsxbtw4lc/yxo0bmeqqc64fGg+RLvEWGJEWbd26FfPmzcOXX36JAQMG5Fi3Xbt2UCgUmDBhQqbn3rx5k6sVeT///HOcOnUKx48fV5YlJSVhyZIlcHV1RYUKFQAA1atXV7kdlFGenYyxQEuWLFEpd3R0hKenJ0JDQ1Xiu3jxIvbs2YPPP/882zZfvHiR6X/8np6eAKC8DaaJPsmtH3/8EdHR0QgNDcXMmTPh6uqKwMBAlVtyGcmcOu+bcbXj7XONi4vLNAYpw8OHD7F161bl4/j4eKxevRqenp5wcHAAkPvPOTexAMDs2bMz1
VXnXD80HiJd4hUgIi2JiYlBjx49IJfL0bhxY/z+++9Z1itdujR8fHzQoEED9O7dG5MnT0ZkZCSaNGkCIyMj3LhxA5s2bcKcOXPw9ddf5/iew4cPx7p169CsWTMMGDAAhQoVQmhoKKKiorB58+YPXr23QYMGaNCggcptswzTp09Hs2bN4OPjgx49eiinwdvY2OS4zUdoaCgWLFiA1q1bo3Tp0khISMDSpUthbW2tTJw00ScA8Mcff2R569Hf3x/29vY4cOAAFixYgDFjxqBatWoApLV7fH19MXr0aEybNg2AlKDJ5XJMnToVcXFxMDExUa7vk50mTZrA2NgYLVq0QO/evZGYmIilS5eiWLFiiImJyVS/XLly6NGjB06fPg17e3usWLECjx49UkmYPvRztra2Rv369TFt2jSkpaWhePHi2LNnD6KiojLVrV69OgBg1KhR6NChA4yMjNCiRYssr+hp6+eOSKt0OgeNqADLmPL7vuPt6cRCCLFkyRJRvXp1YWZmJqysrETlypXFsGHDxMOHD5V1XFxcxBdffJHl+966dUt8/fXXwtbWVpiamoqaNWuKnTt35jpuvDUNPrvzeXfq+r59+0SdOnWEmZmZsLa2Fi1atBCXL19WqfPuNPiIiAjRsWNHUbJkSWFiYiKKFSsmmjdvLs6cOZPpvXPTJ1nJaRo8/j+dPT4+Xri4uIhq1aqJtLQ0ldcPHjxYGBgYiOPHjyvLli5dKkqVKiXkcrnKlPicPpPt27eLKlWqCFNTU+Hq6iqmTp0qVqxYkWmaeUYbu3fvFlWqVBEmJiaifPnyYtOmTZnazM3nnNU0+Pv374vWrVsLW1tbYWNjI9q2bSsePnwoAIgxY8aovH7ChAmiePHiwsDAQCXWd6fB5zaejJ+hd88nqziJtE0mBEedERF9ClxdXVGpUiXs3LlT16EQFXi8LklERER6hwkQERER6R0mQERERKR3OAaIiIiI9A6vABEREZHeYQJEREREeocLIWYhPT0dDx8+hJWV1QctfU9ERER5TwiBhIQEODk5vXcBTiZAWXj48CGcnZ11HQYRERF9gHv37qFEiRI51mEClAUrKysAUgdaW1trtO20tDTs2bNHuaQ/5R32vW6w33WHfa877HvdiI+Ph7Ozs/LveE6YAGUh47aXtbW1VhIgc3NzWFtb8x9FHmPf6wb7XXfY97rDvtet3Axf4SBoIiIi0js6TYAOHz6MFi1awMnJCTKZDNu2bcuxfrdu3SCTyTIdFStWVNYZO3ZspufLly+v5TMhIiKi/ESnCVBSUhKqVq2K+fPn56r+nDlzEBMTozzu3buHQoUKoW3btir1KlasqFLvyJEj2gifiIiI8imdjgFq1qwZmjVrluv6NjY2sLGxUT7etm0bXrx4gaCgIJV6hoaGcHBw0FicRKQehUKBtLQ0ANJYCENDQ7x+/RoKhULHkekX9r3usO+1w8jICHK5XCNt5etB0MuXL4efnx9cXFxUym/cuAEnJyeYmprCx8cHkydPRsmSJXUUJZH+EEIgNjYWL1++VClzcHDAvXv3uK5WHmPf6w77XntsbW3h4ODw0f2abxOghw8f4p9//sHatWtVyr29vbFq1Sq4u7sjJiYG48aNQ7169XDx4sVsp8WlpKQgJSVF+Tg+Ph6AlMFn/C9WU97+XzHlLfa99j169Ajx8fEoWrQozM3NIZPJIIRAUlISLCws+Icgj7HvdYd9r3lCCCQnJ+PJkydQKBSwt7fPVEed3+/5NgEKDQ2Fra0tWrVqpVL+9i21KlWqwNvbGy4uLti4cSN69OiRZVuTJ0/GuHHjMpXv2bMH5ubmGo07w969e7XSLr0f+147ZDIZHB0d4eDgACMjI5VfRMbGxkw8dYR9rzvse80zMjKClZUVYmJiEBERgXf3c09OTs51W/kyARJCYMWKFfjmm29gbGycY11bW1uUK1cON2/ezLbOiBEjEBwcrHycsZBSkyZNtLIO0N69e+Hv78+1I
fIY+167UlJSEB0djUKFCsHMzExZnrE0PbeWyXvse91h32uPkZEREhIS0KhRI5iYmKg8l3EHJzfyZQJ06NAh3Lx5M9srOm9LTEzErVu38M0332Rbx8TEJFMnAlIna+sPpTbbppyx77VDoVBAJpNBLper7MGTnp4OQLpC9L69eUiz2Pe6w77XHrlcDplMBkNDw0y/y9X53a7TTyUxMRGRkZGIjIwEAERFRSEyMhLR0dEApCszXbt2zfS65cuXw9vbG5UqVcr03JAhQ3Do0CHcuXMHx44dQ+vWrSGXy9GxY0etnktuKNIVOHT3EA6/OIxDdw9Bkc6ZAURERLqg0ytAZ86cQcOGDZWPM25DBQYGYtWqVYiJiVEmQxni4uKwefNmzJkzJ8s279+/j44dO+LZs2coWrQo6tatixMnTqBo0aLaO5Fc2HJlCwbuGoj78fcBADPvzkQJ6xKY03QO2ni00WlsRKS/fH194enpidmzZ+s6FKI8pdMrQL6+vhBCZDpWrVoFAFi1ahXCwsJUXmNjY4Pk5GT07NkzyzbXr1+Phw8fIiUlBffv38f69etRunRpLZ9JzrZc2YKvN36tTH4yPIh/gK83fo0tV7boKDIiylhh/rvvvsv0XN++fSGTydCtW7e8D+wTsWrVKuWq+nK5HHZ2dvD29sb48eMRFxenVlt37tyBTCZTXvUn0iXemNQyRboCA3cNhIDI9FxG2aBdg3g7jOj/FAogLAxYt076mhdryDk7O2P9+vV49eqVsuz169dYu3ZtvlhDLDU1VavtW1tbIyYmBvfv38exY8fQq1cvrF69Gp6ennj48KFW35tIW5gAaVl4dHimKz9vExC4F38P4dHheRgV0adpyxbA1RVo2BDo1En66uoqlWtTtWrV4OzsjC1vvdGWLVtQsmRJeHl5qdRNT0/H5MmT4ebmBjMzM1StWhV//PGH8nmFQoEePXoon3d3d890yz4sLAw1a9aEhYUFbG1tUadOHdy9exeAdEXq3eU9Bg0aBF9fX+VjX19f9OvXD4MGDUKRIkUQEBAAALh48SKaNWsGS0tL2Nvbo2vXrnj27JnydUlJSejatSssLS3h6OiIGTNm5Kp/ZDIZHBwc4OjoCA8PD/To0QPHjh1DYmIihg0bpqy3a9cu1K1bF7a2tihcuDCaN2+OW7duKZ93c3MDAHh5eUEmkynP6fTp0/D390eRIkVgY2ODBg0aICIiIlexEX0oJkBaFpMQo9F6RAXVli3A118D99/5/8KDB1K5tpOg7t27Y+XKlcrHK1asyLTNDiCtG7Z69WosWrQIly5dwuDBg9GlSxccOnQIgJQglShRAps2bcLly5cREhKCkSNHYuPGjQCAN2/eoFWrVmjQoAHOnz+P48ePo1evXmpPlQ4NDYWxsTGOHj2KRYsW4eXLl2jUqBG8vLxw5swZ7Nq1C48ePVI5h6FDh+LQoUP4888/sWfPHoSFhX1wolGsWDF07twZ27dvV271kJSUhODgYJw5cwb79++HgYEBWrdurZwRderUKQDAvn37EBMTo0w4ExISEBgYiCNHjuDEiRMoW7YsPv/8cyQkJHxQbES5kS+nwecnjlaOGq1HVBApFMDAgYDIfKcYQgAyGTBoENCyJaChbYAy6dKlC0aMGKG8EnP06FGsX79eZRxiSkoKJk2ahH379sHHxwcAUKpUKRw5cgSLFy9GgwYNYGRkpLKwqpubG44fP46NGzeiXbt2iI+PR1xcHJo3b64cn+jh4aF2vGXLlsW0adOUjydOnAgvLy9MmjRJWbZ8+XK4uLjg+vXrKFGiBJYvX47ff/8djRs3BiAlUSVKlFD7vTOUL18eCQkJePbsGYoVK4avvvpK5fkVK1agaNGiuHz5MipVqqScjFK4cGGV/RobNWqk8rolS5bA1tYWhw4dQvPmzT84PqKcMAHSsnol66GEdQk8iH+Q5TggmQBKmBZFvZL1dBAd0achPDzzlZ+3CQHcuyfVe+tOkEYVL
VoUX3zxBVatWgUhBL744gsUKVJEpc7NmzeRnJwMf39/lfLU1FSVW2Xz58/HihUrEB0djVevXiE1NRWenp4AgEKFCqFbt24ICAiAv78//Pz80K5dOzg6qvefoOrVq6s8/vfff3Hw4EFYWlpmqnvr1i2kpKQgNTUV3t7eyvJChQrB3d1drfd9W8YqvBlXr27cuIGQkBCcPHkST58+VV75iY6OznLZkgyPHj3CTz/9hLCwMDx+/BgKhQLJycmZZgETaRITIC2TG8gxp+kcfL3xa8ggU0mCZP//dvbvzyC3Xw1kcbmdSB/E5PIOcG7rfaju3bujX79+AKQk5l2JiYkAgL/++gvFixdXeS5jMdX169djyJAhmDFjBnx8fGBlZYXp06fj5MmTyrorV67EgAEDsGvXLmzYsAE//fQT9u7di1q1asHAwCDT8v5ZbadgYWGRKbYWLVpg6tSpyrL09HQkJiaibNmyuH37tjpdkStXrlyBtbU1ChcuDABo0aIFXFxcsHTpUjg5OSE9PR2VKlV67yDtwMBAPHv2DHPmzIGLiwtMTEzg4+Oj9cHdpN+YAOWBNh5t8Ee7P1TWAQKAEtYlMPuKC9pcOgp07w7cvAlMmABw1VDSM7m9+KHmRRK1NW3aFKmpqZDJZMqBxW+rUKECTExMEB0djQYNGmTZxtGjR1G7dm18//33yrK3BwJn8PLygpeXF0aMGAEfHx+sXbsWtWrVQtGiRXHx4kWVupGRke9d4bZatWrYvHkzXF1dYWgo/WpPT09HfHw8LCwsULp0aRgZGeHkyZPKmW0vXrzA9evXsz2XnDx+/Bhr165Fq1atYGBggGfPnuHatWtYunQp6tWTrmgfOXJE5TUZWxcp3pnad/ToUSxYsACff/45AODevXt4+vSp2jERqYN/afNIG482uDPwDvZ23otgl2Ds7bwXUYPuoM3iw8CoUVKlSZOkqS9vTcUl0gf16gElSkhjfbIikwHOzlI9bZLL5bhy5QouX74MeRaDjaysrDBkyBAMHjwYoaGhuHXrFiIiIvDrr78iNDQUgDQ258yZM9i9ezeuX7+O0aNH4/Tp08o2oqKiMGLECBw/fhx3797Fnj17cOPGDeU4oEaNGuHMmTNYvXo1bty4gTFjxmRKiLLSt29fPH/+HB07dsTp06dx69Yt7N69G3379oVCoYClpSV69OiBoUOH4sCBA7h48SK6deuWq20ahBCIjY1FTEwMrly5ghUrVqB27dqwsbHBlClTAAB2dnYoXLgwlixZgps3b+LAgQMqeywC0sBpMzMz5QDtjHWEypYti99++w1XrlzByZMn0blzZ5X95Ii0gQlQHpIbyNHApQHq29VHA5cGkBvIpas9EycCK1cCRkbAhg1A48bAkye6Dpcoz8jlQMZM8XeToIzHs2drbwD026ytrXPcBHnChAkYPXo0Jk+eDA8PDzRt2hR//fWXcop379690aZNG7Rv3x7e3t549uyZytUgc3NzXL16FV999RXKlSuHXr16oW/fvujduzcAICAgAKNHj8awYcPw2WefISEhIcstgd7l5OSEo0ePQqFQoEmTJqhcuTKCg4NhY2OjTHKmT5+OevXqoUWLFvDz80PdunUzjSXKSnx8PBwdHVG8eHH4+Phg8eLFCAwMxLlz55RjlwwMDLB+/XqcPXsWlSpVwuDBgzF9+nSVdgwNDTF37lwsXrwYTk5OaNmyJQBpsPaLFy9QrVo1fPPNNxgwYACKFSv23riIPoZMvHuzmRAfHw8bGxvExcVpZTf4v//+G59//nnmS9phYUDr1sDLl9LiJzt2ADkMHCT15Nj39NFev36NqKgouLm5wdTUVFmecRvG2tr6vVcbtmyRZoO9PSDa2VlKftpwxxi1qdP3pFnse+3J7ncNoN7fb34qnxJfX+DECaB0aeDOHaB2beDvv3UdFVGeadNG+tE/eBBYu1b6GhXF5IeINI8J0KfG3R04eRJo0ABISABatJD++8sLdaQn5
HLp/wIdO0pf8+K2FxHpHyZAn6LChYE9e4AePYD0dGDwYKB3b4BTQomIiDSCCdCnytgYWLoUmDFDGgW6dCkQEAA8f67ryIiIiPI9JkCfMpkMCA4Gtm8HLC2lQdLe3sC1a7qOjIiIKF9jApQfNG8OHDsGuLhIiyV6ewP79uk6KiIionyLCVB+UbkycOqUNDMsLg5o2hRYsEDXUREREeVLTIDyk2LFgP37gW++kbbP7tsX6NOHg6OJiIjUxAQovzE1BUJDgcmTpTFCixYB/v5cOZqIiEgNTIDyI5kMGD5cGhxtZQUcPgx89hnw77+6joyoQPH19cWgQYM+6LX79++Hh4dHpo0/3zZ27Fh4enp+WHB6Yvjw4ejfv7+uw6ACiAlQfta8ubRoYpkywN270vigzZt1HRVRvtKtWzfIZLJMx82bN7FlyxZMmDBBWdfV1RWzZ8/OVbvDhg3DTz/9lOWmqiQJCwtDy5Yt4ejoCAsLC3h6emLNmjUqdYYMGYLQ0FDcvn1bR1FSQcUEKL/z8JAGR/v7A8nJwNdfA2PHSgsoElGuNG3aFDExMSqHm5sbChUqBCsrK7XbO3LkCG7duoWvvvpKC9GqRwiBN2/e6DqMLB07dgxVqlTB5s2bcf78eQQFBaFr167YuXOnsk6RIkUQEBCAhQsX6jBSKoiYABUEdnbSnmGDB0uPx42TEqHERN3GRZRPmJiYwMHBQeWQy+Uqt8B8fX1x9+5dDB48WHmVKDvr16+Hv79/po0ap0yZAnt7e1hZWaFHjx54/fp1ptcuW7YMHh4eMDU1Rfny5bHgndmex44dg6enJ0xNTVGjRg1s27YNMpkMkZGRAKSrKjKZDP/88w8+++wz2Nvb48iRI0hPT8fkyZPh5uYGMzMzVK1aFX/88YdK2xcvXkSzZs1gaWkJe3t7fPPNN3j69OkH9GjujBw5EhMmTEDt2rVRunRpDBw4EE2bNsWWLVtU6rVo0QLr16/XWhykn5gAFRSGhsDMmcDKldIq0lu3SrfEoqJ0HRnpMyGApKS8P7Swd96WLVtQokQJjB8/XnmVKDvh4eGoUaOGStnGjRsxduxYTJo0CWfOnIGjo2Om5GbNmjUICQnBzz//jCtXrmDSpEkYPXo0QkNDAUg7Xbdo0QKVK1dGREQEJkyYgB9//DHLGIYPH45Jkybh5MmTqFKlCiZPnozVq1dj0aJFuHTpEgYPHowuXbrg0KFDAICXL1+iUaNG8PLywpkzZ7Br1y48evQI7dq1y/Y8o6OjYWlpmeMxadKkXPVvhri4OBQqVEilrGbNmrh//z7u3LmjVltEOTHUdQCkYd26SRuqtmkDXLggDY7esAFo3FjXkZE+Sk6GgbU1bPP6fRMTAQuLXFffuXMnLC0tlY+bNWuGTZs2qdQpVKgQ5HI5rKys4ODgkGN7d+/ehZOTk0rZ7Nmz0aNHD/To0QMAMHHiROzbt0/lKtCYMWMwY8YMtGnTBgDg5uaGy5cvY/HixQgMDMTatWshk8mwdOlSmJqaokKFCnjw4AF69uyZKYbx48fD398f8fHxMDExwaRJk7Bv3z74+PgAAEqVKoUjR45g8eLFaNCgAebNmwcvLy+VhGXFihVwdnbG9evXUa5cuUzv4eTkpLzylJ13k5mcbNy4EadPn8bixYszvQ8g9aurq2uu2yPKCROggsjHBzh9GmjdGjhzBmjSBJg+XbpFlsNleyJ91bBhQ5UxJhZqJE9ZefXqVabbX1euXMF3332nUubj44ODBw8CAJKSknDr1i306NFDJaF58+YNbGxsAADXrl1DlSpVVNquWbNmljG8fQXq5s2bSE5Ohr+/v0qd1NRUeHl5AQD+/fdfHDx4UCURzHDr1q0sEyBDQ0OUKVMmy/dX18GDBxEUFISlS5eiYsWKKs+ZmZkBAJKTkzXyXkQAE6CCq0QJaXp8nz7SukE//CAlQ8uWAebmuo6O9IW5OdLj4xEfHw9ra2sYGOTRXXc1f8YtLCw09occk
AbuvnjxQq3XJP5/zN7SpUvh7e2t8tyHzCR7O4nLaPuvv/5C8eLFVeqZmJgo67Ro0QJTp07N1Jajo2OW7xEdHY0KFSrkGMfIkSMxcuTIHOscOnQILVq0wKxZs9C1a9dMzz///ybQRYsWzbEdInUwASrIzMykMUE1akhXf9atAy5flsYHubnpOjrSBzKZdCtKoZC+5lUCpCXGxsY5ruuTwcvLC5cvX1Yp8/DwwMmTJ1X+wJ84cUL5vb29PZycnHD79m107tw5y3bd3d3x+++/IyUlRZm4nD59+r3xVKhQASYmJoiOjkaDBg2yrFOtWjVs3rwZrq6uMDTM3Z8GTdwCCwsLQ/PmzTF16lT06tUryzoXL16EkZFRpitDRB+DCVBBJ5MB/foBVapIM8P+/VdKiDZsAPz8dB0dUb7i6uqKw4cPo0OHDjAxMUGRIkWyrBcQEKAcuJxh4MCB6NatG2rUqIE6depgzZo1uHTpEkqVKqWsM27cOAwYMAA2NjZo2rQpUlJScObMGbx48QLBwcHo1KkTRo0ahV69emH48OGIjo7GL7/8AgA5zkqzsrLCkCFDMHjwYKSnp6Nu3bqIi4vD0aNHYW1tjcDAQPTt2xdLly5Fx44dMWzYMBQqVAg3b97E+vXrsWzZsiyvQn3sLbCDBw+iefPmGDhwIL766ivExsYCkBLNtxOn8PBw1KtXT3krjEgT8vd/xyj36tcHzp6VBkU/fw4EBAC//KKV2TJEBdX48eNx584dlC5dOsfbMZ07d8alS5dw7do1ZVn79u0xevRoDBs2DNWrV8fdu3fRp08fldd9++23WLZsGVauXInKlSujQYMGWLVqFdz+f8XW2toaO3bsQGRkJDw9PTFq1CiEhIQAQKYxR++aMGECRo8ejcmTJ8PDwwNNmzbFX3/9pWzbyckJR48ehUKhQJMmTVC5cmUMGjQItra2Wrt1GRoaiuTkZEyePBmOjo7KI2MQeIb169dnOdCb6GPIhOBfwHfFx8fDxsYGcXFxsLa21mjbaWlp+Pvvv/H555/DyMhIo23nyuvX0rigVaukxx06SOOCPnLQZ36g874v4F6/fo2oqCi4ubmp/DFOT0/P+zFAn4ChQ4ciPj4+04wmTVuzZg2CgoIQFxeX6QpJQej7f/75Bz/88APOnz+f61tzn4KC0Pefqux+1wDq/f3mp6JvTE2BFSuA+fOltYPWr5fWC+Iy80QaNWrUKLi4uCBdw6uyr169GkeOHEFUVBS2bduGH3/8Ee3atSuwt4eSkpKwcuXKfJX8UP7Anyh9JJMB338PVK4sjQs6fx6oXh34/Xfgiy90HR1RgWBra/ve2U8fIjY2FiEhIYiNjYWjoyPatm2Ln3/+WePv86n4+uuvdR0CFVC8AqTP6tWTxgXVqgW8fCltrhoSIs3YIaJP0rBhw3Dnzh3lbYBZs2bBnEtbEKmNCZC+K1ECOHQI6NtXejxhgnQV6NkzlWoKBRAWJs2kDwtjjkRERPkbEyCS9g6bNw/47Tdp7aDdu6VbYmfOAAC2bAFcXYGGDYFOnaSvrq5SORERUX7EBIj+06ULcOIEULo0cPcuUKcOIr5fhq+/Bu7fV6364IE0fIhJEBER5UdMgEhVlSrSlZ8vvwRSU1FtYU8sFT1gilcq1TIWTxg0iLfDiIgo/9FpAnT48GG0aNECTk5OkMlk2LZtW471w8LCIJPJMh0Zq4dmmD9/PlxdXWFqagpvb2+cOnVKi2dRANnaAlu34va3k6CAAXpgBY6gLlwRpVJNCODePSA8XDdhEhERfSidJkBJSUmoWrUq5s+fr9brrl27hpiYGOVRrFgx5XMbNmxAcHAwxowZg4iICFStWhUBAQF4/PixpsMv2AwMcLLRCARgN56gCKojAhGohubYkalqTIwO4iMiIvoIOk2AmjVrhokTJ6J169Zqva5YsWJwcHBQHm+vsjlz5kz07NkTQUFBqFChAhYtWgRzc3OsWLFC0+EXeI6OwH74oTrO4
gS8YYeX2IEvMQU/whBpKvWI6NPUvHlzDB48WNdh5DvdunVDq1atPrqdoKAgjbRDmpcvxwB5enrC0dER/v7+OHr0qLI8NTUVZ8+ehd9bm3waGBjAz88Px48f10Wo+Vq9etIs+fuykqiPw5iNgQCAHzENB9AIxfEAzs5SPaL8KLvb6hlHw4YNdRLX2LFjIZPJ8N1336mUR0ZGQiaT4c6dO7lu67fffsP48eM1HOF/fH19s+y7d2PPiaurK2bPnq21GPPCnTt3IJPJEBkZqVI+e/ZsrMrYeugT9vPPP6N27dowNzeHra1trl6T3b+b6dOnK+s8f/4cnTt3hrW1NWxtbdGjRw8kJiYqn8/4WX/3sMiD7Zny1UrQjo6OWLRoEWrUqIGUlBQsW7YMvr6+OHnyJKpVq4anT59CoVDA3t5e5XX29va4evVqtu2mpKQgJSVF+Tg+Ph6AtHdUWlpadi/7IBntabpdbZkzB/jmGwCQYaSYjlOKWliU2gv1cATn4IWrQauRnt4YGl7tXyvyW9/nN2lpaRBCID09XWX7h4ztBjOeex9FugLh0eGISYyBo6Uj6pWsB7lB5p3INaFWrVp48OBBpvLt27fj+++/x3fffffBW1mkpqbC2Nj4g14rhICpqSmWL1+OwYMHo2zZsgCgjOXdPs6pHTs7O1haWmp8S463ffvttxg3bpxKmbm5uVrv+b6fD4VCAZlMlmf7agkhcv0zC2T+bDJ+7q2trSGTybTa/5qQkpKCr7/+GrVq1cKKFStyFe+7/3b++ecf9OzZE61bt1a+vlOnToiNjcXu3buRlpaGHj16oGfPnlizZg0AIDg4GL169VJpx9/fHzVq1Mg2hoz+TUtLg1yu+rtBnd/v+SoBcnd3h7u7u/Jx7dq1cevWLcyaNQu//fbbB7c7efLkTP94AWDPnj1aW2F17969WmlX0+RyYO3at0vMcCpmGj6bOhVF79xBkQmf49r19rjWtq1UOR/IL32f3xgaGsLBwQGJiYlITU3N9HxCQsJ729hxcweGHxqOh4kPlWVOlk6Y0mAKWpRpodF4M7z7b/zatWsYNmwYgoODERAQoPwP0eXLlxESEoITJ07A3NwcDRs2xKRJk1C4cGEA0q0mDw8PGBoaYuPGjahQoQJ27NiBo0ePIiQkBBcvXoSdnR06dOiAn376Kce9rVJSUlCmTBkULVoUw4cPx8qVKwFI4yYBIDExURnX+9pv3rw5KleujMmTJwMAli1bhoULF+LBgwewtraGj48PQkNDAUh/WGbPno3Q0FA8fvwYpUuXxtChQ9GyZctsY33z5g0MDQ2z/F0ZHx+P9evXY8iQITh06BBKly4NAPjhhx8QHh6OsLAwtGvXDnfv3kVwcDCCg4MBAC9evMDatWsxYsQILFy4EOPHj8fNmzcRERGBp0+fYsKECTh//jzS0tJQuXJlTJo0CVWrVlW+r52dHX755Rf8888/OHr0KOzt7TFu3DiV87h06RJGjBiB06dPw8zMDF9++SUmTpwIS0tLANIf0jdv3ij7ed++ffjll19w5coVyOVyfPbZZ5gyZQrc3NwAQHlu1atXBwDUqVMHO3fuxDfffIO4uDjlH/yUlBSEhIRgy5YtSEhIgKenJyZNmoRq1aoBAI4cOYIWLVpg27ZtGDt2LK5du4ZKlSph/vz5ykRYGzL6fu3atRBCKM87J+9+5lu2bEG9evVQpEgRxMfH49q1a9i9ezcOHDgADw8PANLf23bt2iEkJASO/x8/8XY7Fy5cwOXLlzF9+vRsY0hNTcWrV69w+PBhvHnzRuW55OTk3J+0+EQAEFu3blX7dUOGDBG1atUSQgiRkpIi5HJ5pna6du0qvvzyy2zbeP36tYiLi1Me9+7dEwDE06dPRWpqqkaPpKQksW3bNpGUlKTxtrV5vHqVKg4cSBVr10pfXz2PE4oePYSQJoMJhZ+fSH3wQOdxFsS+zy9HfHy8uHTpkkhKShIKhUJ5vHnzRrx48UK8e
fNGpfzdY9PFTUI2ViYwFiqHbKxMyMbKxKaLm3J8vSaOZ8+eibJly4rmzZurxPvs2TNRtGhRMXz4cHHp0iVx5swZ4efnJxo2bKis06BBA2FpaSmGDBkiLl++LC5fviyio6OFubm56NOnj7h06ZLYvHmzKFKkiAgJCckxjpCQEFG1alVx+vRpYWBgIE6ePCkUCoU4e/asACBu3bolFArFe9t/8+aNqFOnjhgwYIBQKBTi5MmTQi6Xi99//13cvn1bnDlzRsyePVv5vhMmTBDly5cXf//9t7hx44ZYvny5MDExEQcOHMg21gYNGijbz+74+uuvxWeffSZSUlLE9u3bhZGRkTh16pRQKBTiyZMnokSJEmLcuHHiwYMH4sGDB0KhUIjly5cLIyMjUbt2bREeHi4uX74sEhISxN69e0VoaKi4dOmSuHjxoujevbuwt7cXL1++VL4fAFG4cGGxePFiceXKFTFq1Cghl8vFxYsXhUKhEPHx8cLR0VG0bt1a/Pvvv2Lv3r3Czc1NdO3aVdlGxt+NjMcbN24UmzZtEteuXRNnz54VzZs3F5UrVxZpaWlCoVCIEydOCABiz5494sGDB+Lx48fixYsXmdrp37+/cHJyEjt37hQXLlwQXbt2FXZ2duLJkydCoVCI/fv3CwDC29tbHDhwQFy4cEHUq1dP1K5dO8c+rlChgrCwsMj2CAgIyNW/geXLlwsbGxu1/+08fPhQGBoait9++01ZtnTpUmFra6tSL+Pv9B9//JFlO3379hXlypXL8b2SkpLEpUuXRHx8fKbfQ0+fPhUARFxc3PvzDrUzDi350ATIz89PtG7dWvm4Zs2aol+/fsrHCoVCFC9eXEyePDnXbcbFxeW6A9WVmpoqtm3bJlJTUzXetk6sXi2EubmUCBUvLkR4uK4jylaB6/tPzKtXr8Tly5fFq1evVMoVCoV48eKFUCgU2b72jeKNKDGzRKbk5+0kyHmms3ijeKO1+BUKhWjWrJnw8PAQ8fHxKs9NmDBBNGnSRKUs4z9K165dE0II0aBBA+Hl5aVSZ+TIkcLd3V2kp6cry+bPny8sLS1z7I8xY8aIqlWrCiGE6NChg2jUqJEQQohz584JACIqKipX7SsUCmUCJIQQmzdvFtbW1pnOTwjpP4Lm5ubi2LFjKuU9evQQHTt2zDbWBg0aCCMjo0x/cH///XdlnefPn4sSJUqIPn36CHt7e/Hzzz+rtOHi4iJmzZqlUrZy5UoBQERGRmb73kJIn5uVlZXYsWOHsgyA+O6771TqeXt7iz59+gghhFiyZImws7MTiYmJyuf/+usvYWBgIGJjY4UQQgQGBoqWLVtm+75PnjwRAMSFCxeEEEJERUUJAOLcuXPKuDISoIx2EhMThZGRkVizZo2yndTUVOHk5CSmTZsmhBDi4MGDAoDYt2+fSmwAMv3betudO3fEjRs3sj3u37+f7WvftnLlSmFjY5Orum+bOnWqsLOzU4nx559/FuXKlctUt2jRomLBggWZyl+9eiXs7OzE1KlTc3yv7H7XCKHe32+d3gJLTEzEzZs3lY+joqIQGRmJQoUKoWTJkhgxYgQePHiA1atXA5AGk7m5uaFixYp4/fo1li1bhgMHDmDPnj3KNoKDgxEYGIgaNWqgZs2amD17NpKSkhAUFJTn56cXvvkGqFYNaNsWuHIF8PWV9hP78Ucgj+7VU/4XHh2O+/H3s31eQOBe/D2ER4fD19VXKzGMHDkSx48fx6lTp2BlZaXy3L///ouDBw8qb4+87datWyhXrhyA/25/ZLhy5Qp8fHwgk8mUZXXq1EFiYiLu/3959QoVKqjE8O4O8hMnToSHhwf27NmjsuRHbtovUaKESn1/f3+4uLigVKlSaNq0KZo2bYrWrVvD3NwcN2/eRHJyMvz9/VVek5qaCi8vr6w77f86d+6MUaNGqZS9PRbTzs4Oy5cvR0BAAGrXro3hw4fn2F4GY2NjVKlSRaXs0aNH+OmnnxAWFobHj
x9DoVAgOTkZ0dHRKvV8fHwyPc4YoHzlyhVUrVpVZaBtnTp1kJ6ejmvXrmUaRwoAN27cQEhICE6ePImnT58qx6dER0ejUqVKuTqfW7duIS0tDXXq1FGWGRkZoWbNmrhy5YpK3bfPO+NW0ePHj1GyZMks23ZxcclVDNqyYsUKdO7cGaamph/cxtatW5GQkIDAwEANRpY9nSZAZ86cUZllkXEPMjAwEKtWrUJMTIzKD3Vqaip++OEHPHjwAObm5qhSpQr27dun0kb79u3x5MkThISEIDY2Fp6enti1a1eWP9CkIRUrAqdOAX36AL//DowcCRw8KO0txn6nXIhJyN1iUrmtp67169fjl19+wV9//ZXlOIvExES0aNECU6dOzfSc41vrQKg7c8XJyUll1lChQoUy1SldujR69uyJ4cOHY/ny5Wq1/y4rKytEREQgLCwMe/bsQUhICMaOHYvTp08rZ+b89ddfKF68uMrrTExMcmzXxsYGZcqUybHO4cOHIZfLERMTg6SkpExJZlbMzMxUkjtA+vvw7NkzzJkzBy4uLjAxMYGPj0+W4840qUWLFnBxccHSpUvh5OSE9PR0VKpUSWvva2RkpPw+ow9yGphcsWJF3L17N9vn69Wrh3/++UdzAb4lPDwc165dw4YNG1TKHRwcMq3B9+bNGzx//hwODg6Z2lm2bBmaN2+eZ3+vdZoA+fr6KkfKZ+XdqYPDhg3DsGHD3ttuv3790K9fv48Nj9RhaQmsXg00biztLL93L1C1qpQQvbUsAVFWHK1yt5hUbuupIzIyEj169MCUKVMQEBCQZZ1q1aph8+bNcHV1zXHw8rs8PDywefNmCCGUf8SOHj0KKysrlChRAgYGBu9NHAAgJCQEpUuXxvr169VqPyuGhobw8/ODn58fxowZA1tbWxw4cAD+/v4wMTFBdHQ0GjRokOtzzI1jx45h6tSp2LFjB3788Uf069dPOfAakK70KHK5p87Ro0exYMECfP755wCAe/fu4enTp5nqnThxAl27dlV5nHEly8PDA6tWrUJSUpIyaT169CgMDAxUJtpkePbsGa5du4alS5ei3v/X/Thy5IhKnYwZfzmdR+nSpWFsbIyjR48qr9ikpaXh9OnTGDRoUK7OPzt///13jjOgzMzMPqr9nCxfvhzVq1dXGYgOSFfdXr58ibNnzyqvjh44cADp6enw9vZWqRsVFYWDBw9i+/btWovzXbxHQZojkwHdukl7iVWqBDx6BDRpAvz0E/DOSH2it9UrWQ8lrEtABlmWz8sgg7O1M+qV1OyiU0+fPkWrVq3g6+uLLl26IDY2VuV48uQJAKBv3754/vw5OnbsiNOnT+PWrVvYvXs3goKCcvyD9/333+PevXvo378/rl69ij///BNjxoxBcHCwWtO57e3tERwcjLlz535U+zt37sTcuXMRGRmJu3fvYvXq1UhPT4e7uzusrKwwZMgQDB48GKGhobh16xYiIiLw66+/qiQrWUlOTs7Udy9evAAgzf775ptvMGDAADRr1gxr1qzBhg0b8Mcffyhf7+rqisOHD+PBgwdZJjNvK1u2LH777TdcuXIFJ0+eROfOnbP8475p0yasWLEC169fx5gxY3Dq1Cnlf4wzbtUEBgbi4sWLOHjwIPr3749vvvkmy6sPdnZ2KFy4MJYsWYKbN2/iwIEDyjsWGYoVKwYzMzPs2rULjx49QlxcXKZ2LCws0KdPHwwdOhS7du3C5cuX0bNnTyQnJ6NHjx45nvf7uLi4oEyZMtke717Ve1d0dDQiIyMRHR0NhUKByMhIREZGqqzZU758eWzdulXldfHx8di0aRO+/fbbTG16eHigadOm6NmzJ06dOoWjR4+iX79+6NChA5ycnFTqrlixAo6OjmjWrNlH9IKa3jtKSA9xELQGJCcL0auXcpaYqFtXiHv3dBqS3vS9jnzMIGghhNh8ebNyxldWs8A2X96s8ZhXrVolAGR7uLi4KOtev35dtG7dWtja2
gozMzNRvnx5MWjQIOUA5AYNGoiBAwdmeo+wsDDx2WefCWNjY+Hg4CB+/PFHkZaWlmNcbw+CzhAXFyeKFCmiMgj6fe2/Owg6PDxcNGjQQNjZ2QkzMzNRpUoVsWHDBmVb6enpYvbs2cLd3V0YGRmJokWLioCAAHHo0KFsY23QoEGWfRcQECCEECIoKEhUrlxZvH79WvmaGTNmiEKFCikH5h4/flxUqVJFmJiYiIw/S9kNxo2IiBA1atQQpqamomzZsmLTpk2ZBlEDEPPnzxf+/v7CxMREuLq6qpynEEKcP39eNGzYUJiamopChQqJnj17ioSEBOXz7w6C3rt3r/Dw8BAmJiaiSpUqIiwsLNPknaVLlwpnZ2dhYGAgGjRokGkQtBDSv5P+/fuLIkWKCBMTE1GnTh1x6tQp5fMZg6BfvHihLHt38Ls2BAYGZvk5Hjx4UFkHgFi5cqXK6xYvXizMzMzEy5cvs2z32bNnomPHjsLS0lJYW1uLoKAglX4WQvo5LVGihBg5cmSuYtXUIGjZ/0+K3hIfHw8bGxvExcXB2tpao22npaXh77//xueff65yj7fA2rAB6NkTSEgAChcGVq0CmjfXSSh61/d57PXr14iKioKbm5vKQMj09HTEx8fD2tr6vVc9tlzZgoG7BqoMiHa2dsbsprPRxqON1mIvqNTp+4JEJpNh69atOt2CQl/7Pi9k97sGUO/vd75aCJHyofbtgRo1pK9nzwItWgCDBwOTJwPvGVhJ+qeNRxu0dG8prQSdEANHK+2uBE1E+otpKWlf6dLA0aNAxiC/WbMAHx/g2jWdhkWfJrmBHL6uvuhYuSN8XX2Z/BCRVjABorxhYiIlPtu3S7fCzp2T1g9avlwaJUREpCFCCO7ATu/FBIjyVosWwPnz0nT55GTg22+l22P/nzFCRESUF5gAUd5zcgL27AGmTAEMDYFNmwBPT+CddTUof+K8CiLSJk39jmECRLphYCBtl3HsmDRGKDoaaNAAGDuWawblUxkz69TajZmISE0Zv2M+djYvZ4GRbn32mTQeqF8/aSXpceOAffuANWsAHe9tQ+qRy+WwtbVVLn1vbm4OmUyG9PR0pKam4vXr15wOnMfY97rDvtc8IQSSk5Px+PFj2NraQi7/uAkSTIBI96ysgNBQICAA+O47acZY1arAggVAp066jo7UkLG/z9v7/wgh8OrVqyz3dSLtYt/rDvtee2xtbbPcS0xdTIDo09GpE1CrFtC5M3DihPR1504pEbK11XV0lAsymQyOjo4oVqyYcl+itLQ0HD58GPXr1+cClHmMfa877HvtMDIy+ugrPxmYANGnpVQpIDwc+PlnYMIEYN06aXD06tWAr6+uo6Ncksvlyl9Scrkcb968gampKf8Q5DH2ve6w7z99vDFJnx5DQ2DMGOlWWJkywL17QKNGwLBhQEqKrqMjIqICgAkQfbq8vaUB0j17SoslTp8ulV26pOvIiIgon2MCRJ82S0tgyRJg2zagSBHg33+B6tWBuXOB9HRdR0dERPkUEyDKH1q2BC5cAD7/XLoNNnAg0KwZ8OCBriMjIqJ8iAkQ5R8ODv/NCjMzk1aTrlRJWjOIqw8TEZEamABR/iKTAX36ABER0iKKL18CXboAbdsCT57oOjoiIsonmABR/lS+vLSNxoQJ0qyxzZulq0F//qnryIiIKB9gAkT5l6Eh8NNPwKlTUvLz+DHQqhXQrRsQF6fr6IiI6BPGBIjyPy8v4MwZaZ0gmUzaVqNyZWD/fl1HRkREnygmQFQwmJgAU6dKq0iXLi0tnujnJ22ympSk6+iIiOgTwwSICpY6dYDISOD776XH8+dLG6sePqzTsIiI6NPyQQlQeHg4unTpAh8fHzz4/zosv/32G44cOaLR4Ig+iKWllPjs3g2UKAHcugU0aACDQYMgf/VK19EREdEnQO0EaPPmzQgICICZmRnOnTuHlP/vzRQXF4dJkyZpPECiD9akCXDxorSVBgD5ggVoO
GgQZGFhuo2LiIh0Tu0EaOLEiVi0aBGWLl2qssNtnTp1EBERodHgiD6ajY20lcbu3RAlS8Li0SMYNmkC9O0LJCbqOjoiItIRtROga9euoX79+pnKbWxs8PLlS03ERKR5TZrgTUQEogICpMcLFnCmGBGRHlM7AXJwcMDNmzczlR85cgSlSpXSSFBEWmFtjfN9+uDNP/8ALi7AnTvSTLHvvgPi43UdHRER5SG1E6CePXti4MCBOHnyJGQyGR4+fIg1a9ZgyJAh6NOnjzZiJNIo0bixtLFqxkyxxYulhRT//lu3gRERUZ4xVPcFw4cPR3p6Oho3bozk5GTUr18fJiYmGDJkCPr376+NGIk0z8pKmin29ddAjx5AVBTwxRdAp07A7NlA0aK6jpCIiLRI7StAMpkMo0aNwvPnz3Hx4kWcOHECT548wYQJE7QRH5F2NWwoXQ0KDgYMDIC1a4EKFbjDPBFRAffBCyEaGxujQoUKqFmzJiwtLTUZE1HesrAAZswAjh+XBkY/fSrtMP/FF0B0tK6jIyIiLVD7FljDhg0hk8myff7AgQMfFRCRztSsKe0pNm2atMv8P/8AFSsCkydL44UMuHA6EVFBofZvdE9PT1StWlV5VKhQAampqYiIiEDlypW1ESNR3jE2lnaYj4yUttVITAT69wfq1QOuXNF1dEREpCFqXwGaNWtWluVjx45FIheWo4LCw0PaP2zhQmD4cODYMWlPseHDgZEjAVNTXUdIREQfQWPX9Lt06YIVK1Zoqjki3TMwkFaMvnRJGg+UlibdGqtSBeCtXiKifE1jCdDx48dhyv8VU0FUsiSwYwewaRPg6AjcuAE0bgwEBgJPnug6OiIi+gBq3wJr06aNymMhBGJiYnDmzBmMHj1aY4ERfVJkMmnNIH9/YNQoaSuN1auBnTuBX34BunWT6hARUb6g9hUgGxsblaNQoULw9fXF33//jTFjxqjV1uHDh9GiRQs4OTlBJpNh27ZtOdbfsmUL/P39UbRoUVhbW8PHxwe7d+9WqTN27FjIZDKVo3z58uqeJlHWbGyAefOkKfNVqgDPnwPduwO+vsDVq7qOjoiIckntK0ArV67U2JsnJSWhatWq6N69e6YrS1k5fPgw/P39MWnSJNja2mLlypVo0aIFTp48CS8vL2W9ihUrYt++fcrHhoZqnyZRzry9pSnzc+YAY8ZIA6arVJEGSY8YAZiZ6TpCIiLKgU4zg2bNmqFZs2a5rj979myVx5MmTcKff/6JHTt2qCRAhoaGcHBw0FSYRFkzMgKGDJFujfXtK+0lNmGCtIr0r78Cn3+u6wiJiCgbuUqA7Ozsclz88G3Pnz//qIDUkZ6ejoSEBBQqVEil/MaNG3BycoKpqSl8fHwwefJklCxZMtt2UlJSkJKSonwc//+dwdPS0pCWlqbRmDPa03S79H5a6/vixYGtWyHbsgXyIUMgu30b+OILpH/5JRQzZkg7z+sx/szrDvted9j3uqFOf8uEeP+GR6GhobluMDAwMNd1VQKRybB161a0atUq16+ZNm0apkyZgqtXr6JYsWIAgH/++QeJiYlwd3dHTEwMxo0bhwcPHuDixYuwsrLKsp2xY8di3LhxmcrXrl0Lc3PzDzof0k+Gr16h3IYNKL1jBwwUCrwxNsb1du1ws2VLCCMjXYdHRFSgJScno1OnToiLi4O1tXWOdXOVAOUFdROgtWvXomfPnvjzzz/h5+eXbb2XL1/CxcUFM2fORI8ePbKsk9UVIGdnZzx9+vS9HaiutLQ07N27F/7+/jDiH8Q8lad9f+kS5AMGwCA8HAAgypWDYu5ciEaNtPu+nyD+zOsO+1532Pe6ER8fjyJFiuQqAfqoMUCvX79GamqqSpmmE4asrF+/Ht9++y02bdqUY/IDALa2tihXrhxu3ryZbR0TExOYmJhkKjcyMtLaD64226ac5Unfe3oChw4Bv/8ODBkC2fXrMGzaFGjfHpg5E3By0u77f4L4M6877HvdY
d/nLXX6Wu1p8ElJSejXrx+KFSsGCwsL2NnZqRzatm7dOgQFBWHdunX44osv3ls/MTERt27dgqOjo9ZjI1IhkwHffANcuybtJ2ZgAGzYALi7S2sHvfOfByIiyjtqJ0DDhg3DgQMHsHDhQpiYmGDZsmUYN24cnJycsHr1arXaSkxMRGRkJCIjIwEAUVFRiIyMRHR0NABgxIgR6Nq1q7L+2rVr0bVrV8yYMQPe3t6IjY1FbGws4uLilHWGDBmCQ4cO4c6dOzh27Bhat24NuVyOjh07qnuqRJphawvMnStNm/f2ljZYHToUqFwZ2LVL19EREekltROgHTt2YMGCBfjqq69gaGiIevXq4aeffsKkSZOwZs0atdo6c+YMvLy8lFPYg4OD4eXlhZCQEABATEyMMhkCgCVLluDNmzfo27cvHB0dlcfAgQOVde7fv4+OHTvC3d0d7dq1Q+HChXHixAkULVpU3VMl0iwvL2lT1ZUrgWLFgOvXgWbNgC+/BHK4RUtERJqn9hig58+fo1SpUgCk8T4Z097r1q2LPn36qNWWr68vchqDvWrVKpXHYWFh721z/fr1asVAlKcMDKRtM1q3BsaPl64M7dgB7N4trSk0YgRgaanrKImICjy1rwCVKlUKUVFRAIDy5ctj48aNAKQrQ7a2thoNjqjAsrEBZswAzp8HmjSRxgNNmiSND1q7Fvg0JmcSERVYaidAQUFB+PfffwEAw4cPx/z582FqaorBgwdj6NChGg+QqEDz8JDGAW3bBri5AQ8fAp07A/XrAxERAACFAggLA9atk74qFLoMmIioYMj1LbAhQ4bg22+/xeDBg5Vlfn5+uHr1Ks6ePYsyZcqgSpUqWgmSqECTyYCWLYGAAOmq0KRJwJEjQI0auOsbiK+u/oyzMf9Nmy9RQtqCLBfb5xERUTZyfQXozz//RMWKFVG7dm2sWLECSUlJAAAXFxe0adOGyQ/RxzI1BUaNknaV79QJEAIuB1chLKYcRmEiTPEKAPDggbT92JYtOo6XiCgfy3UCdOPGDRw8eBDlypXDwIED4eDggO7du+PYsWPajI9I/zg7Q7F6Db4sehzHUQuWSMJEjMY1uKMj1ionDgwaxNthREQfSq0xQPXr18eqVasQGxuLOXPm4MaNG6hbty48PDzwyy+/4NGjR9qKk0ivhIcDO57UQm0cQ0esRTScURL3sBadcRw+qClO4N49qR4REalP7UHQAGBhYYHu3bsjPDwc169fR5s2bd674zoR5V5MTMZ3MqxHR7jjGkZhIhJhgVo4iRPwwRp0QtyF6JyaISKibHxQApQhKSkJ4eHhOHToEF68eKFcH4iIPs67O7e8hhkmYRTK4gZWIAjpkKET1qHFD+WAH38EXr7USZxERPnVByVAR44cQffu3eHo6IgBAwagXLlyCA8Px5UrVzQdH5FeqldPmu0lk6mWx8IRPbACn+EMjpn4wiAtBZg2DShdGpg9G0hJ0Um8RET5Ta4ToJiYGEyZMgXly5dH/fr1cfXqVcycORMxMTFYsWIF6tSpo804ifSKXC5NdQcyJ0EyGXBOVg2xaw4AO3cCFSoAz58DgwdL6wqtXw+kp+d90ERE+UiuEyBnZ2fMmjULzZs3x6VLl3Ds2DF8++23sOSy/URa0aYN8McfQPHiquUlSkjlbb6SAV98Afz7L7BsmXTfLCoK6NhR2nQ1F1vHEBHpq1wvhLhx40Z8+eWXMDRUe/swIvpAbdpIaySGh0sDox0dpdtjcvlblQwNgR49gA4dgFmzgKlTpZ3nGzYEmjcHpkwBKlbU2TkQEX2Kcn0FqE2bNkx+iHRALgd8faULO76+7yQ/b7OwAH76Cbh1C+jbV0qMdu4EqlSRNmC9cyfPYiYi+tR91CwwIvoEFSsGzJsHXLoEfPWVNB4oNBQoVw4YOBB4/FjXERIR6RwTIKKCqlw5abDQyZNA48ZAWhowdy5QqhQwejQQF6frCImIdIYJEFFBV7MmsG+fdHz2G
ZCUBEycKCVCv/wCvHql6wiJiPIcEyAifdG4sXQ1aMsWabr88+fA0KFA2bLAkiXSFSIiIj2hdgKUlJSE0aNHo3bt2ihTpgxKlSqlchDRJ0wmA1q3Bi5cAFauBEqWlLaX790bcHcHVq0C3rzRdZRERFqn9rSub7/9FocOHcI333wDR0dHyN5dpY2IPn1yuTQzrGNHYPFiYNIkaQ2hoCDp+zFjpGn12U45IyLK39ROgP755x/89ddfXPmZqCAwMQEGDJDWEVq4UFpD6MYNoEsX4OefgbFjga+/Bgx4t5yICha1f6vZ2dmhUKFC2oiFiHTFwgIYMgS4fVu6AmRnB1y5ArRvD1StKo0bEkLXURIRaYzaCdCECRMQEhKC5ORkbcRDRLpkZQWMGCHdDhs3DrC2Bi5elNYTql4d2LaN+4wRUYGg9i2wGTNm4NatW7C3t4erqyuMjIxUno+IiNBYcESkIzY2QEgI0L8/MHOmtNP8uXPSAOrKlaUVp7/6imOEiCjfUjsBatWqlRbCIKJPkp0dMGGCtIL0rFnAr79KM8jat5em0v/0k/Q9EyEiymfUToDGjBmjjTiI6FNWpIg0KPqHH6TVpOfMkcYIde4sDZQeOVL6/p0rwkREn6oPntpx9uxZ/P777/j9999x7tw5TcZERJ+qQoWkhOfOHWk16UKFpFljQUHSOkJLlwKpqbqOkojovdROgB4/foxGjRrhs88+w4ABAzBgwABUr14djRs3xpMnT7QRIxF9amxsgFGjgLt3gWnTgKJFpYHTvXpJW2zMmgUkJuo6SiKibKmdAPXv3x8JCQm4dOkSnj9/jufPn+PixYuIj4/HgAEDtBEjEX2qLC2l7TTu3JGSHkdHaWXp4GDAxUW6WvTsma6jJCLKRO0EaNeuXViwYAE8PDyUZRUqVMD8+fPxzz//aDQ4IsonzM2BQYOkq0BLlwJlykh7jY0bB8PSpVFp+XLg3j1dR0lEpKR2ApSenp5p6jsAGBkZIZ3rgxDpNxMT4NtvgatXgY0bAS8vyJKTUXrHDhiWLw907y49R0SkY2onQI0aNcLAgQPx8OFDZdmDBw8wePBgNG7cWKPBEVE+JZcDbdsCZ8/izV9/4UmlSpClpUkbsFaoIK0ndPQoV5cmIp1ROwGaN28e4uPj4erqitKlS6N06dJwc3NDfHw8fv31V23ESET5lUwG4e+PYxMn4k14ONCypZT0bNsG1K0L1K4N/PEHoFDoOlIi0jNqrwPk7OyMiIgI7Nu3D1f/fynbw8MDfn5+Gg+OiAoO4e0tJT5XrkirS69eDZw4IV0pKlVKGkMUFCQNrCYi0rIPWgdIJpPB398f/fv3R//+/Zn8EFHueXhIA6Wjo4HRo6W1hG7flnalL1lSWlQxJkbXURJRAZerK0Bz585Fr169YGpqirlz5+ZYl1PhiShX7O2B8eOB4cOB0FDpqtDNm8DkycAvvwCdOklbcHh56TpSIiqAcpUAzZo1C507d4apqSlmzZqVbT2ZTMYEiIjUY24O9OkjLaK4Y4eU/Bw9KiVFoaFAvXpSItSyJWCo9l17IqIs5eq3SVRUVJbfExFpjFwOtGolHSdPSvuNbdoEhIdLR8mSQN++0jT7QoV0HS0R5XNqjwEaP348kpOTM5W/evUK48eP10hQRKTnvL2BtWulFaZHjZI2Y42OBn78EShRAujdG7h0SddRElE+pnYCNG7cOCRmscdPcnIyxo0bp1Zbhw8fRosWLeDk5ASZTIZt27a99zVhYWGoVq0aTExMUKZMGaxatSpTnfnz58PV1RWmpqbw9vbGqVOn1IqLiD4RxYtLm67euwesWAFUrQq8egUsWQJUqgT4+QFbtgBv3ug6UiLKZ9ROgIQQkMlkmcr//fdfFFLzsnRSUhKqVq2K+fPn56p+VFQUvvjiCzRs2BCRkZEYNGgQvv32W+zevVtZZ8OGDQgODsaYMWMQERGBqlWrIiAgAI8fP1YrNiL6hJiaSlPkz50DDh0C2
rQBDAyA/fuBr74CXF2lAdVvLdBKRJSTXI8otLOzg0wmg0wmQ7ly5VSSIIVCgcTERHz33XdqvXmzZs3QrFmzXNdftGgR3NzcMGPGDADS+kNHjhzBrFmzEBAQAACYOXMmevbsiaCgIOVr/vrrL6xYsQLDhw9XKz4i+sTIZED9+tJx9y6waBGwfLm0AeuYMcCECdIYou+/B3x9pfpERFnIdQI0e/ZsCCHQvXt3jBs3DjY2NsrnjI2N4erqCh8fH60EmeH48eOZ1hwKCAjAoEGDAACpqak4e/YsRowYoXzewMAAfn5+OH78eLbtpqSkICUlRfk4Pj4eAJCWloa0tDQNngGU7Wm6XXo/9r1uaK3fnZykqz6jRkG2ZQsMFi+GwbFj0srSf/wB4e6O9N69kd6lC2Brq9n3zif4M6877HvdUKe/c50ABQYGAgDc3NxQu3btLDdE1bbY2FjY29urlNnb2yM+Ph6vXr3CixcvoFAosqxzNYcNGCdPnpzl+KU9e/bA3NxcM8G/Y+/evVppl96Pfa8bWu13Gxtg2DBY37kD11274BwWBsNr1yAPDoYYMQIP6tbFXX9/vHB318urQvyZ1x32fd7KapJWdtReVKNBgwbK71+/fo3U1FSV562trdVtUudGjBiB4OBg5eP4+Hg4OzujSZMmGj+ftLQ07N27F/7+/jpJIvUZ+1438rzfv/8eIj4eirVrYbBoEQwvX4bL/v1w2b8fokIFpHfvjvTOnYHChbUfi47xZ1532Pe6kXEHJzfUToCSk5MxbNgwbNy4Ec+ePcv0vEKLmxo6ODjg0aNHKmWPHj2CtbU1zMzMIJfLIZfLs6zj4OCQbbsmJiYwMTHJVG5kZKS1H1xttk05Y9/rRp72e+HCQP/+QL9+0qKKy5YBGzdCdvky5EOGQD5ypDR4+ttvpbFCBh+0K1C+wZ953WHf5y11+lrtf/VDhw7FgQMHsHDhQpiYmGDZsmUYN24cnJycsHr1anWbU4uPjw/279+vUrZ3717l2CNjY2NUr15dpU56ejr279+v9fFJRPQJksmkXedXrZJmiM2fD3h6AqmpwLp1QOPGQLlywJQpnEFGpGfUToB27NiBBQsW4KuvvoKhoSHq1auHn376CZMmTcKaNWvUaisxMRGRkZGIjIwEIE1zj4yMRHR0NADp1lTXrl2V9b/77jvcvn0bw4YNw9WrV7FgwQJs3LgRgwcPVtYJDg7G0qVLERoaiitXrqBPnz5ISkpSzgojIj1layvNDjt3DjhzRlpM0coKuHULGDECcHYGPv8c2LgReP1a19ESkZapnQA9f/4cpUqVAiCN93n+/DkAoG7dujh8+LBabZ05cwZeXl7w+v9mh8HBwfDy8kJISAgAICYmRpkMAdIA7L/++gt79+5F1apVMWPGDCxbtkw5BR4A2rdvj19++QUhISHw9PREZGQkdu3alWlgNBHpserVpSn0MTHSAou1awPp6cA//wDt2wOOjtL+ZCdPAkLoOloi0gK1xwCVKlUKUVFRKFmyJMqXL4+NGzeiZs2a2LFjB2zVnGrq6+sLkcMvl6xWefb19cW5c+dybLdfv37o16+fWrEQkR6ysJAWWAwKAq5fB1avlo5796QEadEioHx5oFs3oEsXaWVqIioQ1L4CFBQUhH///RcAMHz4cMyfPx+mpqYYPHgwhg4dqvEAiYjyRLly0rYbd+4A+/ZJCY+ZGXD1KjB8uLQZa5Mm0g71CQm6jpaIPpLaV4DeHm/j5+eHq1ev4uzZsyhTpgyqVKmi0eCIiPKcgYE0OLpxY2nQ9KZN0iDqI0eAvXul47vvgC+/BDp3Bpo2BYyNdR01EalJ7StAq1evVlk12cXFBW3atEH58uW1PguMiChPWVsDPXoA4eHAzZvSytPu7tIg6Y0bgZYtpfFC330n1UlP13XERJRLH3QLLC4uLlN5QkICZ1oRUcFVujQwejRw5Yo0i2zwYCn5ef4cWLxY2p/MzU26XRYRwcHTRJ84j
e0Gf//+fZX9wYiI8juFAggLk5YMCguTHkMmk2aRzZwpDZbeu1caJG1lBURHA1OnSs+XLQuMHAlERjIZIvoE5XoMkJeXl3I3+MaNG8PQ8L+XKhQKREVFoWnTploJkogor23ZAgwcCNy//19ZiRLAnDlAmzb/L5DLAT8/6ViwAPjrL2DDBunrrVvA5MnSUbYs0K6ddFSurJf7kRF9anKdALVq1QoAEBkZiYCAAFhaWiqfy9gN/quvvtJ4gEREeW3LFuDrrzNfuHnwQCr/44+3kqAMZmbSk19/DSQmSknQxo3A338DN24AP/8sHe7uQNu2QOvWgJcXkyEiHcl1AjRmzBgAgKurKzp06JDl3llERPmdQiFd+cnqrpUQUr4yaJA0/lkuz6YRS0tpQcX27aUp828nQ9euSdPtJ04EXFyAVq2kZKhu3RwaJCJNU3sMUIUKFZRbV7zt5MmTOHPmjCZiIiLSmfBw1dte7xJCGvoTHp7LBq2sgA4dpMtKjx8Dv/8uXT4yNwfu3pXuqfn6Ag4O0oyznTu5FQdRHlA7Aerbty/u3buXqfzBgwfo27evRoIiItKVmBjN1lNhbS2tHbR5M/DkCbBtGxAYCBQqBDx9Km3L0aIFULSoNF7ot9+kcj2U5QB0Ig1SeyHEy5cvo1q1apnKvby8cPnyZY0ERUSkK46Omq2XLXNz6T5ay5bAmzfA4cPA1q1SUnT/vrQA46ZN0sKMPj5A8+bSUbFigR83lKsB6EQfSe0rQCYmJnj06FGm8piYGJWZYURE+VG9etIf2+xyDJlM2ji+Xj0NvqmhIdCoEfDrr9JU+lOngFGjgKpVpcUVjx6VdqyvXFlaa6hfP2DXrgJ5qyxjAPq7tyEzBqBv2aKbuKjgUTsBatKkCUaMGKGyGOLLly8xcuRI+Pv7azQ4IqK8JpdLVxqAzElQxuPZs7U4XlkmAz77TBokHRkpJUQLFwJffAGYmkrjhubPB5o1AwoXlm6ZzZsnzTTL5+sNvW8AOiANQOftMNIEtROgX375Bffu3YOLiwsaNmyIhg0bws3NDbGxsZgxY4Y2YiQiylNt2khT3d/d/L1EiWymwGuTs7O01cbOncCzZ8COHUDv3lJwyclSef/+0maupUsDffpIt9Hi4/MwSM3Q+AB0ohyofc+qePHiOH/+PNasWYN///0XZmZmCAoKQseOHWFkZKSNGImI8lybNtLwnPBwacCzo6N020unM9XNzf8bCyQE8O+/wO7d0nHkCBAVBSxaJB2GhpD7+KBcyZKQFSsG1Kz5yU+z1+oAdKJ3fNCgHQsLC/Tq1UvTsRARfVLkcmmG+idJJgM8PaXjxx+lxRfDwv5LiG7cgEF4ODwAYM0awMYGaNBAGmvUuPEnOZg6zwagE+EDboEBwG+//Ya6devCyckJd+/eBQDMmjULf/75p0aDIyKiXLK0lK4M/forcP06cOsWFL/+ipiaNSFsbIC4OGD7dmkQTeXKgL29tD7RkiXSTvefwPghnQxAJ72ldgK0cOFCBAcHo1mzZnjx4gUU/x+NZmdnh9mzZ2s6PiIi+hClSiG9d2+cGjkSb2JjgdOnpY1aAwKkW2lPnkj7lvXuLe1VVrIk0KmTNOD64kVp9lke0/kAdNIraidAv/76K5YuXYpRo0apTHuvUaMGLly4oNHgiIhIA+RyoEYNYNgwafr8ixfSukNjxwL16wNGRtLo43XrgO+/l64QFS0qDYL65Rfg5EkgLS1PQv2kBqBTgab2GKCoqCh4eXllKjcxMUFSUpJGgiIiIi0yNpbuI9WrB4wZAyQlSUlOeLiUGJ04ATx/Lt0y275deo25OVCrlrQoY61agLe3lCRpwSc5AJ0KHLUTIDc3N0RGRsLFxUWlfNeuXfDw8NBYYERElEcsLKTB0Y0aSY/T0oCICCkDCQ+XZpg9fw4cOCAdGcqUkZKhjMSocmXpapIGfNID0KlAUDsBCg4ORt++ffH69WsIIXDq1CmsW7cOkydPxrJly
7QRIxER5SUjI+kKj7c3MGSINB7oyhUpETp5Ejh+HLh6VRo8ffOmtMErAJiZSbfaatQAqleXvpYtK23nQfSJUTsB+vbbb2FmZoaffvoJycnJ6NSpE5ycnDBnzhx06NBBGzESEZEuGRhI0+YrVpQGTQPSOKJTp6TbZcePS4nRy5f/XTXKYGUFeHn9lxRVr86kiD4JH7QOUOfOndG5c2ckJycjMTERxYoV03RcRET0KbOzk2aUBQRIj9PTpen3J04AZ89KR2QkkJAgjSs6fPi/11pZSfucVakiHVWrApUqSVP5ifLIB+9e+vjxY1y7dg0AIJPJUFRLg+GIiCgfMDAAypeXjm7dpLI3b6RbZxkJ0Zkz/yVFR45IRwaZTNrK492kyM2No59JK9ROgBISEvD9999j3bp1SP//OhFyuRzt27fH/PnzYWNjo/EgiYgoHzI0lAZGV66cOSk6f17ayiPja2zsf2OK3t7y3cQEcHcHPDyAChX++1q2rDSbjegDfdAYoHPnzuGvv/6Cj48PAOD48eMYOHAgevfujfXr12s8SCIiKiDeToo6d/6v/PFj4MIF1aTo6lXg9Wvp8fnzqu3I5dIstPLlpa9ly/73tUQJjjGi91I7Adq5cyd2796NunXrKssCAgKwdOlSNG3aVKPBERGRnihWTNqjrHHj/8oUCuDuXeDyZemq0dtfExKAa9ek410mJtLttIyEqHRpwNVVOlxcpDWNSO+pnQAVLlw4y9tcNjY2sLOz00hQREREkMuBUqWko3nz/8qFAB4+lBKh69el22Y3bkhfb98GUlKk5y5fzrrdokX/S4Yyvrq4SFeOnJyk53kFqcBTOwH66aefEBwcjN9++w0ODg4AgNjYWAwdOhSjR4/WeIBEREQqZDJpr4zixQF/f9Xn3rwB7t2TEqKMpOjWLelK0p07QHy8tA/akyfS/mhZMTKSlp8uXlxKiDLey8lJ2kS2WDHpKFJEYws/Ut5TOwFauHAhbt68iZIlS6JkyZIAgOjoaJiYmODJkydYvHixsm5ERITmIiUiInofQ0Np5pibG9CkSebnX76UEqGMhCjj+7t3gQcPpLFIaWlAdLR0vI+dnZQMFS36X2JUqBAMrK1R8t49yFJSpOdsbaW6dnbSMgAF8QqTENKYrfh4aZ2o589Vj3fLOnUCvvlGZ+GqnQC1atVKC2EQERHlAVtbwNNTOrKSlibNSHvwQLrN9uCB6vePH0vH06fS2kcvXkjHO2OR5AC8AGD+/MzvYWAAWFtL6x5ZWEhfM463H5ubS+OZsjqMjf/73sDg/YdC8f4jNVVKYDKOlBTVx69fS/vGJSRIR2Lif99nHApF7j+LKlVyX1cL1E6AxowZo404iIiIdM/ICHB2lo6cpKdLVzEeP5Zup2UkRo8fA8+fI/35czy+fh3FjIxg8PKldOXpxQspiUhPlx6/fKn989EVW1ugUKH/Djs71ceFCmWfhOYRtROggwcPomHDhlk+t3jxYvTOWCadiIiooDIwkMYAFSmS5dOKtDSc/PtvfP755zB4e5zQ69dS4hMXJ11NSUxUPd4uS0qSrsKkpkpf3z0yytPTpUOI/75/+1AopHjl8pwPY2PExpki8oopXrw2xWtIh5GlKer5m8Ld00za783KSjosLf/7/u0yS8t8sXil2glQ06ZNMWDAAEyaNAlG//9Qnz59iqCgIBw5coQJEBERUXZMTQEHB+n4xGzZAnz9tZRHvU2WBGAb8EcXoE0bXUSmHWqPwjp48CC2bt2Kzz77DJcvX8Zff/2FSpUqIT4+HpGRkVoIkYiIiLRJoQAGDsyc/AD/lQ0apN4Qn0+d2glQ7dq1ERkZiUqVKqFatWpo3bo1Bg8ejLCwMLi4uGgjRiIiItKi8HDg/v3snxdCWl0gPDzvYtK2D5qHd/36dZw5cwYlSpSAoaEhrl27huTkZE3HRkRERHkgJkaz9fIDtROgKVOmwMfHB/7+/rh48SJOnTqFc+fOoUqVKjh+/
PgHBTF//ny4urrC1NQU3t7eOHXqVLZ1fX19IZPJMh1ffPGFsk63bt0yPc9tOoiIiLLm6KjZevmB2oOg58yZg23btqFZs2YAgEqVKuHUqVMYOXIkfH19kZKSolZ7GzZsQHBwMBYtWgRvb2/Mnj0bAQEBuHbtGooVK5ap/pYtW5Camqp8/OzZM1StWhVt27ZVqde0aVOsXLlS+djExEStuIiIiPRFvXrSTiAPHmQ9Dkgmk56vVy/vY9MWta8AXbhwQZn8ZDAyMsL06dOxZ88etQOYOXMmevbsiaCgIFSoUAGLFi2Cubk5VqxYkWX9QoUKwcHBQXns3bsX5ubmmRIgExMTlXrcp4yIiChrcjkwZ470vUym+lzG49mz88Xs9lxT+wpQkWzWPAAADw8PtdpKTU3F2bNnMWLECGWZgYEB/Pz8cn07bfny5ejQoQMsLCxUysPCwlCsWDHY2dmhUaNGmDhxIgoXLpxlGykpKSpXruLj4wEAaWlpSEtLU+uc3iejPU23S+/HvtcN9rvusO91Jz/2fYsWwB9/AD/+KF0JylCiBDBlivT8p3466vS3TIisLnZlZm5ujrt376Jo0aIAgC+++ALLli2D4/9vCD569AhOTk5QqDFH7uHDhyhevDiOHTsGHx8fZfmwYcNw6NAhnDx5MsfXnzp1Ct7e3jh58iRq1qypLF+/fj3Mzc3h5uaGW7duYeTIkbC0tMTx48chzyJ9HTt2LMaNG5epfO3atTA3N8/1+RAREZHuJCcno1OnToiLi4O1tXWOdXN9Bej169d4O1c6fPgwXr16pVInl7mUxixfvhyVK1dWSX4AoEOHDsrvK1eujCpVqqB06dIICwtD48aNM7UzYsQIBAcHKx/Hx8fD2dkZTZo0eW8HqistLQ179+6Fv7+/ciFJyhvse91gv+sO+1532Pe6kXEHJzfUvgWWE9m7Nw7fo0iRIpDL5Xj06JFK+aNHj+DwnlUyk5KSsH79eowfP/6971OqVCkUKVIEN2/ezDIBMjExyXKQtJGRkdZ+cLXZNuWMfa8b7HfdYd/rDvs+b6nT1x+0DpCmGBsbo3r16ti/f7+yLD09Hfv371e5JZaVTZs2ISUlBV26dHnv+9y/fx/Pnj1T3q4jIiIi/ZbrBChjPZ3sHn+o4OBgLF26FKGhobhy5Qr69OmDpKQkBAUFAQC6du2qMkg6w/Lly9GqVatMA5sTExMxdOhQnDhxAnfu3MH+/fvRsmVLlClTBgEBAR8dLxEREeV/ub4FJoRAuXLllElPYmIivLy8YGBgoHz+Q7Rv3x5PnjxBSEgIYmNj4enpiV27dsHe3h4AEB0drXyPDNeuXcORI0eynHYvl8tx/vx5hIaG4uXLl3ByckKTJk0wYcIErgVEREREANRIgN5eVFDT+vXrh379+mX5XFhYWKYyd3f3bBMuMzMz7N69W5PhERERUQGT6wQoMDBQm3EQERER5RmdDoImIiIi0gUmQERERKR3mAARERGR3mECRERERHqHCRARERHpnVzNAnt7n6z3mTlz5gcHQ0RERJQXcpUAnTt3TuVxREQE3rx5A3d3dwDA9evXIZfLUb16dc1HSERERKRhuUqADh48qPx+5syZsLKyQmhoKOzs7AAAL168QFBQEOrVq6edKImIiIg0SO0xQDNmzMDkyZOVyQ8A2NnZYeLEiZgxY4ZGgyMiIiLSBrUToPj4eDx58iRT+ZMnT5CQkKCRoIiIiIi0Se0EqHXr1ggKCsKWLVtw//593L9/H5s3b0aPHj3Qpk0bbcRIREREpFG53gssw6JFizBkyBB06tQJaWlpUiOGhujRowemT5+u8QCJiIiINE3tBMjc3BwLFizA9OnTcevWLQBA6dKlYWFhofHgiIiIiLThgxdCjImJQUxMDMqWLQsLCwsIITQZFxEREZHWqJ0APXv2DI0bN0a5cuXw+eefIyYmBgDQo0cP/PDDDxoPkIiIiEjT1E6ABg8eDCMjI0RHR8Pc3FxZ3
r59e+zatUujwRERERFpg9pjgPbs2YPdu3ejRIkSKuVly5bF3bt3NRYYERERkbaofQUoKSlJ5cpPhufPn8PExEQjQRERERFpk9oJUL169bB69WrlY5lMhvT0dEybNg0NGzbUaHBERERE2qD2LbBp06ahcePGOHPmDFJTUzFs2DBcunQJz58/x9GjR7URIxEREZFGqX0FqFKlSrh+/Trq1q2Lli1bIikpCW3atMG5c+dQunRpbcRIREREpFFqXQFKS0tD06ZNsWjRIowaNUpbMRERERFplVpXgIyMjHD+/HltxUJERESUJ9S+BdalSxcsX75cG7EQERER5Qm1B0G/efMGK1aswL59+1C9evVMe4DNnDlTY8ERERERaYPaCdDFixdRrVo1AMD169dVnpPJZJqJioiIiEiL1E6ADh48qI04iIiIiPLMB+8GT0RERJRfqX0FqGHDhjne6jpw4MBHBURERESkbWonQJ6eniqP09LSEBkZiYsXLyIwMFBTcRERERFpjdoJ0KxZs7IsHzt2LBITEz86ICIiIiJt09gYoC5dumDFihWaao6IiIhIazSWAB0/fhympqaaao6IiIhIa9S+BdamTRuVx0IIxMTE4MyZMxg9erTGAiMiIiLSFrUTIBsbG5XHBgYGcHd3x/jx49GkSRONBUZERESkLWonQCtXrtRGHERERER5Ru0xQPfu3cP9+/eVj0+dOoVBgwZhyZIlGg2MiIiISFvUToA6deqk3A4jNjYWfn5+OHXqFEaNGoXx48d/UBDz58+Hq6srTE1N4e3tjVOnTmVbd9WqVZDJZCrHu4OvhRAICQmBo6MjzMzM4Ofnhxs3bnxQbERERFTwqJ0AXbx4ETVr1gQAbNy4EZUrV8axY8ewZs0arFq1Su0ANmzYgODgYIwZMwYRERGoWrUqAgIC8Pjx42xfY21tjZiYGOVx9+5dleenTZuGuXPnYtGiRTh58iQsLCwQEBCA169fqx0fERERFTxqJ0BpaWkwMTEBAOzbtw9ffvklAKB8+fKIiYlRO4CZM2eiZ8+eCAoKQoUKFbBo0SKYm5vnuKaQTCaDg4OD8rC3t1c+J4TA7Nmz8dNPP6Fly5aoUqUKVq9ejYcPH2Lbtm1qx0dEREQFj9oJUMWKFbFo0SKEh4dj7969aNq0KQDg4cOHKFy4sFptpaam4uzZs/Dz8/svIAMD+Pn54fjx49m+LjExES4uLnB2dkbLli1x6dIl5XNRUVHKW3MZbGxs4O3tnWObREREpD/UngU2depUtG7dGtOnT0dgYCCqVq0KANi+fbvy1lhuPX36FAqFQuUKDgDY29vj6tWrWb7G3d0dK1asQJUqVRAXF4dffvkFtWvXxqVLl1CiRAnExsYq23i3zYzn3pWSkoKUlBTl4/j4eADS1a60tDS1zul9MtrTdLv0fux73WC/6w77XnfY97qhTn+rnQD5+vri6dOniI+Ph52dnbK8V69eMDc3V7c5tfn4+MDHx0f5uHbt2vDw8MDixYsxYcKED2pz8uTJGDduXKbyPXv2aO2c9u7dq5V26f3Y97rBftcd9r3usO/zVnJycq7rqp0AAYBcLldJfgDA1dVV7XaKFCkCuVyOR48eqZQ/evQIDg4OuWrDyMgIXl5euHnzJgAoX/fo0SM4OjqqtPnuTvYZRowYgeDgYOXj+Ph4ODs7o0mTJrC2tlbnlN4rLS0Ne/fuhb+/P4yMjDTaNuWMfa8b7HfdYd/rDvteNzLu4OTGByVAf/zxBzZu3Ijo6GikpqaqPBcREZHrdoyNjVG9enXs378frVq1AgCkp6dj//796NevX67aUCgUuHDhAj7//HMAgJubGxwcHLB//35lwhMfH4+TJ0+iT58+WbZhYmKiHNj9NiMjI6394GqzbcoZ+1432O+6w77XHfZ93lKnr9UeBD137lwEBQXB3t4e586dQ82aNVG4cGHcvn0bzZo1U7c5BAcHY+nSpQgNDcWVK1fQp08fJCUlISgoC
ADQtWtXjBgxQll//Pjx2LNnD27fvo2IiAh06dIFd+/exbfffgtAmiE2aNAgTJw4Edu3b8eFCxfQtWtXODk5KZMsIiIi0m9qXwFasGABlixZgo4dO2LVqlUYNmwYSpUqhZCQEDx//lztANq3b48nT54gJCQEsbGx8PT0xK5du5SDmKOjo2Fg8F+e9uLFC/Ts2ROxsbGws7ND9erVcezYMVSoUEFZZ9iwYUhKSkKvXr3w8uVL1K1bF7t27eJu9URERATgAxKg6Oho1K5dGwBgZmaGhIQEAMA333yDWrVqYd68eWoH0a9fv2xveYWFhak8njVrFmbNmpVjezKZDOPHj//glamJiIioYFP7FpiDg4PySk/JkiVx4sQJANL6O0IIzUZHREREpAVqJ0CNGjXC9u3bAQBBQUEYPHgw/P390b59e7Ru3VrjARIRERFpmtq3wJYsWYL09HQAQN++fVG4cGEcO3YMX375JXr37q3xAImIiIg0Te0EyMDAQGVQcocOHdChQweNBkVERESkTWrfAgOA8PBwdOnSBT4+Pnjw4AEA4LfffsORI0c0GhwRERGRNqidAG3evBkBAQEwMzPDuXPnlHtoxcXFYdKkSRoPkIiIiEjT1E6AJk6ciEWLFmHp0qUqKy7WqVNHrVWgiYiIiHRF7QTo2rVrqF+/fqZyGxsbvHz5UhMxEREREWnVB60DlLHx6NuOHDmCUqVKaSQoIiIiIm1SOwHq2bMnBg4ciJMnT0Imk+Hhw4dYs2YNhgwZku1mo0RERESfErWnwQ8fPhzp6elo3LgxkpOTUb9+fZiYmGDIkCHo37+/NmIkIiIi0ii1EyCZTIZRo0Zh6NChuHnzJhITE1GhQgVYWlpqIz4iIiIijVM7AcpgbGyssgM7ERERUX6R6wSoe/fuuaq3YsWKDw6GiIiIKC/kOgFatWoVXFxc4OXlxV3fiYiIKF/LdQLUp08frFu3DlFRUQgKCkKXLl1QqFAhbcZGREREpBW5ngY/f/58xMTEYNiwYdixYwecnZ3Rrl077N69m1eEiIiIKF9Rax0gExMTdOzYEXv37sXly5dRsWJFfP/993B1dUViYqK2YiQiIiLSqA/aDR4ADAwMIJPJIISAQqHQZExEREREWqVWApSSkoJ169bB398f5cqVw4ULFzBv3jxER0dzHSAiIiLKN3I9CPr777/H+vXr4ezsjO7du2PdunUoUqSINmMjIiIi0opcJ0CLFi1CyZIlUapUKRw6dAiHDh3Kst6WLVs0FhwRERGRNuQ6AeratStkMpk2YyEiIiLKE2othEhERERUEHzwLDAiIiKi/IoJEBEREekdJkBERESkd5gAERERkd5hAkRERER6hwkQERER6R0mQERERKR3mAARERGR3mECRERERHqHCRARERHpHSZAREREpHeYABEREZHeYQJEREREeocJEBEREemdTyIBmj9/PlxdXWFqagpvb2+cOnUq27pLly5FvXr1YGdnBzs7O/j5+WWq361bN8hkMpWjadOm2j4NIiIiyid0ngBt2LABwcHBGDNmDCIiIlC1alUEBATg8ePHWdYPCwtDx44dcfDgQRw/fhzOzs5o0qQJHjx4oFKvadOmiImJUR7r1q3Li9MhIiKifEDnCdDMmTPRs2dPBAUFoUKFCli0aBHMzc2xYsWKLOuvWbMG33//PTw9PVG+fHksW7YM6enp2L9/v0o9ExMTODg4KA87O7u8OB0iIiLKB3SaAKWmpuLs2bPw8/NTlhkYGMDPzw/Hjx/PVRvJyclIS0tDoUKFVMrDwsJQrFgxuLu7o0+fPnj27JlGYyciIqL8y1CXb/706VMoFArY29urlNvb2+Pq1au5auPHH3+Ek5OTShLVtGlTtGnTBm5ubrh16xZGjhyJZs2a4fjx45DL5ZnaSElJQUpKivJxfHw8ACAtLQ1paWkfcmrZymhP0+3S+7HvdYP9rjvse91h3+uGOv2t0wToY02ZMgXr169HWFgYT
E1NleUdOnRQfl+5cmVUqVIFpUuXRlhYGBo3bpypncmTJ2PcuHGZyvfs2QNzc3OtxL53716ttEvvx77XDfa77rDvdYd9n7eSk5NzXVenCVCRIkUgl8vx6NEjlfJHjx7BwcEhx9f+8ssvmDJlCvbt24cqVarkWLdUqVIoUqQIbt68mWUCNGLECAQHBysfx8fHKwdXW1tbq3FG75eWloa9e/fC398fRkZGGm2bcsa+1w32u+6w73WHfa8bGXdwckOnCZCxsTGqV6+O/fv3o1WrVgCgHNDcr1+/bF83bdo0/Pzzz9i9ezdq1Kjx3ve5f/8+nj17BkdHxyyfNzExgYmJSaZyIyMjrf3garNtyhn7XjfY77rDvtcd9n3eUqevdT4LLDg4GEuXLkVoaCiuXLmCPn36ICkpCUFBQQCArl27YsSIEcr6U6dOxejRo7FixQq4uroiNjYWsbGxSExMBAAkJiZi6NChOHHiBO7cuYP9+/ejZcuWKFOmDAICAnRyjkRERPRp0fkYoPbt2+PJkycICQlBbGwsPD09sWvXLuXA6OjoaBgY/JenLVy4EKmpqfj6669V2hkzZgzGjh0LuVyO8+fPIzQ0FC9fvoSTkxOaNGmCCRMmZHmVh4iIiPSPzhMgAOjXr1+2t7zCwsJUHt+5cyfHtszMzLB7924NRUZEREQFkc5vgRERERHlNSZAREREpHeYABEREZHeYQJEREREeocJEBEREekdJkBERESkd5gAERERkd5hAkRERER6hwkQERER6R0mQERERKR3mAARERGR3mECRERERHqHCRARERHpHSZAREREpHeYABEREZHeYQJEREREeocJEBEREekdJkBERESkd5gAERERkd5hAkRERER6hwkQERER6R0mQERERKR3mAARERGR3mECRERERHqHCRARERHpHSZAREREpHeYABEREZHeYQJEREREeocJEBEREekdJkBERESkd5gAERERkd5hAkRERER6hwkQERER6R0mQERERKR3mAARERGR3mECRERERHqHCRARERHpHSZAREREpHeYABEREZHe+SQSoPnz58PV1RWmpqbw9vbGqVOncqy/adMmlC9fHqampqhcuTL+/vtvleeFEAgJCYGjoyPMzMzg5+eHGzduaPMUiIiIKB/ReQK0YcMGBAcHY8yYMYiIiEDVqlUREBCAx48fZ1n/2LFj6NixI3r06IFz586hVatWaNWqFS5evKisM23aNMydOxeLFi3CyZMnYWFhgYCAALx+/TqvTouIiIg+YTpPgGbOnImePXsiKCgIFSpUwKJFi2Bubo4VK1ZkWX/OnDlo2rQphg4dCg8PD0yYMAHVqlXDvHnzAEhXf2bPno2ffvoJLVu2RJUqVbB69Wo8fPgQ27Zty8MzIyIiok+VoS7fPDU1FWfPnsWIESOUZQYGBvDz88Px48ezfM3x48cRHBysUhYQEKBMbqKiohAbGws/Pz/l8zY2NvD29sbx48fRoUOHTG2mpKQgJSVF+Tg+Ph4AkJiYCAMDA2VcRkZGSEtLQ3p6urKuXC6HoaEhUlNTIYRQlhsaGkIul2cqz5CYmAgjIyPlYyMjI8hkMqSmpqrUMzY2hhACaWlpKuUmJiZIT09XKZfJZDA2NoZCocCbN28ylb958wYKhUJZrqlzMjIygoGBgUoffornJISAQqFQ6fv8fk754XOSyWRIT09X6ff8fk755XNKSUlR+ZkvCOeUXz6npKQklb4vCOeUHz6npKQk5JZOE6CnT59CoVDA3t5epdze3h5Xr17N8jWxsbFZ1o+NjVU+n1GWXZ13TZ48GePGjctU/uuvv8LU1BQAUKhQIZQsWRLR0dF4/vy5SruOjo64desWEhISlOXOzs4oXLgwrl69qnLrrVSpUrC2tsa8efNUPlR3d3cYGxvjwoULKjFUrlwZqampuHbtmrLMwMAAVapUQXx8PG7fvq0sNzU1Rfny5fHs2TPcu3dPWW5lZYXSpUsjJiYGj
x49UpZr+pzOnz//yZ/TnTt3MHfu3AJ1Tvnhc3r06JFKvxeEc8pPn1PGawrSOWX41M8p42tBOqcMn+I5RUZGIrdkIqvLE3nk4cOHKF68OI4dOwYfHx9l+bBhw3Do0CGcPHky02uMjY0RGhqKjh07KssWLFiAcePG4dGjRzh27Bjq1KmDhw8fwtHRUVmnXbt2kMlk2LBhQ6Y2s7oC5OzsjAcPHsDa2hqAZq8A7d+/H/Xr1+cVIB1cAdq1axcaNmzIK0B5eE4ymQy7d++Gr68vrwDp4ArQnj17lD/zBeGc8svnlJSUhIMHDyr7viCcU374nJ4+fQonJyfExcUp/35nR6dXgIoUKQK5XK6S9QHAo0eP4ODgkOVrHBwccqyf8fXRo0cqCdCjR4/g6emZZZsmJiYwMTHJVG5paQlLS0uVsreTlg8pz/iBsLS0zPI1xsbG2caY2/KPjVHT5Z/KOaWlpUEul2fZ9/n1nDRZrq1zSktLg4GBgUb6/VM5J12Wq3tOWf3M5/dzyg+fk0wmy7Lv8/M55YfPycLCIsvyrOh0ELSxsTGqV6+O/fv3K8vS09Oxf/9+lStCb/Px8VGpDwB79+5V1ndzc4ODg4NKnfj4eJw8eTLbNomIiEi/6PQKEAAEBwcjMDAQNWrUQM2aNTF79mwkJSUhKCgIANC1a1cUL14ckydPBgAMHDgQDRo0wIwZM/DFF19g/fr1OHPmDJYsWQJAyroHDRqEiRMnomzZsnBzc8Po0aPh5OSEVq1a6eo0iYiI6BOi8wSoffv2ePLkCUJCQhAbGwtPT0/s2rVLOYg5OjpaORMLAGrXro21a9fip59+wsiRI1G2bFls27YNlSpVUtYZNmwYkpKS0KtXL7x8+RJ169bFrl27lAOaiYiISL/pPAECgH79+qFfv35ZPhcWFpaprG3btmjbtm227clkMowfPx7jx4/XVIhERERUgOh8IUQiIiKivMYEiIiIiPQOEyAiIiLSO0yAiIiISO8wASIiIiK9wwSIiIiI9A4TICIiItI7TICIiIhI7zABIiIiIr3zSawE/akRQgCQNlHVtLS0NCQnJyM+Pj7b3WxJO9j3usF+1x32ve6w73Uj4+92xt/xnDABykJCQgIAwNnZWceREBERkboSEhJgY2OTYx2ZyE2apGfS09Px8OFDWFlZQSaTabTt+Ph4ODs74969e7C2ttZo25Qz9r1usN91h32vO+x73RBCICEhAU5OTiobqWeFV4CyYGBggBIlSmj1PaytrfmPQkfY97rBftcd9r3usO/z3vuu/GTgIGgiIiLSO0yAiIiISO8wAcpjJiYmGDNmDExMTHQdit5h3+sG+1132Pe6w77/9HEQNBEREekdXgEiIiIivcMEiIiIiPQOEyAiIiLSO0yAiIiISO8wAcpD8+fPh6urK0xNTeHt7Y1Tp07pOqQCb/Lkyfjss89gZWWFYsWKoVWrVrh27Zquw9JLU6ZMgUwmw6BBg3Qdil548OABunTpgsKFC8PMzAyVK1fGmTNndB1WgadQKDB69Gi4ubnBzMwMpUuXxoQJE3K1NxXlLSZAeWTDhg0IDg7GmDFjEBERgapVqyIgIACPHz/WdWgF2qFDh9C3b1+cOHECe/fuRVpaGpo0aYKkpCRdh6ZXTp8+jcWLF6NKlSq6DkUvvHjxAnXq1IGRkRH++ecfXL58GTNmzICdnZ2uQyvwpk6dioULF2LevHm4cuUKpk6dimnTpuHXX3/VdWj0Dk6DzyPe3t747LPPMG/ePADSfmPOzs7o378/hg8fruPo9MeTJ09QrFgxHDp0CPXr19d1OHohMTER1apVw4IFCzBx4kR4enpi9uzZug6rQBs+fDiOHj2K8PBwXYeid5o3bw57e3ssX75cWfbVV1/BzMwMv//+uw4jo3fxClAeSE1NxdmzZ+Hn56csMzAwgJ+fH44fP67DyPRPXFwcAKBQoUI6jkR/9O3bF1988YXKzz9p1/bt21GjRg20bdsWx
YoVg5eXF5YuXarrsPRC7dq1sX//fly/fh0A8O+//+LIkSNo1qyZjiOjd3Ez1Dzw9OlTKBQK2Nvbq5Tb29vj6tWrOopK/6Snp2PQoEGoU6cOKlWqpOtw9ML69esRERGB06dP6zoUvXL79m0sXLgQwcHBGDlyJE6fPo0BAwbA2NgYgYGBug6vQBs+fDji4+NRvnx5yOVyKBQK/Pzzz+jcubOuQ6N3MAEivdG3b19cvHgRR44c0XUoeuHevXsYOHAg9u7dC1NTU12Ho1fS09NRo0YNTJo0CQDg5eWFixcvYtGiRUyAtGzjxo1Ys2YN1q5di4oVKyIyMhKDBg2Ck5MT+/4TwwQoDxQpUgRyuRyPHj1SKX/06BEcHBx0FJV+6devH3bu3InDhw+jRIkSug5HL5w9exaPHz9GtWrVlGUKhQKHDx/GvHnzkJKSArlcrsMICy5HR0dUqFBBpczDwwObN2/WUUT6Y+jQoRg+fDg6dOgAAKhcuTLu3r2LyZMnMwH6xHAMUB4wNjZG9erVsX//fmVZeno69u/fDx8fHx1GVvAJIdCvXz9s3boVBw4cgJubm65D0huNGzfGhQsXEBkZqTxq1KiBzp07IzIyksmPFtWpUyfTcg/Xr1+Hi4uLjiLSH8nJyTAwUP3TKpfLkZ6erqOIKDu8ApRHgoODERgYiBo1aqBmzZqYPXs2kpKSEBQUpOvQCrS+ffti7dq1+PPPP2FlZYXY2FgAgI2NDczMzHQcXcFmZWWVaayVhYUFChcuzDFYWjZ48GDUrl0bkyZNQrt27XDq1CksWbIES5Ys0XVoBV6LFi3w888/o2TJkqhYsSLOnTuHmTNnonv37roOjd7BafB5aN68eZg+fTpiY2Ph6emJuXPnwtvbW9dhFWgymSzL8pUrV6Jbt255GwzB19eX0+DzyM6dOzFixAjcuHEDbm5uCA4ORs+ePXUdVoGXkJCA0aNHY+vWrXj8+DGcnJzQsWNHhISEwNjYWNfh0VuYABEREZHe4RggIiIi0jtMgIiIiEjvMAEiIiIivcMEiIiIiPQOEyAiIiLSO0yAiIiISO8wASIiIiK9wwSIiD5Jrq6uXDCRiLSGCRARaUy3bt0gk8kwZcoUlfJt27Zluyp3dk6fPo1evXppMjwVrq6ukMlkKoemNsqVyWTYtm2bRtoiIu1gAkREGmVqaoqpU6fixYsXH9VO0aJFYW5urqGosjZ+/HjExMQoj3Pnzmn1/dSVlpam6xCICiwmQESkUX5+fnBwcMDkyZNzrLd582ZUrFgRJiYmcHV1xYwZM1Sef/sWmBACY8eORcmSJWFiYgInJycMGDBAWTclJQVDhgxB8eLFYWFhAW9vb4SFhb03VisrKzg4OCiPokWLQqFQoEePHnBzc4OZmRnc3d0xZ86cTK9dsWKFMn5HR0f069dPGTcAtG7dGjKZTPkYABYuXIjSpUvD2NgY7u7u+O2331TalMlkWLhwIb788ktYWFjg559/fu85ENGHYQJERBoll8sxadIk/Prrr7h//36Wdc6ePYt27dqhQ4cOuHDhAsaOHYvRo0dj1apVWdbfvHkzZs2ahcWLF+PGjRvYtm0bKleurHy+X79+OH78ONavX4/z58+jbdu2aNq0KW7cuKF2/Onp6ShRogQ2bdqEy5cvIyQkBCNHjsTGjRuVdRYuXIi+ffuiV69euHDhArZv344yZcoAkG7dAdKGuzExMcrHW7duxcCBA/HDDz/g4sWL6N27N4KCgnDw4EGV9x87dixat26NCxcucAdxIm0SREQaEhgYKFq2bCmEEKJWrVqie/fuQgghtm7dKt7+ddOpUyfh7++v8tqhQ4eKChUqKB+7uLiIWbNmCSGEmDFjhihXrpxITU3N9J53794VcrlcPHjwQKW8cePGYsSIEdnG6uLiIoyNjYWFhYXymDNnTpZ1+/btK7766ivlYycnJzFq1Khs2wYgtm7dqlJWu3Zt0bNnT5Wytm3bis8//1zldYMGD
cq2XSLSHF4BIiKtmDp1KkJDQ3HlypVMz125cgV16tRRKatTpw5u3LgBhUKRqX7btm3x6tUrlCpVCj179sTWrVvx5s0bAMCFCxegUChQrlw5WFpaKo9Dhw7h1q1bOcY4dOhQREZGKo+uXbsCAObPn4/q1aujaNGisLS0xJIlSxAdHQ0AePz4MR4+fIjGjRur1R/ZnfO7/VOjRg212iWiD2Oo6wCIqGCqX78+AgICMGLECHTr1u2j2nJ2dsa1a9ewb98+7N27F99//z2mT5+OQ4cOITExEXK5HGfPnoVcLld5naWlZY7tFilSRHnrKsP69esxZMgQzJgxAz4+PrCyssL06dNx8uRJAICZmdlHncv7WFhYaLV9IpIwASIirZkyZQo8PT3h7u6uUu7h4YGjR4+qlB09ehTlypXLlMRkMDMzQ4sWLdCiRQv07dsX5cuXx4ULF+Dl5QWFQoHHjx+jXr16Hx3z0aNHUbt2bXz//ffKsrevJFlZWcHV1RX79+9Hw4YNs2zDyMgo05WsjHMODAxUea8KFSp8dMxEpD4mQESkNZUrV0bnzp0xd+5clfIffvgBn332GSZMmID27dvj+PHjmDdvHhYsWJBlO6tWrYJCoYC3tzfMzc3x+++/w8zMDC4uLihcuDA6d+6Mrl27YsaMGfDy8sKTJ0+wf/9+VKlSBV988YVaMZctWxarV6/G7t274ebmht9++w2nT5+Gm5ubss7YsWPx3XffoVixYmjWrBkSEhJw9OhR9O/fHwCUCVKdOnVgYmICOzs7DB06FO3atYOXlxf8/PywY8cObNmyBfv27VOzV4lII3Q9CImICo63B0FniIqKEsbGxuLdXzd//PGHqFChgjAyMhIlS5YU06dPV3n+7UHQW7duFd7e3sLa2lpYWFiIWrVqiX379inrpqamipCQEOHq6iqMjIyEo6OjaN26tTh//ny2sb7d/ttev34tunXrJmxsbIStra3o06ePGD58uKhatapKvUWLFgl3d3fl+/Xv31/53Pbt20WZMmWEoaGhcHFxUZYvWLBAlCpVShgZGYly5cqJ1atXq7SJLAZPE5F2yIQQQtdJGBEREVFe4iwwIiIi0jtMgIiIiEjvMAEiIiIivcMEiIiIiPQOEyAiIiLSO0yAiIiISO8wASIiIiK9wwSIiIiI9A4TICIiItI7TICIiIhI7zABIiIiIr3DBIiIiIj0zv8AISnVHXn/ToUAAAAASUVORK5CYII=", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Percent Error of ZNE Estimate -10.243728919710081 %\n" - ] - } - ], + "outputs": [], "source": [ + "# EXERCISE 5\n", "hamiltonian = 0\n", "\n", "for i in range(20):\n", @@ -1330,7 +982,8 @@ "\n", "\n", "plot_zero_noise_extrapolation(factors, results, linear)\n", - "plot_zero_noise_extrapolation(factors, results, quadratic)\n" + "plot_zero_noise_extrapolation(factors, results, quadratic)\n", + "" ] }, { @@ -1338,8 +991,7 @@ "id": "62f0113a-c9e5-4da6-8fd5-9906243a04e6", "metadata": {}, "source": [ - "### 3.3c: QEC Experiments ###\n", - "\n" + "### 3.3c: QEC Experiments" ] }, { @@ -1347,18 +999,20 @@ "id": "51443267-658a-4a75-8f15-5f1eefae39b9", "metadata": {}, "source": [ - "Noisy circuit simulation is perhaps most useful as a tools for QEC researchers. One can test how a code will perform in a variety of different noise conditions. Assuming an accurate noise model, this can be a great way to assess characteristics of new codes. Below you will add noise to the Steane code you prepared in lab 2. \n", + "Noisy circuit simulation is perhaps most useful as a tool for QEC researchers. One can test how a code will perform in a variety of different noise conditions. Assuming an accurate noise model, this can be a great way to assess characteristics of new codes. Below you will add noise to the Steane code you prepared in lab 2. \n", "\n", "\n", - "
\n", - "

Exercise 6 :

\n", - "

\n", + "

\n", + "\n", + "**Exercise 6:**\n", + "\n", "Apply noise to the Steane code in the following three ways and determine which case produces the best and worst logical error rates, keeping the probability of error fixed at 0.05. In which cases is the logical error rate an improvement over the 0.05 error rate?\n", - "1. Use $\\texttt{cudaq.apply\\_noise(cudaq.XError, p, data\\_qubits[j])}$ to manually apply Kraus operators following encoding of the Steane code but before the stabilizer checks are run. These errors are not tied to gates but model errors induced while the system idles.\n", - "2. Now, use $\\texttt{cudaq.apply\\_noise(cudaq.Depolarization2, p, data\\_qubits[i], data\\_qubits[j])}$ to apply a depolarization error following all of the two qubit gates in the encoding circuit, whee q and r are the two qubits involved in the gate operation.\n", - "3. Apply a bitflip noise channel to all $\\texttt{mz}$ measurements. In this case, errors are also possible in measurements performed on the ancillas. This helps model situations where measurements are performed in a way that is not fault tolerant.\n", - "

\n", - "
\n" + "\n", + "1. Use `cudaq.apply_noise(cudaq.XError, p, data_qubits[j])` to manually apply Kraus operators following encoding of the Steane code but before the stabilizer checks are run. These errors are not tied to gates but model errors induced while the system idles.\n", + "2. Now, use `cudaq.apply_noise(cudaq.Depolarization2, p, data_qubits[i], data_qubits[j])` to apply a depolarization error following all of the two qubit gates in the encoding circuit, where q and r are the two qubits involved in the gate operation.\n", + "3. Apply a bitflip noise channel to all `mz` measurements. In this case, errors are also possible in measurements performed on the ancillas. This helps model situations where measurements are performed in a way that is not fault tolerant.\n", + "\n", + "
" ] }, { @@ -1368,8 +1022,7 @@ "metadata": {}, "outputs": [], "source": [ - "import cudaq\n", - "\n", + "# EXERCISE 6\n", "cudaq.set_target('stim')\n", "\n", "p = 0.05\n", @@ -1517,7 +1170,8 @@ "\n", "logical_rate = ones/(zeros+ones)\n", " \n", - "print(f\"logical error rate:{logical_rate}\")\n" + "print(f\"logical error rate:{logical_rate}\")\n", + "" ] }, { @@ -1552,7 +1206,8 @@ "\n", " noise = cudaq.NoiseModel()\n", " noise.add_all_qubit_channel(\"mz\", cudaq.BitFlipChannel(0.1))\n", - "\"\"\"\n" + "\"\"\"\n", + "" ] }, { @@ -1560,30 +1215,30 @@ "id": "4cfd73b9-21d0-44ae-b314-35b6e4963b22", "metadata": {}, "source": [ - "## 3.4: Using Dynamical Simulations to Build a Noise Model ###\n", + "---\n", "\n", - "The noise models used thus far are meant to mimic the underlying physics of physical qubits. Often, noise models are heavily informed by experiment, but extracting meaningful insights can be extremely difficult for such complex systems. \n", + "## 3.4 Using Dynamical Simulations to Build a Noise Model\n", "\n", - "\"Drawing\"\n", + "The noise models used thus far are meant to mimic the underlying physics of physical qubits. Often, noise models are heavily informed by experiment, but extracting meaningful insights can be extremely difficult for such complex systems. \n", "\n", - "To help with this task, the physics of the qubits can also be simulated to better understand noise sources and improve interpretation of experimental data. This sort of simulation is known as dynamical simulation and models the evolution of a quantum system over time as the system interacts with its environment. \n", + "\"Flowchart\n", "\n", - "Exercise:\n", + "To help with this task, the physics of the qubits can also be simulated to better understand noise sources and improve interpretation of experimental data. This sort of simulation is known as **dynamical simulation** and models the evolution of a quantum system over time as the system interacts with its environment. 
\n", "\n", - "The code below will help you walk through an example of using dynamical simulation to produce a noise model for a single qubit amplitude damping channel. Recall, the corresponding noise channel looks like this. \n", + "The code below will help you walk through an example of using dynamical simulation to produce a noise model for a single qubit **amplitude damping** channel. Recall, the corresponding noise channel looks like this. \n", "\n", - "$$ \\epsilon(\\rho) = \\sqrt{1-p}*\\rho + \\sqrt{p}*\\rho*0.5*(X+iY) $$\n", + "$$ \\epsilon(\\rho) = \\sqrt{1-p} \\cdot \\rho + \\sqrt{p} \\cdot \\rho \\cdot 0.5 \\cdot (X+iY) $$\n", "\n", "Thus, the goal is to simulate a simple qubit system to determine what $p$, the probability of energy loss resulting in decay to the ground state, is.\n", "\n", "Dynamical simulation is its own topic that warrants a detailed introduction that will not be provided here. Instead, the steps of the dynamical simulation will be discussed at a high level while curious readers can explore the CUDA-Q dynamics page for more information and more detailed examples.\n", "\n", - "To get started, import the following functions and libraries. This example will use the CUDA-Q dynamics backend, set like any other backend. " + "To get started, this example will use the CUDA-Q dynamics backend, set like any other backend." ] }, { "cell_type": "code", - "execution_count": 20, + "execution_count": null, "id": "779f4d94-c0f1-4417-a8ed-417c2b0366f7", "metadata": {}, "outputs": [], @@ -1607,12 +1262,13 @@ "The code below sets up the the problem Hamiltonian, defines the dimensions of the system and specifies the initial ground state. The terms have more meaning than described above, but their details are not relevant for the purposes of this exercise.\n", "

\n", "
\n", - "\n" + "\n", + "" ] }, { "cell_type": "code", - "execution_count": 21, + "execution_count": null, "id": "33b1b507-26d2-4753-9179-344a9111c2ff", "metadata": {}, "outputs": [], @@ -1640,7 +1296,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": null, "id": "8b205e7e-6a71-4b25-8537-f401cf049d08", "metadata": {}, "outputs": [], @@ -1662,31 +1318,10 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": null, "id": "3316d5fb-7d4d-42ad-aec4-2cd4ac199cc2", "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Text(0.5, 1.0, 'No Decoherence')" - ] - }, - "execution_count": 23, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjcAAAHHCAYAAABDUnkqAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjcsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvTLEjVAAAAAlwSFlzAAAPYQAAD2EBqD+naQAAUNdJREFUeJzt3XlYVPXiBvB3FmaGHRQBWWRxX1ExCEvNokjN8ppLWmpq2uI1i+stLdPMa5iZaenNq6VmZZpWdktTCzXNSNxwwwVRREH2ZVhnYOb7+8Ocn1xRGRw4M8P7eZ55ZM6cM/POUWZez/mec2RCCAEiIiIiOyGXOgARERGRJbHcEBERkV1huSEiIiK7wnJDREREdoXlhoiIiOwKyw0RERHZFZYbIiIisissN0RERGRXWG6IiIjIrrDcEJFd2rNnD2QyGTZv3ix1FCJqZCw3RDZu7dq1kMlk0Gg0yMjIuOnxBx54AF26dLHY67399tuQyWSmm5OTE1q1aoXBgwdjzZo10Ol0FnstIqL6YLkhshM6nQ4LFixotNf75JNP8MUXX+Djjz/Gc889h4KCAkyYMAERERG4fPlyo+UgIvpfLDdEdqJ79+5YtWoVMjMzG+X1hg0bhmeeeQYTJ07E7NmzsX//fnz55Zc4efIkhg8f3igZrEFZWZnUEYjof7DcENmJN954AwaDoU5bb6qrqzFv3jy0bt0aarUawcHBeOONN+56l9LTTz+N5557DgcOHMAvv/xS47EDBw7g0Ucfhbu7O5ycnNCvXz/s37//pufIyMjAxIkT4efnB7VajZCQELz44ovQ6/WmeS5cuIDhw4ejWbNmcHJywr333outW7fWmsloNGL+/PkICAiARqPBQw89hPPnz980X13yXd8ll5ycjNGjR8PT0xP333+/6fEvv/wS4eHhcHR0RLNmzfDUU0/dtBXr+m7C5ORk9O/fH05OTvD398fChQtvylRZWYm3334b7dq1g0ajQcuWLTF06FCkpqbWeH9LlixB586dodFo4OPjg+effx6FhYW1rg+ipoDlhshOhISEYOzYsXXaevPcc89h9uzZ6NmzJz788EP069cPcXFxeOqpp+46x5gxYwAAO3fuNE3btWsX+vbtC61Wizlz5uDdd99FUVERHnzwQSQmJprmy8zMREREBDZs2ICRI0fio48+wpgxY/Dbb7+hvLwcAJCdnY3evXtjx44deOmllzB//nxUVlbi8ccfx/fff39TngULFuD777/H9OnTMXPmTPz5
5594+umna8xT13zXDR8+HOXl5Xj33XcxadIkAMD8+fMxduxYtG3bFosXL8Yrr7yC+Ph49O3bF0VFRTWWLywsxKOPPoqwsDB88MEH6NChA15//XX8/PPPpnkMBgMee+wxzJ07F+Hh4fjggw8wbdo0FBcX4+TJk6b5nn/+efzzn//Efffdh6VLl2L8+PH46quvEBMTg6qqqrr+tRHZF0FENm3NmjUCgDh48KBITU0VSqVSvPzyy6bH+/XrJzp37my6n5SUJACI5557rsbzTJ8+XQAQu3btuu3rzZkzRwAQubm5tT5eWFgoAIi//e1vQgghjEajaNu2rYiJiRFGo9E0X3l5uQgJCREPP/ywadrYsWOFXC4XBw8evOl5ry/7yiuvCABi3759psdKSkpESEiICA4OFgaDQQghxO7duwUA0bFjR6HT6UzzLl26VAAQJ06cMDvf9fc+atSoGtnS0tKEQqEQ8+fPrzH9xIkTQqlU1pjer18/AUCsW7fONE2n0wlfX1/x5JNPmqatXr1aABCLFy++5brYt2+fACC++uqrGo9v37691ulETQW33BDZkdDQUIwZMwYrV67E1atXa51n27ZtAIDY2Nga0//xj38AwC1379SVi4sLAKCkpAQAkJSUhJSUFIwePRr5+fnIy8tDXl4eysrK8NBDD2Hv3r0wGo0wGo3YsmULBg8ejF69et30vDKZzJQ/IiKixu4gFxcXTJ48GWlpaUhOTq6x3Pjx46FSqUz3+/TpA+Dari1z8t3ohRdeqHH/u+++g9FoxIgRI0zL5+XlwdfXF23btsXu3btvWkfPPPOM6b5KpUJERIQpEwB8++238PLywtSpU2+5LjZt2gR3d3c8/PDDNV43PDwcLi4uN70uUVOhlDoAEVnWrFmz8MUXX2DBggVYunTpTY9funQJcrkcbdq0qTHd19cXHh4euHTp0l29fmlpKQDA1dUVAJCSkgIAGDdu3C2XKS4uhl6vh1arveNh65cuXUJkZORN0zt27Gh6/MbnaNWqVY35PD09AcA0JqWu+a4vB1zbBXijlJQUCCHQtm3bWpd3cHCocT8gIMBUUG7Mdfz4cdP91NRUtG/fHkrlrT+mU1JSUFxcDG9v71ofz8nJueWyRPaM5YbIzoSGhuKZZ57BypUrMWPGjFvO979frpZyfTzI9fJ0favH+++/j+7du9e6jIuLCwoKChokj0KhqHW6EMKsfDdydHSscd9oNEImk+Hnn3+u9fX+d/k7Zaoro9EIb29vfPXVV7U+3qJFC7Oej8hesNwQ2aFZs2bhyy+/xHvvvXfTY0FBQTAajUhJSTFt7QCuDdQtKipCUFDQXb32F198AQCIiYkBALRu3RoA4Obmhujo6Fsu16JFC7i5udUYLFuboKAgnD179qbpZ86cMT1ujrrmu9NzCCEQEhKCdu3a1es5anvOAwcOoKqq6qYtPzfO8+uvv+K+++67qXARNWUcc0Nkh1q3bo1nnnkG//nPf5CVlVXjsYEDBwIAlixZUmP64sWLAQCDBg2q9+uuX78en376KaKiovDQQw8BAMLDw9G6dWssWrTItMvqRrm5uQAAuVyOIUOG4Mcff8ShQ4dumu/6Vo2BAwciMTERCQkJpsfKysqwcuVKBAcHo1OnTmZlrmu+2xk6dCgUCgXmzp1709YXIQTy8/PNygQATz75JPLy8rBs2bKbHrv+GiNGjIDBYMC8efNumqe6uvqmo7SImgpuuSGyU2+++Sa++OILnD17Fp07dzZNDwsLw7hx47By5UoUFRWhX79+SExMxOeff44hQ4agf//+dXr+zZs3w8XFBXq9HhkZGdixYwf279+PsLAwbNq0yTSfXC7Hp59+igEDBqBz584YP348/P39kZGRgd27d8PNzQ0//vgjAODdd9/Fzp070a9fP0yePBkdO3bE1atXsWnTJvz+++/w8PDAjBkz8PXXX2PAgAF4+eWX0axZM3z++ee4ePEivv32W8jl5v2fzZx8
t9K6dWv861//wsyZM5GWloYhQ4bA1dUVFy9exPfff4/Jkydj+vTpZuUaO3Ys1q1bh9jYWCQmJqJPnz4oKyvDr7/+ipdeeglPPPEE+vXrh+effx5xcXFISkrCI488AgcHB6SkpGDTpk1YunQphg0bZtbrEtkF6Q7UIiJLuPFQ8P81btw4AaDGoeBCCFFVVSXmzp0rQkJChIODgwgMDBQzZ84UlZWVd3y964dDX79pNBoREBAgHnvsMbF69epbPsfRo0fF0KFDRfPmzYVarRZBQUFixIgRIj4+vsZ8ly5dEmPHjhUtWrQQarVahIaGiilTptQ4nDs1NVUMGzZMeHh4CI1GIyIiIsRPP/1U43muHwq+adOmGtMvXrwoAIg1a9aYne9Oh8F/++234v777xfOzs7C2dlZdOjQQUyZMkWcPXvWNM//Hpp/3bhx40RQUFCNaeXl5eLNN980/T35+vqKYcOGidTU1BrzrVy5UoSHhwtHR0fh6uoqunbtKl577TWRmZlZa04ieycTwswRbERERERWjGNuiIiIyK6w3BAREZFdYbkhIiIiu8JyQ0RERHaF5YaIiIjsCssNERER2ZUmdxI/o9GIzMxMuLq6Nti1dYiIiMiyhBAoKSmBn5/fHU/W2eTKTWZmJgIDA6WOQURERPVw+fJlBAQE3HaeJlduXF1dAVxbOW5ubhKnISIiorrQarUIDAw0fY/fTpMrN9d3Rbm5ubHcEBER2Zi6DCnhgGIiIiKyKyw3REREZFdYboiIiMiusNwQERGRXWG5ISIiIrvCckNERER2heWGiIiI7ArLDREREdkVlhsiIiKyK5KWm71792Lw4MHw8/ODTCbDli1b7rjMnj170LNnT6jVarRp0wZr165t8JxERERkOyQtN2VlZQgLC8Py5cvrNP/FixcxaNAg9O/fH0lJSXjllVfw3HPPYceOHQ2clIiIiGyFpNeWGjBgAAYMGFDn+VesWIGQkBB88MEHAICOHTvi999/x4cffoiYmJiGiklEREQ2xKbG3CQkJCA6OrrGtJiYGCQkJEiUiIiIiABAX21EXqkOGUUVyNZWSprFpq4KnpWVBR8fnxrTfHx8oNVqUVFRAUdHx5uW0el00Ol0pvtarbbBcxIREdkTIQTySvVILyhDWl45LuWX4VJBOdLyy5FVXIHiiipUVhlN84cHeeLbF3tLltemyk19xMXFYe7cuVLHICIianRCCBiMAkYBGIWA+OtPgxAwGK79aTQKVBsFSiqrUViuR2GZHrmlOlwuKMel/HKkF5TjckE5yvSGOr2mWimHg0LWwO/s9myq3Pj6+iI7O7vGtOzsbLi5udW61QYAZs6cidjYWNN9rVaLwMDABs1JRETUmHJLdDhwMR9nrpbgTJYW6QXlKCirQlG5HtVGYZHXkMkAP3dHBDV3+uvmjODmTvD3cIKHkwPcNA5w0SihkEtbbAAbKzdRUVHYtm1bjWm//PILoqKibrmMWq2GWq1u6GhERESNKq9Uhy1HM/BDUiZOZBTX+3lkMkAhk8FVo4Snkwqezio0c1Yh0PNaiWnVzAmBzZwQ4OkIjYPCgu+g4UhabkpLS3H+/HnT/YsXLyIpKQnNmjVDq1atMHPmTGRkZGDdunUAgBdeeAHLli3Da6+9hgkTJmDXrl345ptvsHXrVqneAhERUaPKLKrAit9SseHgZeir/3+cS6eWbggLdEcHXzeEtnBGc2c1PJ0d4OiggEwmg1wGyGWya2VGLoNCJoNCLoNMJv2WFkuTtNwcOnQI/fv3N92/vvto3LhxWLt2La5evYr09HTT4yEhIdi6dSteffVVLF26FAEBAfj00095GDgREdk9bWUVFu88h68OXEKV4dqupm4B7hgeHoBHu7REC1fupbhOJoSwzM44G6HVauHu7o7i4mK4ublJHYeIiOi2hBDYeuIq3vkxGTkl147+vTe0GV5+qC2iQpvb5ZaX2pjz/W1TY26IiIiakuLyKsz47jh+PpkF
AAjxcsY7T3RGn7YtJE5m3VhuiIiIrNDhSwV4+eskZBRVwEEhw5T+bfBCv9Y2M6hXSiw3REREVsRoFPjkt1Qs/uUcDEaBoOZOWDaqJ7oGuEsdzWaw3BAREVmJnJJKxG48ht/P5wEAnujuh38N6QJXjYPEyWwLyw0REZEV+O1cLv7xTRLySvVwdFDgnSc6Y1h4QJMZMGxJLDdEREQS0lcbsWjnWazcewEA0MHXFctG90QbbxeJk9kulhsiIiKJXMgtxSsbk3D8yrUzDI+LCsLMgR05aPgusdwQERE1Mn21Ef/5LRUf7z4PfbURHk4OWPhkNzzS2VfqaHaB5YaIiKiRCCGw60wOFvx8Bik5pQCAfu1aIG5oV/h51H4BaDIfyw0REVED01cb8fPJq1i9Pw3HLhcBAJo7qzB7cCc8HubHQcMWxnJDRERkYZVVBqQXlCPpchH+OJ+H+DM5KKmsBgBoHOR4tncIXugXCg8nlcRJ7RPLDRER0W0YjAJJl4twMqMYKTklKCyrQnFFFUp01aiqNqLaaES1QUBv+P8/C8r0Nz2Pj5saoyOCMCoyEN6uGgneSdPBckNERFSLc9klWLM/DdtPXkVheZXZyzupFOji544eQR6I7uiDnq08oZBz91NjYLkhIiK6QWZRBf61NRnbTmSZprk7OuCeYE+083GFj5sGbo5KuKgdoFLK4SCXQamQQ6mQQfXXn96uGng6OXAsjURYboiIiP6y8WA65v6YjHK9ATIZENPJF2OjghAR0gxKhVzqeFRHLDdERNTk6aoNeGvLSXxz6AoAoFeQJ+YN6YKOLd0kTkb1wXJDRERNWrm+Gs9/cRj7UvIglwH/eKQ9XuzXGnKOj7FZLDdERNRklemqMW51Ig5dKoSTSoEVz4Sjb7sWUseiu8RyQ0RETZK+2ogXvjyMQ5cK4aZRYu2ECPRs5Sl1LLIAlhsiImpyhBD45+Zj2JeSByeVAl9MjERYoIfUschCOPSbiIianHUJl/BDUiaUchlWPBPOYmNnWG6IiKhJOX6lCPO3ngYAvDmoI8fY2CGWGyIiajKKK6owZf0R6A1GxHT2wbO9g6WORA2A5YaIiJoEIQRe33wclwsqEODpiIXDwngGYTvFckNERE3C53+kYfupLDgoZFg+uifcHR2kjkQNhOWGiIjs3vErRZi/7do4mzcGduQAYjvHckNERHbt+jibKoPAo519Oc6mCWC5ISIiu3XjOJvAZo54b1g3jrNpAlhuiIjIbnGcTdPEckNERHbpxnE2bw7siG4BHtIGokbDckNERHbnf8fZjOM4myaF5YaIiOyKEAKvbT7GcTZNGMsNERHZlX/vScWOU9kcZ9OEsdwQEZHd2H02B4t2ngUAzH28C8fZNFEsN0REZBfS8sow7eujEAIYHdkKoyNbSR2JJMJyQ0RENi+3RIdn1yRCW1mN8CBPvD24s9SRSEIsN0REZNO0lVV4dk0i0vLLEeDpiE+e7gmVkl9vTZlS6gBERET1lV+qw7NrDuJUphZeLip8MTES3m4aqWORxFhuiIjIJl3MK8PEzw/iQm4Zmjmr8PmECIR4OUsdi6wAyw0REdkUIQR+SMrEm9+fQJneAD93DdZNjEQbbxepo5GVYLkhIiKbIITA0ctFWLj9DP68UAAAiAxpho9G9YAPd0XRDVhuiIjIalQbjCivMqBCb0CZrhrlegMKyvQ4fKkQO5OzcfqqFgCgVsrx0gNtMKV/aygVHDxMNbHcEBFRoyrXV2PXmRwcvlSI5Ewtckt0KCzXo0xvgL7aeNtl1Uo5Huvmh1cfbosAT6dGSky2huWGiIgaRY62EkviU/DD0QyU6Q23nVchl8HJQQFHlQIuGiW6+rvj3tDmGNilJdydeDkFuj2WGyIialBCCHz2+0V8+Ms5U6kJau6E/u290S3AHX4ejmjurIKTWmkqNGqlnBe7pHpjuSEiogZTpqvGa5uPY+uJqwCA7oEeeP3RDrg3tBnLCzUYlhsiImoQ
JZVVGPNZIpIuF8FBIcPsxzrh6cggyOUsNdSwWG6IiMjiyvXVmLD2IJIuF8HDyQGfju2FXsHNpI5FTQTLDRERWZQQAq9uTMLBtEK4apT4cmIkuvi7Sx2LmhCeHICIiCzqk99SseNUNlQKOdaOv4fFhhodyw0REVlMQmo+Fu04CwCY+0RnhAdxVxQ1PpYbIiKyiJLKKkzfdAxGAQwPD8CoiFZSR6ImiuWGiIgs4t1tp5FRVIEAT0fMebyz1HGoCWO5ISKiu3YorQBfJ14GALw/LAwuah6vQtJhuSEiorsihMD8bacBAE/dE4io1s0lTkRNHcsNERHdlW0nsnA0vQhOKgViH24ndRwilhsiIqo/fbURC3ecAQBM6hMKbzeNxImIrKDcLF++HMHBwdBoNIiMjERiYuJt51+yZAnat28PR0dHBAYG4tVXX0VlZWUjpSUioht9+eclXMovRwtXNSb3DZU6DhEAicvNxo0bERsbizlz5uDIkSMICwtDTEwMcnJyap1//fr1mDFjBubMmYPTp0/js88+w8aNG/HGG280cnIiIiquqMJHu1IAAK9Gt4MzBxGTlZC03CxevBiTJk3C+PHj0alTJ6xYsQJOTk5YvXp1rfP/8ccfuO+++zB69GgEBwfjkUcewahRo+64tYeIiCzv33vOo6i8Cm28XTCiV4DUcYhMJCs3er0ehw8fRnR09P+HkcsRHR2NhISEWpfp3bs3Dh8+bCozFy5cwLZt2zBw4MBbvo5Op4NWq61xIyKiu3OlsBxr9qcBAGYO6AClQvJRDkQmkm1DzMvLg8FggI+PT43pPj4+OHPmTK3LjB49Gnl5ebj//vshhEB1dTVeeOGF2+6WiouLw9y5cy2anYioqftg5znoq424N7QZHuzgLXUcohpsqmrv2bMH7777Lv7973/jyJEj+O6777B161bMmzfvlsvMnDkTxcXFptvly5cbMTERkf05mVGM749mAADeHNgJMplM4kRENUm25cbLywsKhQLZ2dk1pmdnZ8PX17fWZd566y2MGTMGzz33HACga9euKCsrw+TJk/Hmm29CLr+5q6nVaqjVasu/ASKiJkgIgXf/OmHfE9390DWAV/wm6yPZlhuVSoXw8HDEx8ebphmNRsTHxyMqKqrWZcrLy28qMAqFAsC1XzgiImpYe87m4o/UfKgUckx/pL3UcYhqJelxe7GxsRg3bhx69eqFiIgILFmyBGVlZRg/fjwAYOzYsfD390dcXBwAYPDgwVi8eDF69OiByMhInD9/Hm+99RYGDx5sKjlERNQwqg1GxP18bavN+PuCEdjMSeJERLWTtNyMHDkSubm5mD17NrKystC9e3ds377dNMg4PT29xpaaWbNmQSaTYdasWcjIyECLFi0wePBgzJ8/X6q3QETUZGw+fAXnskvh4eSAl/q3kToO0S3JRBPbn6PVauHu7o7i4mK4ublJHYeIyCaU66vxwPt7kFOiw1uPdcLE+0OkjkRNjDnf3zZ1tBQREUlj1d6LyCnRoVUzJ4y5N0jqOES3xXJDRES3laOtxH/2pgIAXnu0PVRKfnWQdeO/UCIiuq2FO86iXG9A90APDOraUuo4RHfEckNERLd07HIRNh++AgCYM5gn7CPbwHJDRES1EkLg7R9PAQCe7BmAHq08JU5EVDcsN0REVKstSRk4ml4EZ5UCrz/KE/aR7WC5ISKimxSXV+HdbdcuYjzlwTbwdtNInIio7lhuiIjoJu/8lIzcEh1at3DmOW3I5rDcEBFRDbvP5ODbI1cgkwELh4VBreTlbci2sNwQEZFJVnEl/rn5GABg4n0hCA/iIGKyPSw3REQEAKgyGDH16yPIK9Wjg68rpsdwEDHZJpYbIiKCEAJvbTmJg2mFcFUr8ckz4dA4cHcU2SaWGyKiJk4IgQU/n8GGg5chlwEfjAhDiJez1LGI6k0pdQAiIpKOvtqIOf89ha8T0wEAC4Z2wyOdfSVORXR3WG6IiJqo8zmlmPnd
cRxMK4RMBrw9uDNG3BModSyiu8ZyQ0TUhAghcDJDi68PpuObg5dRbRRwUSux9KnueKijj9TxiCyC5YaIyA6U6qqRnKlFam4pCsv1KKmshq7KiMpqg+lPbUUVkjO1yC/Tm5aL7uiN2Y91RqvmThKmJ7IslhsiIhtlNArsOJWFjYcuY++5XBhF3ZZTKeWI6eyLURGB6N3aq2FDEkmA5YaIyAadyizGrC0ncTS9yDStpbsGHXxd0dxFDVeNEhoHBdRKuelPZ5US7Xxd0cHXlYd5k11juSEisjHfHr6CGd8dR5VBwFmlwLjewRjeK5CHbxP9heWGiMiGLN99Hu/vOAsAeLiTD+Y90QW+7rxiN9GNWG6IiGzEl39eMhWbFx9ojX8+0h5yuUziVETWh+WGiMgG7DqTjdk/nAQAvBrdDtOi20qciMh68fILRERW7mpxBV7deAxGAYzoFYCXH2ojdSQiq8ZyQ0RkxYxGgembjqG4ogrdAtwx/29dIZNxVxTR7bDcEBFZsc8T0rD/fD4cHRRYMrI7HBT82Ca6E/6WEBFZqZySSizeeQ4A8Magjght4SJxIiLbwHJDRGSlFmw7gxJdNcIC3PF0RCup4xDZDJYbIiIrdPhSIb47mgGZDHjniS485JvIDCw3RERWaNFf57MZ1jMAYYEe0oYhsjEsN0REVuaP83lIuJAPlUKOVx5uJ3UcIpvDckNEZEWEEFi089pWm9GRreDv4ShxIiLbw3JDRGRF9pzNxZH0IqiVcrz0QGup4xDZJJYbIiIrIYTA4l+uHfo9rncwvN14QUyi+mC5ISKyEntT8nAioxhOKgWe7xsqdRwim8VyQ0RkJVbuTQUAPHVPKzR3UUuchsh2sdwQEVmBkxnF2H8+Hwq5DBPuD5Y6DpFNY7khIrICK/deAAA81q0lAjydJE5DZNtYboiIJHalsBxbT1wFAEzmWBuiu8ZyQ0Qksc9+vwiDUeD+Nl7o7OcudRwim8dyQ0QkoeLyKmw8eBkAt9oQWQrLDRGRhL48cAnlegM6tnRDn7ZeUschsgssN0REEqmsMmDN/jQAwOS+IZDJeOVvIktguSEiksiWoxnIK9XBz12Dx7r5SR2HyG6w3BARScBoFFi579rh3xPuD4GDgh/HRJbC3yYiIgnEn8nBhdwyuGqUeCqildRxiOwKyw0RUSMTQuCTPecBAE9HBsFFrZQ4EZF9YbkhImpkiRcLcCS9CCqlnJdaIGoALDdERI3sk9+uXSBzeHgAvF01Eqchsj8sN0REjehUZjH2nM2FXMaT9hE1FJYbIqJGtOK36xfI9ENQc2eJ0xDZp3qNYtPpdDhw4AAuXbqE8vJytGjRAj169EBISIil8xER2Y2LeWXYejwTAPBCv9YSpyGyX2aVm/3792Pp0qX48ccfUVVVBXd3dzg6OqKgoAA6nQ6hoaGYPHkyXnjhBbi6ujZUZiIim7Tk13MwCuChDt7o5OcmdRwiu1Xn3VKPP/44Ro4cieDgYOzcuRMlJSXIz8/HlStXUF5ejpSUFMyaNQvx8fFo164dfvnll4bMTURkU85mleC/x65ttXn14XYSpyGyb3XecjNo0CB8++23cHBwqPXx0NBQhIaGYty4cUhOTsbVq1ctFpKIyNZ9+Ms5CAEM7OqLLv7uUschsmsyIYSQOkRj0mq1cHd3R3FxMdzcuFmYiBpe4sUCjPhPAmQyYOcrfdHWh7vticxlzve35EdLLV++HMHBwdBoNIiMjERiYuJt5y8qKsKUKVPQsmVLqNVqtGvXDtu2bWuktERE5jEYBeb+eAoA8NQ9rVhsiBqBRcvNsWPHoFAo6jz/xo0bERsbizlz5uDIkSMICwtDTEwMcnJyap1fr9fj4YcfRlpaGjZv3oyzZ89i1apV8Pf3t9RbICKyqE2HLuNUphauGiWmP8KxNkSNweIXNDFnL9fixYsxadIkjB8/HgCwYsUKbN26FatXr8aMGTNumn/16tUoKCjAH3/8YRr7Exwc
bJHcRESWlq2tRNzPZwAA0x5qi+YuaokTETUNZpWboUOH3vbx4uJiyGSyOj2XXq/H4cOHMXPmTNM0uVyO6OhoJCQk1LrMf//7X0RFRWHKlCn44Ycf0KJFC4wePRqvv/76LbcY6XQ66HQ6032tVlunfEREd0MIgRnfHkdxRRW6+rtjXO9gqSMRNRlm7Zb68ccfUVlZCXd391pvLi4udX6uvLw8GAwG+Pj41Jju4+ODrKysWpe5cOECNm/eDIPBgG3btuGtt97CBx98gH/961+3fJ24uLgaGQMDA+uckYiovr46kI7dZ3OhUsrxwYgwOCgkH+JI1GSYteWmY8eOePLJJzFx4sRaH09KSsJPP/1kkWC1MRqN8Pb2xsqVK6FQKBAeHo6MjAy8//77mDNnTq3LzJw5E7Gxsab7Wq2WBYeIGlRCaj7e/u+1QcT/fKQ92nEQMVGjMuu/EuHh4Thy5MgtH1er1WjVqlWdnsvLywsKhQLZ2dk1pmdnZ8PX17fWZVq2bIl27drV2AXVsWNHZGVlQa/X3zKTm5tbjRsRUUNJyS7Bi18dRrVRYHCYH57rw8vSEDU2s8rNihUr8P7779/y8Y4dO+LixYt1ei6VSoXw8HDEx8ebphmNRsTHxyMqKqrWZe677z6cP38eRqPRNO3cuXNo2bIlVCpVHd8FEVHDOJpeiOH/SUBReRXCAtzx/rBudR6HSESWY1a5UavVcHJystiLx8bGYtWqVfj8889x+vRpvPjiiygrKzMdPTV27NgaA45ffPFFFBQUYNq0aTh37hy2bt2Kd999F1OmTLFYJiIicxmMAp/uu4BRq/68VmwCPbBmfAQ0DnU/NQYRWU6dx9yUlZXB2dm5zk9cl/lHjhyJ3NxczJ49G1lZWejevTu2b99uGmScnp4Oufz/+1dgYCB27NiBV199Fd26dYO/vz+mTZuG119/vc65iIgsJb9Uh+2nsvBFwiWcySoBAPRv3wLLRveEs9riZ9ogojqq8+UXWrZsiWnTpmHcuHFo2bJlrfMIIfDrr79i8eLF6Nu3b42tLtaCl18goju5WlyBg2mFSMkuQV6pDtqKauiqDaisMqKyyoDKagMKy6qQUVRhWsZVo8QbAzviqXsCuSuKqAGY8/1d5/9a7NmzB2+88QbefvtthIWFoVevXvDz84NGo0FhYSGSk5ORkJAApVKJmTNn4vnnn7/rN0JE1FiMRoGdyVlY/XsaEtMK6rxcB19XDO8ViCd7+sPDiWP/iKyB2RfOTE9Px6ZNm7Bv3z5cunQJFRUV8PLyQo8ePRATE4MBAwaYdQmGxsYtN0T0vzKLKvDPzcew/3w+AEAuAzr7uaNrgDt8XDVwc1RC46CAxkEOjVIBjYMCzmolOrR0hZvGQeL0RE2DOd/fvCo4ETVph9IKMGHtQWgrq6FxkGPCfSEYGxUMX3eN1NGI6AYNslvqRrt370b//v3rFY6IyFr8kZqH5z4/hHK9AWEB7vhwZHeEtqj7mdaJyDrV63zgjz76KFq3bo1//etfuHz5sqUzERE1uDNZWkxce63Y9GnrhQ2To1hsiOxEvcpNRkYG/v73v2Pz5s0IDQ1FTEwMvvnmm1ueJZiIyJoUlesxed1hVFQZcH8bL3w6rhccVdY7VpCIzFOvcuPl5YVXX30VSUlJOHDgANq1a4eXXnoJfn5+ePnll3Hs2DFL5yQisgghBKZvOob0gnIEeDri41E9oFay2BDZk7u+TG3Pnj0xc+ZM/P3vf0dpaSlWr16N8PBw9OnTB6dOnbJERiIii/khKRO/ns6BSiHHf8aEw9OZh28T2Zt6l5uqqips3rwZAwcORFBQEHbs2IFly5YhOzsb58+fR1BQEIYPH27JrEREdyW/VIe5P177T9fLD7VBZz93iRMRUUOo19FSU6dOxddffw0hBMaMGYOFCxeiS5cupsednZ2xaNEi+Pn5WSwoEdHdenfbGRSWV6GDryue79da6jhE1EDqVW6Sk5Px8ccfY+jQ
oVCr1bXO4+Xlhd27d99VOCIiSzmVWYzvjl4BAMQN7QoHxV3vlSciK1Wv3+45c+Zg+PDhNxWb6upq7N27FwCgVCrRr1+/u09IRHSXhBCI23YGQgCDw/zQo5Wn1JGIqAHVq9z0798fBQU3X3uluLiYJ/cjIquzNyUPv5/Pg0ohx2sx7aWOQ0QNrF7lRghR61Vv8/Pz4ezsfNehiIgsRQiBD385BwAYExWEwGZOEiciooZm1piboUOHAgBkMhmeffbZGrulDAYDjh8/jt69e1s2IRHRXfj9fB6SLhdBrZTjBQ4iJmoSzCo37u7XDpsUQsDV1RWOjo6mx1QqFe69915MmjTJsgmJiOpJCIGlv6YAAJ6ODEIL19oPgCAi+2JWuVmzZg0AIDg4GNOnT+cuKCKyaokXC3DoUiFUSjme7xcqdRwiaiT1OhR8zpw5ls5BRGRxq/dfBAA82TMAPm4aidMQUWOpc7np2bMn4uPj4enpiR49etQ6oPi6I0eOWCQcEVF9peeXY2dyNgBgwn3B0oYhokZV53LzxBNPmAYQDxkypKHyEBFZxOcJaRAC6NuuBdr6uEodh4gakUwIIaQO0Zi0Wi3c3d1RXFwMNzc3qeMQUQMoqaxCVNwulOqqsXb8PXigvbfUkYjoLpnz/c3zjxOR3dl8+ApKddUIbeGMvm1bSB2HiBpZnXdLeXp63naczY1qO3sxEVFjMBoF1v6RBgAYf18I5PK6fW4Rkf2oc7lZsmRJA8YgIrKMXWdycCm/HG4aJZ7s6S91HCKSQJ3Lzbhx4xoyBxGRRVw//HtUZCs4qep1tgsisnF1/s3XarWmATxarfa283KgLhFJ4fRVLf5IzYdCLsPYqGCp4xCRRMwac3P16lV4e3vDw8Oj1vE31y+oaTAYLBqSiKgu1u5PAwA82sUX/h6Ot5+ZiOxWncvNrl270KxZMwDA7t27GywQEVF95Jfq8H1SBgCetI+oqatzuenXr1+tPxMRWYP1B9KhrzYiLMAdPVt5Sh2HiCRU79F2hYWF+Oyzz3D69GkAQKdOnTB+/HjT1h0iosairzZi3Z+XAAAT7g+p82kriMg+1eskfnv37kVwcDA++ugjFBYWorCwEB999BFCQkKwd+9eS2ckIrqtrScykVuig4+bGgO6tJQ6DhFJrF5bbqZMmYKRI0fik08+gUKhAAAYDAa89NJLmDJlCk6cOGHRkEREtyKEwKf7rh3+PebeIKiUPPE6UVNXr0+B8+fP4x//+Iep2ACAQqFAbGwszp8/b7FwRER3kpCaj1OZWjg6KPB0ZJDUcYjICtSr3PTs2dM01uZGp0+fRlhY2F2HIiKqq5X7LgAARvQKgKezSuI0RGQN6rxb6vjx46afX375ZUybNg3nz5/HvffeCwD4888/sXz5cixYsMDyKYmIanE2qwR7zuZCLrs2kJiICABkQghRlxnlcjlkMhnuNLu1n8TPnEumE5F1++emY9h0+AoGdPHFJ8+ESx2HiBqQOd/fdd5yc/HixbsORkRkKdnaSmz566R9k/qGSpyGiKxJnctNUBAH6hGR9VjxWyqqDAK9gjx50j4iquGuLpmbnJyM9PR06PX6GtMff/zxuwpFRHQ7OdpKrD+QDgCYFt1W4jREZG3qVW4uXLiAv/3tbzhx4kSNcTjXzwpqzWNuiMj2ffJbKnTVRoQHeeL+Nl5SxyEiK1OvQ8GnTZuGkJAQ5OTkwMnJCadOncLevXvRq1cv7Nmzx8IRiYj+X0ZRhWmrzavR7XipBSK6Sb223CQkJGDXrl3w8vKCXC6HXC7H/fffj7i4OLz88ss4evSopXMSEQEAFm4/A121EZEhzXBfm+ZSxyEiK1SvLTcGgwGurq4AAC8vL2RmZgK4Nuj47NmzlktHRHSDI+mF+CEpEzIZ8NZjnbjVhohqVa8tN126dMGxY8cQEhKCyMhILFy4ECqVCitXrkRoKA/JJCLLMxgF5v73FABgWM8AdPF3lzgREVmrepWbWbNm
oaysDADwzjvv4LHHHkOfPn3QvHlzbNy40aIBiYgAYNW+Czh2pRiuaiX+GdNe6jhEZMXqVW5iYmJMP7dp0wZnzpxBQUEBPD09uZmYiCwuJbsEi3eeAwC8NbgTvN00EiciImt2V+e5AYDLly8DAAIDA+86DBHR/yqprMKU9UegNxjRv30LDA8PkDoSEVm5eg0orq6uxltvvQV3d3cEBwcjODgY7u7umDVrFqqqqiydkYiaKINRYNqGJJzLLoW3qxoLnuzGrcNEdEf12nIzdepUfPfdd1i4cCGioqIAXDs8/O2330Z+fj4++eQTi4YkoqanymDEa5uPY9eZHKiVcqwa2ws+3B1FRHVQ56uC38jd3R0bNmzAgAEDakzftm0bRo0aheLiYosFtDReFZzI+hWU6fHqxiT8di4XSrkMH43qgYFdW0odi4gk1CBXBb+RWq1GcHDwTdNDQkKgUqnq85RERKisMmDL0Qy8t/0MCsur4OigwL+f6Yn+7b2ljkZENqRe5ebvf/875s2bhzVr1kCtVgMAdDod5s+fj7///e8WDUhEtk8IgfwyPXK0OpRUVkFXbfzrZoCuyoiCMj1OZhZj95kcaCurAQAdfF2xaHgYz2dDRGarc7kZOnRojfu//vorAgICEBYWBgA4duwY9Ho9HnroIcsmJCKbZDAK7DiVhf8mZSIxrQAFZfo6Lefv4Yhnewfj2fuC4aCo1zEPRNTE1bncuLvX/N/Tk08+WeM+DwUnout+Sc7GvJ+SkV5QbpomkwHNnVVwc3SAWqmAWim/dnNQwEWtQEdfN4QHeeLe0OaQy3lEFBHVX53LzZo1axoyBxHZgcoqA97+7ylsOHjt/FeeTg4YHdkKD3bwRld/D6iU3BJDRA3vrk7il5uba7pQZvv27dGiRQuLhCIi21OhN2DyF4ewLyUPMhkwuU8oXoluB0eVQupoRNTE1Ou/UWVlZZgwYQJatmyJvn37om/fvvDz88PEiRNRXl5+5yf4H8uXL0dwcDA0Gg0iIyORmJhYp+U2bNgAmUyGIUOGmP2aRGQ5umoDxq9NxL6UPDipFFg3IQIzB3ZksSEiSdSr3MTGxuK3337Djz/+iKKiIhQVFeGHH37Ab7/9hn/84x9mPdfGjRsRGxuLOXPm4MiRIwgLC0NMTAxycnJuu1xaWhqmT5+OPn361OctEJGFCCEwe8sp/HmhAC5qJdZNiECfttyKS0TSqddJ/Ly8vLB582Y88MADNabv3r0bI0aMQG5ubp2fKzIyEvfccw+WLVsGADAajQgMDMTUqVMxY8aMWpcxGAzo27cvJkyYgH379qGoqAhbtmyp0+vxJH5ElvXVgUt48/uTkMuAteMj0Lcdiw0RWZ4539/12nJTXl4OHx+fm6Z7e3ubtVtKr9fj8OHDiI6O/v9Acjmio6ORkJBwy+XeeecdeHt7Y+LEiXd8DZ1OB61WW+NGRJaRmluKuT8mAwD+GdOBxYaIrEK9yk1UVBTmzJmDyspK07SKigrMnTvXdK2pusjLy4PBYLipKPn4+CArK6vWZX7//Xd89tlnWLVqVZ1eIy4uDu7u7qYbD1knsgyjUeD1zcehrzaib7sWeKFfqNSRiIgA1PNoqSVLluDRRx+96SR+Go0GO3bssGjAG5WUlGDMmDFYtWoVvLy86rTMzJkzERsba7qv1WpZcIgs4Is/L+HQpUI4qxSIG9qVV+smIqtRr3LTtWtXpKSk4KuvvsKZM2cAAKNGjcLTTz8NR0fHOj+Pl5cXFAoFsrOza0zPzs6Gr6/vTfOnpqYiLS0NgwcPNk0zGo3X3ohSibNnz6J169Y1llGr1aZLRBCRZRSU6bFo57XTQMwY0AH+HnX/vSciamhml5uqqip06NABP/30EyZNmnRXL65SqRAeHo74+HjT4dxGoxHx8fG1XqOqQ4cOOHHiRI1ps2bNQklJCZYuXcotMkSNZOmv51BSWY1OLd0wOjJI6jhERDWYXW4cHBxqjLW5W7GxsRg3
bhx69eqFiIgILFmyBGVlZRg/fjwAYOzYsfD390dcXBw0Gg26dOlSY3kPDw8AuGk6ETWM8zml+PJAOgBg1qCOUPBSCURkZeq1W2rKlCl477338Omnn0KpvKuTHGPkyJHIzc3F7NmzkZWVhe7du2P79u2mQcbp6emQy3nKdiJr8cHOszAYBaI7+qB3m7qNfSMiakz1Os/N3/72N8THx8PFxQVdu3aFs7Nzjce/++47iwW0NJ7nhqj+TmUWY9BHv0MmA3a+0hdtfVyljkRETYQ539/12uzi4eFx01XBicj+Lf01BQDwWDc/FhsislpmlRuj0Yj3338f586dg16vx4MPPoi3337brCOkiMg2ncwoxs7kbMhlwLSH2kodh4jolswazDJ//ny88cYbcHFxgb+/Pz766CNMmTKlobIRkRVZ8VsqAGBwmB/aeLtInIaI6NbMKjfr1q3Dv//9b+zYsQNbtmzBjz/+iK+++sp0rhkisk/p+eXYduIqAOCFfq3vMDcRkbTMKjfp6ekYOHCg6X50dDRkMhkyMzMtHoyIrMenv1+AUQD92rVAx5YciE9E1s2sclNdXQ2NRlNjmoODA6qqqiwaioisR36pDt8cugwAeL4vrx9FRNbPrAHFQgg8++yzNS5nUFlZiRdeeKHG4eDWfCg4EZlnXcIlVFYZ0dXfHVGtm0sdh4jojswqN+PGjbtp2jPPPGOxMERkXcr11ViXkAYAeL5fKC+OSUQ2waxys2bNmobKQURWaNOhKygsr0KrZk54tPPNF7MlIrJGvK4BEdWq2mDEqn0XAACT+oRAqeDHBRHZBn5aEVGtfj6ZhSuFFWjmrMKw8ECp4xAR1RnLDRHdRAiBlXuvbbUZc28QHFUKiRMREdUdyw0R3eTPCwU4kVEMtVKOsVFBUschIjILyw0R3eT6WJvhvQLQ3EV9h7mJiKwLyw0R1ZCSXYJdZ3IgkwET7+dJ+4jI9rDcEFEN18faxHTyRYiX8x3mJiKyPiw3RGSSo63ElqQMAMDkftxqQ0S2ieWGiEw+238RVQaBXkGe6NnKU+o4RET1wnJDRACuXSDzi4RLAIAXH2gtcRoiovpjuSEiAMDKfRdQrjegq787HuzgLXUcIqJ6Y7khIuSX6rDuj2tbbV6JbssLZBKRTWO5ISKs3HcBFVUGdAvgVhsisn0sN0RNXB632hCRnWG5IWriPth5DhVVBoQFuKN/e261ISLbx3JD1IQlZ2qx8WA6AODNQZ241YaI7ALLDVETJYTAOz+dglEAg7q2RERIM6kjERFZBMsNURO16dAV/HmhACqlHDMGdJA6DhGRxbDcEDVBmUUVmPdTMgDgHw+3Q2AzJ4kTERFZDssNURNjMAq8tvk4SnTV6NHKA8/14TWkiMi+sNwQNTELfj6N38/nQeMgx/vDwqCQcxAxEdkXlhuiJmRDYjpW7bsIAFg0PAxtvF0kTkREZHksN0RNxLqENMz47gQA4O/92+Cxbn4SJyIiahhKqQMQUcMq11djwc9nsO6vK36Pvy8YsQ+3kzgVEVHDYbkhslMVegO+P5qBFb+lIr2gHAAw7aG2vMQCEdk9lhsiG5OjrcSFvDJkaytRUlmNyioDKvQGVFRdu1VWGZCWV44j6YXQVRsBAC3dNXh/WBjub+slcXoioobHckNkA9Lzy/HlgUvYcSoLl/LL67xcYDNHPNs7BCPvCYSLmr/uRNQ08NOOyIoVV1Rhwc+nseHgZQhxbZpcBgQ3d4a3mxpuGgc4qhRwdFBA46CA018/e7mqcU+wJ1q3cOEuKCJqclhuiKzUobQCTFl/BNlaHQCgb7sWeDqyFaJaN4ebxkHidERE1ovlhsgKbT95FS9vSIK+2ohQL2fEDe2KyNDmUsciIrIJLDdEVmb7ySy8+NURCAFEd/TGR6N6wEnFX1UiorriJyaRFTl8qQDTNhyFEMCw8AAsGNoVSgXPtUlEZA5+ahJZiavFFXju80PQVRsR3dGbxYaIqJ74yUlk
BQxGgWkbklBYXoXOfm74aFQPFhsionripyeRFVi26zwSLxbAWaXAstE9OcaGiOgusNwQSez0VS0+3pUCAJj/t64I8XKWOBERkW1juSGSkMEoMOO7E6g2CsR09sGQHv5SRyIisnksN0QS+vLPSzh2uQiuaiXeeaKL1HGIiOwCyw2RRIorqvDhr+cAAK8N6AAfN43EiYiI7APLDZFEVvyWiqLyKrT1dsGoewKljkNEZDdYbogkcLW4Aqt/vwgAeP3RDjzsm4jIgviJSiSBD385B121ERHBzfBQR2+p4xAR2RWWG6JGdi67BJsPXwEAzBjYATKZTOJERET2heWGqJG99/MZGAXwaGdf9GzlKXUcIiK7w3JD1IgOXMhH/JkcKOQy/PPR9lLHISKySyw3RI1ECIG4n88AAJ66JxCtW7hInIiIyD6x3BA1kp9PZiHpchGcVApMi24rdRwiIrtlFeVm+fLlCA4OhkajQWRkJBITE28576pVq9CnTx94enrC09MT0dHRt52fyBpUGYx4f8dZAMBzfULh7coT9hERNRTJy83GjRsRGxuLOXPm4MiRIwgLC0NMTAxycnJqnX/Pnj0YNWoUdu/ejYSEBAQGBuKRRx5BRkZGIycnqrsNiem4mFcGLxcVJvcNlToOEZFdkwkhhJQBIiMjcc8992DZsmUAAKPRiMDAQEydOhUzZsy44/IGgwGenp5YtmwZxo4de8f5tVot3N3dUVxcDDc3t7vOT3QnpbpqPPD+buSV6vHOE50xNipY6khERDbHnO9vSbfc6PV6HD58GNHR0aZpcrkc0dHRSEhIqNNzlJeXo6qqCs2aNWuomER3ZdXeC8gr1SO4uRNGRbSSOg4Rkd1TSvnieXl5MBgM8PHxqTHdx8cHZ86cqdNzvP766/Dz86tRkG6k0+mg0+lM97Vabf0DE5kpp6QSq/ZdAAD8M6YDHHiZBSKiBmfTn7QLFizAhg0b8P3330OjqX2AZlxcHNzd3U23wEBeoJAaz0fxKSjXGxAW6IGBXX2ljkNE1CRIWm68vLygUCiQnZ1dY3p2djZ8fW//RbBo0SIsWLAAO3fuRLdu3W4538yZM1FcXGy6Xb582SLZie7kTJYW6w+kAwBmDuBlFoiIGouk5UalUiE8PBzx8fGmaUajEfHx8YiKirrlcgsXLsS8efOwfft29OrV67avoVar4ebmVuNG1NCEEHjnx2QYBTCgiy/uDW0udSQioiZD0jE3ABAbG4tx48ahV69eiIiIwJIlS1BWVobx48cDAMaOHQt/f3/ExcUBAN577z3Mnj0b69evR3BwMLKysgAALi4ucHHhGV/JOuxMzsYfqflQKeV4Y2BHqeMQETUpkpebkSNHIjc3F7Nnz0ZWVha6d++O7du3mwYZp6enQy7//w1Mn3zyCfR6PYYNG1bjeebMmYO33367MaMT1UpXbcD8racBAJP6hCCwmZPEiYiImhbJz3PT2HieG2poH8WnYPEv5+Dtqsbu6Q/AWS35/yGIiGyezZznhsjenMsuwce7UgAAbw7qyGJDRCQBlhsiCzEYBV7bfBxVBoGHOnjj8TA/qSMRETVJLDdEFrI0PgVJl4vgqlZi/t+68tBvIiKJsNwQWcDec7mm3VHzhnSBrzuv+k1EJBWWG6K7dDGvDK9sTIIQwKiIVhjSw1/qSERETRrLDdFdyNZWYsxnB1BQpkcXfzfMGdxJ6khERE0eyw1RPaXnl2P0qj9xpbACQc2dsObZCGgcFFLHIiJq8nicKlE9/JGah6nrjyK/TI+W7hp8MSESLVzVUsciIiKw3BCZJaOoAkt/PYdvDl0BAHT2c8PqZ++BjxsHEBMRWQuWG6K/GIwCFVUGVOgNqKwymH4u1xtwNkuLfSl52H02B8a/zuk9OrIV3hzIE/UREVkbfipTk1NtMOKP1HzsT83D8cvFuFxYjrxSHSqrjHVavnfr5oh9uB16BTdr4KRERFQfLDfUZJTpqvHZ7xexLuES8kp1
t53X0UEBR5UCjg4KaBzk8Pd0QmRIM8R09kEbb9dGSkxERPXBckNNwk/HM/H2f08hr1QPAPB0csAjnXzRM8gDbbxd0MJFA1eNEo4qBdRKOc8uTERkw1huyK5VVhkwa8tJbD58bQBwUHMnxD7cDgO7toSDgmdCICKyRyw3ZLeKyvV47vNDOHSpEHIZMKV/G7z8UFuWGiIiO8dyQ3Ypr1SHUSv/REpOKdw0SqwYE47erb2kjkVERI2A5YbsTnFFFcZ+loiUnFL4umnw+YQItPflIGAioqaC5Ybsir7aiEnrDiH5qhZeLiqsnxSJ0BYuUsciIqJGxMEHZFfe/vEUEi8WwFWtxLoJLDZERE0Ryw3Zja8OXML6A+mQyYCPRvVAJz83qSMREZEEWG7ILhxKK8CcH04BAKY/0h79O3hLnIiIiKTCckM2r7BMj6lfH0W1UWBQt5Z46YHWUkciIiIJsdyQTRNC4PVvj+NqcSVCvZyx8MluPLswEVETx3JDNu3LA+nYmZwNlUKOj0b14BW6iYiI5YZs15ksLeb9lAwAeH1AB3Txd5c4ERERWQOWG7JJFXoDpq4/Cn21Ef3bt8CE+4KljkRERFaC5YZs0js/JSMlpxQtXNV4f3gYx9kQEZEJyw3ZnK3Hr+LrxGvns1kysju8XNRSRyIiIivCckM25XJBOWZ8dxwA8GK/1rivDS+GSURENbHckM2oMhgxbcNRlFRWo0crD7z6cDupIxERkRViuSGb8eEv53AkvQiuGiU+eqoHHBT850tERDfjtwPZhK3Hr+Lfe1IBAHFDuyKwmZPEiYiIyFqx3JDVO5VZjOmbjgEAnrs/BI9185M4ERERWTOWG7JqeaU6TF53GBVVBvRp64UZAzpIHYmIiKwcyw1ZLW1lFZ5dk4iMogqEeDlj2aieUHKcDRER3QG/KcgqlemqMWHNQZzM0KK5swqfjusFdycHqWMREZEN4FUGyerklugwYe1BnMgohptGiXUTI9C6hYvUsYiIyEaw3JBVOX6lCFPWH8Hlggo0c1ZhzbP3oLMfL4hJRER1x3JDVqGyyoBVey9gaXwKqo0Cgc0csW5CJEK8nKWORkRENoblhiRVWKbHlqQMrNp7AZnFlQCAgV19Efe3bhxjQ0RE9cJyQxZTZTCiqLwKJZVVKNVVo8ogYBQCBuMNNyGQW6LDpfwyHEorxNH0IugNRgCAn7sGrz3aAU909+NVvomIqN5YbqjecrSV2HUmB3+k5uNkZjEu5ZfDYBRmP0+nlm54KiIQI3oFQuOgaICkRETUlLDckFmEENhzNhdr/0jDvpRc1NZlXNVKuGiUcFDIoZDLIJcBSrkccrkMCjng6aRCgKcTugW4IyKkGY+EIiIii2K5oTr7PSUPcT+fxqlMrWla90AP9G3XAr2CPNHWxwXerhoo5NylRERE0mG5oTvKK9Xhze9PYMepbACAk0qBpyNb4enIIATzaCYiIrIyLDd0Wwmp+Zi24ShySnRQyGUYGxWElx9sC09nldTRiIiIasVyQ7UyGAU+3pWCj+JTYBRAG28XfDyqBzq2dJM6GhER0W2x3NBNcrSVeGVjEv5IzQcADA8PwNwnOsNJxX8uRERk/fhtRTXsS8nFqxuTkFeqh5NKgX8N6YKhPQOkjkVERFRnLDcEAKg2GLHk1xQs33MeQgAdfF2xbHRPtPHmYdpERGRbWG4IV4srMO3rJCSmFQAARke2wuzHOvGEekREZJNYbpq4naey8Pq3x1FYXgUXtRJxQ7ticJif1LGIiIjqjeWmiSrXV2PeT8n4OvEyAKCLvxuWjerJ89YQEZHNY7lpYoQQ+CU5G/O2JuNyQQVkMmBy31D84+H2UCnlUscjIiK6ayw3Tcjpq1rE/XwGe8/lArh2Fe5FI8LQu7WXxMmIiIgsh+XGzlUZjNh7LhfrD6Qj/kwOAEClkOO5PiGY0r8NnNX8J0BERPaF32w2QAiBKoOAgIAQ//vYX3/i2g+lldVIzS3D+dxS
HLxYgN/P56GgTA8AkMmAgV1aYnpMe4RwbA0REdkpqyg3y5cvx/vvv4+srCyEhYXh448/RkRExC3n37RpE9566y2kpaWhbdu2eO+99zBw4MBGTGw51QYjzueW4mSGFqm5pcgsqsDVokpkFlegVFeNyioDdNXGm0qNOZo7q/BEd388c28rhLbgeWuIiMi+SV5uNm7ciNjYWKxYsQKRkZFYsmQJYmJicPbsWXh7e980/x9//IFRo0YhLi4Ojz32GNavX48hQ4bgyJEj6NKliwTvoO4qqwxIyS7FqcxinMgoxslMLc5c1UJXbbTYa8hlQGAzJ7Ru4YLOfm7o07YFerTygIOCg4WJiKhpkAlxN9sE7l5kZCTuueceLFu2DABgNBoRGBiIqVOnYsaMGTfNP3LkSJSVleGnn34yTbv33nvRvXt3rFix4o6vp9Vq4e7ujuLiYri5We4ikPpqI/LLdCjXG1CuM6BEV4WrRZW4UliBi3mlSL6qRWpuGQzGm1e3i1qJTn5uaO/jigBPR/h5OMLPQwN3RweolQpoHBRQKeWQy67NL5Nd++Gvu5Bdnw4ZlAoZiwwREdkdc76/Jd1yo9frcfjwYcycOdM0TS6XIzo6GgkJCbUuk5CQgNjY2BrTYmJisGXLllrn1+l00Ol0pvtarfbug9fiaHohRq78847zeTo5oLOfOzr7u6GLnzu6+LsjqJkT5NebCxEREd0VSctNXl4eDAYDfHx8akz38fHBmTNnal0mKyur1vmzsrJqnT8uLg5z5861TODbcFIpoZTL4KRSwEmlhJNagZbuGvh7OKJVMyd08nNDp5bu8HFTm7a8EBERkeVJPuamoc2cObPGlh6tVovAwECLv04Xfzecf9c2BzUTERHZE0nLjZeXFxQKBbKzs2tMz87Ohq+vb63L+Pr6mjW/Wq2GWq22TODb4NYYIiIi6yDpyFOVSoXw8HDEx8ebphmNRsTHxyMqKqrWZaKiomrMDwC//PLLLecnIiKipkXy3VKxsbEYN24cevXqhYiICCxZsgRlZWUYP348AGDs2LHw9/dHXFwcAGDatGno168fPvjgAwwaNAgbNmzAoUOHsHLlSinfBhEREVkJycvNyJEjkZubi9mzZyMrKwvdu3fH9u3bTYOG09PTIZf//wam3r17Y/369Zg1axbeeOMNtG3bFlu2bLH6c9wQERFR45D8PDeNraHOc0NEREQNx5zvb57tjYiIiOwKyw0RERHZFZYbIiIisissN0RERGRXWG6IiIjIrrDcEBERkV1huSEiIiK7wnJDREREdoXlhoiIiOyK5JdfaGzXT8is1WolTkJERER1df17uy4XVmhy5aakpAQAEBgYKHESIiIiMldJSQnc3d1vO0+Tu7aU0WhEZmYmXF1dIZPJLPrcWq0WgYGBuHz5Mq9b1YC4nhsH13Pj4HpuPFzXjaOh1rMQAiUlJfDz86txQe3aNLktN3K5HAEBAQ36Gm5ubvzFaQRcz42D67lxcD03Hq7rxtEQ6/lOW2yu44BiIiIisissN0RERGRXWG4sSK1WY86cOVCr1VJHsWtcz42D67lxcD03Hq7rxmEN67nJDSgmIiIi+8YtN0RERGRXWG6IiIjIrrDcEBERkV1huSEiIiK7wnJjpuXLlyM4OBgajQaRkZFITEy87fybNm1Chw4doNFo0LVrV2zbtq2Rkto2c9bzqVOn8OSTTyI4OBgymQxLlixpvKA2zpz1vGrVKvTp0weenp7w9PREdHT0Hf/90zXmrOfvvvsOvXr1goeHB5ydndG9e3d88cUXjZjWdpn7+Xzdhg0bIJPJMGTIkIYNaEfMWddr166FTCarcdNoNA0bUFCdbdiwQahUKrF69Wpx6tQpMWnSJOHh4SGys7NrnX///v1CoVCIhQsXiuTkZDFr1izh4OAgTpw40cjJbYu56zkxMVFMnz5dfP3118LX11d8+OGHjRvYRpm7nkePHi2WL18ujh49Kk6fPi2effZZ4e7u
Lq5cudLIyW2Luet59+7d4rvvvhPJycni/PnzYsmSJUKhUIjt27c3cnLbYu56vu7ixYvC399f9OnTRzzxxBONE9bGmbuu16xZI9zc3MTVq1dNt6ysrAbNyHJjhoiICDFlyhTTfYPBIPz8/ERcXFyt848YMUIMGjSoxrTIyEjx/PPPN2hOW2fuer5RUFAQy00d3c16FkKI6upq4erqKj7//POGimgX7nY9CyFEjx49xKxZsxoint2oz3qurq4WvXv3Fp9++qkYN24cy00dmbuu16xZI9zd3Rsp3TXcLVVHer0ehw8fRnR0tGmaXC5HdHQ0EhISal0mISGhxvwAEBMTc8v5qX7rmcxnifVcXl6OqqoqNGvWrKFi2ry7Xc9CCMTHx+Ps2bPo27dvQ0a1afVdz++88w68vb0xceLExohpF+q7rktLSxEUFITAwEA88cQTOHXqVIPmZLmpo7y8PBgMBvj4+NSY7uPjg6ysrFqXycrKMmt+qt96JvNZYj2//vrr8PPzu6nA0/+r73ouLi6Gi4sLVCoVBg0ahI8//hgPP/xwQ8e1WfVZz7///js+++wzrFq1qjEi2o36rOv27dtj9erV+OGHH/Dll1/CaDSid+/euHLlSoPlbHJXBSeiu7dgwQJs2LABe/bsafiBgU2Qq6srkpKSUFpaivj4eMTGxiI0NBQPPPCA1NHsQklJCcaMGYNVq1bBy8tL6jh2LyoqClFRUab7vXv3RseOHfGf//wH8+bNa5DXZLmpIy8vLygUCmRnZ9eYnp2dDV9f31qX8fX1NWt+qt96JvPdzXpetGgRFixYgF9//RXdunVryJg2r77rWS6Xo02bNgCA7t274/Tp04iLi2O5uQVz13NqairS0tIwePBg0zSj0QgAUCqVOHv2LFq3bt2woW2UJT6jHRwc0KNHD5w/f74hIgLgbqk6U6lUCA8PR3x8vGma0WhEfHx8jUZ6o6ioqBrzA8Avv/xyy/mpfuuZzFff9bxw4ULMmzcP27dvR69evRojqk2z1L9no9EInU7XEBHtgrnruUOHDjhx4gSSkpJMt8cffxz9+/dHUlISAgMDGzO+TbHEv2mDwYATJ06gZcuWDRWTh4KbY8OGDUKtVou1a9eK5ORkMXnyZOHh4WE6pG3MmDFixowZpvn3798vlEqlWLRokTh9+rSYM2cODwWvA3PXs06nE0ePHhVHjx4VLVu2FNOnTxdHjx4VKSkpUr0Fm2Duel6wYIFQqVRi8+bNNQ7pLCkpkeot2ARz1/O7774rdu7cKVJTU0VycrJYtGiRUCqVYtWqVVK9BZtg7nr+Xzxaqu7MXddz584VO3bsEKmpqeLw4cPiqaeeEhqNRpw6darBMrLcmOnjjz8WrVq1EiqVSkRERIg///zT9Fi/fv3EuHHjasz/zTffiHbt2gmVSiU6d+4stm7d2siJbZM56/nixYsCwE23fv36NX5wG2POeg4KCqp1Pc+ZM6fxg9sYc9bzm2++Kdq0aSM0Go3w9PQUUVFRYsOGDRKktj3mfj7fiOXGPOas61deecU0r4+Pjxg4cKA4cuRIg+aTCSFEw20XIiIiImpcHHNDREREdoXlhoiIiOwKyw0RERHZFZYbIiIisissN0RERGRXWG6IiIjIrrDcEBERkV1huSEim/Lss89iyJAhUscgIivGC2cSkdWQyWS3fXzOnDlYunQpeO5RIrodlhsishpXr141/bxx40bMnj0bZ8+eNU1zcXGBi4uLFNGIyIZwtxQRWQ1fX1/Tzd3dHTKZrMY0FxeXm3ZLPfDAA5g6dSpeeeUVeHp6wsfHB6tWrUJZWRnGjx8PV1dXtGnTBj///HON1zp58iQGDBgAFxcX+Pj4YMyYMcjLy2vkd0xEDYHlhohs3ueffw4vLy8kJiZi6tSpePHFFzF8+HD07t0bR44cwSOPPIIxY8agvLwcAFBUVIQHH3wQPXr0wKFDh7B9+3ZkZ2djxIgREr8TIrIElhsisnlhYWGY
NWsW2rZti5kzZ0Kj0cDLywuTJk1C27ZtMXv2bOTn5+P48eMAgGXLlqFHjx5499130aFDB/To0QOrV6/G7t27ce7cOYnfDRHdLY65ISKb161bN9PPCoUCzZs3R9euXU3TfHx8AAA5OTkAgGPHjmH37t21jt9JTU1Fu3btGjgxETUklhsisnkODg417stkshrTrh+FZTQaAQClpaUYPHgw3nvvvZueq2XLlg2YlIgaA8sNETU5PXv2xLfffovg4GAolfwYJLI3HHNDRE3OlClTUFBQgFGjRuHgwYNITU3Fjh07MH78eBgMBqnjEdFdYrkhoibHz88P+/fvh8FgwCOPPIKuXbvilVdegYeHB+RyfiwS2TqZ4Kk+iYiIyI7wvyhERERkV1huiIiIyK6w3BAREZFdYbkhIiIiu8JyQ0RERHaF5YaIiIjsCssNERER2RWWGyIiIrIrLDdERERkV1huiIiIyK6w3BAREZFdYbkhIiIiu/J/GT4ZH/DHhxQAAAAASUVORK5CYII=", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "evolution_result = cudaq.evolve(hamiltonian,\n", " dimensions,\n", @@ -1722,31 +1357,10 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": null, "id": "40abb26a-e966-4dc2-aa07-11ef4248364d", "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Text(0.5, 1.0, 'Decoherence')" - ] - }, - "execution_count": 24, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjcAAAHHCAYAAABDUnkqAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjcsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvTLEjVAAAAAlwSFlzAAAPYQAAD2EBqD+naQAATuxJREFUeJzt3XlcVOXiBvBnZmBmZBtAZBUBQcV9ASXNtUhS00wrs3LLLEttoe4vLZVs08xKTW+WN61s0TIrczdcKsMNdxYVFcEFEJBdZpiZ9/cHOTeuaAwynFme7+czn+TMOTPPnBQe3vOec2RCCAEiIiIiOyGXOgARERFRQ2K5ISIiIrvCckNERER2heWGiIiI7ArLDREREdkVlhsiIiKyKyw3REREZFdYboiIiMiusNwQERGRXWG5ISKrlpmZCZlMhgULFkgdhYhsBMsNkQP7/PPPIZPJTA+1Wo3AwEDExcVh8eLFKC0tlToiEZHZnKQOQETSe+ONNxAWFoaqqirk5ORg165deOGFF/DBBx9g/fr16NSpk9QRiYjqjOWGiDBo0CBER0ebvp4xYwZ27NiB++67D8OGDUNaWhqaNGkiYULLKy8vh6urq9QxiKgB8LAUEdXqrrvuwqxZs3D+/Hl89dVXpuXp6el48MEH4e3tDbVajejoaKxfv/6G7YuKivDiiy8iNDQUKpUKzZs3x9ixY5Gfn29aJy8vDxMnToSfnx/UajU6d+6ML7744qaZPv30U4SHh0OlUqF79+44cODADevUJd/1w3G7d+/Gs88+C19fXzRv3tz0/ObNm9GnTx+4urrC3d0dQ4YMQUpKSo3XGD9+PNzc3HDx4kUMHz4cbm5uaNasGV5++WUYDIYa6xqNRixatAgdO3aEWq1Gs2bNcO+99+LgwYM11vvqq68QFRWFJk2awNvbG4888giys7Nvuj+IqHYsN0R0U2PGjAEAbNu2DQCQkpKCO+64A2lpaZg+fTref/99uLq6Yvjw4fjxxx9N25WVlaFPnz746KOPMHDgQCxatAiTJ09Geno6Lly4AAC4du0a+vfvj1WrVuGxxx7De++9B41Gg/Hjx2PRokU3ZPnmm2/w3nvv4emnn8Zbb72FzMxMjBgxAlVVVaZ16prvumeffRapqamYPXs2pk+fDgBYtWoVhgwZAjc3N7z77ruYNWsWUlNT0bt3b2RmZtbY3mAwIC4uDk2bNsWCBQvQr18/vP/++/j0009rrDdx4kS88MILCA4Oxrvvvovp06dDrVZj7969pnXefvttjB07Fq1atcIHH3yAF154AYmJiejbty+KiorM+L9GRBBE5LBWrlwpAIgDBw7cdB2NRiO6du0qhBDi7rvvFh07dhSVlZWm541Go+jVq5do1aqVadns2bMFALFu3bobXs9oNAohhFi4cKEAIL766ivTczqdTvTs2VO4ubmJkpISIYQQ586dEwBE06Z
NRWFhoWndn3/+WQAQv/zyi2lZXfNd/9y9e/cWer3etLy0tFR4enqKSZMm1cick5MjNBpNjeXjxo0TAMQbb7xRY92uXbuKqKgo09c7duwQAMRzzz13032RmZkpFAqFePvtt2s8f/z4ceHk5HTDciK6NY7cENEtubm5obS0FIWFhdixYwcefvhhlJaWIj8/H/n5+SgoKEBcXBxOnz6NixcvAgB++OEHdO7cGQ888MANryeTyQAAmzZtgr+/P0aPHm16ztnZGc899xzKysqwe/fuGtuNGjUKXl5epq/79OkDADh79iwAmJXvukmTJkGhUJi+3r59O4qKijB69GjT9vn5+VAoFIiJicHOnTtv+DyTJ0+u8XWfPn1Mma7vC5lMhoSEhJvui3Xr1sFoNOLhhx+u8b7+/v5o1apVre9LRDfHCcVEdEtlZWXw9fVFRkYGhBCYNWsWZs2aVeu6eXl5CAoKwpkzZzBy5Mhbvu758+fRqlUryOU1f8dq27at6fm/a9GiRY2vrxedq1evAoBZ+a4LCwur8fzp06cBVM83qo2Hh0eNr6/Pn/nfXNczAcCZM2cQGBgIb2/vWl/z+vsKIdCqVatan3d2dr7ptkR0I5YbIrqpCxcuoLi4GBERETAajQCAl19+GXFxcbWuHxERYbEsfx9h+TshBADUK9//ngF2/TVWrVoFf3//G7Z3cqr5LfNmmcxlNBohk8mwefPmWl/Tzc2tQd6HyFGw3BDRTa1atQoAEBcXh5YtWwKoHkWIjY295Xbh4eE4ceLELdcJCQnBsWPHYDQaa4zepKenm543hzn5biY8PBwA4OvrW+/XqO01t27disLCwpuO3oSHh0MIgbCwMLRu3bpB3pfIkXHODRHVaseOHXjzzTcRFhaGxx57DL6+vujfvz8++eQTXL58+Yb1r1y5YvrzyJEjcfTo0VrPULo+0jJ48GDk5ORgzZo1puf0ej0++ugjuLm5oV+/fmblNSffzcTFxcHDwwPvvPNOjbOwzHmN/zVy5EgIITBnzpwbnru+L0aMGAGFQoE5c+aYlv19nYKCArPfl8iRceSGiLB582akp6dDr9cjNzcXO3bswPbt2xESEoL169dDrVYDAJYuXYrevXujY8eOmDRpElq2bInc3FwkJSXhwoULOHr0KADgX//6F9auXYuHHnoITzzxBKKiolBYWIj169dj2bJl6Ny5M5566il88sknGD9+PJKTkxEaGoq1a9diz549WLhwIdzd3c3+HHXNdzMeHh74+OOPMWbMGHTr1g2PPPIImjVrhqysLGzcuBF33nknlixZYlamAQMGYMyYMVi8eDFOnz6Ne++9F0ajEb///jsGDBiAqVOnIjw8HG+99RZmzJiBzMxMDB8+HO7u7jh37hx+/PFHPPXUU3j55ZfN3h9EDkuq07SISHrXT4m+/lAqlcLf31/cc889YtGiRabTsf/uzJkzYuzYscLf3184OzuLoKAgcd9994m1a9fWWK+goEBMnTpVBAUFCaVSKZo3by7GjRsn8vPzTevk5uaKCRMmCB8fH6FUKkXHjh3FypUra7zO9VPB33vvvRuyABAJCQlm5/unU+B37twp4uLihEajEWq1WoSHh4vx48eLgwcPmtYZN26ccHV1vWHbhIQE8b/fWvV6vXjvvfdEZGSkUCqVolmzZmLQoEEiOTm5xno//PCD6N27t3B1dRWurq4iMjJSTJkyRZw8ebLWnERUO5kQ/zMGSkRERGTDOOeGiIiI7ArLDREREdkVlhsiIiKyKyw3REREZFdYboiIiMiusNwQERGRXXG4i/gZjUZcunQJ7u7upjvyEhERkXUTQqC0tBSBgYE33HD3fzlcubl06RKCg4OljkFERET1kJ2djebNm99yHYcrN9cv6Z6dnQ0PDw+J0xAREVFdlJSUIDg4uE63ZnG4cnP9UJSHhwfLDRERkY2py5QSTigmIiIiu8JyQ0RERHaF5YaIiIjsCssNERER2RWWGyIiIrI
rLDdERERkV1huiIiIyK6w3BAREZFdYbkhIiIiu8JyQ0RERHaF5YaIiIjsCssNERER2RWHu3EmERGRNdLpjbimM+BalQECAkqFHEonOVROCiidOBZhDpYbIiIiCyrT6pFTfA2XiiqRU1yJnJJK5JZUIrdEi7zS6j8XlOmgN4qbvoabygk+bko0dVOhmZsK/ho1AjRq+GvU8PdQI0DTBL4eKqidFY34yawXyw0REVEdCCFQoTOg6FoVrpbrUHytCsXXqlDy13+Lr1WhpLIKxdf0KKrQIbekEpeLK1FaqTfrfRRyGWRAjbJTptWjTKtHZkHFLbf1dlX+VXbU8HJVQukkN40AVRmMqKwyQltlQKXeYBoluv7fCl31n3V6I1TOCripFNC4KBHW1AUtm7mhfaAHuod5w0PtXJ/d16hYboiIiP6mQqfHiYslOJpdhJO5pcgqrEB2YQXyy7SoMtx8dOVW3NVOCNQ0MY20+GnU8PNQwc9dDT8PNXzclXBROqGJ838PQRmNAjqDEdoqIwrKtcgv06GgTIvckkrklGj/Kk/XTKNBlVVGFJbrUFiuQ+rlktvaB6VaPfLLABRU4Gh2kWm5XAZ0D/XGfZ0DcX+XQKstOjIhRP3+T9mokpISaDQaFBcXw8PDQ+o4RERkBbIKKrAtNQfbUnORfP4qDLc4RKRUyOHp4gxNE2d4ujjDQ139Z4+/Hpq/Hn4eKgT8VWjcVJYdSxBCoPhaFS7/VXRyiitRVFEFnd4IncGAKoOAk1wGtbMCamd59X+dFGiiVKCJswIuSgXUyur/KhVyVFYZUaHT40qpFucKynEmrxzJ5wtrjBy5qZwwqnswxvcKRbC3i0U/H2Dez2+WGyIickhVBiO2p+biq73n8eeZghrP+Xuo0TlYg/aBGoQ0dUGwtwv8PdTwdHFGE2cFZDKZRKmllV1Ygc0nLuO7gxeQkVcGAHCSy/D4HSF47u5W8HZVWuy9WW5ugeWGiMix6fRGfJ+cjaU7MnCpuBIAIJMBPVs2xT3t/BDb1q9RRiJsmRACu09dwfLfz2JPRnUxdFc74fm7W2F8r1A4KRr+7C6Wm1tguSEickxGo8BPRy7i/W2ncLHoGgDAx02FR7oH45EewWjuxUJTH3sy8vHWxjSk/TXPp12AB+aO6IjOwZ4N+j4sN7fAckNE5HiOXyhGwvoTOJRVBADwdVfhmf7hGN2jBU+fbgAGo8D3B7Mxd3M6iq9VwcdNiT9euatB9605P795thQREdmtgjItFmw7idUHsiEE4KJUYOpdEXjizjCWmgakkMvwSI8WiG3nh7c3pqF3hI+k+5flhoiI7I7BKPDNvvN4b+tJlPx1nZnhXQIxY3Bb+HmoJU5nv3zcVPhwVBepY7DcEBGRfTmUdRWzfz6BExf/Owdkzv3t0T3UW+Jk1FhYboiIyC7kl2nx3paTWHMwGwDgoXbCy3Ft8FhMCBRyxzx121Gx3BARkU0rrazC8t/P4bPfz6JcZwAAPBTVHK8MioSPm0ridCQFlhsiIrJJuSWV+GrveXy19zyuVlQBADo11yBhaDtEhfAQlCNjuSEiIptRrtVj18kr2HT8Mram5JhuLtmymSv+NbAN7u3g77BXD6b/YrkhIiKrVHytChevXkNmQTmOXSjGsQtFSD5/FVq90bRO91AvTLgzDAPb+Vnkqrhkm1huiIio0RVfq0JGXiky8sqQWVCBgjItCst1yC+rvqt1QZnWNH/mf4U0dcG97f0xtHMgOgRpGjk52QKWGyIiahQZeaVYd+gidp+6gpRLJXXaxttVieZeTdA+0AOdm3uiW4gXWvm68dAT3RLLDRERWYwQAnsyCrA48TT2ZxbWeC5Ao0aErxta+riimbsK3q4qNHVToqmrEt6uSvhr1HBR8scUmY9/a4iIyCIy88sx86cT+CMjH0D1JfoHtGmGIZ0CcGeED3zdeaVgsgyWGyIialBCCHy19zze2pgGrd4IpZMcj/Zogcn9wuGvYaEhy2O
5ISKiBnNNZ8D0dcfw85FLAIA7I5ri7eEdEerjKnEyciQsN0RE1CAKy3WY+MUBHM4qgkIuw/R7I/FknzBO/qVGx3JDRES3La+kEqOX78WZK+XQNHHGp2OiENOyqdSxyEFJfsWjpUuXIjQ0FGq1GjExMdi/f/8t11+4cCHatGmDJk2aIDg4GC+++CIqKysbKS0REf2v/DItHv3PPpy5Uo4AjRprJ/dksSFJSVpu1qxZg/j4eCQkJODQoUPo3Lkz4uLikJeXV+v633zzDaZPn46EhASkpaXhs88+w5o1a/Dqq682cnIiIgKAogodHv/PPmTklSFAo8Z3T/dEKz93qWORg5O03HzwwQeYNGkSJkyYgHbt2mHZsmVwcXHBihUral3/zz//xJ133olHH30UoaGhGDhwIEaPHv2Poz1ERNTwrukMGLfyANJzStHMXYWvn4xBsLeL1LGIpCs3Op0OycnJiI2N/W8YuRyxsbFISkqqdZtevXohOTnZVGbOnj2LTZs2YfDgwTd9H61Wi5KSkhoPIiK6PUajwItrjuBodhE8XZzxzZMxaNnMTepYRAAknFCcn58Pg8EAPz+/Gsv9/PyQnp5e6zaPPvoo8vPz0bt3bwghoNfrMXny5Fselpo7dy7mzJnToNmJiBzdu1vSsSUlB0qFHMvHRvNQFFkVyScUm2PXrl1455138O9//xuHDh3CunXrsHHjRrz55ps33WbGjBkoLi42PbKzsxsxMRGR/fl633l88ttZAMB7D3VC91BviRMR1STZyI2Pjw8UCgVyc3NrLM/NzYW/v3+t28yaNQtjxozBk08+CQDo2LEjysvL8dRTT+G1116DXH5jV1OpVFCpVA3/AYiIHNDuU1cw++cUAED8Pa1xf5cgiRMR3UiykRulUomoqCgkJiaalhmNRiQmJqJnz561blNRUXFDgVEoFACqL/dNRESWk55TgilfH4LBKDCiWxCm3RUhdSSiWkl6Eb/4+HiMGzcO0dHR6NGjBxYuXIjy8nJMmDABADB27FgEBQVh7ty5AIChQ4figw8+QNeuXRETE4OMjAzMmjULQ4cONZUcIiJqeHkllXhi5QGUafWICfPGvBGdeOVhslqSlptRo0bhypUrmD17NnJyctClSxds2bLFNMk4KyurxkjNzJkzIZPJMHPmTFy8eBHNmjXD0KFD8fbbb0v1EYiI7F6FTo+JXxzEpeJKtGzmik/GREHpZFNTNsnByISDHc8pKSmBRqNBcXExPDw8pI5DRGTVDEaBp1cdxK9pefB2VeLHZ3shpClvgkmNz5yf36zeRERUKyEE3tyQil/T8qB0qj7lm8WGbAHLDRER1eqD7afw+Z+ZAIAPH+6CqBAvaQMR1RHLDRER3WDpzgx8tCMDAPD60HYY0ilA4kREdSfphGIiIrIuQggs230W7209CQCYPigS4+8MkzgVkXlYboiICED1/aLe2piGFXvOAQBeiG2Fyf3CJU5FZD6WGyIiwtVyHeK/O4KdJ68AAF4b3BaT+raUOBVR/bDcEBE5uJ3peXjtx+O4VFwJlZMc8x/sxNsqkE1juSEiclApl4qx6NfT2JZafY+/MB9XLH20G9oF8hpgZNtYboiIHMjl4mvYdfIK1h+5hKSzBQAAhVyGJ+4MxQuxreGq4o8Fsn38W0xEZGeulutwrqAc2YUVuHD1Gi5crUB24TWcyy/HxaJrpvXkMuC+ToGYMiACbfzdJUxM1LBYboiIbJgQAimXSrAzPQ97zxXgZE4Z8su0N11fLgM6BGkwqEMAhnYOQHMvl0ZMS9Q4WG6IiGyQwSjw0+GLWLHnHFIuldzwfIBGjWAvFzT3boLmXi4I9mqCYG8XdAjSwI2HnsjO8W84EZGNOZx1FbN+PoETF6tLjcpJjn6tm6Fv62boEKRBhK8bCww5NP7tJyKyEUIIfPrbWczfehIGo4C72gmT+4Xj0R4t4OWqlDoekdVguSEisgFVBiNe+u4o1h+9BAAY1jkQs4e
2g4+bSuJkRNaH5YaIyMpp9QZM/eYwtqfmwlkhQ8LQ9ngspgVkMpnU0YisEssNEZEVMxoFXlxzBNtTc6F0kuOTMVEY0MZX6lhEVo3lhojIir27JR2bjudAqZDjs3HR6NOqmdSRiKyeXOoARERUu7XJF/DJb2cBAPMf7MRiQ1RHLDdERFboVG4pZv50HADw/N2tMLwrb2RJVFcsN0REVuaazoApXx9CZZURfVr54Pm7W0kdicimsNwQEVmZxTtO43ReGXzdVfhwVBfI5TwrisgcLDdERFbkVG4plv81z+btBzryOjZE9cByQ0RkJYxGgdd+PA69UWBgOz/c085P6khENonlhojISqxNvoADmVfholTg9WHtpY5DZLNYboiIrEBBmRbvbE4DAMTf0xqBnk0kTkRku1huiIiswNzN6SiqqELbAA+M7xUqdRwim8ZyQ0Qksb1nC7A2+QJkMuDtBzrAScFvzUS3g/+CiIgkpNMbMfOnEwCAR3u0QLcWXhInIrJ9LDdERBJa/vtZZOSVwcdNif+Li5Q6DpFdYLkhIpLI+YJyLE48DQCYdV87aFycJU5EZB9YboiIJCCEwOyfU6DVG9E7wgfDOgdKHYnIbrDcEBFJYNPxHOw+dQVKJzneHN4BMhlvsUDUUFhuiIgaWUllFeb8kgIAeLZ/OMJ8XCVORGRfWG6IiBrZB9tOIa9UizAfV0zuFy51HCK7w3JDRNSIjmYX4cukTADAW8M7QO2skDYQkR1iuSEiaiQ6vRGv/HAMRgE80DUId0b4SB2JyC6x3BARNZKPd51Bek4pvF2VmHVfO6njENktlhsiokZwKrcUS3ZWX9Pm9WHt4e2qlDgRkf1iuSEisjC9wYh/rT2GKoNAbFtfDO0UIHUkIrvGckNEZGGLEk/jaHYR3FVOvKYNUSNguSEisqCkMwVYsjMDAPDOiI4I0DSROBGR/WO5ISKykAtXKzDt20MQAhgVHYyhvMUCUaNguSEisoByrR5PfnEQ+WU6tA3wQMIwnh1F1FhYboiIGlhllQGTv0pGek4pfNxU+M+4aLgonaSOReQwWG6IiBpQZZUBk748iN9P58NFqcCnY6MQ5Ml5NkSNib9KEBE1kEtF1/D0qmQcv1iMJs4KrBzfHd1aeEkdi8jhsNwQEd0mIQTWH72EN35JRUG5Dl4uzvhkTDR6hHlLHY3IIbHcEBHVk9EosPvUFXy8+wz2nysEALQN8MCnY6IQ7O0icToix8VyQ0RUR5VVBmTklSE9pxSHs64iMS0POSWVAAClkxzTBkTgqX4toXLinb6JpMRyQ0RUi2s6Aw5nX8WJi8U4dqEYaZdLkFlQAYNR1FjPXeWEUd2DMaF3GCcOE1kJlhsior8IIbAnowBf7zuPXSev4FqV4YZ1PF2c0cbPHW0DPNCvdTP0DG8KtTNHaoisCcsNERGAo9lFSFifgiPZRaZlARo1ugR7omNzDdoFeKBtgAd83VW8NxSRlWO5ISKHZjAKLPz1FP696wwMRgGVkxyPdA/GQ9HBaB/owSJDZINYbojIYVVWGfDct4exLTUXADC0cyBm3dcWvu5qiZMR0e1guSEih1Sh02Pciv04kHkVSic53nuwE+7vEiR1LCJqACw3RORw9AYjpn1zGAcyr8JD7YT/jOvOC+4R2RGWGyJyOHN+SUVieh5UTnKsnNAdUSEsNkT2hDfOJCKHsv7oJazaex4yGbDoka4sNkR2iOWGiBxGZn45Xl13HAAwdUAE7u3gL3EiIrIElhsicghGo8DL3x9FmVaPHqHeeP7uVlJHIiILYbkhIofw9f4sHDx/Fa5KBT58pAucFPz2R2Sv+K+biOxeTnEl5m9OBwD8K64N7wFFZOdYbojI7s3dnIZSrR5dgj0xpmeo1HGIyMJYbojIriWfv4qfj1yCTAa8NbwDFHLeToHI3rHcEJHdMhoF3tyQCgB4OCoYHYI0EiciosYgeblZunQpQkNDoVarERMTg/37999y/aKiIky
ZMgUBAQFQqVRo3bo1Nm3a1EhpiciWbD6RgyPZRXBVKvBSXGup4xBRI5H0CsVr1qxBfHw8li1bhpiYGCxcuBBxcXE4efIkfH19b1hfp9Phnnvuga+vL9auXYugoCCcP38enp6ejR+eiKza9bt9A8CTfVryZphEDkTScvPBBx9g0qRJmDBhAgBg2bJl2LhxI1asWIHp06ffsP6KFStQWFiIP//8E87OzgCA0NDQxoxMRDZi4/HLOJ1XBg+1Eyb2CZM6DhE1IskOS+l0OiQnJyM2Nva/YeRyxMbGIikpqdZt1q9fj549e2LKlCnw8/NDhw4d8M4778BgMNz0fbRaLUpKSmo8iMi+GYwCi/4atZnUpyU81M4SJyKixiRZucnPz4fBYICfn1+N5X5+fsjJyal1m7Nnz2Lt2rUwGAzYtGkTZs2ahffffx9vvfXWTd9n7ty50Gg0pkdwcHCDfg4isj6/HL2EM1fK4enijPF3hkodh4gameQTis1hNBrh6+uLTz/9FFFRURg1ahRee+01LFu27KbbzJgxA8XFxaZHdnZ2IyYmosamNxixKPE0gOpRG3eO2hA5HMnm3Pj4+EChUCA3N7fG8tzcXPj7134zu4CAADg7O0OhUJiWtW3bFjk5OdDpdFAqlTdso1KpoFKpGjY8EVmtn49cwrn8cni7KjGuV6jUcYhIApKN3CiVSkRFRSExMdG0zGg0IjExET179qx1mzvvvBMZGRkwGo2mZadOnUJAQECtxYaIHIveYMTiHdWjNk/1bQk3laTnTBCRRCQ9LBUfH4/ly5fjiy++QFpaGp555hmUl5ebzp4aO3YsZsyYYVr/mWeeQWFhIZ5//nmcOnUKGzduxDvvvIMpU6ZI9RGIyIqsO3wR5wsq0NRVibE9Q6SOQ0QSkfTXmlGjRuHKlSuYPXs2cnJy0KVLF2zZssU0yTgrKwty+X/7V3BwMLZu3YoXX3wRnTp1QlBQEJ5//nm88sorUn0EIrISVQYjPvpr1GZyv3C4KDlqQ+SoZEIIIXWIxlRSUgKNRoPi4mJ4eHhIHYeIGsjq/VmYvu44fNxU+P3/BqCJUvHPGxGRzTDn57dNnS1FRFQbnd6Ij3ZkAACe6R/OYkPk4FhuiMjmfZ+cjYtF1+DrrsJjMS2kjkNEEmO5ISKbptUbsOSvUZtn+4dD7cxRGyJHx3JDRDZt9f5sXC6uhL+HGo/04KgNEbHcEJENK9fqTXNtptwVwVEbIgLAckNENmzlnnPIL9MipKkLHunO+8YRUTWWGyKySVfLdfhk91kAQPw9reGs4LczIqrG7wZEZJOW7T6DUq0ekf7uGNopUOo4RGRFWG6IyOZcLLqGz//MBAD8371tIJfLpA1ERFaF5YaIbM47m9Kg1RvRI8wbA9r4Sh2HiKwMyw0R2ZQ/z+Rj47HLkMuA14e2h0zGURsiqonlhohshk5vxJz1qQCAx2JC0C6Q94cjohux3BCRzVi6MwMnc0vh7arESwNbSx2HiKwUyw0R2YSUS8VYurP6gn1v3N8eni5KiRMRkbVyqs9GWq0W+/btw/nz51FRUYFmzZqha9euCAsLa+h8REQo1+rx4poj0BsFBnXwx5COAVJHIiIrZla52bNnDxYtWoRffvkFVVVV0Gg0aNKkCQoLC6HVatGyZUs89dRTmDx5Mtzd3S2VmYgciBAC09cdx6ncMvi6q/DG/R04iZiIbqnOh6WGDRuGUaNGITQ0FNu2bUNpaSkKCgpw4cIFVFRU4PTp05g5cyYSExPRunVrbN++3ZK5ichBfPLbWfxy9BKc5DIsfawbmrmrpI5ERFauziM3Q4YMwQ8//ABnZ+dan2/ZsiVatmyJcePGITU1FZcvX26wkETkmL7edx7zNqcDAF4b0hbdQ70lTkREtkAmhBBSh2hMJSUl0Gg0KC4uhocHTyMlskZCCHz2xzm8vSkNQgDP9A/HK/dGSh2LiCRkzs/vek0oJiKylDKtHq+vT8Ha5AsAgPG9QvF
/cW0kTkVEtqRBy83Ro0fRrVs3GAyGhnxZInIAeoMRG45dxtzNacgt0UIuA14b0g5P3BnKCcREZJYGH7lxsKNcRHQbSiqrcDCzEPvOFuKXo5dwqbgSABDS1AVzH+iIXhE+EickIltkVrkZMWLELZ8vLi7mb1hEdFNFFTrsO1ddZvadK0Da5RIY//b7kI+bEuN7heLJPi2hdlZIF5SIbJpZ5eaXX37BPffcAz8/v1qf5+EoIvpfV8t1WJt8AZtPXMaR7KIaZQYAQpu6ICasKXpFNEVce3+WGiK6bWaVm7Zt22LkyJGYOHFirc8fOXIEGzZsaJBgRGTbiq9VYeGvp/DNvixo9UbT8ghfN8SEeSOmZVPEhHnDz0MtYUoiskdmlZuoqCgcOnTopuVGpVKhRYsWDRKMiGzXtpQcTF93HIXlOgBA+0APPNKjBe6O9EWgZxOJ0xGRvTPrOjdarRYGgwEuLi6WzGRRvM4NkeUYjQILfz2FxTuqb3AZ4euG2fe1Q59WPpyPR0S3xWLXuVGpeNlzIqqdEAKz15/AV3uzAFRfn+a1IW3hrKjzXV6IiBpEnctNeXk5XF1d6/zC5q5PRLZLCIG3Nqbhq71ZkMmAd0d0wsPdg6WORUQOqs6/UkVERGDevHm3vGeUEALbt2/HoEGDsHjx4gYJSETW75v9Wfjsj3MAWGyISHp1HrnZtWsXXn31Vbz++uvo3LkzoqOjERgYCLVajatXryI1NRVJSUlwcnLCjBkz8PTTT1syNxFZicNZV/H6+hQAwP/d24bFhogkZ/aNM7OysvD999/j999/x/nz53Ht2jX4+Piga9euiIuLw6BBg6BQWO91KjihmKjhlGn1iPvwN1wsuoa49n5Y9ngUJw4TkUWY8/ObdwUnonp77cfj+HpfFoK9m2DTc33grnaWOhIR2Slzfn7X6zSGnTt31isYEdmPPzPy8fW+6jOj3h3ZicWGiKxGvcrNvffei/DwcLz11lvIzs5u6ExEZOV0eiNm/nQCAPBYTAv0CucNLonIetSr3Fy8eBFTp07F2rVr0bJlS8TFxeG7776DTqdr6HxEZIW+TMrE2fxy+Lgp8cqgSKnjEBHVUK9y4+PjgxdffBFHjhzBvn370Lp1azz77LMIDAzEc889h6NHjzZ0TiKyEvllWixKPA0A+FdcG3jwcBQRWZnbvnRot27dMGPGDEydOhVlZWVYsWIFoqKi0KdPH6SkpDRERiKyIkt2ZKC0Uo8OQR54MIqnfROR9al3uamqqsLatWsxePBghISEYOvWrViyZAlyc3ORkZGBkJAQPPTQQw2ZlYgkll+mxeoD1ZOIX7k3Ego5T/smIutj1r2lrps2bRq+/fZbCCEwZswYzJ8/Hx06dDA97+rqigULFiAwMLDBghKR9D774xwqq4zoHOyJ3hGcRExE1qle5SY1NRUfffQRRowYcdObafr4+PCUcSI7UlxRhVVJ5wEAUwdE8GJ9RGS16nVYKiEhAQ899NANxUav1+O3334DADg5OaFfv363n5CIrMIXSZko0+oR6e+OuyN9pY5DRHRT9So3AwYMQGFh4Q3Li4uLMWDAgNsORUTWpVyrx4o91TfGfHZABOSca0NEVqxe5UYIUeuQdEFBAVxdXW87FBFZl2/2ZaGoogqhTV0wpGOA1HGIiG7JrDk3I0aMAADIZDKMHz++xmEpg8GAY8eOoVevXg2bkIgkVVllwKe/nwUAPNM/nGdIEZHVM6vcaDQaANUjN+7u7mjSpInpOaVSiTvuuAOTJk1q2IREJKnvky/gSqkWgRo1HujaXOo4RET/yKxys3LlSgBAaGgoXn75ZR6CIrJzVQYjlu06AwB4qm9LKJ1u+7qfREQWV69TwRMSEho6BxFZoZ+PXMLFomvwcVPikR4tpI5DRFQndS433bp1Q2JiIry8vNC1a9dbXuPi0KFDDRKOiKRjMAr8e1cGAGBi75ZQOyskTkREVDd1Ljf333+/aQLx8OHDLZWHiKz
ElhM5OHulHB5qJzx+B0dtiMh2yIQQQuoQjamkpAQajQbFxcXw8PCQOg6RVRJCYPDiP5B2uQTP3d0K8fe0ljoSETk4c35+c3YgEd1g58k8pF0ugYtSgQm9QqWOQ0RkljoflvLy8qrzvWRqu3oxEdkGIQSW7Kiea/P4HSHwclVKnIiIyDx1LjcLFy60YAwishZJZwtwKKsISic5nuwdJnUcIiKz1bncjBs3zpI5iMhKLN1ZPWozKjoYvh5qidMQEZmvzuWmpKTENIGnpKTklutyoi6RbTqcdRV7MgrgJJfh6X4tpY5DRFQvZs25uXz5Mnx9feHp6Vnr/JvrN9Q0GAwNGpKIGsdHf821Gd41CM29XCROQ0RUP3UuNzt27IC3tzcAYOfOnRYLRETSOJJdhB3peVDIZZgyIELqOERE9VbnctOvX79a/0xE9mHhr6cAAMO7BCHMh/eNIyLbVa97SwHA1atX8dlnnyEtLQ0A0K5dO0yYMME0ukNEtuNQ1lXsOnkFCrkMz93NURsism31uojfb7/9htDQUCxevBhXr17F1atXsXjxYoSFheG3335r6IxEZGELfz0NABjRNQghTTlqQ0S2rV4jN1OmTMGoUaPw8ccfQ6GovpmewWDAs88+iylTpuD48eMNGpKILOdAZiF+O1U9ajP1Lo7aEJHtq9fITUZGBl566SVTsQEAhUKB+Ph4ZGRkNFg4IrIso1HgrY3Vh5Yfjm7OURsisgv1KjfdunUzzbX5u7S0NHTu3Pm2QxFR4/jl2CUczS6Cq1KBF3lzTCKyE3U+LHXs2DHTn5977jk8//zzyMjIwB133AEA2Lt3L5YuXYp58+Y1fEoianAVOj3mbzkJAHimfzh83Xk1YiKyDzIhhKjLinK5HDKZDP+0urVfxM+cW6YT2bO3N6Zi+e/nEOTZBL/G90MTpeKfNyIikog5P7/rfFjq3LlzOHv2LM6dO3fLx9mzZ80OvHTpUoSGhkKtViMmJgb79++v03arV6+GTCbD8OHDzX5PIkd2/EIxPvvjHADgreEdWGyIyK7U+bBUSEiIRQKsWbMG8fHxWLZsGWJiYrBw4ULExcXh5MmT8PX1vel2mZmZePnll9GnTx+L5CKyV5VVBvxr7VEYBTC0cyAGRN783xkRkS2q82Gp2qSmpiIrKws6na7G8mHDhtX5NWJiYtC9e3csWbIEAGA0GhEcHIxp06Zh+vTptW5jMBjQt29fPPHEE/j9999RVFSEn376qU7vx8NS5Ohe+/E4vt6XhaauSmx5oS+auaukjkRE9I/M+fldr+vcnD17Fg888ACOHz9eYx7O9Ztp1nXOjU6nQ3JyMmbMmGFaJpfLERsbi6SkpJtu98Ybb8DX1xcTJ07E77//Xp+PQOSQfj5yEV/vy4JMBnw4qguLDRHZpXqdCv78888jLCwMeXl5cHFxQUpKCn777TdER0dj165ddX6d/Px8GAwG+Pn51Vju5+eHnJycWrf5448/8Nlnn2H58uV1eg+tVouSkpIaDyJHtCcjH//6vvqsxyn9I9C3dTOJExERWUa9yk1SUhLeeOMN+Pj4QC6XQy6Xo3fv3pg7dy6ee+65hs5oUlpaijFjxmD58uXw8fGp0zZz586FRqMxPYKDgy2Wj8haJZ8vxFNfHoTOYMTgjv68pg0R2bV6HZYyGAxwd3cHAPj4+ODSpUto06YNQkJCcPLkyTq/jo+PDxQKBXJzc2ssz83Nhb+//w3rnzlzBpmZmRg6dKhpmdForP4gTk44efIkwsPDa2wzY8YMxMfHm74uKSlhwSGHsvHYZbz43RHo9Eb0Cm+KD0d1gUIukzoWEZHF1KvcdOjQAUePHkVYWBhiYmIwf/58KJVKfPrpp2jZsmWdX0epVCIqKgqJiYmm07mNRiMSExMxderUG9aPjIy84b5VM2fORGlpKRYtWlRraVGpVFCpOK+AHE9pZRXmbk7HN/uyAACxbX2x6JGuUDnxtG8ism/1KjczZ85EeXk
5gOrJvffddx/69OmDpk2bYs2aNWa9Vnx8PMaNG4fo6Gj06NEDCxcuRHl5OSZMmAAAGDt2LIKCgjB37lyo1Wp06NChxvaenp4AcMNyIkdVptXj231Z+PT3s7hSqgUATOwdhlcHt+WIDRE5hHqVm7i4ONOfIyIikJ6ejsLCQnh5eZnOmKqrUaNG4cqVK5g9ezZycnLQpUsXbNmyxTTJOCsrC3J5vaYGETmMC1crkJiWh8T0POw9UwCdofpwbUhTF8wb0Qk9w5tKnJCIqPHc1nVuACA7OxsAbGYeC69zQ/ZACIHD2UX4NTUXiWl5OJlbWuP58GaueKpvSzzQtTmUTvzlgIhsn8Wvc6PX6zFnzhwsXrwYZWVlAAA3NzdMmzYNCQkJcHZ2rs/LEtE/KK6owpdJmfg++QKyCitMy+UyIDrUG3dH+uLutn4Ib+Zq9igqEZG9qFe5mTZtGtatW4f58+ejZ8+eAKpPD3/99ddRUFCAjz/+uEFDEjk6nd6IT3afwSe/nUWZVg8AcFUqcFdbP8S29UW/1s3g6aKUOCURkXWo12EpjUaD1atXY9CgQTWWb9q0CaNHj0ZxcXGDBWxoPCxFtiY9pwQvrD6C9JzqQ0+R/u54ul9L3Ns+gDe8JCKHYfHDUiqVCqGhoTcsDwsLg1LJ3x6JGsruU1cw5etDKNPq4e2qxOz72mFY50DIedYTEdFN1Wum4dSpU/Hmm29Cq9Walmm1Wrz99tu1Xp+GiMy34dglPPH5AZRp9bijpTe2vdgXw7sGsdgQEf2DOo/cjBgxosbXv/76K5o3b47OnTsDAI4ePQqdToe77767YRMSOaDdp67gxTVHYDAKjOgahHkjO/GsJyKiOqpzudFoNDW+HjlyZI2vbeVUcCJrl3a5BJNXJaPKIDC0cyAWPNSZozVERGaoc7lZuXKlJXMQEapvmfDs14dwrcqA3hE+eJ/FhojIbPWaUHzdlStXTDfKbNOmDZo1a9YgoYgckRACM9Ydx7n8cgRq1PhodFceiiIiqod6fecsLy/HE088gYCAAPTt2xd9+/ZFYGAgJk6ciIqKin9+ASK6wYZjl7Hh2GU4yWVY8lg3eLnyzEMiovqoV7mJj4/H7t278csvv6CoqAhFRUX4+eefsXv3brz00ksNnZHI7hWW6/D6+hQAwNS7ItCthZfEiYiIbFe9Dkv98MMPWLt2Lfr3729aNnjwYDRp0gQPP/wwr1BMZKa3NqSioFyHNn7ueLZ/hNRxiIhsWr1GbioqKkx37f47X19fHpYiMtOhrKtYd/giZDLg3Qd5yjcR0e2q13fRnj17IiEhAZWVlaZl165dw5w5c0z3miKifyaEwFsbUgEAD3Zrji7BntIGIiKyA/U6LLVw4ULce++9N1zET61WY+vWrQ0akMiebTx+GYeyitDEWYGX49pIHYeIyC7Uq9x07NgRp0+fxtdff4309HQAwOjRo/HYY4+hSZMmDRqQyF5VVhkwb3P1v5/J/cLh56GWOBERkX0wu9xUVVUhMjISGzZswKRJkyyRicghfJmUiQtXr8HfQ41JfcOkjkNEZDfMnnPj7OxcY64NEZmvQqfHst1nAQDxA1vDRXlb19MkIqK/qdeE4ilTpuDdd9+FXq9v6DxEDmFV0nkUlusQ0tQFI7oGSR2HiMiu1OvXxQMHDiAxMRHbtm1Dx44d4erqWuP5devWNUg4IntUodPjk9+qR22m3dUKTgqe+k1E1JDqVW48PT1vuCs4EdXN30dthncJlDoOEZHdMavcGI1GvPfeezh16hR0Oh3uuusuvP766zxDiqiOOGpDRGR5Zn1nffvtt/Hqq6/Czc0NQUFBWLx4MaZMmWKpbER2h6M2RESWZ1a5+fLLL/Hvf/8bW7duxU8//YRffvkFX3/9NYxGo6XyEdkNjtoQETUOs767ZmVlYfDgwaavY2NjIZPJcOnSpQYPRmRvvt6bxVEbIqJGYFa50ev1UKtrXkXV2dkZVVVVDRqKyN5UVhl
MozZT+kdw1IaIyILMmlAshMD48eOhUqlMyyorKzF58uQap4PzVHCimlbvz0J+mRZBnk0wnNe1ISKyKLPKzbhx425Y9vjjjzdYGCJ7pNUbTFcjntw/HEonjtoQEVmSWeVm5cqVlspBZLfWJl9ATkkl/DxUeCiqudRxiIjsHn+FJLKgKoMRH+86AwB4um841M4KiRMREdk/lhsiC/rp8EVcuHoNPm5KjO7RQuo4REQOgeWGyEIMRoF//zVq82Sflmii5KgNEVFjYLkhspANxy7hXH45PF2c8fgdIVLHISJyGCw3RBZQZTBi4a+nAQAT7wyDm6pe96glIqJ6YLkhsoDvDmbjXH45vF2VGH9nqNRxiIgcCssNUQO7pjNg0V+jNlMHRMBd7SxxIiIix8JyQ9TAVv55DnmlWjT3aoLH7uAZUkREjY3lhqgBFVXoTNe1ib+nNVROPEOKiKixsdwQNaAPt59CaaUekf7uuL8L7yFFRCQFlhuiBpJyqRir9p4HAMy+rx0UcpnEiYiIHBPLDVEDMBoFZv+cAqMA7usUgF4RPlJHIiJyWCw3RA3gy6RMJJ+/ChelAq8NaSt1HCIih8ZyQ3Sbzl4pw7wt6QCA6YMiEaBpInEiIiLHxnJDdBu0egNe/O4oKquM6B3hg8djeJsFIiKpsdwQ3YbX16fiaHYRPNROmP9gJ8g5iZiISHIsN0T1tCopE9/uz4JMBiwe3RWBnjwcRURkDVhuiOph3aELmL0+BQDw8sA26N/GV+JERER0HcsNkZm+O5iNl78/CiGAx+9ogWf7h0sdiYiI/sZJ6gBEtqLKYMS8zen47I9zAIBR0cF4Y1gHyGScZ0NEZE1Ybojq4HDWVcxYdxzpOaUAgOfvboXn727FCcRERFaI5YboJoxGgYPnr+LT387i17RcAICXizPeeaAjBnUMkDgdERHdDMsN0d8IIZByqQS/HL2EDccu42LRNQCATAaM6Nocrw1pC29XpcQpiYjoVlhuiABkF1bg++QL2HD0Es7ml5uWu6mcMLijP57uF47wZm4SJiQiorpiuSGHZTQKbE/LxVd7z+P30/mm5SonOe5u64thnQPRv40v1M4KCVMSEZG5WG7IISWdKcDbm1Jx4mKJaVmfVj4Y0S0I97Tzh5uK/zSIiGwVv4OTQ6nQ6TF3UzpW7T0PAHBVKjC2Vyge7dECwd4uEqcjIqKGwHJDDuNi0TVM/PyA6XTuR2Na4KV7WqOpm0riZERE1JBYbsghpFwqxviVB3ClVAsfNxU+HNUZfVo1kzoWERFZAMsN2b1TuaUY89l+FJbrEOnvjv+Mi0ZzLx6CIiKyVyw3ZNeyCyvw2H/2obBch07NNfjqyRh4qJ2ljkVERBbEG2eS3SrX6jHpy4O4UqpFpL87vnyiB4sNEZEDYLkhuySEQPx3R5CeUwofNxVWTugOTxdeWZiIyBGw3JBd+vzPTGxNyYVSIccnY6IQoGkidSQiImokLDdkd9Iul2DupnQAwKuDIxEV4iVxIiIiakwsN2RXdHojXlh9BDqDEXdF+mJcr1CpIxERUSNjuSG7smz3GZzMLUVTVyXee7ATZDKZ1JGIiKiRWUW5Wbp0KUJDQ6FWqxETE4P9+/ffdN3ly5ejT58+8PLygpeXF2JjY2+5PjmOjLwyLNmRAQCYPbQdrzxMROSgJC83a9asQXx8PBISEnDo0CF07twZcXFxyMvLq3X9Xbt2YfTo0di5cyeSkpIQHByMgQMH4uLFi42cnKyJ0Sjw6rrj0BmM6N+mGYZ1DpQ6EhERSUQmhBBSBoiJiUH37t2xZMkSAIDRaERwcDCmTZuG6dOn/+P2BoMBXl5eWLJkCcaOHfuP65eUlECj0aC4uBgeHh63nZ+sw7f7szBj3XG4KBXY9mJfXoGYiMjOmPPzW9KRG51Oh+TkZMTGxpqWyeVyxMbGIikpqU6vUVFRgaqqKnh7e1sqJlm54ooqzN9SfXZU/D2tWWyIiBycpLdfyM/Ph8F
ggJ+fX43lfn5+SE9Pr9NrvPLKKwgMDKxRkP5Oq9VCq9Wavi4pKal/YLJKCxNP4WpFFVr7uWE8z44iInJ4ks+5uR3z5s3D6tWr8eOPP0KtVte6zty5c6HRaEyP4ODgRk5JlnQ6txRfJp0HAMy+rz2cFDb9V5qIiBqApD8JfHx8oFAokJubW2N5bm4u/P39b7ntggULMG/ePGzbtg2dOnW66XozZsxAcXGx6ZGdnd0g2Ul6Qgi8sSEVBqPAPe380LuVj9SRiIjICkhabpRKJaKiopCYmGhaZjQakZiYiJ49e950u/nz5+PNN9/Eli1bEB0dfcv3UKlU8PDwqPEg+7AjPQ+/n86HUiHHa4PbSh2HiIishKRzbgAgPj4e48aNQ3R0NHr06IGFCxeivLwcEyZMAACMHTsWQUFBmDt3LgDg3XffxezZs/HNN98gNDQUOTk5AAA3Nze4ublJ9jmocen0Rry5IRUAMKF3KEJ9XCVORERE1kLycjNq1ChcuXIFs2fPRk5ODrp06YItW7aYJhlnZWVBLv/vANPHH38MnU6HBx98sMbrJCQk4PXXX2/M6CShz/88h8yCCvi4qTB1QITUcYiIyIpIfp2bxsbr3Ni+K6VaDFiwC2VaPeY/2AkPR3OSOBGRvbOZ69wQ1cf8Leko0+rRqbkGD3ZrLnUcIiKyMiw3ZFOOZBfh++QLAIDXh7WHXM4bYxIRUU0sN2QzjEaBhJ9PAABGdmuObi28JE5ERETWiOWGbMbaQxdw9EIx3FROeOXeNlLHISIiK8VyQzahpPK/94967u4I+HrUfkVqIiIilhuyCQu3n0Z+mQ4tfVwxvleY1HGIiMiKsdyQ1TuSXYTP/zwHAJg9tB2UTvxrS0REN8efEmTVtHoD/m/tURgFMLxLIPq38ZU6EhERWTmWG7JqS3eewancMjR1VWL20PZSxyEiIhvAckNWK+1yCf69MwMAMOf+9vB2VUqciIiIbAHLDVmlCp0e0749DL1RYGA7PwzpGCB1JCIishEsN2SVXl+fgoy8Mvh5qDB3REfIZLwSMRER1Q3LDVmdb/dn4buDFyCTAQtHdUVTN5XUkYiIyIaw3JBV2ZORj1k/Vd9iIT62NXqGN5U4ERER2RqWG7Iaxy8UY/JXydAbBe7vEoipd0VIHYmIiGwQyw1ZhZRLxXj8s30ordSje6gX3h3ZifNsiIioXpykDkC0+9QVTP36EEq1enRr4YmVE3pA7ayQOhYREdkolhuSjMEosPz3s5i/JR1GAfQI9cZ/xkfDTcW/lkREVH/8KUKSyMgrxavrTmB/ZiEAYGS35nhnRAeonDhiQ0REt4flhhrV6dxSrPwzE2sOZMNgFHBVKjDzvnZ4pHsw59gQEVGDYLkhi6usMmDT8cv4dn8WDmReNS0f2M4Ps+5rh2BvFwnTERGRvWG5IYs5mVOKb/dnYd2hCyip1AMAFHIZ7o70xcTeYYhpyWvYEBFRw2O5oQZVZTBi0/HL+OLPTBzKKjItD/JsgtE9gvFQdDD8PNTSBSQiIrvHckMNokKnx5dJ5/H5nkzklFQCAJzkMsS29cPomBboE+EDuZxzaoiIyPJYbui2CCHwffIFLNh6EnmlWgCAj5sKY+4IweiYYPi6c5SGiIgaF8sN1Vt2YQVe+eEY/jxTAABo7tUEz9/dCsO6BPKUbiIikgzLDdXLtpQcvPT9UZRW6qF2luPF2NYYf2coSw0REUmO5YbMIoTAosTTWPjraQBAtxae+ODhLgj1cZU4GRERUTWWG6ozg1Hg9fUpWLX3PADgiTvDMGNwJJwVvP8qERFZD5YbqpMqgxEvrjmCDccuQyYD5gxrj7E9Q6WORUREdAOWG/pHBqNA/HdHseHYZTgrZPjg4S4Y2jlQ6lhERES1YrmhWxJCYNbPJ/DL0UtwVsjw6ZhoDIj0lToWERHRTXGyBN3Se1tP4pt9WZDJgA9HdWGxISIiq8dyQze15kAW/r3rDADgnQc64r5OPBRFRET
Wj+WGarX/XCFm/nQCAPD83a0wukcLiRMRERHVDcsN3SC7sAKTv0pGlUFgSMcAPH93K6kjERER1RnLDdVQptXjyS8OorBchw5BHljwUGfe8JKIiGwKyw2ZGIwCL6w+jJO5pfB1V2H52Gg0UfJ2CkREZFtYbsjkva0n8WtaHpROcnw6NhoBmiZSRyIiIjIbyw0BANYduoBlu6vPjHrvwU7oEuwpbSAiIqJ6YrkhHMwsxPQfjgMApgwIx/1dgiROREREVH8sNw4uu7ACT69Khs5gRFx7P7x0TxupIxEREd0WlhsHdv3MqIJyHdoFeODDUV14ZhQREdk8lhsHpTcY8dy31WdGNXNX4bPx0XBR8lZjRERk+1huHJAQAtPXHceO9DyonORYzjOjiIjIjrDcOKC5m9OxNvkC5DLgo9FdeWYUERHZFZYbB7Nkx2l8+ttZAMC8kZ0wsL2/xImIiIgaFidZOAghBOZvPYmP/7rL96uDI/FwdLDEqYiIiBoey40DMBoFEtanYNXe8wCqi81TfcMlTkVERGQZLDd2rrSyCi+uOYpf03IhkwFvD++IR2NaSB2LiIjIYlhu7Fja5RJM+/YwMvLKoHSSY8FDnTGsc6DUsYiIiCyK5cYOVRmMWPHHOby/7RR0BiP8PdRYNiaKZ0UREZFDYLmxM7+duoI3NqQiI68MABDb1hfzRnaCj5tK4mRERESNg+XGDuj0RmxLzcGqpPPYd64QAODtqsT0eyPxUHRzyGS8pQIRETkOlhsbdrn4Gr7dl4VvD2TjSqkWAOAkl2Fsz1A8H9sKmibOEickIiJqfCw3NsZoFPjzTAFW7c3Er2l5MBgFAKCZuwqjuwfjkR4tEOjJWykQEZHjYrmxEcUVVfg+ORtf78vCufxy0/I7Wnrj8TtCMLCdP5ROvOA0ERERy42VO3ulDP/54xzWHbqAyiojAMBN5YSR3YLw2B0haO3nLnFCIiIi68JyY6WSzxdi2e6z+DUtF6L6yBMi/d0xpmcIhncJgquK/+uIiIhqw5+QVuZkTine3ZKOHel5pmWxbf0wqU8YeoR588wnIiKif8ByYyVySyrxwbZT+D45G0ZRfdbTg1HN8WSflojwdZM6HhERkc1guZGY0Sjw1b7zmL/lJMq0egDAoA7++L97IxHm4ypxOiIiItvDciOh07mlmL7uOJLPXwUAdAn2xKz72iEqxEviZERERLaL5UYCWr0BS3eewce7MlBlEHBVKvB/90bi8TtCoJBzTg0REdHtYLlpZAcyCzH9h2M4c6X6WjWxbX3xxv0deOE9IiKiBsJy00hKKqswf0s6vtqbBQDwcVNhzrD2GNzRn2dAERERNSCWm0awLSUHs39OQU5JJQDgke7BmDGoLTQuvPcTERFRQ7OK6/UvXboUoaGhUKvViImJwf79+2+5/vfff4/IyEio1Wp07NgRmzZtaqSk5snML8ekLw/iqVXJyCmpRGhTF3wzKQbzRnZisSEiIrIQycvNmjVrEB8fj4SEBBw6dAidO3dGXFwc8vLyal3/zz//xOjRozFx4kQcPnwYw4cPx/Dhw3HixIlGTn5zheU6vLkhFfd8uBvbU3OhkMvwTP9wbHmhL3qF+0gdj4iIyK7JhLh+cX9pxMTEoHv37liyZAkAwGg0Ijg4GNOmTcP06dNvWH/UqFEoLy/Hhg0bTMvuuOMOdOnSBcuWLfvH9yspKYFGo0FxcTE8PDwa7oMAuFx8Dct/O4dv92fhWpUBANCvdTO8NqQt7wFFRER0G8z5+S3pnBudTofk5GTMmDHDtEwulyM2NhZJSUm1bpOUlIT4+Pgay+Li4vDTTz/Vur5Wq4VWqzV9XVJScvvBa7H5+GU8t/owqgzVXbFDkAdeGtgGA9r4WuT9iIiIqHaSlpv8/HwYDAb4+fnVWO7n54f09PRat8nJyal1/ZycnFrXnzt3LubMmdMwgW8hKtQLMpkMPcK8MGVABPq28uFZUERERBKw+7OlZsyYUWOkp6S
kBMHBwQ3+Pr7uaiTG90Owt0uDvzYRERHVnaTlxsfHBwqFArm5uTWW5+bmwt/fv9Zt/P39zVpfpVJBpVI1TOB/wGJDREQkPUnPllIqlYiKikJiYqJpmdFoRGJiInr27FnrNj179qyxPgBs3779pusTERGRY5H8sFR8fDzGjRuH6Oho9OjRAwsXLkR5eTkmTJgAABg7diyCgoIwd+5cAMDzzz+Pfv364f3338eQIUOwevVqHDx4EJ9++qmUH4OIiIishOTlZtSoUbhy5Qpmz56NnJwcdOnSBVu2bDFNGs7KyoJc/t8Bpl69euGbb77BzJkz8eqrr6JVq1b46aef0KFDB6k+AhEREVkRya9z09gseZ0bIiIisgxzfn5LfoViIiIioobEckNERER2heWGiIiI7ArLDREREdkVlhsiIiKyKyw3REREZFdYboiIiMiusNwQERGRXWG5ISIiIrsi+e0XGtv1CzKXlJRInISIiIjq6vrP7brcWMHhyk1paSkAIDg4WOIkREREZK7S0lJoNJpbruNw95YyGo24dOkS3N3dIZPJGvS1S0pKEBwcjOzsbN63yoK4nxsH93Pj4H5uPNzXjcNS+1kIgdLSUgQGBta4oXZtHG7kRi6Xo3nz5hZ9Dw8PD/7DaQTcz42D+7lxcD83Hu7rxmGJ/fxPIzbXcUIxERER2RWWGyIiIrIrLDcNSKVSISEhASqVSuoodo37uXFwPzcO7ufGw33dOKxhPzvchGIiIiKybxy5ISIiIrvCckNERER2heWGiIiI7ArLDREREdkVlhszLV26FKGhoVCr1YiJicH+/ftvuf7333+PyMhIqNVqdOzYEZs2bWqkpLbNnP2ckpKCkSNHIjQ0FDKZDAsXLmy8oDbOnP28fPly9OnTB15eXvDy8kJsbOw//v2naubs53Xr1iE6Ohqenp5wdXVFly5dsGrVqkZMa7vM/f583erVqyGTyTB8+HDLBrQj5uzrzz//HDKZrMZDrVZbNqCgOlu9erVQKpVixYoVIiUlRUyaNEl4enqK3NzcWtffs2ePUCgUYv78+SI1NVXMnDlTODs7i+PHjzdyctti7n7ev3+/ePnll8W3334r/P39xYcffti4gW2Uufv50UcfFUuXLhWHDx8WaWlpYvz48UKj0YgLFy40cnLbYu5+3rlzp1i3bp1ITU0VGRkZYuHChUKhUIgtW7Y0cnLbYu5+vu7cuXMiKChI9OnTR9x///2NE9bGmbuvV65cKTw8PMTly5dNj5ycHItmZLkxQ48ePcSUKVNMXxsMBhEYGCjmzp1b6/oPP/ywGDJkSI1lMTEx4umnn7ZoTltn7n7+u5CQEJabOrqd/SyEEHq9Xri7u4svvvjCUhHtwu3uZyGE6Nq1q5g5c6Yl4tmN+uxnvV4vevXqJf7zn/+IcePGsdzUkbn7euXKlUKj0TRSumo8LFVHOp0OycnJiI2NNS2Ty+WIjY1FUlJSrdskJSXVWB8A4uLibro+1W8/k/kaYj9XVFSgqqoK3t7elopp8253PwshkJiYiJMnT6Jv376WjGrT6ruf33jjDfj6+mLixImNEdMu1Hdfl5WVISQkBMHBwbj//vuRkpJi0ZwsN3WUn58Pg8EAPz+/Gsv9/PyQk5NT6zY5OTlmrU/1289kvobYz6+88goCAwNvKPD0X/Xdz8XFxXBzc4NSqcSQIUPw0Ucf4Z577rF0XJtVn/38xx9/4LPPPsPy5csbI6LdqM++btOmDVasWIGff/4ZX331FYxGI3r16oULFy5YLKfD3RWciG7fvHnzsHr1auzatcvyEwMdkLu7O44cOYKysjIkJiYiPj4eLVu2RP/+/aWOZhdKS0sxZswYLF++HD4+PlLHsXs9e/ZEz549TV/36tULbdu2xSeffII333zTIu/JclNHPj4+UCgUyM3NrbE8NzcX/v7+tW7j7+9v1vpUv/1M5rud/bxgwQLMmzcPv/76Kzp16mTJmDavvvtZLpcjIiICANC
lSxekpaVh7ty5LDc3Ye5+PnPmDDIzMzF06FDTMqPRCABwcnLCyZMnER4ebtnQNqohvkc7Ozuja9euyMjIsEREADwsVWdKpRJRUVFITEw0LTMajUhMTKzRSP+uZ8+eNdYHgO3bt990farffibz1Xc/z58/H2+++Sa2bNmC6Ojoxohq0xrq77PRaIRWq7VERLtg7n6OjIzE8ePHceTIEdNj2LBhGDBgAI4cOYLg4ODGjG9TGuLvtMFgwPHjxxEQEGCpmDwV3ByrV68WKpVKfP755yI1NVU89dRTwtPT03RK25gxY8T06dNN6+/Zs0c4OTmJBQsWiLS0NJGQkMBTwevA3P2s1WrF4cOHxeHDh0VAQIB4+eWXxeHDh8Xp06el+gg2wdz9PG/ePKFUKsXatWtrnNJZWloq1UewCebu53feeUds27ZNnDlzRqSmpooFCxYIJycnsXz5cqk+gk0wdz//L54tVXfm7us5c+aIrVu3ijNnzojk5GTxyCOPCLVaLVJSUiyWkeXGTB999JFo0aKFUCqVokePHmLv3r2m5/r16yfGjRtXY/3vvvtOtG7dWiiVStG+fXuxcePGRk5sm8zZz+fOnRMAbnj069ev8YPbGHP2c0hISK37OSEhofGD2xhz9vNrr70mIiIihFqtFl5eXqJnz55i9erVEqS2PeZ+f/47lhvzmLOvX3jhBdO6fn5+YvDgweLQoUMWzScTQgjLjQsRERERNS7OuSEiIiK7wnJDREREdoXlhoiIiOwKyw0RERHZFZYbIiIisissN0RERGRXWG6IiIjIrrDcEJFNGT9+PIYPHy51DCKyYrxxJhFZDZlMdsvnExISsGjRIvDao0R0Kyw3RGQ1Ll++bPrzmjVrMHv2bJw8edK0zM3NDW5ublJEIyIbwsNSRGQ1/P39TQ+NRgOZTFZjmZub2w2Hpfr3749p06bhhRdegJeXF/z8/LB8+XKUl5djwoQJcHd3R0REBDZv3lzjvU6cOIFBgwbBzc0Nfn5+GDNmDPLz8xv5ExORJbDcEJHN++KLL+Dj44P9+/dj2rRpeOaZZ/DQQw+hV69eOHToEAYOHIgxY8agoqICAFBUVIS77roLXbt2xcGDB7Flyxbk5ubi4YcflviTEFFDYLkhIpvXuXNnzJw5E61atcKMGTOgVqvh4+ODSZMmoVWrVpg9ezYKCgpw7NgxAMCSJUvQtWtXvPPOO4iMjETXrl2xYsUK7Ny5E6dOnZL40xDR7eKcGyKyeZ06dTL9WaFQoGnTpujYsaNpmZ+fHwAgLy8PAHD06FHs3Lmz1vk7Z86cQevWrS2cmIgsieWGiGyes7Nzja9lMlmNZdfPwjIajQCAsrIyDB06FO++++4NrxUQEGDBpETUGFhuiMjhdOvWDT/88ANCQ0Ph5MRvg0T2hnNuiMjhTJkyBYWFhRg9ejQOHDiAM2fOYOvWrZgwYQIMBoPU8YjoNrHcEJHDCQwMxJ49e2AwGDBw4EB07NgRL7zwAjw9PSGX89sika2TCV7qk4iIiOwIf0UhIiIiu8JyQ0RERHaF5YaIiIjsCssNERER2RWWGyIiIrIrLDdERERkV1huiIiIyK6w3BAREZFdYbkhIiIiu8JyQ0RERHaF5YaIiIjsCssNERER2ZX/B0DDGioZVcVyAAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "gamma_sm = 1.0\n", "evolution_result_decay = cudaq.evolve(\n", @@ -1782,18 +1396,10 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": null, "id": "09db2f53-c94f-4323-8f21-5c39fbc61dc7", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Probability of |1> from the gate-level simulation with noise: 0.852\n" - ] - } - ], + "outputs": [], "source": [ "cudaq.set_target('density-matrix-cpu')\n", "cudaq.set_random_seed(13)\n", @@ -1826,21 +1432,10 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": null, "id": "f5d0999d-36fb-4d18-92c8-9a439d003dfa", "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'\\nUsing an off-resonance frequency means there is not hte same energy transfer and the peak only goes up to around 0.8, even without decoherence.\\nChanging the time range means that the pulse no longer stops at its peak and has started to slightly decay. \\n'" - ] - }, - "execution_count": 26, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "\"\"\"\n", "Using an off-resonance frequency means there is not hte same energy transfer and the peak only goes up to around 0.8, even without decoherence.\n", @@ -1853,10 +1448,21 @@ "id": "6431aa03-378f-4e39-86ba-fd096691d167", "metadata": {}, "source": [ - "## Summary ##\n", + "## Conclusion\n", "\n", "Noise is the greatest challenge facing quantum computers. Accurate simulations can help us understand both the sources and impacts of noise to guide development of better hardware, algorithms, and QEC codes. You now know how to utilize CUDA-Q for noise modeling as well as a number of situations where noise modeling is useful. 
Scaling up any of these examples makes simulation much more challenging and requires the power of CUDA-Q and AI supercomputing to usher in new advancements to the field." ] + }, + { + "cell_type": "markdown", + "id": "af69b90635ee4ef0", + "metadata": {}, + "source": [ + "**Related Notebooks:**\n", + "* [QEC Stabilizers](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) — covers the Steane code used in this notebook’s QEC exercises\n", + "* [QEC Decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb) — next notebook in the QEC101 series\n", + "* [VQE and GQE](https://github.com/NVIDIA/cuda-q-academic/blob/main/chemistry-simulations/vqe_and_gqe.ipynb) — explores the variational chemistry algorithm used in the noise impact analysis" + ] } ], "metadata": { @@ -1876,8 +1482,24 @@ "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.12.3" + }, + "learning_goals": { + "cfqt_domain": "QCS", + "cfqt_subdomain": [ + "QCS.ERR", + "QCS.SIM", + "QCS.SW" + ], + "cfqt_proficiency": "B1", + "qist_competency_areas": [ + "quantum_error_correction", + "quantum_software", + "quantum_chemistry", + "hpc_integration" + ], + "application_domain": "error_correction" } }, "nbformat": 4, "nbformat_minor": 5 -} +} \ No newline at end of file diff --git a/qec101/Solutions/04_QEC_Decoders_Solution.ipynb b/qec101/Solutions/04_QEC_Decoders_Solution.ipynb index d0cedc2..7a6c384 100644 --- a/qec101/Solutions/04_QEC_Decoders_Solution.ipynb +++ b/qec101/Solutions/04_QEC_Decoders_Solution.ipynb @@ -27,34 +27,59 @@ "id": "3e33ec99-e8e6-4a88-b4ea-bcf1e8bbb0df", "metadata": {}, "source": [ - "# QEC 101\n", - "## Lab 4: Decoders\n", - "\n", - "\n", - "QEC is only effective if the codes utilized to flag errors can be interpreted to identify the errors to be fixed. This is the job of a decoder. 
Decoding is one of the most challenging yet critical aspects of QEC and finding good decoders is a major researcher objective in the field.\n", - "\n", - "This lab introduces the basic concepts of decoding and frames why the problem is so difficult. In the later sections, you will then work through a number of exercises to implement a naive \"brute force\" decoder, train an AI decoder, and explore how GPUs can power advanced decoding algorithms like belief propagation with ordered statistics decoding. \n", - "\n", - "\n", - "**Prerequisites:** This is the 4th lab in the QEC series. If you are not familiar with the basics of classical or quantum error correction (EC), please complete [\"The Basics of Classical and Quantum Error Correction\"](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/01_QEC_Intro.ipynb) first. It is also helpful to have completed [\"Stabilizers, the Shor Code, and the Steane Code\"](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) in which stabilizers and syndromes are introduced in detail. This notebook also walks you through building a Steane code implementation in CUDA-Q which is used again in this lab. 
\n", - "\n", - "The list below outlines what you'll be doing in each section of this lab:\n", - "\n", - "* **4.1** Understand what decoding is, why it is important, and what makes it so difficult\n", - "* **4.2** Explore Pauli frames and error tracking\n", - "* **4.3** Code a naive brute force decoder for the Steane Code\n", - "* **4.4** Train an AI decoder for the Steane Code\n", - "* **4.5** Experiment with NVIDIA's accelerated belief propagation decoder.\n", - "\n", - "Terminology and notation you'll use:\n", - "* decoders, decoding window, Pauli frames\n", - "* most likely error decoding\n", - "* AI decoding\n", - "* Belief propagation and ordered statistics decoding\n", + "# QEC 101 — Lab 4: Decoders — Solutions\n", + "\n", + "---\n", + "\n", + "**What You Will Do:**\n", + "* Explore the role of decoding in quantum error correction and understand the decoding window constraint\n", + "* Implement Pauli frame tracking for multi-round error correction\n", + "* Build a brute-force maximum likelihood error decoder for the Steane code\n", + "* Train a neural network AI decoder using CUDA-Q simulated syndrome data\n", + "* Implement belief propagation decoding and apply NVIDIA's GPU-accelerated BP+OSD decoder to large qLDPC codes\n", + "\n", + "**Prerequisites:**\n", + "* Python and Jupyter familiarity\n", + "* Basic knowledge of quantum computing (qubits, gates, measurement)\n", + "* Completion of [QEC 101 Lab 1](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/01_QEC_Intro.ipynb) (classical and quantum error correction basics)\n", + "* Completion of [QEC 101 Lab 2](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) (stabilizers, Steane code implementation in CUDA-Q)\n", + "\n", + "**Key Terminology:**\n", + "* Decoder\n", + "* Decoding Window\n", + "* Pauli Frame\n", + "* Maximum Likelihood Decoding (MLE)\n", + "* AI Decoder\n", + "* Belief Propagation (BP)\n", + "* Ordered Statistics Decoding (OSD)\n", + "* Code 
Capacity\n", + "* qLDPC (Quantum Low-Density Parity Check)\n", + "\n", + "**CUDA-Q Syntax:**\n", + "* [`@cudaq.kernel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.kernel) — defines a quantum kernel function\n", + "* [`cudaq.qvector`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.qvector) — allocates a register of qubits\n", + "* [`cudaq.sample`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.sample) — samples measurement outcomes from a kernel\n", + "* [`cudaq.set_target`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.set_target) — selects simulation or hardware backend\n", + "* [`cudaq.NoiseModel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.NoiseModel) — defines a quantum noise model\n", + "* [`cudaq_qec`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — CUDA-Q Quantum Error Correction library (BP decoder, code definitions)\n", + "\n", + "QEC is only effective if the codes utilized to flag errors can be interpreted to identify the errors to be fixed. This is the job of a decoder. Decoding is one of the most challenging yet critical aspects of QEC and finding good decoders is a major researcher objective in the field.\n", + "\n", + "This lab introduces the basic concepts of decoding and frames why the problem is so difficult. In the later sections, you will then work through a number of exercises to implement a naive \"brute force\" decoder, train an AI decoder, and explore how GPUs can power advanced decoding algorithms like belief propagation with ordered statistics decoding.\n", + "\n", + "This is the 4th lab in the QEC series. 
If you are not familiar with the basics of classical or quantum error correction (EC), please complete [\"The Basics of Classical and Quantum Error Correction\"](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/01_QEC_Intro.ipynb) first. It is also helpful to have completed [\"Stabilizers, the Shor Code, and the Steane Code\"](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) in which stabilizers and syndromes are introduced in detail. This notebook also walks you through building a Steane code implementation in CUDA-Q which is used again in this lab." + ] + }, + { + "cell_type": "markdown", + "id": "19021cfd832544a0", + "metadata": {}, + "source": [ + "
\n", "\n", - "💻 Just a heads-up: This notebook is designed to be run on an environment with a GPU. If you don't have access to a GPU, feel free to read through the cells and explore the content without executing them. Enjoy learning! ⭐\n", + "**⚡ GPU Required:** Parts of this notebook require a GPU.\n", "\n", - "To get started, run the cells below to install the prerequisite libraries and then restart the kernel." + "
" ] }, { @@ -64,32 +89,55 @@ "metadata": {}, "outputs": [], "source": [ - "## Instructions for Google Colab. You can ignore this cell if you have cuda-q set up \n", - "# Run this notebook in a GPU runtime\n", - "# Uncomment the lines below and execute the cell to install cuda-q\n", - "\n", - "#!pip install cudaq\n", - "\n", + "## Instructions for Google Colab. You can ignore this cell if you have CUDA-Q\n", + "## set up locally with all required files on your system.\n", + "## Uncomment the lines below and execute this cell to install CUDA-Q.\n", "\n", + "#!pip install cudaq -q\n", + "#!pip install torch scikit-learn galois ipywidgets -q\n", + "#\n", "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", "#!unzip -q main.zip\n", "#!mv cuda-q-academic-main/qec101/Images ./Images" ] }, + { + "cell_type": "markdown", + "id": "45d32c6755024b5c", + "metadata": {}, + "source": [ + "> **Note:** Run the cell below to import all required packages.\n", + "> If you installed packages above, restart the kernel first\n", + "> (**Runtime → Restart session** in Colab, or **Kernel → Restart** in Jupyter)." + ] + }, { "cell_type": "code", "execution_count": null, +<<<<<<< qec_updates + "id": "da83c723f5694d80", +======= "id": "4a3fc629-1a82-4eec-b946-1f3014d749ea", +>>>>>>> main "metadata": {}, "outputs": [], "source": [ - "# install `torch`, 'scikit-learn', 'galois', cudaq-qec' and `ipywidgets` in the current Python kernel. 
Skip this if they are already installed.\n", - "# Make sure to restart your kernel if you execute this!\n", - "# In a Jupyter notebook, go to the menu bar > Kernel > Restart Kernel.\n", - "# In VSCode, click on the Restart button in the Jupyter toolbar.\n", - "\n", "import sys\n", + "import os\n", + "sys.path.append(os.path.join(os.getcwd(), '..'))\n", + "from itertools import product\n", "\n", +<<<<<<< qec_updates + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "import requests\n", + "import bz2\n", + "import galois\n", + "import torch\n", + "import torch.nn as nn\n", + "import torch.optim as optim\n", + "from sklearn.model_selection import train_test_split\n", +======= "try:\n", " import torch\n", " import scikit_learn\n", @@ -98,8 +146,17 @@ " import ipywidgets as widgets\n", " import numpy as np\n", " import cudaq_qec as qec\n", +>>>>>>> main "\n", + "import cudaq\n", + "from cudaq import spin\n", + "from cudaq.qis import *\n", "\n", +<<<<<<< qec_updates + "## To install cudaq-qec (if not already installed), uncomment and run:\n", + "## !pip install cudaq-qec -q\n", + "import cudaq_qec as qec\n", +======= "except ImportError:\n", " print(\"Tools not found, installing. Please restart your kernel after this is done.\")\n", " !{sys.executable} -m pip install --upgrade pip -q\n", @@ -110,11 +167,16 @@ " !{sys.executable} -m pip install ipywidgets -q\n", " !{sys.executable} -m pip install cudaq-qec -q\n", " print(\"\\nNew libraries have been installed. 
Please restart your kernel!\")\n", +>>>>>>> main "\n", + "from Images.decoder.bp import run_decoder, parse_csr_mat, parse_H_csr, parse_obs_csr\n", "\n", +<<<<<<< qec_updates +======= "#This lab runs a GPU accelerated decoder and requires access to a GPU\n", "import cudaq\n", "import cudaq_qec as qec\n", +>>>>>>> main "cudaq.set_target('nvidia')" ] }, @@ -123,6 +185,8 @@ "id": "f5ee9745-1d8a-4591-b1c3-0159c4dad662", "metadata": {}, "source": [ + "---\n", + "\n", "## 4.1 Decoding Decoded\n", "\n", "Remember that a QEC round involves four main steps:\n", @@ -131,10 +195,11 @@ "* Decoding the syndrome to identify where an error occurred and what instructions to send to the QPU to fix the error\n", "* Correcting the error\n", "\n", - "\"Drawing\"\n", - "\n", - "The decoding step is very challenging and is considered one of the primary limitations for QEC. This is because decoding requires measurements on a QPU, data transfer to the supercomputer, decoding on the supercomputer, and then data transfer back to the QPU. The time available for this is called the **decoding window** and varies based on a number of factors such as the qubit modality, data transfer rates, and the volume of information that needs to be decoded.\n", + "\"Diagram\n", "\n", +<<<<<<< qec_updates + "The decoding step is very challenging and is considered one of the primary limitations for QEC. This is because decoding requires measurements on a QPU, data transfer to the supercomputer, decoding on the supercomputer, and then data transfer back to the QPU. The time available for this is called the **decoding window** and varies based on a number of factors such as the qubit modality, data transfer rates, and the volume of information that needs to be decoded." +======= "The simulation below makes this more clear. First, set the time for the decoding window. All 50 syndromes must be decoded in this time, otherwise the QEC procedure fails. 
In many cases, syndromes vary in decoding difficulty, so this simulation samples random times from a normal distribution. Try changing the parameters of the distribution and see how this impacts the decoder's success rate.\n", "\n", "Notice how even if a decoder is quite fast and can decode most of the syndromes in time, the worst-case scenario (i.e. hardest syndrome to decode) is usually the limiting factor." @@ -157,6 +222,7 @@ "\n", "from Images.decoder.decoder_widget import display_widget\n", "display_widget()" +>>>>>>> main ] }, { @@ -164,7 +230,9 @@ "id": "5c13cd03-89fd-4069-a410-36d7047b1d86", "metadata": {}, "source": [ - "Directly competing with speed is accuracy. If a decoder is inaccurate, errors will be missed or introduced each QEC round and will propagate to ruin the computation. High-distance codes are necessary for accuracy, but unfortunately introduce high-qubit overheads and make decoding much more challenging. Advances in QEC code design and low-latency integration between AI supercomputers and QPUs alleviate pressure on the decoding step, but it nevertheless remains the primary bottleneck of QEC." + "Directly competing with speed is accuracy. If a decoder is inaccurate, errors will be missed or introduced each QEC round and will propagate to ruin the computation. High-distance codes are necessary for accuracy, but unfortunately introduce high-qubit overheads and make decoding much more challenging. 
Advances in QEC code design and low-latency integration between AI supercomputers and QPUs alleviate pressure on the decoding step, but it nevertheless remains the primary bottleneck of QEC.\n", + "\n", + "To learn more about decoder metrics, explore the notebook called \"[Decoder Metrics and Parallel Window Decoding](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/08_QEC_Decoder_metrics_and_parallel_window_decoding.ipynb)\"" ] }, { @@ -172,24 +240,27 @@ "id": "6b77d598-b3c0-489c-9809-34bdfb7a9a65", "metadata": {}, "source": [ - "## 4.2 Pauli Frames and Error Tracking ##\n", + "---\n", + "\n", + "## 4.2 Pauli Frames and Error Tracking\n", "\n", - "In practice, when errors are identified by the decoder, they are not immediately corrected but are tracked using a Pauli frame. The Pauli frame keeps track of the corrections classically and applies them later. This approach reduces the number of gate operations required to fix errors on the QPU, thereby protecting the encoded state from additional noise introduced by each correction gate. For instance, if a bit flip error occurs on qubit 1 in the first round and another bit flip error happens on the same qubit later, the two errors cancel each other out, eliminating the need for a correction\n", + "In practice, when errors are identified by the decoder, they are not immediately corrected but are tracked using a **Pauli frame**. The Pauli frame keeps track of the corrections classically and applies them later. This approach reduces the number of gate operations required to fix errors on the QPU, thereby protecting the encoded state from additional noise introduced by each correction gate. For instance, if a bit flip error occurs on qubit 1 in the first round and another bit flip error happens on the same qubit later, the two errors cancel each other out, eliminating the need for a correction\n", "\n", "Often, codes are depicted using 3D images like the one below. 
In this case, each plane is a Steane code QEC round with flagged errors in purple. Each error is saved, and the list grows with future rounds. The final Paul frame, $[X_1, X_5, X_1]$, is the list of corrections for the three bit flip errors that have occurred over all the rounds: two on qubit 1 and one on qubit 5. In the last step, the errors can be simplified, for example, $X_1X_1 = I$, so only one of the three corrections, $X_5$, needs to be applied. This is a rather trivial example, and often diagrams like this are used to depict more complex codes and their respective error pathways.\n", "\n", - "\"Drawing\"\n", + "\"3D\n", "\n", "The dimension of time can also lend itself to more sophisticated decoding schemes. This is particularly important when measurement errors occur during the stabilizer checks. In this case, it might appear that a stabilizer flags when in fact the data qubits are fine. Multiple decoding rounds can demonstrate that the false stabilizer flag is a consequence of measurement error and not a true error, where other true errors would persist without correction. Such an approach is more powerful but requires decoding of much more complex syndromes. The diagram below demonstrates this concept with an example.\n", "\n", - "\"Drawing\"\n", + "\"Diagram\n", "Notice how, in the first case, the decoder has kept track of a measurement error and is therefore making an incorrect syndrome in the final case. When decoding happens over time, the decoding task must not decode a 19-bit syndrome but is able to flag measurement errors\n", "\n", "\n", "\n", - "
\n", - "

Exercise 1:

\n", - "

\n", + "

\n", + "\n", + "**Exercise 1:**\n", + "\n", "The benefit of decoding in time is that the measurement errors can be factored into the decoding process. However, the tradeoff is that the decoding problem is much harder. When decoding in time, an effective parity check matrix must be constructed as an input to the decoder. In this exercise you will build $H^{(2)}$ for a two round Steane code that includes consistency checks to flag errors between the two time steps. \n", "\n", "First, a few hints. Consider the dimensions. The number of columns still corresponds to the number of qubits, but, now we need to take into account the data qubits at time 0, the data qubits at time 1, and the three ancilla qubits used to measure syndromes between the two rounds. \n", @@ -201,7 +272,7 @@ "What do the middle three columns need to be for $H^{(2)}$ to be able to catch measurement errors?\n", "\n", "Build $H^{(2)}$, and then build an error vector $e$ of size 17 such that each entry is a 0 or a 1 if an error occurred on that qubit. Compute $H^{(2)}e^T$ for a case with an error on data qubit 1 in the first time step, an error on data qubit 1 in the second time step only, and a measurement error. Note, it is best practice to assume that the decoder will not hanndle raw syndrome outputs, but the differences between he current set of measurements and the next round. For example, after preparation the syndrome 101 might be measured. If the next round produces the same stabilizer measurerments, the decoder would see 000 not 101. This syntax makes it much easier for decoders to handle data in more complex settings.\n", - "

\n", + "\n", "
\n" ] }, @@ -212,8 +283,7 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", - "\n", + "# EXERCISE 1\n", "H = np.array([\n", " [1, 1, 0, 1, 1, 0, 0],\n", " [1, 0, 1, 1, 0, 1, 0],\n", @@ -261,6 +331,8 @@ "id": "7cc15193-fe88-49e3-a943-c5188cd0d5d7", "metadata": {}, "source": [ + "---\n", + "\n", "## 4.3 Most Likely Error Decoding\n", "\n", "So far, decoders have been presented as black boxes. In many cases that is sufficient. If you are developing or testing new codes, you might just use a state of the art decoder and not care how it works. In other cases, the opposite is true, and you might focus on developing and tuning a decoder to work well for a specific sort of QEC situation. \n", @@ -269,7 +341,7 @@ "\n", "The starting point is to consider a naive brute forced decoder that is conceptually simple yet sets the upper bound on decoder accuracy. \n", "\n", - "The steps of maximum likelihood decoding are as follows (considering only bitflip errors for simplicity):\n", + "The steps of **maximum likelihood decoding** are as follows (considering only bitflip errors for simplicity):\n", "\n", "1. Select a QEC code and encode a message in the codespace with $n$ data qubits.\n", "2. Generate the $2^n$ bitstrings $\\{x_0, \\cdots, x_{2^n} \\}$ of length $n$ corresponding to all possible error situations.\n", @@ -283,11 +355,12 @@ "Notice, in Lab 2 when you coded the Steane code, we assumed a situation where one error occurs at a time, allowing your syndrome checks to fix errors. This is the same assumption made here. The problem with this approach is that it does not scale. There are $2^n$ errors that need to be computed *a priori* which is not possible for large codes. The sections below will consider more scaleable heuristics to overcome this issue.\n", "\n", "\n", - "
\n", - "

Exercise 2:

\n", - "

\n", - "Code the most likely error decoder for the Steane code below given the parity check matrix below. For each syndrome, print the associated length 7 bitstrings that produce that error, the Hamming distance from the baseline message (0000000), and the probability of that error. \n", - "

\n", + "
\n", + "\n", + "**Exercise 2:**\n", + "\n", + "Code the most likely error decoder for the Steane code below given the parity check matrix below. For each syndrome, print the associated length 7 bitstrings that produce that error, the Hamming distance from the baseline message (0000000), and the probability of that error.\n", + "\n", "
\n", " " ] @@ -299,8 +372,7 @@ "metadata": {}, "outputs": [], "source": [ - "from itertools import product\n", - "\n", + "# EXERCISE 2\n", "# Define the probability of a bitflip\n", "p = 0.1\n", "\n", @@ -359,6 +431,8 @@ "id": "e16ead63-c070-4ef7-a097-f01783635be1", "metadata": {}, "source": [ + "---\n", + "\n", "## 4.4 AI Decoders\n", "\n", "One way to circumvent the scaling challenges posed by the a brute force most likely error decoder is to use tools like AI. AI is fantastic at pattern recognition, runs very quickly, and can easily scale. \n", @@ -367,19 +441,20 @@ "\n", "Recently, [NVIDIA and QuEra announced a new transformed based decoder](https://developer.nvidia.com/blog/nvidia-and-quera-decode-quantum-errors-with-ai/) tested on magic state distillation circuits used by QuEra (A 35 qubit circuit with 5 Steane code logically encoded logical qubits). The decoder showed promise by outperforming the decoder used by QuEra in terms of speed and accuracy. Additionally, the AI decoder might have the potential to scale to code distances large enough for sufficiently low logical error rates.\n", "\n", - "\"Drawing\"\n", + "\"Performance\n", + "\n", "\n", "\n", + "
\n", + "\n", + "**Exercise 3:**\n", "\n", - "
\n", - "

Exercise 3:

\n", - "

\n", "You will now build a working AI decoder for the Steane code. The goal is to build something similar to the workflow in the image below.\n", "\n", - "\"Drawing\"\n", + "\"Workflow\n", "\n", "This lab does not expect you to have experience coding AI models with tools like PyTorch, so you will focus on the data generation and learn how to prepare the data to train an AI decoder without worrying about details of the model. Follow the steps outlined below to complete the code.\n", - "

\n", + "\n", "
\n", "\n" ] @@ -391,7 +466,11 @@ "source": [ "The first step is to generate the training data. Take the Steane code circuit you coded in Lab 2, now with bitflip noise to each qubit after encoding. In this case, we can explore circuit-level noise based on simulated results rather than a contrived data set. \n", "\n", +<<<<<<< qec_updates + "Create a data set of 5000 samples. To generate this, run `cudaq.run()` 5000 times taking one shot each time. Output the measurements from the syndrome checks (without correcting any errors) and then measure all of the data qubits. Compute the parity of bits corresponding to the correct logical operator to determine the true logical state. \n", +======= "Create a data set of 5000 samples. To generate this, run `cudaq.run()` 5000 times to return the syndrome and data qubit measurements for each shot. Compute the parity of bits corresponding to the correct logical operator to determine the true logical state. \n", +>>>>>>> main "\n", "Save the syndromes and the logical states as two numpy arrays. This will be your data set." ] @@ -403,6 +482,9 @@ "metadata": {}, "outputs": [], "source": [ +<<<<<<< qec_updates + "# EXERCISE 3\n", +======= "import cudaq\n", "import numpy as np\n", "\n", @@ -413,6 +495,7 @@ " pass\n", "\n", "# Defines noise model and probability of bit-flip error.\n", +>>>>>>> main "p = 0.05\n", "cudaq.unset_noise()\n", "noise = cudaq.NoiseModel()\n", @@ -513,16 +596,29 @@ }, { "cell_type": "markdown", +<<<<<<< qec_updates + "id": "956a81bf", + "metadata": {}, + "source": [ + "The previous cell is quite a bit of work, and requires you to manually construct the entire QEC code. This is not ideal when you are primarily interested in testing an AI decoder and want data generation streamlined.\n", + "\n", + "Another more efficient way to sample training data for memory experiments is directly using the parity check matrix. Random bitflips can be applied and syndromes determined via matrix multiplication. 
CUDA-Q QEC can do this with just a few lines of codes (shown below) and generate the same sort of data we did above with a preloaded Steane code. Additionally, if you want to generate data for multiple syndrome extraction rounds, you can use the sample_memory_circuit. If you want to test a new, non-standard code, you would need to define the kernels explicitly similar to the example above as shown in the docs [here](https://nvidia.github.io/cudaqx/components/qec/introduction.html#qec-code-framework-cudaq-qec-code)" +======= "id": "b68f8852", "metadata": {}, "source": [ "Another more efficient way to sample training data for memory experiments is directly using the parity check matrix. Random bitflips can be applied and syndromes determined via matrix multiplication. CUDA-Q QEC can do this with just a few lines of codes (shown below) and generate the same sort of data we did above with a preloaded Steane code. Additionally, if you want to generate data for multiple syndrome extraction rounds, you can use the `sample_memory_circuit`. 
If you want to test a new, non-standard code, you would need to define the kernels explicitly similar to the example above as shown in the docs [here](https://nvidia.github.io/cudaqx/components/qec/introduction.html#core-components)" +>>>>>>> main ] }, { "cell_type": "code", "execution_count": null, +<<<<<<< qec_updates + "id": "e98a9fac", +======= "id": "303d2976", +>>>>>>> main "metadata": {}, "outputs": [], "source": [ @@ -574,18 +670,18 @@ "# Normalize input data\n", "syndromes = (syndromes - syndromes.mean()) / syndromes.std()\n", "\n", - "X_tr, X_te, y_tr, y_te = train_test_split(\n", + "X_train, X_test, y_train, y_test = train_test_split(\n", " syndromes, logical_flips, test_size=0.20, random_state=42)\n", "\n", - "X_tr = torch.tensor(X_tr)\n", - "X_te = torch.tensor(X_te)\n", - "y_tr = torch.tensor(y_tr)\n", - "y_te = torch.tensor(y_te)\n", + "X_train = torch.tensor(X_train)\n", + "X_test = torch.tensor(X_test)\n", + "y_train = torch.tensor(y_train)\n", + "y_test = torch.tensor(y_test)\n", "\n", "# Create data loaders\n", "batch_size = 32\n", "train_loader = torch.utils.data.DataLoader(\n", - " torch.utils.data.TensorDataset(X_tr, y_tr),\n", + " torch.utils.data.TensorDataset(X_train, y_train),\n", " batch_size=batch_size,\n", " shuffle=True\n", ")\n", @@ -613,14 +709,14 @@ "criterion = nn.BCELoss()\n", "optimizer = optim.Adam(model.parameters(), lr=1e-4)\n", "scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(\n", - " optimizer, mode='min', factor=0.1, patience=10)\n", + " optimizer, mode='min', factor=0.1, patience=10,)\n", "\n", "# Define the test_accuracy function\n", "def test_accuracy():\n", " model.eval()\n", " with torch.no_grad():\n", - " pred = (model(X_te).squeeze() > 0.5).float()\n", - " return (pred == y_te).float().mean().item()\n", + " pred = (model(X_test).squeeze() > 0.5).float()\n", + " return (pred == y_test).float().mean().item()\n", "\n", "# Training loop\n", "num_epochs = 30\n", @@ -656,7 +752,7 @@ " # Update learning rate\n", " 
scheduler.step(avg_epoch_loss)\n", " \n", - " if epoch % 5 == 0:\n", + " if epoch % 1 == 0:\n", " print(f\"Epoch {epoch:3d}/{num_epochs} | \"\n", " f\"train loss={avg_epoch_loss:.4f} | \"\n", " f\"test acc={current_acc:.4f}\")\n", @@ -686,11 +782,12 @@ "print(f\"\\nFinal Results:\")\n", "print(f\"Final Test Accuracy: {test_acc[-1]:.4f}\")\n", "print(f\"Final Training Loss: {train_losses[-1]:.4f}\")\n", + "print(f\"Raw (Undecoded) Accuracy: {1-raw_logical_error_rate:.4f}\")\n", "\n", "# Print first 20 predictions\n", "model.eval()\n", "with torch.no_grad():\n", - " test_predictions = model(X_te).squeeze()\n", + " test_predictions = model(X_test).squeeze()\n", " predicted_labels = (test_predictions > 0.5).float()\n", "\n", "print(\"\\nFirst 20 test examples:\")\n", @@ -699,14 +796,14 @@ "print(\"=\" * 40)\n", "\n", "for i in range(20):\n", - " true_label = int(y_te[i].item())\n", + " true_label = int(y_test[i].item())\n", " pred_label = int(predicted_labels[i].item())\n", " print(f\"{i:^6} | {true_label:^6} | {pred_label:^6}\")\n", "\n", "print(\"=\" * 40)\n", "\n", "# Calculate accuracy for these 20 examples\n", - "correct = (predicted_labels[:20] == y_te[:20]).sum().item()\n", + "correct = (predicted_labels[:20] == y_test[:20]).sum().item()\n", "print(f\"\\nAccuracy for these 20 examples: {correct}/20 = {correct/20:.2%}\")\n", "\n" ] @@ -716,6 +813,18 @@ "id": "26973394-04c5-4919-a43e-9ab2d7309fb1", "metadata": {}, "source": [ +<<<<<<< qec_updates + "You should see the model successfully train! The test set accuracy should increase while the loss functions decreases.\n", + "\n", + "There are a few key observations to discuss.\n", + "\n", + "1. If the model parameters are random and we run this training multiple times, we should see the model on average start with an accuracy of about 0.5. This means we would have as much luck flipping a coin as our decoder. It may start higher or lower depending on the initial parameters bias towards outputting 1's or 0's. 
So, we do demonstrate the model did train.\n", + "\n", + "2. The trained model does outperform the raw logical error rate without decoding. So, our AI decoder is an improvement. Given the simplicity of the Steane code, this is unsurprising, as there is not really any hidden insight to be gleaned as we can essentially work out the brute force MLE decoding by hand.\n", + "\n", + "3. The final output of the AI model (or any decoder) is limited by the underlying QEC code and its distance. This is what determines what errors are detectable or correctable before we even try decoding. For example, the distance three Steane code cannot correct two errors. So, we cannot expect the AI model to learn how to correct these errors either. Thus, AI decoding shines in the regime where there are many correctable errors with non-trivial syndrome patterns.\n", + "\n", +======= "TYou should see the model successfully train! The test set accuracy should increase while the loss functions decreases. \n", "\n", "There are a few key observations to discuss. \n", @@ -726,6 +835,7 @@ "\n", "3. The final output of the AI model (or any decoder) is limited by the underlying QEC code and its distance. This is what determines what errors are detectable or correctable before we even try decoding. For example, the distance three Steane code cannot correct two errors. So, we cannot expect the AI model to learn how to correct these errors either. Thus, AI decoding shines in the regime where there are many correctable errors with non-trivial syndrome patterns.\n", "\n", +>>>>>>> main "4. Because the trained decoder depends on the error model used, it is really important to have large training data sets with sufficiently realistic noise models and model often require fine tuning with experimental data to realize peak performance. If the error rate was tiny, the model may just learn to output logical 0 all the time, and learn nothing about the error patterns if it has insufficient cases to train on. 
When training on physical QPU data, we are not trying to learn a noise model, but the actual, unknown noise profile of the device." ] }, @@ -734,9 +844,11 @@ "id": "3b8cb708-3810-43aa-af2e-38144aba2b17", "metadata": {}, "source": [ - "## Belief Propagation Decoding\n", + "---\n", + "\n", + "## 4.5 Belief Propagation Decoding\n", "\n", - "Another state-of-the-art decoding method is belief propagation (BP). BP is a powerful technique borrowed from classical error correction that is highly flexible and can serve as a black box decoder for arbitrary QEC Codes. It is particularly useful for codes like quantum low-density parity check (qLDPC). All the user needs to do is provide a parity check matrix and then feed the decoder syndromes to decode. \n", + "Another state-of-the-art decoding method is **belief propagation (BP)**. BP is a powerful technique borrowed from classical error correction that is highly flexible and can serve as a black box decoder for arbitrary QEC Codes. It is particularly useful for codes like quantum low-density parity check (**qLDPC**). All the user needs to do is provide a parity check matrix and then feed the decoder syndromes to decode. \n", "\n", "NVIDIA created a GPU accelerated BP decoder which allows researchers to push QEC even further than before. This section will walk you through implementing BP and how to use NVIDIA's accelerated BP decoder. \n", "\n", @@ -752,16 +864,12 @@ " \n", "4. Finally, the final beliefs $L_{\\text{final}, i}$ are computed as $L_i + \\sum_{j \\in N(i)} L_{c_j \\to v_i}$, summing the prior beliefs with the final messages sent to each variable node. From this a decision can be made where positive numbers indicate no error and negative an error, with the magnitudes related to confidence.\n", "\n", - "Ideally, BP will converge to a solution that agrees with the original syndrome and correct the error. 
If BP cannot converge, it means there is still significant uncertainty whether some of the bits have errors or not and postprocessing is necessary to refine the result. This will be discussed in the following section.\n", + "Ideally, BP will converge to a solution that agrees with the original syndrome and correct the error. If BP cannot converge, it means there is still significant uncertainty whether some of the bits have errors or not and postprocessing is necessary to refine the result. This will be discussed in the following section.
\n", "\n", + "**Exercise 4:**\n", "\n", - "\n", - "\n", - "div style=\"background-color: #f9fff0; border-left: 6px solid #76b900; padding: 15px; border-radius: 4px;\">\n", - "

Exercise 4:

\n", - "

\n", "Below is the start of a BP implementation for decoding the 5-qubit repetition code. Fill in the sections marked \"TODO\" to complete the code. Most of the BP loops are calculated for you. Make sure to review them and understand what is going on. Then, you will complete the code by fixing the code to calculate the final belief on each qubit and determine where errors occurred.\n", - "

\n", + "\n", "
\n", "\n" ] @@ -773,9 +881,7 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", - "import sys\n", - "\n", + "# EXERCISE 4\n", "physical_error_rate = 0.1\n", "max_iter = 5\n", "\n", @@ -907,11 +1013,6 @@ "metadata": {}, "outputs": [], "source": [ - "import requests\n", - "import bz2\n", - "import os\n", - "from Images.decoder.bp import run_decoder, parse_csr_mat, parse_H_csr, parse_obs_csr\n", - "\n", "if __name__ == \"__main__\":\n", " # See other test data options in https://github.com/NVIDIA/cudaqx/releases/tag/0.2.0\n", " filename = 'osd_1008_8785_0.001.json' # lower error rate\n", @@ -951,7 +1052,6 @@ ] }, { - "attachments": {}, "cell_type": "markdown", "id": "4e3d1953-c564-4b9f-9bea-d2b2d6247613", "metadata": {}, @@ -962,7 +1062,7 @@ "\n", "In benchmarks of large code instances, the NVIDIA decoder was up to 35x faster than the industry standard implementation for benchmarks run on the [[144,12,12]](https://arxiv.org/abs/2308.07915) code. \n", "\n", - "\"Drawing\"\n", + "\"Bar\n", "\n" ] }, @@ -1024,11 +1124,6 @@ "metadata": {}, "outputs": [], "source": [ - "import requests\n", - "import bz2\n", - "import os\n", - "from Images.decoder.bp import run_decoder, parse_csr_mat, parse_H_csr, parse_obs_csr\n", - "\n", "if __name__ == \"__main__\":\n", " # See other test data options in https://github.com/NVIDIA/cudaqx/releases/tag/0.2.0\n", " filename = 'osd_1008_8785_0.005.json' # lower error rate\n", @@ -1082,11 +1177,12 @@ "\n", "\n", "\n", - "
\n", - "

Exercise 5:

\n", - "

\n", - " The following exercise is based on a lecture by Joschka Roffe. Given the parity check matrix below and the probabilities of error from BP. Perform OSD manually and find the error profile that satisfies the syndrome. Note, all computations must be performed using mod 2 arithmetic. This can be accomplished using the galois library which creates a Galois field and allows all numpy operations to compute mod 2.\n", - "

\n", + "
\n", + "\n", + "**Exercise 5:**\n", + "\n", + "The following exercise is based on a [lecture](https://www.youtube.com/watch?v=b9N2Ps3FTto) by Joschka Roffe. Given the parity check matrix below and the probabilities of error from BP. Perform OSD manually and find the error profile that satisfies the syndrome. Note, all computations must be performed using mod 2 arithmetic. This can be accomplished using the `galois` library which creates a Galois field and allows all `numpy` operations to compute mod 2.\n", + "\n", "
\n" ] }, @@ -1097,9 +1193,7 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", - "import galois\n", - "\n", + "# EXERCISE 5\n", "\n", "GF2 = galois.GF(2)\n", "\n", @@ -1189,7 +1283,7 @@ "To summarize, syndromes are of varying difficulty. Easy syndromes are solved with BP, OSD-0 is used for moderate syndrome difficulties, and higher order OSD is used for the most challenging. \n", "\n", "\n", - "\"Drawing\"\n", + "\"Flowchart\n", "\n", "\n", "Try running the code below on the 10000 shot data set. See what happens when `osd_method` is set to 1 for OSD-0. Then try setting this variable to 3 to run a variant of higher order OSD. Does the logical error rate improve? How much more time does it take to perform higher order OSD?" @@ -1231,15 +1325,26 @@ "id": "b31e66fd-08dc-4ed5-bcf0-00858109baf6", "metadata": {}, "source": [ - "## Summary\n", + "## Conclusion\n", "\n", "After completing this notebook, you should now have an understanding for the challenges behind decoding. This begins with the brute force most likely error decoding which is incredibly inefficient. You now have experience using other techniques like AI and BP+OSD decoding to accelerated QEC decoding. 
" ] + }, + { + "cell_type": "markdown", + "id": "0ab9420760b24f57", + "metadata": {}, + "source": [ + "**Related Notebooks:**\n", + "* [QEC 101 — Lab 5: Magic State Distillation](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/05_QEC_MSD.ipynb) — continues the QEC series with magic state distillation\n", + "* [QEC 101 — Lab 3: Noisy Simulation](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/03_QEC_Noisy_Simulation.ipynb) — explores noise models that generate the syndrome data decoders must process\n", + "* [QEC 101 — Lab 8: Decoder Metrics and Parallel Window Decoding](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/08_QEC_Decoder_metrics_and_parallel_window_decoding.ipynb) — extends decoder concepts with performance metrics and parallel decoding" + ] } ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": ".venv (3.11.5)", "language": "python", "name": "python3" }, @@ -1253,7 +1358,25 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.11.5" + }, + "learning_goals": { + "application_domain": "error_correction", + "cfqt_domain": "QCS", + "cfqt_proficiency": "B2", + "cfqt_subdomain": [ + "QCS.ERR", + "QCS.SW", + "QCS.ALG", + "QCS.ML" + ], + "qist_competency_areas": [ + "quantum_error_correction", + "quantum_software", + "quantum_algorithms", + "quantum_machine_learning", + "hpc_integration" + ] } }, "nbformat": 4, diff --git a/qec101/Solutions/05_QEC_MSD_Solution.ipynb b/qec101/Solutions/05_QEC_MSD_Solution.ipynb index e7c9131..7c58c67 100644 --- a/qec101/Solutions/05_QEC_MSD_Solution.ipynb +++ b/qec101/Solutions/05_QEC_MSD_Solution.ipynb @@ -4,9 +4,7 @@ "cell_type": "code", "execution_count": null, "id": "eead95ce", - "metadata": { - "id": "eead95ce" - }, + "metadata": {}, "outputs": [], "source": [ "# SPDX-License-Identifier: Apache-2.0 AND CC-BY-NC-4.0\n", @@ -27,70 +25,94 @@ { "cell_type": "markdown", "id": 
"135f6e3e-e55f-4b5c-80cb-f8979bba94e0", - "metadata": { - "id": "135f6e3e-e55f-4b5c-80cb-f8979bba94e0" - }, + "metadata": {}, "source": [ - "# QEC 101\n", - "## Lab 5- $T$ gates and Magic State Distillation\n", - "\n", - "Fault tolerant quantum computing (FTQC) requires a universal gate set from which any quantum algorithm can be implemented. The T-gate is a challenging gate to implement fault tolerantly, yet holds the key to unlocking the power of quantum computing.\n", + "# QEC 101 — Lab 5: $T$ Gates and Magic State Distillation — Solutions\n", + "$\\renewcommand{\\ket}[1]{|#1\\rangle}\\renewcommand{\\bra}[1]{\\langle#1|}$\n", "\n", - "A common procedure for producing T-gates is called magic state distillation (MSD), and will likely consume the lion's share of the resources necessary to realize FTQC. Much quantum research is currently directed at finding ways to efficiently implement MSD. \n", + "---\n", "\n", - "In a recent paper titled [*Experimental Demonstration of the Logical Magic State Distillation*](https://arxiv.org/abs/2412.15165), researchers from QuEra, MIT, and Harvard showcased MSD on QuEra's neutral atom quantum processor.\n", + "**What You Will Do:**\n", + "* Explain why the Clifford + $T$ gate set is necessary for universal quantum computing\n", + "* Describe how fault-tolerant $T$ gates are applied using magic state injection\n", + "* Analyze the resource overhead of magic state distillation across multiple rounds\n", + "* Implement the [[5,1,3]] MSD protocol using CUDA-Q to distill higher-fidelity magic states\n", + "* Determine the distillation threshold by comparing input and output state fidelities\n", "\n", "**Prerequisites:**\n", + "* Python and Jupyter familiarity\n", + "* Basic quantum computing concepts (qubits, gates, measurement)\n", + "* Familiarity with quantum error correction basics ([Lab 1: Introduction to QEC](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/01_QEC_Intro.ipynb))\n", + "* Understanding of the Steane 
[[7,1,3]] code ([Lab 2: Stabilizer Codes](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb))\n", + "\n", + "**Key Terminology:**\n", + "* Universal gate set\n", + "* T gate\n", + "* Transversal gates\n", + "* Eastin-Knill Theorem\n", + "* Magic state\n", + "* Magic state distillation (MSD)\n", + "* [[5,1,3]] code\n", + "* Color code\n", + "* Clifford gates\n", + "* Fault tolerant quantum computing (FTQC)\n", + "\n", + "**CUDA-Q Syntax:**\n", + "* [`@cudaq.kernel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.kernel) — defines a quantum kernel function\n", + "* [`cudaq.qvector`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.qvector) — allocates a register of qubits\n", + "* [`cudaq.get_state`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.get_state) — returns the statevector\n", + "* [`cudaq.set_target`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.set_target) — selects simulation or hardware backend\n", + "* [`cudaq.sample`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.sample) — samples measurement outcomes from a kernel" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fd21d6cf34b34383", + "metadata": {}, + "outputs": [], + "source": [ + "## Instructions for Google Colab. 
You can ignore this cell if you have CUDA-Q\n", + "## set up locally with all required files on your system.\n", + "## Uncomment the lines below and execute this cell to install CUDA-Q.\n", "\n", - "If you have completed the previous labs in this series (1-4), you should have a good foundation for the basics of QEC and the fundamentals of stabilizer codes, and have already coded up the Steane code which is used in the QuEra MSD implementation.\n", - "\n", - "This lab will introduce the importance of T-gates, how MSD works, and allow you to implement a version of it yourself. \n", - "\n", - "The list below outlines what you'll be doing in each section of this lab:\n", - "\n", - "* **5.1** Learn about the Clifford + $T$ gate set.\n", - "* **5.2** Learn how fault tolerant $T$ gates are applied.\n", - "* **5.3** Learn how magic state distillation works.\n", - "* **5.4** Learn how QuEra performed MSD in their recent experimental paper.\n", - "* **5.5** Write a CUDA-Q code to perform MSD distillation.\n", - "\n", - "Terminology and notation you'll use:\n", - "- universal gate set, T-gates, transversal gates\n", - "- Eastin-Knill Theorem\n", - "- magic state, magic state distillation, [[5,1,3]] code.\n", - "- color code\n", - "\n", - "To get started, run the cell below to install the prerequisite libraries and then restart the kernel." + "#!pip install cudaq -q\n", + "#\n", + "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", + "#!unzip -q main.zip\n", + "#!mv cuda-q-academic-main/qec101/Images ./Images" + ] + }, + { + "cell_type": "markdown", + "id": "bd279c318aca4b05", + "metadata": {}, + "source": [ + "> **Note:** Run the cell below to import all required packages.\n", + "> If you installed packages above, restart the kernel first\n", + "> (**Runtime → Restart session** in Colab, or **Kernel → Restart** in Jupyter)." 
] }, { "cell_type": "code", "execution_count": null, "id": "644b9c82", - "metadata": { - "id": "644b9c82" - }, + "metadata": {}, "outputs": [], "source": [ - "import cudaq\n", - "from cudaq import spin\n", - "from cudaq.qis import *\n", + "import sys\n", + "import os\n", + "sys.path.append(os.path.join(os.getcwd(), '..'))\n", + "\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", - "from typing import List\n", - "\n", - "import sys\n", "\n", "try:\n", - " import numpy as np\n", " import qutip\n", - "\n", - "\n", "except ImportError:\n", - " print(\"Tools not found, installing. Please restart your kernel after this is done.\")\n", - " !{sys.executable} -m pip install --upgrade pip\n", - " !{sys.executable} -m pip install qutip\n", - " print(\"\\nNew libraries have been installed. Please restart your kernel!\")\n", + " print(\"qutip not found, installing. Please restart your kernel after this is done.\")\n", + " !pip install qutip -q\n", + " print(\"\\nNew library installed. Please restart your kernel!\")\n", "\n", "import cudaq\n", "cudaq.set_target('nvidia')" @@ -99,74 +121,71 @@ { "cell_type": "markdown", "id": "c2fa8e57-2f20-4d1b-a8c1-a8291da83fdc", - "metadata": { - "id": "c2fa8e57-2f20-4d1b-a8c1-a8291da83fdc" - }, + "metadata": {}, "source": [ - "## 5.1 Clifford + $T$ Gates ##\n", + "---\n", "\n", + "## 1. Clifford + $T$ Gates\n", "\n", - "So far in this QEC series, all of the QEC procedures have been contextualized as critical for realizing FTQC. However, FTQC is broader than QEC and involves careful consideration of all operations such that errors are not propagated through a computation. 
That is to say, implementing syndrome extraction of a QEC code correctly is a key step for preserving quantum memory, but still requires a set of FT operations (state prep, gate, measurement, etc) to perform computations in a way that ensures a single errors do not propogate uncontrollably.\n", + "So far in this QEC series, all of the QEC procedures have been contextualized as critical for realizing **fault tolerant quantum computing (FTQC)**. However, FTQC is broader than QEC and involves careful consideration of all operations such that errors are not propagated through a computation. That is to say, implementing syndrome extraction of a QEC code correctly is a key step for preserving quantum memory, but still requires a set of FT operations (state prep, gate, measurement, etc.) to perform computations in a way that ensures single errors do not propagate uncontrollably.\n", "\n", "For example, proper FT implementation of the Steane code would require logically encoded ancilla qubits to measure the stabilizers to make sure an ancilla qubit error does not become an uncorrectable error itself.\n", "\n", - "There are many aspects of FTQC that will not be covered here. However, the concept of a universal gate set needs to be introduced before the MSD protocol can be understood. A **universal gate set** is a set of quantum gates from which any unitary operation can be approximated to arbitrary precision. The is analogous to classical computing where any digital computation can be constructed from AND, NOT, and OR gates.\n", + "There are many aspects of FTQC that will not be covered here. However, the concept of a universal gate set needs to be introduced before the MSD protocol can be understood. A **universal gate set** is a set of quantum gates from which any unitary operation can be approximated to arbitrary precision. 
This is analogous to classical computing where any digital computation can be constructed from AND, NOT, and OR gates.\n", "\n", - "Certain gates, known as **transversal gates**, are nice to have in a gate set, because they are already fault tolerant. A transversal gate can be applied individually to every data qubit encoding a logical qubit such that if any of them have an error, it only affects the target data qubit. An example of a transversal get would be the $X$ operation in the Steane code. A logical $X$ is obtained by performing an $X$ on each data qubit and if any of those result in an error, it is correctable and will not propagate to the other data qubits. \n", + "Certain gates, known as **transversal gates**, are nice to have in a gate set, because they are already fault tolerant. A transversal gate can be applied individually to every data qubit encoding a logical qubit such that if any of them have an error, it only affects the target data qubit. An example of a transversal gate would be the $X$ operation in the Steane code. A logical $X$ is obtained by performing an $X$ on each data qubit and if any of those result in an error, it is correctable and will not propagate to the other data qubits.\n", "\n", - "So, all that is needed for FTQC to be easy is a universal gate set consisting only of transversal gates. Unfortunately, no such set exists. The **Eastin-Knill** Theorem proves that no quantum error-correcting code can implement a universal set of gates using only transversal operations.\n", + "So, all that is needed for FTQC to be easy is a universal gate set consisting only of transversal gates. Unfortunately, no such set exists. The **Eastin-Knill Theorem** proves that no quantum error-correcting code can implement a universal set of gates using only transversal operations.\n", "\n", - "The most common universal gate set is the Clifford + T gates. 
The Clifford gates, such as $H$, $S$, and CNOT, are the gates that map Pauli operators to other Pauli operators under conjugation. The $T$ gate, while not part of the Clifford group, is necessary to promote the Clifford gate set to a universal gate set. In most quantum error-correcting codes, the $T$ gate cannot be implemented transversally. In the case of the Steane code, the Clifford gates can be implemented transversally (see figure below), but the $T$ gate is not.\n", + "The most common universal gate set is the Clifford + T gates. The **Clifford gates**, such as $H$, $S$, and CNOT, are the gates that map Pauli operators to other Pauli operators under conjugation. The **T gate**, while not part of the Clifford group, is necessary to promote the Clifford gate set to a universal gate set. In most quantum error-correcting codes, the $T$ gate cannot be implemented transversally. In the case of the Steane code, the Clifford gates can be implemented transversally (see figure below), but the $T$ gate is not.\n", "\n", - "\n", + "\"Diagram\n", "\n", - "Consider why the $T$-gate is necessary for the universal gate set. For any logical operation $\\bar{U}$, it must transform all stabilizers $s_i \\in S$ into another $s_j \\in S$. \n", + "Consider why the $T$-gate is necessary for the universal gate set. 
For any logical operation $\\bar{U}$, it must transform all stabilizers $s_i \\in S$ into another $s_j \\in S$.\n", "\n", "$H$ does this fine as $H \\otimes X \\otimes H ^{\\dagger} = Z \\in S$ and $H \\otimes Z \\otimes H ^{\\dagger} = X \\in S$.\n", "\n", "$T$ does transform $Z$ into a valid stabilizer, but not the $X$ stabilizers.\n", "\n", - "$$T \\otimes Z \\otimes T ^{\\dagger} = Z \\in S$$ \n", + "$$T \\otimes Z \\otimes T ^{\\dagger} = Z \\in S$$\n", "\n", - "$$T \\otimes X \\otimes T ^{\\dagger} = e^{i \\pi/4}SZX \\notin S$$ \n", + "$$T \\otimes X \\otimes T ^{\\dagger} = e^{i \\pi/4}SZX \\notin S$$\n", "\n", - "The $T$ gate ends up being the key to any advantage realized by quantum algorithms. This is because without $T$ gates, the other gates could only jump between the six pole states of the Bloch sphere, which can be simulated efficiently with classical computing. Explore this fact with the widget [linked here](https://nvidia.github.io/cuda-q-academic/qec101/Images/MSD/bloch_sphere.html). Notice how the T gate is the only operation capable of producing a non-pole state. " + "The $T$ gate ends up being the key to any advantage realized by quantum algorithms. This is because without $T$ gates, the other gates could only jump between the six pole states of the Bloch sphere, which can be simulated efficiently with classical computing. Explore this fact with the widget [linked here](https://nvidia.github.io/cuda-q-academic/qec101/Images/MSD/bloch_sphere.html). Notice how the T gate is the only operation capable of producing a non-pole state." ] }, { "cell_type": "markdown", "id": "255bcdd8", - "metadata": { - "id": "255bcdd8" - }, + "metadata": {}, "source": [ - "## 5.2 Fault Tolerant $T$ Gate Application ##\n", - "\n", - "Thankfully, there is a way to implement a fault-tolerant $T$ gate, it is just difficult and resource intensive. The procedure involves the circuit below.\n", + "---\n", "\n", + "## 2. 
Fault-Tolerant $T$ Gate Application\n", "\n", - "\n", + "Thankfully, there is a way to implement a fault-tolerant $T$ gate, it is just difficult and resource intensive. The procedure involves the circuit below.\n", "\n", + "\"Circuit\n", "\n", - "It begins with an encoded $\\ket{0}$ qubit (bottom wire) and an arbitrary logical qubit state $\\ket{\\psi}$ (top wire). Then, the bottom wire is prepared in a so called magic state, $\\frac{\\ket{0} + e^{i\\pi/4}\\ket{1}}{\\sqrt{2}}$. A fault tolerant CNOT gate is then applied with the magic state qubit as the control. The target qubit is then measured and if the result is a 0, $T$ was successfully applied to $\\ket{\\psi}$. If measurement produces a 1, then a FT SX gate is applied to the qubit and the result becomes $T$ applied to $\\ket{\\psi}$.\n", + "It begins with an encoded $\\ket{0}$ qubit (bottom wire) and an arbitrary logical qubit state $\\ket{\\psi}$ (top wire). Then, the bottom wire is prepared in a so called **magic state**, $\\frac{\\ket{0} + e^{i\\pi/4}\\ket{1}}{\\sqrt{2}}$. A fault-tolerant CNOT gate is then applied with the magic state qubit as the control. The target qubit is then measured and if the result is a 0, $T$ was successfully applied to $\\ket{\\psi}$. If measurement produces a 1, then a FT $SX$ gate is applied to the qubit and the result becomes $T$ applied to $\\ket{\\psi}$.\n", "\n", - "The \"Prepare Magic State\" subcircuit is deceptively simple. It consists of a single logical Hadamard gate followed by a noisy $T$ gate to form $\\ket{T}$. This process may seem circular as a $T$ gate is required in the process to apply a fault tolerant $T$ gate. However, the procedure is a bit more nuanced. A \"good enough\" (high enough fidelity) magic state is the output of an onerous process called magic state distillation (MSD). MSD will be discussed more in the next section. \n", + "The \"Prepare Magic State\" subcircuit is deceptively simple. 
It consists of a single logical Hadamard gate followed by a noisy $T$ gate to form $\\ket{T}$. This process may seem circular as a $T$ gate is required in the process to apply a fault-tolerant $T$ gate. However, the procedure is a bit more nuanced. A \"good enough\" (high enough fidelity) magic state is the output of an onerous process called **magic state distillation (MSD)**. MSD will be discussed more in the next section.\n", "\n", - "Another distinction is that the magic state can only be produced from an easy to prepare logical 0 state. Only by performing the fault tolerant circuit above, can the $T$ gate act on an arbitrary qubit state $\\ket{\\psi_L}$. \n", + "Another distinction is that the magic state can only be produced from an easy to prepare logical 0 state. Only by performing the fault-tolerant circuit above can the $T$ gate act on an arbitrary qubit state $\\ket{\\psi_L}$.\n", "\n", "The math is instructive for why the above circuit works. After the CNOT is applied to the magic state and $\\ket{\\psi}$ qubits, the resulting state is\n", "\n", - "$$ \\frac{1}{\\sqrt{2}} [ \\ket{0}(a\\ket{0} +b\\ket{1}) + e^{i\\pi/4}\\ket{1}(a\\ket{1} + b\\ket{0})] $$\n", + "$$\\frac{1}{\\sqrt{2}} \\left[ \\ket{0}(a\\ket{0} + b\\ket{1}) + e^{i\\pi/4}\\ket{1}(a\\ket{1} + b\\ket{0}) \\right]$$\n", "\n", "Then, if the second qubit is measured (fault tolerantly) and is a 0, the intended $T\\ket{\\psi_L}$ state is prepared.\n", "\n", - "$$ a\\ket{0} +b*e^{i\\pi/4}\\ket{1} $$\n", + "$$a\\ket{0} + b \\cdot e^{i\\pi/4}\\ket{1}$$\n", "\n", "Otherwise, if 1 is measured, the following state is prepared.\n", "\n", - "$$ b\\ket{0} + a*e^{i\\pi/4}\\ket{1} $$\n", + "$$b\\ket{0} + a \\cdot e^{i\\pi/4}\\ket{1}$$\n", "\n", - "In the latter case, the objective state can be obtained with a Clifford correction consisting of an $S$ and $X$ operation to flip the bits and amplitudes. 
such a process ensures that the $T$ gate is implemented fault tolerantly and will not propagate through the computation if it introduces an error." + "In the latter case, the objective state can be obtained with a Clifford correction consisting of an $S$ and $X$ operation to flip the bits and amplitudes. Such a process ensures that the $T$ gate is implemented fault tolerantly and will not propagate through the computation if it introduces an error." ] }, { @@ -174,25 +193,27 @@ "id": "2e9e58e5-c274-4ebf-8511-01cd9c7e90c2", "metadata": {}, "source": [ - "## 5.3 Magic State Distillation ##\n", + "---\n", + "\n", + "## 3. Magic State Distillation\n", "\n", - "Preparing the magic state is a very resource intensive process. First, the process will be explained at a high level and then in terms of the details used in [*Experimental Demonstration of the Logical Magic State Distillation*](https://arxiv.org/abs/2412.15165). \n", + "Preparing the magic state is a very resource intensive process. First, the process will be explained at a high level and then in terms of the details used in [*Experimental Demonstration of the Logical Magic State Distillation*](https://arxiv.org/abs/2412.15165).\n", "\n", "Magic states are produced by magic state factories which follow a procedure like the one pictured below. This process is decoupled from any quantum application and can be used to build and store magic states which are consumed as an application needs $T$ gates applied.\n", "\n", "1) Select a QEC code to encode a collection of logical qubits\n", "2) Prepare each logical qubit in a noisy $\\ket{T}$ state.\n", - "3) Apply a second MSD QEC code on the logical qubits. In the figure below, the [[5,1,3]] code used by QuEra is represented.\n", - "4) Measure all but one of the logical qubits to produce a syndrome to determine if the remaining qubit is in a good magic state or not. 
If the syndrome indicates no error, the process worked and proceed to 5), if an error is flagged, the process restarts at 1).\n", - "5) A less noisy (high fidelity) magic state has been prepared. It can now be used, or input into another round of MSD.\n", + "3) Apply a second MSD QEC code on the logical qubits. In the figure below, the **[[5,1,3]] code** used by QuEra is represented.\n", + "4) Measure all but one of the logical qubits to produce a syndrome to determine if the remaining qubit is in a good magic state or not. If the syndrome indicates no error, the process worked and proceed to 5); if an error is flagged, the process restarts at 1).\n", + "5) A less noisy (higher fidelity) magic state has been prepared. It can now be used, or input into another round of MSD.\n", "\n", - "\n", + "\"Flowchart\n", "\n", - "It should be noted that this is one approach to producing $T$ gates, but others exits.\n", + "It should be noted that this is one approach to producing $T$ gates, but others exist.\n", "\n", "Usually, one round of MSD is not enough, even if the resulting state does have higher fidelity. To fix this, the output from the first round can be combined with other output magic states and the procedure repeated. Each round exponentially grows the overhead required to produce a single magic state. This is likely the greatest challenge facing FTQC and will consume most of the resources for any quantum application.\n", "\n", - "" + "\"Diagram" ] }, { @@ -200,57 +221,47 @@ "id": "77046502-787f-4814-bc51-7410c48fab34", "metadata": {}, "source": [ - "## 5.4 Performing MSD with the Color Code ##\n", + "---\n", "\n", + "## 4. Performing MSD with the Color Code\n", "\n", "It is helpful to look at a concrete example of MSD in practice to better understand the above process. 
This section will follow the procedure from the QuEra paper and explain the steps they took, preparing you to code up a similar example in the following section.\n", "\n", - "Each magic state is prepared using the [[7,1,3]] color code, also known as the Steane code which was covered in the second lab of this QEC series. The image below shows the procedure which QuEra used based on the gate set of their neutral atom QPU. The bottom data qubit can be set to an arbitrary qubit state and will \"inject\" that state into the logical qubit. The circuit is slightly different from the circuit used to encode the Steane code in lab 2, but enforces equivalent stabilizer constraints.\n", - "\n", - "\n", + "Each magic state is prepared using the [[7,1,3]] **color code**, also known as the Steane code which was covered in the second lab of this QEC series. The image below shows the procedure which QuEra used based on the gate set of their neutral atom QPU. The bottom data qubit can be set to an arbitrary qubit state and will \"inject\" that state into the logical qubit. The circuit is slightly different from the circuit used to encode the Steane code in Lab 2, but enforces equivalent stabilizer constraints.\n", "\n", "\n", "\n", - "\n", - "\n", - "\n", "In this case, the prepared state is a noisy $T$ state created by starting in the $\\ket{0}$ state and applying a rotation of $\\arccos(1/\\sqrt{3})$ about the (-1,1,0) axis.\n", "\n", - "This is repeated five times to create five logically encoded noisy magic states. They are then input into a [[5, 1, 3]] code which produces one magic state with high fidelity from five low fidelity ones. This is an example, like the Shor code, where QEC codes are concatenated. That is to say, the logical qubits are themselves used in a QEC code. 
The circuit below demonstrates what logical operations need to be performed on each logical qubit to complete the MSD process.\n", - "\n", + "This is repeated five times to create five logically encoded noisy magic states. They are then input into a [[5, 1, 3]] code which produces one magic state with high fidelity from five low fidelity ones. This is an example, like the Shor code, where QEC codes are concatenated. That is to say, the logical qubits are themselves used in a QEC code. The circuit below demonstrates what logical operations need to be performed on each logical qubit to complete the MSD process.\n", "\n", "\n", "\n", + "All but the first logical qubit are measured to produce a syndrome. If the correct syndrome (which can vary by implementation) is measured, a magic state with less error has been produced. QuEra's experimental results found that a raw logically encoded magic state had a fidelity of 94.1% which increased to 98.9% when postselected based on syndrome measurements, demonstrating the procedure worked.\n", "\n", - "All but the first logical qubit are measured to produce as syndrome. If the correct syndrome (which can vary by implementation) is measured, a magic state with less error has been produced. QuEra's experimental results found that a raw logically encoded magic state had a fidelity of 94.1\\% which increased to 98.9\\% when postselected based on syndrome measurements, demonstrating the procedure worked.\n", + "
\n", "\n", + "**Exercise 1:**\n", "\n", - "
\n", - "

Exercise 1:

\n", - "

\n", "Write a simple script to calculate the number of physical qubits needed to perform $N$ rounds of MSD given an arbitrary QEC code and MSD procedure. Assuming the [[5,1,3]] code is used for the MSD, plot how many data qubits are needed for up to 5 rounds of MSD when the Steane code is used and the other [[17,1,5]] color code shown below, also used by QuEra in the paper.\n", "\n", - "

\n", + "\"Diagram\n", "\n", - "Is it clear why FTQC is so hard? This entire process must happen for every single $T$ gate applied in a quantum circuit, which could require millions or more $T$ gates.\n", - "

\n", - "
\n", - "\n" + "*[Adapted from: Experimental Demonstration of the Logical Magic State Distillation](https://arxiv.org/abs/2412.15165)*\n", + "\n", + "Is it clear why FTQC is so hard? This entire process must happen for every single $T$ gate applied in a quantum circuit, which could require millions or more $T$ gates.\n", + "\n", + "
" ] }, { @@ -260,51 +271,41 @@ "metadata": {}, "outputs": [], "source": [ - "def qubits_for_MSD(dataq_per_logicalq, ancilla_per_logicalq, max_msd_rounds, ms_per_msd_round):\n", - " \"\"\"Function that calculates the number of physical qubits required for n rounds of MSD and plots them\n", + "# EXERCISE 1\n", + "\n", + "def qubits_for_MSD(dataq_per_logicalq: int, ancilla_per_logicalq: int,\n", + " max_msd_rounds: int, ms_per_msd_round: int) -> None:\n", + " \"\"\"Calculate the number of physical qubits required for n rounds of MSD and plot them.\n", "\n", " Parameters\n", " ----------\n", - " dataq_per_logicalq: int\n", - " The number of data qubits required to encode a logical qubit for the QEC code you selected \n", + " dataq_per_logicalq : int\n", + " The number of data qubits required to encode a logical qubit for the QEC code you selected.\n", " ancilla_per_logicalq : int\n", - " The number of ancilla qubits required to measure the stabilizers of a logical qubit (assume no reuse)\n", - " max_msd_rounds: int\n", - " The number of MSD distillation rounds to perform\n", - " ms_per_msd_round: int\n", - " The number of magic states that must be input for the MSD protocol selected\n", - "\n", - " Returns\n", - " -------\n", + " The number of ancilla qubits required to measure the stabilizers of a logical qubit (assume no reuse).\n", + " max_msd_rounds : int\n", + " The number of MSD distillation rounds to perform.\n", + " ms_per_msd_round : int\n", + " The number of magic states that must be input for the MSD protocol selected.\n", " \"\"\"\n", - "\n", " rounds = []\n", - " qubits= []\n", - " #TODO Start\n", + " qubits = []\n", " ms_factories = 0\n", " for round in range(max_msd_rounds):\n", - " \n", - " ms_factories += 5**round # computes total number of magic state factories\n", - " n_qubits = ms_factories * ms_per_msd_round * (dataq_per_logicalq + ancilla_per_logicalq) # number of qubits per magic state factory\n", + " ms_factories += 5**round\n", + " n_qubits = 
ms_factories * ms_per_msd_round * (dataq_per_logicalq + ancilla_per_logicalq)\n", " rounds.append(round)\n", " qubits.append(n_qubits)\n", - " \n", - " #TODO End\n", - " \n", "\n", - " plt.plot(rounds, qubits, 'b-o') \n", + " plt.plot(rounds, qubits, 'b-o')\n", " plt.autoscale(tight=True)\n", - "\n", - "\n", " plt.xlabel('MSD Rounds')\n", " plt.ylabel('Physical Qubits Required')\n", " plt.title('Qubits Needed for MSD')\n", + " plt.show()\n", "\n", - " plt.show() \n", "\n", - "\n", - "qubits_for_MSD(7,3,6,5)\n", - " " + "qubits_for_MSD(7, 3, 6, 5)" ] }, { @@ -312,25 +313,24 @@ "id": "31a6ae76-794a-4ba9-ab56-81e6891edb5d", "metadata": {}, "source": [ - "## 5.5 Using CUDA-Q to perform MSD ##\n", - "This section will walk you through an example using CUDA-Q to perform MSD. You will be coding the [[7,1,3]] code and following the MSD protocol of [Bravyi and Kitaev](https://arxiv.org/abs/quant-ph/0403025). Because this procedure only requires seven qubits, it can be simulated with a state vector simulator. However, remember that in practice each qubit would be a logical qubit and the procedure below would need to be performed with logical operations.\n", + "---\n", "\n", + "## 5. Using CUDA-Q to Perform MSD\n", "\n", + "This section will walk you through an example using CUDA-Q to perform MSD. You will be coding the [[7,1,3]] code and following the MSD protocol of [Bravyi and Kitaev](https://arxiv.org/abs/quant-ph/0403025). Because this procedure only requires seven qubits, it can be simulated with a state vector simulator. However, remember that in practice each qubit would be a logical qubit and the procedure below would need to be performed with logical operations.\n", "\n", + "
\n", "\n", + "**Exercise 2:**\n", "\n", - "
\n", - "

Exercise 2:

\n", - "

\n", - " The first step of this protocol is to prepare a kernel that produces a noisy $\\ket{T_0}$ state. \n", + "The first step of this protocol is to prepare a kernel that produces a noisy $\\ket{T_0}$ state.\n", "\n", - "$$ \\ket{T_0} = \\cos (\\frac{\\theta}{2}) \\ket{0} + e^{\\frac{i\\pi}{4}}\\sin (\\frac{\\theta}{2}) \\ket{1} $$\n", + "$$\\ket{T_0} = \\cos\\!\\left(\\frac{\\theta}{2}\\right) \\ket{0} + e^{\\frac{i\\pi}{4}} \\sin\\!\\left(\\frac{\\theta}{2}\\right) \\ket{1}$$\n", "\n", - "Where $\\theta = \\frac{1}{2}\\cos^{-1}(\\frac{1}{\\sqrt{3}}) = 0.4776583090622546$.\n", + "Where $\\theta = \\frac{1}{2}\\cos^{-1}\\!\\left(\\frac{1}{\\sqrt{3}}\\right) = 0.4776583090622546$.\n", "Let the kernel take a single float as an input which applies an error to $\\theta$. The code underneath the kernel creates a list of perturbed $\\theta$s over a suggested range.\n", - "

\n", - "
\n", - "\n" + "\n", + "
" ] }, { @@ -340,21 +340,17 @@ "metadata": {}, "outputs": [], "source": [ - "cudaq.set_target(\"nvidia\")\n", + "# EXERCISE 2\n", "\n", - "#Build t0 with some error\n", "@cudaq.kernel\n", - "def noisy_t0(y:float):\n", - " #TODO start (Make a kernel that builds a noisy T0 state given an input angle theta)\n", + "def noisy_t0(y: float):\n", " reg = cudaq.qvector(1)\n", - " ry(2.0*y, reg) # Rotation around Y\n", - " r1(np.pi/4.0, reg)\n", - " #TODO End\n", + " ry(2.0 * y, reg)\n", + " r1(np.pi / 4.0, reg)\n", "\n", "# Perturbation to y gate angles\n", "epsilon = 0.005\n", - "initial_thetay = [0.4776583090622546 + epsilon*i for i in range(75)] \n", - " " + "initial_thetay = [0.4776583090622546 + epsilon * i for i in range(75)]" ] }, { @@ -362,13 +358,13 @@ "id": "82a19172-4ff6-4d34-9ccc-e46ead5abb3d", "metadata": {}, "source": [ - "
\n", - "

Exercise 3:

\n", - "

\n", - "Next, initialize a perfect $\\ket{T_0}$ state and a second state with some error (It is suggested to use element 30 from the list above to get a noticeable amount of noise but still allows MSD to work). Use CUDA-Q's $\\texttt{get\\_state}$ and $\\texttt{overlap}$ commands to compute the fidelity of the noisy state and visualize both states using the Bloch spheres below. Note that fidelity is the overlap squared.\n", - "

\n", - "
\n", - "\n" + "
\n", + "\n", + "**Exercise 3:**\n", + "\n", + "Next, initialize a perfect $\\ket{T_0}$ state and a second state with some error (it is suggested to use element 30 from the list above to get a noticeable amount of noise but still allow MSD to work). Use CUDA-Q's `get_state` and `overlap` commands to compute the fidelity of the noisy state and visualize both states using the Bloch spheres below. Note that fidelity is the overlap squared.\n", + "\n", + "
" ] }, { @@ -378,11 +374,11 @@ "metadata": {}, "outputs": [], "source": [ - "#TODO Start\n", + "# EXERCISE 3\n", + "\n", "initial_t0 = cudaq.get_state(noisy_t0, initial_thetay[30])\n", - "perfect_t0 = cudaq.get_state(noisy_t0,initial_thetay[0] )\n", + "perfect_t0 = cudaq.get_state(noisy_t0, initial_thetay[0])\n", "initial_fidelity = (perfect_t0.overlap(initial_t0)**2).real\n", - "#TODO End\n", "\n", "print(\"Initial Fidelity\")\n", "print(initial_fidelity)\n", @@ -398,16 +394,17 @@ "id": "f74f93f8-cb32-4a78-83b8-f8f0fb666e14", "metadata": {}, "source": [ - "
\n", - "

Exercise 4:

\n", - "

\n", - " Now prepare a kernel to perform the [[5,1,3]] QEC code. The kernel should prepare each qubit as a noisy $\\ket{T_0}$ state, all with the same angle perturbation. Then, implement the circuit below. This circuit essentially maps the stabilizer measurements of the code to qubits 1,2,3 and 4 which need to be measured to produce a syndrome. Note that this version of the [[5,1,3]] code is slightly different from the version used by QuEra, but is more amenable to gate operations native to CUDA-Q. It is the implementation from A study of the robustness of magic state distillation against Clifford gate faults.\n", - "

\n", - " \n", - "
\n", - " One caveat to this code. It actually produces a distilled $\\ket{T_1}$. Add a Pauli Y operation followed by a Hadamard gate to the first qubit to convert back to $\\ket{T_0}$. Play around with this in the Bloch spheres above to understand what the transformation does.\n", - "

\n", - "
\n" + "
\n", + "\n", + "**Exercise 4:**\n", + "\n", + "Now prepare a kernel to perform the [[5,1,3]] QEC code. The kernel should prepare each qubit as a noisy $\\ket{T_0}$ state, all with the same angle perturbation. Then, implement the circuit below. This circuit essentially maps the stabilizer measurements of the code to qubits 1, 2, 3, and 4 which need to be measured to produce a syndrome. Note that this version of the [[5,1,3]] code is slightly different from the version used by QuEra, but is more amenable to gate operations native to CUDA-Q. It is the implementation from [A study of the robustness of magic state distillation against Clifford gate faults](https://dspacemainprd01.lib.uwaterloo.ca/server/api/core/bitstreams/44d06403-0b58-456f-a2d2-fd56ef2ea7b7/content).\n", + "\n", + "\"Circuit\n", + "\n", + "One caveat to this code: it actually produces a distilled $\\ket{T_1}$. Add a Pauli Y operation followed by a Hadamard gate to the first qubit to convert back to $\\ket{T_0}$. Play around with this in the Bloch spheres above to understand what the transformation does.\n", + "\n", + "
" ] }, { @@ -417,19 +414,18 @@ "metadata": {}, "outputs": [], "source": [ - "#Kernel implementing [[5,1,3]] code\n", + "# EXERCISE 4\n", + "\n", "@cudaq.kernel\n", "def msd(y: float):\n", - " #TODO Start \n", - " reg = cudaq.qvector(5) \n", - "\n", - " # noisy state prep 5 copies\n", - " ry(2.0*y, reg) # Rotation around Y\n", - " r1(np.pi/4.0, reg)\n", + " reg = cudaq.qvector(5)\n", "\n", + " # Noisy state prep: 5 copies\n", + " ry(2.0 * y, reg)\n", + " r1(np.pi / 4.0, reg)\n", "\n", - " x.ctrl(reg[1], reg[0]) \n", - " z.ctrl(reg[1], reg[0]) \n", + " x.ctrl(reg[1], reg[0])\n", + " z.ctrl(reg[1], reg[0])\n", " z.ctrl(reg[1], reg[2])\n", " z.ctrl(reg[1], reg[4])\n", "\n", @@ -437,11 +433,10 @@ " z.ctrl(reg[2], reg[3])\n", " z.ctrl(reg[2], reg[4])\n", "\n", - " x.ctrl(reg[3], reg[0]) \n", - " \n", - " x.ctrl(reg[4], reg[0]) \n", - " z.ctrl(reg[4], reg[0]) \n", + " x.ctrl(reg[3], reg[0])\n", "\n", + " x.ctrl(reg[4], reg[0])\n", + " z.ctrl(reg[4], reg[0])\n", "\n", " z(reg[0])\n", " z(reg[1])\n", @@ -452,14 +447,13 @@ " h(reg[3])\n", " h(reg[4])\n", "\n", - " a=mz(reg[1])\n", - " b=mz(reg[2])\n", - " c=mz(reg[3])\n", - " d=mz(reg[4])\n", + " a = mz(reg[1])\n", + " b = mz(reg[2])\n", + " c = mz(reg[3])\n", + " d = mz(reg[4])\n", "\n", - " h(reg[0]) #Apply to swap from T1 to T0\n", - " y(reg[0]) \n", - " #TODO End" + " h(reg[0]) # Apply to swap from T1 to T0\n", + " y(reg[0])" ] }, { @@ -467,7 +461,7 @@ "id": "a93dad50-263b-422f-b931-4b3d6fa52c6d", "metadata": {}, "source": [ - "Now, select an entry from the perturbed angle you created above and use it to run the code below. This will produce ten shots and for each print the syndrome, state, and fidelity with respect to the perfect $\\ket{T_0}$ state. How does the initial fidelity compare to the results? For results with improved fidelity, what is the resulting syndrome? If you do not get a 0000 syndrome, rerun the code. Take one of these improved states and visualize it using the Bloch sphere. 
Notice how it is closer to the perfect $\\ket{T_0}$ state than the state you started with." + "Now, select an entry from the perturbed angle you created above and use it to run the code below. This will produce ten shots and for each print the syndrome, state, and fidelity with respect to the perfect $\\ket{T_0}$ state. How does the initial fidelity compare to the results? For results with improved fidelity, what is the resulting syndrome? If you do not get a 0000 syndrome, rerun the code. Take one of these improved states and visualize it using the Bloch sphere. Notice how it is closer to the perfect $\\ket{T_0}$ state than the state you started with." ] }, { @@ -477,22 +471,22 @@ "metadata": {}, "outputs": [], "source": [ - "print(\"\\n\") \n", + "print(\"\\n\")\n", "for i in range(10):\n", - " print(\"\\n\") \n", + " print(\"\\n\")\n", " print(f\"shot: {i}\")\n", - " distilled_ms = cudaq.get_state(msd, initial_thetay[30]) # uses arbitrary selection \n", + " distilled_ms = cudaq.get_state(msd, initial_thetay[30])\n", " distilled_ms = np.array(distilled_ms)\n", "\n", " indices = np.nonzero(distilled_ms)[0]\n", " for j in np.nonzero(distilled_ms)[0]:\n", - " syndrome=j\n", + " syndrome = j\n", " print(f\"syndrome: {np.binary_repr(syndrome, width=5)[0:4]}\")\n", "\n", " distilled_ms = distilled_ms[np.nonzero(distilled_ms)]\n", " print(f\"state:\\n{distilled_ms}\")\n", - " print(f\"fidelity: {perfect_t0.overlap(distilled_ms)**2}\") \n", - " print(\"\\n\") " + " print(f\"fidelity: {perfect_t0.overlap(distilled_ms)**2}\")\n", + " print(\"\\n\")" ] }, { @@ -500,12 +494,13 @@ "id": "cca10e2a-3a59-4715-9726-e01dc9ceaf0b", "metadata": {}, "source": [ - "
\n", - "

Exercise 5:

\n", - "

\n", - "Now, build a loop below that performs the MSD protocol for initial states corresponding to each error in the initial list of $\\theta$'s. Plot this against the line $y=x$ to determine the threshold required for the input state? That is, the minimum fidelity of the input state such that the procedure works and does not produce worse results. From the graph, estimate how many rounds of MSD would be needed (assuming the same procedure) to distill a state above .98 starting from .94.\n", - "

\n", - "
\n" + "
\n", + "\n", + "**Exercise 5:**\n", + "\n", + "Now, build a loop below that performs the MSD protocol for initial states corresponding to each error in the initial list of $\\theta$'s. Plot this against the line $y=x$ to determine the threshold required for the input state. That is, the minimum fidelity of the input state such that the procedure works and does not produce worse results. From the graph, estimate how many rounds of MSD would be needed (assuming the same procedure) to distill a state above 0.98 starting from 0.94.\n", + "\n", + "
" ] }, { @@ -515,45 +510,36 @@ "metadata": {}, "outputs": [], "source": [ + "# EXERCISE 5\n", + "\n", "input_fidelity = []\n", "output_fidelity = []\n", "\n", "for error in initial_thetay:\n", - "\n", - "# TODO Start\n", " initial_t0 = cudaq.get_state(noisy_t0, error)\n", " input_fidelity.append((perfect_t0.overlap(initial_t0)**2).real)\n", "\n", - " i0 = 100 \n", + " i0 = 100\n", " i1 = 100\n", - " while (i0 != 0)*(i1 != 1):\n", - " \n", + " while (i0 != 0) * (i1 != 1):\n", " distilled_ms = cudaq.get_state(msd, error)\n", " distilled_ms = np.array(distilled_ms)\n", " indices = np.nonzero(distilled_ms)[0]\n", - " \n", - " i0=indices[0]\n", - " i1=indices[1]\n", - " \n", + "\n", + " i0 = indices[0]\n", + " i1 = indices[1]\n", + "\n", " distilled_ms = distilled_ms[np.nonzero(distilled_ms)]\n", "\n", " output_fidelity.append(perfect_t0.overlap(distilled_ms)**2)\n", - "#TODO End\n", - " \n", - "import matplotlib.pyplot as plt\n", "\n", - "# Create the plot\n", "plt.figure(figsize=(8, 6))\n", - "plt.plot(input_fidelity, input_fidelity, label=\"Y=X\", marker='.') # First series: y=x\n", - "plt.plot(input_fidelity, output_fidelity, label=\"Input vs Output\", marker='x') # Second series: input vs output\n", - "\n", - "# Add labels, title, and legend\n", + "plt.plot(input_fidelity, input_fidelity, label=\"Y=X\", marker='.')\n", + "plt.plot(input_fidelity, output_fidelity, label=\"Input vs Output\", marker='x')\n", "plt.xlabel(\"X-axis\")\n", "plt.ylabel(\"Y-axis\")\n", "plt.title(\"Plot of Two Series\")\n", "plt.legend()\n", - "\n", - "# Show the plot\n", "plt.grid(True)\n", "plt.show()" ] @@ -563,7 +549,7 @@ "id": "2ee4954d-0eff-493a-aff3-c7c60e160618", "metadata": {}, "source": [ - "About three rounds are needed. To find this, start with 0.94 on the x axis, and select the y-axis value which is the output (~ 0.955). Use this as the new input and get a state of about 0.975. Doing this one more time gets above 0.98." + "About three rounds are needed. 
To find this, start with 0.94 on the x axis and select the y-axis value which is the output (~0.955). Use this as the new input and get a state of about 0.975. Doing this one more time gets above 0.98." ] }, { @@ -573,15 +559,22 @@ "source": [ "## Conclusion\n", "\n", - "after completing this notebook you should now have a sense of how MSD works, why it is needed, and what makes it so challenging. As you continue with QEC 101 content, remember that there are many aspects of FTQC beyond simply the QEC code selected." + "After completing this notebook you should now have a sense of how MSD works, why it is needed, and what makes it so challenging. The Eastin-Knill Theorem means that no QEC code can implement a universal gate set transversally, making the $T$ gate the critical bottleneck for fault-tolerant quantum computing. Magic state distillation provides a path forward, but at an exponential resource cost that represents one of the greatest challenges facing practical FTQC. As you continue with QEC 101 content, remember that there are many aspects of FTQC beyond simply the QEC code selected." 
+ ] + }, + { + "cell_type": "markdown", + "id": "28d100b289c04eb8", + "metadata": {}, + "source": [ + "**Related Notebooks:**\n", + "* [QEC 101 — Lab 2: Stabilizer Codes](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) — covers the Steane code used as the inner code for MSD in this lab\n", + "* [QEC 101 — Lab 6: Topological Codes](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/06_QEC_Topological_Codes.ipynb) — the next notebook in the QEC 101 series, exploring surface and toric codes\n", + "* [QEC 101 — Lab 4: Decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb) — introduces quantum decoding strategies that complement the distillation approach" ] } ], "metadata": { - "colab": { - "include_colab_link": true, - "provenance": [] - }, "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", @@ -595,11 +588,24 @@ "file_extension": ".py", "mimetype": "text/x-python", "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.10.0" + }, + "learning_goals": { + "cfqt_domain": "QCS", + "cfqt_subdomain": [ + "QCS.ERR", + "QCS.ALG", + "QCS.SW" + ], + "cfqt_proficiency": "B1", + "qist_competency_areas": [ + "quantum_error_correction", + "quantum_algorithms", + "quantum_software" + ], + "application_domain": "error_correction" } }, "nbformat": 4, "nbformat_minor": 5 -} +} \ No newline at end of file diff --git a/qec101/Solutions/06_QEC_Topological_Codes_Solution.ipynb b/qec101/Solutions/06_QEC_Topological_Codes_Solution.ipynb index 9073b63..dc7c8c5 100644 --- a/qec101/Solutions/06_QEC_Topological_Codes_Solution.ipynb +++ b/qec101/Solutions/06_QEC_Topological_Codes_Solution.ipynb @@ -27,109 +27,121 @@ { "cell_type": "markdown", "id": "135f6e3e-e55f-4b5c-80cb-f8979bba94e0", - "metadata": { - "id": "135f6e3e-e55f-4b5c-80cb-f8979bba94e0" - }, + "metadata": {}, "source": [ - "# QEC 101\n", - "## Lab 6 - 
Topological Codes\n", - "\n", - "This lab builds on the stabilizer formalism introduced in [lab 2]() towards much more complex yet powerful QEC codes: the topological codes. [Recent announcements](https://research.google/blog/making-quantum-error-correction-work/) from Google about their new Willow chip have brought a specific topological code (the surface code) center stage, as it is the primary code being explored by Google today and a very promising candidate for useful QEC.\n", - "\n", - "The toric code is one of the earliest topological QEC codes and a precursor to the surface code. It is a more complex code relative to the repetiton code and Steane code, but also elegant and worth the time to understand. This lab introduces you to the conceptual aspects of the toric code and enables you to code an implementation yourself. \n", - "\n", - "The list below outlines what you'll be doing in each section of this lab:\n", - "\n", - "* **6.1** Learn about topological codes and why they are so promising.\n", - "* **6.2** Learn the toric code layout and its stabilizers\n", - "* **6.3** Learn the toric code logical operators\n", - "* **6.4** Learn how to code the toric code in CUDA-Q\n", - "* **6.5** Learn how to decode the toric code errors\n", - "* **6.6** Implement a minimum weight perfect matching decoder\n", - "* **6.7** Learn about the planar and surface codes\n", - "\n", - "Lab 5 Learning Objectives:\n", - "* What are topological codes and why are they so promising. 
\n", - "* Understand the basics of the toric code and how to construct it visually\n", - "* Understand how to intupret toric code errors\n", - "* Understand how to code the toric code in CUDA-Q\n", - "* Understand how minimum-weight perfect matching decoding works\n", - "\n", - "\n", - "Terminology and notation you'll use:\n", - "\n", - "* topological code, toric code, surface code, threshold\n", - "* stabilizers, logical operators, syndromes\n", - "* minimum weight perfect matching\n", + "# Topological Codes — QEC 101: Lab 6 — Solutions\n", + "$\\renewcommand{\\ket}[1]{|#1\\rangle}\\renewcommand{\\bra}[1]{\\langle#1|}$\n", + "\n", + "---\n", + "\n", + "**What You Will Do:**\n", + "* Explore topological quantum error correction codes and their advantages over simpler codes\n", + "* Analyze the toric code's stabilizer structure, including plaquette and vertex stabilizers\n", + "* Identify logical operators on the torus using topological equivalence classes\n", + "* Implement the toric code encoding and syndrome measurement in CUDA-Q\n", + "* Apply minimum weight perfect matching (MWPM) decoding to toric code syndromes\n", + "* Compare the toric, planar, and rotated surface code architectures\n", + "\n", + "**Prerequisites:**\n", + "* Python and Jupyter familiarity\n", + "* Basic knowledge of quantum computing (qubits, gates, measurement) ([Quick Start to Quantum](https://github.com/NVIDIA/cuda-q-academic/blob/main/quick-start-to-quantum/01_quick_start_to_quantum.ipynb))\n", + "* Stabilizer formalism and syndrome measurement ([Lab 2: Stabilizers](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb))\n", + "* Familiarity with CUDA-Q kernels and `cudaq.sample` ([Lab 3: Noisy Simulation](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/03_QEC_Noisy_Simulation.ipynb))\n", + "\n", + "**Key Terminology:**\n", + "* Topological code\n", + "* Toric code\n", + "* Surface code\n", + "* Threshold\n", + "* Stabilizer\n", + "* Logical 
operator\n", + "* Syndrome\n", + "* Minimum weight perfect matching (MWPM)\n", + "* Plaquette stabilizer\n", + "* Vertex stabilizer\n", + "* Planar code\n", + "* Rotated surface code\n", + "* Equivalence class\n", + "* Topology\n", + "\n", + "**CUDA-Q Syntax:**\n", + "* [`@cudaq.kernel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.kernel) — defines a quantum kernel function\n", + "* [`cudaq.qvector`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.qvector) — allocates a register of qubits\n", + "* [`cudaq.sample`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.sample) — samples measurement outcomes from a kernel\n", + "* [`cudaq.set_target`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.set_target) — selects simulation or hardware backend\n", + "\n", + "This lab was inspired by excellent work in [*Quantum Error Correction: An Introductory Guide*](https://arxiv.org/pdf/1907.11157) and a blog entitled [*An Interactive Guide to the Surface Code*](https://arthurpesah.me/blog/2023-05-13-surface-code/#solution-of-the-exercise)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8dd39d5c17594d62", + "metadata": {}, + "outputs": [], + "source": [ + "## Instructions for Google Colab. 
You can ignore this cell if you have CUDA-Q\n", + "## set up locally with all required files on your system.\n", + "## Uncomment the lines below and execute this cell to install CUDA-Q.\n", "\n", - "This lab was inspired by excellent work in [*Quantum Error Correction an Introductory Guide*](https://arxiv.org/pdf/1907.11157) and a blog entitled [*An Interactive Guide to the Surface Code*](https://arthurpesah.me/blog/2023-05-13-surface-code/#solution-of-the-exercise)" + "#!pip install cudaq -q\n", + "#\n", + "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", + "#!unzip -q main.zip\n", + "#!mv cuda-q-academic-main/qec101/Images ./Images" ] }, { "cell_type": "markdown", "id": "b5046650", - "metadata": { - "id": "b5046650" - }, + "metadata": {}, "source": [ - "Execute the cell below to load all the necessary packages for this lab. " + "> **Note:** Run the cell below to import all required packages.\n", + "> If you installed packages above, restart the kernel first\n", + "> (**Runtime → Restart session** in Colab, or **Kernel → Restart** in Jupyter)." ] }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "id": "644b9c82", - "metadata": { - "id": "644b9c82" - }, + "metadata": {}, "outputs": [], "source": [ - "import cudaq\n", - "from cudaq import spin\n", - "from cudaq.qis import *\n", + "import sys\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", - "from typing import List\n", - "import sys\n", - "\n", - "try:\n", - " import networkx as nx\n", - "\n", - "except ImportError:\n", - " print(\"Tools not found, installing. Please restart your kernel after this is done.\")\n", - " !{sys.executable} -m pip install --upgrade networkx\n", - " print(\"\\nNew libraries have been installed. 
Please restart your kernel!\")\n", + "import networkx as nx\n", "\n", "import cudaq\n", + "from cudaq import spin\n", + "from cudaq.qis import *\n", + "\n", "cudaq.set_target('stim')" ] }, - { - "cell_type": "markdown", - "id": "b3e18790-87ab-4180-b349-a4210abca136", - "metadata": {}, - "source": [ - "## 6.1 What is a topological code?" - ] - }, { "cell_type": "markdown", "id": "3f64f39f-0c0c-4082-a63d-609b81fceebf", "metadata": {}, "source": [ - "The previous lab discussed stabilizers, and how the stabilizer formalism can be a helpful tool for generating more complex QEC codes, particularly at scales where specifying the codewords is impractical. The **toric code** is one of the most famous **topological** codes which combines the stabilizer formalism and principles of topology to produce a QEC code with a number of favorable properties. \n", + "---\n", + "\n", + "## 6.1. What is a Topological Code?\n", "\n", - "**Topology** is a branch of mathematics that studies objects abstractly and relates them to so called equivalence classes based on how they can or cannot be transformed into one another. The prototypical example given to introduce topology proposes that a doughnut and a coffee mug are in the same class (see image below), as they each have one hole while a shape like a sphere, would be in a different equivalence class.\n", + "The previous lab discussed **stabilizers**, and how the stabilizer formalism can be a helpful tool for generating more complex QEC codes, particularly at scales where specifying the codewords is impractical. The **toric code** is one of the most famous **topological codes** which combines the stabilizer formalism and principles of **topology** to produce a QEC code with a number of favorable properties.\n", "\n", - "\n", + "Topology is a branch of mathematics that studies objects abstractly and relates them to so called **equivalence classes** based on how they can or cannot be transformed into one another. 
The prototypical example given to introduce topology proposes that a doughnut and a coffee mug are in the same class (see image below), as they each have one hole while a shape like a sphere, would be in a different equivalence class.\n", "\n", - "The coffee mug example, though certainly not rigorous and perhaps somewhat arbitrary, is an accessible glance into the sorts of distinctions topology can make to classify all sorts of geometries, even those that cannot be visualized. Though the details of topology are well outside the scope of this lab, they provide the theoretical foundation of the surface code which takes advantage of the notion of equivalence classes to perform error correction on qubits arranged on a surface.\n", + "\"Illustration\n", "\n", - "The key idea of the surface code is to define stabilizers as small repeating units that can be visually represented on a grid. Repeating such units results in a systematic way to increase the number of data qubits that encode one or more logical qubits, and increase the distance of the code. Defining the code on a grid pays dividends for leveraging topological principles to define errors and logical operations in such a way that lends itself to a nice visual interpretation. Ideally, surface code errors are localized and can be visually inspected much like one can look at an image, assuming the pixels are large enough, and quickly identify which pixels are the wrong color.\n", + "The coffee mug example, though certainly not rigorous and perhaps somewhat arbitrary, is an accessible glance into the sorts of distinctions topology can make to classify all sorts of geometries, even those that cannot be visualized. 
Though the details of topology are well outside the scope of this lab, they provide the theoretical foundation of the **surface code** which takes advantage of the notion of equivalence classes to perform error correction on qubits arranged on a surface.\n", + "\n", + "The key idea of the surface code is to define stabilizers as small repeating units that can be visually represented on a grid. Repeating such units results in a systematic way to increase the number of data qubits that encode one or more logical qubits, and increase the distance of the code. Defining the code on a grid pays dividends for leveraging topological principles to define errors and logical operations in such a way that lends itself to a nice visual interpretation. Ideally, surface code errors are localized and can be visually inspected much like one can look at an image, assuming the pixels are large enough, and quickly identify which pixels are the wrong color.\n", "\n", "Certain types of QPUs (such as superconducting) are well suited for the surface code as qubits are already arranged on surface with nearest neighbor connectivity.\n", "\n", - "The surface code also has a high **threshold**. The threshold of a QEC code is the error rate required of physical qubits such that adding more physical qubits improves the logical error rate and does not make it worse. A high threshold means that the surface code can tolerate physical qubits of lower relative quality than other codes. \n" + "The surface code also has a high **threshold**. The threshold of a QEC code is the error rate required of physical qubits such that adding more physical qubits improves the logical error rate and does not make it worse. A high threshold means that the surface code can tolerate physical qubits of lower relative quality than other codes." 
] }, { @@ -137,45 +149,48 @@ "id": "1a0bf241-9978-4c88-ad2e-21233dbce218", "metadata": {}, "source": [ - "## 6.2 The Toric Code and its Stabilizers\n", + "---\n", + "\n", + "## 6.2. The Toric Code and its Stabilizers\n", "\n", - "The starting point for the toric code is a grid, the lines of which correspond to lines on the surface of a torus (doughnut shape). The horizontal grid lines (like the blue line below) wrap around the equator of the torus. The vertical lines (like the red line) wrap inside of the \"doughnut hole\" of the torus. In order for the grid to appropriately resemble the torus, the endpoints of any grid line connect. Notice that the point A on the torus corresponds to both the left and right endpoint of the blue horizontal line.\n", + "The starting point for the toric code is a grid, the lines of which correspond to lines on the surface of a torus (doughnut shape). The horizontal grid lines (like the blue line below) wrap around the equator of the torus. The vertical lines (like the red line) wrap inside of the \"doughnut hole\" of the torus. In order for the grid to appropriately resemble the torus, the endpoints of any grid line connect. Notice that the point A on the torus corresponds to both the left and right endpoint of the blue horizontal line.\n", "\n", - "\n", + "\"Diagram\n", "\n", "

Image adapted from https://en.wikipedia.org/wiki/Torus#/media/File:Tesseract_torus.png

\n", "\n", "\n", "Each edge (line connecting two vertices) of the grid corresponds to a data qubit depicted by grey squares. The toric code is a $[[2L^2, 2, L]]$ code, encoding two logical qubits, where $L$ is the number of vertical/horizontal edges in each row/column.\n", "\n", - "\n", + "\"Toric\n", "\n", "\n", - "### Stabilizers \n", + "### Stabilizers\n", "\n", - "Stabilizers come in two varieties: plaquette and vertex. A plaquette stabilizer corresponds to the an ancilla qubit (black circle) which sits in the center of each face. The stabilizer consists of four $X$ operators acting on the four adjacent (green edge) qubits. Similarly, each vertex stabilizers corresponds to an ancilla qubit placed at each grid vertex and consists of four $Z$ operators acting on the four adjacent (purple edge) qubits.\n", + "Stabilizers come in two varieties: plaquette and vertex. A **plaquette stabilizer** corresponds to an ancilla qubit (black circle) which sits in the center of each face. The stabilizer consists of four $X$ operators acting on the four adjacent (green edge) qubits. Similarly, each **vertex stabilizer** corresponds to an ancilla qubit placed at each grid vertex and consists of four $Z$ operators acting on the four adjacent (purple edge) qubits.\n", "\n", - "\n", + "\"Diagram\n", "\n", "\n", "### Confirming Stabilizers Commute\n", "\n", "Remember from Lab 2 that all stabilizers must commute with one another. Consider the three possible cases to confirm that the commutation relationships hold.\n", "\n", - "1. *The stabilizers do not intersect*: This is the trivial case as two stabilizers acting on completely different qubits will trivially commute.\n", + "1. *The stabilizers do not intersect*: This is the trivial case as two stabilizers acting on completely different qubits will trivially commute.\n", "\n", - "2. 
*Two stabilizers of the same sort intersect*: Considering the plaquette stabilizers only as the same logic holds for vertex stabilizers, notice that the only way for two different plaquette stabilizers to intersect is if they share one data qubit. That is, two green squares share an edge. In the example below, the $X_2$ operations cancel out and the commutation relationship holds for the other operators acting on different qubits.\n", + "2. *Two stabilizers of the same sort intersect*: Considering the plaquette stabilizers only as the same logic holds for vertex stabilizers, notice that the only way for two different plaquette stabilizers to intersect is if they share one data qubit. That is, two green squares share an edge. In the example below, the $X_2$ operations cancel out and the commutation relationship holds for the other operators acting on different qubits.\n", "\n", - " \n", + " \"Two\n", "\n", - "3. *A plaquette and vertex stabilizer intersect*: This is the most complex case. Notice, that this can only occur when a green square intersects a purple cross on two shared data qubits. When a $Z$ and an $X$ type operator act on the same qubit an anti-commutation relationship holds and a negative phase is added. However, if this occurs for a pair of operators acting on a second qubit, the anti-commutation relationships cancel and the entire stabilizer commutes.\n", + "3. *A plaquette and vertex stabilizer intersect*: This is the most complex case. Notice, that this can only occur when a green square intersects a purple cross on two shared data qubits. When a $Z$ and an $X$ type operator act on the same qubit an anti-commutation relationship holds and a negative phase is added. However, if this occurs for a pair of operators acting on a second qubit, the anti-commutation relationships cancel and the entire stabilizer commutes.\n", "\n", - "\n", + "\"A\n", "\n", + "---\n", "\n", - "## 6.3 Logical Operators\n", + "## 6.3. 
Logical Operators\n", "\n", - "In order to understand logical operators, it is necessary to consider the topology of the torus and specifically the sorts of loops that can be drawn on it. It turns out, that there are four sorts of loops that can occur on the torus, each as a distinct equivalence class. \n", + "In order to understand **logical operators**, it is necessary to consider the topology of the torus and specifically the sorts of loops that can be drawn on it. It turns out, that there are four sorts of loops that can occur on the torus, each as a distinct equivalence class.\n", "\n", "1. A trivial loop (yellow) is simply a closed loop on the surface of the torus.\n", "2. A Vertical loop (red and green) is a loop through the center hole.\n", @@ -183,51 +198,51 @@ "4. A Vertical + Horizontal loop combines the two, looping around the equator with a twist through the center hole.\n", "\n", "\n", - " \n", + " \"Four\n", "\n", - "Each sort of loop also manifest within the toric code and correspond to either stabilizers or logical operators. \n", + "Each sort of loop also manifests within the toric code and corresponds to either stabilizers or logical operators.\n", "\n", "\n", - "First consider trivial loops. In the section above, it was noted that the product of two overlapping plaquette (or vertex) stabilizers commute because the operation performed on the overlapping qubit cancels out. Another way to think about this, is the formation of a larger loop which is just another stabilizer!\n", + "First consider trivial loops. In the section above, it was noted that the product of two overlapping plaquette (or vertex) stabilizers commute because the operation performed on the overlapping qubit cancels out. 
Another way to think about this, is the formation of a larger loop which is just another stabilizer!\n", "\n", - " \n", + " \"Two\n", "\n", "\n", "One could continue to add plaquette stabilizers and expand the loop, or work backwards and cancel out the previous stabilizers to recover $\\ket{\\psi}$. This means that any closed loop on the surface is a stabilizer and in the same equivalence class with the original state.\n", "\n", - "This is not true for a vertical or horizintal loop (pictured below). There is no way to apply stabilizers to either state to recover $\\ket{\\psi}$ Therefore, such loops are in a different equivalence class. It turns out that a horizontal loop (if formed by consecutive $X$ operators) is $\\bar{X}_1$ and a vertical loop is $\\bar{X}_2$. \n", + "This is not true for a vertical or horizontal loop (pictured below). There is no way to apply stabilizers to either state to recover $\\ket{\\psi}$. Therefore, such loops are in a different equivalence class. It turns out that a horizontal loop (if formed by consecutive $X$ operators) is $\\bar{X}_1$ and a vertical loop is $\\bar{X}_2$.\n", "\n", - " \n", + " \"A\n", "\n", "Just as all stabilizers correspond to the same equivalence class, there are many ways to produce vertical and horizontal loops. 
Such loops do not need to be \"straight\" and can follow a windy path as pictured below.\n", "\n", - " \n", + " \"A\n", "\n", "\n", "It is also possible for two distinct loops to close along the same axis corresponding to $\\bar{X}_i$$\\bar{X}_i$, that is, the application of the logical operator twice.\n", "\n", - " \n", - " \n", - "Finally, if a vertical and horizontal loop close, the results is the same as both logical bitflip operators $\\bar{X}_1$$\\bar{X}_2$\n", + " \"Two\n", "\n", + "Finally, if a vertical and horizontal loop close, the result is the same as both logical bitflip operators $\\bar{X}_1$$\\bar{X}_2$.\n", "\n", - " \n", "\n", - "All of this logic is identical for the $Z$-type vertex stabilizer. The only difference is that $\\bar{Z}_1$ and $\\bar{Z}_2$ correspond to vertical and horizontal loops formed by data qubits on parallel faces of the grid (purple lines). \n", + " \"A\n", "\n", + "All of this logic is identical for the $Z$-type vertex stabilizer. The only difference is that $\\bar{Z}_1$ and $\\bar{Z}_2$ correspond to vertical and horizontal loops formed by data qubits on parallel faces of the grid (purple lines).\n", "\n", - " \n", "\n", + " \"Three\n", "\n", "\n", - " $\\bar{Z}_1$ and $\\bar{X}_1$ need to anticommute which is why a horizontal loop formed by green lines and vertical loops formed by purple lines operate on the same logical qubit. Anticommutation is presevered as a $Z$ and $X$ operator must both occur on an odd number of data qubits.\n", "\n", + " $\\bar{Z}_1$ and $\\bar{X}_1$ need to anticommute which is why a horizontal loop formed by green lines and vertical loops formed by purple lines operate on the same logical qubit. Anticommutation is preserved as a $Z$ and $X$ operator must both occur on an odd number of data qubits.\n", "\n", - " \n", "\n", + " \"Crossing\n", "\n", + "---\n", "\n", - "## 6.4 Implementing the toric code in CUDA-Q\n" + "## 6.4. 
Implementing the Toric Code in CUDA-Q" ] }, { @@ -235,21 +250,21 @@ "id": "1e33a422-f03f-4ced-a307-f9977993b03b", "metadata": {}, "source": [ - "
\n", - "

Exercise 1:

\n", - "

\n", - "You will now code the toric code in CUDA-Q. It is important to note that there are many nuances that arise when coding the toric code that have not been considered in the conceptual discussions above. For example, in the presence of errors, the order in which the stabilizer gates are applied in the encoding stage matters for the propagation of errors. Such considerations will be ignored for simplicity but are important to note regardless. \n", + "

\n", + "\n", + "**Exercise 1:**\n", "\n", - "In the cells below, you will perform the encoding for the toric code. First, calculate the indices of the stabilizers and print them to ensure all are correct. As the L=3 toric code is an [[18,2,3]] code, only 16 stabilizers are required to reinforce the constraints as all 9 vertex and plaquette stabilizers must produce a product of 1. However, it is OK to use all 18 stabilizers for clarity despite the redundancy as the extra two stabilizers do not add additional constraints.\n", + "You will now code the toric code in CUDA-Q. It is important to note that there are many nuances that arise when coding the toric code that have not been considered in the conceptual discussions above. For example, in the presence of errors, the order in which the stabilizer gates are applied in the encoding stage matters for the propagation of errors. Such considerations will be ignored for simplicity but are important to note regardless.\n", "\n", - "In the cell below, write code that produces the indices for each plaquette (catch $Z$ errors) and vertex (catch $X$ errors) stabilizers. Use the indexing below to match the solution key. Notice how there are 18 data and ancilla qubits. Note: Store the indicies in a single flattened list (one for each stabilizer type) so that they can be easily read into CUDA-Q kernels. \n", + "In the cells below, you will perform the encoding for the toric code. First, calculate the indices of the stabilizers and print them to ensure all are correct. As the L=3 toric code is an $[[18,2,3]]$ code, only 16 stabilizers are required to reinforce the constraints as all 9 vertex and plaquette stabilizers must produce a product of 1. 
However, it is OK to use all 18 stabilizers for clarity despite the redundancy as the extra two stabilizers do not add additional constraints.\n", + "\n", + "In the cell below, write code that produces the indices for each plaquette (catch $Z$ errors) and vertex (catch $X$ errors) stabilizers. Use the indexing below to match the solution key. Notice how there are 18 data and ancilla qubits. Note: Store the indices in a single flattened list (one for each stabilizer type) so that they can be easily read into CUDA-Q kernels.\n", "\n", "Complete the additional tasks in the following cells.\n", - "

\n", - "
\n", "\n", + "
\n", "\n", - " \n", + "\"Diagram\n", "\n", "\n", "Print the stabilizers and confirm your code works. Do they match what you would expect from the image above?" @@ -257,68 +272,39 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "id": "bc863822-f7c3-4348-a7b8-c5ca746dea36", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "plaq\n", - "9 0 10 3\n", - "10 1 11 4\n", - "11 2 9 5\n", - "12 3 13 6\n", - "13 4 14 7\n", - "14 5 12 8\n", - "15 6 16 0\n", - "16 7 17 1\n", - "17 8 15 2\n", - "vertex\n", - "2 15 0 9\n", - "0 16 1 10\n", - "1 17 2 11\n", - "5 9 3 12\n", - "3 10 4 13\n", - "4 11 5 14\n", - "8 12 6 15\n", - "6 13 7 16\n", - "7 14 8 17\n" - ] - } - ], + "outputs": [], "source": [ - "#TODO \n", - "#Write code here to generate the indicices of each stabilizer.\n", - "L=3\n", + "# EXERCISE 1\n", + "L = 3\n", "\n", "stabilizers_z = [] # Plaquette stabilizers that flag Z errors\n", "for row in range(L):\n", - " for col in range(L): \n", - " stabilizers_z.append(L*row + col+L**2) #left\n", - " stabilizers_z.append(L*row + col ) #top\n", - " stabilizers_z.append((L*row + (col+1)%L)+L**2) #right\n", - " stabilizers_z.append(L*((row + 1)%L) +col) #bottom\n", + " for col in range(L):\n", + " stabilizers_z.append(L * row + col + L**2) # left\n", + " stabilizers_z.append(L * row + col) # top\n", + " stabilizers_z.append((L * row + (col + 1) % L) + L**2) # right\n", + " stabilizers_z.append(L * ((row + 1) % L) + col) # bottom\n", "\n", "\n", - "stabilizers_x = [] # vertex stabilizers that flag X errors\n", + "stabilizers_x = [] # Vertex stabilizers that flag X errors\n", "for row in range(L):\n", - " for col in range(L): \n", - " stabilizers_x.append( (L*row + (col -1 )%L)) #left\n", - " stabilizers_x.append(L*((row-1)%L) + col + L**2) #top\n", - " stabilizers_x.append(L*row +col) #right\n", - " stabilizers_x.append(L*row +col + L**2 ) #bottom\n", - "\n", + " for col in range(L):\n", + " 
stabilizers_x.append(L * row + (col - 1) % L) # left\n", + " stabilizers_x.append(L * ((row - 1) % L) + col + L**2) # top\n", + " stabilizers_x.append(L * row + col) # right\n", + " stabilizers_x.append(L * row + col + L**2) # bottom\n", "\n", "\n", "print(\"plaq\")\n", "for x in range(L**2):\n", - " print(stabilizers_z[4*x], stabilizers_z[4*x+1],stabilizers_z[4*x+2],stabilizers_z[4*x+3])\n", + " print(stabilizers_z[4*x], stabilizers_z[4*x+1], stabilizers_z[4*x+2], stabilizers_z[4*x+3])\n", "\n", "print(\"vertex\")\n", "for x in range(L**2):\n", - " print(stabilizers_x[4*x], stabilizers_x[4*x+1],stabilizers_x[4*x+2],stabilizers_x[4*x+3])" + " print(stabilizers_x[4*x], stabilizers_x[4*x+1], stabilizers_x[4*x+2], stabilizers_x[4*x+3])" ] }, { @@ -326,80 +312,56 @@ "id": "fadf8e04-6cc8-4403-99d7-c7efc4ef19c2", "metadata": {}, "source": [ - "Now, build a register and an prepare the logical zero state. For the Steane and Shor codes in previous labs, you were provided with an encoding circuit. The encoding circuit prepared a state within the codespace such that all stabilizers were satisfied and returned a 0 when measured. For the toric code, this is impractical and instead, you can take advantage of the fact that if the stabilizers run on a register of all zeros, and then the ancilla qubits are measured, this will project the state into the codespace corresponding to the logical 0 state. \n", + "Now, build a register and prepare the logical zero state. For the Steane and Shor codes in previous labs, you were provided with an encoding circuit. The encoding circuit prepared a state within the codespace such that all stabilizers were satisfied and returned a 0 when measured. 
For the toric code, this is impractical and instead, you can take advantage of the fact that if the stabilizers run on a register of all zeros, and then the ancilla qubits are measured, this will project the state into the codespace corresponding to the logical 0 state.\n", "\n", - "This means if you run the stabilizer circuit and measure a second time, the same stabilizers will return the same measurements. The downside, is that a syndrome measurement of 000...000 no longer corresponds to a lack of errors. In fact, the baseline stabilizer measurements will be different each time the toric code is prepared this way! \n", + "This means if you run the stabilizer circuit and measure a second time, the same stabilizers will return the same measurements. The downside, is that a **syndrome** measurement of 000...000 no longer corresponds to a lack of errors. In fact, the baseline stabilizer measurements will be different each time the toric code is prepared this way!\n", "\n", - "So what does it mean to flag an error? In practice, the first result of stabilizer measurements is tracked, and effectively considered as the 000....000 result. Then, any deviation from this initial pattern indicates an error. So the result, 01000.... would imply the current QEC round resulted in a deviation from the expected syndrome of the initial state due to flagging of the second stabilizer.\n", + "So what does it mean to flag an error? In practice, the first result of stabilizer measurements is tracked, and effectively considered as the 000....000 result. Then, any deviation from this initial pattern indicates an error. So the result, 01000.... would imply the current QEC round resulted in a deviation from the expected syndrome of the initial state due to flagging of the second stabilizer.\n", "\n", - "Note: This exercise does not consider the possibility of errors occuring while preparing the state, but a true fault-tolerant preparation would have to do so. 
In that case, the procedure is run for many rounds and every round the syndromes do not change builds confidence that the state was prepared correctly.\n", + "Note: This exercise does not consider the possibility of errors occurring while preparing the state, but a true fault-tolerant preparation would have to do so. In that case, the procedure is run for many rounds and every round the syndromes do not change builds confidence that the state was prepared correctly.\n", "\n", - "In the cell below, build a kernel that runs the stabilizer checks, measures and saves the ancilla measurements, resets the ancilla qubits and repeats these steps at least three times. Print the results of a single shot of the kernel including each ancilla measurement round and the data qubit measurments. Make sure the stabilizer results stay the same. If they change, the preparation is not correct.\n", + "In the cell below, build a kernel that runs the stabilizer checks, measures and saves the ancilla measurements, resets the ancilla qubits and repeats these steps at least three times. Print the results of a single shot of the kernel including each ancilla measurement round and the data qubit measurements. Make sure the stabilizer results stay the same. If they change, the preparation is not correct.\n", "\n", - "Notice, the measurement results for the data qubits change despite the stabilizers not changing. In the next code block you will confirm that this is indeed the logical zero state." + "Notice, the measurement results for the data qubits change despite the stabilizers not changing. In the next code block you will confirm that this is indeed the logical zero state." 
] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "id": "d78db801-c78a-44b6-9c19-e169ad921292", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{ 110111010000000000:1 }\n", - "\n", - "{ 110111010000000000:1 }\n", - "\n", - "{ 110111010000000000:1 }\n", - "\n", - "{ 110111010000000000:1 }\n", - "\n", - "{ 100100000101011011:1 }\n", - "\n" - ] - } - ], + "outputs": [], "source": [ - "#TODO\n", - "#Write a kernel that prepares the toric code\n", + "# EXERCISE 1\n", "@cudaq.kernel\n", "def toric(plaq: list[int], vert: list[int]):\n", " \"\"\"\n", - " Function that takes a list of plaquette and vertex indices and constructs a kernel for an L=3 surface code.\n", - " The code should apply stabilizer checks and measure the ancilla qubits\n", - " \n", - " ----------\n", - " plaq: list[int]\n", - " indices for plaquette stabilizers. (see figure above)\n", - " vert: list[int]\n", - " indices for vertex stabilizers. (see figure above)\n", + " Constructs a kernel for an L=3 toric code that applies stabilizer checks,\n", + " measures ancilla qubits, and repeats for multiple rounds.\n", "\n", - " Returns\n", - " cudaq.kernel to prepare toric code.\n", - " -------\n", + " Parameters\n", + " ----------\n", + " plaq : list[int]\n", + " Indices for plaquette stabilizers.\n", + " vert : list[int]\n", + " Indices for vertex stabilizers.\n", " \"\"\"\n", "\n", - " \n", - " data = cudaq.qvector(18) #first horizontal then vertical\n", + " data = cudaq.qvector(18) # first horizontal then vertical\n", " anc = cudaq.qvector(18)\n", "\n", - "\n", - " for x in range(9): #loops over 9 plaquette stabilizers \n", + " for x in range(9): # loops over 9 plaquette stabilizers\n", " h(anc[x])\n", " for i in range(4): # loops over data qubit index in stabilizer\n", - " x.ctrl(anc[x], data[plaq[4*x+i]]) #\n", + " x.ctrl(anc[x], data[plaq[4*x+i]])\n", " h(anc[x])\n", "\n", - " for x in range(9):#loops over 9 vertex stabilizers 
\n", + " for x in range(9): # loops over 9 vertex stabilizers\n", " for i in range(4):\n", - " x.ctrl( data[vert[4*x+i]],anc[x+9])\n", + " x.ctrl(data[vert[4*x+i]], anc[x+9])\n", "\n", - "\n", - " anc0 = mz(anc) # saves round 0 measurements\n", - " reset(anc) # resets only ancilla measurements\n", + " anc0 = mz(anc) # saves round 0 measurements\n", + " reset(anc) # resets only ancilla measurements\n", "\n", " for x in range(9):\n", " h(anc[x])\n", @@ -409,9 +371,7 @@ "\n", " for x in range(9):\n", " for i in range(4):\n", - " x.ctrl( data[vert[4*x+i]],anc[x+9])\n", - "\n", - "\n", + " x.ctrl(data[vert[4*x+i]], anc[x+9])\n", "\n", " anc1 = mz(anc)\n", " reset(anc)\n", @@ -424,13 +384,11 @@ "\n", " for x in range(9):\n", " for i in range(4):\n", - " x.ctrl( data[vert[4*x+i]],anc[x+9])\n", - "\n", + " x.ctrl(data[vert[4*x+i]], anc[x+9])\n", "\n", " anc2 = mz(anc)\n", " reset(anc)\n", "\n", - "\n", " for x in range(9):\n", " h(anc[x])\n", " for i in range(4):\n", @@ -439,20 +397,17 @@ "\n", " for x in range(9):\n", " for i in range(4):\n", - " x.ctrl( data[vert[4*x+i]],anc[x+9])\n", - "\n", + " x.ctrl(data[vert[4*x+i]], anc[x+9])\n", "\n", " anc3 = mz(anc)\n", " reset(anc)\n", "\n", + " data_qubits = mz(data)\n", "\n", - " data_qubits=mz(data)\n", "\n", - "\n", - "# Sample the kernel one shot at a time and print the results from the intermediate measurements to confirm stabilizer checks do not change \n", - "cudaq.set_target('stim')\n", - "\n", - "results = cudaq.sample(toric,stabilizers_z, stabilizers_x,shots_count=1)\n", + "# Sample the kernel one shot at a time and print the results from the\n", + "# intermediate measurements to confirm stabilizer checks do not change\n", + "results = cudaq.sample(toric, stabilizers_z, stabilizers_x, shots_count=1)\n", "\n", "print(results.get_register_counts(\"anc0\"))\n", "print(results.get_register_counts(\"anc1\"))\n", @@ -466,56 +421,41 @@ "id": "24dbc831-1aba-483f-988f-824b5bb336a2", "metadata": {}, "source": [ - "Now, let's 
test your code to ensure that logical operators work as expected. First, copy the code above that prepares the logical 0 state. Then, run `cudaq.sample` and produce a dictionary of bistrings form measuring the data qubits. Write a function that loops through these results and sums a subset of specified bits mod 2. \n", + "Now, let's test your code to ensure that logical operators work as expected. First, copy the code above that prepares the logical 0 state. Then, run `cudaq.sample` and produce a dictionary of bitstrings from measuring the data qubits. Write a function that loops through these results and sums a subset of specified bits mod 2.\n", "\n", - "The next ingredient is to define the logical operators for $\\bar{X}_1$, $\\bar{X}_1$, $\\bar{Z}_1$, and $\\bar{Z}_2$. \n", + "The next ingredient is to define the logical operators for $\\bar{X}_1$, $\\bar{X}_2$, $\\bar{Z}_1$, and $\\bar{Z}_2$.\n", "\n", - "Remember, these need to be horizontal and vertical loops across the torus. use the figures in the previous section to help identify a set of correct data qubits which correspond to valid logical operators. Remember, there is more than one right selection here. These logical operator will also inform which data qubits we measure (and sum mod 2) to produce the logical observable. The logical operators and obervables do not need to be the exact same loop, but just in the same equivilance class.\n", + "Remember, these need to be horizontal and vertical loops across the torus. Use the figures in the previous section to help identify a set of correct data qubits which correspond to valid logical operators. Remember, there is more than one right selection here. These logical operators will also inform which data qubits we measure (and sum mod 2) to produce the logical observable. 
The logical operators and observables do not need to be the exact same loop, but just in the same equivalence class.\n", "\n", - "Also remember that just like measuring in the $Z$ basis of a single qubit provides the $\\ket{0}$ or $\\ket{1}$ state, the $Z$ logical observables will produce the logical 0 and 1 states. Confrim the results below. If you take 1000 shots, you should see both $\\bar{Z}_1$ and $\\bar{Z}_2$ return 0 100 percent of the time. Likewise, the $X$ observable should be in a 50/50 superposition. \n", + "Also remember that just like measuring in the $Z$ basis of a single qubit provides the $\\ket{0}$ or $\\ket{1}$ state, the $Z$ logical observables will produce the logical 0 and 1 states. Confirm the results below. If you take 1000 shots, you should see both $\\bar{Z}_1$ and $\\bar{Z}_2$ return 0 100 percent of the time. Likewise, the $X$ observable should be in a 50/50 superposition.\n", "\n", - "Finally, try applying a logical bitflip gate. Make sure you can successfully apply one to both logical qubits. The solution has these gates commented out if you get stuck. " + "Finally, try applying a logical bitflip gate. Make sure you can successfully apply one to both logical qubits. The solution has these gates commented out if you get stuck." 
] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "id": "1f37fe87-a19e-4637-9a74-6a343dc3d54e", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Result of measuring X1:\n", - "(0, 1)\n", - "Result of measuring X2:\n", - "(0, 1)\n", - "Result of measuring Z1:\n", - "(1, 0)\n", - "Result of measuring Z2:\n", - "(1, 0)\n" - ] - } - ], + "outputs": [], "source": [ - "#TODO\n", - "#Write code to measure the logical observables and perform logical gates\n", - "def count_summed_bits_at_indices(bit_dict, indices):\n", + "# EXERCISE 1\n", + "def count_summed_bits_at_indices(bit_dict: dict[str, int], indices: list[int]) -> tuple[int, int]:\n", " \"\"\"\n", - " Function that computes logical 0 or 1 state from toric code.\n", - " \n", + " Computes logical 0 or 1 state from toric code measurement outcomes.\n", + "\n", + " Parameters\n", " ----------\n", - " bit_dict: dictionary\n", - " measurement outcome obtained from cudaq.sample\n", - " indices: list[int]\n", - " list specifying which bits to sum in order to measure logical Z state.\n", + " bit_dict : dict[str, int]\n", + " Measurement outcome obtained from cudaq.sample.\n", + " indices : list[int]\n", + " List specifying which bits to sum in order to measure logical Z state.\n", "\n", " Returns\n", - " the number of logical 0 and logical 1 measurements\n", " -------\n", + " tuple[int, int]\n", + " The number of logical 0 and logical 1 measurements.\n", " \"\"\"\n", - "\n", " zero_count = 0\n", " one_count = 0\n", "\n", @@ -531,12 +471,11 @@ " return zero_count, one_count\n", "\n", "\n", - "logical_x1 = [0,1,2] # L to R horizontal qubits\n", - "logical_x2 = [11,14,17] # T to B vertical qubits\n", + "logical_x1 = [0, 1, 2] # L to R horizontal qubits\n", + "logical_x2 = [11, 14, 17] # T to B vertical qubits\n", "\n", - "\n", - "logical_z1 = [9,10,11] # L to R vertical qubits\n", - "logical_z2 = [1,4,7] # T to B horizontal qubits\n", + "logical_z1 = 
[9, 10, 11] # L to R vertical qubits\n", + "logical_z2 = [1, 4, 7] # T to B horizontal qubits\n", "\n", "print(\"Result of measuring X1:\")\n", "print(count_summed_bits_at_indices(results, logical_x1))\n", @@ -554,50 +493,54 @@ "id": "e159b585-6705-4ac7-a7c5-8e753aec15ba", "metadata": {}, "source": [ - "## 6.5 Decoding the Toric Code\n", + "---\n", + "\n", + "## 6.5. Decoding the Toric Code\n", "\n", - "Now that you know how to construct the toric code, it is important to think about errors and syndrome decoding so that the code can be put to use. \n", + "Now that you know how to construct the toric code, it is important to think about errors and syndrome decoding so that the code can be put to use.\n", "\n", - "Note that every data qubit (edge) is adjacent to exactly two faces and two vertices, hence, any single error (denoted by red text on a data quit) will produce a syndrome where two stabilizers flag (denoted as yellow squares for plaquette and orange circles for vetex stabilizers.) In the figure below, notice how easy it is to spot the localized errors, a key benefit of topological codes.\n", + "Note that every data qubit (edge) is adjacent to exactly two faces and two vertices, hence, any single error (denoted by red text on a data qubit) will produce a syndrome where two stabilizers flag (denoted as yellow squares for plaquette and orange circles for vertex stabilizers.) In the figure below, notice how easy it is to spot the localized errors, a key benefit of topological codes.\n", "\n", - " \n", + " \"Toric\n", "\n", "\n", - "What gets complicated is the presence of multiple errors. Considering only $X$-type errors, Notice what happens if two adjacent errors occur. Now, the vertex stabilizer bordering both errors no longer fires, while the vertex diagonal to the first stabilizer now fires.\n", + "What gets complicated is the presence of multiple errors. Considering only $X$-type errors, notice what happens if two adjacent errors occur. 
Now, the vertex stabilizer bordering both errors no longer fires, while the vertex diagonal to the first stabilizer now fires.\n", "\n", - " \n", + " \"Two\n", "\n", "This pattern can continue, placing the highlighted stabilizers in any position.\n", "\n", - " \n", + " \"A\n", "\n", - "Another way to say this is that a pair of stabilizers is connected by a string of edges with errors. So, it is very natural to understand which stabilizers fire given the error pattern. However, in practice, you start with the syndrome results and must infer whihch errors occured occured with a decoder.\n", + "Another way to say this is that a pair of stabilizers is connected by a string of edges with errors. So, it is very natural to understand which stabilizers fire given the error pattern. However, in practice, you start with the syndrome results and must infer which errors occurred with a decoder.\n", "\n", - "There are many ways to go about this, but we will focus on a traditional technique called **minimum weight perfect matching (MWPM)**. \n", + "There are many ways to go about this, but we will focus on a traditional technique called **minimum weight perfect matching (MWPM)**.\n", "\n", - "One of the first principles of MWPM has to do with the minimum weight. Consider a simple case where two stabilizers fire on opposite diagonals of a square in the grid. It is perfectly valid for any of the below error patterns to cause this syndrome. In fact, there are many other potential error patterns not shown. \n", + "One of the first principles of MWPM has to do with the minimum weight. Consider a simple case where two stabilizers fire on opposite diagonals of a square in the grid. It is perfectly valid for any of the below error patterns to cause this syndrome. 
In fact, there are many other potential error patterns not shown.\n", "\n", - " \n", + " \"Four\n", "\n", "\n", - "Assuming errors are independent of one another, then the most likely error pattern is the one with the fewest number of errors, meaning pattern 1 or 2. If X's (blue) are applied as fixes to correct the errors, there are two minimum weight solutions depending on selection of pattern 1 or 2 as the error pattern. It turns out that in this case, it does not matter which one is chosen. If pattern 1 is the true error distribution and $X$ operations are applied to the two erroneous qubits, the errors are fixed. If instead $X$'s are applied to the other half of the square, the errors are not fixed, but the fixes combined with the errors form a stabilizer and result in an overall operation that will obtain the original state either way!\n", + "Assuming errors are independent of one another, then the most likely error pattern is the one with the fewest number of errors, meaning pattern 1 or 2. If X's (blue) are applied as fixes to correct the errors, there are two minimum weight solutions depending on selection of pattern 1 or 2 as the error pattern. It turns out that in this case, it does not matter which one is chosen. If pattern 1 is the true error distribution and $X$ operations are applied to the two erroneous qubits, the errors are fixed. If instead $X$'s are applied to the other half of the square, the errors are not fixed, but the fixes combined with the errors form a stabilizer and result in an overall operation that will obtain the original state either way!\n", "\n", - "\n", + "\"Two\n", "\n", "If pattern 3, though much less likely, were the true error distribution, then applying the same MWPM corrections would still produce a stabilizer loop and ameliorate all the errors.\n", "\n", - "\n", + "\"A\n", + "\n", + "This would not be the case for pattern 4. If the same corrections are applied, a stabilizer loop is not formed, nor are the errors corrected. 
Instead, a loop corresponding to a logical operation is closed. This means, the fixes would perform an unexpected logical bitflip and result in a logical error, the worst case scenario. This is why the toric code has a distance of 5, as you need at least 5 errors to complete a vertical or horizontal loop and produce a logical error. Increasing the grid size makes logical errors less likely, but comes at the cost of $L^2$ scaling data qubit overhead.\n", "\n", - "This would not be the case for pattern 4. If the same corrections are applied, a stabilizer loop is not formed, nor are the errors corrected. Instead, a loop corresponding to a logical operation is closed. This means, the fixes would perform an unexpected logical bitflip and result in a logical error, the worst case scenario. This is why the toric code has a distance of 5, as you need at least 5 errors to complete a vertical or horizontal loop and produce a logical error. Increasing the grid size makes logical errors less likley, but comes at the cost of $L^2$ scaling data qubit overhead.\n", + "\"An\n", "\n", - "\n", + "The perfect matching part of MWPM concerns the fact that a valid syndrome can be composed of any even number of stabilizers firing. So, finding the MWPM solution requires solving a larger minimization problem to consider all pairs of stabilizers, and the minimum number of errors required to connect each stabilizer in each pair. This means considering all possible pairings of the stabilizers and the shortest paths for each pairing, a much more complex optimization problem.\n", "\n", - "The perfect matching part of MWPM concerns that fact that a valid syndrome can be composed of any even number of stabilizers firing. So, finding the MWPM solution requires solving a larger minimization problem to consider all pairs of stabilizers, and the minimium number of errors required to connect each stabilizer in each pair. 
This means considering all possible parings of the stabilizers and the shortest paths for each pairing, a much more complex optimization problem.\n", "\n", + "\"Visualization\n", "\n", - "\n", + "---\n", "\n", - "## 6.6 MQPM Decoding" + "## 6.6. MWPM Decoding" ] }, { @@ -605,100 +548,68 @@ "id": "27fed2d3-e02c-431e-a9a6-dd32fdd5be5a", "metadata": {}, "source": [ + "
\n", "\n", - "
\n", - "

Exercise 2:

\n", - "

\n", - "Use the code provided to solve the MWPM problem for the two syndrome patterns below. Determine in each case if a logical error happens or not? The `mwpm_decoder_toric` function is provided for you below. It will print the graph you are trying to optimize and return the solution. You need to fill in the function called `toric_distance` which returns the Manhattan grid distance between any two syndromes on the torus. \n", + "**Exercise 2:**\n", "\n", - "\n", + "Use the code provided to solve the MWPM problem for the two syndrome patterns below. Determine in each case if a logical error happens or not? The `mwpm_decoder_toric` function is provided for you below. It will print the graph you are trying to optimize and return the solution. You need to fill in the function called `toric_distance` which returns the Manhattan grid distance between any two syndromes on the torus.\n", "\n", - "\n", - "

\n", - "
\n" + "\"First\n", + "\n", + "\"Second\n", + "\n", + "
" ] }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "id": "311ffa10-8b46-4bb3-9612-34b70129cc26", "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAApQAAAIKCAYAAACdo98PAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjgsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvwVt1zgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAjSpJREFUeJzt3XlYVNX/B/D3DCCIIC4gKqIphQuuua/oNxWXtEXFXHItzVDLrUzLJcs0l8xs03JLLXcTTXPXzBRFyw13BSVXVGSRZWbO7w9/QyADzDDLXeb9ep6eZLhz7xkd4M353M85GiGEABERERFRIWmlHgARERERKRsDJRERERFZhYGSiIiIiKzCQElEREREVmGgJCIiIiKrMFASERERkVUYKImIiIjIKgyURERERGQVBkoiIiIisgoDJREAjUaDKVOmSD0Mi02ZMgUajUbqYTjMtWvXoNFoMHv2bKvOs2bNGpQqVQrJyclWnUep7xsyTQ7/npmZmQgMDMQ333wj6TiILMVASbKm0WjM+m/fvn1SDzWHtLQ0fPHFF2jcuDF8fHzg4eGB4OBgDB8+HBcuXJB6eAW6evUqhg8fjuDgYHh6esLT0xM1atRAREQETp48KfXwrKLX6zF58mSMGDECXl5eWY8/88wzWe8nrVaLEiVKoFatWhgyZAiOHDlis+uvWrUK8+bNs9n56Il9+/bl+z3il19+kXqIZnFzc8Po0aPx6aefIi0tTerhEJnNVeoBEOXnp59+yvHx8uXLsXPnzlyPV69e3arrPH78GK6utvlyuHfvHjp06IDo6Gi8+OKL6N27N7y8vHD+/Hn88ssvWLhwITIyMmxyLXvYsmULevbsCVdXV/Tp0wd16tSBVqvFuXPnsGHDBnz77be4evUqKlWqJPVQCyUyMhLnz5/HkCFDcn2ubt26GDNmDAAgKSkJMTExWLt2LRYtWoRRo0Zh7ty5OY4vzPtm1apVOH36NN59991CvwbK28iRI9GwYcNcjzdt2lSC0RTOwIEDMX78eKxatQqDBg2SejhEZmGgJFnr27dvjo8PHz6MnTt35nq8MAwGAzIyMuDh4QEPDw+rz2c0YMAAnDhxAuvWrUO3bt1yfG7atGmYOHGiza5la5cvX8Zrr72GSpUqYffu3ShXrlyOz8+cORPffPMNtNr8ixspKSkoVqyYPYdaaEuWLEHz5s0REBCQ63MBAQG53lszZ85E79698cUXX+C5557DsGHDsj5ny/cN2UbLli3RvXt3qYdhlRIlSqB9+/ZYunQpAyUpBkvepHgpKSkYM2YMAgMD4e7ujqpVq2L27NkQQuQ4TqPRYPjw4Vi5ciVCQkLg7u6O7du3Z33u6Xun4uPjMXjwYJQvXx7u7u6oXLkyhg0blu/s4pEjR7B161YMHjw4V5gEAHd391z3/+3ZswctW7ZEsWLFUKJECbz00kuIiYnJ9dyDBw+iYcOG8PDwQFBQEL7//vs8x7FixQrUr18fRYsWRalSpfDaa6/h+vXreR5v9PnnnyMlJQVLlizJFSYBwNXVFSNHjkRgYGDWYwMGDICXlxcuX76MTp06wdvbG3369AEA/PHHH+jRowcqVqwId3d3BAYGYtSoUXj8+HGO8xrPceXKFYSFhaFYsWIoX748Pv7441z/jkYLFy5EUFAQ3N3d0bBhQxw9erTA15eWlobt27ejbdu2BR5rVLRoUfz0008oVaoUPv300xzjefp9k5SUhHfffRfPPPMM3N3dUaZMGbRr1w7Hjx8HALRu3Rpbt25FbGxsVin2mWeeAQBkZGRg0qRJqF+/Pnx8fF
CsWDG0bNkSe/fuzTGe7PeRmvN3cO7cOYSHh8PPzw9FixZF1apVc/1SEx8fj0GDBsHf3x/u7u4ICQnB4sWLc53rq6++QkhICDw9PVGyZEk0aNAAq1atKvDv8M6dOxg8eDD8/f3h4eGBOnXqYNmyZVa9Lmukp6dj1KhR8PPzg7e3N7p27YobN26YPHbfvn1o0KBBjq+7vO5dNufr7uLFi+jWrRvKli0LDw8PVKhQAa+99hoSExNzHNeuXTscPHgQ9+/ft90LJ7IjzlCSogkh0LVrV+zduxeDBw9G3bp18fvvv2PcuHGIj4/HF198keP4PXv2YM2aNRg+fDh8fX2zfpg/7d9//0WjRo3w8OFDDBkyBNWqVUN8fDzWrVuH1NRUFClSxOTzNm/eDAB4/fXXzRr/rl270LFjR1SpUgVTpkzB48eP8dVXX6F58+Y4fvx41vhOnTqF9u3bw8/PD1OmTIFOp8PkyZPh7++f65yffvopPvroI4SHh+ONN97A3bt38dVXX6FVq1Y4ceIESpQoked4tmzZgmeffRaNGzc2a/xGOp0OYWFhaNGiBWbPng1PT08AwNq1a5Gamophw4ahdOnSiIqKwldffYUbN25g7dq1Oc6h1+vRoUMHNGnSBJ9//jm2b9+OyZMnQ6fT4eOPP85x7KpVq5CUlIShQ4dCo9Hg888/x6uvvoorV67Azc0tz3FGR0cjIyMDzz//vEWvz8vLC6+88gp+/PFHnD17FiEhISaPe+utt7Bu3ToMHz4cNWrUQEJCAg4ePIiYmBg8//zzmDhxIhITE3Hjxo2s96bxPs5Hjx7hhx9+QK9evfDmm28iKSkJP/74I8LCwhAVFYW6deta/Hdw8uRJtGzZEm5ubhgyZAieeeYZXL58GZGRkfj0008BALdv30aTJk2yfuHy8/PDtm3bMHjwYDx69CirNL9o0SKMHDkS3bt3xzvvvIO0tDScPHkSR44cQe/evfP8u3v8+DFat26NS5cuYfjw4ahcuTLWrl2LAQMG4OHDh3jnnXcsfl35SUpKwr1793I9Xrp06awQ+MYbb2DFihXo3bs3mjVrhj179qBz5865nnPixAl06NAB5cqVw9SpU6HX6/Hxxx/Dz88v17HmfN1lZGQgLCwM6enpGDFiBMqWLYv4+Hhs2bIFDx8+hI+PT9b56tevDyEEDh06hBdffLHA100kOUGkIBERESL723bTpk0CgPjkk09yHNe9e3eh0WjEpUuXsh4DILRarThz5kyu8wIQkydPzvq4X79+QqvViqNHj+Y61mAw5Dm+V155RQAQDx48MOv11K1bV5QpU0YkJCRkPfbPP/8IrVYr+vXrl/XYyy+/LDw8PERsbGzWY2fPnhUuLi45/j6uXbsmXFxcxKeffprjOqdOnRKurq65Hs8uMTFRABAvv/xyrs89ePBA3L17N+u/1NTUrM/1799fABDjx4/P9bzsxxl99tlnQqPR5HgtxnOMGDEi6zGDwSA6d+4sihQpIu7evSuEEOLq1asCgChdurS4f/9+1rG//vqrACAiIyPzfH1CCPHDDz8IAOLUqVO5PlepUiXRuXPnPJ/7xRdfCADi119/zXrs6feNj4+PiIiIyHcMnTt3FpUqVcr1uE6nE+np6Tkee/DggfD39xeDBg3KesySv4NWrVoJb2/vHH/XQuR8Dw8ePFiUK1dO3Lt3L8cxr732mvDx8cn6N3zppZdESEhIvq/NlHnz5gkAYsWKFVmPZWRkiKZNmwovLy/x6NEji1+XKXv37hUA8vzv5s2bQggh/v77bwFAvP322zme37t371z/nl26dBGenp4iPj4+67GLFy8KV1fXQn3dnThxQgAQa9euLfDv7d9//xUAxMyZMws8lkgOWPImRfvtt9/g4uKCkSNH5nh8zJgxEEJg27ZtOR4PDQ1FjRo18j2nwWDApk2b0KVLFzRo0CDX5/NbpufRo0cAAG9v7wLHfvPmTfz9998YMGAASpUqlfV47dq10a
5dO/z2228Anszc/f7773j55ZdRsWLFrOOqV6+OsLCwHOfcsGEDDAYDwsPDce/evaz/ypYti+eeey5X+dTU2LN3Phu1bt0afn5+Wf99/fXXuY7Jfm+hUdGiRbP+nJKSgnv37qFZs2YQQuDEiRO5jh8+fHjWn40zZhkZGdi1a1eO43r27ImSJUtmfdyyZUsAwJUrV/J8fQCQkJAAADmeay7j30tSUlKex5QoUQJHjhzBv//+a/H5XVxcsma+DQYD7t+/D51OhwYNGmSVzLMr6O/g7t27OHDgAAYNGpTjfQP89x4WQmD9+vXo0qULhBA53jNhYWFITEzMunaJEiVw48YNi8vPv/32G8qWLYtevXplPebm5oaRI0ciOTkZ+/fvt+h1FWTSpEnYuXNnrv+MX2PGr6unv2c83SSl1+uxa9cuvPzyyyhfvnzW488++yw6duyY41hzv+6MM5C///47UlNT830dxr8DU7OtRHLEkjcpWmxsLMqXL58rwBm7vmNjY3M8Xrly5QLPeffuXTx69Ag1a9a0eDzFixcH8CR05Fdazj62qlWr5vpc9erV8fvvvyMlJQVJSUl4/PgxnnvuuVzHVa1aNesHJPDk/iwhhMljAeRbMjT+HZpam/H7779HUlISbt++bbIhytXVFRUqVMj1eFxcHCZNmoTNmzfjwYMHOT739D1jWq0WVapUyfFYcHAwgCf312X3dEAy/vB9+hp5EXncl5kf499Lfr8sfP755+jfvz8CAwNRv359dOrUCf369cv1uvKybNkyzJkzB+fOnUNmZmbW46betwX9HRgDWH7v47t37+Lhw4dYuHAhFi5caPKYO3fuAADef/997Nq1C40aNcKzzz6L9u3bo3fv3mjevHm+ryk2NhbPPfdcrkauvL5Grf23rVWrVr73yMbGxkKr1SIoKCjH409/Hd65cwePHz/Gs88+m+scTz9m7tdd5cqVMXr0aMydOxcrV65Ey5Yt0bVrV/Tt2zdHuRv47z3qTOvMkrIxUJJTyT5jZg/VqlUD8OSeR+PMiiMZDAZoNBps27YNLi4uuT5vavbRyMfHB+XKlcPp06dzfc54T+XTwc7I3d09V2DQ6/Vo164d7t+/j/fffx/VqlVDsWLFEB8fjwEDBsBgMFjwynIy9dqAgoNi6dKlATwJJ6YCcH6Mfy+mAoZReHg4WrZsiY0bN2LHjh2YNWsWZs6ciQ0bNuSa1XraihUrMGDAALz88ssYN24cypQpAxcXF3z22We4fPlyruML+3eQnfHfoG/fvujfv7/JY2rXrg3gSQA8f/48tmzZgu3bt2P9+vX45ptvMGnSJEydOtXsaxbEFq/L0Sz5upszZw4GDBiAX3/9FTt27MDIkSPx2Wef4fDhwznek8YA7evra/8XQGQDDJSkaJUqVcKuXbuQlJSUY+bo3LlzWZ+3lJ+fH4oXL24yWBWkS5cu+Oyzz7BixYoCA6VxbOfPn8/1uXPnzsHX1xfFihWDh4cHihYtiosXL+Y67unnBgUFQQiBypUrZ83uWaJz58744YcfEBUVhUaNGln8/OxOnTqFCxcuYNmyZejXr1/W4zt37jR5vMFgwJUrV3KM27gIfF7NU5YyBv6rV6+iVq1aZj8vOTkZGzduRGBgYIFrnpYrVw5vv/023n77bdy5cwfPP/88Pv3006xAmdeM07p161ClShVs2LAhxzGTJ082e5zZGWdF83sfG7uc9Xq9WZ3vxYoVQ8+ePdGzZ09kZGTg1VdfxaeffooPPvggzyWUKlWqhJMnT8JgMOT4pcOar1FrVKpUCQaDAZcvX84xK/n011KZMmXg4eGBS5cu5TrH049Z+nVXq1Yt1KpVCx9++CEOHTqE5s2b47vvvsMnn3ySdczVq1cBWL/GLpGj8B5KUrROnTpBr9djwYIFOR7/4osvoNFoCpwVMkWr1eLll19GZGQkjh07luvz+c2UNG3aFB06dMAPP/yATZs25fp8RkYGxo4dC+BJ8Khbty
6WLVuGhw8fZh1z+vRp7NixA506dQLwZMYmLCwMmzZtQlxcXNZxMTEx+P3333Oc/9VXX4WLiwumTp2aa5xCiKx7CPPy3nvvwdPTE4MGDcLt27cteu1PM87UZH+OEAJffvllns/J/u8ohMCCBQvg5uaGF154wezr5qd+/fooUqSIyX/XvDx+/Bivv/467t+/j4kTJ+YZCPV6fa4yfpkyZVC+fHmkp6dnPVasWLFcxwGm/76OHDmCv/76y+yxZufn54dWrVph8eLFOd432a/h4uKCbt26Yf369SaD5927d7P+/PR7p0iRIqhRowaEEDnK80/r1KkTbt26hdWrV2c9ptPp8NVXX8HLywuhoaGFen2FZfyeMH/+/ByPP717kYuLC9q2bYtNmzbluCf20qVLue7NNvfr7tGjR9DpdDk+X6tWLWi12hzvEeDJigQajUZRC7KTc+MMJSlaly5d0KZNG0ycOBHXrl1DnTp1sGPHDvz666949913c90nZa7p06djx44dCA0NxZAhQ1C9enXcvHkTa9euxcGDB/O9P3L58uVo3749Xn31VXTp0gUvvPACihUrhosXL+KXX37BzZs3s9ainDVrFjp27IimTZti8ODBWcsG+fj45FjfcOrUqdi+fTtatmyJt99+O+sHckhISI6tEIOCgvDJJ5/ggw8+wLVr1/Dyyy/D29sbV69excaNGzFkyJCsQGvKc889h1WrVqFXr16oWrVq1k45QghcvXoVq1atglarNatcXK1aNQQFBWHs2LGIj49H8eLFsX79+jzvhfPw8MD27dvRv39/NG7cGNu2bcPWrVsxYcIEk8u0FIaHhwfat2+PXbt25VqKCHiyHuOKFSsAPJmVPHv2LNauXYtbt25hzJgxGDp0aJ7nTkpKQoUKFdC9e3fUqVMHXl5e2LVrF44ePYo5c+ZkHVe/fn2sXr0ao0ePRsOGDeHl5YUuXbrgxRdfxIYNG/DKK6+gc+fOuHr1Kr777jvUqFGj0HuOz58/Hy1atMDzzz+PIUOGoHLlyrh27Rq2bt2Kv//+GwAwY8YM7N27F40bN8abb76JGjVq4P79+zh+/Dh27dqVtQ5i+/btUbZsWTRv3hz+/v6IiYnBggUL0Llz53zvKx0yZAi+//57DBgwANHR0XjmmWewbt06/Pnnn5g3b55ZDWyW+OOPP0xuWVi7dm3Url0bdevWRa9evfDNN98gMTERzZo1w+7du03ORE6ZMgU7duxA8+bNMWzYsKxfXmvWrJn19weY/3W3Z88eDB8+HD169EBwcDB0Oh1++umnrGCf3c6dO9G8efOs2zSIZM9R7eREtvD0skFCCJGUlCRGjRolypcvL9zc3MRzzz0nZs2alWt5HwB5LumCp5YLEUKI2NhY0a9fP+Hn5yfc3d1FlSpVRERERK6lXUxJTU0Vs2fPFg0bNhReXl6iSJEi4rnnnhMjRozIsZSREELs2rVLNG/eXBQtWlQUL15cdOnSRZw9ezbXOffv3y/q168vihQpIqpUqSK+++47MXny5Fx/H0IIsX79etGiRQtRrFgxUaxYMVGtWjUREREhzp8/X+DYhRDi0qVLYtiwYeLZZ58VHh4eomjRoqJatWrirbfeEn///XeOY/v37y+KFStm8jxnz54Vbdu2FV5eXsLX11e8+eab4p9//hEAxJIlS3Kd4/Lly6J9+/bC09NT+Pv7i8mTJwu9Xp91nHFpmVmzZuW6lql/Q1M2bNggNBqNiIuLy/F4pUqVspaY0Wg0onjx4iIkJES8+eab4siRIybPlf2a6enpYty4caJOnTrC29tbFCtWTNSpU0d88803OZ6TnJwsevfuLUqUKCEAZC0hZDAYxPTp00WlSpWEu7u7qFevntiyZYvo379/jmWGLP07OH36tHjllVdEiRIlhIeHh6hatar46KOPchxz+/ZtERERIQIDA4Wbm5soW7aseOGFF8TChQuzjvn+++9Fq1atROnSpYW7u7sICgoS48aNE4
mJifn9dWedf+DAgcLX11cUKVJE1KpVK8e/f2Fe19MKWjYo+/MfP34sRo4cKUqXLi2KFSsmunTpIq5fv27yOrt37xb16tUTRYoUEUFBQeKHH34QY8aMER4eHrnGUNDX3ZUrV8SgQYNEUFCQ8PDwEKVKlRJt2rQRu3btynGehw8fiiJFiogffvgh39dMJCcaIWR8pzMROYUBAwZg3bp1hZ6Js4Rer0eNGjUQHh6OadOm2f16pD4vv/wyzpw5Y/K+ZluYN28ePv/8c1y+fNnujYREtsJ7KInIqbi4uODjjz/G119/7ZAAS8r29DahFy9exG+//YbWrVvb5XqZmZmYO3cuPvzwQ4ZJUhTOUBKR5Bw5Q0lkiXLlymHAgAGoUqUKYmNj8e233yI9PR0nTpzIc91JImfEphwiIqI8dOjQAT///DNu3boFd3d3NG3aFNOnT2eYJHoKZyiJiIiIyCq8h5KIiIiIrMJASURERERWYaAkIiIiIqswUBIRERGRVRgoiYiIiMgqDJREREREZBUGSiIiIiKyCgMlEREREVmFgZKIiIiIrMJASURERERWYaAkIiIiIqswUBIRERGRVRgoiYiIiMgqDJREREREZBUGSiIiIiKyCgMlEREREVmFgZKIiIiIrMJASURERERWYaAkIiIiIqswUBIRERGRVRgoiYiIiMgqDJREREREZBUGSiIiIiKyCgMlEREREVmFgZKIiIiIrMJASURERERWYaAkIiIiIqswUBIRERGRVRgoiYiIiMgqrlIPQAqZegPO3UrCqfhEnI5PxJ2kdGTo9Cji6oIy3u6oGeCDWgE+qFbWG24uzNxERERE+dEIIYTUg3CUGw9SsSoqDiuPxCHxcSYAwFWrgc7w319B9o99irqhT+OK6N2oIiqU9JRkzERERERy5xSB8lFaJqZvjcHqY9eh0QAGC16xVgMIAD3rB2Ji5+rw9nCz2ziJiIiIlEj1gfLAhbsYs/YfJKSkWxQkn6bVAL5e7pjdvQ5aBfvZboBERERECqfqQLns0DVMjjwDrYWzknkxnufjriHo1/QZ609IREREpAKqDZTL/7qGSZvP2O38DJVERERET6iyhfnAhbt2DZMAMGnzGRy4cNeu1yAiIiJSAtUFykdpmRiz9h9oNfa9jlYDjF33D5LSMu17ISIiIiKZU906lNO3xhTYgJNx5xoeHVmH9FuXoE9+AJGZBq17MRQp8wy8ardHsZDWBV7HIIB7yen49LcYzHi1tu1eABEREZHCqGqG8vqDVKw+dr3ABpyMO1eRcmYfdAk3INJTAIMehsePkBZ7EvciZyPxrzVmXc8ggNXHruPGg1QbjJ6IiIhImVQVKH+OioPGjFK3S1EveNUJQ+kXx6DMa5/A9+XxcA+olvX5pGORZl9T+//XJSIiInJWqil5Z+oNWHkkzqzlgYoGNUTRoIY5HnMrWR43l4wEABgyHpt9Xb0AVhyJw7ttg7lNIxERETkl1SSgc7eSsrZTtIQQBuiSEpD097asxzwq1rLoHImPM3H+VpLF1yYiIiJSA9XMUJ6KT7T4OTeXj0HGv+ezPaJB0aAGKN3pnUJdv2aAj8XPIyIiIlI61cxQno5PhKu1awVpNIDWBbBwrXdXraZQgZaIiIhIDVQzQ3knKR06C/dXLN1hOAxpydA9uofkE78hPT4Gjy8exp2kBJQb8IXZ59EZBO4mp1s6ZCIiIiJVUE2gzNDpLX5OkTKVs/7sWbUpbnzZG0KXgYxbF5F5Px5upQLMPld6puXXJyIiIlID1ZS8i7i6mH2sITOv2cT/SuaGtGSLru/uZv71iYiIiNRENTOUZbzd4arVmFX2vrVsFIqUrwqPCjXgUtwPhtREJB3fCqF7EjQ1ru5wKx1o/sUNOvz91wHM/Hc36tevj/r166NkyZKFfSlEREREiqKaQFkzwAerzFxg3JCRhpSTO5FycqfJz5f83yBo3T3Nv7jGBfq7V/HpimVISnqyfFCVKlWywiVDJh
EREamZRggLW5pl6lR8IrosOGjWsY+iI/H4UhQy712HPjURgICLV2m4B1SDd72O8AisafH1twxvgRrlvHHx4kVER0cjOjoax44dw4kTJxgyiYiISNVUEygz9QY0+HRXoRY3t5ZPUTccm9jW5E45BoOBIZOIiIhUTTWBEgA+//0cvtt/2aztF23FRQO8FRqEcWHVCj74/zFkEhERkZqoKlDeeJCKlp/vhSNfkAbAH++1QYWSFtxzaQJDJhERESmVqgIlAIxffxJroq87ZpZSGJB8cideq2LAtGnTbB7wGDKJiIhICVQXKJPSMvHC3P24l5xu11Cp1QClvdzRzf0Mpk/9CB4eHpg5cyb69+8PrdZ+y3syZBIREZHcqC5QAsCBC3fRb0mU3a+zfGAjtAr2w82bNzFu3DisXLkSTZo0wddff43nn3/e7tc3YsgkIiIiKakyUALA8r+uYdLmM3Y7/7SuIXi96TM5Htu/fz8iIiIQExODYcOG2aUMbi6GTCIiInIU1QZK4L9QqdXANuVvYQA0Wgx53gcTerQweUhmZiYWLFiAyZMnO6wMbi6GTCIiIrIHVQdK4En5e+y6f6y+p1KrAUoXK4KMAz/i/tk/ERUVhXLlyuV5vNRlcHMxZBIREZG1VB8oAeBRWiamb43B6ujr0ALQW/CKXTSAAUDP+oGY2Lk6HiXcQaNGjRAYGIi9e/eiaNGi+T5fTmVwczFkEhERkSWcIlAa3XiQip+j4rDiSFzWjjquWg102aYus3/sJjIxpE019GpUMcc6k8eOHUOrVq3w8ssvY+XKldBoNPleV85lcHMxZBIREVFenCpQGmXqDTh/Kwmn4hNxKj4Rl27cxv6DhxDaohmereCPWgE+2PrTt/gj8hdcu3rFZGBcs2YNevbsiU8++QQTJ04067pKKYObiyGTiIiIACcNlE87fvw46tevj+jo6KyAt2/fPrRp0waHDx9G48aNTT5v6tSpmDJlCtavX49XX33V7OspsQxuLoZMIiIi58NACdOBUq/XIyAgAH369MGcOXNMPk8Igddeew1btmzBwYMHUa9ePbOvqYYyuLkYMomIiNSNgRKmAyUAREREYMuWLbh27Vqe90mmpqYiNDQUt27dKrDz2xS1lcHNxZBJRESkHgyUyDtQmlP2BoD4+HiLOr9NUXMZ3FwMmURERMrEQIm8A6U5ZW8jSzu/TXGmMri5GDKJiIjkj4ESeQdKwLyyt1FhOr9NcdYyuLkYMomIiOSFgRL5B0pzy95Ghe38NoVlcPMxZBIREUmHgRL5B0pLyt6AdZ3fprAMXngMmURERI7BQIn8AyVgWdkbsL7z2xSWwW2DIZOIiMj2GChRcKC0tOwN2Kbz2xSWwW2PIZOIiMg6DJQoOFBaWvY2skXntyksg9sfQyYREZH5GChRcKAELC97G9mq89sUlsEdiyGTiIjINAZKmBcoC1P2NrJl57cpLINLhyGTiIiIgRKAeYGysGVvwPad36awDC4fDJlERORsGChhXqAECl/2BuzT+W0Ky+DyxJBJRERqxkAJ8wOlNWVvwH6d36awDC5/DJlERKQWDJQwP1BaU/Y2slfntyksgysPQyYRESkRAyXMD5SAdWVvI3t2fpvCMriySRkyhRD4+uuvsWbNGowbNw5dunSxyXmJiEhdGChhWaC0tuxtZO/Ob1NYBlcPe4ZMg8EArVaLlJQUrFy5Eu+99x6KFSuGoUOHYtKkSfZ8WUREpFAMlLAsUNqi7A04pvPbFJbB1cuSkNmgQQP873//yzXLLoSARqNBcnIyJk6ciIcPH6JevXr49ddf8fbbb6NHjx5ZxxARERkxUMKyQAnYpuwNOK7z2xSWwZ1DXiGzUqVKOHXqlMnnZGRkoG/fvnBzc8OXX36JNWvWYNWqVfjiiy/QsGFDB78CIiJSAk5LFUKPHj0QFxeHqKgoq87j6emJTZs2wWAw4JVXXkFaWpqNRliwcuXKYcWKFdi3bx+SkpLQsGFDDB
8+HA8ePHDYGMj+tFotqlatit69e2POnDnYv38/Hj58iB07dpg8XqfT4fXXX0fFihXx448/wtfXF4cPH4avry/Kli3r4NETEZFSMFAWQsuWLeHv7481a9ZYfa6AgAD8+uuvOHnyJAYPHgxHTxiHhobixIkTmD17NpYvX46qVatiyZIlMBgMDh0HOY5Wq81zNnzlypVYu3Yt/P39cebMGej1ety5cwchISEIDAx08EiJiEgpGCgLwcXFBd26dcO6detsEgAbNGiApUuXYtWqVZg+fboNRmgZNzc3jBo1CufPn0f79u0xaNAgNG/eHMePH3f4WEhazz//PEaOHInVq1fjhRdegIeHB/bv34+1a9di+fLlJp/z5ZdfYteuXZzdJiJyYgyUhWSrsrdReHg4pkyZgg8//BAbNmywyTktxTI41apVC/PmzcPhw4fx8OFDzJkzB+XKlYOvry/OnDmT6/g7d+7gww8/RLt27VCqVCkEBQUhPDwcM2fOZMgkInIirlIPQKmyl72tWT4ou0mTJuHs2bN4/fXXUblyZYd1fj/NWAY3doOvWbOG3eBOxthslpaWhsqVK2Pu3LmoU6dOruPKlCmDxMTErMafY8eOITo6Gp988gmSk5MBcDF2IiJnwC5vWN7lbWSrbu/spOz8NoXd4M7JuDRQnz59cPPmTaxYsQLly5c3+/nZu8uNIfP48eMMmUREKsVAicIHSlstcv607Ht+79u3Dx4eHjY7d2FxUXTnlJqaips3b6JKlSpW/9LEkElEpF4MlCh8oLTVIuemGPf8fuWVV7BixQpZLCTNRdHJ1hgyiYjUgYEShQ+UgH3K3kaO3vPbXCyDkz0xZBIRKQ8DJawLlPYqextJsee3uVgGJ0dhyCQikjcGSlgXKO1Z9gak2/PbXCyDk1QYMomI5IOBEtYFSsC+ZW9Afp3fprAMTnLAkElEJA0GSlgfKO1d9gbk2fltCsvgJDcMmURE9sdACesDpb3L3kZy7Pw2hWVwkjuGTCIi22KghPWBErB/2dtIrp3fprAMTkrCkElEVHgMlLBNoHRE2dtIzp3fprAMTkrFkElEZB4GStgmUDqq7A3Iv/PbFJbBSS0YMomIcmOghG0CJeC4sjegjM5vU1gGJzViyCQiZ8dACdsFSkeWvQHldH6bwjI4qR1DJhE5EwZK2C5QOrLsbaSUzm9TWAYnZ8OQSURqxUAJ2wVKwLFlbyMldX6bwjI4OTOGTCJSAwZK2DZQOrrsbaS0zm9TWAYneoIhk4iUhoEStg2UUpS9AWV2fpvCMjiRaQyZRCRnDJSwbaAEpCl7A8rt/DaFZXCigjFkEpFcMFDC9oFSqrI3oOzOb1NYBieyDEMmEUmBgRK2D5RSlb2NlNz5bQrL4ETWYcgsvEy9AeduJeFUfCJOxyfiTlI6MnR6FHF1QRlvd9QM8EGtAB9UK+sNNxd+TyLnxUAJ2wdKQLqyt5HSO79NYRmcyHYYMvN340EqVkXFYeWROCQ+zgQAuGo10Bn++5GZ/WOfom7o07giejeqiAolPSUZM5GUGChhn0ApZdnbSA2d36awDE5kHwyZwKO0TEzfGoPVx65DowEMFvyE1GoAAaBn/UBM7Fwd3h5udhsnkdwwUMI+gVLqsjegns5vU1gGJ3IMZwqZBy7cxZi1/yAhJd2iIPk0rQbw9XLH7O510CrYz3YDJJIxBkrYJ1AC0pe9AXV1fpvCMjiR46kxZC47dA2TI89Aa+GsZF6M5/m4awj6NX3G+hMSyRwDJewXKOVQ9gbU1/ltCsvgRNJScshc/tc1TNp8xm7nZ6gkZ8BACfsFSjmUvY3U1vltCsvgRPKihJB54MJd9FsSZffrLB/YiOVvUjUGStgvUALyKHsbqbHz2xSWwYnkS04h81FaJl6Ys9/qeyYLYryncvfoUDbqkGoxUMK+gVIuZW+jqVOnYt68eTh//jzKlCkj9XDsimVwImWQKmSOX38Sa6KvWxwm76ydgseXj2V9XP
7Nb+FWOjDf52g1QHiDQMx4tXZhhkokewyUsG+glFPZG3jS+X379m34+vrC1dVV6uHYHcvgRMpk75B5/UEqWn2+F5b+AEw+sxcJkTm/l5sTKAFAowH+GNeG61SSKvGnqp25uLigW7duWLduHeSQ3TUaDfz9/QsMk+np6di8eTN0Op2DRmYfbm5uGDVqFM6fP4/27dtj0KBBaN68OY4fPy710IgoH1qtFlWrVkXv3r0xd+5c7N+/H4mJiTh37hxWrlyJl156Cbdv38Ynn3yCdu3aoVSpUggKCkJ4eDhmzpyJXbt24cGDB3me/+eoOFh6F5I+NREPdi0CoAFcLP+FXPv/1yVSIwZKB+jRowfi4uIQFWX/G7/NYc69nNHR0fjll19UsyB6uXLlsGLFCuzbtw9JSUlo2LAhhg8fnu8PHCKSF1uFzEy9ASuPxFlc6n6wexEMjx/Bq24YXIqVsnj8egGsOBKHTL3B4ucSyR0DpQO0bNkS/v7+WLNmjdRDMVvjxo0xadIkXL9+HYMGDZJ6ODYTGhqKEydOYPbs2Vi+fDmqVq2KJUuWwGDgN3giJSpMyKza+H9Z2yma6/GVaKSc2QcXr1Io2Xpgoceb+DgT528lFfr5RHLFQOkAcit7FyQ5ORkuLi6oVq0afvnlF+zcuRPbt2+Xelg2wzI4kboVFDLrtH4RsOB7sSHjMRK2fw0AKNX+bWg9ilk1vlPxiVY9n0iOGCgdRG5lb1OEELhx4wY6duyI2NhYAEDVqlVRoUIFVZaGWQYnch7ZQ2b15mFwdTH/x9/D/cuhf3QHntVawDO4iVXjcNVqGChJlRgoHUSuZe+kpCRs27YNwJN7KytUqIDAwECEhobir7/+wpw5c3DkyBH4+PhIPFL7YRmcyLncSUqHzswbKDMTriPp+FZoPbxQqt1Qq6+tMwjcTU63+jxEcsNA6SByLHvr9XqMGDECe/bsyXrMYDBg+vTp0Gg0+PrrrzFt2jR89NFHaNWqlYQjtT+WwYmcR4ZOb/ax+uQHgDDAkJaMG1+9jtgZLyJ2xovQP7qTdcy/i4bh38UjzD5neqb51ydSCgZKB5Jb2dvFxQU9e/bE4sWLcfDgQQBPykKenp5IT0/HiBEjcOPGDUyZMgVeXl4Sj9YxWAYnUr8iri6SXt/dTdrrE9mD+le2lpHsZW857JoDAB07dkRERAT69++POXPmoFy5cli/fj0CAwNRpUoVpwmSTzOWwY2Loq9Zs4aLohOpRBlvd7hqNWaVvV1LlkfJF97M9Xjinz/DkPZkkfXiTXvAzbeiWdd21Wrg5+Vu2YCJFIA/GR1IjmVvAPj444/Ro0cPfPnll2jatCn27duHtm3bqvq+SXOwDE6kTs+VdofOzHukXYv7onjDl3L9pyny3243XjX/B6+QNmadT2cQqBXg3N9bSZ0YKB1MbmVvoxkzZuDnn3/GgQMHsHTpUnz66acoUqSI1MOSBZbBiZQvLi4OX3/9NTp27IjhfV4CYOE2OTbEQElqxL28Yd+9vJ8mt729zSGEMGt3HWfAvcGJlMFgMCAqKgpbtmxBZGQkTp48CVdXVzRt2hTFfUri1HN9oHG3bj3JwvAp6oZjE9vCzYJli4iUgO9oB5Nr2TsvOp0O6enpuH//vtRDkQWWwYnkKzk5GRs3bsSgQYNQrlw5NG3aFN999x1q166NDz/8EJ07d8Zff/2F3bt2oGJGnMN/ALpogL6NKzJMkirxXS0BuZa9TdFoNOjatSs6deqEtLQ0qYcjGyyDE8lD9lJ26dKl8eqrr+Lw4cMYMGAAfv31V4wZMwZ//fUXPvnkE1y8eBFz5sxBfHw8fp72Nhz9K70BQK9G5jXvECkNA6UE5LrIuSkuLi6YPn06Tp48icGDBytiVtWRuCg6kWMZDAYcPnwYH374IerUqYNKlSrh3XffRWZmJj7//HNcuHAB8+fPx5UrV9CtWzd8/PHHaN68OQ4ePIjTp09j5MiRKFWqFCqU9ETPBoHQOu
huHq0G6NkgEBVKehZ8MJECMVBKQGll7wYNGmDp0qVYtWoVpk+fLvVwZIdlcCL7yquUXadOHaxevRr37t3DypUrkZaWho4dO6Jdu3Y4e/Zs1mzksmXL0Lx581z3gk/sXB2+Xu52D5VaDeDr5Y6Jnarb90JEEmKglIiSyt4AEB4ejilTpuDDDz/Ehg0bpB6OLLEMTmQ7+ZWy//jjD9y+fRtLly5FqVKl8MYbb6BChQqYMmWKydnIvHh7uGF29zowcxfGQjMIYHb3OvD2cLPvhYgkxC5vOLbL20ip3d6vvfYatmzZgoMHD6JevXpSD0m22A1OZJm8urJDQ0PRpUsXvPjiiwgKCgKArDC5aNEiXL58GTVq1MDQoUPRt2/ffANkXpb/dQ2TNp+x9UvKMq1rCF5v+ozdzk8kBwyUkCZQAkBERAS2bNmCa9euKWZZntTUVISGhuLWrVuIiopCuXLlpB6SrN28eRPjxo3DypUr0aRJE3z99dcOfY8RyVlycjJ27tyJyMhIbN26FXfu3EHp0qXRqVMnvPjiiwgLC8vaYMFgMGDPnj34/vvvsWnTJri6uiI8PBxDhgxBs2bNrPoeGhMTg9A3PoJnq4HQamCTGUvjeRgmyVlwukRCSit7A4Cnpyc2bdoEg8GAV155hZ3fBWAZnCgnYym7Q4cOeZayly9fjvDwcPj4+OD27duYOXMmgoODzb430hIJCQno0qULyiSew3c9a9rknkoNBHy93LF8YCOGSXIanKGEdDOUSix7Gx07dgytWrXCK6+8ghUrVihmhlVKLIOTM7KklJ39OfaajcwuIyMDYWFhOH36NKKiolC5cmU8SsvE9K0xWB19HVoAegt+QrpoAL0QSD21C/u+GIkaz1WxyTiJlICBEtIFSkCZZW+jNWvWoGfPnvjkk08wceJEqYejGCyDk9pZUsrOztb3RuZHCIGhQ4di6dKl2L17N1q2bJnj8zcepOLnqDisOBKHxMeZAABXrQa6bPXw7B/7FHVD38YV8VJNX7SqXxNt27bFihUrbDpmIlkTJKKjowUAER0d7fBr7927VwAQhw8fdvi1bWHKlCkCgFi/fr3UQ1Gcffv2iZCQEKHVakVERIS4f/++1EMiKrTY2FixYMECERYWJooUKSIAiOrVq4v33ntP/PHHH0Kn05l8nl6vFzt37hTdu3cXrq6uwsPDQ/Tr108cPHhQGAwGu4133rx5AoBYvHhxvsdl6PTi1I2HYtWRWPHBhpPijeVHxes/HhZvLD8qPthwUqw6EitO3XgoMnT6rOcsXLhQABBHjx612/iJ5IaBUkgbKHU6nfD39xejR492+LVtwWAwiPDwcOHp6SmOHz8u9XAUJyMjQ8ydO1d4e3sLPz8/sXjxYqHX6wt+IpHE9Hq9+Ouvv8TEiRNF7dq1BQDh6uoqXnjhBTFv3jxx6dKlfJ9/69YtMWPGDBEUFCQAiBo1aogvv/xSJCQk2H3s27ZtE1qtVowZM8Yu59fpdKJmzZoiNDTUrqGYSE4YKIW0gVIIId5++21RsWJFxX7jSUlJEQ0aNBAVKlQQ//77r9TDUaR///1X9OnTRwAQTZo0key9SJSfpKQksWHDBjFw4EBRpkwZAUCULl1avP7662L16tXi4cOH+T5fqtnI7M6ePSuKFy8uOnfunOesqS1s27ZNABCbNm2y2zWI5IT3UELaeygBYN++fWjTpg0OHz6Mxo0bO/z6thAfH49GjRohMDAQ+/btg4eHh9RDUqT9+/cjIiICMTExGDZsGKZNm4aSJUtKPSxyYnFxcYiMjERkZCT27t2LjIwMVK9eHV26dEGXLl3QtGlTuLi45HsOR94bmZ+EhAQ0btwYHh4eOHToEIoXL263awkhEBYWhtjYWJw+fRpublzUnFRO4kArC1LPUCq97G109OhRUbRoUdG7d2/FzrbKAcvgJCVrS9nZzyP1bGR26enponXr1sLX11dcuXLFIdf8+++/hUajEQsWLHDI9YikxE
AppA+UQii/7G20evVqAUB88sknUg9F8VgGJ0cxVcouVaqU6Nu3r1ml7OykvDcyLwaDQbz55pvCzc1NHDhwwKHXHjhwoPD19bXo75BIiRgohTwCpdK7vbNj57dtsRuc7KGwXdmmyG028mnmdnTbw40bN4Snp6cYP368w69N5EgMlEIegVItZW8h2PltDyyDk7VsVcrOTo6zkU+zd0e3OSZNmiTc3d3FtWvXJBsDkb0xUAp5BEoh1FP2FoKd3/bCMjhZwpalbCO5z0Zm56iO7oIkJSUJf39/0adPH8nGQGRvDJRCPoFSTWVvIZ6UesqXLy8aN24sHj9+LPVwVIVlcMqLLUvZ2SlhNjK7e/fuiaCgIBESEiISExOlHg4XOyfVY6AU8gmUaip7G7Hz235YBich7FPKzn5upcxGZidFR3dBuNg5qR0DpZBPoBRCXWVvI3Z+2xfL4M7HHqXs7JQ2G5mdlB3dBeFi56RmDJRCXoFSbWVvI3Z+2x/L4Opmr1K2kVJnI58mZUd3QQwGg2jXrp0IDg4WGRkZUg+HyKYYKIW8AqUay95CsPPbUVgGVw97lrKzU/Js5NPk0NFdEC52TmrFQCmEuH37tpg/f764ffu21EMRQqiz7C0EO78diWVwZbJ3KdtILbOR2cmlo9scXOyc1Ih7eQMwGAzQ6XRwdXWFVquVejiq2Ns7L9zz27G4N7j82WKvbHPJZU9tW3PkHt22EB8fj+DgYIwcORKfffaZ1MMhsg2pEy3lptaytxE7vx2LZXB5cVQpO/v11DYbmZ0cO7rNwcXOSW0YKGVKrWVvI3Z+Ox7L4NJxVCk7OzXdG5kXOXd0F4SLnZPaMFDKlFq7vbNj57c02A1esNTUVPHTTz+Jbt26if79+4t169ZZ/MudvbuyTVH7bOTT5NzRbQ4udk5qwkApU2ovewvBzm8psQyet59++klUq1ZNhISEiFGjRokRI0YIjUYjvvvuO5GZmZnn8xxdys7OGWYjn6aEju6CcLFzUhMGShlTe9lbCHZ+S41l8Ny2b98u1q5dK5KSkrIe69Wrl+jQoYNIT0/PdbxerxcnTpxwWCk7+3WdaTYyOyV1dBeEi52TWrDLW8bU3O2dHTu/pcdu8P9kZGTAzc0NGo0m67HevXujUqVKeXbkpqen4+OPP0bHjh1t2pVtilo7tc2ltI7uggghEBYWhtjYWJw+fRpubm5SD4mocCQOtJI4e/asWLx4sYiJiRFCCBETEyPeeustMXDgQLF7926JR/cfZyh7G7HzW3osg+eUmJgo3n//fREcHCz8/PzEN998I9m6gc48G5mdUju6C8LFzkkNnC5Qbtu2TRQpUkSUKlVKeHh4iG3btgk/Pz/Rtm1b8b///U+4uLjIKlQ6Q9nbiJ3f8sAy+BNxcXEiPDxczJgxQ3zzzTeiUqVKok+fPuLcuXMOG4Mz3huZFyV3dJuDi52T0jldoGzatKmYOHGiEEKIn3/+WZQsWVJMmDAh6/Pjx48X7dq1k2p4uThDt3d27PyWD3aD53Tw4EFRr1498dVXX9n1OpyNNE3pHd0FuXHjhvD09BTjx4+XeihEheJ0gbJ48eLi4sWLQogn37hdXV1zdBifOnVK+Pv7SzW8XJyp7C0EO7/lRs1lcL1eL44dO1bgccamj1OnTgkfHx+xceNGu4yHs5F5U0NHtzm42DkpmVMGyuzLd3h5eYnLly9nfXzt2jXh4eEhxdDy5ExlbyFydn7fvHlT6uGQUE8Z3NQC4ydPnszza8sYnu/evSvGjBkjmjRpIq5fv26z8XA2smBq6uguCBc7JyVzukBZu3ZtsW3btqyPT506lWNtuQMHDojKlStLMbQ8OVvZW4gn5Z/y5cuLxo0bi8ePH0s9HPp/SiyDX7t2Ld8FxvOacd2wYYMYO3aseOmll4S/v79o0KCB2Lt3r03GxNlI89y7d08EBQWJkJAQkZiYKPVwHIKLnZNSOV2g/Pbbb8WWLVvy/PwHH3wgBg8e7M
ARFczZyt5G7PyWJ7mXwY0LjE+YMMGqBcZjY2NFr169xPjx423yw52zkZZRa0d3QbjYOSmV0wVKpXK2sreRsfP7008/lXoo9BQ5lcGl2CvbXJyNtJzaO7oLwsXOSYkYKBXCGcveRuz8ljepyuAFlbKlvN+Os5HWUXtHd0EMBoNo166dCA4OFhkZGVIPh8gsDJQK4axlbyHY+a0EjiiD26qUbU+pqamcjbSSs3R0F4SLnZPScOtFAJmZmXj48CFKlCgh622vIiIisGXLFly7di3HtnDOIDU1FaGhobh16xaOHj2KsmXLSj0kMuHmzZsYN24cVq5ciSZNmuDrr7/G888/X+jzJScnY+fOnYiMjMTWrVtx584dlCpVCp06dUKXLl0QFhYGHx8fG74Cyxm/hV64cAEvvPACEhISEB4ejiFDhqBZs2ZO97VqjZiYGDRp0gQtW7bEr7/+atctLJVg0KBBiIyMxKVLlyR/nxMVSOJAKwvR0dECgOyXQnHmsrcQ7PxWEmvK4HIuZWdnLF2fPn1aDB06VNSsWZOzkVZwxo7ugnCxc1ISBkqhnEDpzGVvI3Z+K4e5ZXAllLKz0+v14ujRo2LIkCGicuXKvDfSBpy1o9scXOyclIKBUignUArhvN3e2bHzW1lMdYPLuSvbFJ1OJ44fPy4GDx4sPDw8eG+kDTl7R3dBuNg5KQUDpVBWoHT2srcRO7+V55dffhHlypUTAIRWq5VtKdvIYDCIc+fOibFjx7JT246cvaPbHFzsnJSATTkAjh8/jvr16yM6OtqqBgJH0Ov1CAgIQJ8+fTBnzhyphyMZIQRee+01bNmyBQcPHkS9evWkHhI9xWAwICoqCpGRkdiyZQtOnjwJFxcXVKlSBXFxcShWrBhmz56N/v37Q6vVSj3cLLdv38aSJUuwaNEiXLlyBTVq1MDQoUPRt29flCpVSurhqcr27dvRuXNnjBo1CrNnz5Z6OLKl1+tRt25dlC5dGnv37mWjF8mTxIFWFpQ0QykEy95G3PNbfpKSksT69esLLGXLaVF0IZ7cF7ljxw6uG+lAzrRHty1wsXOSO85QQlkzlACwb98+tGnTBocPH0bjxo2lHo6k4uPj0ahRIwQGBmLfvn3w8PCQekhOJzY2Flu2bEFkZCT27t2LjIwMVK9eHV26dEGXLl3QtGnTPJd/2b9/PyIiIhATE4Nhw4Zh2rRpKFmypMPGztlIaSQkJKBx48bw8PDAoUOHULx4camHJHtCCISFhSE2NhanT5+W9RJ35KQkDrSyoLQZSnZ758TOb8eydVe2o/cG52yktNjRXXhc7JzkjIFSKC9QCsGy99PY+W1f5payrWHvMvitW7fEZ599JqpUqcJdbCTCjm7rDRw4UPj6+spuJQQiBkqhzEDJbu/c2PltW1ItMG7LvcE5Gykv7Oi2Hhc7J7lioBTKDJQse+fGPb+tI6cFxq0tg3M2Un64R7ftcLFzkiMGSqHMQCkEy96msPPbMo4oZVvDkjI4ZyPlix3dtsXFzkmOGCiFcgMly96mcc/v/Cllr+zs8iuDczZS3rhHt31wsXOSGy4bBOUtG2TERc7zduzYMbRq1QqvvPIKVqxY4dQLAZtaYNzV1RWhoaF48cUX0aVLFwQFBUk9zAJlZmZiwYIFmDx5Mjw8PNCvXz9cu3YNv/76K1xdXREeHo4hQ4agWbNmTv3vLScZGRkICwvD6dOnERUVhcqVK0s9JNXgYuckN65SD4AKz8XFBd26dcO6deswe/ZsfkPJpkGDBli6dCl69uyJkJAQTJgwIc9jM/UGnLuVhFPxiTgdn4g7SenI0OlRxNUFZbzdUTPAB7UCfFCtrDfcXOSzo0t+kpOTsWPHDmzZsgVbt27FnTt3UKpUKXTq1AkTJ05EWFgYfHx8pB6mRdzc3NC7d2/cvXsXX331FebMmYOiRYvi3XffxQcffMB1I2VGCIHhw4fjzz//xO7duxkmbczFxQWzZs1Cx44dsXnzZrz00k
tSD4mcHAOlwvXo0QPffPMNoqKinH6R86eFh4cjJiYGEydORLVq1fDqq6/m+PyNB6lYFRWHlUfikPg4EwDgqtVAZ/hv0t5Vq8GqqDgAgE9RN/RpXBG9G1VEhZKejnshZsprgfEBAwYUuMC4nBkMBuzevRsLFy7Epk2bsmYjGzZsiO+++w5z587F48ePHb4oOuVv/vz5WLRoERYvXoyWLVtKPRxVCgsLQ7t27fDee++hU6dOXOycJMWSN5Rb8gZY9i6IMLHn96O0TEzfGoPVx65DowEMFnwFaDWAANCzfiAmdq4Obw/pvoFnL2VHRkbi1KlTiixl58WcXWyeLoPPnDlTdnuDOyPu0e04//zzD+rVq4evvvoKERERUg+HnBgDJZQdKAEgIiICW7ZswbVr11j2NiE1NRWhoaG4desWFqzbjU92xSEhJd2iIPk0rQbw9XLH7O510CrYz3aDLUB+pewuXboospSdXV6zkQXdG3nz5k2MGzcOK1euRJMmTfD1118r8mtZDWJiYtCkSRO0bNkSv/76qyJnxZVm0KBBiIyMxKVLlxT99U/KxkAJ5QdK7u1dsPj4eDR5/X24NO4FrYWzknkxnufjriHo1/QZ60+YB2v2ylYKW+2pLfXe4M6Oe3RLIz4+HsHBwRg5ciQ+++wzqYdDTop1IRVo2bIl/P39sWbNGqmHIlu74zLh0rgXANuEyeznmbT5DJb/dc02J8WTWbrDhw9j4sSJqF27Np555hm8++670Ol0mDlzJi5duoSzZ89i5syZaNGihWLDpMFgwM6dO9GjRw9UqFABU6dORYsWLXDw4EGcPn0aI0eOtLjRJjQ0FCdOnMDs2bOxfPlyVK1aFUuWLIHBYLDTqyCjjIwMdO/eHYmJiYiMjGSYdKCAgACMHTsWX3zxBWJjY6UeDjkpzlBC+TOUAMve+Tlw4S76LYmy+3WWD2xU6PK32kvZ2dlqNrIgLIM7jhACQ4cOxdKlS7F792424UggOTkZzz77LNq2bYsVK1ZIPRxyQpyhVIkePXogLi4OUVH2D05K8igtE2PW/gOtnTO2VgOMXfcPktIyzX5ObGwsvv76a3To0AGlS5dGt27dcPjwYQwYMAB//PEH7ty5g59++gnh4eGKD5P2mI0sSLly5bBixQrs27cPSUlJaNiwIYYPH44HDx7Y9Dr0X0f3999/zzApES8vL0ybNg0rV67EsWPHpB4OOSHOUEIdM5Ts9jZt/PqTWBN93WZl7vxoNUB4g0DMeLW2yc+rvSvbFEfNRhaE3eD2w45u+eBi5yQlBkqoI1ACLHs/7fqDVLT6fC8KeoNn3LqMlHN/IP36aegS70Cf+ghad0+4l6+K4k26wSOwptnX1GiAP8a1yVqn0plK2UaF7dR2BJbBbYsd3fKzfft2dOzYEZs2beJi5+RQDJRQT6Bkt3dOn/9+Dt/tv1zg7GTC9gVI/nu76U9qtPB7eTw8qzYz65ouGqBXXT+Uu3tU0V3ZDx48QGxsLOrWrQuDwWDWTJ5cZiPN8XQ3+IwZM+Dl5VXg8+7evQs/P8ctEyVn7OiWJyEEwsLCEBsbi9OnT3Oxc3IY1ntUhN3e/8nUG7DySJzZpW6XYiVRvFlPlAmfCt+u4+BaqsKTTwgD7u/+wezr6gWw7NAVvDt6jCK7srdv3442bdqgdOnSWLZsGQDkGyaluDfSFrJ3g2/cuBFpaWnI63frhIQEzJw5E02bNkV4eDjCwsJw5swZB49YXtjRLV8ajQazZs3CxYsXsXDhQqmHQ06EM5RQzwwlwLK30an4RHRZcNCsY9Oun0GRskHQunlkPZZx+wpuLhmZ9XGFESvgUqyE2df/ZUBdNKkaYPbxUtuzZw8GDBiAR48eoW/fvhgxYgSqVq1a4PPefPNN/PDDD7KejSxIcnIyPD09TQZnnU6HTp064fHjxxgwYAACAgKwYcMGrFixAjNnzsSIESMkGLG02NGtDFzsnByNM5Qqw27vJ07FJ5p9rEdgSI
4wCQCupcrn+Fjj5m7R9a8m6i06XmoPHz5EZmYm1qxZgwULFqBq1ap49OhRvs/JzMxERESE7GcjC+Ll5ZXnLOysWbNw8uRJtG7dGlWrVkWHDh2wcOFCLFmyBF5eXkhPT3fwaKXHjm5lmDZtGlJTUzFjxgyph0JOgoFSZVj2fuJ0fCJcrVgrKPX8oaw/u1cIgbZIUbOf66rVWBRo5eCFF15A48aNsXjxYvz4448IDQ3Fa6+9hnHjxuHevXsmn+Pm5oa6deuiefPmqpwNv337NiZOnIjHjx/jypUrGD58OBo2bIhr166he/fu6Nq1K9zdLftFQ+m2b9+O0aNHY8yYMRg4cKDUw6F8cLFzcjQGSpVxcXFBt27dsG7dujzvCXMGd5LSoSvkWkHpty7h/s7vn3zg4oaSbd+06Pk6g8DdZGXNXPn4+KBx48b4/fff8eOPP+LVV19FSEgIVqxYgT59+jjlPYPvvvsuatasicjISKxcuRK7du1CYGAgfvrpJ7i4uKB06dJSD9GhYmJi0LNnT3Ts2BEzZ86UejhkhnHjxqFEiRKYOHGi1EMhJ8BAqUIsewMZusKVnNOun8HtnydApKcAWhf4dR0H97LPWnye9ExllbwBoGfPnvj888+xevVqvPPOO5g1axb27NmDuLg4REZGSj08h0pMTMTFixfRo0cPtGrVCgDg6+sLb29v7Nu3DxkZGRKP0LESEhLQpUsXBAYGYtWqVbJuLKP/cLFzciQGShVi2Rso4mr5D7zHV4/jzppJEOmpgIsb/F7+wOzlgp7m7qa8H7iVK1dGv379EBgYmPVY9erVUaFCBZw/fx6ZmebvAqR0Op0O586dQ+XKlbMeS0xMhI+PD9zd3Z1qb3B2dCvboEGDULNmTYwdO9apq1ZkfwyUKsSyN1DG292ieyhTzx/CnXUfQ2SmQ+PmgTI9JsMzuEmhru2q1cDPS3n31mk0mqx7AoUQ0Ov10Ov1uHfvHtLS0pxqPTsfHx+88sorWbM6mZmZ+O2337B161a0b98eHh4eBZxBHYQQGD58OP78809s2LAhR8AmZXBxccGsWbOwf/9+bN68WerhkIoxUKqUs5e9awb4mH0PZcq5g7i7aQag1wHQwKdFL2hc3JB2/UzWf0Jn/uycziBQK0Cey3RkXzfy77//hl6fuzSv0+mg0Wig0Wjw/fdP7iUdP368o4cqKVdXV/Tv3x+rVq1CixYtEBYWhgkTJqB58+Z49913cxyr5l/a2NGtDmFhYWjXrh3ee+89p6o0kGO5Sj0Aso/sZW9n3DXHkkD3+NJRQBhLmAIP9y7JdUzAWz/CtYS/Xa7vCKZ2sUlISDC5XM7HH3+Me/fuYdu2bdDr9ZgwYQJq1jR/+0m1aNu2LW7fvo158+bBzc0NtWvXRt26dXMdp9FosG/fPrRq1UpVe4Ozo1s9jIud16tXDwsXLkRERITUQyIVUs93P8rB2cve1cp6w6eoNCVab3ctqpb1luTa2RW0i80LL7xgcrmfdu3a4dGjR5g5cybi4uLw1ltvOW0ThkajwahRozB8+HC0atUq1/2DQgjExMSgTZs2aN68OY4fPy7RSG2LHd3qU6dOHQwYMABTpkxBYqKyljUjZeBOOVDXTjnZOfve3ubu5W1LwqDHo8Pr0ND9FoYOHYquXbs6/N5DJe2prRZP7w0+bdo0lCxZUuphFQr36Fav+Ph4BAcHY+TIkfjss8+kHg6pDGcoVczZu71fqekHg4N/X9JqXTDl9XZISUlB9+7dERgYiA8++ABXrlyx63WVuqe2WmTfG3z58uWoWrUqlixZorhucHZ0qxsXOyd7YqBUMWctewshsH79erzQtB5ST+2CBo557VoN0LNhIN5543X8+eefOHnyJMLDw/Htt98iKCgI7du3x/r16216U/zt27cxY8YMPPfcc2jfvj3Onj2LOXPmID4+HsuWLVPtLjZy5ObmhlGjRuH8+fNo3749Bg0apKgyODu6nQMXOyd7YaBUOW
fr9r5w4QI6dOiA7t27o06dOtg16234eXvAil0YzaLVAL5e7pjYqXrWY7Vq1cL8+fPx77//YunSpTabteRspLyVK1cOK1aswL59+5CUlISGDRti+PDhePDggdRDyxc7up0DFzsne2GgVDlnKXunpKRkdSNfvHgRmzdvRmRkJGpXD8bs7nXsfh+lQQCzu9eBt0fu+yU9PT3Rv39/q2ctORupLEoqg7Oj27lwsXOyBwZKlVN72dtY3q5evTrmzp2LCRMm4MyZM+jSpUvWMa2C/fBx1xC7jmNa1xC0CvYr8DhLZy05G6lsSiiDs6Pb+XCxc7IHdnlDvV3eRmrt9r5w4QJGjBiBHTt24MUXX8S8efMQFBSU5/HL/7qGSZvPQKuBTWYsjeeZ1jUErzd9ptDnOXXqFBYtWoTly5cjMTER7dq1Q8+ePXHr1i0sXryYndoqIrducHZ0Oy8hBMLCwhAbG4vTp0871U5YZB+coXQCait751Xezi9MAkC/ps9g+cBG8PVyt/qeSuM9k8sHNrIqTAL/zVreuHEDY8eOxfHjx/HGG2/gww8/hKenJ1avXs3ZSJWQUxmcHd3OzbjY+cWLF7Fw4UKph0MqwEDpBNRS9janvF2QVsF+2DU6FOH1A6HRAC4WBksXDaDRAOH1A7F7dKhZZe6CGO+NrFOnDmbPng1/f3+8//77ePPNN3H9+nX07NkTYWFhNu8QJ2nIoQzOjm4CuNg52RYDpZNQerf3093bZ86cwZQpU1C0aFGLz1Xcww0zutXGH+Pa4K3QoBw76rg+NXWZ/WOfom54KzQIf4xrgxndaptswDFXQfdGzpgxAwsXLrR5hzjJh5Td4OzoJqNp06YhNTUVM2bMkHoopHSCRHR0tAAgoqOjpR6K3eh0OuHv7y9Gjx4t9VAskpycLD744APh5uYmKleuLDZv3mzza2To9OLUjYdi1ZFY8cGGk6LH/J2iTPhU0WP+TvHBhpNi1ZFYcerGQ5Gh01t9rVu3bonPPvtMVKlSRQAQNWrUEF9++aVISEgo8LknT54UI0aMED4+PgKAaNeunVi3bp3IyMiwelwkrYyMDDF37lzh7e0t/Pz8xOLFi4Veb/37zZRt27YJrVYrxowZY5fzk/JMmjRJuLu7i2vXrkk9FFIwBkrhHIFSCCHefvttUbFiRWEwGKQeSoEMBoNYt26dCAwMFO7u7mLy5MkiNTXVIde29ftBr9eLHTt2iO7duwtXV1fh4eEh+vXrJw4ePFiof4uUlBSxdOlS0axZMwFA+Pv7i/Hjx4vLly/bZLwknX///Vf06dNHABBNmjSx+feks2fPiuLFi4vOnTsLnU5n03OTciUlJQl/f3/Rp08fqYdCCsZAKZwnUO7du1cAEIcPH5Z6KPk6f/68aN++vQAgXnzxRXHp0iWHXt9W7wdrZiPNxVlLddq3b58ICQkRWq1WREREiPv371t9znv37omgoCAREhIiEhMTbTBKUpOFCxcKAOLo0aNSD4UUioFSOE+glHvZ2xHlbXNY836w9WykuThrqT62LIOnp6eL1q1bC19fX3HlyhUbj5TUQKfTiZo1a4rQ0FBFVLFIfhgohfMESiHkWfaWsrxtSmHeD46YjTQXZy3VxdoyuMFgEG+++aZwc3MTBw4csNMoSQ22bdsmAIhNmzZJPRRSIAZK4VyBUm5lb6nL26aY+36QajbSXJy1VJfClsHnzZsnAIjFixfbeYSkdAaDQbRr104EBwfzF1CyGAOlcK5AKZeyt1zK26YU9H6Q02ykuThrqQ6WlsHZ0U2W+vvvv4VGoxELFiyQeiikMAyUwrkCpRDSlr3lVt42xdT7Qe6zkebirKU6mFMGZ0c3FdbAgQOFr6+vePjwodRDIQVhoBTOFyilKnvLsbxtSvb3gxJnI83FWUvly6sMzo5ussaNGzeEp6enGD9+vNRDIQVhoBTOFygdXfaWc3nblKNHjwoA4oUXXlD0bKS5OGupbE+XwRcuXChCQ0PZ0U
1W4WLnZCkGSuF8gVIIx5S9lVDezs44GxkQECAAiCpVqqhmNtJcnLVUruxlcI1GIxYtWiT1kEjBuNg5WYp7eTsp497ex44ds8v5bbn3tj2Z2lO7bt26AIA1a9Zg5MiRKFWqlLSDdKBatWph/vz53ENcgcqVK4eGDRsCAMqXL4+hQ4c6bG9wUh8vLy9MmzYNK1eutNvPCVIZqROtHDjjDKVOpxNDhgwRjx49sul5lVLezu/eSGd8P+SHs5bKkL2j25F7g5N6cbFzsgQDpXDOQGlkq+5PJZS3ze3Udub3Q354r6V85dXRbe+9wUn9uNg5mYuBUjBAWEvu3duWdmrz/VAwzlrKhzkd3fbYG5ycAxc7J3PxHkoqtJSUFEyYMAE1a9bExYsXsXnzZkRGRiIoKEjqoZm8N7JFixY4ePAgTp8+7XT3Rtoa77WUh4yMDHTv3h2JiYmIjIxE8eLFTR4XGhqKEydOYPbs2Vi+fDmqVq2KJUuWwGAwOHjEpDQajQazZs3CtWvXcPjwYamHQ3ImdaKVA85IWUbO5W1brBvJ90PhcNbSsQq7RzfL4FQYd+7cEXq9nvdSUp44Q0kWy8jIwOuvvy6b7m3ORsoDZy0da/78+Vi0aBG+//57tGzZ0uznlStXDitWrMC+ffuQlJSEhg0bshucCuTr6wutVguNRiP1UEimGCgpByFEgY+7u7vjwIEDkpe3b9++jRkzZuC5555D+/btcfbsWcyZMwfx8fFYtmwZmjdvzm9+EvD09ET//v3x559/4uTJkwgPD8e3336LoKAgtG/fHuvXr0dmZqbUw1S07du3Y/To0RgzZgwGDhxYqHOwDE6W4PdSKggDpZNbv3493nvvPXz11VeIjY3N85tG9h8yQgg0aNDAUUPMNQ7ORioHZy1tLyYmBj179kTHjh0xc+ZMq87l5uaGUaNG4fz582jfvj0GDRqE5s2b4/jx4zYaLamdcbJBr9dLPBKSGgOlk7p69SpatWqFMWPG4Nq1a5g1axbCwsJw9epVk8e7uLhk/VmK31Q5G6lsnLW0jYSEBHTp0gWBgYFYtWpVjq9La7AMTpYSQiAzMzPr+66t3oukXAyUTujOnTsYNWoUgoKCcOzYMaxZswZXr17F/fv3sX79egAwWfZydCmMs5HqxFnLwjG3o9saLIOTOW7duoXBgwcjJCQEb775Jnr06IFPP/0U8+bNw59//om///5b6iGSBBgonVDp0qWRmpqKt99+G76+vsjMzISLiwvat2+Po0ePAgC02txvDVOP2QNnI50DZy3NJ4TA8OHD8eeff2LDhg2oXLmy3a7FMjgVpGjRooiOjkbVqlXxxhtvIDQ0FCkpKdDr9ejVqxcaNmyITZs2ST1McjAGSifk4uKCjRs3Zu376+bmBgCIjY1FvXr1JBkTZyOdG2ct81fYjm5rsAxOefHx8cH48eOxf/9+1K5dG8OHD8fUqVNRunRpPPvss6hZsybOnTsn9TDJ0aRcs0gunHXdwezriSUnJ4uaNWuKXbt2OXQMtlg30tac9f0gN1zX8onse3RLhXuDU3Z6vV5kZGSI1q1biwEDBogdO3aI9u3bi7p164o33nhDbNy4UaSkpEg9THIwBkrBACGEEEePHhVlypQRV65cyXosOTnZLtcyd09tqfD9IC/OvId4Xnt0S4WLolN2X3zxhdBqtaJGjRqib9++4scffxR3797N+rwcvp+T47Dk7eTE/y/5cODAAQQEBKBy5cpISUnBG2+8gbFjx9q0vMV7I6kwnPVeS3t1dFuDZXACgPPnz2PgwIFYtWoVAgICkJSUhB9++AGDBg2Cr69v1s8Vfj93LgyUTs74BX/y5El069YN69atwzPPPIM///wTI0aMQMmSJa06P++NJFtylnstHdHRbQ12gzu32NhYnDhxAl27dsXs2bORnp6O5OTkrM8zSDopqadI5cDZS5zJycmiQoUKQqPRCC8vL7Fw4UKrzynHeyPN5ezvB6VR272Whd
2jWyosgzun7P/OmZmZEo6E5IIzlCp38eLFAsuBxYoVQ8WKFTFq1Cg8evQIb775ZqGuxdlIkoLaZi2l6Oi2Bsvgzun5558H8OS2KVdXV4lHQ7IgdaKVAzXOSCUnJ4sPPvhAuLm5iR07dhTYkWnNDf9Kno00RY3vB2ej1FlLOXR0W4Pd4MRGHOfFQCnUFSAMBoNYv369CAwMFO7u7mLy5MkiPT3d5teRe6e2NdT0fnB2SuoQl1tHtzVYBnc+xl8cbt26JfFISCoseavIhQsX0KFDB3Tr1g116tTBmTNnMGXKFBQpUsRm10hJScGcOXPYqU2KoJQOcTl2dFuDZXDno9Fo8N5776FVq1ay+Joix2OgVIGUlBRMmDABNWvWxMWLF7F582ZERkYiKCjIZtdISEjAO++8gxIlSuDDDz/kvZGkOHK911LuHd3WYDe489BoNOjTpw8uXryIhQsXSj0ckoLUU6RyoNQSp6nydmpqqs3Or9PpxKNHj8SwYcNUcW+kuZT6fiDLSX2vpdI6uq3BMrhzGDhwoPD19RUPHz6UeijkYJyhVKi8yttFixa1+twGgwE6nQ4zZ85EpUqVkJKSwtlIUiWpZy2V1tFtDZbBncO0adOQmpqKGTNmSD0UcjSpE60cKGlGKnv3duXKlcXmzZtten69Xi82btwoOnXqJLy9vcWwYcNsen4lUNL7gWzPUbOWSu/otga7wdVt0qRJwt3dXVy7dk3qoZADcYZSIYQQ2LBhA6pXr465c+diwoQJOHPmDLp06VLocxrXjQwPD0e9evXQsWNHDBs2DH5+ftiyZQtef/11bN26NWsbLSJn4IhZy5iYGPTs2RMdO3bEzJkzbTBqZXFzc8OoUaNw/vx5tG/fHoMGDULz5s1x/PhxqYdGNjBu3DiUKFECEydOlHoo5EhSJ1o5kPuM1Pnz50X79u0FAPHiiy+KS5cuWXU+c9eN3Lt3rwAgDh8+bNX1lEbu7wdyPFvOWt67d08EBQWJkJAQkZiYaIfRKs++fftESEiI0Gq1IiIiQty/f1/qIZGVFi5cKACIo0ePSj0UchAGSiHfAGHL8nZh1o3U6XTC399fjB49utDXVSK5vh9Ietaua5meni5at24tfH19xZUrV+w8WmVhGVxddDqdqFmzpggNDVX82sRkHgZKIb8AYcvubWt3sXn77bdFxYoVneobgtzeDyRPls5aOlNHtzXYDa4e27ZtEwDEpk2bpB4KOQADpZBXgLBFeduWu9g4Y9lbTu8Hkj9zZy3nzZsnAIjFixdLNFJlYRlc+QwGg2jXrp0IDg6W/banZD0GSiGPAGGL8rY99tR2xrK3HN4PpEx5zVpGRkY6bUe3NVgGV76///5baDQasWDBAqmHQnbGQCmkDRDWlrcdsae2s5W9GSjJWk/PWmo0GhEUFCQuXLgg9dAUiWVwZeNi586BywZJyJrFyW/fvo0ZM2Y4ZE/tHj16IC4uDlFRUTY5H5HaGfcQ37x5MypUqICSJUvi3r17CA4OltUe4krBRdGVjYudOwcGSgkUdu9t47qRPXr0QIUKFTB16lSH7KndsmVL+Pv7Y82aNTY/N5FaGffoTktLw7Fjx2S3h7gScW9wZQoICMDYsWPxxRdfIDY2VurhkL1IPUUqB44qcRa2vG2PeyMt5Uxlb5a8yVoFdXRLvYe4GrAMrixJSUnC399f9OnTR+qhkJ0wUArHBAhLu7cdcW+kJZyp25uBkqxlbke3tetaErvBlYSLnaubUwbKDJ1enLzxUKw8Eis+2HBSdJ+/U5QJnyq6z98pPthwUqw8EitO3ngoMnTWdxNa2r0th9lIU5yp25uBkqxR2D26OWtZeOwGVwYudq5uGiGcZ6PmGw9SsSoqDiuPxCHx8ZMb4l21GugM//0VZP/Yp6gb+jSuiN6NKqJCSU+LriWEwMaNG/Huu+/izp07GD9+PN5//32TDTcGgwG7d+/GwoULsWnTJri6uiI8PBxDhgxBs2bNbNZcY6
2IiAhs2bIF165dk82Y7OH48eOoX78+oqOj8fzzz0s9HFKQmJgYNGnSBC1btsSvv/4KFxcXi8+RmpqKtWvXYuHChTh06BD8/f0xcOBAvPnmm6hSpYodRq0eN2/exLhx47By5Uo0adIEX3/9Nb+GZWb79u3o2LEjNm3ahJdeeknq4ZANOUWgfJSWielbY7D62HVoNIDBgles1QACQM/6gZjYuTq8PdwKfM6FCxcwYsQI7NixAy+++CLmzZtnsuHm9u3bWLJkCRYtWoQrV66gRo0aGDp0KPr27WuX5hpr7du3D23atMHhw4fRuHFjqYdjNwyUVBgJCQlo3LgxPDw8cOjQIRQvXtzqc546dQqLFi3C8uXLkZiYiHbt2mHo0KHo2rUr3NwK/l7krPbv34+IiAjExMRg2LBhmDZtGkqWLCn1sAhPJlvCwsIQGxuL06dP832sIqrv8j5w4S5emLMfa6KvQ8CyMIn/P14IYE30dbwwdz8OXLib57HmdG9L1altC+z2JjLN2NGdmJiIyMhIm4RJAKhVqxbmz5/PDnELsRtcvjQaDWbNmoWLFy9i4cKFUg+HbEjVgXLZoWvotyQKCSnpFgfJpxkEcC85Hf2WRGH5X9dyfE4IgQ0bNqB69eqYO3cuJkyYgDNnzqBLly5Zxzhy3Uh7cXFxQbdu3bBu3To4wcQ2kVmEEBg+fDj+/PNPbNiwAZUrV7b5NYzrWv755584efIkwsPD8e233yIoKIjrWubBzc0No0aNwvnz59G+fXsMGjQIzZs3x/Hjx6UemtOrU6cOBgwYgClTpiAxMVHq4ZCNqDZQLv/rGiZHngFg+axkXoznmbT5TFaozG9xciXPRuaFi5wT5TR//nwsWrQI33//PVq2bGn363HW0jJcFF2euNi5+qgyUB64cBeTNp+x6zUmbT6DgR98brK8rYbZyLyw7E30n+3bt2P06NEYM2YMBg4c6NBrc9bSMiyDywsXO1cf1TXlPErLxAtz9tukzJ0fYTDAkPoQr5e6io/Gj4O7u7siOrVtQe3d3mzKIXPYoqPb1tghbh52g8tDcnIynn32WbRt2xYrVqyQejhkJdXNUE7fGmNWmNQ/TsKDfUtxa+V4xM3uhtgZLyJ2xou4t+ULs66j0Wrh5l0KKcFh+PLLL1U5G5kXlr3J2QkhkJycjFq1amHVqlWyCJOA/Wct9Xq9DUcrHZbB5cHLywvTpk3DypUrcezYMamHQ1ZSVaC8/iAVq49dN2tmUv/oLh4dXof066chdOmFup5BAJtO3cG0uV8r/t5IS7DsTc5Oo9GgXr162Lt3r806um3N1vda6nQ6RERE4KWXXsL27dvtNGrHYhlceoMGDULNmjUxduxYNnsqnKoC5c9RcTB7MtDFFe6BNVG8SXcUq92u0NfUajR4f1GkamcjTWG3NzmL/N7frq6uilhDz1azlleuXMGxY8dw4cIF9OvXD35+fpgwYQKSk5Md8Crsh93g0nJxccGsWbOwf/9+bN68WerhkBVUEygz9QasPBJn9n2TRXwromyfGSjZegDcyz1X6OsKaLD+nzvI1DvXb7Qse5OarVy5EgBU9wtifrOWX3yR/+0+mzZtQmZmJmJiYnDnzh188cUX2Lt3LzZu3Oig0dsXy+DSCQsLQ7t27fDee++xkUzBVBMoz91KytpO0dESH2fi/K0kSa4tFZa9Sa1ee+01zJo1K8f9gmqbiTc1a1m0aNE8f5g/evQIx48fx/nz5/Hjjz8iJSUFffv2xV9//YXXX3/dwaO3L5bBHY+LnauDagLlqXhpF0eV+vqOxrI3qdG0adOwffv2HF3ber1edTOV2RlnLYcOHZpnCT89PR19+/bFtGnTsHz5crRu3RrR0dEOHqnjsAzueFzsXPlUEyhPxyfCVSvNN31XrcbpAiXAsjepy9atWzF58mTs3bsXlSpVwp49ezB58mS0aNECEydOxO7du6Ueol3lF5r9/Pzw4osvYty4cVi1ahW8vLzwxR
dfqL48yTK4Y3Gxc2VTTaC8k5QOnT0XnsyHziBwN7lwneJKxrI3qUVqairmzJmDMmXKIDg4GOfPn8ebb76JI0eOIDg4GGvXrsW0adOy7hd01ll5g8GAgIAADB48GL/99pvUw3EYlsEdg4udK5tqAmWGTtr10dIz1bE+myVY9ia18PT0xOTJk9GsWTM0aNAAzZo1w+DBg7FmzRosW7YMu3fvhsFgwFdffQVAfc06+dm8eTN0Oh2A/4L0+fPnERwcjOvXr0s5NIdiGdwxxo0bhxIlSmDixIlSD4UspJpAWcRVwoWFhcC1K5ewevVq/P3330hJSZFuLA7GsjepRWhoKGbNmoX69eujffv2GDBgAIoXLw6DwYDAwECMGjUK//zzD+7evSv1UB3m5s2bmDp1KjZu3AiDwYDMzEzExcVh9erVqF+/PsqUKSP1EB2OZXD74mLnyqWaQFnG292ieygNmWlIOXcQKecOIuP2fwv86h7dyXpcl3jHvJMJAy6fPoHXXnsN9erVg5eXFypUqIAXXngBw4YNw7x58/Dbb7/h0qVLWb/pqwXL3qQmQUFB+OqrrxAREYFy5coB+G82Mj09HcHBwU5V5vTx8cErr7yCt99+G9WqVUPPnj1Rr149eHt746233oKXl1eO4x89eoQNGzao/t5KgGVwe+Ji58qkmr28V0XFYcLGU2Yfr3t4G/HfDc73mNKd3oVX7bZmne+zV2qhw3PeuHDhAi5cuIDz589n/f/ixYt4/PgxgCdlk6CgIFStWhXBwcEIDg7O+nOZMmUUWUpT297e3MubnpacnIy2bduiYcOGWWVvZ7Nx40acOHECLVu2RM2aNbMCt5HBYMDKlSvRr18/p9tDnHuD29727dvRsWNHbNq0CS+99JLUwyEzqCZQnopPRJcFB80+3taBcsvwFqgZ4GPycwaDAfHx8TlCpjF4Xrt2Les3Wh8fn1wh0/hfsWLFzH5tjrZv3z60adMGhw8fRuPGjaUejtUYKCm7DRs24KeffsL169dZgjPDqVOnsGjRIixfvhyJiYlo164dhg4diq5duypiZyFr7N+/HxEREYiJicGwYcMwbdo0lCxZUuphKZIQAmFhYYiNjcXp06dV/95RA9UEyky9AQ0+3SXJ4uY+Rd1wbGJbuLlYfgdBeno6Ll++nCNkGv+c/V6tgIAAk7OazzzzDFxdXW35ciym1+sREBCAPn36YM6cOZKOxRYYKJ2TEAJCCGi1Ob+OlyxZgg0bNuDnn3/OVeKlvKWmpmLt2rVYuHAhDh065DSzlpmZmViwYAEmT54MDw8PzJw5E/3798/1vqKC/fPPP6hXr17WbSgkb6oJlADw+e/n8N3+y2Zvv2gLLhrgrdAgjAurZvNzP3jwQDEldDWVvRkonY8QAgsWLMCgQYNQtGjRXD/809LS4OHhIdHolM8ZZy1ZBreNQYMGITIyEpcuXYKPj+kqIMmDqgLljQepaPn5XjjyBWk0wB/j2qBCSU+HXVOOJXQ1lb0ZKJ3Pl19+iXfffRcbNmzAK6+8IvVwVMsZZy1ZBrdOfHw8goODMXLkSHz22WdSD4fyoapACQDj15/EmujrDpml1GqA8AaBmPFqbftfzExSldDVVPZmoHQu27dvR+fOnTFq1CjMnj1b6uE4DWeatWQZ3DqTJ0/GzJkzcf78eVSqVEnq4VAeVBcok9Iy8cLc/biXnG7XUKnVAL5e7tg9OhTeHsr45mfvErpayt4MlM4jJiYGTZo0QcuWLXPs302O40yzliyDF05ycjKeffZZtG3bFitWrJB6OJQH1QVKADhw4S76LbH/QtvLBzZCq2A/u1/H3mxVQldL2ZuB0jkkJCSgcePG8PDwwKFDh1C8eHGph+T0nGXWkmVwyy1atAhDhgzB0aNH0aBBA6mHQyaoMlACwPK/rmHS5jN2O/+0riF4vekzdju/XFhSQg8ODsbhw4fRqFEjvPfee7LpQrcUA6X6ZWRkICwsDKdPn0ZUVBQqV6
4s9ZAoG2eYtWQZ3DJ6vR5169ZF6dKlsXfvXkVXwdRKtYES+C9UajWwTflbGACNFkPrl8AH3Zvb4ITKZqqEvmfPHty/fz9rdwNjCT37rKbcF3JnoFQ3IQSGDh2KpUuXYvfu3WjZsqXUQ6J8qH3WkmVw83Gxc3lTdaAEnpS/x677x+p7KrUaoHSxIkjbtxCPLkQhKioK/v7+thuoShjL3r/++is8PT1l04VuCQZKdTN2dC9evBgDBw6UejhkJrXPWrIMXjAudi5vqg+UAPAoLRPTt8ZgdfR1aAHoLXjFLhrAAKBn/UBM7Fwdifduo2HDhqhSpQp2797NtemeUlC3txIWcmegVC92dKuDWmctWQYvGBc7ly+nCJRGNx6k4ueoOKw4Epe1o46rVgNdtqnL7B/7FHVD38YV0atRxRzrTEZFRSE0NBQ9evTAsmXLZFm2lVJhu70t6UK3ZwmdgVKd2NGtPmqdtWQZPH9c7FyenCpQGmXqDTh/Kwmn4hNxKj4Rd5PTkZ6ph7ubC/y83FErwAe1AnxQtax3ntsp/vzzz+jduzdmzJiB999/38GvQN5s3e3t6IXcGSjVhx3d6qfGWUuWwU3jYufy5JSB0lY++ugjfPrpp9i4cSNvEM7GkYuc26OEzkCpLuzodi5qm7VkGdw0LnYuPwyUVjAYDAgPD8f27dvx559/ok6dOlIPSTbksMh5YUvoGo0Gb7zxBo4dO4b69etLMnayDXZ0Ozc1zVqyDJ4TFzuXHwZKK6WkpKBly5ZISEhg53c2cl7k3NwSupeXF6pXry67LnQyHzu6CVDXrCXL4P/hYufywkBpAzdu3GDn91OUurd3eno6IiMj0aNHD4wYMQIpKSmy60In87Cjm0xRw6wly+BPcLFzeWGgtBF2fucmh7J3YeR1D6VcutCpYOzopoKoYdaSZXAudi4nDJQ2xM7vnORc9s6PpU05ju5Cp/yxo5sspfRZS2cug3Oxc/lgoLQxdn7/R6llb1t2eSthIXc1YUc3WUPJs5bOXAbnYufywEBpY+z8zkmJZW9HLRvEErptsaObbEmps5bOWgbnYufSY6C0A3Z+/0eJZW+p16FkCb1w2NFN9pDXrOUbb7yBoKAgqYeXJ2crg3Oxc+kxUNoJO7+fUGLZW+pAmR9LSuimZjXVWkJnRzc5gqlZyyFDhuCll16S5ayls5XBudi5tBgo7Yid308orewt50CZH2ctobOjmxxNabOWzlIG52Ln0mKgtDN2fiuv7K3UQJkXNZfQ2dFNUlPSrKUzlMG52Ll0GCgdwNk7v5VW9lZboMyPkkvo7OgmOVHKrKXay+Bc7Fw6DJQOwM5vZZW9MzMz8fDhQ5QoUUJ2MwyOVNgSuvHP9iyhs6Ob5EwJs5ZqLoNzsXNpMFA6iLN3fiut7E15M7eEXrx4cZNra9qihM6OblICJcxaqrEMzsXOpcFA6UDO3PmttLI3FY4jSujs6CYlkvOspRrL4Fzs3PEYKB3MmTu/lVT2JtuzRQn93LlzknV0GwwGPHjwAKVLl3bYNUl95DxrqbYyOBc7dywGSgk4a+c3y95kirkldG9vb2RkZMDDwwMRERGoXbu2Q7rQDQYDtFotIiMj8dZbb8Hd3R3r1q1T9A9akge5zlqqpQzOxc4di4FSIs7Y+c2yN1kqLS0NV65cwZkzZzB+/HjEx8ejZs2aiIuLc0gXuk6ng6urK1avXo3Vq1cjJiYGVatWxfTp01GjRg1bvEQiWc5aqqUMzsXOHYeBUiLO2vnNsjdZKq+Obnt3oQshoNFosHv3bowYMQLLly/Hl19+mfWD1tfX1yGvn5yL3GYtlV4G52LnDiRIMsnJyaJevXqiYsWK4tatW1IPxyH27t0rAIjDhw9LPRRSiHnz5gkAYvHixWYdr9frRVxcnNi5c6f4+uuvxciRI0WHDh1ElSpVhFarFQAEAFG8eHHxv//9L99zHTt2TFSuXFmsWbNGCCFEu3btxLBhw4ROp7P6dRHlJy
UlRSxdulQ0a9ZMABD+/v5i/Pjx4tKlS5KMZ9++fSIkJERotVoREREh7t+/L8k4CmPhwoUCgDh69KjUQ1E1BkqJXb9+XZQtW1Y0a9ZMPH78WOrh2J1OpxP+/v5i9OjRUg8ly9mzZ8XixYtFTEyMEEKImJgY8dZbb4mBAweK3bt3Szw657Zt2zah1WrFmDFjbHK+x48fizNnzogNGzaIGTNmiGnTpuV5bFRUlGjdurXYtm2bEEKItLQ00aRJEzF58mSbjIXIXCdPnhQjRowQPj4+AoBo166dWLt2rcjIyHDoODIyMsTcuXOFt7e38PPzE4sXLxZ6vd6hYygMnU4natasKUJDQ4XBYJB6OKrFQCkDhw8fFu7u7uL11193ijf722+/LSpWrCiL17pt2zZRpEgRUapUKeHh4SG2bdsm/Pz8RNu2bcX//vc/4eLiwlApkbNnz4rixYuLzp07O3xGUKfTiZEjRwqNRiPeeecd8e2334olS5aIWrVqZc2UPj0mObyfSd3kMmv577//ij59+ggAokmTJiI6Otqh1y+Mbdu2CQBi06ZNUg9FtRgoZWLVqlUCgJgxY4bUQ7E7OZW9mzZtKiZOnCiEEOLnn38WJUuWFBMmTMj6/Pjx40W7du2kGp7TunfvnggKChIhISEiMTHR4dc3GAwiOjpaTJs2TfTo0UM0aNBAeHt7iyJFigiNRiPGjh2b6zkNGzYUDRs2FH369BFTp04Vv/zyizh+/LhITk52+PhJ/eQwa6mkMrjBYBDt2rUTwcHBDp/ZdRYMlDLy4YcfCo1Go/rfoORU9i5evLi4ePGiEOLJvXeurq7i+PHjWZ8/deqU8Pf3l2p4Tik9PV20bt1a+Pr6iitXrkg9nCwzZswQDRs2FPPnzxfnz5/P8TmDwSBmzZolBg0aJJo3by78/Pyy7tUEIAICAkSbNm3EW2+9JebOnSu2bt0qLl68KDIzMyV6NaQWUs9aKqkM/vfffwuNRiMWLFgg9VBUiV3eMuJMnd9y6fb28fHB8ePHs5bm8Pb2xj///IMqVaoAAGJjY1GtWrWsrmGyLyHDPbqN61AOGzYM165dw3fffWfW8iNy3gud1EnKDnGldINzsXM7kjjQ0lOcpfNbLmXv2rVrZzVdCPFkRjL7rNGBAwdE5cqVpRiaU7K0o9uRRo4cKQYMGGB1Cd6SLnSW0KkwpJy1lHsZ/MaNG8LT01OMHz9e6qGoDmcoZcgZ9vyWyyLn3333HQIDA9G5c2eTn58wYQLu3LmDH374wcEjcz7co/u/hdztuRc6ORcpZi3lvig6Fzu3DwZKmTpy5AhCQ0MRHh6u2j2/5VL2JunFxMRItke3UrCETtaQYjceuZbBudi5fTBQypja9/zm3t4EAAkJCWjcuDE8PDxw6NAhFC9eXOohKYq5e6EXL148V8h0xF7oJD+OnrWU497gixYtwpAhQ3D06FE0aNBA0rGoBQOlzKl5z2+5lL2fZjAYsvZwlkuJRq0yMjIQFhaG06dPIyoqCpUrV5Z6SKrCEjrlx5GzlnIrg+v1etStWxelS5fG3r17OXtvAwyUMqf2zm85lr3v3LmD1atXo2fPnihTpozUw1EtIcOObmdiaQn96cDJErq6OGrWUk5l8O3bt6Njx47YtGmT6iZspMBAqQApKSlo2bIlEhISEBUVBX9/f6mHZDNyLHsfP34c9evXR3R0tCzu91GrL7/8Eu+++y4WL16MgQMHSj0c+n8GgwE3btwwGTavXbsG44+M4sWLm5zVZAld2Rw1aymHMrgQAmFhYYiNjcXp06ftvrSS2jFQKoRaO7/lWPZmoLQ/dnQrE0vozsXes5ZyKIP/888/qFevHr766itEREQ47LpqxECpIGrt/JZb2ZuB0r7Y0a1OLKGrl71nLaUug3Oxc9tgoFQYNXZ+y63szUBpP+zodj7WltCfe+45eHl5SfwqyMies5ZSlcHj4+MRHByMkSNH4rPPPrP79dSKgVKB1Nb5LbeyNw
OlfWRkZKB9+/Y4c+YMO7oJQOFL6MY/s4QuHXvNWkpVBudi59ZjoFQgNXZ+y6nszUBpe9k7uvfs2YMWLVpIPSSSOZbQlcMes5aOLoNzsXPrMVAqlNo6v+VU9magtD12dJOtsIQuX/aYtXRkGZyLnVuHgVLB1NT5LaeyNwOlbRk7ukePHo1Zs2ZJPRxSMZbQ5cOWs5aOKoNzsXPrMFAqnJo6v+VS9magtB1jR3erVq2wadMmdnSTZFhCl4YtZy0dUQbnYueFx0CpAmrp/JZL2ZuB0jbY0U1KwBK649hq1tKeZXAudl54DJQqoYbOb7mUvRkorceOblIDltDtwxazlvYsg3Ox88JhoFQJtXR+y6HszUBpHXZ0kzNgCd02rJ21tFcZnIudW46BUkXU0Pkth7I3A6V12NFNzowl9MKxdtbS1mVwLnZuOQZKlVF657ccyt4MlIXHjm6ivLGEbp7CzlraugzOxc4tw0CpQkrv/Ja67M1AWTjs6CYqPJbQcyvsrKWtyuCmFjvP1Btw7lYSTsUn4nR8Iu4kpSNDp0cRVxeU8XZHzQAf1ArwQbWy3nBzse/uPnLDQKlSSu78lrrszUBpOXZ0E9kHS+hPFGbW0hZlcONi57/tO4xTaT5YeSQOiY8zAQCuWg10hv8iVPaPfYq6oU/jiujdqCIqlPQs5KtWFgZKFVNq57fUZW8GSsuwo5tIGk+X0LOX0tVaQrd01tLaMvjDlDQ0ePMz6Co2hFargcGCxKTVAAJAz/qBmNi5Orw91L0EEQOliim581vKsjcDpfnY0U0kT/fv38fFixdz3a+pphK6JbOWhSmDH7hwF2PW/oN7yemwJihpNYCvlztmd6+DVsF+VpxJ3hgoVU6pnd9Slr0ZKM3Hjm4iZSlsCd34ZzmW0C2ZtTS3DL7s0DVMjjwDrQYWzUrmxXiej7uGoF/TZ6w/oQwxUDoBJXZ+S1n2ZqA0Dzu6idTF0hL607OaciihmzNrWVAZfPlf1zBp8xm7jVGtoZKB0kkosfNbqrI3A2XB2NFN5FyUVkI3Z9bSVBk82SsQ/ZZE2X18ywc2Ul35m4HSiSit81uqsjcDZf7Y0U1ERkoooRc0a2ksg5+7HIugkUuR6eJhkzJ3Xoz3VO4eHaqqRh0GSifz0Ucf4auvvsL58+dlfz+lVGVvBsq8saObiMwltxJ6frOWFStWxMvT1+B0qjc02vwrLmmxJ3H75wl5ft6neS+UaNkn33NoNUB4g0DMeLV2oV6LHDFQOhmDwYBbt26hTJkykt/rYg4pyt4MlKaxo5uIbEXqEvrTs5atO7+Kq7UGAij4nLYIlACg0QB/jGujmnUqGSidkBCiwC/E9PR0/P777+jUqZOkwVOKsjcDpWns6CYie3N0Cd04azlrx3kkVWhS4OwkkDNQlmw7FEX8q+T4vGtxP7j6lCnwPC4a4K3QIIwLq2b2eOVM/lNUZHPm/FYXHR2NX375BT/88AM2b97sgFGZ1rJlS/j7+2PNmjWS7JpDT2zfvh2jR4/G2LFjGSaJyG60Wi0qVqyIihUrom3btjk+l1cJfceOHYUuoXt6eqJ339cxN3YXNP+/A44livhVgkdgSKFeq14AK47E4d22warYppEzlGSSXq/HxYsX0atXL9SrVw+LFy+WbCyOLntzhjIndnQTkdyZU0J3dXVFUFBQru0pDT4BGPDLObOvlX2G0sWrFPSPH0Hr6o4i5YJRvEk3FH2mrkVj3zK8BWoG+Fj0HDlioKRckpOTs0oG58+fR9u2bbFo0SJ06NBBkvE4uuzNQPkfdnQTkZKZU0L3qhOGUh2Gmz1hkf89lBqU7vQOvGq3zePzuX32Si30alTR7OPliiVvyiKEQHx8PHr16oUVK1agUqVKqFq1KipUqIAHDx5INi6WvaWRkZGBbt26ITExETt37mSYJCLFMaeEPm37JRy5K2AwoyHn/08K94q14Vm1KdxKlochLRmPojYh49ZFAAL3dy+CZ7UW0BYpeB
MRV60Gp+IT0asQr01ulF+0J6skJSVh27ZtAJ7cW1mhQgUEBgYiNDQUf/31F+bMmYMjR47Ax0e66XgXFxd069YN69atAyfUHUMIgeHDh+PQoUPYuHEjlwciItXx8PBAjRo1UMy3PAwWxCGPwJoo23s6itfvgqJV6qNYjVD4vzYNGvdiAACRnoL0+BizzqUzCNxNTi/U+OWGgdKJ6fV6jBgxAnv27Ml6zGAwYPr06dBoNPj6668xbdo0fPTRR2jVqpWEIwV69OiBuLg4REXZfwcDAubPn49Fixbh+++/5/JARKRqGTq91efQenjBrWT5rI8NqYlmPzc90/rrywEDpRNzcXFBz549sXjxYhw8eBDAk/KAp6cn0tPTMWLECNy4cQNTpkyx+04GBcle9ib7Ykc3ETmTIq6WNRqm37qU6zFDWjIyH8RnfawtVsLs87m7qaPRkfdQOrmOHTsiIiIC/fv3x5w5c1CuXDmsX78egYGBqFKliuRB0ih72Xv27NmK2ItciWJiYtCzZ0906tQJM2bMkHo4RER2YewZiI6ORtz5BMDgC5ixBiUAPNj9AwzpKfCq+T+4lakMQ2oiHkVtgkhPBQBoixaHe0B1s87lqtXAz8u90K9DThgoCR9//DEyMjLw5ZdfYv/+/WjQoAHatWsn6X2TpvTo0QPffPMNoqKi2JxjBwkJCejSpQsCAwOxcuVKLg9ERKqQPTxGR0fj2LFjiI6Oxp07dwAA5Vv2gFuzfhadM/POVTzY82PuT2hdUbrjCGjdzAuJOoNALRUsGQQwUNL/mzFjBm7duoVLly6hVKlSqFGjhtRDyoXd3vbDjm4iUoOCwqO/vz/q16+PoUOHon79+mjQoAESRDF0/fpPs69R8n+DkHJmP9Ji/4E++T4M6Slw8fSBe2BN+DTuhiJlgywaMwMlqU7ZsmVRtmzZXI+bs1WjI7DsbR/ZO7r37NnDjm4iUoTChMfy5cvn+tlRRm+AT1E3JJq5U457uWC4lwu2yWvwKeqGqmW9bXIuqTFQUr70ej10Oh0eP36MEiVKSD0clr3twNjRvXjxYnZ0E5Es2So8muLmokWfxhXx3f7LMDhwZToXDdC3cUVVbLsIMFCSGbp27Yrk5GTs3r0bHh4FL9RqTyx72xY7uolIbuwZHvPSu1FFfLvvsq1eglkMgCp2yDHi1otUoCNHjiA0NBTh4eFYtmyZ5KVme+/t7SxbL3KPbiKSmrnh0fifLcJjXsavP4k10dcdMkup1QDhDQIx49Xa9r+Yg3CGkgrUuHFjLFmyBL1790ZISAjef/99ScfDsrf12NFNRI4mxcyjJSZ2ro495+/gXnK6XUOlVgP4erljYifzlhZSCgZKMkuvXr1w9uxZfPDBB6hWrRpeeuklycbCsrd12NFNRPYm9/BoireHG2Z3r4N+S+y7I5tBALO714G3h5tdr+NoLHmT2QwGA8LDw7F9+3b8+eefqFOnjmRjsWfZW80lbyEEhg4diqVLl2LPnj1swiEiq8mpbG0Ly/+6hkmbz9jt/NO6huD1ps/Y7fxSYaAki6SkpKBly5ZISEhAVFQU/P39JRnHvn370KZNGxw+fNjms5RqDpRffvkl3n33XSxevJhNOERkMbWFx7wYQ6VWA5uUv43nUWuYBBgoqRBu3LiBhg0bokqVKpJ1fuv1egQEBKBPnz6YM2eOTc+t1kC5fft2dO7cGaNHj8asWbOkHg4RyZyzhMe8HLhwF2PX/WP1PZXGeyZnd6+DVsF+thugzDBQUqHIofPbXmVvNQZKdnQTUX6cPTzm5VFaJqZvjcHq6OvQAtBbkJhcNE+WBupZPxATO1dX3T2TT2OgpEL7+eef0bt3b8yYMUOSzm97lb3VFigTEhLQuHFjeHh44NChQ2zCIXJyDI+Wu/EgFT9HxWHFkbisHXVctRrosk1dZv/Yp6gb+jauiF6NKqJCSU9Jxuxo7PKmQpO685vd3gVjRzeRc1Nit7UcVSjpiXFh1fBu22
Ccv5WEU/GJOBWfiLvJ6UjP1MPdzQV+Xu6oFeCDWgE+qFrWWzU74JiLM5RkFak7v+1R9lbLDCU7uomcC2ceSUoMlGQ1KTu/7VH2VkugZEc3kXoxPJLcsORNVitWrBg2b96Mhg0b4tVXX3Vo5zfL3qZxj24i9WDZmpSAM5RkM1J1ftu67K30GUp2dBMpF2ceSak4Q0k2I9We39zb+z/co5tIOTjzSGrCQEk2JUXnN8veT7Cjm0i+GB5J7RgoyeamTp2KmJgY9OnTxyGd3y4uLujWrRvWrVuH2bNnO+U3YCEEhg8fjkOHDmHPnj2oXLmy1EMicloMj+SMeA8l2YWjO79t2e2txHso2dFNJA3e80j0BGcoyS4c3fntzGVvdnQTOQZnHonyxhlKsitHdn7bqttbSTOU7Ogmsg/OPBJZhjOUZFeO7Px2tm5vdnQT2QZnHomsx0BJdueozm9nKnuzo5uocBgeieyDgZIcwhGd387S7c2ObiLzMDwSOQ7voSSHcUTnty26veV+DyU7uoly4z2PRNLiDCU5jCM6v9Ve9mZHNxFnHonkiDOU5HD27vy2tttbrjOU7OgmZ8SZRyJl4AwlOZy9O7/V2O3Njm6Si4yMDBQpUsQu5+bMI5FyMVCSJOzZ+a22sjc7uklqQgh8++23+PDDD/H+++/j/fffhxDCqiDH8EikLgyUJBl7dX6rqdubHd0kB6mpqZg3bx78/Pxw8+ZNGAwGaLVas5/P8EikfryHkiRlr85va7q95XQPJTu6SQ5mzpyJxMREXLlyBTqdDgsWLEDZsmVNHst7HomcE2coSVL26vxWQ9mbHd0kBzqdDlu3bsWMGTPw999/Y9WqVYiLi8szUNauXRunT58GwJlHImfCQEmSq1ChAjZt2oTQ0FAMGTLEJp3fSi97x8TEoGfPnujUqRNmzJgh9XDICRnvkVy8eDGKFi2KZs2a4e7du3j8+DESEhIAAHq9PleD2Pvvvw9vb2+GRyInY/5NMER2ZOz8/umnn/D555/b5Jw9evRAXFwcoqKibHI+R2FHN8mBRqNBUlISoqOj8cYbbwAAWrduDQDYtm0bli9fbvK92bdvX7z00ksICAhgmCRyIpyhJNmwdee3Esve7OgmRxNC4Pbt2yZL2FeuXMGiRYvw3HPP4eWXX8aJEydw48YNnDlzBnv37sVrr71mtyWEiEhZOENJsjJ16lS8+uqr6NOnD/755x+rzpW97K2E3rPsHd0bN25kRzfZnBACN27cwK+//opJkyahU6dOKFu2LMqXL4/k5ORcx6empqJWrVr45Zdf4Ofnh5kzZ6Jz585o3749duzYwTBJRFk4Q0myotVqsWzZMrRs2RJdu3a1uvNbSYucz58/H4sWLcLixYvRokULqYdDCmfpUj1ubm651pZs2LAh9u/fD29v76zydlxcHDZu3IjY2FiUK1fO6vUoiUgdGChJdooVK4Zff/0VjRo1srrzWyllb3Z0kzUsDY/169c36x5HV1dXlChRAgCy1p6sVq0aihcvDr1eDwAMk0QEgOtQkozZas9vS/f2dvQ6lNyjmyxh6TqP5oZHS67PEElET+MMJclW48aNsXjxYvTp08eqPb/lXPZmRzflx14zj9ZgmCQiUxgoSdZ69+6NmJgYqzq/5Vr2Zkc3ZSfH8EhEZC4GSpI9a/f8luMi59yj27kxPBKR2vAeSlIEa/f8tmRvb0fcQ8k9up2H1Pc8EhE5AmcoSRGs7fyWU9mbHd3qxZlHInJWnKEkRbGm89vcbm97zlCyo1s9OPNIRPQfzlCSoljT+S11tzc7upWLM49ERPljoCTFKWznt5Rlb3Z0KwfDIxGR5RgoSZEK0/ktVbc3O7rli+GRiMg2eA8lKVZhOr/N6fa29T2U7OiWB97zSERkP5yhJMUqTOe3o8ve7OiWBmceiYgcizOUpHiWdn4X1O1tqxlKdnQ7BmceiYikxxlKUjxLO78d0e3Njm774MwjEZE8MVCSKljS+W
3vsjc7um2D4ZGISDkYKEk1zO38tme3Nzu6C4fhkYhI2XgPJamKuZ3f+XV7W3MPJTu6C8Z7HomI1IczlKQq5nZ+26PszY7u3DjzSETkHDhDSapkTud3Xt3ehZmhZEc3Zx6JiJwZAyWp1qpVq9CnTx/MmDHDZOe3sex98NBfKF6xOk7FJ+J0fCIu3riNAwcPoVWLZniugj9qBvigVoAPqpX1hpuLNtd5EhIS0LhxY3h4eODQoUNO0YTD8EhERNkxUJKqffTRR/j000+xcePGXJ3fcfeS0azfeyhauz0yNW4AAFetBjrDf18S2T/2KeqGPo0ronejiqhQ0hPAk47u9u3b48yZM4iKilJlEw7DIxERFYSBklTNYDAgPDwc27dvz+r8fpSWielbY7D62HUIYQA0uWcd86LVAAJAz/qBmNCpGsa+MxxLly7Fnj170KJFC/u9EAdheCQiosJgoCTVy975PX/tTnyyMw4JKekwWPHO12qAohodrqyaim8mjVBkEw7DIxER2QoDJTmF69evo0m/9+HWpA+0GlgVJo2EQQ+N1gUfdw1Bv6bPWH9CO2J4JCIie+KyQeQU9t7Qw61JHwC2CZMAoNE+6eSetPkMAMgmVHKpHiIicjTOUJLqHbhwF/2WRNn9OssHNkKrYD+7Xyc7zjwSEZEcMFCSqj1Ky8QLc/Zbfc9kQbQawNfLHbtHh8Lbw80u12B4JCIiuWKgJFUbv/4k1kRft2uYNNJqgPAGgZjxam2rz8XwSERESsJASap1/UEqWn2+F+a+wYUuE4+iNiLlzF5kPrwFrZsH3AND4NP8NbiXfdasc2g0wB/j2mStU2nWdRkeiYhI4RgoSbU+//0cvtt/2azZSWHQ487qSUiL/Sf3J13cUKbHZBR9pm6B53HRAG+FBmFcWDXT12F4JCIiFWKgJFXK1BvQ4NNdSHycadbxj45txoNdCwEAbn6VUKJFH2TcvozEQ6sBAC7evggYugga14Lvj/Qp6oZjE9vCVatheCQiIqfAZYNIlc7dSjI7TAJA8oltWX8u3WEE3AOqwbNqM6TfvIi0q8ehT7qH1EtRKFateYHnSnycifbhA3D24O9cqoeIiJwCAyWp0qn4RLOP1T9OQmbC9ScfaF1RpNxzWZ9zD6iOtKvHAQDpN86YFSiFEEjzLMPwSEREToOBklTpdHwiXLUa6My4gVKXeDvrzy5FvbMWLAcAl2I+/x338DbM4eaiRZtX++PjV2pZMGIiIiLl0ko9ACJ7uJOUblaYBACRmfbfBy45f8fSaF1NH5cPnUHgbnK6WccSERGpAQMlqVKGTm/2sRo3j6w/C33O+y6FQWfyuIKkZ5p/fSIiIqVjoCRVKuLqUvBB/8/Vxz/rz4bHSRCG/8KgPvnBf8eV8Ie53N3Mvz4REZHSMVCSKpXxdoer1rwmGJei3nArHfjkA4MeGTcvZH0u/d9zWX92rxBi1vlctRr4ebmbP1giIiKFY6AkVaoZ4GP2PZQA4FWvY9afE7Z9hdTzh/DgwE9Iu3oCwJN1KD2fbWTWuXQGgVoBPgUfSEREpBLs8iZVsjTQeT/fGY8vHkFa7D/IvBeHuxun//dJFzeU7vyuWYuaF/b6RERESsZASapUraw3fIq6mb24uUbrgjI9puBR1EYkn9kD3cPbT/byrlADPi16mb2XN/Bkp5yqZb0LO3QiIiLF4daLpFqW7OVtKwXt5U1ERKRGvIeSVKt3o4pw9K9LBgC9GlV07EWJiIgkxkBJqlWhpCd6NgiEmc3eVtNqgJ4NAlGhpKdjLkhERCQTDJSkahM7V4evl7vdQ6VWA/h6uWNip+r2vRAREZEMMVCSqnl7uGF29zp2v4/SIIDZ3evA28P8TnAiIiK1YKAk1WsV7IePu5q3KHlhTesaglbBfna9BhERkVwxUJJT6Nf0maxQaavyt/E807qG4PWmz9jmpERERArEZYPIqRy4cBdj1/2De8npVpXBjfdMzu5ehzOTRETk9Bgoye
k8SsvE9K0xWB19HVoAegu+Alw0T5YG6lk/EBM7V+c9k0RERGCgJCd240Eqfo6Kw4ojcVk76rhqNTn2AM/+sU9RN/RtXBG9GlXk0kBERETZMFCS08vUG3D+VhJOxSfiVHwi7ianIz1TD3c3F/h5uaNWgA9qBfigallvuLnwtmMiIqKnMVASERERkVU43UJEREREVmGgJCIiIiKrMFASERERkVUYKImIiIjIKgyURERERGQVBkoiIiIisgoDJRERERFZhYGSiIiIiKzCQElEREREVmGgJCIiIiKrMFASERERkVUYKImIiIjIKgyURERERGQVBkoiIiIisgoDJRERERFZhYGSiIiIiKzCQElEREREVmGgJCIiIiKrMFASERERkVUYKImIiIjIKgyURERERGQVBkoiIiIisgoDJRERERFZhYGSiIiIiKzCQElEREREVmGgJCIiIiKrMFASERERkVUYKImIiIjIKgyURERERGQVBkoiIiIisgoDJRERERFZhYGSiIiIiKzCQElEREREVmGgJCIiIiKr/B+NQg+smZrNDgAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "MWPM solution 1: [((4, 5), (5, 2)), ((2, 1), (1, 6)), ((2, 4), (1, 3))]\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAApQAAAIKCAYAAACdo98PAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjgsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvwVt1zgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAh6ZJREFUeJzt3XlYVGX/BvB7ZtgFwdwXBHLfTc0ic8l9j9RSNBU0bXF58+21stTMXrHCKK3sldRADde0zErJ3SQ1cd8VSxQlZRFB1pl5fn/4GwLZZpgzc87M3J/r8rpkOHPOMwjOzfM93+dRCSEEiIiIiIgqSS33AIiIiIjItjFQEhEREZFZGCiJiIiIyCwMlERERERkFgZKIiIiIjILAyURERERmYWBkoiIiIjMwkBJRERERGZhoCQiIiIiszBQEgFQqVSYN2+e3MMw2bx586BSqeQehtX89ddfUKlUWLRokVnn2bBhAx555BFkZWWZdR5b/b6h0inh37OgoAC+vr5YunSprOMgMhUDJSmaSqUy6s/evXvlHmoxubm5+PTTT/HEE0/A29sbbm5uaNq0KaZOnYpLly7JPbwK/fnnn5g6dSqaNm0KDw8PeHh4oGXLlpgyZQpOnTol9/DMotPp8N5772HatGnw9PQsfNzf37/w+0mtVsPHxwdt2rTB5MmTcfjwYcmuHxMTg88++0yy89EDe/fuLff/iHXr1sk9RKM4Ozvj3//+NxYsWIDc3Fy5h0NkNCe5B0BUntWrVxf7eNWqVfj1119LPN6iRQuzrpOTkwMnJ2l+HFJSUtC/f3/Ex8dj8ODBGD16NDw9PXHx4kWsW7cOkZGRyM/Pl+RalrBt2zaMHDkSTk5OGDNmDNq1awe1Wo0LFy5g8+bN+Oqrr/Dnn3/Cz89P7qFWyo8//oiLFy9i8uTJJT7Xvn17vPHGGwCAzMxMnD9/Hhs3bsTXX3+NGTNmICIiotjxlfm+iYmJwZkzZ/D6669X+jVQ2aZPn47HH3+8xOOBgYEyjKZyQkND8fbbbyMmJgYTJkyQezhERmGgJEV78cUXi3186NAh/PrrryUerwy9Xo/8/Hy4ubnBzc3N7PMZhISE4Pjx49i0aROGDx9e7HMffPAB3n33XcmuJbWEhASMGjUKfn5+2LVrF+rWrVvs8x999BGWLl0Ktbr84sb9+/dRpUoVSw610r755ht06dIF9evXL/G5+vXrl/je+uijjzB69Gh8+umnaNKkCV599dXCz0n5fUPS6Nq1K0aMGCH3MMzi4+ODvn37IioqioGSbAZL3mTz7t+/jzfeeAO+vr5wdXVFs2bNsGjRIgghih2nUqkwdepUfPvtt2jVqhVcXV2xffv2ws89fO9UUlISJk6ciHr16sHV1RUBAQF49dVXy51dPHz4MH766SdMnDixRJgEAFdX1xL3/+3evRtdu3ZFlSpV4OPjg2effRbnz58v8dzffvsNjz/+ONzc3NCoUSMsW7aszHGsWbMGHTt2hLu7Ox555BGMGjUK169fL/N4g48//hj379/HN998UyJMAoCTkxOmT58OX1/fwsdCQkLg6emJhIQEDBw4EF5eXhgzZgwA4MCBA3j++efRsGFDuLq6wtfXFzNmzEBOTk6x8xrOcfXqVfTr1w9VqlRBvXr1MH/+/BL/jgaRkZFo1KgRXF1d8fjjj+OPP/6o8PXl5uZi+/bt6N27d4XHGri7u2P16tV45JFHsGDBgmLjefj7JjMzE6+//jr8/f3h
6uqKWrVqoU+fPjh27BgAoEePHvjpp59w7dq1wlKsv78/ACA/Px9z585Fx44d4e3tjSpVqqBr167Ys2dPsfEUvY/UmK/BhQsX8MILL6BmzZpwd3dHs2bNSvxSk5SUhAkTJqB27dpwdXVFq1atsHLlyhLn+vzzz9GqVSt4eHigWrVq6NSpE2JiYir8Gt6+fRsTJ05E7dq14ebmhnbt2iE6Otqs12WOvLw8zJgxAzVr1oSXlxeGDh2KGzdulHrs3r170alTp2I/d2Xdu2zMz93ly5cxfPhw1KlTB25ubmjQoAFGjRqFjIyMYsf16dMHv/32G9LS0qR74UQWxBlKsmlCCAwdOhR79uzBxIkT0b59e+zYsQMzZ85EUlISPv3002LH7969Gxs2bMDUqVNRo0aNwjfzh928eROdO3fG3bt3MXnyZDRv3hxJSUnYtGkTsrOz4eLiUurztm7dCgAYO3asUePfuXMnBgwYgEcffRTz5s1DTk4OPv/8c3Tp0gXHjh0rHN/p06fRt29f1KxZE/PmzYNWq8V7772H2rVrlzjnggULMGfOHLzwwgt46aWXcOfOHXz++efo1q0bjh8/Dh8fnzLHs23bNjRu3BhPPPGEUeM30Gq16NevH55++mksWrQIHh4eAICNGzciOzsbr776KqpXr44jR47g888/x40bN7Bx48Zi59DpdOjfvz+efPJJfPzxx9i+fTvee+89aLVazJ8/v9ixMTExyMzMxMsvvwyVSoWPP/4Yw4YNw9WrV+Hs7FzmOOPj45Gfn48OHTqY9Po8PT3x3HPPYcWKFTh37hxatWpV6nGvvPIKNm3ahKlTp6Jly5ZITU3Fb7/9hvPnz6NDhw549913kZGRgRs3bhR+bxru47x37x6WL1+O4OBgTJo0CZmZmVixYgX69euHI0eOoH379iZ/DU6dOoWuXbvC2dkZkydPhr+/PxISEvDjjz9iwYIFAIC///4bTz75ZOEvXDVr1sQvv/yCiRMn4t69e4Wl+a+//hrTp0/HiBEj8K9//Qu5ubk4deoUDh8+jNGjR5f5tcvJyUGPHj1w5coVTJ06FQEBAdi4cSNCQkJw9+5d/Otf/zL5dZUnMzMTKSkpJR6vXr16YQh86aWXsGbNGowePRpPPfUUdu/ejUGDBpV4zvHjx9G/f3/UrVsX77//PnQ6HebPn4+aNWuWONaYn7v8/Hz069cPeXl5mDZtGurUqYOkpCRs27YNd+/ehbe3d+H5OnbsCCEE4uLiMHjw4ApfN5HsBJENmTJliij6bfv9998LAOK///1vseNGjBghVCqVuHLlSuFjAIRarRZnz54tcV4A4r333iv8eNy4cUKtVos//vijxLF6vb7M8T333HMCgEhPTzfq9bRv317UqlVLpKamFj528uRJoVarxbhx4wofCwoKEm5ubuLatWuFj507d05oNJpiX4+//vpLaDQasWDBgmLXOX36tHBycirxeFEZGRkCgAgKCirxufT0dHHnzp3CP9nZ2YWfGz9+vAAg3n777RLPK3qcwcKFC4VKpSr2WgznmDZtWuFjer1eDBo0SLi4uIg7d+4IIYT4888/BQBRvXp1kZaWVnjsDz/8IACIH3/8sczXJ4QQy5cvFwDE6dOnS3zOz89PDBo0qMznfvrppwKA+OGHHwofe/j7xtvbW0yZMqXcMQwaNEj4+fmVeFyr1Yq8vLxij6Wnp4vatWuLCRMmFD5mytegW7duwsvLq9jXWoji38MTJ04UdevWFSkpKcWOGTVqlPD29i78N3z22WdFq1atyn1tpfnss88EALFmzZrCx/Lz80VgYKDw9PQU9+7dM/l1lWbPnj0CQJl/bt26JYQQ4sSJEwKAeO2114o9f/To0SX+PYcMGSI8PDxEUlJS4WOXL18WTk5Olfq5O378uAAgNm7cWOHX7ebNmwKA+Oijjyo8lkgJWPImm/bzzz9Do9Fg+vTpxR5/4403IITAL7/8Uuzx7t27o2XLluWeU6/X
4/vvv8eQIUPQqVOnEp8vb5mee/fuAQC8vLwqHPutW7dw4sQJhISE4JFHHil8vG3btujTpw9+/vlnAA9m7nbs2IGgoCA0bNiw8LgWLVqgX79+xc65efNm6PV6vPDCC0hJSSn8U6dOHTRp0qRE+bS0sRftfDbo0aMHatasWfjnyy+/LHFM0XsLDdzd3Qv/fv/+faSkpOCpp56CEALHjx8vcfzUqVML/26YMcvPz8fOnTuLHTdy5EhUq1at8OOuXbsCAK5evVrm6wOA1NRUACj2XGMZvi6ZmZllHuPj44PDhw/j5s2bJp9fo9EUznzr9XqkpaVBq9WiU6dOhSXzoir6Gty5cwf79+/HhAkTin3fAP98Dwsh8N1332HIkCEQQhT7nunXrx8yMjIKr+3j44MbN26YXH7++eefUadOHQQHBxc+5uzsjOnTpyMrKwv79u0z6XVVZO7cufj1119L/DH8jBl+rh7+P+PhJimdToedO3ciKCgI9erVK3y8cePGGDBgQLFjjf25M8xA7tixA9nZ2eW+DsPXoLTZViIlYsmbbNq1a9dQr169EgHO0PV97dq1Yo8HBARUeM47d+7g3r17aN26tcnjqVq1KoAHoaO80nLRsTVr1qzE51q0aIEdO3bg/v37yMzMRE5ODpo0aVLiuGbNmhW+QQIP7s8SQpR6LIByS4aGr2FpazMuW7YMmZmZ+Pvvv0ttiHJyckKDBg1KPJ6YmIi5c+di69atSE9PL/a5h+8ZU6vVePTRR4s91rRpUwAP7q8r6uGAZHjzffgaZRFl3JdZHsPXpbxfFj7++GOMHz8evr6+6NixIwYOHIhx48aVeF1liY6OxieffIILFy6goKCg8PHSvm8r+hoYAlh538d37tzB3bt3ERkZicjIyFKPuX37NgDgrbfews6dO9G5c2c0btwYffv2xejRo9GlS5dyX9O1a9fQpEmTEo1cZf2Mmvtv26ZNm3Lvkb127RrUajUaNWpU7PGHfw5v376NnJwcNG7cuMQ5Hn7M2J+7gIAA/Pvf/0ZERAS+/fZbdO3aFUOHDsWLL75YrNwN/PM96kjrzJJtY6Akh1J0xswSmjdvDuDBPY+GmRVr0uv1UKlU+OWXX6DRaEp8vrTZRwNvb2/UrVsXZ86cKfE5wz2VDwc7A1dX1xKBQafToU+fPkhLS8Nbb72F5s2bo0qVKkhKSkJISAj0er0Jr6y40l4bUHFQrF69OoAH4aS0AFwew9eltIBh8MILL6Br167YsmULYmNjER4ejo8++gibN28uMav1sDVr1iAkJARBQUGYOXMmatWqBY1Gg4ULFyIhIaHE8ZX9GhRl+Dd48cUXMX78+FKPadu2LYAHAfDixYvYtm0btm/fju+++w5Lly7F3Llz8f777xt9zYpI8bqszZSfu08++QQhISH44YcfEBsbi+nTp2PhwoU4dOhQse9JQ4CuUaOG5V8AkQQYKMmm+fn5YefOncjMzCw2c3ThwoXCz5uqZs2aqFq1aqnBqiJDhgzBwoULsWbNmgoDpWFsFy9eLPG5CxcuoEaNGqhSpQrc3Nzg7u6Oy5cvlzju4ec2atQIQggEBAQUzu6ZYtCgQVi+fDmOHDmCzp07m/z8ok6fPo1Lly4hOjoa48aNK3z8119/LfV4vV6Pq1evFhu3YRH4spqnTGUI/H/++SfatGlj9POysrKwZcsW+Pr6Vrjmad26dfHaa6/htddew+3bt9GhQwcsWLCgMFCWNeO0adMmPProo9i8eXOxY9577z2jx1mUYVa0vO9jQ5ezTqczqvO9SpUqGDlyJEaOHIn8/HwMGzYMCxYswKxZs8pcQsnPzw+nTp2CXq8v9kuHOT+j5vDz84Ner0dCQkKxWcmHf5Zq1aoFNzc3XLlypcQ5Hn7M1J+7Nm3aoE2bNpg9ezbi4uLQpUsX/O9//8N///vfwmP+/PNPAOavsUtkLbyHkmzawIEDodPp8MUXXxR7/NNPP4VKpapwVqg0arUaQUFB
+PHHH3H06NESny9vpiQwMBD9+/fH8uXL8f3335f4fH5+Pv7zn/8AeBA82rdvj+joaNy9e7fwmDNnziA2NhYDBw4E8GDGpl+/fvj++++RmJhYeNz58+exY8eOYucfNmwYNBoN3n///RLjFEIU3kNYljfffBMeHh6YMGEC/v77b5Ne+8MMMzVFnyOEwOLFi8t8TtF/RyEEvvjiCzg7O6NXr15GX7c8HTt2hIuLS6n/rmXJycnB2LFjkZaWhnfffbfMQKjT6UqU8WvVqoV69eohLy+v8LEqVaqUOA4o/et1+PBh/P7770aPtaiaNWuiW7duWLlyZbHvm6LX0Gg0GD58OL777rtSg+edO3cK//7w946LiwtatmwJIUSx8vzDBg4ciOTkZKxfv77wMa1Wi88//xyenp7o3r17pV5fZRn+T1iyZEmxxx/evUij0aB37974/vvvi90Te+XKlRL3Zhv7c3fv3j1otdpin2/Tpg3UanWx7xHgwYoEKpXKphZkJ8fGGUqyaUOGDMEzzzyDd999F3/99RfatWuH2NhY/PDDD3j99ddL3CdlrLCwMMTGxqJ79+6YPHkyWrRogVu3bmHjxo347bffyr0/ctWqVejbty+GDRuGIUOGoFevXqhSpQouX76MdevW4datW4VrUYaHh2PAgAEIDAzExIkTC5cN8vb2Lra+4fvvv4/t27eja9eueO211wrfkFu1alVsK8RGjRrhv//9L2bNmoW//voLQUFB8PLywp9//oktW7Zg8uTJhYG2NE2aNEFMTAyCg4PRrFmzwp1yhBD4888/ERMTA7VabVS5uHnz5mjUqBH+85//ICkpCVWrVsV3331X5r1wbm5u2L59O8aPH48nnngCv/zyC3766Se88847pS7TUhlubm7o27cvdu7cWWIpIuDBeoxr1qwB8GBW8ty5c9i4cSOSk5Pxxhtv4OWXXy7z3JmZmWjQoAFGjBiBdu3awdPTEzt37sQff/yBTz75pPC4jh07Yv369fj3v/+Nxx9/HJ6enhgyZAgGDx6MzZs347nnnsOgQYPw559/4n//+x9atmxZ6T3HlyxZgqeffhodOnTA5MmTERAQgL/++gs//fQTTpw4AQD48MMPsWfPHjzxxBOYNGkSWrZsibS0NBw7dgw7d+4sXAexb9++qFOnDrp06YLatWvj/Pnz+OKLLzBo0KBy7yudPHkyli1bhpCQEMTHx8Pf3x+bNm3CwYMH8dlnnxnVwGaKAwcOlLplYdu2bdG2bVu0b98ewcHBWLp0KTIyMvDUU09h165dpc5Ezps3D7GxsejSpQteffXVwl9eW7duXfj1A4z/udu9ezemTp2K559/Hk2bNoVWq8Xq1asLg31Rv/76K7p06VJ4mwaR4lmrnZxICg8vGySEEJmZmWLGjBmiXr16wtnZWTRp0kSEh4eXWN4HQJlLuuCh5UKEEOLatWti3LhxombNmsLV1VU8+uijYsqUKSWWdilNdna2WLRokXj88ceFp6encHFxEU2aNBHTpk0rtpSREELs3LlTdOnSRbi7u4uqVauKIUOGiHPnzpU45759+0THjh2Fi4uLePTRR8X//vc/8d5775X4egghxHfffSeefvppUaVKFVGlShXRvHlzMWXKFHHx4sUKxy6EEFeuXBGvvvqqaNy4sXBzcxPu7u6iefPm4pVXXhEnTpwoduz48eNFlSpVSj3PuXPnRO/evYWnp6eoUaOGmDRpkjh58qQAIL755psS50hISBB9+/YVHh4eonbt2uK9994TOp2u8DjD0jLh4eElrlXav2FpNm/eLFQqlUhMTCz2uJ+fX+ESMyqVSlStWlW0atVKTJo0SRw+fLjUcxW9Zl5enpg5c6Zo166d8PLyElWqVBHt2rUTS5cuLfacrKwsMXr0aOHj4yMAFC4hpNfrRVhYmPDz8xOurq7iscceE9u2bRPjx48vtsyQqV+DM2fOiOeee074+PgINzc30axZMzFnzpxix/z9999i
ypQpwtfXVzg7O4s6deqIXr16icjIyMJjli1bJrp16yaqV68uXF1dRaNGjcTMmTNFRkZGeV/uwvOHhoaKGjVqCBcXF9GmTZti//6VeV0Pq2jZoKLPz8nJEdOnTxfVq1cXVapUEUOGDBHXr18v9Tq7du0Sjz32mHBxcRGNGjUSy5cvF2+88YZwc3MrMYaKfu6uXr0qJkyYIBo1aiTc3NzEI488Ip555hmxc+fOYue5e/eucHFxEcuXLy/3NRMpiUoIBd/pTEQOISQkBJs2bar0TJwpdDodWrZsiRdeeAEffPCBxa9H9icoKAhnz54t9b5mKXz22Wf4+OOPkZCQYPFGQiKp8B5KInIoGo0G8+fPx5dffmmVAEu27eFtQi9fvoyff/4ZPXr0sMj1CgoKEBERgdmzZzNMkk3hDCURyc6aM5REpqhbty5CQkLw6KOP4tq1a/jqq6+Ql5eH48ePl7nuJJEjYlMOERFRGfr374+1a9ciOTkZrq6uCAwMRFhYGMMk0UM4Q0lEREREZuE9lERERERkFgZKIiIiIjILAyURERERmYWBkoiIiIjMwkBJRERERGZhoCQiIiIiszBQEhEREZFZGCiJiIiIyCwMlERERERkFgZKIiIiIjILAyURERERmYWBkoiIiIjMwkBJRERERGZhoCQiIiIiszBQEhEREZFZGCiJiIiIyCwMlERERERkFgZKIiIiIjILAyURERERmYWBkoiIiIjMwkBJRERERGZhoCQiIiIiszBQEhEREZFZGCiJiIiIyCwMlERERERkFgZKIiIiIjILAyURERERmYWBkoiIiIjMwkBJRERERGZxknsAcijQ6XEhOROnkzJwJikDtzPzkK/VwcVJg1permhd3xtt6nujeR0vOGuYuYmIHBXfL4iMoxJCCLkHYS030rMRcyQR3x5OREZOAQDASa2CVv/Pl6Dox97uzhjzREOM7twQDap5yDJmIiKyPr5fEJnGIQLlvdwChP10HuuPXodKBehNeMVqFSAAjOzoi3cHtYCXm7PFxklERPLi+wVR5dh9oNx/6Q7e2HgSqffzTPqP4WFqFVDD0xWLRrRDt6Y1pRsgEREpAt8viCrPrgNldNxfeO/Hs1Cb+FtmWQznmT+0FcYF+pt/QiIiUgS+XxCZx24D5arf/8LcrWctdn7+J0FEZB/4fkFkPrsMlPsv3cG4b45Y/DqrQjuznEFEZMP4fkEkDbtb4+BebgHe2HgSapVlr6NWAf/ZdBKZuQWWvRAREVkE3y+IpGN361CG/XTeqBuqdTmZuHf4O+QlXUD+rcsQ2jwAQJXWvVBj8IwKr6MXQEpWHhb8fB4fDmsrxdCJiMiKjH2/yL12Cn+vfafMz3t3CYZP1zFlfp7vF+QI7GqG8np6NtYfvW7UDdW6e3dw79Am5F0/UxgmTaUXwPqj13EjPbtSzyciInmY8n4hBb5fkL2zqxnKtUcSoVIBRt0VqnGCq29ruNZvDl12Bu6f+rVS11T//3Vn9mteqecTKc1XX32Fr776Cn/99RcAoFWrVpg7dy4GDBgg78CIJGTS+0UR1Xq/DJfajxZ7zKmqcfdG8v2C7JndzFAW6PT49nCi0b9tutRoiDpjPkS1HiFwrduk0tfVCWDN4UQU6PSVPgeRkjRo0AAffvgh4uPjcfToUfTs2RPPPvsszp61XBcskTWZ+n5RlEtNP7j5tir2x8m7llHP5fsF2TO7maG8kJxZuD2WtWXkFOBiciZa1/eW5fpEUhoyZEixjxcsWICvvvoKhw4dQqtWrWQaFZF0zHm/SPlxEXQ596B2coVL3aao+uRwuPu3N/r5fL8ge2U3M5SnkzIc+vpElqDT6bBu3Trcv38fgYGBcg+HSBLm/H+ty0oDdFro8+4j96/juL1uDrJO7bTa9YmUym5mKM8kZcBJrYLWWndYF+GkVuF0UgaCrX5lIss4ffo0AgMDkZubC09PT2zZsgUtW7aUe1hEkjD5/UKthmvDtvBoFgjn
avWgz83CvSPfIz/5MgCBtF1fw6P501C7uFV4Kr5fkL2ym0B5OzNPljAJAFq9wJUbf+PYMa4xRrahWrVqCAgIKPPzzZo1w4kTJ5CRkYFNmzZh/Pjx2LdvX6mh8syZM8jPz7fkcIkkdflGmknvF26+rVFndFixx9wf7YgbX02EyLsPkXcfeUnn4R7wWIXn0uoF7mRVbmURIiWzm0CZr9XJev19v8Vh4/T3ZB0DkbGCg4MRExNT5uddXFzQuHFjAEDHjh3xxx9/YPHixVi2bFmJYwcNGoTExESLjZVIarVeeB/uj3Y06xxqN084V6v3/7OUgD7b+DJ2XoG871dElmA3gdLFSSPr9bs//RTefite1jEQGatatWomHa/X65GXV/qsyk8//cQZSrIpCw+m4Y+bxs8S5iVfgWudxsUe0+dmoSA9qfBjdRUfo8/n6izv+xWRJdhNoKzl5WrSPTH6glzkJBwFAOT/fbXwce2927h/4TcAgGvdpkYtB+GkVqFxg9ro0KFNJUZOpCyzZs3CgAED0LBhQ2RmZiImJgZ79+7Fjh07Sj2+devWVh4hkXmaXDuN48nXjX6/SN+1HPq8+/Bs3RPOtQKgz87AvSPfQ+Q9WKRc7V4VrvVbGHUuJ7UKNT1dKz12IqWym0DZur43Yo4YX3bT389Ayvcflng8L/E08hJPAwCqD3wdnm17V3gurV6gDZeAIDtx+/ZtjBs3Drdu3YK3tzfatm2LHTt2oE+fPnIPjUgSpr5fAEDB7T+RvntFyU+onVB9wDSonY0LiXy/IHtlN4FS7h9Qua9PJJUVK0p50ySyI6b+f12t5wTcP7sPuddOQpeVBn3efWg8vOHq2xreTwyHS51GFr0+kS1QCWHqxlPKVKDTo9OCnbIsbq7LyUTzc1EIDRmHoKAgeHh4WH0MRERkHDnfL7zdnXH03d5w1tjNMtBEAOxoYXNnjRpjnmgItcrKFxZ6dKx6H3m52RgzZgzq1q2LyZMnIy4uDnaS1YmI7Ipc7xcaFfDiEw0ZJsku2dV39ejODWHtDCcA/LTkHfj6+mL79u3417/+hR07dqBLly5o1qwZwsLCcP36desOioiIyiXH+4UeQHDnhta9KJGV2FWgbFDNAyM7+Vrtt061CnihYwMsWTgP+/fvx4ABA5CQkIBffvkFu3fvRmBgIBYsWAA/Pz/07dsXMTExyM7Ots7giIioTHK8X4zs5IsG1XhLFNknuwqUAPDuoBao4elq8f8k1Cqghqcr5g5pjVdffRVXrlzBl19+if3796N169ZYvnw5Zs2aheTkZCxfvhy5ubksiRMRKYi13y/eHWjc0kJEtsjuAqWXmzMWjWgHS+/CqBfAohHt4OXmDABwdXUtESxbtmyJV155BU899RT279+Py5cvsyRORKQQXm7O+GhYa6u/XxDZI7sLlADQrWlNzB/ayqLX+GBoK3RrWrPE42UFyzFjxkCr1WL+/Pn4888/WRInIpKZVqvFlGE9kRr7lUWvU9b7BZE9sctACQDjAv0LQ6VU5QzDeT4Y2gpjA/3LPba8YHnp0iU888wziI6OZkmciEgGWq0Wbdu2xfnz5zG0uTdwdAMA6d4vIPQAjHu/ILIHdrMOZVn2X7qD/2w6iZSsPLPKGoZ7YBaNaFep3zTz8vKwcuVKhIWFISkpCcHBwZgzZw6aN29eeMyVK1ewatUqREdHIzExEU2aNEFISAjGjh0LX1/fyg+eiIgKFQ2TISEhuHHjBo4fP46vtuxF+IFks98vVBAoyEzD9Ceq4c2xQ6QbOJGC2X2gBIB7uQUI++k81sdfhxqAzoRXrFY9WBpoZEdfvDuohdn3wBgTLPV6Pfbt24eoqChs2rQJOTk56N27N0JCQrhwOhGRGYqGydDQUNSoUQMRERGIjY1Fz549zXq/0KgeLA30QkdfnIyai1PxR3D27FlUq1bNUi+HSDEcIlAa3EjPxtojiVhzOLFwhwQntQraIr+KFvs47z4aFiQiZv5r
ki/1YEywBIDMzExs3LgRUVFROHDgAKpWrYqRI0ciJCQEgYGBUKmsvZI7EZFtejhM9u3bF8HBwYiIiMCMGTOKHWvq+4W3uzNefKIhgjs3RINqHkhKSkKrVq3w7LPPIjo62novkkgmDhUoDQp0elxMzsTppAycTsrAlRt/Y99vcej+9FNo3KA22tT3Rpv63vg+6kss+O8HuHnzpsV+wzQ2WAIsiRMRVdbDYfJf//oXAgMDMWzYMKxevbrMX84ffr+4k5WHvAIdXJ01qOnpWvh+0ayOV4kdcKKiohAaGoqtW7diyBCWvsnOCRLx8fECgIiPjy/2+K1bt4STk5NYsmSJxceQm5srli5dKho0aCBUKpUYPXq0OH/+fKnH6nQ6sXv3bjFu3Djh4eEhVCqV6NOnj/j222/F/fv3LT5WIiJbUlBQIFq0aCEAiNDQUJGSkiL8/f1F+/btLfp/pl6vFwMHDhR169YVaWlpFrsOkRLYbZe3FOrUqYOhQ4ciMjLS4h3X5XWFX7hwodixarWaXeJEREZ4eGYyMjISo0aNQmZmJrZs2WLRe9JVKhUiIyORnZ2N119/3WLXIVICBsoKTJ48GWfOnMGhQ4escj1TgiUAeHl5YcKECVw4nYjoIQ+HyZUrV+Kdd97Bnj17sGHDBvj7+1t8DPXr18dnn32GVatW4ccff7T49YhkI/cUqRKUVfIW4kF52d/fX4wfP976AxOmlcINWBInIkf3cJlbCCHWrl0rAIiIiAirjoWlb3IEnKGsgFqtxqRJk7B+/Xqkp6db/fqmzlgCLIkTkWMrbWby5MmTmDBhAsaMGWP18jNL3+QIGCiNMGHCBGi1WqxZs0a2MVQmWAIsiRORYyktTKampiIoKAjNmjVDZGSkLMutsfRN9s4hlw162LFjx9CxY0fEx8ejQ4cOpR4zfPhwXLp0CadOnVLE2o+mLDf0MC6cTkT2qLQwqdVqMWDAABw/fhxHjx61yn2TZRFCYPDgwTh+/DgXPCe7wxlKI1m7OacilZ2xBFgSJyL7U1qYBGD1JpzysPRNdk3WOzgVorymHAO5m3MqUpnmnYddvnxZzJkzRzRs2FAAEE2aNBELFiwQiYmJFho1EZH5SmvAEUK+JpyKfPPNNwKA2Lp1q9xDIZIMA6UwLlAKIcSCBQuEm5uborv0pAiW7BInIltRVpg8ceKEcHd3F2PGjBF6vV7GEZbErm+yRyx5m0AJzTkVMacUbsCSOBHZgrLK3EpowikPS99kjxgoTWDNnXPMJUWwBNglTkTKVFaY1Gq1VtsJxxzs+ia7I/cUqRIYW/IWQojt27cLACIuLs4KI5OOFKVwA5bEiUhOZZW5hRBi5syZQqPRiF27dsk0OuOx9E32hDOUJurTpw/8/f2xbNkyuYdiEqlmLIGSJfEVK1YgLy+PJXEisriyZiYBYN26dQgPD0d4eDh69uwp4yiNw9I32RW5E60SmDJDKYRtNOdURMoZS4MrV66IuXPnCj8/P3aJE5HkypuZVHITTkXY9U32gIFSmB4ob926JZycnMSSJUssPDLLs0SwZEmciKRWXphMSUkR/v7+on379jb5fwxL32QPWPKuBFtqzqmIlKVwA5bEiUhK5ZW5baUJpzwsfZM9YKCsJKXtnGMuSwRL4EGXeGhoKPbt24crV67g9ddfR2xsLLvEicgo5YVJQFk74ZiDXd9k8+SeIlUCU0veQih/5xxzWaIUbsCSOBEZo7wytxDK3Qmnslj6JlvGGcpKUqvVmDRpEtavX4/09HS5hyM5S81YAiyJE1HFKpqZPHnyJCZMmIAxY8bYTZmYpW+yZQyUZrCFnXPMZclgCbAkTkQlVRQmlb4TjjlY+iZbpRKcBsKxY8fQsWNHxMfHo0OHDiY9d/jw4bh06RJOnTplV/+plSUvLw8rV65EWFgYkpKSEBwcjDlz5qB58+aSXUOv12Pfvn2IiorCpk2bkJOTg969eyMkJARBQUE2edM9ERmnojCp1Wox
YMAAHD9+HEePHrXp+ybLIoTA4MGDcfz4cZw9exbVqlWTe0hEFeIMpZnsrTmnIpaesQRYEidyVBWFScB+mnDKw9I32SRZ7+BUiMo05RjYe3NORSzZvPMwLpxOZL8qasARwv6acCrCBc/JljBQCvMCpRD2sXOOuawZLNklTmRfjAmTtrwTTmWx65tsCUveEnCE5pyKWKMUbsCSOJH9MKbMbc9NOOVh6ZtsCQOlBOxp5xxzWTNYAuwSJ7JlxoRJe9gJxxzs+iabIfcUqRKYW/IWQojt27cLACIuLk7Ckdk+a5bCDVgSJ1I+Y8rcQggxc+ZModFoxK5du6w4OmVh6ZtsAWcoJdKnTx/4+/tj2bJlcg9FUaw9YwmwJE6kdMbMTALAunXrEB4ejvDwcPTs2dPKo1QOlr7JJsidaJVAihlKIdicYww5ZiwN2CVOJD9jZyYdsQmnIuz6JiVjoBTSBcpbt24JJycnsWTJEolGZr/kDJYsiRPJw9gwmZKSIvz9/UX79u35M1kES9+kZCx5S4jNOcaToxRuwJI4kfUZW+Z29Cac8rD0TUrGQCkxR9s5x1xyBkuAXeJE1mBsmAQcYyccc7DrmxRL7ilSJZCq5C0Ed84xl5ylcAOWxImkY2yZWwjH2wmnslj6JiXiDKXE1Go1Jk2ahPXr1yM9PV3u4dgcuWcsAZbEiaRiyszkyZMnMWHCBIwZM4bl3Aqw9E1KxEBpAdw5x3xKCJYAS+JElWVKmHTUnXDMwdI3KY1KcJoFx44dQ8eOHREfH48OHTpIcs7hw4fj0qVLOHXqFP9zlEBeXh5WrlyJsLAwJCUlITg4GHPmzEHz5s2tPha9Xo99+/YhKioKmzZtQk5ODnr37o2QkBAEBQWxiYAcnilhUqvVYsCAATh+/DiOHj3K+yZNIITA4MGDcfz4cZw9exbVqlWTe0jkwDhDaSFszpGWUmYsAZbEicpjSpgE2IRjDpa+SVFkvYNTIaRsyjFgc45lKaF552FcOJ0cnSkNOEKwCUcqXPCclICBUlgmUArBnXOsQYnBkl3i5IhMDZPcCUc67PomJWDJ24LYnGN5SiqFG7AkTo7G1DI3m3CkxdI3KQEDpQVx5xzrUWKwBNglTvZPCIEePXoYHSa5E45lsOubZCfzDKkiWKrkLYQQ27dvFwBEXFyc5OemsimxFG7AkjjZk4KCAnHu3Dkxffp0o46fOXOm0Gg0YteuXRYemeNh6ZvkxBlKC+vTpw/8/f2xbNkyuYfiUMqbsbx48aJR5xAWmlVmSZzsiZOTE5o2bYrFixdXeOy6desQHh6O8PBw9OzZ0wqjcywsfZOcGCgtjDvnyKu0YPnll18aFdYM93Vt3boVCxYswN27dyUfH0vipGRpaWm4cOECLl++jPz8/DKP02g0FZ6LO+FYB0vfJBcubA7LLGxeVHJyMnx9fREREYFp06ZJfn4yXl5eHnQ6Hdzc3KBWl/x9SghRGCQ3b96MAwcOICoqChkZGZg+fToWLVoEJycni46RC6eTEpw5cwbjxo2DVqvFpUuXMHv2bMyaNcuo8Piw1NRUdOrUCT4+Pjh48CC/hy1McMFzkgFnKK2AzTnK4erqCg8Pj1LDJAAUFBTgxIkTeO655/DJJ59gwIABeP755zFkyBD06dMHTk5OFv83ZEmc5Hbu3Dn06NEDvXr1wrp167BgwQLMnTsXN2/eNPlcbMKxPpa+SQ4MlFbCnXOU7/79+xg7diw++eQTPProo9i5cyc6dOiAU6dOISAgAM888wwAWHWJE5bEydpSUlLw6quv4sUXX0R4eDhatmyJf//73+jXrx9u3LiBEydOmPQ9x51w5MHSN1mdXN1ASmLJLm8D7pyjbHq9XqxatUqoVCoRHh5e+Pi7774rnnzySfHzzz8XHic3domTJaWkpIiwsDBx6dKlwsfmz58vVCqVaN++vWjQoIHo16+fOHDgQIXnunjxInfCkRG7vsmaOENpJWzOUTaVSoXBgwfj448/
RkREBBo0aICPP/4Ye/fuRa9evWSZnSwLS+JkSdWrV8fUqVPRpEkTAA86s9977z2sW7cOu3btwrfffou0tDTs2rWrwnP5+/vjlVdeYdlVJix9kzWxKQeWb8oxYHOObbh9+zY2bNiAWbNmQQiBF198Ef/73/+g0+kq1ZBgLQkJCVi1ahWio6Nx7do1NGnSBCEhIRg7dix8fX3lHh7ZqGvXriE1NbXY/42DBw+GWq3G1q1bK3x+Tk4O3N3dLTlEqkBUVBRCQ0OxdetWDBkyRO7hkJ1ioIT1AiUADB8+HJcuXcKpU6cUMdtFxen1eqjVaty8eRNBQUFwdXVFfHw8/vjjD7Rq1QpA8U5wJWKXOFmKXq9Hfn4+QkJC0LZtW7zzzjtyD4mMINj1TVbAkreVsTlH2QxB0bA80Lx585CdnV0YJg3HvP/++7Ju6VgelsTJUtRqNcLCwvD777/j+eefL/dYvV5vpVFRRVj6JmtgoLQy7pyjbCqVCjqdDj4+PnjssccQGBhY7PNCCNy4cQPLly9XxF7hFWGXOJmqrF80Nm7ciKlTp2Lp0qX4/vvvC++xLI1hpp+Ug13fZGn8ibcyNucon0ajwdy5c7Fo0SJ4eHgUe4NVqVRo0KBBqVs6KjlYAkCjRo3w/vvv4+rVq9i9ezcCAwOxYMEC+Pn5oW/fvoiJiUF2drbcwyQZ6XQ65OfnQ6fTlfhcy5YtcefOHRw4cACPPfZYuedQ8i0hjmz8+PEYOHAgXn75Zb7/kOQYKGUwYcIEaLVarFmzRu6hUDkMjQSlvTmWt1e40oMlS+JUGq1WizZt2mDgwIGllqtbtWqFNWvWoEWLFmWeQ6fTQa1WM1AqFEvfZEkMlDLgzjn2w5aDJcCSOD2g1WrRtm1bnD9/Hn5+fnB2di71uLIeN5xDpVIxTCocS99kKQyUMmFzjn2x9WAJsCTuqIqGydDQUKxcubJS5wDA+yZtBEvfZAn86ZcJm3Pskz0ES5bEHYdUYVKv18PJyckCIyRLYOmbLIGBUiZszrFv9hAsAZbE7ZlUYVKr1ZZbCidlYumbpMZAKSM259g/ewmWAEvi9kSKMKnT6XD//n0AytiSlEzH0jdJiYFSRmzOcRz2FCxZErdt5oZJw1qsixcvRkZGBtzc3Cw0UrI0lr5JSgyUMmNzjmOxp2AJsCRua6SYmXzrrbfg7++P9u3bo2HDhhYYJVkTS98kGUEiPj5eABDx8fFWv7ZOpxP+/v5i/PjxVr82yS83N1csXbpUNGjQQKhUKjF69Ghx/vx5uYdlFp1OJ3bv3i3GjRsnPDw8hEqlEn369BHffvutuH//vtzDc1gFBQWiRYsWAoAIDQ2t1DnWrl0rAIiIiAiJR0dy0uv1YuDAgaJu3boiLS1N7uGQjeIMpczYnOPY7G3GEmBJXImkmJk8efIkJkyYgDFjxrA8amdY+iYpMFAqAJtzyB6DJcCSuBJIESZTU1MRFBSEZs2aITIykk04doilbzKXSnCaAMeOHUPHjh0RHx+PDh06yDKG4cOH49KlSzh16hT/sybk5eVh5cqVCAsLQ1JSEoKDgzFnzhw0b95c7qGZTa/XY9++fYiKisKmTZuQk5OD3r17IyQkBEFBQfDw8JB7iHZDqqWBBgwYgOPHj+Po0aPw9/eXfqCkCEIIDB48GMePH8fZs2dRrVo1uYdENoQzlArx8ssvszmHCtnrjCXAkri1SBEmAeCdd97Bnj17sGHDBoZJO8fSN5lF1js4FULOphwDNudQeeyxeedhV65cEXPnzhV+fn4CgGjSpIlYsGCBSExMlHtoNkeKBhwh2ITjqL755hsBQGzdulXuoZANYaAUygiUQgixYMEC4ebmxi47KpMjBEt2iZtHqjB54sQJ4e7uLsaMGSP0er2EIySlY9c3VQZL3grC5hyqiD2Xwg1YEq88qcrcbMJxbCx9U2UwUCoId84hYzlCsATYJW4KqcKkVqvFqFGjkJmZiS1btrBJykGx65tMJvcU
qRIopeQthBA7duwQAERcXJzcQyEb4gilcAOWxEuSqswthBAzZ84UGo1G7Nq1S6LRka1i6ZtMwRlKhenduzf8/f2xbNkyuYdCNsRRZiwBlsQfJtXMJACsW7cO4eHhCA8PR8+ePSUcJdmioqXv999/X+7hkNLJnWiVQEkzlEKwOYfM50gzlgaO2CUu5cwkm3CoLAcOHBD37t0TOp1O7qGQgnGGUoHYnEPmcqQZS4NGjRrh/fffx9WrV7F7924EBgZiwYIF8PPzQ9++fRETE4Ps7OxKnfvWrVuIi4uTeMTmkXJmkk04VJ6nn34aVapUgVrNyEBl43eHArE5h6TiiMFSypL43bt38dxzz2HEiBEIDQ2Fj48PXnrpJSQlJVnhlZRNyjDJJhwyBsMkVYTfIQrFnXNISo4YLAHjusRTUlJKfW5+fj7GjRuHtLQ0REZG4vDhw9i0aRNu376NAwcOQK/XW/nVPCBlmAS4Ew4RSYOBUqHYnEOW4KjBEii7JB4XFwetVlvi+O3btyMxMRH9+/fHlStXcPfuXfTu3RvffPMNunXrJsuMjdRhkk04JCW5fskiZVAJ1lRx7NgxdOzYEfHx8ejQoYPcwykUFhaGDz74ADdv3kS1atXkHg7Zoby8PKxcuRJhYWFISkpCcHAw5syZg+bNm8s9NKvIysqCh4dHqeGwefPmuHTpEoYMGYLs7GycO3cO8+bNw6RJk2QYqfRh8uTJkwgMDMSwYcOwevVq3jdJRtNqtfjrr79w9OhRBAQE4Pbt23j00UeRn58PLy8vqNVqPProo3IPk6xN3p4gZVBal7fBrVu3hJOTk1iyZIncQyE754hd4eVJTk4WKpVKvPbaa+LevXsiPz9fREZGikaNGom//vrL6uORsptbCCFSUlKEv7+/aN++vcOu3UmVd+7cOVGvXj3RsmVLMXz4cNGzZ0/xxBNPiIkTJ4o6deoIHx8fsX//frmHSVbGkreCsTmHrMWRS+Gl2blzJ1q3bo2xY8fCy8sLzs7O6Ny5MzQaDY4ePWrVsUg9M8kmHDJXQEAAOnbsiIYNG2LTpk3YtWsXDh06hJCQELRs2RJ16tTB77//LvcwycoYKBWOzTlkTQyWD2i1WmRkZKB9+/aFj2VlZUEIgYKCAquOQ8owCbAJh8zn5uaGRYsW4dChQ1i9ejUA4L333sO0adPg5eWFt956C6+++qrMoyRrY6BUODbnkBwcPVgGBASgoKAAf/zxBwAgJSUFP/74IwoKCjB06FCrjMESYZJNOCQFnU6Hpk2bYsmSJZg0aRL69u2LX3/9FT179sTcuXMREhICLy8vVtYcDAOlwqnVakyaNAnr169Henq63MMhB+OowfKpp57CiBEj8Nxzz+Gll17C888/jy1btmD27Nmllojv3buHtWvXVnrh9IdZIkyePHkSEyZMwJgxY/D666+bP0hyWBqNBtnZ2UhISIC7uztOnz6Nt99+G/PmzSvW2MpGL8fCQGkDuHMOyc3RgqWTkxOWLFmCjRs3QqPRYPDgwdi0aRMmTJhQ4li9Xo9ffvkFo0ePlmQvcUuESe6EQ1JKTExE7969ERsbi+HDh+Pvv/9GTk4OZyUdHJcNgnKXDSpq+PDhuHTpEk6dOsU3A5Kdoy83VJqEhASsWrUK0dHRuHbtGpo0aYKQkBCMHTsWvr6+Rp3DEmFSq9ViwIABOH78OI4ePcr7JkkSb775Jho1aoSQkBCcPHkS7dq1g6urq9zDIjnJ2GGuGEpdNqioHTt2CAAiLi5O7qEQFeJyQyXpdDqxe/duMW7cOOHh4SFUKpXo06eP+Pbbb8tdokfqpYEMZs6cKTQajdi1a5dk5yTKzc2VewikMCx52wg255ASOVop3BiV2UvcEjOTAJtwyHIMs5HioSLnwx+T42CgtBFsziElY7AsnTF7if/5558WCZNswiFrKHoLll6vx3vvvYdt27bJOCKSC++hhG3cQwkAycnJ8PX1RUREBKZNmyb3cIjKxHss
y6bX67Fv3z5ERUVh48aNyMnJAQB0794dP//8syQLjaempqJTp07w8fHBwYMHuXg5WYUQAoMHD8bx48dx9uxZbhnsYDhDaUO4cw7ZCs5Yls1QEl+xYgUaNmwIAKhduzb27dsnWZc4d8IhOahUKkRGRiI7O5uz4g6IgdLGcOccsiUMlqUz3DN58eJFhIaGIjk5ucyS+PXr1006N3fCITnVr18fn332GVatWoUff/xR7uGQNcnXD6QcttDlbaDT6YS/v78YP3683EMhMhm7wivu5q5sl7gQQqxdu1YAEBEREZYaPlGF9Hq9GDhwoKhbt65IS0uTezhkJZyhtDFsziFb5ugzlsZ0c1emSxxgEw4pB0vfjomB0gZx5xyydY4YLCuzNJAxXeLXr1/nTjikOCx9Ox52ecN2uryL4s45ZE/svStcynUmi3aJb9q0CdnZ2XjkkUeQn5+PI0eOoEWLFhKOnKjyBLu+HQpnKG0Um3PIntjzjKXUi5Y/XBLv378/0tLSkJWVhSeffNLsLnEiqbD07VgYKG0Ud84he2RvwdJSO+AY/PTTT9i+fTsiIiIk6xInkhJL346DJW/YZskbAMLCwvDBBx/g5s2bLCWQXZKjFF6g0+NCciZOJ2XgTFIGbmfmIV+rg4uTBrW8XNG6vjfa1PdG8zpecNaU/Tu5pcPkyZMnERgYiGHDhmH16tWFt748XBLPyclB7969ERISgqCgIK5LSVbH0rdjYKCE7QZK7pxDjsIawfJGejZijiTi28OJyMgpAAA4qVXQ6v/5L7Lox97uzhjzREOM7twQDaoVD2mWDpPG7oSTmZmJTZs2ISoqCvv370fVqlUxcuRIhISEIDAwkPdfk9UkJSWhVatWePbZZxEdHS33cMgCGChhu4ESYHMOORZLBMt7uQUI++k81h+9DpUK0JvwP6JaBQgAIzv64t1BLeDl5mzxMKnVajFgwAAcP34cR48eNXrx8oSEBKxatQrR0dG4du0amjRpgpCQEIwdOxa+vr6SjpGoNFFRUQgNDcXWrVsxZMgQuYdDEuM9lDaOzTnkSKS+x3L/pTvo9ck+bIi/DgHTwiT+/3ghgA3x19ErYh/2XEi2aJgEKr8TTqNGjfD+++/j6tWr2L17NwIDA7FgwQL4+fmhb9++iImJQXZ2tuTjJTIYP348Bg4ciJdffpnrKNshBkobx+YcckRSBMvouL8w7psjSL2fZ3KQfJheAClZeQiNjsd190ctFibXrVuH8PBwhIeHo2fPnpU6R2UXTicyF7u+7RsDpY3jzjnkyCobLFf9/hfe+/EsANNnJctiOE/1vq+ix6S50py0CEvshGPswulEUmHXt/3iPZSw7XsoATbnEBkYc4/l/kt3MO6bIxYfy6rQzujWtKYk5zK2CUcK7BInS2PXt33iDKUdqFOnDoYOHYrIyEiWqcihVTRjeS+3AG9sPAm1hfvX1CrgP5tOIjO3wOxzabVajBo1CpmZmdiyZYvFAx1L4mRpLH3bJ85QwvZnKAEgNjYW/fr1Q1xcHAIDA+UeDpEiPDxj+fiUT3HHq3G5Ze7823/h3uFNyEu+Al1WOkRBLtSuVeBSyx+ebfuiSqseRl1brQJe6OSLD4e1Nes1vPnmm4iIiEBsbGyl75uUArvESWrs+rYvDJSwj0Cp1+vRqFEjdO/eHVFRUXIPh0hR8vLy8GlkNL5Kqlfh8lpZZ/YgddsnZX7ep/s4eAe+YNR1VSrgwMxnSqxTaax169YhODgYERERmDFjRqXOITWWxEkqLH3bF5a87QSbc4jK5urqCnXTbtAYUevWuHvCs10/VB/8BmqN+i9qBL0N1/r/3IOZedT4RgI1gLVHEiszZIs04UiBJXGSCkvf9oWB0o5MmDABWq0Wa9askXsoRIpSoNPj28OJRnV0uzd6HNUHTINn62fg7t8eVZo/jUf6vlb4eX1+jtHX1QlgzeFEFOj0Jo03NTUVQUFBaNasGSIjIxW7aQG7xMlc7Pq2HwyU
doTNOUSlu5CcWbidoimE0EObmYrME78UPubWsI1J58jIKcDF5Eyjj7d2E45UuHA6VRYXPLcPDJR2hjvnEJV0OinD5OfcWvUGEj8aiqQvxyPr+C8AVA9mLwf+y6LXr+xOOErBkjiZiqVv+8BAaWe4cw5RSWeSMuBk7lpBKhWg1jzYa9EETmqV0YFSip1wlIQlcTIWS9+2j4HSzrA5h6ik25l50Jq4JU71/lNRe/RCVB/8BlzrtwCEHjmXD+H2pvkmnUerF7iTlVfhcUptwpEKS+JUEZa+bRsDpR1icw5RcflancnPcakVALeGbeDZ+hnUGvUBVE4uD86VfBkFaUkmnSuvoPzr20oTjhRYEqeysPRt2xgo7RCbc4iKc3HSGH2svqCs2cR/Qp4+N8uk67s6l319W23CkQJL4vQwlr5tFwOlnWJzDtE/anm5Gn0PZXL0DKT8vBhZp35Fzl8ncP/cPtxePxdC+yBoqpxc4Vzd+J1hnNQq1PR0LfPztt6EIxWWxMmApW/bxEBpp9icQ/SP1vW9jb6HUp+fi/unfkXqz4txe91spGwNR96Nc4Wfr9ZzAtSuxs8iFuj02LM5GuHh4fjjjz+g1WoLP2dvTThSYEmcWPq2TQyUdorNOUT/aFPf2+hjqz7xHNwCHoPGqwagcQY0TtB414ZHy+6oPeZDeHUYZNK1VSoVXDKTMW/ePHTu3BmPPPIIBg0ahBkzZiAkJATBwcF80ywDS+KOi6Vv28O9vGEfe3mXJjk5Gb6+voiIiMC0adPkHg6RbAp0enRasLNSi5uby9vdGUff7Q2h0yI+Ph579+7Fr7/+ir1790IIAS8vL3Tt2hU9evRAjx498Nhjj8HJycnq47QV3EvccXCvb9vCGUo7xuYcogecNWqMeaIhzF2K0lQaFfDiEw3hrFHDxcUFgYGBmDlzJjQaDapVq4bvvvsOs2bNglarLTGDWVqJnFgSdyQsfdsWzlDCfmcoASA2Nhb9+vVDXFwcAgMD5R4OkWxupGej68d7YM3/8FQq4MDMZ9Cg2j+zZm+++SYiIiIQGxtb7L7J/Pz8whnMvXv34rfffkN2djZnMI2UkJCAVatWITo6GteuXUOTJk0QEhKCsWPHwtfX+CYqUp6oqCiEhoZi69atGDJkiNzDoTIwUMK+A6Ver0ejRo3QvXt3REVFyT0cIlm9/d0pbIi/DhPXOK8UtQp4oZMvPhzWtvCxdevWITg4GBEREZgxY0a5z2fArByWxO0PS9+2gYES9h0oASAsLAwffPABbt68yR9EcmiZuQXoFbEPKVl5Fg2VahVQw9MVu/7dHV5uzgAe7IQTGBiIYcOGYfXq1SYvXs6AabrMzExs2rQJUVFR2L9/P6pWrYqRI0ciJCQEgYGBdr2AvL1JSkpCq1at8OyzzyI6Olru4VApGChh/4GSzTlE/9hzIRmh0fEWv86q0M7o1rQmgAc74XTq1Ak+Pj44ePCgJLNkDJimYUnc9rH0rWwMlLD/QAkAw4cPx6VLl3Dq1Cn+Vk4OS6vVom3btrju/iiq933VYtf5YGgrjA30L7zmgAEDcPz4cRw9etRii5czYBqHJXHbxdK3srHL20Fw5xxydIYwef78eTzfrhbmD20FABB60/f5Lo2hg7xomASstxOOoYt81qxZ2LFjB9LT0xEXF8cu8oewS9x2setb2ThDCceYoWRzDjmyomEyNDQUK1euRFZWFlr3eR6ap0IgXD3NuqfScM/kohHtCsvcgGlNOJbGGczysSRuO1j6ViYGSjhGoATYnEOOqbQwCQBTpkxBVFQU4o4ex9pzuVgffx1qADoT/kfUqAA9gJEdffHuoBaFDTiA+U04lsaAWTqWxJWPpW9lYqCE4wRKNueQoykrTO7Zswc9e/bEkiVLCn8WbqRnY+2RRKw5nFi4o46TWlVsD/CiH3u7O+PFJxoiuHPDYutMApZpwrE0BsyS2CWuXOz6Vh4GSjhOoATYnEOOo6wwmZWVhTZt2sDP
zw+7d++GWl38VvICnR4XkzNxOikDp5MycCcrD3kFOrg6a1DT0xVt6nujTX1vNKvjBWdNydvQrdWEY2kMmMWxJK48LH0rjCARHx8vAIj4+Hi5h2JxO3bsEABEXFyc3EMhspiCggLRokULAUCEhoYW+9xrr70mPDw8xJUrVyxy7ZkzZwqNRiN27dplkfPLJS8vT8TFxYmwsDDRt29f4eHhIQAILy8vMXDgQPHxxx+LI0eOiIKCArmHalE6nU7s3r1bjBs3Tnh4eAiVSiX69Okjvv32W3H//n25h+dQ9Hq9GDhwoKhbt65IS0uTezgOj4FSOFag1Ol0wt/fX4wfP17uoRBZRHlhcvfu3QKAWLJkiUWuvXbtWgFAREREWOT8SsKAKcS9e/fEypUrRbdu3QQAUbVqVTFp0iRx8OBBodfr5R6eQ7hx44bw9vYW48aNk3soDo8lbzhWyRtgcw7Zr7LK3EDFpW5zKb0Jx9IcvUTOkrh8WPpWBgZKOF6gZHMO2aPywiTwT1f3qVOn0KhRI0mvbYtNOJbmqAGTXeLWJ9j1rQgMlHC8QAmwOYfsS0VhsrSubimvbQ9NOJbmiAGTXeLWw65vBZCx3K4YjnQPpQGbc8helHfPpBBCZGZmCn9/f9G9e3eh0+kkv769NuFYmqPdg3nlyhUxd+5c4efnJwCIJk2aiAULFojExES5h2Y3vvnmGwFAbN26Ve6hOCQGSuGYgZLNOWQPKgqTQli2q9uRmnAszVECJrvELYdd3/JiyRuOWfIGgO+//x41atTA008/LfdQiExWUZkbsGyp29GbcCzNEUrkLIlLj6Vv+TBQwnEDJfDgTdlW/zMmx2VMmLRkV7cQAp9++inWrFmD3377jY0WVmDvAZNd4tJh17c8GCjh2IGSyNYYEyYBabq6hRDlzhLdv38fVapUqdS5yTxyBMzc3Fzs27cPtWrVwmOPPSbJOR/GLnHzCXZ9y0O2YruCOOI9lES2yJh7JoUwfwHznJycwr9zgWrbYI17MC9duiTGjRsnfH19RdWqVUW/fv3EoUOHJHwVxXHh9MrjgufWxxlKcIayLLm5uTh48CCqVKmCJ598Uu7hkIMzdmbS3FK3TqfD6NGj0bNnT4SGhsLFxaXCmUpSHkvMYN69excZGRnw9PTErVu3sGjRIiQnJ2PlypWoV6+eRV8PS+KmY+nbymQOtIrAGcrSpaWlibCwMNG1a1e5h0IOztiZSSHM7+qOjIwUKpVKDBkyRCxYsEAkJydX6jykLMbOYGq1WqPPmZSUJFQqlfj1118tOPLi2CVuPHZ9W5e0e4+RTRP/P1mt1+sBANWqVcOYMWOQkJCAXbt2yTk0cmDGzkwCD7q6ly5dig8//LDS900GBASgfv368PHxwYYNG7BgwQJcvXoVwINdpsg2ubi4IDAwELNmzcKOHTuQnp6OuLg4zJo1C1qtFvPmzUPnzp3x008/QafTlXmec+fO4c6dOwCA/fv3w9nZ2VovAQCgVqvxzDPPIDo6GsnJyVixYgXy8vIwZswY1K1bF5MnT0ZcXFzh/+eOTKVSITIyEtnZ2Xj99dflHo79kzvRKoEjz1DGxcWJ+fPnF7uvKCwsTGzYsKHw42nTponhw4fLMTxycKbMTEq1gHl2drYICgoSd+7cEV988YXo0KGDCA0NFW3atBFdunQRQvC+SntkmMEsev/sw3Q6nfjPf/4jVCqV8Pb2Fs8++6z49NNPrTfIcnDh9LJxwXPr4Aylg1uxYgVu375d7B6izMxMzJs3r/Dj2rVrQwgBrVYrwwjJUZkyMwkAb731Fm7fvo0VK1aYtUSQu7s7NBoNfvzxR0yZMgWffvopdu/ejYSEBPTt25f3U9opwwymm5tbqZ8XQkCtViM8PBxXrlxBSEhI4cygEjRq1Ajvv/8+rl69it27dyMwMBALFiyAn58f+vbti5iYGGRnZ8s9TFmMHz8eAwcOxMsvv4z09HS5h2O3GCgdXL9+/bBt2zbo9frC
wDhw4ECcP38e8+fPx+LFi/Hf//4XHTt2tMm13cg2mRompSh1AygsdT755JP47rvvAADOzs64ceMGWrVqhV27duG9995DSkpKpa9BtqnoLxGPPvoo5syZg7Nnz2LPnj0yjqoklsRLYunbOhgoHdzzzz+PvLw8bNiwoTAw/v777xg4cCCysrIQHh6O0NBQTJ06VeaRkqMwNUxmZWVhwoQJ6N69O6ZMmWLWtTUaDQBgxIgR8PDwwLVr19C/f3989NFH2LVrF5o0aYJffvml3HvsyP6kpqYW/oJhoNFo4OfnhyNHjpT6nJ9++gl//PGHrJUdLy8vhIaGYt++fbhy5Qpef/11xMbGokuXLmjWrBnCwsJw/fp12cZnTfXr18dnn32GVatW4ccff5R7OPZJ3oq7MjjyPZRCCPHxxx+LLl26iFGjRom33npLtGjRQoSHhwudTidyc3NFenq63EMkB2HKPZMGltir++7du6JFixZCpVKJF154odh9dZcuXZLsOmQb9u3bJx577DHx2muvia1bt4pff/1VPPPMM6JFixbip59+KvU5TZo0UeRe5I7cJc6ub8tioBQMlDk5OWLTpk2iXbt2okePHmLq1KkiMzNT7mGRg6lMmDR3AfPy/Pjjj+K1114rXDbIlOVkyL7k5OSIrVu3ihdeeEE0atRItGnTRowfP15s27atzOfk5+dbfKF1czniwulc8NxyuLA5uLC5QW5uLlJSUtCgQQO5h0IOxtQyN2DZvbqBB8tn5efnl9mkQY4rMzMTzs7OJn1vFBQU4OjRo4rdi9yRFk7ngueWwUAJBkoiOVUmTALS7NVNJBelBkxH2EtccK9vi2CgBAMlkVwqGyb37NmDnj17YsmSJZg2bZokY7lz5w5q1qzJZYFIFkoMmJmZmdi0aROioqKwf/9+VK1aFSNHjkRISAgCAwNt+uckKSkJrVq1wrPPPovo6Gi5h2MXGCjBQFkarVbLZYLIoiobJi1R6k5NTUWnTp0wduxYvP/++zb9Rkn2QWkB0x5L4ix9S4uBEgyUpTl37hyio6Px4Ycf8s2VJFfZMAlIX+rWarUYMGAAjh8/jqNHj8Lf39/scxJJTSkB055K4ix9S0yWViCFcfQu79Ls2LFDABBxcXFyD4XsTGW6uQ0s0dU9c+ZModFoxK5duyQ7J5GlKaGL3B66xNn1LR3OUIIzlKXR6/Vo3LgxunXrhqioKLmHQ3bCnJlJS5S6161bh+DgYERERGDGjBlmn49ILnLPYNpySZylb4nInWiVgDOUpQsLCxNubm5cAJYkYc7MpBDSL2B+4sQJ4e7uLsaMGWMzsylExpJrBtMWF07ngufSYKAUDJRluXXrlnBycrLIotHkWMwNk1KXulNSUoS/v79o3769Yt/kiKQkR8C0pZI4S9/mY8kbLHmXZ/jw4bh06RJOnTrF5hyqFHPK3ID0pW424RBZv0RuCyVxlr7NJHeiVQLOUJaNzTlkDnNnJoWQvtTNJhyikqw1g6nkkjhL3+ZhoBQMlOXR6XQiICBAjB8/Xu6hkI2RIkxKXepeu3atACAiIiIkOR+RvbJGwFRiSZyl78pjoBQMlBVhcw6ZSoowmZmZKfz9/UX37t2FTqcze0xswiGqPEsHzCtXroi5c+cKPz8/AUA0adJELFiwQCQmJkr8Sir2zTffCABi69atVr+2LeM9lOA9lBVJTk6Gr68vIiIiJNvmjuyXufdMGki5gLlhJxwfHx8cPHjQphZfJlIiS92DqYSF00U5C54X6PS4kJyJ00kZOJOUgduZecjX6uDipEEtL1e0ru+NNvW90byOF5w15i9tZksYKMFAaQw255AxpAqTUu7VzSYcIsuzRMCUcy/xh/f6vpGejZgjifj2cCIycgoAAE5qFbT6fyJU0Y+93Z0x5omGGN25IRpUc4xfYBkowUBpjNjYWPTr1w9xcXEIDAyUezikQFKFSam7ut98801EREQgNjYWPXv2NOtcRGQcqQOmHF3i
UVFRmPDKFIxcuB6HbqugUgF6ExKTWgUIACM7+uLdQS3g5eZskXEqBQMlGCiNwZ1zqDxShUlA2lI3d8IhUgapAqY1S+L7Lt3GxMi9KNC4Q2XGL7ZqFVDD0xWLRrRDt6Y1JRuf0jBQgoHSWAsXLsT8+fNx8+bNYveUkGOTMkxKWeo+efIkAgMDMWzYMKxevZq3ahApiBQB05Il8ei4v/Dej2ehUgFSpCT1/89uzh/aCuMC/c0/oQIxUIKB0lhszqGHSRkmpSx1swmHyLaYGzClLImv+v0vzN16VqqXVoK9hkoGSgBXrlzB66+/js8++wyNGzeWeziKxuYcMpAyTALSlbrZhENk+yobMM0tie+/dAfjvjli6ZeHVaGd7a78zUBJJmFzDgHSh0kpS91swiGyP5UJmKaWxO/lFqDXJ/uQej/PpOYbUxnuqdz17+521ajDQEkmYXMO6XQ6tGnTRrIwKWWpm004RI7B1IBpTEn87e9OYUP8dZPD5O2N85CTcLTw43qTvoJz9fLL7GoV8EInX3w4rK3Jr12pGCjJZGzOoTfeeAPp6elmh0lAulI3m3CIHJexAbNdu3Y4ePBgiZL4s6NDsehiVZgaiLLO7kHqj58Ue8yYQAkAKhVwYOYzdrNOJQMlmYzNOfbr1q1bSE9PR8uWLa1yPalK3WzCIaKijAmYjz/+OK5cuYLVq1fjFPxR9cnhUKk1Rl9Dl52Bm1+/Cn1OJqDRADotAOMDpUYFvNK9EWb2a17p16kkDJRUKWzOsT9JSUlo164dunXrhnfeeQedOnWy6PWkKnWzCYeIKlJewHy6a3dcaTMR+SrT7mdM+XER7p/dC8/2/ZFz9Rh0924DMD5QAg921Dn6bm+72KbR9l8ByeLll1/GmTNncOjQIbmHQhK5fPkyMjIykJGRgc8//xzHjh0r/Jwlfu986623cPv2baxYscKs+ybfeecd7NmzBxs2bGCYJKJSOTs7IzAwELNmzcKOHTtw9+5dxMXFYdasWbjv7G1ymMy5Go/7Z/dC4/kIqvUIrfS4MnIKcDE5s9LPVxIGSqqU3r17IyAgAMuWLZN7KCSRtm3bYuDAgRg5ciTOnDmDiIgInD37YC02qQPlnj17sHTpUnz44Ydm3Te5bt06hIeHIzw8nB3dRGS0ogHz5XfCTHquPj8Hqdu/BAA80vc1qN2qmDWW00kZZj1fKRgoqVLUajUmTZqE9evXIz09Xe7hkJl0Oh10Oh0uXLiAQYMGYfbs2bh06RIWL16MLl264IUXXpDsWllZWZgwYQK6d++OKVOmVPo8J0+exIQJEzBmzBi8/vrrko2PiBzLmaQMOKmNv3Xr7r5V0N27DY/mT8Oj6ZNmXdtJrWKgtFfXr1/HhAkT5B6GTQgNDYVWq8WaNWvkHgqZSa1Wo2bNmnj88cdx5swZPPfcc5g3bx62bNmC06dPY/DgwZJdS4pSd2pqKoKCgtCsWTNERkbyPl4iqrTbmXnQGrlWUEHqdWQe+wlqN0880udls6+t1Qvcycoz+zxKwED5kLS0NERHR8s9DJtQp04dDB06FJGRkRa5x46sxxDINBoN9u7dCwDYvHkzdDodfH19ceDAARw5Yv7uEVKUurVaLUaNGoXMzExs2bKFHd1EZJZ8rc7oY3VZ6YDQQ5+bhRufj8W1Dwfj2oeDCxtyAODm16/i5krjV63IKzD++kpW+o7rdmzr1q3lfv7q1atWGol9ePnll9GvXz8cOnSIO+fYMCEEVCoVevbsiT///BOvvfYafv75Z8THx+PEiROYOXMmXFxc0LZtW7i5uVXqGlKVug1NOLGxsWzCISKzuTgZv1SQJbg6y3t9qThcoAwKCoJKpSp3Ro3lM+MVbc5hoLRdhu/5gIAAhIaGonbt2ti2bRsCAgIQEBAAlUqFdu3aVTpMAv+Uunfu3FnpUrehCSciIoJNOEQkiVpernBSq4wqeztVq4dqvSaVeDzj4Froc7MAAFUDn4dzjYZGXdtJrUJN
T1fTBqxQDlfyrlu3LjZv3gy9Xl/qn6JLpVDF2JxjXwIDA7F8+XLs2LEDHTt2LPzFKygoCAEBAZU+rxSlbjbhEJEltK7vbfQ9lE5Va6Dq48+W+KNy+efWG8/WPeHZ6hmjzqfVC7Sp712pcSuNwwXKjh07Ij4+vszPVzR7SSWxOcd+ODs7IyQkBG3bPthfVorZeilK3WzCISJLSE5OxoW4WFnHYC+B0uF2yjlw4ADu37+P/v37l/r5+/fv4+jRo+jevbuVR2bbuHOObdLr9QBg1sLiFTF3r27uhENEUkpOTsbmzZuxdu1aHDx4EEKlRoNpa6Bx97L6WOxppxyHu4eya9eu5X6+SpUqDJOVwOYc26PVatGhQwfMmTMHw4cPt0ioNJS6lyxZUulSN5twiMhchhC5YcMG7N+/H8A/Fcke3bvCr6EL9qcARla+JaFRAS8+0dAuwiTggDOUZBl6vR6NGzdGt27dEBUVJfdwqAJarRZt27bF+fPn8dJLL+Hrr7+W/BpS7NW9bt06BAcHIyIiAjNmzJB8jERkv0oLka6ursjNzUWjRo0wceJEvPjii/D19cWN9Gx0/XgPrBmIVCrgwMxn0KCafSx95nAzlGQZhuac+fPn49NPP0W1atXkHhKVoWiYDA0NtUiYBMzv6mYTDhGZ6uEQqVKp4O3tDSEEPD09MXr0aISEhODJJ58sdntWg2oeGNnJFxvir1tlllKtAl7o5Gs3YRJwwKac0iQkJGDIkCFISEiQeyg2jc05yvdwmFy5cqVFrmNuVzebcIjIWMnJyVi6dCl69OiBevXqYdq0abh8+TKcnZ2h1+vx+OOPIyYmBrdv3y5c4q60/1PeHdQCNTxdYcIujJWiVgE1PF3x7sAWlr2QlbHkDeDYsWOF3d8dOnSQezg2jc05ymWtMGluqZtNOERUkYdnItVqNfz9/ZGeno60tDQ0bdoUISEhhSVtY+2/dAfjvjF/V7CKrArtjG5Na1r8OtbEGUqS1Msvv4wzZ87g0KFDcg+FirBWmATM36vb0ISzYcMGhkkiKvTwTOT06dORmpqKxo0bQ6fT4c6dOxgxYgTi4uJw4cIFzJo1y6QwCQDdmtbE/KGtLPQKHvhgaCu7C5MAAyVJrOjOOaQM1gyT5pa6DTvhhIeHcyccIio1RGZnZ+OJJ56Ai4sLzp49i4CAAMTExCA5ObnckraxxgX6F4ZKqcrfhvN8MLQVxgb6S3NShWHJGyx5S23hwoWYP38+bt68yeYcmVkzTJpb6j558iQCAwMxbNgwrF69mrdMEDmo0srZTz31FLy9vXH8+HEkJSVVuqRtiv2X7uA/m04iJSvPrEYdwz2Ti0a0s8uZSQMGSjBQSi05ORm+vr6IiIjAtGnT5B6Ow7JmmATMW8A8NTUVnTp1go+PDw4ePAgPD/vpfCSiipUWInv06AFfX19cuHABhw4dQtWqVTFq1KhSu7Qt5V5uAcJ+Oo/18dehBqAzITFpVIAewMiOvnh3UAt4uTlbapiKwEAJBkpLYHOOvKwdJvfs2YOePXtiyZIlJv8SwSYcIsdUWojs1asX2rRpg+vXr2Pbtm3IyclBnz59EBISgqCgILi7u8sy1hvp2Vh7JBFrDiciI6cAAOCkVhXbA7zox97uznjxiYYI7tzQrpYGKg8DJRgoLSE2Nhb9+vVDXFwcd86xMmuHSXNL3W+++SYiIiIQGxvL+yaJ7FxpIbJ3797o3r07UlNTsXHjRiQmJlqlpF0ZBTo9LiZn4nRSBk4nZeBOVh7yCnRwddagpqcr2tT3Rpv63mhWx8tudsAxFhc2J4so2pzDQGk91g6TgHkLmBuacCIiIhgmiexUWSHy888/h06nw6ZNm/DOO+/IUtI2lbNGjdb1vdG6vjeC5R6MwjBQkkVw5xzrkyNMmrNXN3fCIbJfZYXIZcuWoUaNGtiyZQvefPPNwpJ2TEyMrCVtMh9L3mDJ21LYnGM9coRJc0rdbMIhsj9lhcjnn38e7dq1
w9atWxEdHa3okjZVHmcoyWLq1KmDoUOHIjIyElOnTlVk+cIeyBEmgcqXurVaLUaNGoXMzEzs2bOHYZLIhpUVIr/++mv06tULu3fvRlRUFA4cOGATJW2qPAZKsqiXX34Z/fr1w6FDh3gvpQXIFSbNKXUbdsKJjY1lRzeRDSovRA4dOhSnT59GVFQUpk+fzpK2A2GgJItic47lyBUms7KyMGHCBHTv3h1Tpkwx6blswiGyTeWFyKCgINy9exfR0dHo1KlTYUl79uzZLGk7EMfqaSerMzTnrF+/Hunp6XIPx27IFSaByu/VzSYcIttS2raHbm5u+Prrr/H3339j48aNUKlUeO6559C4cWMsXrwY/fv3N2svbbJdDJRkcaGhodBqtVizZo3cQ7ELcobJAwcOVGqv7tTUVAQFBaFZs2aIjIzkvVNEClVRiPz5558REBCAGTNmoE6dOnjppZfg7u4u6V7aZJtY8iaLY3OOdOQMk0IItGjRAm+//bZJpW424RApW0Xl7OrVqyMhIQGLFy8u1qXNkjYVxUBJVsHmHPPJGSYBQKVSoVq1ali4cKFJz2MTDpHyGBMiMzMzsXHjRnZpk1EYKMkq2JxjHrnDpIFGozHpeDbhECmHMSFSr9dj7969iIqKwnfffccubTIa76Ekq2BzTuVZK0xu27YNf/zxR+HH5u55wCYcIvlVdE/k9u3bMXHiRNy9exdz585FQEAAevXqhcOHD2P27Nm4du0aduzYgeDgYIZJKhd3ygF3yrEW7pxjOmuFyfj4eDz++OMYPnw4Bg0ahBEjRsDT07PS5+NOOETyKW/HGsNMJACWtElSLHmT1bA5xzTWLHNnZWXB1dUVaWlpWLp0Kc6cOYPRo0ejQ4cOOHr0KDp16mTSuNmEQ2RdxpSzAUCv1xfuXsOSNkmJgZKsis05xrH2PZPdu3dHcHAwJk6ciEOHDmHNmjW4fv06NBoN1q1bh/v378Pd3R1CiAp/EWATDpF1GBsiASAhIQHR0dHs0iaLYaAkq2JzTsWsHSb1ej3UajWqVKmCqKgofP311wgMDMS8efOwf/9+9O7dGwkJCWjdurVRs8qrVq1CeHg4m3CILMCUEMmSNlkTm3LIqticUz45urkNu93MnDkTV69eRWZmJp566ink5+ejYcOGSE9Px5tvvomoqCijGnXYhEMkLWMbawxd2rt378a4ceO48DhZFWcoyepCQ0Mxd+5crFmzhs05Rci9aHnDhg3h4eGB/fv3486dOzh16hQuXLiACxcu4O2334ZGo6nwTej27dv44IMP+GZFZCZTZiIBlrRJfgyUZHVszilJKetMjhw5Em+++SYuXryIL774ArVq1UKtWrWwe/duuLm5lftcvV4PZ2dnNuEQVZKpIZIlbVISBkqSBZtz/qGEMGl443nhhRewZs0adO7cGaGhoQAeBMWKwmRBQQHy8/NRrVo1i4+VyJ6YGiK58DgpFQMlyYLNOQ8oIUwW5eLigtWrV8PFxQWurq4A/rnHsjwajQZVqlSx9PCI7IKpIRJgSZuUj4GSZGFozpk/fz4+/fRTh5zZUlqYNKhZs6ZJx+v1epbWiCpQmRDJkjbZEnZ5k2xCQ0Oh1WqxZs0auYdidUoLk5XdMEur1QIA39iISmFKd7YBu7TJVnGGkmTjqM05SguTWVlZ2LRpE0JCQoxauLwoYzq/iRxJZWYiAZa0yfYxUJKsHK05R2lhEgDeeustREVFoU+fPqhfv77RzzM1fBLZq8qGSJa0yZ4wUJKsHKk5R4lhcs+ePVi6dCmWLFliUpg07K5D5KgqGyLZpU32SiUqe/OUHTl27Bg6duyI+Ph4dOjQQe7hOJyFCxdi/vz5uHnzpt025ygxTGZlZaFNmzbw8/PD7t27jQ6IhiYczp6QoykrRD7//PPlhkig9JJ2SEgIS9pkNzhDSbKz951zlBgmgQel7tu3b2Pnzp0VhkkhBLRaLdRqNdRqNcMkOQxDiNy4cSP27dtn9Ewk
wJI2ORYGSpKdPTfnKDVMFi11N2rUqNxjtVotvv76a/j4+KBfv3545JFHrDRKInmYEyJZ0iZHxUBJimCPzTlKDZNZWVmYMGECunfvjilTplR4/DvvvIOIiAjExsYyTJLdMidEAuzSJmKgJEWwt+YcpYZJwLRS97p16xAeHo6IiAj07NnTSiMksg5zQyRL2kRFCBLx8fECgIiPj5d7KA4tLCxMuLm5ibS0NLmHYpaCggLRokULAUCEhobKPZxidu/eLQCIJUuWVHjsiRMnhLu7uxgzZozQ6/VWGB2R5d26dUt8+eWXokePHkKlUgmNRiP69esnli9fLlJSUip8vk6nE7t27RJjx44VHh4eQqVSib59+4qYmBiRnZ1thVdApEzs8ga7vJUiOTkZvr6+iIiIsNnmHCXPTJrS1Z2amopOnTrBx8cHBw8ehIeHhxVHSiStsmYijenONmCXNlH5WPImxahTpw6effZZm23OUXKYBIwvdWu1WowaNQqZmZnYs2cPwyTZJHPL2QBL2kSmYKAkRZk8ebJNNucoPUya0tX9zjvvYM+ePYiNjYW/v791BkgkASlCJLu0iSqHJW+w5K0ker0ejRs3Rrdu3RAVFSX3cIyi9DBpSql73bp1CA4ORkREBGbMmGHFURJVjhTlbIAlbSJzcYaSFEWtVmPSpEmYP38+Pv30U8XvnKP0MAkYX+o+efIkJkyYgDFjxuD111+33gCJTCTFTCTAkjaRpOTtCVIGdnkry61bt4STk5NRnchyUnI3t4GxXd0pKSnC399ftG/fXty/f99KoyMynrnd2Qbs0iayDJa8wZK3Eo0YMQIXL17EqVOnFDlLYAszk8aWurVaLQYMGIDjx4/j6NGjvG+SFEOqcjbAkjaRpbHkTYqk5OYcWwiTgPGlbjbhkJJIVc4GWNImsiYGSlIkpe6cYyth0tiubu6EQ0ogZYhklzaRPFjyBkveSrVw4ULMnz8fN2/eVERzjk6nQ5s2bRQfJo0tdZ88eRKBgYEYNmwYVq9ezdkasiopy9kAS9pEcuMMJSlWaGgo5s6dizVr1si+c44QAlqtFnXq1MGTTz6p2DAJGFfqTk1NRVBQEJo1a4bIyEiGSbIKKWciAZa0iZSEgZIUy9o75+h0Omg0GgghSlxLpVLByckJO3bsgLOzs0XHYQ5jSt3cCYcq49atW/D09ISXl5dJz5M6RLKkTaRMZd+pT6QAkydPxpkzZ3Do0CGLXufEiRMICgpCdnZ2mcFVo9EoOkxmZWVhwoQJ6N69O6ZMmVLmcYYmnA0bNrAJh8qVlJSEefPmoXHjxvD19cXevXuNel5ycjKWLl2KZ555BvXq1cP06dPh6uqKr7/+Gn///Te2b9+OiRMnmtylPXfuXAQEBKBXr144fPgwZs+ejWvXrmHHjh0IDg5mmCSSk5xrFikF16FULp1OJwICAsT48eMtdo0TJ04Id3d38dZbbxV7XK/XW+yalvDaa68JDw8PceXKlTKPWbt2rQAgIiIirDgysjVZWVli0qRJQqVSiR49eoglS5ZUuNajXq8XFy9eNHudyKLu3bsnVqxYIbp27SoAiKpVq4rJkyeLuLg4m/v5JLJ3LHmToll655xTp06hS5cumDp1Kj788MPCx/Pz8+Hi4iLptSzJmFI3d8IhY7m5uUGr1aJz587Ys2ePUc9RqVRo0KABvLy8Kl3OBljSJrJV7PIGu7yVLjk5Gb6+voiIiJC0OSc5ORmPPfYY2rVrh+3bt0On0+E///kPLl++jISEBLz88svo378/mjdvLtk1LcGYru7U1FR06tQJPj4+OHjwIO+bpApt2bIF77zzDl555RVcvHgRV65cQZcuXdCvXz88+eSTkl+PXdpEto33UJLiFW3Okfr3n8DAQKSmpuKHH37A4MGDcfr0aTRv3hy9evXCkiVLsGjRIiQmJkp6TakZurpXrFhRapgs2oSzZcsWhkkyyhNPPIFHHnkEn3zyCVQqFZ566ils3rwZ
Q4cORVxcnCTXyMzMxMqVK9GtWzc0btwYixcvRv/+/REXF4cLFy5g1qxZDJNEtkLmkrsi8B5K5duxY4cAIOLi4iQ9782bN8W4ceOEu7u76NOnT7F7vb799lvh4+Mjfv75Z0mvKSVj9uqeOXOm0Gg0YteuXVYcGdk6nU4n1qxZIw4dOlT4sRBCdO7cWYSGhlb6vkjupU1kn1jyBkvetkCv16Nx48bo1q0boqKiJD33zZs38cUXX6B3797o2bNnsWWDmjRpgueeew4ff/yxpNeUgjGl7nXr1iE4OBgRERGYMWOGDKMkW1ZQUFC4soFhWa0PP/wQGzZsQExMjEm3g7CkTWTf2JRDNsGSzTn16tXD22+/DTc3NwAPmguEEEhLS0PNmjXRvn17ya4lpYoWMGcTDhnDsE7k4MGD4evrW2zZrNKWydLr9bhx4waaNGlS4bm58DiR4+A9lGQzQkNDodVqsWbNGsnPXbVq1WJd3SqVCkuWLEFKSgq6dOki+fXMZejq/vDDD0vt6uZOOFSe0taJ3LdvX6n3KBse02g0iI+Px+bNmzF16tQyd2ECHsxmTpw4EXXq1MFLL70Ed3d3xMTEIDk5GcuWLUNgYCC/J4nsDGcoyWZYa+ecdevWYc+ePdi4cSN27doFPz8/i1ynsipawJw74VBpKrtjzddff41z587h2LFjOHfuHAYOHIhXXnml3J8/jUYDnU6H2bNns6RN5CAYKMmmTJ48Gf369cOhQ4cQGBhY6fMIIaDT6eDkVPJHoGXLllizZg0OHDiAVq1amTNci6io1G3YCSc2NpY74Tg4KbY97Nq1K2JjYzFgwADExMSgQYMGRl37m2++4SwkkQNhUw7YlGNLpGjO0Wq16NGjB5YvX44mTZpAo9GUOEapC5vv2bMHPXv2xJIlS0pdk5NNOFRWiHz++ecrvdg4EVFFeA8l2RRDc8769euRnp5u8vO1Wi3atm2LgwcP4quvvio1TAJQZJisqNTNJhzHZYm9s4mITMFASTanss05hjB5/vx5hIaGYvHixRYaoWWUt4A5m3AcD0MkESkJ76Ekm1OZ5pyHw+TKlSutMFLplLdXN5twHIcU90RWRK/XQ6VS8ZcSIjIJAyXZJFOac2w9TFZU6mYTjn2zRogE/ll4PDY2Fvv371fkbR9EpFwMlGSTevfujYCAgMI17cpi62ESKL+re926dQgPD0dERAR69uwp0whJatYKkWUtPJ6UlISAgABJrkFEDkK2TR8VhHt526awsDDh5uYm0tLSSv18QUGBaNGihQAgQkNDrTw6aZS3V/eJEyeEu7u7GDNmjNDr9TKMjqR069Yt8eWXX4oePXoIlUolNBqN6Nevn1i+fHml980uDffSJiJL4LJB4LJBtio5ORm+vr6IiIgosYSOPcxMlrdXd2pqKjp16gQfHx8cPHiQ903aKGsu8cO9tInIkljyJptVVnOOPYRJoOxSN5twbJu1ytkA99ImIuthoCSb9nBzjr2EyfK6utmEY3usGSL1ej327t2LqKgofPfdd8jJyUGfPn0QExODoKAguLu7S3YtIiIDlrzBkrctK7pzzvLly+0iTJZX6uZOOLbD2jvWsKRNRHLiDCXZNMPOOfPnz8fvv/+OS5cu2XSYBMoudXMnHOWz5kwkwJI2ESkHZyjBGUpbd+PGjcIZGFsPk2Xt1c0mHOWy9kxkWSXtkJAQlrSJSDacoSSbptVq0bdvXwCAj48PVqxYIfOIKq+sBczZhKM81p6JBEovac+ePZslbSJSBAZKsllFG3D69u2L2NhYo3bOUaqySt1swlEGrVaL3Nxc/P777/j3v/+N8+fPWzxEsqRNRLaCgZJs0sPd3MuXL0fjxo0r3DlHqcrq6uZOOPLS6/VQq9XQarVwcnKCp6cnnnnmGRw/fhxZWVnw8fGxyDXZpU1Etob3UIL3UNqaspYGWrhwIebPn4+bN2+iWrVqMo/SeGV1dZ88eRKBgYEYNmwYVq9ezdkoKyko
KICzs3NhiLQWdmkTkS1TV3wIkXKUt85kaGgotFot1qxZI+MITWcoda9YsaIwTKampiIoKAjNmjVDZGQkw6SF5ebmQq/XQ6/XQ6PRAIBVwmRmZiZWrlyJbt26oXHjxli8eDH69++PuLg4XLhwAbNmzWKYJCKbwEBJNqOiRcuL7pxjKxPvhlL3hx9+WFjqLtqEs2XLFjbhWMjt27dx69YtAIBGo4FarS78Y0l6vR67d+/GuHHjUKdOHbz00ktwd3dHTEwMkpOTC2/b4C8RRGRLGCjJJhi7A87kyZNx5swZHDp0yMojNF1ZXd2GJpwNGzawCUdiycnJWLp0Kfr06YOcnBzUqFEDAODs7GzxayckJGDu3LkICAhAr169cPjwYcyePRvXrl3Djh07EBwczPsjichmsSmHFM+U7RR79+6NgIAAm2jOKa2rm0040iu6xM/+/fuhUqmwcOFC+Pn5Wfza7NImIochSMTHxwsAIj4+Xu6h0EMKCgpEixYtBAARGhpq1HPCwsKEm5ubSEtLs/DoKm/37t0CgFiyZEnhYydOnBDu7u5izJgxQq/Xyzg623fr1i3x5Zdfih49egi1Wi00Go3o16+fWL58uUhJSTHpXIcOHRKpqalGH6/T6cSuXbvE2LFjhYeHh1CpVKJv374iJiZGZGdnm/pSiIhsAgOlYKBUqsqESSEehAknJ6diYU1JMjMzhb+/v+jevbvQ6XRCCCFSUlKEv7+/aN++vbh//77MI7RNUobIa9euiZdeekl4e3uL9u3bi99++63w36osV65cEXPmzBENGzYUAETTpk1FWFiYSExMNOdlERHZBJa8SZFMKXM/rGhzztSpUxVXVny41M2dcCqvtHJ27969ERkZadZi4//973+RmJiIzZs3o3Xr1nB1dS2zWUev12Pjxo0YNWoUS9pE5LAYKElxzAmTBpMnT0a/fv0Ut3NOaQuYcycc01gqRBqsW7cOsbGxiI+PR/Xq1ZGWllbu8Wq1GgMGDMDatWvx7LPPsrGGiBwSAyUpihRhElBmc05pXd1swjGOpUNkUdeuXUOrVq3w999/4/nnn0dSUhLq1KmDYcOGYdy4caUumm+YmSQiclRcNogUQ6owCTyYNZo0aRLWr1+P9PR0CUdZeQ8vYH7y5ElMmDABY8aMweuvvy738BTHsMTPM888g/r162P69OlwdXVFZGQk/v77b2zfvh0TJ06UfA/trKwsVK1aFXPmzMFjjz2GmJgYtGnTBsuXL0dYWJik1yIishty38SpBGzKkV9lG3DKo6TmnIe7utmEUzopG2sq68SJE0KlUonHHnus2EoBH330kWjQoAH/vYiISsEZSpKdlDOTRSll55yHS93cCac4uWYiy9K2bVs4OTnBx8enWHn78ccfh4+PDw4fPmyVcRAR2RIGSpKVpcKkgRJ2znm41M2dcJQTIrOysqDX64s9plKpsGDBAty+fbvY983Vq1eRlpbmsP9mRETlYaAk2Vg6TALFm3Pk8PBe3YYmnPDwcIdrwlFKiCy6l3bt2rWxbds2aLXaYse89NJLqFatGiZPnozffvsNhw4dwrZt2zBq1CgEBARYdHxERDZJ7pq7EvAeSuuzxD2TZZFr55yHFzB3xJ1wlHBPpEFZC4/fuXOn1OMvX74s+vTpIx577DHh6ekpxowZI65fv27VMRMR2QqVEDLeXKYQx44dQ8eOHREfH48OHTrIPRy7Z42ZyaKSk5Ph6+uLiIgITJs2zaLXKmrKlCmIiorCqVOn4OPjg06dOsHHxwcHDx606/smy1ri5/nnn5d8iZ+KmLuXdkFBAf788080atQIGo3GSqMmIrI9XIeSrMraYRKQZ+ecoguY+/n5YcCAAXa9E44114msiF6vx969exEVFYXvvvsOOTk56NOnD2JiYhAUFGTSwuPOzs5o2rSpBUdLRGQfGCjJauQIkwbW3Dnn4a7ut99+2y53wlFSiASAhIQEREdHIzo6GomJiWjatClmz56N
F198Eb6+vlYdCxGRo2GgJKuQM0wC1t05p+he3Rs2bLCrnXCUFiLNLWkTEZE0GCjJ4uQOk8A/O+fMnz8fn376aanb50mhaKnbMFNp6zvhKC1ESlnSJiIiabApB2zKsSQlhEkDSzfnZGVloU2bNvDz88PGjRvRuXNnm23CUVJjjUFpJe2QkBCWtImIFIAzlGQxSgqTgOWbcwyl7u3bt2P06NE214SjtJlIgCVtIiJbwUBJFqG0MGlgqeacoqXuFStW2EwTjhJDJEvaRES2h4GSJKfUMAlYpjmnaFd39erVMX36dEU34SgxRALs0iYismUMlCQpJYdJwDLNOYZS9+eff44XXnhBkU04Sg2RLGkTEdkHNuWATTlSUXqYNJCyOWfPnj3o2bMnFi5ciGXLlimqCUeJjTVA2SXtkJAQlrSJiGwUAyUYKKVgK2HSYMSIEbh48SJOnTpV6VkwQ1d3w4YN4ezsjBMnTuDo0aOy3jep1BAJsEubiMieseRNZrO1MAkUb87p1PkJXEjOxOmkDJxJysDtzDzka3VwcdKglpcrWtf3Rpv63mhexwvOGnXhOQyl7j59+mDlypWyNeEotZwNsKRNROQoGCjJLLYYJoEHzTn+rTrgrZiDyPg1Exk5BQAAJ7UKWv0/k/ZOahVijiQCALzdnTHmiYYY3bkhLp84jKVLl2L8+PH4+uuvrd6Eo+QQyS5tIiLHw5I3WPKuLFsNk/dyCxD203msO5oIoddDpdYY/Vy1ChACEAkH8ci1fbhw+jiGDRuG1atXW3y2TcnlbIAlbSIiR8YZSqoUWw2T+y/dwRsbTyL1fh4AlUlhEgAMk5ci4Emk1m2FR72+R2RkpMXCpJJnIgGWtImI6AEGSjKZrYbJ6Li/8N6PZ6FW/RMMK0ul1kC4VUXW4yHYdPI2xgX6SzJGQPkhkiVtIiJ6GAMlmcRWw+Sq3x+EScD8MGmgUj9o0Jm79cF5zQmVSg+RABceJyKisjFQktFsNUzuv3SnMPRZytytZ+FfvQq6Na1p9HNsIUSypE1ERMZgoCSj2GqYvJdbgDc2npSkzF0etQr4z6aT2PXv7vBycy7zOFsIkSxpExGRqRgoqUK2GiYBIOyn80i9n2fRMAk8CKspWXlY8PN5fDisbbHP2UKIBFjSJiKiymOgpHLZcpi8np6N9Uevo6IsmZ+cgPsXDiDv+hloM25Dl30PalcPuNZrhqpPDoebb2ujrqcXwPqj1zH1mcZwyrtnEyGSJW0iIpICAyWVyZbDJACsPZII1f+vG1mezBO/IOvE9mKP6XPuISfhD+RcjUfNoLfh0ewp4y4qBAb9ayHOfBum2BDJkjYREUmNgZJKZethskCnx7eHE40udWuqVEOVdn3h1qAl9LlZuPvbWmjTbgBCj7Rdy40OlAIqZNZuj6+WRWL4c8oJkQBL2kREZDkMlFSCrYdJALiQ/M92ihWp0uoZVOv1EtTOboWPOVf3xa1vpgMAdPduQ3f/LjRVfIw6n97JDU8NGIHq1b1NHrfUWNImIiJrYKCkYuwhTALA6aQMo491821V4jGnR+oV+1jl7Gry9VvXlydQsqRNRETWxkBJhewlTALAmaQMOKlV0FayvTv7Ylzh310btILaxfgQ5qRW4XRSBoIrdeXKY0mbiIjkwkBJAOwrTALA7cy8SofJvOQrSPt12YMPNM6o1nuSSc/X6gXuZOVV6tqmYkmbiIiUgIGS7C5MAkC+Vlep5+VeP4vbm96HyMsG1BrUHDoTrnUam3yevILKXd8YLGkTEZHSMFA6OHsMkwDg4qQx+Tk5fx7Dnc0LIAryAI0zaj77FjyaPlmp67s6m379irCkTURESsVA6cDsNUwCQC0vV5Puocy+GIc7Wz8GdFqonN1Qc/hsuPu3r9S1ndQq1PQ0rYmnLCxpExGRLWCgdFD2HCYBoHV9b8QcSTTq2PsXfkPKDx8DQg9ABe+ng6HSOCP3+tnCY1zrNoXKqew9uovS6gXamNHhzZI2
ERHZGgZKB2TvYRKASYEu58of/x8mAUDg7p5vShxT/5UVcPKpbZHrG7CkTUREtoqB0sE4QpgEgOZ1vODt7mz04uZS8nZ3RrM6XkYdy5I2ERHZAwZKB+IoYRIA9NoCtHBKwe96T6jU5TfI1Bg8AzUGz5DkuhoV8OITDeGsUZc9Npa0iYjIzjBQOghHCZN5eXlYsWIFFi5ciL+zClDv5eVWvb4eQHDnhqV+jiVtIiKyV2VPo5DdcIQwmZeXh6VLl6Jx48aYNm0aunfvjlO/78WoxxtCbaWqsVoFjOzkiwbVPAofy8zMxMqVK9GtWzc0btwYixcvRv/+/REXF4cLFy5g1qxZDJNERGTzOENp5+w9TBadkbx58yaCg4Mxe/ZsNG/eHADwrn8Bdl+8jZSsPFRy4xyjqFVADU9XvDuwBUvaRETkcDhDacfsOUyWNiN59uxZrFmzpjBMAoCXmzMWjWhn0TAJAHoBvNGlFsLDPkBAQAB69eqFw4cPY/bs2bh27Rp27NiB4OBghkkiIrJLnKG0U/YaJiuakSxNt6Y1MX9oK8zderbMY8xVLSEWo3osYZc2ERE5JAZKO2SPYbIyQbKocYH+AIC5W89CrYI0M5ZCD6jUSIv9Cs1qFLCkTUREDouB0s7YW5g0N0gWNS7QH/7Vq+A/m06afU+l0OugysvCwOqpmPX9V2ysISIih8ZAaUfsKUxKGSSL6ta0Jnb+uzvCfjqP9fHXoQagMyFYCr0OKpUaPf3dsTikP6q6u5g1HiIiInvAQGkn7CVMWipIFlXVzRkfDm+LqT0bY+2RRKw5nFi4o46TWgWtYepSCAi9DlBroFKp4K7RY9zTj2LsU48WWxqIiIjI0TFQ2gF7CJPWCJIPa1DNAzP7NcfrvZviYnImdp+4gm1xJ3HxejJyC7So4uaCZn71MLhLW3RvHYBmdbzK3QGHiIjIUTFQ2jhbD5NyBMmiuJc2ERGR+RgobZgth0k5gyQXHiciIpIWA6WNstUwKWeQ5F7aRERElsFAaYNsMUzKFSRZ0iYiIrI8BkobY2thUo4gyZI2ERGRdTFQ2hBbCpNyBEmWtImIiOTBQGkjbCVMWjtIsqRNREQkPwZKG2ALYdKaQZIlbSIiImVhoFQ4pYdJawZJlrSJiIiUiYFSwZQcJq0VJFnSJiIiUj4GSoWydpgUQhSGs6J/f5g1giRL2kRERLaFgVKBrBkmU1JSsGjRImRlZaFly5aYOHEiXF1dSxwnhMCZM2cwcOBAiwVJlrSJiIhsk1ruAVBx1gqTeXl5+O9//4tGjRrh3LlzuHfvHt5++22EhITg/v37JY5XqVRo3rw5nnvuOZw9exZr1qyRJExmZmZi5cqV6NatGxo3bozFixejf//+iIuLw4ULFzBr1iyGSSIiIoXjDKWCWHNm8vDhw9i+fTuio6MRFBQEADh27Bg6deqEefPmoVmzZiWe4+TkhCVLlph9bZa0iYiI7AsDpUJY+57Jxx57DD179kT//v0Lr9+4cWPUrVsXZ86cKTVQmtsAw5I2ERGRfWKgVAA5urm9vLwwf/58AA/uj3RyckJCQgKEEOjSpYtk12GXNhERkf1joJSZ3EsD6fV6qNUPbqU9cOAAGjVqhOrVqxd7vDLnZEmbiIjIcTBQykjuMAkAarUaOp0OGo0GP/zwAwIDA+Hs7Fypc7GkTURE5JgYKGWihDBpoNFokJCQgJMnT+Lrr78GAFy9ehU//PADBg8ejCZNmpT5XJa0iYiIiMsGyUCOMJmXlwedTlfm53/99Vc89dRT0Gg0GDNmDBo3boy9e/eiVq1apR4vhMC6detQp04dvPTSS3B3d0dMTAySk5OxbNkyBAYGMkwSERE5CAZKK7N2mMzLy8PSpUvRuHFjLF26tNRQqdfrsWnTJmzbtg2NGjVCSkoKzp07hx9++AHe3t5lvg4XFxfMnj0b165dw44dOxAcHMz7I4mIiBwQS95WZM0wWdoWiQMGDIBGoylxrFqtRvPm
zeHt7Y2PPvoIjRs3rvD8Tk5OeO655zgLSURERAyU1mLNHXAqs9f2kiVLTOrqZpAkIiIiA5a8rcAaYbJoaXvatGno3r27SVskVnaJICIiIiKmCAuzdJg0N0gSERERmYuB0oIsGSbNCZJ6vR56vV6ysRAREZFjY6C0EEuFSXOC5JUrVzBnzhwEBATgiy++KHcZISIiIiJjsSnHAiwRJivbbJOZmYkNGzYgKioKv/32W+HC44GBgbxvkoiIiCTBQCkxqcNkZYIk99ImIiIia2KglJCUYbIyQfLKlSuIjo7GqlWruJc2ERERWQ0DpUSkCpOmBsmyStrcS5uIiIishYFSAlKESVOCJEvaREREpCQMlGYyN0yaEiRZ0iYiIiIlUgkhhNyDsLYCnR4XkjNxOikDZ5IycPnG39j/Wxy6Pf0UmjSojdb1vdGmvjea1/GCs6bsTmhzwqSxQZIlbSIiIlI6hwqUN9KzEXMkEd8eTkRGTgEAwEmtglb/z5eg6Mfe7s4Y80RDjO7cEA2qeRQ7V2XDpDFBsqySdkhICEvaREREpDgOESjv5RYg7KfzWH/0OlQqQG/CK1arAAFgZEdfvDuoBbzcnCsVJo0JkqWVtENCQljSJiIiIkWz+0C5/9IdvLHxJFLv55kUJB+mVgE1PF3x0bDWmDKsp9FhsqIgyZI2ERER2Tq7DpTRcX/hvR/PQm3irGRZDOdJjf0Kz7erVW6YLC9IsqRNRERE9sRuA+Wq3//C3K1nLXb++UNbYVygf4nHywuSLGkTERGRPbLLQLn/0h2M++aIxa+zKrQzujWtCaDsIFm/fn2WtImIiMiu2V2gvJdbgF6f7DP7nsmKGO6p/GVKINZ/G10sSL7zzjtITk5mSZuIiIgcgt0Fyre/O4UN8dctGiYNVBDQXf4NSVvCERwcjLFjx+K3335jSZuIiIgcil0Fyuvp2ej28R4Y+4KEtgD3jmzB/bN7UHA3GWpnN7j6toJ3l1FwrdPYyJMIjPQ4i7hft7GkTURERA7JrrZeXHskESoVYExEFnodbm+ch9xrJwsf0+sKkHP5EHKuxqPW8+/B3b99xecRekTuOofOHh7cS5uIiIgckt3MUBbo9Oi0YGfhDjgVuXd0K9J3RgIAnGv6wefpMcj/OwEZcesBABqvGqj/8tdQOTlXeC4vVw2Ozelb7jaNRERERPbKbhLQheRMo8MkAGQd/6Xw79X7T4NHs6fg020s3AI6AAB0mSnIvmJcp3hmng4XkzNNGzARERGRnbCbQHk6KcPoY3U5mShIvf7gA7UTXOo2Kfyca/0WhX/Pu2H8OpamXJ+IiIjInthNoDyTlAEntXENMNqMvwv/rnH3gkqt+efjKt7/HHf3bxjDSa1ioCQiIiKHZTeB8nZmHrRGrhUkCnL/+UBTvC9JpXYq/bhyaPUCd7LyjDqWiIiIyN7YTaDM1+qMPlbl7Fb4d6Erft+l0GtLPa4ieQXGX5+IiIjInthNoHRx0lR80P9z8q5d+Hd9TiaE/p8wqMtK/+c4n9owlquz8dcnIiIisid2EyhrebkafQ+lxt0LztX/f+cavQ75ty4Vfi7v5oXCv7s2aGXU+ZzUKtT0dDV+sERERER2xG4CZev63kbfQwkAno8NKPx76i+fI/tiHNL3r0bun8cBPFiH0qNxZ6POpdULtKnvXfGBRERERHbIbnbKMTXQeXUYhJzLh5F77SQKUhJxZ0vYP5/UOKP6oNeNWtS8stcnIiIishd2Eyib1/GCt7uz0Yubq9Qa1Hp+Hu4d2YKss7uhvfv3g728G7SE99PBxu/lDcDb3RnN6nhVduhERERENs1utl4EgI93XMD/9iXAhMq32TQq4JXujTCzX3PrXZSIiIhIQezmHkoAGN25Iawdj/UAgjs3tO5FiYiIiBTErgJlg2oeGNnJF0Y2e5tNrQJGdvJFg2oe1rkgERERkQLZVaAEgHcHtUANT1eLh0q1Cqjh6Yp3B7ao+GAiIiIiO2Z3gdLL
zRmLRrSz+H2UegEsGtEOXm7Gd4ITERER2SO7C5QA0K1pTcwfatyi5JX1wdBW6Na0pkWvQURERGQL7DJQAsC4QP/CUClV+dtwng+GtsLYQH9pTkpERERk4+xq2aDS7L90B//ZdBIpWXlmlcEN90wuGtGOM5NERERERdh9oASAe7kFCPvpPNbHX4cagM6EV6xRPVgaaGRHX7w7qAXvmSQiIiJ6iEMESoMb6dlYeyQRaw4nFu6o46RWFdsDvOjH3u7OePGJhgju3JBLAxERERGVwaECpUGBTo+LyZk4nZSB00kZuJOVh7wCHVydNajp6Yo29b3Rpr43mtXxgrPGbm8zJSIiIpKEQwZKIiIiIpIOp9+IiIiIyCwMlERERERkFgZKIiIiIjILAyURERERmYWBkoiIiIjMwkBJRERERGZhoCQiIiIiszBQEhEREZFZGCiJiIiIyCwMlERERERkFgZKIiIiIjILAyURERERmYWBkoiIiIjMwkBJRERERGZhoCQiIiIiszBQEhEREZFZGCiJiIiIyCwMlERERERkFgZKIiIiIjILAyURERERmYWBkoiIiIjMwkBJRERERGZhoCQiIiIiszBQEhEREZFZGCiJiIiIyCwMlERERERkFgZKIiIiIjILAyURERERmYWBkoiIiIjMwkBJRERERGZhoCQiIiIiszBQEhEREZFZGCiJiIiIyCwMlERERERklv8D9tYOj6gE5xsAAAAASUVORK5CYII=", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "MWPM solution 2: [((1, 0), (2, 1)), ((3, 4), (5, 3)), ((1, 6), (4, 6))]\n" - ] - } - ], + "outputs": [], "source": [ - "def toric_distance(u, v, L):\n", - "\n", + "# EXERCISE 2\n", + "def toric_distance(u: list[int], v: list[int], L: int) -> int:\n", " \"\"\"\n", - " Function that computes Manhattan distance between two flagged syndromes on toric grid\n", - " \n", + " Computes Manhattan distance between two flagged syndromes on toric grid.\n", + "\n", + " Parameters\n", " ----------\n", - " u: list[int]\n", - " x and y corrdinate of first syndrome\n", - " v: list[int]\n", - " x and y corrdinate of second syndrome.\n", - " L: int\n", - " Size of toric surface\n", + " u : list[int]\n", + " x and y coordinate of first syndrome.\n", + " v : list[int]\n", + " x and y coordinate of second syndrome.\n", + " L : int\n", + " Size of toric surface.\n", "\n", " Returns\n", - " list of syndromes pairs obtained by MWPM\n", " -------\n", + " int\n", + " Manhattan distance between two syndrome locations on the torus.\n", " \"\"\"\n", - "\n", " dx = abs(u[0] - v[0])\n", " dx = min(dx, L - dx)\n", " dy = abs(u[1] - v[1])\n", " dy = min(dy, L - dy)\n", " return dx + dy\n", "\n", - "def mwpm_decoder_toric(flagged_stabilizers, L):\n", "\n", + "def mwpm_decoder_toric(flagged_stabilizers: list[tuple[int, int]], L: int) -> list[tuple[tuple[int, int], tuple[int, int]]]:\n", " \"\"\"\n", - " Function that takes a list of syndrome locations and returns MWPM result\n", + " Takes a list of syndrome locations and returns MWPM result.\n", + "\n", + " Parameters\n", " ----------\n", - " u: list[list[int]]\n", - " list of lists containing flagged syndrome locations\n", - " L: int\n", - " Size of toric surface\n", + " flagged_stabilizers : list[tuple[int, int]]\n", + " List of (x, y) coordinates of flagged syndrome locations.\n", + " L : int\n", + " Size of toric 
surface.\n", "\n", " Returns\n", - " interger Manhattan distance between two syndrome locations\n", " -------\n", + " list[tuple[tuple[int, int], tuple[int, int]]]\n", + " List of syndrome pairs obtained by MWPM.\n", " \"\"\"\n", - "\n", " G = nx.Graph()\n", " # Add each flagged stabilizer as a node\n", " for i, coord in enumerate(flagged_stabilizers):\n", @@ -719,7 +630,7 @@ " plt.title(\"Toric Code Graph (Distances on Edges)\")\n", " plt.show()\n", "\n", - " #Performs MWPM\n", + " # Performs MWPM\n", " matching_indices = nx.min_weight_matching(G)\n", "\n", " # Convert node indices back to stabilizer coordinates for clarity\n", @@ -728,11 +639,12 @@ " matching_solution.append((G.nodes[i]['pos'], G.nodes[j]['pos']))\n", " return matching_solution\n", "\n", - "flagged_1 = [(2, 1), (1, 3), (2, 4), (1, 6), (4,5), (5,2)]\n", - "flagged_2 = [(1, 0), (1, 6), (2, 1), (3, 4), (5,3), (4,6)]\n", + "\n", + "flagged_1 = [(2, 1), (1, 3), (2, 4), (1, 6), (4, 5), (5, 2)]\n", + "flagged_2 = [(1, 0), (1, 6), (2, 1), (3, 4), (5, 3), (4, 6)]\n", "L = 7\n", "\n", - "print(\"MWPM solution 1:\",mwpm_decoder_toric(flagged_1, L))\n", + "print(\"MWPM solution 1:\", mwpm_decoder_toric(flagged_1, L))\n", "print(\"MWPM solution 2:\", mwpm_decoder_toric(flagged_2, L))" ] }, @@ -741,14 +653,14 @@ "id": "e5eea1eb-d113-4792-b6e6-3cc74c0694a4", "metadata": {}, "source": [ - "A logical error occurs in case 1.\n", + "A logical error occurs in case 1.\n", "\n", - "\n", + "\"MWPM\n", "\n", "\n", - "A logical error does not occur in case 2\n", + "A logical error does not occur in case 2.\n", "\n", - "" + "\"MWPM" ] }, { @@ -756,34 +668,42 @@ "id": "5ff67ed2-75ba-4f5f-b496-b93be15e1703", "metadata": {}, "source": [ - "## 6.7 The Planar and Surface Codes ##\n", + "---\n", "\n", - "One of the biggest problems with the toric code is that it is not very practical. 
Some qubit modalities could utilize the toric code, but many modalities such as superconducting cannot as there is no easy way to map a torus to qubits arranged on a plane. This requires some modifications to produce the famous surface code that is much more practical and the basis for many experimental QEC demonstrations today. \n", + "## 6.7. The Planar and Surface Codes\n", "\n", + "One of the biggest problems with the toric code is that it is not very practical. Some qubit modalities could utilize the toric code, but many modalities such as superconducting cannot as there is no easy way to map a torus to qubits arranged on a plane. This requires some modifications to produce the famous surface code that is much more practical and the basis for many experimental QEC demonstrations today.\n", "\n", - "The figure below shows the steps to get there. First, the toric code is transformed into the planar code by eliminating the periodic boundary conditions and capping one pair of parallel slides to form a smooth edge. The other edge is called a rough edge. This allows the code to now lie in a plane which is much more suitable for hardware implementation. \n", + "The figure below shows the steps to get there. First, the toric code is transformed into the **planar code** by eliminating the periodic boundary conditions and capping one pair of parallel sides to form a smooth edge. The other edge is called a rough edge. This allows the code to now lie in a plane which is much more suitable for hardware implementation.\n", "\n", - "\n", + "\"Three-step\n", "\n", - "Changing the topology does impact the logical encoding. Now it is not possible to form loops around the smooth edges (top to bottom) resulting in the loss of a degree of freedom and therefore a logical qubit. \n", + "Changing the topology does impact the logical encoding. 
Now it is not possible to form loops around the smooth edges (top to bottom) resulting in the loss of a degree of freedom and therefore a logical qubit.\n", "\n", - "It is possible to improve on the planar code with a few transformations that allow encoding of a logical qubit with the same distance but using fewer data qubits. If you place a green or purple square on each vertex and plaquette stabilizer, respectively, and then rotate by 45 degrees, the basis for the so called rotated surface code is formed. \n", + "It is possible to improve on the planar code with a few transformations that allow encoding of a logical qubit with the same distance but using fewer data qubits. If you place a green or purple square on each vertex and plaquette stabilizer, respectively, and then rotate by 45 degrees, the basis for the so called **rotated surface code** is formed.\n", "\n", - "Special boundary 2-qubit stabilizers are added around the faces of the surface to finish the code layout. Previously all error chains flagged exactly two stabilizers. Now, if an error chain starts on the boundary, it is possible to only flag a single stabilizer. Thus, this must be taken into account when decoding.\n", + "Special boundary 2-qubit stabilizers are added around the faces of the surface to finish the code layout. Previously all error chains flagged exactly two stabilizers. Now, if an error chain starts on the boundary, it is possible to only flag a single stabilizer. Thus, this must be taken into account when decoding.\n", "\n", - "The rotated surface code can now encode a single logical qubit with distance $L$ using $L^2$ fewer qubits. \n", + "The rotated surface code can now encode a single logical qubit with distance $L$ using $L^2$ fewer qubits.\n", "\n", - "## Summary ##\n", + "## Conclusion\n", "\n", - "Topological codes are central to both the history of QEC and much of the current research literature. 
after completing this lab, you should have a better understanding of what a topological code is, why it is used, and a basic understanding of their inner workings. In addition, coding the toric code in CUDA-Q provides deeper insight into the challenges of implementing such a code. " + "Topological codes are central to both the history of QEC and much of the current research literature. After completing this lab, you should have a better understanding of what a topological code is, why it is used, and a basic understanding of their inner workings. In addition, coding the toric code in CUDA-Q provides deeper insight into the challenges of implementing such a code." + ] + }, + { + "cell_type": "markdown", + "id": "320783f27cb04eca", + "metadata": {}, + "source": [ + "**Related Notebooks:**\n", + "* [QEC 101: Lab 5 — Magic State Distillation](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/05_QEC_MSD.ipynb) — previous lab in the QEC 101 series covering magic state distillation\n", + "* [QEC 101: Lab 7 — qLDPC Codes](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/07_QEC_qLDPC.ipynb) — next lab exploring quantum low-density parity-check codes\n", + "* [QEC 101: Lab 4 — Decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb) — covers decoder fundamentals used in this lab's MWPM exercise" ] } ], "metadata": { - "colab": { - "include_colab_link": true, - "provenance": [] - }, "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", @@ -797,9 +717,22 @@ "file_extension": ".py", "mimetype": "text/x-python", "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.10.0" + }, + "learning_goals": { + "cfqt_domain": "QCS", + "cfqt_subdomain": [ + "QCS.ERR", + "QCS.SW", + "QCS.ALG" + ], + "cfqt_proficiency": "B1", + "qist_competency_areas": [ + "quantum_error_correction", + "quantum_software", + "quantum_algorithms" + ], + 
"application_domain": "error_correction" } }, "nbformat": 4, diff --git a/qec101/Solutions/07_QEC_qLDPC_Solution.ipynb b/qec101/Solutions/07_QEC_qLDPC_Solution.ipynb index 9484340..f911eb6 100644 --- a/qec101/Solutions/07_QEC_qLDPC_Solution.ipynb +++ b/qec101/Solutions/07_QEC_qLDPC_Solution.ipynb @@ -1,63 +1,129 @@ { "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "a1b74c6a4abe4136", + "metadata": {}, + "outputs": [], + "source": [ + "# SPDX-License-Identifier: Apache-2.0 AND CC-BY-NC-4.0\n", + "#\n", + "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# http://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." + ] + }, { "cell_type": "markdown", "id": "eaa3992e-c095-4bd7-9f14-60abd0740b64", "metadata": {}, "source": [ - "# QEC 101 Lab 7: qLDPC Codes #\n", - "\n", - "\n", - "One of the most promising classes of QEC codes are so called quantum low density parity check (qLDPC) codes. These codes are quite general and include well known codes like the surface code. This lab will walk through the basics of classical LDPC codes, the challenges that arise when moving to qLDPC codes, and how to construct valid qLDPC codes with favorable properties. 
You will eventually implement techniques from \"[Lift-Connected Surface Codes](https://arxiv.org/abs/2401.02911)\" connecting what you have leaned to state-of-the-art research.\n", - "\n", - "**Prerequisites:** This lab assumes you have a moderate knowledge of QEC and have completed the core QEC 101 courses (labs 1-4), especially the labs covering [stabilizers](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) and [decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb). \n", - "\n", - "The list below outlines what you'll be doing in each section of this lab:\n", - "* 7.1 Learn the basics of classical LDPC codes and how to analyze their properties.\n", - "* 7.2 Learn why quantum LDPC codes are challenging to construct and how to build hypergraph product (HGP) codes.\n", - "* 7.3 Extend the HGP procedure to produce larger qLDPC codes with improved properties.\n", - "* 7.4 Compare the quality of the codes you created using the NVIDIA BP+OSD decoder.\n", - "\n", - "Terminology you will use:\n", - "* low density parity check, encoding rate, degree\n", - "* hypergraph product\n", - "* lifted product\n", - "* circulants\n", - "\n", + "# QEC 101 Lab 7: qLDPC Codes — Solutions\n", + "\n", + "---\n", + "\n", + "**What You Will Do:**\n", + "* Analyze classical LDPC code properties including node degrees, encoding rate, and four-cycles\n", + "* Construct quantum LDPC parity check matrices using the Hypergraph Product (HGP) construction\n", + "* Extend the HGP procedure to build Lifted Product (LP) codes with improved code distance\n", + "* Compare LP surface codes against surface code copies using the CUDA-Q QEC BP+OSD decoder\n", + "\n", + "**Prerequisites:**\n", + "* Python and Jupyter familiarity\n", + "* Basic knowledge of quantum computing (qubits, gates, measurement)\n", + "* Completion of QEC 101 Labs 1–4 (especially 
[Stabilizers](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) and [Decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb))\n", + "* Familiarity with parity check matrices and Tanner graphs\n", + "* Basic linear algebra (matrix multiplication, Kronecker product, rank)\n", + "\n", + "**Key Terminology:**\n", + "* Low Density Parity Check (LDPC)\n", + "* Encoding rate\n", + "* Variable node degree / Check node degree\n", + "* Tanner graph\n", + "* Hypergraph Product (HGP)\n", + "* Lifted Product (LP)\n", + "* Circulant\n", + "* Shannon limit\n", + "* CSS code\n", + "* Belief Propagation (BP)\n", + "\n", + "**CUDA-Q Syntax:**\n", + "* [`cudaq_qec`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — CUDA-Q Quantum Error Correction library" + ] + }, + { + "cell_type": "markdown", + "id": "593be360231e4a4e", + "metadata": {}, + "source": [ + "
\n", "\n", - "qLDPC codes have a number of favorable properties that make them promising for deployment within nearer term fault tolerant workflows.\n", + "**⚡ GPU Required:** Parts of this notebook require a GPU.\n", "\n", - "First run the cell below to prepare the necessary libraries." + "
" ] }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, + "id": "ac9b4cafa1834f54", + "metadata": {}, + "outputs": [], + "source": [ + "## Instructions for Google Colab. You can ignore this cell if you have CUDA-Q\n", + "## set up locally with all required files on your system.\n", + "## Uncomment the lines below and execute this cell to install CUDA-Q.\n", + "\n", + "#!pip install cudaq -q\n", + "#\n", + "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", + "#!unzip -q main.zip\n", + "#!mv cuda-q-academic-main/qec101/Images ./Images" + ] + }, + { + "cell_type": "markdown", + "id": "2046baf8b8d846d7", + "metadata": {}, + "source": [ + "> **Note:** Run the cell below to import all required packages.\n", + "> If you installed packages above, restart the kernel first\n", + "> (**Runtime → Restart session** in Colab, or **Kernel → Restart** in Jupyter)." + ] + }, + { + "cell_type": "code", + "execution_count": null, "id": "b29a2a16-344d-41f2-994c-60418b46bac9", "metadata": {}, "outputs": [], "source": [ "import sys\n", + "import os\n", + "import time\n", + "from itertools import combinations\n", + "\n", + "import numpy as np\n", + "from scipy.sparse import csr_matrix\n", + "\n", + "import galois\n", + "import ipywidgets as widgets\n", + "from IPython.display import display\n", "\n", - "try:\n", - " import time\n", - " import cudaq_qec as qec\n", - " import galois\n", - " import cudaq_qec\n", - " import ipywidgets as widgets\n", - " import numpy as np\n", - " from IPython.display import display\n", - " from itertools import combinations\n", - " from scipy.sparse import csr_matrix\n", - "\n", - "\n", - "except ImportError:\n", - " print(\"Tools not found, installing. 
Please restart your kernel after this is done.\")\n", - " !{sys.executable} -m pip install --upgrade pip\n", - " !{sys.executable} -m pip install galois\n", - " !{sys.executable} -m pip install cudaq-qec\n", - " !{sys.executable} -m pip install ipywidgets\n", - " print(\"\\nNew libraries have been installed. Please restart your kernel!\")" + "## To install cudaq-qec (if not already installed), uncomment and run:\n", + "## !pip install cudaq-qec -q\n", + "import cudaq_qec as qec\n", + "\n", + "sys.path.append(os.path.join(os.getcwd(), '..'))" ] }, { @@ -65,81 +131,59 @@ "id": "2fb1f3e2-91eb-493a-9272-c54b6599d068", "metadata": {}, "source": [ - "## 7.1 Classical LDPC Codes ##\n", + "---\n", + "\n", + "## 7.1. Classical LDPC Codes\n", "\n", - "Robert Gallager first conceived of low density parity check (LDPC) codes in his [1960 MIT dissertation](https://dspace.mit.edu/handle/1721.1/11804) but it was underappreciated at the time and interest resurged in the 90's as other error correction codes rose in prominence. LDPC codes are now used widely in telecommunications and computer memory applications\n", + "Robert Gallager first conceived of **Low Density Parity Check (LDPC)** codes in his [1960 MIT dissertation](https://dspace.mit.edu/handle/1721.1/11804) but it was underappreciated at the time and interest resurged in the 90's as other error correction codes rose in prominence. LDPC codes are now used widely in telecommunications and computer memory applications.\n", "\n", - "An LDPC code is a classical parity check error correction code with a sparse parity check matrix $H$. The parity check matrix is often represented by a Tanner graph, which was introduced in lab 4 on decoders. A Tanner graph is drawn with check nodes on the top row and variable nodes on the bottom. The Tanner graph for the Steane code is shown below. \n", + "An LDPC code is a classical parity check error correction code with a sparse parity check matrix $H$. 
The parity check matrix is often represented by a **Tanner graph**, which was introduced in lab 4 on decoders. A Tanner graph is drawn with check nodes on the top row and variable nodes on the bottom. The Tanner graph for the Steane code is shown below.\n", "\n", - "\"Drawing\"\n", + "\"Tanner\n", "\n", - "A sparse $H$ means that each variable and check node only connects to a limited number of other nodes. The **variable node degree** characterizes the maximum number of checks any (q)bit is involved in while the and **check node degree** characterizes the maximum number of (q)bits involved in any given check. Ideally, these two values are as small as possible to maintain low density.\n", + "A sparse $H$ means that each variable and check node only connects to a limited number of other nodes. The **variable node degree** characterizes the maximum number of checks any (q)bit is involved in while the **check node degree** characterizes the maximum number of (q)bits involved in any given check. Ideally, these two values are as small as possible to maintain low density.\n", "\n", "A second important property is the **encoding rate** ($r$).\n", "\n", "$$ r = \\frac{k}{n+c} $$\n", "\n", - "Where, $k$ is the number of encoded logical bits, $n$ is the number of data bits, and $c$ is the number of check bits. A high encoding rate is good and means that many logical bits can be encoded with a lower overhead. However, this competes with other properties like the code distance - i.e. the ability to correctly capture errors.\n", + "Where, $k$ is the number of encoded logical bits, $n$ is the number of data bits, and $c$ is the number of check bits. A high encoding rate is good and means that many logical bits can be encoded with a lower overhead. However, this competes with other properties like the code distance - i.e. the ability to correctly capture errors.\n", "\n", - "What $k$ is depends on the number of linearly independent constraints. 
To determine this, perform Gaussian elimination over GF(2). GF(2) comes from the world of abstract algebra and corresponds to a field of two elements. Essentially, this just means integer math governed by mod 2 arithmetic. The Gaussian elimination result can be used to determine rank($H$) which is related to $k$ by \n", + "What $k$ is depends on the number of linearly independent constraints. To determine this, perform Gaussian elimination over GF(2). GF(2) comes from the world of abstract algebra and corresponds to a field of two elements. Essentially, this just means integer math governed by mod 2 arithmetic. The Gaussian elimination result can be used to determine rank($H$) which is related to $k$ by\n", "\n", - "$$ k = n - \\mathrm{rank(}H\\mathrm{}) $$\n", + "$$ k = n - \\mathrm{rank(}H\\mathrm{)} $$\n", "\n", "\n", - "A final characteristic of a desirable LDPC code is how suited it is for decoding. Common decoders like belief propagation (BP) can struggle when the Tanner graph has 4-cycles. These form local loops (see image below) which can make it hard for the decoder to converge to a solution.\n", + "A final characteristic of a desirable LDPC code is how suited it is for decoding. Common decoders like **belief propagation (BP)** can struggle when the Tanner graph has 4-cycles. These form local loops (see image below) which can make it hard for the decoder to converge to a solution.\n", "\n", - "\"Drawing\"\n", + "\"Diagram\n", "\n", - "In most cases, it turns out that LDPC codes are very easy to generate. Random generation of $H$ usually produces a good LDPC code. This also provides flexibility as new codes can be generated as needed depending on the problem at hand. Randomly generated codes also perform well and produce results close to the Shannon limit, that is the theoretical maximum of information that can pass through a noisy channel.\n", + "In most cases, it turns out that LDPC codes are very easy to generate. 
Random generation of $H$ usually produces a good LDPC code. This also provides flexibility as new codes can be generated as needed depending on the problem at hand. Randomly generated codes also perform well and produce results close to the **Shannon limit**, that is, the theoretical maximum of information that can pass through a noisy channel." + ] + }, + { + "cell_type": "markdown", + "id": "2fb1f3e2-91eb-493a-9272-c54b6599d068ex1", + "metadata": {}, + "source": [ + "
\n", "\n", + "**Exercise 1:**\n", "\n", - "
\n", - "

Exercise 1:

\n", - "

\n", - "Given the three parity check matrices below, write a function to analyze them and determine the check and variable node degrees, the encoding rate, the indices of any four cycles, and if any nodes are unchecked.\n", - "

\n", - "
\n", + "Given the three parity check matrices below, write a function to analyze them and determine the check and variable node degrees, the encoding rate, the indices of any four-cycles, and if any nodes are unchecked.\n", "\n", - "\n" + "
" ] }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "id": "3d7854b5-09ba-4717-9388-c1a7106198b0", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "H1:\n", - " variable degrees: [2, 2, 2, 2, 2, 2, 2, 1]\n", - " check degrees: [3, 3, 3, 3, 3]\n", - " rate = 0.375 (k = 3)\n", - " 4‑cycles:\n", - " vars (0,1) rows (0,1)\n", - " vars (3,6) rows (2,4)\n", - " all variables are checked\n", - "\n", - "H2:\n", - " variable degrees: [2, 2, 2, 2, 2, 2, 0]\n", - " check degrees: [3, 3, 3, 3]\n", - " rate = 0.571 (k = 4)\n", - " no 4‑cycles\n", - " unchecked variables: [6]\n", - "\n", - "H3:\n", - " variable degrees: [2, 1, 2, 1, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 1, 1]\n", - " check degrees: [5, 4, 5, 4, 4, 4]\n", - " rate = 0.625 (k = 10)\n", - " no 4‑cycles\n", - " all variables are checked\n" - ] - } - ], + "outputs": [], "source": [ + "# EXERCISE 1\n", + "\n", "H1 = np.array([\n", " [1,1,0,0,1,0,0,0],\n", " [1,1,0,0,0,1,0,0],\n", @@ -155,93 +199,92 @@ "\n", "\n", "H3 = np.array([\n", - " [1,0,0,1,0,1,0,0,0,1, 0, 0, 1, 0, 0, 0], \n", + " [1,0,0,1,0,1,0,0,0,1, 0, 0, 1, 0, 0, 0],\n", " [0,1,0,0,1,0,1,0,0,0, 1, 0, 0, 0, 0, 0],\n", - " [0,0,1,0,0,0,0,1,0,0, 0, 1, 0, 1, 1, 0], \n", - " [1,0,0,0,1,0,0,0,1,0, 0, 0, 0, 1, 0, 0], \n", - " [0,0,0,0,0,0,1,0,1,0, 0, 1, 1, 0, 0, 0], \n", - " [0,0,1,0,0,1,0,0,0,0, 1, 0, 0, 0, 0, 1] \n", + " [0,0,1,0,0,0,0,1,0,0, 0, 1, 0, 1, 1, 0],\n", + " [1,0,0,0,1,0,0,0,1,0, 0, 0, 0, 1, 0, 0],\n", + " [0,0,0,0,0,0,1,0,1,0, 0, 1, 1, 0, 0, 0],\n", + " [0,0,1,0,0,1,0,0,0,0, 1, 0, 0, 0, 0, 1]\n", "], dtype=int)\n", "\n", "\n", - "def degrees(H):\n", - " \"\"\" \n", - " function which computes the degrees of a parity check matrix\n", - " \n", + "def degrees(H: np.ndarray) -> tuple[np.ndarray, np.ndarray]:\n", + " \"\"\"\n", + " Compute the degrees of a parity check matrix.\n", + "\n", " Args:\n", - " H (np.array): parity check matrix\n", + " H: parity check matrix\n", "\n", " 
Returns:\n", - " (list): list of degrees for each variable bit\n", - " (list): list of degrees for each check bit\n", + " Tuple of (variable node degrees, check node degrees)\n", " \"\"\"\n", - " \n", - " #TODO\n", - " return H.sum(axis=0), H.sum(axis=1) # Sums the 1's vertically and then horizontally\n", - "\n", - "def unchecked_vars(H):\n", - " \"\"\" \n", - " function which identifies any unchecked variable bit\n", - " \n", + " ##TODO##\n", + " return H.sum(axis=0), H.sum(axis=1)\n", + "\n", + "\n", + "def unchecked_vars(H: np.ndarray) -> np.ndarray:\n", + " \"\"\"\n", + " Identify any unchecked variable bits.\n", + "\n", " Args:\n", - " H (np.array): parity check matrix\n", + " H: parity check matrix\n", "\n", " Returns:\n", - " (list): list of unchecked variable bits\n", + " Array of unchecked variable bit indices\n", " \"\"\"\n", - " #TODO\n", + " ##TODO##\n", " return np.where(H.sum(0) == 0)[0]\n", "\n", - "def four_cycles(H):\n", - " \"\"\" \n", - " function which identifies any four-cycles in a parity check matrix\n", - " \n", + "\n", + "def four_cycles(H: np.ndarray) -> list[tuple[int, int, int, int]]:\n", + " \"\"\"\n", + " Identify any four-cycles in a parity check matrix.\n", + "\n", " Args:\n", - " H (np.array): parity check matrix\n", + " H: parity check matrix\n", "\n", " Returns:\n", - " (list): list of nodes involved in a 4-cycle.\n", + " List of (var_i, var_j, check_p, check_q) tuples involved in 4-cycles\n", " \"\"\"\n", - " #TODO\n", + " ##TODO##\n", " cycles = []\n", " m, n = H.shape\n", - " for i, j in combinations(range(n), 2): # variable‑node pairs\n", + " for i, j in combinations(range(n), 2):\n", " shared = np.where(H[:, i] & H[:, j])[0]\n", " if shared.size >= 2:\n", " for p, q in combinations(shared, 2):\n", " cycles.append((i, j, p, q))\n", " return cycles\n", "\n", - "def encoding_rate(H):\n", - " \"\"\" \n", - " function which computes the encoding rate based on rank of H.\n", - " Note: Must use galois for GF2 field definition to 
ensure computation is correct\n", - " \n", + "\n", + "def encoding_rate(H: np.ndarray) -> tuple[float, int]:\n", + " \"\"\"\n", + " Compute the encoding rate based on rank of H.\n", + " Uses galois for GF2 field definition to ensure computation is correct.\n", + "\n", " Args:\n", - " H (np.array): parity check matrix\n", + " H: parity check matrix\n", "\n", " Returns:\n", - " (float): encoding rate\n", + " Tuple of (encoding rate, number of logical bits k)\n", " \"\"\"\n", " GF2 = galois.GF(2)\n", " Hgf2 = GF2(H)\n", " n = Hgf2.shape[1]\n", - " rank = np.linalg.matrix_rank(Hgf2) \n", + " rank = np.linalg.matrix_rank(Hgf2)\n", " k = n - rank\n", " return k / n, k\n", "\n", "\n", - "def analyze(H, name): \n", - " \"\"\" \n", - " Function that organizes and prints results from previous functions\n", - " \n", - " Args:\n", - " H (np.array): parity check matrix\n", - " name (str): name of the parity chexk matrix\n", + "def analyze(H: np.ndarray, name: str) -> None:\n", + " \"\"\"\n", + " Organize and print analysis results for a parity check matrix.\n", "\n", - " Returns:\n", + " Args:\n", + " H: parity check matrix\n", + " name: name of the parity check matrix\n", " \"\"\"\n", - " #TODO\n", + " ##TODO##\n", " vdeg, cdeg = degrees(H)\n", " R, k = encoding_rate(H)\n", " cycles = four_cycles(H)\n", @@ -253,11 +296,11 @@ " print(f' rate = {R:.3f} (k = {k})')\n", "\n", " if cycles:\n", - " print(' 4‑cycles:')\n", + " print(' 4\\u2011cycles:')\n", " for i, j, p, q in cycles:\n", " print(f' vars ({i},{j}) rows ({p},{q})')\n", " else:\n", - " print(' no 4‑cycles')\n", + " print(' no 4\\u2011cycles')\n", "\n", " if unchk.size:\n", " print(' unchecked variables:', unchk.tolist())\n", @@ -274,15 +317,17 @@ "id": "9861bfe7-5840-47ed-a963-23fe56635c80", "metadata": {}, "source": [ - "## 7.2 Quantum LDPC ##\n", + "---\n", "\n", - "qLDPC codes have many similarities to their classical counterparts, particularly with respect to terms like encoding rate and degree. 
Unfortunately, a major difference is that valid qLDPC codes with favorable properties cannot be produced by randomly generating parity check matrices. This is because the $Z$ and $X$ parity check matrices ($H_Z$ and $H_X$) must commute ($H_ZH^T_X=0$) for a valid CSS code that can correct both types of errors. \n", + "## 7.2. Quantum LDPC\n", "\n", - "The probability of randomly producing parity check matrices that commute is vanishingly small, let alone exhibit favorable properties. Cutting edge research focused on qLDPC codes is determined to find clever ways to produce quality parity check matrices that meet these constraints. \n", + "qLDPC codes have many similarities to their classical counterparts, particularly with respect to terms like encoding rate and degree. Unfortunately, a major difference is that valid qLDPC codes with favorable properties cannot be produced by randomly generating parity check matrices. This is because the $Z$ and $X$ parity check matrices ($H_Z$ and $H_X$) must commute ($H_ZH^T_X=0$) for a valid **CSS code** that can correct both types of errors.\n", "\n", - "One particularly insightful approach is using [so called hypergraph product codes](https://arxiv.org/pdf/2401.02911). The idea is to take two \"good\" ( in this case a technical term meaning the codes distance scales as $n$) classical parity check matrices $H_1$ ($m_1\\times n_1$) and $H_2$ ($m_2\\times n_2$) and combine them in such a way that $H_Z$ and $H_X$ commute (i.e. $H_ZH_X^T=0$) and the resulting codes have a constant encoding rate and a distance that scales proportionally to the square root of the number of data qubits.\n", + "The probability of randomly producing parity check matrices that commute is vanishingly small, let alone exhibit favorable properties. 
Cutting edge research focused on qLDPC codes is determined to find clever ways to produce quality parity check matrices that meet these constraints.\n", "\n", - "The procedure works by defining the final parity check matrix $H$ as a block encoding of $H_Z$ and $H_X$. \n", + "One particularly insightful approach is using [so-called **hypergraph product (HGP)** codes](https://arxiv.org/pdf/2401.02911). The idea is to take two \"good\" (in this case a technical term meaning the code's distance scales as $n$) classical parity check matrices $H_1$ ($m_1\\times n_1$) and $H_2$ ($m_2\\times n_2$) and combine them in such a way that $H_Z$ and $H_X$ commute (i.e. $H_ZH_X^T=0$) and the resulting codes have a constant encoding rate and a distance that scales proportionally to the square root of the number of data qubits.\n", "\n", + "The procedure works by defining the final parity check matrix $H$ as a block encoding of $H_Z$ and $H_X$.\n", "\n", "\n", "$$\n", @@ -331,7 +376,7 @@ "= 2(H_1 \\otimes H_2^T) = 0\n", "$$\n", "\n", - "It may not be clear at first why the final term equals zero. Recall that all operations with parity check matrices occur mod 2. So, taking any binary matrix and multiplying it by 2, will make every entry 0 or 2 = 0 mod 2. " + "It may not be clear at first why the final term equals zero. Recall that all operations with parity check matrices occur mod 2. So, taking any binary matrix and multiplying it by 2, will make every entry 0 or 2 = 0 mod 2." ] }, { "cell_type": "markdown", "id": "24e40d1e-d6f1-4cfa-a066-ad6e2141c27b", "metadata": {}, "source": [ - "
\n", - "

Exercise 2:

\n", - "

\n", - "Construct a hypergraph product code using a pair of three-qubit repetition code base matrices. That is, begin with:\n", + "

\n", + "\n", + "**Exercise 2:**\n", + "\n", + "Construct a hypergraph product code using a pair of three-qubit repetition code base matrices. That is, begin with:\n", "\n", "$$H_1 =H_2 =\\begin{pmatrix}\n", "1&1&0\\\\\n", @@ -350,165 +396,76 @@ "\\end{pmatrix}\n", "$$\n", "\n", - "Build the parity check matrices for $H_Z$ and $H_X$ and confim they commute. Note: the `galois` package is used to define the matrices over a Galois field (GF2), which ensures modular arithmetic is baked in to your computations. All operations can be performed just like you would with `numpy`.\n", - "

\n", - "
\n", - "\n", + "Build the parity check matrices for $H_Z$ and $H_X$ and confirm they commute. Note: the `galois` package is used to define the matrices over a Galois field (GF2), which ensures modular arithmetic is baked in to your computations. All operations can be performed just like you would with `numpy`.\n", "\n", - "\n" + "
" ] }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "id": "7cb9d9cb-be6c-4d0b-be6f-b0563f34a3b2", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "First term in Hx\n", - "[[1 1 0 0 0 0 0 0 0]\n", - " [0 1 1 0 0 0 0 0 0]\n", - " [0 0 0 1 1 0 0 0 0]\n", - " [0 0 0 0 1 1 0 0 0]\n", - " [0 0 0 0 0 0 1 1 0]\n", - " [0 0 0 0 0 0 0 1 1]]\n", - "\n", - " Second term in Hx\n", - "[[1 0 0 0]\n", - " [0 1 0 0]\n", - " [1 0 1 0]\n", - " [0 1 0 1]\n", - " [0 0 1 0]\n", - " [0 0 0 1]]\n", - "\n", - " Full Hx\n", - "[[1 1 0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 1 1 0 0 0 0 0 0 0 1 0 0]\n", - " [0 0 0 1 1 0 0 0 0 1 0 1 0]\n", - " [0 0 0 0 1 1 0 0 0 0 1 0 1]\n", - " [0 0 0 0 0 0 1 1 0 0 0 1 0]\n", - " [0 0 0 0 0 0 0 1 1 0 0 0 1]]\n", - "\n", - "First term in Hz\n", - "[[1 0 0 1 0 0 0 0 0]\n", - " [0 1 0 0 1 0 0 0 0]\n", - " [0 0 1 0 0 1 0 0 0]\n", - " [0 0 0 1 0 0 1 0 0]\n", - " [0 0 0 0 1 0 0 1 0]\n", - " [0 0 0 0 0 1 0 0 1]]\n", - "\n", - " Second term in Hz\n", - "[[1 0 0 0]\n", - " [1 1 0 0]\n", - " [0 1 0 0]\n", - " [0 0 1 0]\n", - " [0 0 1 1]\n", - " [0 0 0 1]]\n", - "\n", - " Full Hz\n", - "[[1 0 0 1 0 0 0 0 0 1 0 0 0]\n", - " [0 1 0 0 1 0 0 0 0 1 1 0 0]\n", - " [0 0 1 0 0 1 0 0 0 0 1 0 0]\n", - " [0 0 0 1 0 0 1 0 0 0 0 1 0]\n", - " [0 0 0 0 1 0 0 1 0 0 0 1 1]\n", - " [0 0 0 0 0 1 0 0 1 0 0 0 1]]\n", - "\n", - " Hz times HxT\n", - "[[0 0 0 0 0 0]\n", - " [0 0 0 0 0 0]\n", - " [0 0 0 0 0 0]\n", - " [0 0 0 0 0 0]\n", - " [0 0 0 0 0 0]\n", - " [0 0 0 0 0 0]]\n" - ] - }, - { - "data": { - "text/plain": [ - "(GF([[1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0],\n", - " [0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0],\n", - " [0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0],\n", - " [0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0],\n", - " [0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1],\n", - " [0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1]], order=2),\n", - " GF([[1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0],\n", - " [0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0],\n", - 
" [0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0],\n", - " [0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1],\n", - " [0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0],\n", - " [0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1]], order=2))" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ + "# EXERCISE 2\n", + "\n", "H = np.array([[1,1,0],\n", " [0,1,1]]) #Using H as H1 = H2\n", "\n", - "def HGP(H):\n", - " \"\"\" \n", - " Function which takes classical base parity check matricies and performs hypergraph product construction\n", - " \n", + "def HGP(H: np.ndarray) -> tuple[np.ndarray, np.ndarray]:\n", + " \"\"\"\n", + " Perform hypergraph product construction from a base parity check matrix.\n", + "\n", " Args:\n", - " H (np.array): Base parity check matrix\n", + " H: Base parity check matrix\n", "\n", " Returns:\n", - " Hz (np.array): Hz matrix from HGP construction\n", - " Hx (np.array): Hx matrix from HGP construction\n", + " Tuple of (Hz, Hx) matrices from HGP construction\n", " \"\"\"\n", - " #TODO Start\n", + " ##TODO##\n", " rows, cols = H.shape\n", "\n", - " I_rows = np.eye(rows, dtype=int) \n", + " I_rows = np.eye(rows, dtype=int)\n", " I_cols = np.eye(cols, dtype=int)\n", "\n", - " #TODO END\n", - " \n", - " # Constructs a Galois field and updates you matricies at Galois field.\n", - " GF2 = galois.GF(2) \n", - " \n", - " H = GF2(H) \n", + " # Constructs a Galois field and updates your matrices as Galois field.\n", + " GF2 = galois.GF(2)\n", + "\n", + " H = GF2(H)\n", " I_rows = GF2(I_rows)\n", " I_cols = GF2(I_cols)\n", "\n", - " #TODO start\n", " print(\"First term in Hx\")\n", " Hx_a = np.kron(I_cols, H)\n", " print(Hx_a)\n", - " \n", + "\n", " print(\"\\n Second term in Hx\")\n", " Hx_b = np.kron(H.T,I_rows)\n", " print(Hx_b)\n", - " \n", + "\n", " print(\"\\n Full Hx\")\n", " Hx = np.concatenate((Hx_a, Hx_b), axis=1)\n", " print(Hx)\n", - " \n", + "\n", " print(\"\\nFirst term in Hz\")\n", " Hz_a = np.kron(H, 
I_cols)\n", " print(Hz_a)\n", - " \n", + "\n", " print(\"\\n Second term in Hz\")\n", " Hz_b = np.kron(I_rows, H.T)\n", " print(Hz_b)\n", - " \n", + "\n", " print(\"\\n Full Hz\")\n", " Hz = np.concatenate((Hz_a, Hz_b), axis=1)\n", " print(Hz)\n", - " \n", + "\n", " print(\"\\n Hz times HxT\")\n", " print(Hz @ Hx.T)\n", "\n", " return Hz, Hx\n", - " \n", - "HGP(H)\n", - "#TODO END" + "\n", + "HGP(H)" ] }, { @@ -516,9 +473,9 @@ "id": "8ef0283b-69bc-43e5-ae84-46247cbaab6d", "metadata": {}, "source": [ - "It turns out there is a nice visual interpretation of the hypergraph product code you just generated if the Tanner graphs form a multiplication table of sorts. Each node of the product tanner graph that is the product of a check qubit with a check qubit or a data qubit with a data qubit produces a data qubit. If the top Tanner graph is a circle (data qubit) and the left Tanner graph a square (check qubit), the result is an $X$ stabilizer check. Likewise, if the top Tanner graph is a square and the left Tanner graph a circle, the result is a $Z$ parity check, producing the tanner graph below. Does it look familiar?\n", + "It turns out there is a nice visual interpretation of the hypergraph product code you just generated if the Tanner graphs form a multiplication table of sorts. Each node of the product Tanner graph that is the product of a check qubit with a check qubit or a data qubit with a data qubit produces a data qubit. If the top Tanner graph is a circle (data qubit) and the left Tanner graph a square (check qubit), the result is an $X$ stabilizer check. Likewise, if the top Tanner graph is a square and the left Tanner graph a circle, the result is a $Z$ parity check, producing the Tanner graph below. Does it look familiar?\n", "\n", - "\"Drawing\"\n", + "\"Hypergraph\n", "\n", "\n", "Remarkably, it turns out the product of two size $l$ classical repetition codes is a [[ $(l+1)^2 + l^2$, $1$, $l+1)$]] surface code! 
This is a great example demonstrating how two very simple classical codes can construct a more sophisticated quantum code which obeys the required commutativity constraints." @@ -529,20 +486,28 @@ "id": "b0e5273a-901c-4bb4-a3b3-9e92bc517ad6", "metadata": {}, "source": [ - "## 7.3 Generalizing HGP - Lifted Product Codes\n", + "---\n", + "\n", + "## 7.3. Generalizing HGP — Lifted Product Codes\n", "\n", - "It is possible to build upon the HGP method in a more general manner where products are taken between two parity check matrices that have non-integer entries. Such an approach is called a **lifted product (LP)** as the elements of the parity check matrix are \"lifted\" to higher order elements. LP codes can often retain the degree of checks and provide higher distance codes with a smaller qubit overhead. \n", + "It is possible to build upon the HGP method in a more general manner where products are taken between two parity check matrices that have non-integer entries. Such an approach is called a **lifted product (LP)** as the elements of the parity check matrix are \"lifted\" to higher order elements. LP codes can often retain the degree of checks and provide higher distance codes with a smaller qubit overhead.\n", "\n", "A LP construction still needs to ensure that $H_ZH_X^T=0$ holds as parity check matrices are modified to have non-integer elements. One way to ensure this is to replace parity check matrix elements with a commutative matrix ring, that is, a set of mathematical objects with properties that ensure multiplication of any elements commute, ensuring $H_ZH_X^T=0$ remains true (see the second term in the original proof of commutativity a few cells above). One example is $L \\times L$ **circulant** matrices which are defined as:\n", "\n", "$$ C = \\sum_{i=0}^{L-1} c_iP^{(i)} $$\n", "\n", - "Where $P^{(i)}$ are cyclic permutation matrices that shift columns of the identity matrix by $i$ spaces to the right and where $c_i$ can be either 0 or 1. 
The notation $B_L(P^{(i)})$ indicates the binary representation of matrix size $L$. \n", + "Where $P^{(i)}$ are cyclic permutation matrices that shift columns of the identity matrix by $i$ spaces to the right and where $c_i$ can be either 0 or 1. The notation $B_L(P^{(i)})$ indicates the $L\\times L$ binary matrix representation of $P^{(i)}$." ] }, { "cell_type": "markdown", "id": "0057cb5b712d4a08", "metadata": {}, "source": [ + "
\n", "\n", + "**Exercise 3:**\n", "\n", - "
\n", - "

Exercise 3:

\n", - "

\n", "Build the following binary matrix representations:\n", "\n", "* $B_4(P^{(2)})$\n", @@ -551,19 +516,20 @@ "I&0\\\\\n", "0&P^{(2)}\n", "\\end{pmatrix}$\n", - "

\n", - "
\n", - "\n" + "\n", + "
" ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "id": "8b3dee6f-82c2-44d3-aa80-01247fa19518", "metadata": {}, "outputs": [], "source": [ - "#TODO \n", + "# EXERCISE 3\n", + "\n", + "##TODO##\n", "\n", "arr1 = np.array([\n", " [0, 0, 1, 0],\n", @@ -589,7 +555,7 @@ " [0, 0, 0, 0, 0, 1],\n", " [0, 0, 0, 1, 0, 0],\n", " [0, 0, 0, 0, 1, 0]\n", - "])\n" + "])" ] }, { @@ -597,28 +563,32 @@ "id": "bf2d23dc-5634-4072-bccf-ed4cc67ff8cc", "metadata": {}, "source": [ - "A LP code is built by taking a HGP of two parity check matrices where each 1 is, in this case, a circulant matrix $C$. A LP code of size $L$ will increase the number of qubits and checks by a factor of L. This is because each entry in the parity check matrix is \"lifted\" and replaced by a set of $L$ checks and qubits. \n", + "A LP code is built by taking a HGP of two parity check matrices where each 1 is, in this case, a circulant matrix $C$. A LP code of size $L$ will increase the number of qubits and checks by a factor of L. This is because each entry in the parity check matrix is \"lifted\" and replaced by a set of $L$ checks and qubits.\n", "\n", "The same HGP equations from the previous section hold, but instead, the parity check matrices are denoted with a tilde to note that their elements are circulants where each entry is otherwise a 1. That is to say, $H=LP(\\tilde{H}_1,\\tilde{H}_2) = B_L(\\tilde{H})$\n", "\n", - "The figure below, based on figure 2 of [Lift-Connected Surface Codes](https://arxiv.org/pdf/2401.02911), is helpful for understanding what the LP construction is doing. The overall procedure is similar. First, the base matrices are selected and are used in the same HGP procedure you did previously to form $\\tilde{H}_Z$ and $\\tilde{H}_X$. Then, each 1 in each parity check matrix is replaced with a circulant $C$. This simple swap takes select connections from the original Tanner graph, and lifts it to be replaced with a set of check and data qubits. 
\n", - "\n", - "\"Drawing\"\n", + "The figure below, based on figure 2 of [Lift-Connected Surface Codes](https://arxiv.org/pdf/2401.02911), is helpful for understanding what the LP construction is doing. The overall procedure is similar. First, the base matrices are selected and are used in the same HGP procedure you did previously to form $\\tilde{H}_Z$ and $\\tilde{H}_X$. Then, each 1 in each parity check matrix is replaced with a circulant $C$. This simple swap takes select connections from the original Tanner graph, and lifts it to be replaced with a set of check and data qubits.\n", "\n", - "If $C$ is simply the identity matrix $I$, the resulting LP codes is the result of the HGP code duplicated trivially $L$ times. Adding permutations to $C$ such as $I + P^{(1)}$ adds non-trivial checks between the HGP code copies. Notice the figure below (adapted figure 3 from [Lift-Connected Surface Codes](https://arxiv.org/pdf/2401.02911)) begins with the HGP that you performed earlier. Then, The LP construction creates four copies of the surface code and interconnects them with parity checks.\n", + "\"Diagram\n", "\n", - "\"Drawing\"\n", + "If $C$ is simply the identity matrix $I$, the resulting LP codes is the result of the HGP code duplicated trivially $L$ times. Adding permutations to $C$ such as $I + P^{(1)}$ adds non-trivial checks between the HGP code copies. Notice the figure below (adapted figure 3 from [Lift-Connected Surface Codes](https://arxiv.org/pdf/2401.02911)) begins with the HGP that you performed earlier. Then, the LP construction creates four copies of the surface code and interconnects them with parity checks.\n", "\n", - "It turns out that the LP surface code is a [[52,4,4]] code whereas four copies of the surface code would be [[52,4,3]]. This means that the encoding rate is the same, but the code distance improves thanks to the LP procedure. 
These sorts of clever constructions are driving qLDPC code research to continually improve code properties while maintaining the commutativity properties. \n", - "\n", - "Generally speaking, a LP surface code is parameterized with $l$ and $L$, where $l$ is the size of the base repetition code and $L$ is the number of copies produced by the lift procedure.\n", + "\"LP\n", "\n", + "It turns out that the LP surface code is a [[52,4,4]] code whereas four copies of the surface code would be [[52,4,3]]. This means that the encoding rate is the same, but the code distance improves thanks to the LP procedure. These sorts of clever constructions are driving qLDPC code research to continually improve code properties while maintaining the commutativity properties.\n", "\n", + "Generally speaking, a LP surface code is parameterized with $l$ and $L$, where $l$ is the size of the base repetition code and $L$ is the number of copies produced by the lift procedure." + ] + }, + { + "cell_type": "markdown", + "id": "a1bbfb033a5c4b55", + "metadata": {}, + "source": [ + "
\n", "\n", + "**Exercise 4:**\n", "\n", - "
\n", - "

Exercise 4:

\n", - "

\n", "Build the [[52,4,4]] ( $l =2$ and $L =4$) LP surface code by performing the following steps. First, use the base matrix below which can be conveniently split into $H_{copy}$, which produces trivial copies of the surface code and ($H_{int}$), which interacts these surface code copies.\n", "\n", "$\n", @@ -650,248 +620,66 @@ "H =\\begin{pmatrix}\n", "1 & 1 & 0 \\\\\n", "0 & 1 & 1 \\\\\n", - "\\end{pmatrix} \n", + "\\end{pmatrix}\n", "$ and $H =\\begin{pmatrix}\n", "0 & 1 & 0 \\\\\n", "0 & 0 & 1 \\\\\n", - "\\end{pmatrix} \n", + "\\end{pmatrix}\n", "$. Lifting the first with $B_4(I)$ and the second with $B_4(P^{(1)})$ and summing the results will produce the parity check matrices for the [[52,4,4]] code.\n", "\n", - "Modify your HGP function to lift `Hx_a` and `Hz_a` with an arbitrary $B$ and `Hx_b` and `Hz_b` with the transpose of $B$. \n", + "Modify your HGP function to lift `Hx_a` and `Hz_a` with an arbitrary $B$ and `Hx_b` and `Hz_b` with the transpose of $B$.\n", + "\n", + "Then build the [[52,4,4]] code by lifting H_copy and H_int (defined below) with $B_4(I)$ and $B_4(P^{(1)})$, respectively. Confirm that Hz and Hx of the final result commute.\n", "\n", - "Then build the [[52,4,4]] code by lifting H_copy and H_int (defined below) with $B_4(I)$ and $B_4(P^{(1)})$ , respectively. Confirm that Hz and Hx of the final result commute.\n", - "

\n", - "
\n", - "\n" + "
" ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "id": "a6e53ef5-8216-4a7d-bf81-0dd51c1b77e6", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "First term in Hx\n", - "[[1 1 0 0 0 0 0 0 0]\n", - " [0 1 1 0 0 0 0 0 0]\n", - " [0 0 0 1 1 0 0 0 0]\n", - " [0 0 0 0 1 1 0 0 0]\n", - " [0 0 0 0 0 0 1 1 0]\n", - " [0 0 0 0 0 0 0 1 1]]\n", - "First term in Hx lifted\n", - "[[1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 
0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1]]\n", - "\n", - " Second term in Hx\n", - "[[1 0 0 0]\n", - " [0 1 0 0]\n", - " [1 0 1 0]\n", - " [0 1 0 1]\n", - " [0 0 1 0]\n", - " [0 0 0 1]]\n", - "Second term in Hx lifted\n", - "[[1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0]\n", - " [1 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0]\n", - " [0 1 0 0 0 0 0 0 0 1 0 0 0 0 0 0]\n", - " [0 0 1 0 0 0 0 0 0 0 1 0 0 0 0 0]\n", - " [0 0 0 1 0 0 0 0 0 0 0 1 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 1 0 0 0 0 0 0 0 1 0 0]\n", - " [0 0 0 0 0 0 1 0 0 0 0 0 0 0 1 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 1]\n", - " [0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1]]\n", - "\n", - " Full Lifted Hx\n", - "[[1 0 0 ... 0 0 0]\n", - " [0 1 0 ... 0 0 0]\n", - " [0 0 1 ... 0 0 0]\n", - " ...\n", - " [0 0 0 ... 1 0 0]\n", - " [0 0 0 ... 0 1 0]\n", - " [0 0 0 ... 
0 0 1]]\n", - "\n", - "First term in Hz\n", - "[[1 0 0 1 0 0 0 0 0]\n", - " [0 1 0 0 1 0 0 0 0]\n", - " [0 0 1 0 0 1 0 0 0]\n", - " [0 0 0 1 0 0 1 0 0]\n", - " [0 0 0 0 1 0 0 1 0]\n", - " [0 0 0 0 0 1 0 0 1]]\n", - "First term in Hz lifted\n", - "[[1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1]]\n", - "\n", - " Second term in Hz\n", - "[[1 0 0 0]\n", - " [1 1 0 0]\n", - " [0 1 0 0]\n", - " [0 0 1 0]\n", - " [0 0 1 1]\n", - " [0 0 0 1]]\n", - "Second term in Hz lifted\n", - "[[1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1]]\n", - "\n", - " Full Hz\n", - "[[1 0 0 ... 0 0 0]\n", - " [0 1 0 ... 0 0 0]\n", - " [0 0 1 ... 0 0 0]\n", - " ...\n", - " [0 0 0 ... 1 0 0]\n", - " [0 0 0 ... 0 1 0]\n", - " [0 0 0 ... 
0 0 1]]\n", - "\n", - " Hz times HxT\n", - "[[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]]\n" - ] - } - ], + "outputs": [], "source": [ + "# EXERCISE 4\n", + "\n", "H_copy = np.array([[1,1,0],\n", - " [0,1,1]]) \n", + " [0,1,1]])\n", "\n", "B_I_4 = np.array([[1,0,0,0],\n", " [0,1,0,0],\n", " [0,0,1,0],\n", - " [0,0,0,1]]) \n", + " [0,0,0,1]])\n", "\n", "H_int = np.array([[0,1,0],\n", - " [0,0,1]]) \n", + " [0,0,1]])\n", "\n", "B_P1_4 = np.array([[0,1,0,0],\n", " [0,0,1,0],\n", " [0,0,0,1],\n", - " [1,0,0,0]]) \n", + " [1,0,0,0]])\n", "\n", "\n", "\n", - "def LP(H, B):\n", - " \"\"\" \n", - " Function which perfoms lifted product construction of base 
matrices H with lift matrix B\n", - " \n", + "def LP(H: np.ndarray, B: np.ndarray) -> tuple[np.ndarray, np.ndarray]:\n", + " \"\"\"\n", + " Perform lifted product construction of base matrices H with lift matrix B.\n", + "\n", " Args:\n", - " H (np.array): Base parity check matrix\n", - " B (np.array): Binary representation of lift matrix\n", + " H: Base parity check matrix\n", + " B: Binary representation of lift matrix\n", "\n", " Returns:\n", - " Hz (np.array): Hz matrix from HGP construction\n", - " Hx (np.array): Hx matrix from HGP construction\n", + " Tuple of (Hz, Hx) matrices from LP construction\n", " \"\"\"\n", "\n", " rows, cols = H.shape\n", "\n", " I_rows = np.eye(rows, dtype=int)\n", " I_cols = np.eye(cols, dtype=int)\n", - " \n", + "\n", " GF2 = galois.GF(2) # allows mod 2 math.\n", - " \n", + "\n", " H = GF2(H)\n", " I_rows = GF2(I_rows, dtype=int)\n", " I_cols = GF2(I_cols, dtype=int)\n", @@ -902,244 +690,68 @@ " print(Hx_a)\n", "\n", " print(\"First term in Hx lifted\")\n", + " ##TODO##\n", " Hx_a = np.kron(Hx_a, B)\n", " print(Hx_a)\n", - " \n", + "\n", " print(\"\\n Second term in Hx\")\n", " Hx_b = np.kron(H.T,I_rows)\n", " print(Hx_b)\n", "\n", " print(\"Second term in Hx lifted\")\n", + " ##TODO##\n", " Hx_b = np.kron(Hx_b, B.T)\n", " print(Hx_b)\n", - " \n", + "\n", " print(\"\\n Full Lifted Hx\")\n", + " ##TODO##\n", " Hx = np.concatenate((Hx_a, Hx_b), axis=1)\n", " print(Hx)\n", - " \n", + "\n", " print(\"\\nFirst term in Hz\")\n", " Hz_a = np.kron(H, I_cols)\n", " print(Hz_a)\n", "\n", " print(\"First term in Hz lifted\")\n", + " ##TODO##\n", " Hz_a = np.kron(Hz_a, B)\n", " print(Hz_a)\n", - " \n", + "\n", " print(\"\\n Second term in Hz\")\n", " Hz_b = np.kron(I_rows, H.T)\n", " print(Hz_b)\n", "\n", " print(\"Second term in Hz lifted\")\n", + " ##TODO##\n", " Hz_b = np.kron(Hz_b, B.T)\n", " print(Hz_b)\n", - " \n", + "\n", " print(\"\\n Full Hz\")\n", + " ##TODO##\n", " Hz = np.concatenate((Hz_a, Hz_b), axis=1)\n", " print(Hz)\n", - " 
\n", + "\n", " print(\"\\n Hz times HxT\")\n", " print(Hz @ Hx.T)\n", "\n", " return Hz, Hx\n", "\n", - "Hz_lifted_copy, Hx_lifted_copy = LP(H_copy, B_I_4)\n" + "Hz_lifted_copy, Hx_lifted_copy = LP(H_copy, B_I_4)" ] }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "id": "e4390072-4f06-4840-92d4-8c3af3c9af1e", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "First term in Hx\n", - "[[0 1 0 0 0 0 0 0 0]\n", - " [0 0 1 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0]\n", - " [0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0]\n", - " [0 0 0 0 0 0 0 0 1]]\n", - "First term in Hx lifted\n", - "[[0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0]]\n", - "\n", - " Second term in Hx\n", - "[[0 0 0 0]\n", - " [0 0 0 0]\n", - " [1 0 0 0]\n", - " [0 1 0 0]\n", - " [0 0 1 0]\n", - " [0 0 0 1]]\n", - "Second term in Hx lifted\n", - "[[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0]]\n", - "\n", - " Full Lifted Hx\n", - "[[0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 0 0 0]\n", - " ...\n", - " [0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 1 0 0]\n", - " [0 0 0 ... 
0 1 0]]\n", - "\n", - "First term in Hz\n", - "[[0 0 0 1 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0]\n", - " [0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 1 0 0]\n", - " [0 0 0 0 0 0 0 1 0]\n", - " [0 0 0 0 0 0 0 0 1]]\n", - "First term in Hz lifted\n", - "[[0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0]]\n", - "\n", - " Second term in Hz\n", - "[[0 0 0 0]\n", - " [1 0 0 0]\n", - " [0 1 0 0]\n", - " [0 0 0 0]\n", - " [0 0 1 0]\n", - " [0 0 0 1]]\n", - "Second term in Hz lifted\n", - "[[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0]]\n", - "\n", - " Full Hz\n", - "[[0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 0 0 0]\n", - " ...\n", - " [0 0 0 ... 0 0 0]\n", - " [0 0 0 ... 1 0 0]\n", - " [0 0 0 ... 
0 1 0]]\n", - "\n", - " Hz times HxT\n", - "[[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]]\n" - ] - } - ], + "outputs": [], "source": [ "Hz_lifted_int, Hx_lifted_int = LP(H_int, B_P1_4)" ] }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "id": "f88736eb-349d-485a-82a7-75bf18176c03", "metadata": {}, "outputs": [], @@ -1153,97 +765,15 @@ "id": "5c737d2d-8a6d-4b48-a545-fcf373893379", "metadata": {}, "source": [ - "Now, analyze the Hz and Hx parity check matrices to 1) make sure they commute and 2) confirm the degrees are as expected. 
Each stabilizer should act on maximum 6 qubits and each qubit should be involved in no more than 6 checks (summing Z and X checks as the full parity check matrix would be a concatenation of both $H_x$ and $H_z$.). " + "Now, analyze the Hz and Hx parity check matrices to 1) make sure they commute and 2) confirm the degrees are as expected. Each stabilizer should act on maximum 6 qubits and each qubit should be involved in no more than 6 checks (summing Z and X checks as the full parity check matrix would be a concatenation of both $H_x$ and $H_z$)." ] }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "id": "14a7e5be-d6a6-4da0-a277-83f90d1ddada", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - " Hz times HxT\n", - "[[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n", - " [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]]\n", - "\n", - " Number of logical qubits encoded (data qubits minus checks)\n", - "4\n", - "\n", - "hz lifted:\n", - " variable degrees: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3]\n", - " check degrees: [4, 4, 4, 4, 6, 6, 6, 6, 5, 5, 5, 5, 4, 4, 4, 4, 6, 6, 6, 6, 5, 5, 5, 5]\n", - " rate = 0.538 (k = 28)\n", - " 4‑cycles:\n", - " vars (16,39) rows (4,7)\n", - " vars (17,36) rows (4,5)\n", - " vars (18,37) rows (5,6)\n", - " vars (19,38) rows (6,7)\n", - " vars (20,43) rows (8,11)\n", - " vars (21,40) rows (8,9)\n", - " vars (22,41) rows (9,10)\n", - " vars (23,42) rows (10,11)\n", - " vars (28,47) rows (16,19)\n", - " vars (29,44) rows (16,17)\n", - " vars (30,45) rows (17,18)\n", - " vars (31,46) rows (18,19)\n", - " vars (32,51) rows (20,23)\n", - " vars (33,48) rows (20,21)\n", - " vars (34,49) rows (21,22)\n", - " vars (35,50) rows (22,23)\n", - " all variables are checked\n", - "\n", - "hx lifted:\n", - " variable degrees: [1, 1, 1, 1, 3, 3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 3, 3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 3, 3, 3, 3, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3]\n", - " check degrees: [4, 4, 4, 4, 4, 4, 4, 4, 6, 6, 6, 6, 6, 6, 6, 6, 5, 5, 5, 5, 5, 5, 5, 5]\n", - " rate = 0.538 (k = 28)\n", - " 4‑cycles:\n", - " vars (16,39) rows (8,11)\n", - " vars (17,36) rows (8,9)\n", - " vars (18,37) rows (9,10)\n", - " vars (19,38) rows (10,11)\n", - " vars (20,43) rows (12,15)\n", - " vars (21,40) rows (12,13)\n", - " vars (22,41) rows (13,14)\n", - " vars (23,42) rows (14,15)\n", - " vars (28,47) rows (16,19)\n", - " vars (29,44) rows (16,17)\n", - " vars (30,45) rows (17,18)\n", - " vars (31,46) rows (18,19)\n", - " vars (32,51) rows (20,23)\n", - " vars 
(33,48) rows (20,21)\n", - " vars (34,49) rows (21,22)\n", - " vars (35,50) rows (22,23)\n", - " all variables are checked\n" - ] - } - ], + "outputs": [], "source": [ "#Confirm Hz and Hx still commute\n", "print(\"\\n Hz times HxT\")\n", @@ -1263,33 +793,23 @@ "id": "b00fa7f3-cac9-4148-a1f3-314acad888e1", "metadata": {}, "source": [ - "## 7.4 Decoding with CUDA-Q QEC\n", + "---\n", "\n", - "💻 Just a heads-up: This notebook is designed to be run on an environment with a GPU. If you don't have access to a GPU, feel free to read through the cells and explore the content without executing them. Enjoy learning! ⭐\n", + "## 7.4. Decoding with CUDA-Q QEC\n", "\n", "qLDPC codes are well suited for decoding with CUDA-Q's accelerated [BP+OSD decoder](https://nvidia.github.io/cudaqx/components/qec/introduction.html#pre-built-qec-decoders) found in the [CUDA-Q QEC library](https://nvidia.github.io/cudaqx/components/qec/introduction.html). If you want to learn more about BP+OSD decoding, complete lab 4 on decoding.\n", "\n", - "As we have not discussed logical observables for these codes, the code below will randomly generate an error vector with a 5% chance of an error on each qubit (Only assuming bit flip errors for now). The decoder will produce a logical error if the decoder cannot identify all of the errors given the syndrome. However, note that each of these errors might not produce a logical flip in practice, so this considers a worst case scenario. \n", + "As we have not discussed logical observables for these codes, the code below will randomly generate an error vector with a 5% chance of an error on each qubit (only assuming bit flip errors for now). The decoder will produce a logical error if the decoder cannot identify all of the errors given the syndrome. 
However, note that each of these errors might not produce a logical flip in practice, so this considers a worst case scenario.\n", "\n", "In the cell below, run the decoder using the $H_z$ matrix you produced for the [[52,4,4]] LP surface code. Carefully read the code and see if you can spot where errors and syndromes are generated, where decoder options are specified, and how the decoder is called. Run the code and note what the logical error rate is." ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "id": "22e4c57c-a3c5-488a-8d90-67f24c55ce8e", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1694 logical errors in 10000 shots\n", - "Number of shots that converged with BP: 7966\n", - "Average decoding time: 0.01 ms per shot\n" - ] - } - ], + "outputs": [], "source": [ "# Define parameters\n", "n = Hz_lifted_total.shape[1] # number of physical qubits\n", @@ -1331,7 +851,7 @@ "for i, r in enumerate(results):\n", " bp_converged_flags.append(r.converged)\n", " decoded_error = np.array(r.result, dtype=np.uint8)\n", - " \n", + "\n", " # Check if error was corrected\n", " if not np.array_equal(decoded_error, errors[i]):\n", " num_logical_errors += 1\n", @@ -1343,7 +863,7 @@ "\n", "# Optional: Single shot example\n", "single_syndrome = syndromes[0]\n", - "bp_converged, decoded_result, *_ = decoder.decode(single_syndrome.tolist())\n" + "bp_converged, decoded_result, *_ = decoder.decode(single_syndrome.tolist())" ] }, { @@ -1351,25 +871,15 @@ "id": "8f995a87-3bf1-464c-95f3-ca709f520a29", "metadata": {}, "source": [ - "Now, run the same code but use the `Hz_lifted_copy` parity check matrix. This code is simply four non-interacting copies of the surface code and hence a [[52,4,3]] code. How does the logical error rate compare? Can you see the benefit of the LP surface code as it adds one to the distance and outperforms copies of the surface code significantly." 
+ "Now, run the same code but use the `Hz_lifted_copy` parity check matrix. This code is simply four non-interacting copies of the surface code and hence a [[52,4,3]] code. How does the logical error rate compare? Can you see the benefit of the LP surface code as it adds one to the distance and outperforms copies of the surface code significantly." ] }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "id": "576353b7-5dcc-4764-8ae6-da3762c4d3cd", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2952 logical errors in 10000 shots\n", - "Number of shots that converged with BP: 8396\n", - "Average decoding time: 0.01 ms per shot\n" - ] - } - ], + "outputs": [], "source": [ "# Define parameters\n", "n = Hz_lifted_copy.shape[1] # number of physical qubits\n", @@ -1411,7 +921,7 @@ "for i, r in enumerate(results):\n", " bp_converged_flags.append(r.converged)\n", " decoded_error = np.array(r.result, dtype=np.uint8)\n", - " \n", + "\n", " # Check if error was corrected\n", " if not np.array_equal(decoded_error, errors[i]):\n", " num_logical_errors += 1\n", @@ -1431,10 +941,10 @@ "id": "b1f1e501-11fd-4943-a7dd-0908fb4f9410", "metadata": {}, "source": [ - "Each choice of $l$ and $L$ will produce a different LP surface code. It varies case by case if the LP approach is better than copies of the surface code. Examine the table below from [Lift-Connected Surface Codes](https://arxiv.org/pdf/2401.02911) where the code parameters were obtained via numerical simulation. Note, how the entries highlighted in green are cases where the LP construction results (top entry) in a code with the same overhead but a higher code distance compared to surface code copies (bottom entry).\n", + "Each choice of $l$ and $L$ will produce a different LP surface code. It varies case by case if the LP approach is better than copies of the surface code. 
Examine the table below from [Lift-Connected Surface Codes](https://arxiv.org/pdf/2401.02911) where the code parameters were obtained via numerical simulation. Note how the entries highlighted in green are cases where the LP construction results (top entry) in a code with the same overhead but a higher code distance compared to surface code copies (bottom entry).\n", "\n", "\n", - "\"Drawing\"" + "\"Table" ] }, { @@ -1442,9 +952,20 @@ "id": "03ba3a34-2173-4df2-9224-09044ad68675", "metadata": {}, "source": [ - "## Summary\n", + "## Conclusion\n", "\n", - "You now have a foundational understanding of qLDPC codes and how they differ from their classical counterparts. qLDPC codes are quite promising and will continue to be an active field of research. The methods covered in this work are just a sample of the different ways to construct qLDPC code parity check matrices yet lay the groundwork for you to understand other state of the art techniques like [bivariate bicycle codes](https://arxiv.org/abs/2308.07915)." + "You now have a foundational understanding of qLDPC codes and how they differ from their classical counterparts. qLDPC codes are quite promising and will continue to be an active field of research. The methods covered in this work are just a sample of the different ways to construct qLDPC code parity check matrices yet lay the groundwork for you to understand other state of the art techniques like [bivariate bicycle codes](https://arxiv.org/abs/2308.07915)." 
+ ] + }, + { + "cell_type": "markdown", + "id": "f49279a6cb904b95", + "metadata": {}, + "source": [ + "**Related Notebooks:**\n", + "* [QEC 101 Lab 6: Topological Codes](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/06_QEC_Topological_Codes.ipynb) — covers surface codes which are a special case of qLDPC codes\n", + "* [QEC 101 Lab 8: Decoder Metrics and Parallel Window Decoding](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/08_QEC_Decoder_metrics_and_parallel_window_decoding.ipynb) — extends the decoding concepts introduced in this lab\n", + "* [QEC 101 Lab 4: Decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb) — prerequisite lab covering belief propagation and Tanner graphs" ] } ], @@ -1462,11 +983,24 @@ "file_extension": ".py", "mimetype": "text/x-python", "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.10.0" + }, + "learning_goals": { + "cfqt_domain": "QCS", + "cfqt_subdomain": [ + "QCS.ERR", + "QCS.SW", + "QCS.ALG" + ], + "cfqt_proficiency": "B1", + "qist_competency_areas": [ + "quantum_error_correction", + "quantum_software", + "quantum_algorithms" + ], + "application_domain": "error_correction" } }, "nbformat": 4, "nbformat_minor": 5 -} +} \ No newline at end of file diff --git a/qec101/Solutions/09_QEC_Detector_Error_Models_Solution.ipynb b/qec101/Solutions/09_QEC_Detector_Error_Models_Solution.ipynb new file mode 100644 index 0000000..b5c77a4 --- /dev/null +++ b/qec101/Solutions/09_QEC_Detector_Error_Models_Solution.ipynb @@ -0,0 +1,786 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "790ebf04544e4a8f", + "metadata": {}, + "outputs": [], + "source": [ + "# SPDX-License-Identifier: Apache-2.0 AND CC-BY-NC-4.0\n", + "#\n", + "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain 
a copy of the License at\n", + "#\n", + "# http://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." + ] + }, + { + "cell_type": "markdown", + "id": "b40ae642-189f-4b06-a2ed-fd66286f295d", + "metadata": {}, + "source": [ + "# Detector Error Models and Real-Time Decoding — QEC 101 — Solutions\n", + "$\\renewcommand{\\ket}[1]{|#1\\rangle}$\n", + "\n", + "---\n", + "\n", + "So far the QEC 101 labs have focused primarily on QEC memory — the specific construction of logical qubit encodings with techniques like the repetition code, Steane and Shor codes, the surface code, and qLDPC codes. Though memory is foundational to QEC, it can only correct errors from a simple noise model that ignores multiple sources of noise when an algorithm or QEC routine is run.\n", + "\n", + "Similarly, we have not explored deployment of any codes in a real-time QEC workflow. This includes consideration of decoder metrics like throughput and reaction time covered in \"[Decoder Metrics and (Temporal) Parallel Window Decoding](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/08_QEC_Decoder_metrics_and_parallel_window_decoding.ipynb)\" but also specific implementation details like what APIs are necessary to perform QEC, keeping track of the correct data, and performing computations on the right processor.\n", + "\n", + "This lab will explore the limitations of memory experiments and motivate the need for a more robust tool for decoding how errors arise in practice during QEC rounds. 
The detector error model (DEM), introduced in a paper by Eisert and coworkers called [\"Designing fault-tolerant circuits using detector error models\"](https://quantum-journal.org/papers/q-2025-11-06-1905/pdf/), will be presented as a solution to this problem. You will explore some of the theory behind DEMs and some of the practical aspects of deploying them for real-time decoding.\n", + "\n", + "**What You Will Do:**\n", + "* Explore the limitations of QEC memory and why code capacity assumptions are insufficient for real-time decoding\n", + "* Construct detector error models (DEMs) for the repetition code under different noise models\n", + "* Compute detector error matrices and verify error identification using syndromes\n", + "* Analyze observables and undetectable error patterns using Tanner graphs\n", + "* Initialize a real-time decoder in CUDA-Q QEC using a DEM for the Steane code\n", + "\n", + "**Prerequisites:**\n", + "* Python and Jupyter familiarity\n", + "* Completion of QEC 101 labs 1–4, especially [Stabilizers](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) and [Decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb)\n", + "* Familiarity with the Steane code and repetition code encodings\n", + "* Basic understanding of noise models in quantum computing\n", + "\n", + "**Key Terminology:**\n", + "* Detector error model (DEM)\n", + "* Detector\n", + "* Detector matrix\n", + "* Measurement syndrome matrix\n", + "* Phenomenological noise model\n", + "* Code capacity\n", + "* Circuit-level noise model\n", + "\n", + "**CUDA-Q Syntax:**\n", + "* [`@cudaq.kernel`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.kernel) — defines a quantum kernel function\n", + "* [`cudaq.qvector`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.qvector) — allocates a register of qubits\n", + "* 
[`cudaq.sample`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.sample) — samples measurement outcomes from a kernel\n", + "* [`cudaq.set_target`](https://nvidia.github.io/cuda-quantum/latest/api/languages/python_api.html#cudaq.set_target) — selects simulation or hardware backend\n", + "* [`cudaq_qec.patch`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — logical qubit register with data and ancilla sub-registers\n", + "* [`cudaq_qec.get_code`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — loads a pre-built QEC code definition\n", + "* [`cudaq_qec.z_dem_from_memory_circuit`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — builds a detector error model from a memory circuit\n", + "* [`cudaq_qec.decoder_config`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — configures decoder parameters\n", + "* [`cudaq_qec.enqueue_syndromes`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — sends syndrome data to a decoder\n", + "* [`cudaq_qec.get_corrections`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — retrieves logical corrections from a decoder\n", + "* [`cudaq_qec.configure_decoders_from_file`](https://nvidia.github.io/cuda-quantum/latest/api/qec/python_api.html) — loads decoder configuration from YAML" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6b7e741a4f034a31", + "metadata": {}, + "outputs": [], + "source": [ + "## Instructions for Google Colab. 
You can ignore this cell if you have CUDA-Q\n", + "## set up locally with all required files on your system.\n", + "## Uncomment the lines below and execute this cell to install CUDA-Q.\n", + "\n", + "#!pip install cudaq -q\n", + "#\n", + "#!wget -q https://github.com/nvidia/cuda-q-academic/archive/refs/heads/main.zip\n", + "#!unzip -q main.zip\n", + "#!mv cuda-q-academic-main/qec101/Images ./Images" + ] + }, + { + "cell_type": "markdown", + "id": "6da893fac4ce4ed3", + "metadata": {}, + "source": [ + "> **Note:** Run the cell below to import all required packages.\n", + "> If you installed packages above, restart the kernel first\n", + "> (**Runtime → Restart session** in Colab, or **Kernel → Restart** in Jupyter)." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "a418336d-c945-4fff-87a4-e65d661f50de", + "metadata": {}, + "outputs": [], + "source": [ + "import sys\n", + "import os\n", + "sys.path.append(os.path.join(os.getcwd(), '..'))\n", + "\n", + "import numpy as np\n", + "\n", + "import cudaq\n", + "\n", + "## To install cudaq-qec (if not already installed), uncomment and run:\n", + "## !pip install cudaq-qec -q\n", + "import cudaq_qec as qec" + ] + }, + { + "cell_type": "markdown", + "id": "5b77ef22-3ad5-4be9-a5b2-7a65166fe133", + "metadata": {}, + "source": [ + "---\n", + "\n", + "## 1. Real-Time Decoding and the Limits of Code Capacity\n", + "\n", + "Real-time decoding is an extremely difficult problem that requires a number of key ingredients. First, a QEC code is needed that efficiently encodes logical qubits. There are lots of considerations here, including qubit topology, qubit count, code distance, etc. Recall that in general, the goal is a logical qubit encoding scheme that can capture as many errors as possible using as few physical qubits as possible.\n", + "\n", + "Next, the logical qubits need to perform the logic necessary to run fault-tolerant algorithms and ensure that QEC cycles can continuously run to catch errors. 
These cycles involve measuring syndromes from the QPU, decoding them in the decoder, and sending the identified errors back to the QPU as shown in the figure below.\n", + "\n", + "
\n", + " \"Diagram\n", + "
\n", + "\n", + "The notebook on [\"Decoder Metrics and Parallel Window Decoding\"](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/08_QEC_Decoder_metrics_and_parallel_window_decoding.ipynb) explored the specific constraints that arise here in detail. For example, the throughput of the decoder must be greater than the rate at which syndromes are generated from the QPU. If not, the process grinds to a halt. Likewise, the reaction time, or how quickly the decoder can return results to the QPU, will determine QPU wall clock speed.\n", + "\n", + "A problem not yet discussed in a practical setting is what actually goes into preparation of the decoder. In previous notebooks, the parity check matrix that defined the QEC code memory was always the input. This is perfectly fine if we prepare say $\\ket{0}_L$ with the Steane code, allow an error to occur, and *then* run a stabilizer round to check for the error (i.e., **code capacity** assumptions). The issue is that assuming errors only occur on the data qubits between state prep and the stabilizer rounds is quite unrealistic. In a physical QPU, errors can occur on any qubit at any time!\n", + "\n", + "Explore the [Steane Code Error Models widget](https://nvidia.github.io/cuda-q-academic/interactive_widgets/steane_code_error_models.html) to test this with the Steane code. Confirm an error before the first stabilizer begins can be correctly identified. Next, see what happens when an error occurs before an ancilla measurement (choose circuit-level noise model). 
Finally, see what happens when an error occurs between the stabilizer extractions (choose phenomenological noise model).\n", + "\n", + "It should now be clear that quantum memory is not enough and any practical QEC routine needs to feed the decoder something more robust than the parity check matrix for encoding the logical qubits.\n", + "\n", + "Part of the solution has already been explored in the notebook on [decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb). Recall that decoders do not just work in space but also decode in time, taking multiple syndrome extraction circuits at once. Consider the Steane code again. Assume that only a single measurement error occurs on the first of five syndrome extraction cycles. If only the first round was decoded in isolation, it would be incorrectly assumed that an error occurred. If all five are examined at once, it would instead be clear that a one-time measurement error occurred.\n", + "\n", + "This is just one example demonstrating why the decoder needs to be provided with an object more robust than a parity check matrix that can account for errors occurring anywhere in the circuit and the results of multiple stabilizer measurement rounds." + ] + }, + { + "cell_type": "markdown", + "id": "1545f893-b910-4126-932d-b0fc0c23e734", + "metadata": {}, + "source": [ + "---\n", + "\n", + "## 2. Detector Error Models (DEMs)\n", + "\n", + "**Detector error models (DEMs)** are powerful constructs that solve the problem described above by constructing a more robust detector error matrix that can initialize a decoder to flag errors at any possible location. To fully appreciate what a DEM is and why it works, it is worth walking through some of the theory. 
DEMs were first described in [\"Designing fault-tolerant circuits using detector error models\"](https://quantum-journal.org/papers/q-2025-11-06-1905/pdf/) and the next few sections of this lab will cover some of the the key definitions and let you try a number of exercises drawn from the paper.\n", + "\n", + "The first piece we need is to define a **detector** $d_i$. A detector is a sum of measurements corresponding to a parity constraint that arises from a specific circuit. For example, $m_1 \\oplus m_2 \\oplus m_3 =0$. The detector can be represented by a vector of length $m$ where $m$ is the number of measurements in a circuit and 1's indicate measurements included in the sum.\n", + "\n", + "$$d_1 =\\begin{bmatrix}\n", + "1 \\\\\n", + "1 \\\\\n", + "1\n", + "\\end{bmatrix}$$\n", + "\n", + "Stated otherwise, if $d_1$ is a valid detector for some circuit, it will always be true (absent of noise) that all three measurements result in a binary sum of $b_i$, where $b_i$ is the expected binary sum (Usually 0 for the examples in this lesson).\n", + "\n", + "
\n", + "\n", + "**Exercise 1:**\n", + "\n", + "Code the circuit below in CUDA-Q. Prove (by sampling or by stabilizer tracking) that $d_1$ is a detector for this circuit with an expected $b_i$=0.\n", + "\n", + "\n", + "\"Three-qubit\n", + "\n", + "
" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "9917061e-3cba-4ce6-90d8-6834c089da56", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{ 000:231 011:263 101:248 110:258 }\n", + "\n" + ] + } + ], + "source": [ + "# EXERCISE 1\n", + "\n", + "@cudaq.kernel\n", + "def example1():\n", + " reg = cudaq.qvector(3)\n", + "\n", + " h(reg[1])\n", + " x(reg[2])\n", + " h(reg[2])\n", + "\n", + " x.ctrl(reg[1], reg[0])\n", + " x.ctrl(reg[2], reg[1])\n", + "\n", + "print(cudaq.sample(example1))" + ] + }, + { + "cell_type": "markdown", + "id": "a3e7e471-eb42-4d8b-8527-487051a32af5", + "metadata": {}, + "source": [ + "The only way for this not to hold true for this circuit would be if an error occurred. We say that a detector is violated if\n", + "$d_i^Tm\\neq b_i$, where $m$ is a vector of measurement outcomes.\n", + "\n", + "More complex circuits can have multiple detectors which can combine to form a **detector matrix** where each row of the matrix is a different detector defined in the absence of noise. These detectors must be linearly independent otherwise they are providing redundant constraints.\n", + "\n", + "
\n", + "\n", + "**Exercise 2:**\n", + "\n", + "Consider the repetition code circuit below. Define a detector matrix which has four detectors. Use this detector matrix to prove that a measurement error would violate at least one detector. Hint: assume all qubits begin in the 0 state.\n", + "\n", + "
\n", + "\n", + "
\n", + " \"Circuit\n", + "
" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "5380ea4e-4eca-4ea7-904e-5f7c1f681a6f", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Single measurement error analysis:\n", + " Error on m_1: violated detectors = [1]\n", + " Error on m_2: violated detectors = [2]\n", + " Error on m_3: violated detectors = [3]\n", + " Error on m_4: violated detectors = [3 4]\n", + " Error on m_5: violated detectors = [4]\n" + ] + } + ], + "source": [ + "# EXERCISE 2\n", + "\n", + "D = np.array([[1, 0, 0, 0, 0],\n", + " [0, 1, 0 ,0, 0], \n", + " [0, 0, 1, 1, 0],\n", + " [0, 0, 0, 1, 1]])\n", + "\n", + "\n", + "print(\"Single measurement error analysis:\")\n", + "for i in range(D.shape[1]):\n", + " m = np.zeros(D.shape[1], dtype=int)\n", + " m[i] = 1 # apply measurement error on qubit i\n", + " violated = (D @ m) % 2\n", + " print(f\" Error on m_{i+1}: violated detectors = {np.where(violated)[0] + 1}\")" + ] + }, + { + "cell_type": "markdown", + "id": "32b5d458-8e99-4cdd-b8d2-0d3ea96cf165", + "metadata": {}, + "source": [ + "With detectors in hand, we can begin to expand our noise model and build out a few more pieces of the DEM. Consider the same circuit now with errors added before the stabilizers and before any measurement. This is a **circuit-level noise model** where \"any n-qubit Pauli error can occur after an n-qubit gate and any single Pauli error after state preparation.\"\n", + "\n", + "
\n", + " \"Circuit\n", + "
\n", + "\n", + "With this, or any other noise model, we can define a circuit error vector $e$ which has a 1 if error $E_i$ occurred and 0 if not. This allows us to construct the **measurement syndrome matrix** $\\Omega$ which maps errors to measurements. Each row corresponds to the measurements in the circuit, 5 in this case. Each column corresponds to one of the 8 possible error locations. 1's are populated if error $j$ would flip measurement $i$.\n", + "\n", + "In practice, $\\Omega$ needs to be computed via Pauli propagation for large and more complex circuits. Once obtained, we can compute a **detector error matrix** $H = D\\Omega$. $H$ is similar to what we previously called a parity check matrix, but now contains circuit-level information in addition to QEC memory information. The structure of $H$ is such that the rows correspond to detectors and the columns to errors. A 1 entry means error $j$ would violate detector $i$.\n", + "\n", + "Similar to a standard parity check matrix, we now call $s$ a syndrome obtained from $s = He$. This means that we can now identify errors based on which detectors are flagged, capturing information about QEC memory and the circuit-level noise!\n", + "\n", + "Similarly, DEMs can be extended to encompass an even broader **phenomenological noise model** where errors can happen between syndrome extraction gates. We will not cover this here, but the authors present so-called measurement schedules which can cleverly design syndrome extraction circuits such that a given routine is fault-tolerant by design and error will not propogate uncontrollably.\n", + "\n", + "
\n", + "\n", + "**Exercise 3:**\n", + "\n", + "Complete $\\Omega$ for the circuit-level noise model of the repetition code (image above with the 8 error locations). Use $\\Omega$ to compute $H$. For every weight-1 error, compute the syndrome and prove that each error corresponds to a unique syndrome. Note that we can capture more types of errors using a DEM, but are still constrained by the code distance ($d=3$) for how many errors can be flagged.\n", + "\n", + "
" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "fc4b4f28-de2f-43a3-92f0-ffaea8416a55", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[1 1 0 1 0 0 0 0]\n", + " [0 1 1 0 1 0 0 0]\n", + " [1 1 0 0 0 1 1 0]\n", + " [0 1 1 0 0 0 1 1]]\n", + "Weight 1 Error Syndromes\n", + "Error Location i = 0\n", + "[1. 0. 1. 0.]\n", + "Error Location i = 1\n", + "[1. 1. 1. 1.]\n", + "Error Location i = 2\n", + "[0. 1. 0. 1.]\n", + "Error Location i = 3\n", + "[1. 0. 0. 0.]\n", + "Error Location i = 4\n", + "[0. 1. 0. 0.]\n", + "Error Location i = 5\n", + "[0. 0. 1. 0.]\n", + "Error Location i = 6\n", + "[0. 0. 1. 1.]\n", + "Error Location i = 7\n", + "[0. 0. 0. 1.]\n" + ] + } + ], + "source": [ + "# EXERCISE 3\n", + "\n", + "omega = np.array([[1, 1, 0, 1, 0, 0, 0, 0],\n", + " [0, 1, 1 ,0, 1, 0, 0 ,0], \n", + " [1, 0, 0, 0, 0, 1, 0, 0],\n", + " [0, 1, 0, 0, 0, 0, 1, 0],\n", + " [0, 0, 1, 0, 0, 0, 0, 1]]) \n", + "\n", + "H = D @ omega\n", + "\n", + "print(H)\n", + "\n", + "\n", + "print(\"Weight 1 Error Syndromes\")\n", + "for i in range(8):\n", + " error = np.zeros(8)\n", + " error[i] = 1\n", + " print(\"Error Location i =\", i)\n", + " print(H @ error)" + ] + }, + { + "cell_type": "markdown", + "id": "f8aba453-e3a1-4040-a7db-cabbcbd3c8a6", + "metadata": {}, + "source": [ + "---\n", + "\n", + "## 3. Observables and DEMs\n", + "\n", + "We can utilize the DEM model to explore computation of observables where an observable $o_i$ is \"a binary sum of measurements, which equals the outcome of measuring a logical operator, for any logical state.\" For example, the figure below is a repetition code with two QEC cycles. A valid choice for a logical $Z$ observable is $o = m_5$. Without noise, a single measurement of any of the data qubits determines if the state is 0 if $\\ket{0_L}$ or 1 if $\\ket{1_L}$.\n", + "\n", + "
\n", + " \"Circuit\n", + "
\n", + "\n", + "Other choices are valid, for example, $m_5 \\oplus m_6 \\oplus m_7$ would also work. The key is to select a constraint that depends on the logical state where the other detectors do not depend on the logical state.\n", + "\n", + "
\n", + "\n", + "**Exercise 4:**\n", + "\n", + "Use the following detectors to build $D$ for the two-round repetition code circuit. Also code the circuit in CUDA-Q. Using the Tanner graph below, try to identify an error pattern that flips the observable and does not trip any detectors. How many errors are required for this? Test one of these cases using your CUDA-Q code by manually entering bitflips, sampling the circuit, and confirming the resulting bitstring(s) do not flag any detectors.\n", + "\n", + "$$d_1: m_1 = 0$$\n", + "$$d_2: m_2 = 0$$\n", + "$$d_3: m_1 \\oplus m_3 = 0$$\n", + "$$d_4: m_2 \\oplus m_4 = 0$$\n", + "$$d_5: m_3 \\oplus m_5 \\oplus m_6 = 0$$\n", + "$$d_6: m_4 \\oplus m_6 \\oplus m_7 = 0$$\n", + "\n", + "
\n", + "\n", + "
\n", + " \"Tanner\n", + "
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cfa43393-e0cb-4ca9-bbb8-656f87ef0a96", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{ 0000111:1000 }\n", + "\n", + "[0 0 0 0 0 0]\n" + ] + } + ], + "source": [ + "# EXERCISE 4\n", + "\n", + "D = np.array([[1, 0, 0, 0, 0, 0, 0],\n", + " [0, 1, 0 ,0, 0, 0, 0], \n", + " [1, 0, 1, 0, 0, 0, 0],\n", + " [0, 1, 0, 1, 0, 0, 0],\n", + " [0, 0, 1, 0, 1, 1, 0],\n", + " [0, 0, 0, 1, 0, 1, 1]]) \n", + "\n", + "\n", + "# Example with E_7 E_8 E_9 E_11\n", + "\n", + "@cudaq.kernel\n", + "def exercise4():\n", + " ancilla = cudaq.qvector(4)\n", + " reg = cudaq.qvector(3)\n", + "\n", + " #state prep 0_L\n", + " x.ctrl(reg[0], reg[1])\n", + " x.ctrl(reg[0], reg[2])\n", + " \n", + " #M1\n", + " x.ctrl(reg[0], ancilla[0])\n", + " x.ctrl(reg[1], ancilla[0])\n", + "\n", + " #M2\n", + " x.ctrl(reg[1], ancilla[1])\n", + " x.ctrl(reg[2], ancilla[1])\n", + "\n", + " x(reg[1])\n", + " x(reg[2])\n", + "\n", + " #M3\n", + " x.ctrl(reg[0], ancilla[2])\n", + " x.ctrl(reg[1], ancilla[2])\n", + "\n", + " #M4\n", + " x.ctrl(reg[1], ancilla[3])\n", + " x.ctrl(reg[2], ancilla[3])\n", + "\n", + " x(reg[0]) # E9\n", + " x(ancilla[2]) #E11\n", + " \n", + "sample = cudaq.sample(exercise4)\n", + "print(sample)\n", + "\n", + "for bitstring in sample:\n", + " bitstring_np = np.fromiter((int(b) for b in bitstring), dtype=int)\n", + "\n", + "print((D @ bitstring_np)%2)\n" + ] + }, + { + "cell_type": "markdown", + "id": "d67d7571-31f9-4cb8-8840-ba1c1be72eb0", + "metadata": {}, + "source": [ + "---\n", + "\n", + "## 4. Initializing a Decoder in CUDA-Q with a DEM for Real-Time Decoding\n", + "\n", + "Aside from when the decoding occurs, what differentiates real-time decoding from how we think about offline decoding?\n", + "\n", + "The answer revolves around how the workflow is implemented in practice and where specific computations occur. Recall the workflow below. 
Though it is correct in general, it is oversimplified and hides some of the nuance.\n", + "\n", + "
\n", + " \"Diagram\n", + "
\n", + "\n", + "For example, the decoder actually decodes syndromes that are XOR'd with the previous syndrome in order to flag differences (often called detector events). Where does this XOR calculation occur? It cannot be the QPU which only outputs raw syndrome measurements. Similarly, the decoding process might output inferred errors, but all that matters is whether a logical flip occurred. At some point this needs to be determined from the actual decoding solution.\n", + "\n", + "A more slightly more realistic (but still simplified) real-time workflow is the following where the QPU only produces measurements and receives determination if logical flips occurred or not. The decoder must then handle everything else.\n", + "\n", + "
\n", + " \"Diagram\n", + "
\n", + "\n", + "This means when writing code for quantum algorithms, we need APIs that can be called within a quantum kernel to send data to a decoder and retrieve results when finished. CUDA-Q QEC enables real-time decoding workflows that look like the following.\n", + "\n", + "The first key function is `qec.enqueue_syndromes` which takes measurement data from the QPU and sends it to a preconfigured decoder. The second is `qec.get_corrections` which simply returns the logical flips so they can be applied and the algorithm can proceed. Everything else is handled within a preinitialized decoder. Such a construct is important because a more complex quantum algorithm may be sending data to multiple decoders depending on the scenario. Each can be initialized and prepared to use any technique necessary for the job.\n", + "\n", + "So, what does this have to do with DEMs? A real-time decoding workflow involves preparation of a decoder using a DEM. This results in a robust decoder that can handle a range of error models and process syndromes from the QPU with a single function call.\n", + "\n", + "
\n", + "\n", + "**Exercise 5:**\n", + "\n", + "Work through and run the code cells below to initialize a decoder with a DEM in CUDA-Q QEC for the Steane code. Along the way, you might be asked to fix the code or enter values as prompted.\n", + "\n", + "
\n", + "\n", + "First, we need to define a few helper functions that prepare the logical zero state and measure the stabilizer checks for the Steane code. We need to define two kernels, one to prepare the Steane code $\\ket{0_L}$ state and another to define measurement of the $Z$ stabilizers to check for $X$ errors. (We are ignoring $Z$ errors in this example for simplicity.) Both kernels should take a `qec.patch` object as an input and act on its `logical.data[i]`, `logical.ancz[i]`, or `logical.ancx[i]` registers. You can directly import the Steane code from CUDA-Q QEC, but you can also define it manually which is helpful if you bring your own code.\n", + "\n", + "Both of these kernels take a `qec.patch` object as input which is similar to providing a register, but contains three registers for $X$ and $Z$ check ancillas and the data qubits." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "17c380b7-cd97-4b16-aef5-716c09290648", + "metadata": {}, + "outputs": [], + "source": [ + "# EXERCISE 5\n", + "\n", + "os.environ[\"CUDAQ_DEFAULT_SIMULATOR\"] = \"stim\"\n", + "\n", + "# Prepare logical |0⟩\n", + "@cudaq.kernel\n", + "def prep0(logical: qec.patch):\n", + " h(logical.data[4])\n", + " h(logical.data[5])\n", + " h(logical.data[6])\n", + "\n", + " x.ctrl(logical.data[0],logical.data[1])\n", + " x.ctrl(logical.data[0],logical.data[2])\n", + "\n", + " x.ctrl(logical.data[4],logical.data[0])\n", + " x.ctrl(logical.data[4],logical.data[1])\n", + " x.ctrl(logical.data[4],logical.data[3])\n", + "\n", + " x.ctrl(logical.data[5],logical.data[0])\n", + " x.ctrl(logical.data[5],logical.data[2])\n", + " x.ctrl(logical.data[5],logical.data[3])\n", + "\n", + " x.ctrl(logical.data[6],logical.data[1])\n", + " x.ctrl(logical.data[6],logical.data[2])\n", + " x.ctrl(logical.data[6],logical.data[3])\n", + "\n", + "\n", + "# Measure Z stabilizers for Steane code\n", + "@cudaq.kernel\n", + "def measure_stabilizers_z(logical: qec.patch) -> list[bool]:\n", + "\n", 
+ " for i in range(logical.ancz.size()):\n", + " reset(logical.ancz[i])\n", + "\n", + " h(logical.ancz)\n", + "\n", + " z.ctrl(logical.ancz[0],logical.data[0])\n", + " z.ctrl(logical.ancz[0],logical.data[1])\n", + " z.ctrl(logical.ancz[0],logical.data[3])\n", + " z.ctrl(logical.ancz[0],logical.data[4])\n", + "\n", + " z.ctrl(logical.ancz[1],logical.data[0])\n", + " z.ctrl(logical.ancz[1],logical.data[2])\n", + " z.ctrl(logical.ancz[1],logical.data[3])\n", + " z.ctrl(logical.ancz[1],logical.data[5])\n", + "\n", + " z.ctrl(logical.ancz[2],logical.data[1])\n", + " z.ctrl(logical.ancz[2],logical.data[2])\n", + " z.ctrl(logical.ancz[2],logical.data[3])\n", + " z.ctrl(logical.ancz[2],logical.data[6])\n", + "\n", + " h(logical.ancz)\n", + "\n", + " return [mz(logical.ancz[0]), mz(logical.ancz[1]), mz(logical.ancz[2])]" + ] + }, + { + "cell_type": "markdown", + "id": "3fc11bcb-34e0-4916-975c-d9f1dc196f1a", + "metadata": {}, + "source": [ + "Next, the main QEC circuit is prepared. This prepares a set of registers for the data and ancilla qubits called a patch stored as the variable `logical`. Enter the correct number of qubits for each register.\n", + "\n", + "After preparing the initial state, three syndrome extraction cycles are run. The syndromes are simply obtained from the measurement outcomes of the stabilizer circuit you defined above. These are fed into the function `enqueue_syndromes` which takes the decoder ID, the syndromes, and an optional flag for debugging. Note that in complex QEC workflows, it is likely that multiple decoders will be used, hence a command is needed to send syndromes to any specified decoder.\n", + "\n", + "The decoder then follows any instructions it has and completes decoding.\n", + "\n", + "Finally, the corrections are obtained from the same decoder ID with `get_corrections` and the number of logical observables (3 in the case of three rounds). The corrections are then applied to the data qubits before measurement." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3dd7790e-912a-4d15-aafc-e3412be4e21d", + "metadata": {}, + "outputs": [], + "source": [ + "@cudaq.kernel\n", + "def qec_circuit() -> list[bool]:\n", + " qec.reset_decoder(0)\n", + "\n", + " data = cudaq.qvector(7)\n", + " ancz = cudaq.qvector(3)\n", + " ancx = cudaq.qvector(0) # Keep 0 as we are ignoring Z errors\n", + " logical = patch(data, ancx, ancz)\n", + "\n", + " prep0(logical)\n", + "\n", + " # 3 rounds of syndrome measurement\n", + " for _ in range(3):\n", + " syndromes = measure_stabilizers_z(logical)\n", + " qec.enqueue_syndromes(0, syndromes, 0)\n", + "\n", + " # Get corrections and apply them\n", + " corrections = qec.get_corrections(0, 3, False)\n", + " for i in range(3):\n", + " if corrections[i]:\n", + " x(data[i])\n", + "\n", + " return mz(data)" + ] + }, + { + "cell_type": "markdown", + "id": "1c30a75f-99dd-4076-99b0-e0e940cdb1bd", + "metadata": {}, + "source": [ + "Now that the prerequisites are complete, we can begin the main workflow and decoder initialization. First, use `get_code` to load information about the Steane code from the pre-built code information.\n", + "\n", + "Next, define a noise model. In the cell below, a depolarization error model is applied to all CNOT gates with a probability of 0.01.\n", + "\n", + "Then, call `z_dem_from_memory_circuit` to build a DEM given the QEC code, the logical state prep circuit, the number of rounds, and the noise model. Under the hood, this calculates the syndrome measurement matrix ($\\Omega$) and selects a set of detectors. The set of detectors picked by this function is valid, but may not be optimal." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "23481a15-f72c-4c80-8a11-2e0d47508cd6", + "metadata": {}, + "outputs": [], + "source": [ + "code = qec.get_code(\"steane\", distance=3)\n", + "\n", + "# [Begin DEM Generation]\n", + "print(\"Step 1: Generating DEM...\")\n", + "cudaq.set_target(\"stim\")\n", + "\n", + "noise = cudaq.NoiseModel()\n", + "noise.add_all_qubit_channel(\"x\", cudaq.Depolarization2(0.01), 1)\n", + "\n", + "dem = qec.z_dem_from_memory_circuit(code, qec.operation.prep0, 3, noise)\n", + "\n", + "print(dem.detector_error_matrix)" + ] + }, + { + "cell_type": "markdown", + "id": "e0622e0b-cc6e-41cc-b898-5d7673669723", + "metadata": {}, + "source": [ + "The decoder can be set up by setting a number of configurations and saving them in a YAML file. Notice these settings involve selecting the decoder type, how large the decoding problem is, sparse representation of the DEM, etc. The main idea is that you can have lots of flexibility when defining a decoder to balance the many different tradeoffs and that many of the settings come directly from the structure of the DEM. The [CUDA-Q QEC docs](https://nvidia.github.io/cudaqx/components/qec/introduction.html) provide details on all of the different settings." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "029b9078-5119-48c8-b094-963bebebad90", + "metadata": {}, + "outputs": [], + "source": [ + "config = qec.decoder_config() \n", + "config.id = 0 # Sets decoder ID\n", + "config.type = \"multi_error_lut\" # Specifies decoding algorithm\n", + "config.block_size = dem.detector_error_matrix.shape[1]\n", + "config.syndrome_size = dem.detector_error_matrix.shape[0]\n", + "config.H_sparse = qec.pcm_to_sparse_vec(dem.detector_error_matrix)\n", + "config.O_sparse = qec.pcm_to_sparse_vec(dem.observables_flips_matrix)\n", + "\n", + "# Calculate numRounds from DEM (we send 1 additional round, so add 1)\n", + "num_syndromes_per_round = 3 \n", + "num_rounds = dem.detector_error_matrix.shape[0] // num_syndromes_per_round + 1\n", + "config.D_sparse = qec.generate_timelike_sparse_detector_matrix(num_syndromes_per_round, num_rounds, False)\n", + "lut_config = qec.multi_error_lut_config()\n", + "lut_config.lut_error_depth = 2\n", + "config.set_decoder_custom_args(lut_config)\n", + "\n", + "multi_config = qec.multi_decoder_config()\n", + "multi_config.decoders = [config]\n", + "\n", + "with open(\"config.yaml\", 'w') as f:\n", + " f.write(multi_config.to_yaml_str(200))\n", + "print(\"Saved config to config.yaml\")" + ] + }, + { + "cell_type": "markdown", + "id": "4919a40a-0000-49ca-a70f-ff10f6cb1319", + "metadata": {}, + "source": [ + "With the decoder settings specified, all you need to do is load the configuration file with `configure_decoders_from_file()` and then use `cudaq.run()` specifying the `qec_circuit` you defined above. Recall that in the CUDA-Q kernel, you designated a decoder ID, so your main kernel could send syndromes to multiple decoders each with different settings depending on the needs.\n", + "\n", + "This experiment is run on the `stim` backend, but it can easily retarget to a physical QPU to perform real-time error correction on a device!" 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "39e107cb-6fb0-42d4-bd08-a0509d2426fc", + "metadata": {}, + "outputs": [], + "source": [ + "print(\"\\nStep 2: Running circuit with decoding...\")\n", + "\n", + "qec.configure_decoders_from_file(\"config.yaml\")\n", + "\n", + "run_result = cudaq.run(qec_circuit, shots_count=10)\n", + "\n", + "print(\"Ran 10 shots\")\n", + "\n", + "qec.finalize_decoders()\n", + "\n", + "print(\"\\nDone!\")" + ] + }, + { + "cell_type": "markdown", + "id": "51e645f5-6162-4b78-963a-2ab5a56017af", + "metadata": {}, + "source": [ + "A similar workflow was used to obtain the first ever real-time decoding results from a physical QPU. NVIDIA partnered with Quantinuum to use a relay-BP decoder to decode a 30-qubit qLDPC code on Quantinuum's Helios device. Experiments resulted in a median decode time of 67 microseconds and over a 5X error reduction from 4.95 to 0.925 thanks to decoding. This was also enabled by NVIDIA's low-latency NVQLink interconnect. You can read more about the work in the blog [here](https://developer.nvidia.com/blog/nvidia-nvqlink-architecture-integrates-accelerated-computing-with-quantum-processors/).\n", + "\n", + "## Conclusion\n", + "\n", + "As real-time error correction continues to mature, it is critically important to develop intuition for what such a procedure requires. You now have an understanding of DEMs and how they provide a robust means to capture a whole host of different errors rather than just errors within the encoded QEC memory. CUDA-Q QEC provides the infrastructure to prepare a decoder with a DEM (among other settings) and run real-time error correction through simple API calls within a kernel.\n", + "\n", + "Everything discussed in this notebook complements previous lessons you have completed as it is agnostic of which specific QEC code you are using in a real-time experiment." 
+ ] + }, + { + "cell_type": "markdown", + "id": "74c698b892e74165", + "metadata": {}, + "source": [ + "**Related Notebooks:**\n", + "* [QEC Decoders](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/04_QEC_Decoders.ipynb) — covers decoder fundamentals that this notebook builds upon\n", + "* [QEC Decoder Metrics and Parallel Window Decoding](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/08_QEC_Decoder_metrics_and_parallel_window_decoding.ipynb) — explores decoder throughput and reaction time metrics referenced in this notebook\n", + "* [QEC Stabilizers](https://github.com/NVIDIA/cuda-q-academic/blob/main/qec101/02_QEC_Stabilizers.ipynb) — introduces stabilizer formalism used to construct detectors" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv (3.11.5)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.5" + }, + "learning_goals": { + "application_domain": "error_correction", + "cfqt_domain": "QCS", + "cfqt_proficiency": "B1", + "cfqt_subdomain": [ + "QCS.ERR", + "QCS.SW", + "QCS.ALG" + ], + "qist_competency_areas": [ + "quantum_error_correction", + "quantum_software", + "hpc_integration" + ] + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} \ No newline at end of file diff --git a/qis-examples/README.md b/qis-examples/README.md index 3172787..c09ed71 100644 --- a/qis-examples/README.md +++ b/qis-examples/README.md @@ -8,16 +8,6 @@ Learners should have familiarity with Jupyter notebooks and programming in Pytho --- ## Notebooks -The Jupyter notebooks in this folder are designed to run in an environment with CUDA-Q and Python. 
For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). A Dockerfile and requirements.txt are also included in the main directory of the repository to help get you set up. +The Jupyter notebooks in this folder are designed to run in an environment with CUDA-Q and Python. For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). -Otherwise, if you have set up an account in any of the platforms listed below, -simply log in to the account, then click on the icons below to run the notebooks on the listed platform. - - -| Notebook |qBraid[^1] | Brev | Google Colab[^2] | -| ----------- | ----------- | ----------- | ----------- | -|Grover's Algorithm |Launch On qBraid | [![ Click here to deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5)| [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/qis-examples/grovers.ipynb)| -| Check back here as new notebooks are added | | | | - -[^1]:If using qBraid Lab, use the [Environment Manager](https://docs.qbraid.com/lab/user-guide/environments) to install the CUDA-Q environment and then activate it in your notebook. In qBraid Lab you can switch to a GPU instance using the [Compute Manager](https://docs.qbraid.com/lab/user-guide/compute-manager). -[^2]:You will need to run the command `!pip install cudaq` in a python code block in each notebook to run on Google CoLab. +Otherwise, explore our [Learning Pathways page](https://nvidia.github.io/cuda-q-academic/learningpath.html) for additional cloud-based options to run these notebooks. 
diff --git a/quantum-applications-to-finance/README.md b/quantum-applications-to-finance/README.md index 68b6eda..40139f9 100644 --- a/quantum-applications-to-finance/README.md +++ b/quantum-applications-to-finance/README.md @@ -13,18 +13,6 @@ Notebook 3 demonstrates how quantum computing can optimize investment portfolios ## Notebooks -The Jupyter notebooks in this folder are designed to run in an environment with CUDA-Q with Python. For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). A Dockerfile and requirements.txt are also included in the main directory of the repository to help get you set up. - -Otherwise, if you have set up an account in any of the platforms listed below, -simply click on the icons below to run the notebooks on the listed platform. - - - -| Notebook |qBraid[^1] | Brev | Google Colab[^2] | -| ----------- | ----------- | ----------- | ----------- | -|Quantum Walks for Finance Part 1 |Launch On qBraid | [![ Click here to deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5)| [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/quantum-applications-to-finance/01_quantum_walks.ipynb)| -| Quantum Walks for Finance Part 2 |Launch On qBraid | [![ Click here to deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5) | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/quantum-applications-to-finance/02_quantum_walks.ipynb)| -| Portfolio Optimization |Launch On qBraid | [![ Click here to 
deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5)| [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/quantum-applications-to-finance/03_qchop.ipynb) | | | -[^1]:If using qBraid Lab, use the [Environment Manager](https://docs.qbraid.com/lab/user-guide/environments) to install the CUDA-Q environment and then activate it in your notebook. In qBraid Lab you can switch to a GPU instance using the [Compute Manager](https://docs.qbraid.com/lab/user-guide/compute-manager). -[^2]: You will need to run the command `!pip install cudaq` in a python code block in each notebook to run on Google CoLab. +The Jupyter notebooks in this folder are designed to run in an environment with CUDA-Q with Python. For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). +Otherwise, explore our [Learning Pathways page](https://nvidia.github.io/cuda-q-academic/learningpath.html) for additional cloud-based options to run these notebooks. diff --git a/quick-start-to-quantum/README.md b/quick-start-to-quantum/README.md index 9dfbf6a..cb2d805 100644 --- a/quick-start-to-quantum/README.md +++ b/quick-start-to-quantum/README.md @@ -18,18 +18,6 @@ ## Notebooks The Jupyter notebooks in this folder are designed to run in an environment with CUDA-Q with Python. ***Please note that to run the interactive widgets in the notebooks, you'll need to copy not only the notebooks, but also the interactive widget folder into your environment.*** -For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). 
A Dockerfile and requirements.txt are also included in the main directory of the repository to help get you set up. +For instructions on how to install CUDA-Q on your machine, check out this [guide](https://nvidia.github.io/cuda-quantum/latest/using/quick_start.html#install-cuda-q). -Otherwise, if you have set up an account in any of the platforms listed below, -simply click on the icons below to run the notebooks on the listed platform. - -| Notebook |qBraid[^1] | Brev | Google Colab[^2] | -| ----------- | ----------- | ----------- | ----------- | -|Lab 1 - Start Small: Learn quantum information and quantum programming with one qubit |Launch On qBraid | [![ Click here to deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5) | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/quick-start-to-quantum/01_quick_start_to_quantum.ipynb)| -| Lab 2 - Move onto Bigger and More Entangled Things: Program with multiple qubits |Launch On qBraid |[![ Click here to deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5) | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/quick-start-to-quantum/02_quick_start_to_quantum.ipynb)| -| Lab 3 - Add a Bit of Variation: Write your first variational program |Launch On qBraid | [![ Click here to deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5)| [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/quick-start-to-quantum/03_quick_start_to_quantum.ipynb)| -| 
Lab 4 - Converge on a Solution: Write your first hybrid variational program |Launch On qBraid | [![ Click here to deploy.](https://brev-assets.s3.us-west-1.amazonaws.com/nv-lb-dark.svg)](https://brev.nvidia.com/launchable/deploy/now?launchableID=env-39dN1v7RucHHgj97LILUlnXjnk5) | [![](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/NVIDIA/cuda-q-academic/blob/main/quick-start-to-quantum/04_quick_start_to_quantum.ipynb)| - -[^1]:If using qBraid Lab, use the [Environment Manager](https://docs.qbraid.com/lab/user-guide/environments) to install the CUDA-Q environment and then activate it in your notebook. In qBraid Lab you can switch to a GPU instance using the [Compute Manager](https://docs.qbraid.com/lab/user-guide/compute-manager). To run the optional interactive widgets in the notebooks, you'll need to copy the interactive widget folder into your environment. - -[^2]:You will need to add a code block with the command `!pip install cudaq` and execute it in each notebook to run on Google CoLab. To run the optional interactive widgets in the notebooks, you'll need to copy the interactive widget folder into your environment. +Otherwise, explore our [Learning Pathways page](https://nvidia.github.io/cuda-q-academic/learningpath.html) for additional cloud-based options to run these notebooks. 
diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 4f8eeb3..0000000 --- a/requirements.txt +++ /dev/null @@ -1,121 +0,0 @@ -anyio==4.9.0 -argon2-cffi==23.1.0 -argon2-cffi-bindings==21.2.0 -arrow==1.3.0 -astpretty==3.0.0 -asttokens==3.0.0 -async-lru==2.0.5 -attrs==25.3.0 -babel==2.17.0 -beautifulsoup4==4.13.4 -bleach==6.2.0 -certifi==2025.4.26 -cffi==1.17.1 -charset-normalizer==3.4.2 -comm==0.2.2 -contourpy==1.3.2 -cuda-quantum-cu12==0.12.0 -cudaq==0.12.0 -cudensitymat-cu12==0.3.1 -cupy-cuda12x==13.4.1 -cuquantum-cu12==25.6.0 -cuquantum-python-cu12==25.9.1 -custatevec-cu12==1.10.1 -cutensor-cu12==2.2.0 -cutensornet-cu12==2.9.1 -cycler==0.12.1 -debugpy==1.8.14 -decorator==5.2.1 -defusedxml==0.7.1 -exceptiongroup==1.3.0 -executing==2.2.0 -fastjsonschema==2.21.1 -fastrlock==0.8.3 -fonttools==4.58.0 -fqdn==1.5.1 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.28.1 -idna==3.10 -ipykernel==6.29.5 -ipython==8.36.0 -isoduration==20.11.0 -jedi==0.19.2 -Jinja2==3.1.6 -json5==0.12.0 -jsonpointer==3.0.0 -jsonschema==4.23.0 -jsonschema-specifications==2025.4.1 -jupyter-events==0.12.0 -jupyter-lsp==2.2.5 -jupyter_client==8.6.3 -jupyter_core==5.7.2 -jupyter_server==2.16.0 -jupyter_server_terminals==0.5.3 -jupyterlab==4.3.4 -jupyterlab_pygments==0.3.0 -jupyterlab_server==2.27.3 -kiwisolver==1.4.8 -MarkupSafe==3.0.2 -matplotlib==3.10.3 -matplotlib-inline==0.1.7 -mistune==3.1.3 -mpi4py==3.1.6 -nbclient==0.10.2 -nbconvert==7.16.6 -nbformat==5.10.4 -nest-asyncio==1.6.0 -notebook==7.3.2 -notebook_shim==0.2.4 -numpy==1.26.4 -nvidia-cublas-cu12==12.9.1.4 -nvidia-cuda-nvrtc-cu12==12.9.86 -nvidia-cuda-runtime-cu12==12.9.79 -nvidia-curand-cu12==10.3.10.19 -nvidia-cusolver-cu12==11.7.5.82 -nvidia-cusparse-cu12==12.5.10.65 -nvidia-nvjitlink-cu12==12.9.86 -overrides==7.7.0 -packaging==25.0 -pandocfilters==1.5.1 -parso==0.8.4 -pexpect==4.9.0 -pillow==11.2.1 -platformdirs==4.3.8 -prometheus_client==0.21.1 -prompt_toolkit==3.0.51 -psutil==7.0.0 -ptyprocess==0.7.0 
-pure_eval==0.2.3 -pycparser==2.22 -Pygments==2.19.1 -pyparsing==3.2.3 -python-dateutil==2.9.0.post0 -python-json-logger==3.3.0 -PyYAML==6.0.2 -pyzmq==26.4.0 -qutip==4.7.6 -referencing==0.36.2 -requests==2.32.3 -rfc3339-validator==0.1.4 -rfc3986-validator==0.1.1 -rpds-py==0.25.0 -scipy==1.12.0 -Send2Trash==1.8.3 -six==1.17.0 -sniffio==1.3.1 -soupsieve==2.7 -stack-data==0.6.3 -terminado==0.18.1 -tinycss2==1.4.0 -tomli==2.2.1 -tornado==6.4.2 -traitlets==5.14.3 -types-python-dateutil==2.9.0.20241206 -typing_extensions==4.13.2 -uri-template==1.3.0 -urllib3==2.4.0 -wcwidth==0.2.13 -webcolors==24.11.1 -webencodings==0.5.1 -websocket-client==1.8.0 \ No newline at end of file